// File: tennis-training-hub/client/src/lib/camera.ts
// Snapshot: 2026-03-16 22:23:58 +08:00 — 251 lines, 7.4 KiB, TypeScript
/** User-selectable capture quality tier for the camera pipeline. */
export type CameraQualityPreset = "economy" | "balanced" | "clarity";
/** Zoom/focus state read from a video track (see readTrackZoomState). */
export type CameraZoomState = {
/** True when the track reports a usable zoom range. */
supported: boolean;
/** Minimum zoom value; 1 when zoom is unsupported. */
min: number;
/** Maximum zoom value; 1 when zoom is unsupported. */
max: number;
/** Zoom slider step; falls back to 0.1 when the track reports none. */
step: number;
/** Current zoom level, clamped into [min, max]. */
current: number;
/** Active focus mode (e.g. "continuous"); "auto" when unknown. */
focusMode: string;
};
/** Outcome of requestCameraStream. */
export type CameraRequestResult = {
/** The acquired camera MediaStream. */
stream: MediaStream;
/** Facing mode the track actually reports; falls back to the requested one. */
appliedFacingMode: "user" | "environment";
/** True when the stream contains at least one audio track. */
audioEnabled: boolean;
/** True when a fallback video candidate or an audio-less retry was used. */
usedFallback: boolean;
};
/** Inclusive numeric range parsed from an untyped capability value. */
type NumericRange = {
min: number;
max: number;
step: number;
};
/** Restricts `value` to the inclusive range [min, max]. */
function clamp(value: number, min: number, max: number) {
  if (value > max) {
    return Math.max(min, max);
  }
  return value < min ? min : value;
}
/**
 * Safely extracts a numeric {min, max, step} range from an untyped
 * capability value (e.g. `MediaTrackCapabilities.zoom`).
 *
 * Fix: previously a NaN/Infinity min or max (or an inverted range) passed
 * through unchecked — `NaN - NaN <= 0.001` is false, so readTrackZoomState
 * would report `supported: true` with unusable bounds. Non-finite or
 * inverted ranges now return null.
 *
 * @returns the validated range, or null when the value is not a usable range.
 */
function parseNumericRange(value: unknown): NumericRange | null {
  if (!value || typeof value !== "object") {
    return null;
  }
  const candidate = value as { min?: unknown; max?: unknown; step?: unknown };
  if (typeof candidate.min !== "number" || typeof candidate.max !== "number") {
    return null;
  }
  // Reject NaN/Infinity bounds and inverted ranges that clamp() cannot honor.
  if (!Number.isFinite(candidate.min) || !Number.isFinite(candidate.max) || candidate.max < candidate.min) {
    return null;
  }
  return {
    min: candidate.min,
    max: candidate.max,
    // Browsers may omit step or report 0/NaN; fall back to a sensible default.
    step: typeof candidate.step === "number" && Number.isFinite(candidate.step) && candidate.step > 0
      ? candidate.step
      : 0.1,
  };
}
/**
 * Builds getUserMedia video constraints for a quality preset.
 *
 * @param facingMode requested camera direction.
 * @param isMobile selects the smaller mobile resolution tier.
 * @param preset quality tier; unknown values fall back to "balanced".
 */
export function getCameraVideoConstraints(
  facingMode: "user" | "environment",
  isMobile: boolean,
  preset: CameraQualityPreset,
): MediaTrackConstraints {
  // [width, height] per device class, plus the frame-rate cap per tier.
  const tiers: Record<CameraQualityPreset, { mobile: [number, number]; desktop: [number, number]; fps: number }> = {
    economy: { mobile: [960, 540], desktop: [1280, 720], fps: 24 },
    balanced: { mobile: [1280, 720], desktop: [1600, 900], fps: 30 },
    clarity: { mobile: [1280, 720], desktop: [1920, 1080], fps: 30 },
  };
  const tier = tiers[preset] ?? tiers.balanced;
  const [width, height] = isMobile ? tier.mobile : tier.desktop;
  return {
    facingMode,
    width: { ideal: width },
    height: { ideal: height },
    frameRate: { ideal: tier.fps, max: tier.fps },
  };
}
/**
 * Tags a getUserMedia video candidate with a stable string label so
 * structurally identical candidates can be de-duplicated by label.
 * `true` (any camera) gets a fixed sentinel label; constraint objects
 * are labeled by their JSON serialization.
 */
function normalizeVideoConstraintCandidate(candidate: MediaTrackConstraints | true) {
  return candidate === true
    ? { label: "camera-any", video: true as const }
    : { label: JSON.stringify(candidate), video: candidate };
}
/**
 * Produces an ordered, de-duplicated ladder of getUserMedia video
 * candidates, from the preferred preset down to "any camera at all".
 * requestCameraStream walks this list until one succeeds.
 */
function createFallbackVideoCandidates(
  facingMode: "user" | "environment",
  isMobile: boolean,
  preset: CameraQualityPreset,
) {
  const preferred = getCameraVideoConstraints(facingMode, isMobile, preset);
  const oppositeFacing: "user" | "environment" = facingMode === "environment" ? "user" : "environment";
  // Shared low-resolution dimensions used by the last-resort candidates.
  const smallDims = {
    width: { ideal: isMobile ? 640 : 960 },
    height: { ideal: isMobile ? 360 : 540 },
  } satisfies MediaTrackConstraints;
  // Ordered from most to least demanding.
  const ladder: (MediaTrackConstraints | true)[] = [
    preferred,
    { ...preferred, frameRate: undefined },
    { facingMode, ...smallDims },
    { facingMode: oppositeFacing, ...smallDims },
    smallDims,
    true,
  ];
  const unique = new Map<string, { video: MediaTrackConstraints | true }>();
  for (const entry of ladder) {
    const { label, video } = normalizeVideoConstraintCandidate(entry);
    if (!unique.has(label)) {
      unique.set(label, { video });
    }
  }
  return [...unique.values()];
}
/**
 * Opens a camera MediaStream, walking the ladder of fallback video
 * candidates (see createFallbackVideoCandidates) until getUserMedia
 * succeeds. When audio is requested, the whole ladder is tried with audio
 * first, then retried audio-less before giving up.
 *
 * @param options.facingMode requested camera direction.
 * @param options.isMobile selects the mobile resolution tier.
 * @param options.preset quality preset for the primary candidate.
 * @param options.audio audio constraints, or false for video-only.
 * @throws the last getUserMedia error when every combination fails.
 */
export async function requestCameraStream(options: {
facingMode: "user" | "environment";
isMobile: boolean;
preset: CameraQualityPreset;
audio?: false | MediaTrackConstraints;
}) {
const videoCandidates = createFallbackVideoCandidates(options.facingMode, options.isMobile, options.preset);
// Try the requested audio first; an audio-less retry is the last resort.
const audioCandidates = options.audio ? [options.audio, false] : [false];
let lastError: unknown = null;
for (const audio of audioCandidates) {
for (let index = 0; index < videoCandidates.length; index += 1) {
const video = videoCandidates[index]?.video ?? true;
try {
const stream = await navigator.mediaDevices.getUserMedia({ video, audio });
const videoTrack = stream.getVideoTracks()[0] || null;
// getSettings is feature-detected: not every engine implements it.
const settings = (
videoTrack && typeof (videoTrack as MediaStreamTrack & { getSettings?: () => unknown }).getSettings === "function"
? (videoTrack as MediaStreamTrack & { getSettings: () => unknown }).getSettings()
: {}
) as Record<string, unknown>;
// Prefer the facing mode the track actually reports over the requested one.
const appliedFacingMode = settings.facingMode === "user" ? "user" : settings.facingMode === "environment" ? "environment" : options.facingMode;
return {
stream,
appliedFacingMode,
audioEnabled: stream.getAudioTracks().length > 0,
// Fallback was used if a non-primary video candidate succeeded, or if
// audio was requested but only the audio-less retry worked. Precedence:
// index > 0 || (audio === false && Boolean(options.audio)).
usedFallback: index > 0 || audio === false && Boolean(options.audio),
} satisfies CameraRequestResult;
} catch (error) {
// Remember the failure and keep walking the candidate ladder.
lastError = error;
}
}
}
throw lastError instanceof Error ? lastError : new Error("无法访问摄像头");
}
/**
 * Target video bitrate (bits per second) for live-analysis encoding.
 * Unknown presets fall back to the "balanced" tier.
 */
export function getLiveAnalysisBitrate(preset: CameraQualityPreset, isMobile: boolean) {
  const bitrates: Record<CameraQualityPreset, [mobile: number, desktop: number]> = {
    economy: [900_000, 1_100_000],
    balanced: [1_300_000, 1_900_000],
    clarity: [1_900_000, 2_500_000],
  };
  const [mobile, desktop] = bitrates[preset] ?? bitrates.balanced;
  return isMobile ? mobile : desktop;
}
/**
 * Reads the zoom capability and focus configuration from a video track.
 * Both getCapabilities and getSettings are feature-detected, so this is
 * safe on engines that implement neither. Returns an unsupported state
 * (zoom pinned to 1) for a missing track or an unusable zoom range.
 */
export function readTrackZoomState(track: MediaStreamTrack | null): CameraZoomState {
  const unsupported = (focusMode: string): CameraZoomState => ({
    supported: false,
    min: 1,
    max: 1,
    step: 0.1,
    current: 1,
    focusMode,
  });
  if (!track) {
    return unsupported("auto");
  }
  const probe = track as MediaStreamTrack & {
    getCapabilities?: () => unknown;
    getSettings?: () => unknown;
  };
  const capabilities = (
    typeof probe.getCapabilities === "function" ? probe.getCapabilities() : {}
  ) as Record<string, unknown>;
  const settings = (
    typeof probe.getSettings === "function" ? probe.getSettings() : {}
  ) as Record<string, unknown>;
  const availableFocusModes: string[] = [];
  if (Array.isArray(capabilities.focusMode)) {
    for (const mode of capabilities.focusMode) {
      if (typeof mode === "string") {
        availableFocusModes.push(mode);
      }
    }
  }
  // Prefer the active setting, then continuous autofocus, then any reported mode.
  let focusMode = "auto";
  if (typeof settings.focusMode === "string") {
    focusMode = settings.focusMode;
  } else if (availableFocusModes.includes("continuous")) {
    focusMode = "continuous";
  } else if (availableFocusModes[0]) {
    focusMode = availableFocusModes[0];
  }
  const zoomRange = parseNumericRange(capabilities.zoom);
  // A degenerate range (span <= 0.001) is treated as "no zoom".
  if (!zoomRange || zoomRange.max - zoomRange.min <= 0.001) {
    return unsupported(focusMode);
  }
  const current = typeof settings.zoom === "number"
    ? clamp(settings.zoom, zoomRange.min, zoomRange.max)
    : zoomRange.min;
  return {
    supported: true,
    min: zoomRange.min,
    max: zoomRange.max,
    step: zoomRange.step,
    current,
    focusMode,
  };
}
/**
 * Clamps `rawZoom` into the track's supported range and applies it, then
 * re-reads and returns the resulting zoom state. A missing track or an
 * unsupported zoom range is a no-op that returns the current state.
 */
export async function applyTrackZoom(track: MediaStreamTrack | null, rawZoom: number) {
  const before = readTrackZoomState(track);
  if (!track || !before.supported) {
    return before;
  }
  const targetZoom = clamp(rawZoom, before.min, before.max);
  try {
    // Some engines only accept zoom through the "advanced" constraint list…
    await track.applyConstraints({ advanced: [{ zoom: targetZoom }] } as unknown as MediaTrackConstraints);
  } catch {
    // …while others only accept it as a top-level constraint.
    await track.applyConstraints({ zoom: targetZoom } as unknown as MediaTrackConstraints);
  }
  return readTrackZoomState(track);
}