比较提交

..

3 次代码提交

作者 SHA1 备注 提交日期
cryptocommuniums-afk
63dbfd2787 fix live camera preview recovery 2026-03-17 07:39:22 +08:00
cryptocommuniums-afk
06b9701e03 docs record live camera runtime refresh rollout 2026-03-16 23:55:10 +08:00
cryptocommuniums-afk
8e9e4915e2 fix live camera runtime refresh and title recovery 2026-03-16 23:53:10 +08:00
修改 5 个文件,包含 336 行新增、38 行删除

查看文件

@@ -8,6 +8,42 @@ export type ChangeLogEntry = {
};
export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
{
version: "2026.03.17-live-camera-preview-recovery",
releaseDate: "2026-03-17",
repoVersion: "06b9701",
summary: "修复实时分析页标题乱码、同步观看残留状态导致的黑屏,以及切回本机摄像头后预览无法恢复的问题。",
features: [
"runtime 标题恢复逻辑新增更严格的乱码筛除与二次 UTF-8 解码兜底,`服...`、带替换字符的脏标题现在会优先恢复为正常中文,无法恢复时会安全回退到稳定默认标题",
"同步观看退出时会完整重置 viewer 轮询、连接标记和帧版本,不再把旧 viewer 状态残留到 owner 或空闲态,避免页面继续停留在黑屏或“等待同步画面”",
"本地摄像头预览新增独立重绑流程和多次 watchdog 重试,即使浏览器在首帧时没有及时绑定 `srcObject` 或 `play()` 被短暂打断,也会自动恢复预览",
"视频区域是否显示画面改为按当前 runtime 角色分别判断,避免 viewer 的旧连接状态误导 owner 模式,导致本地没有预览时仍隐藏占位提示",
],
tests: [
"pnpm check",
"pnpm vitest run client/src/lib/liveCamera.test.ts",
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"live camera\"",
"pnpm build",
"线上 smoke: `curl -I https://te.hao.work/`,并检查页面源码中的 `/assets/index-*.js`、`/assets/index-*.css`、`/assets/pose-*.js` 已切换到新构建且返回正确 MIME",
],
},
{
version: "2026.03.16-live-camera-runtime-refresh",
releaseDate: "2026-03-16",
repoVersion: "8e9e491",
summary: "修复实时分析页偶发残留在同步观看状态、标题乱码,以及摄像头预览绑定波动导致的启动失败。",
features: [
"live-camera 在打开拍摄引导、启用摄像头、开始分析前,都会先向服务端强制刷新 runtime 状态,避免旧的 viewer 锁残留导致本机明明已释放却仍无法启动",
"同步观看标题新增乱码恢复逻辑,可自动把 UTF-8 被误按 Latin-1 显示的标题恢复成正常中文,避免出现 `服...` 一类异常标题",
"摄像头启动链路改为以 `getUserMedia` 成功为准;即使本地预览 `<video>` 的 `srcObject` 或 `play()` 在当前浏览器里短暂失败,也不会直接把整次启动判死",
"e2e mock 的媒体流补齐为带假视频轨道的流对象,并把 viewer 回归改为校验“服务端 relay、无 viewer-signal”行为,减少和旧 P2P 逻辑混淆",
],
tests: [
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"live camera page exposes camera startup controls|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active\"",
"pnpm build",
"部署后线上 smoke: `https://te.hao.work/live-camera` 登录 H1 后可见空闲态“启动摄像头”入口,确认不再被残留 viewer 锁卡住;公开站点前端资源为 `assets/index-33wVjC4p.js` 与 `assets/index-tNGuStgv.css`",
],
},
{
version: "2026.03.16-live-viewer-server-relay",
releaseDate: "2026-03-16",

查看文件

@@ -219,6 +219,59 @@ function formatDuration(ms: number) {
return `${minutes.toString().padStart(2, "0")}:${seconds.toString().padStart(2, "0")}`;
}
function normalizeRuntimeTitle(value: string | null | undefined) {
if (typeof value !== "string") return "";
const trimmed = value.trim();
if (!trimmed) return "";
const suspicious = /[ÃÂÆÐÑØæåçéèêëïîôöûüœŠŽƒ€¦<E282AC>]/;
const control = /[\u0000-\u001f\u007f]/g;
const score = (text: string) => {
const cjkCount = text.match(/[\u3400-\u9fff]/g)?.length ?? 0;
const latinCount = text.match(/[A-Za-z0-9]/g)?.length ?? 0;
const whitespaceCount = text.match(/\s/g)?.length ?? 0;
const punctuationCount = text.match(/[()\-_:./]/g)?.length ?? 0;
const badCount = text.match(/[ÃÂÆÐÑØæåçéèêëïîôöûüœŠŽƒ€¦<E282AC>]/g)?.length ?? 0;
const controlCount = text.match(control)?.length ?? 0;
return (cjkCount * 3) + latinCount + whitespaceCount + punctuationCount - (badCount * 4) - (controlCount * 6);
};
const sanitize = (candidate: string) => {
const normalized = candidate.replace(control, "").trim();
if (!normalized || normalized.includes("<22>")) {
return "";
}
return score(normalized) > 0 ? normalized : "";
};
if (!suspicious.test(trimmed)) {
return sanitize(trimmed);
}
const candidates = [trimmed];
try {
const bytes = Uint8Array.from(Array.from(trimmed).map((char) => char.charCodeAt(0) & 0xff));
const decoded = new TextDecoder("utf-8").decode(bytes).trim();
if (decoded && decoded !== trimmed) {
candidates.push(decoded);
if (suspicious.test(decoded)) {
const decodedBytes = Uint8Array.from(Array.from(decoded).map((char) => char.charCodeAt(0) & 0xff));
const twiceDecoded = new TextDecoder("utf-8").decode(decodedBytes).trim();
if (twiceDecoded && twiceDecoded !== decoded) {
candidates.push(twiceDecoded);
}
}
}
} catch {
return sanitize(trimmed);
}
return candidates
.map((candidate) => sanitize(candidate))
.filter(Boolean)
.sort((left, right) => score(right) - score(left))[0] || "";
}
function isMobileDevice() {
if (typeof window === "undefined") return false;
return /Android|iPhone|iPad|iPod/i.test(navigator.userAgent) || window.matchMedia("(max-width: 768px)").matches;
@@ -648,6 +701,8 @@ export default function LiveCamera() {
const runtimeRole = (runtimeQuery.data?.role ?? "idle") as RuntimeRole;
const runtimeSession = (runtimeQuery.data?.runtimeSession ?? null) as RuntimeSession | null;
const runtimeSnapshot = runtimeSession?.snapshot ?? null;
const normalizedRuntimeTitle = normalizeRuntimeTitle(runtimeSession?.title);
const normalizedSnapshotTitle = normalizeRuntimeTitle(runtimeSnapshot?.title);
useEffect(() => {
avatarRenderRef.current = {
@@ -763,6 +818,14 @@ export default function LiveCamera() {
[displayVisibleSegments.length, knownRatio, liveScore?.overall, runtimeRole, runtimeSnapshot?.liveScore?.overall],
);
// Forces a fresh fetch of the live-camera runtime state from the server and
// returns the latest role/session pair. Falls back to the values currently in
// render scope when the refetch yields no data (e.g. on a failed request).
const refreshRuntimeState = useCallback(async () => {
  const result = await runtimeQuery.refetch();
  return {
    role: (result.data?.role ?? runtimeRole) as RuntimeRole,
    runtimeSession: (result.data?.runtimeSession ?? runtimeSession) as RuntimeSession | null,
  };
}, [runtimeQuery, runtimeRole, runtimeSession]);
useEffect(() => {
navigator.mediaDevices?.enumerateDevices().then((devices) => {
const cameras = devices.filter((device) => device.kind === "videoinput");
@@ -770,13 +833,44 @@ export default function LiveCamera() {
}).catch(() => undefined);
}, []);
useEffect(() => {
if (!cameraActive || !streamRef.current || !videoRef.current) return;
if (videoRef.current.srcObject !== streamRef.current) {
videoRef.current.srcObject = streamRef.current;
void videoRef.current.play().catch(() => undefined);
const bindLocalPreview = useCallback(async (providedStream?: MediaStream | null) => {
const stream = providedStream || streamRef.current;
const video = videoRef.current;
if (!stream || !video) {
return false;
}
}, [cameraActive, immersivePreview]);
if (video.srcObject !== stream) {
video.srcObject = stream;
}
video.muted = true;
video.defaultMuted = true;
video.playsInline = true;
await video.play().catch(() => undefined);
return video.srcObject === stream;
}, []);
useEffect(() => {
  // Local-preview watchdog: while this device owns the camera (not a viewer),
  // make sure the <video> element is actually showing the active stream.
  // Browsers occasionally drop srcObject or pause playback around the first
  // frame, so re-bind and retry a few times.
  if (!cameraActive || !streamRef.current || runtimeRole === "viewer") return;
  let cancelled = false;
  const ensurePreview = () => {
    if (cancelled) return;
    const video = videoRef.current;
    const stream = streamRef.current;
    if (!video || !stream) return;
    // Re-bind when the element lost the stream, never produced a frame
    // (videoWidth === 0), or got paused.
    if (video.srcObject !== stream || video.videoWidth === 0 || video.paused) {
      void bindLocalPreview(stream);
    }
  };
  ensurePreview();
  // Staggered retries cover slow first-frame delivery without polling forever.
  const timers = [300, 900, 1800].map((delay) => window.setTimeout(ensurePreview, delay));
  return () => {
    cancelled = true;
    timers.forEach((timer) => window.clearTimeout(timer));
  };
}, [bindLocalPreview, cameraActive, immersivePreview, runtimeRole]);
const ensureCompositeCanvas = useCallback(() => {
if (typeof document === "undefined") {
@@ -872,7 +966,7 @@ export default function LiveCamera() {
phase: phase ?? leaveStatusRef.current,
startedAt: sessionStartedAtRef.current || undefined,
durationMs: durationMsRef.current,
title: runtimeSession?.title ?? `实时分析 ${ACTION_META[currentActionRef.current].label}`,
title: normalizedRuntimeTitle || `实时分析 ${ACTION_META[currentActionRef.current].label}`,
sessionMode: sessionModeRef.current,
qualityPreset,
facingMode: facing,
@@ -890,7 +984,17 @@ export default function LiveCamera() {
unknownSegments: segmentsRef.current.filter((segment) => segment.isUnknown).length,
archivedVideoCount: archivedVideosRef.current.length,
recentSegments: segmentsRef.current.slice(-5),
}), [facing, mobile, qualityPreset, runtimeSession?.title]);
}), [facing, mobile, normalizedRuntimeTitle, qualityPreset]);
// Opens the capture setup guide, but first re-checks runtime ownership against
// the server so a stale viewer lock held by another device is caught up front
// instead of failing later in the camera-start flow.
const openSetupGuide = useCallback(async () => {
  const latest = await refreshRuntimeState();
  if (latest.role === "viewer") {
    setShowSetupGuide(false);
    toast.error("当前账号已有其他设备正在实时分析,请先切换到同步观看模式");
    return;
  }
  setShowSetupGuide(true);
}, [refreshRuntimeState]);
const uploadLiveFrame = useCallback(async (sessionId: string) => {
const compositeCanvas = ensureCompositeCanvas();
@@ -935,17 +1039,18 @@ export default function LiveCamera() {
frameRelayInFlightRef.current = false;
}, []);
const closeViewerPeer = useCallback(() => {
const closeViewerPeer = useCallback((options?: { clearFrameVersion?: boolean }) => {
if (viewerRetryTimerRef.current) {
window.clearTimeout(viewerRetryTimerRef.current);
viewerRetryTimerRef.current = 0;
}
viewerSessionIdRef.current = null;
if (videoRef.current && !cameraActive) {
videoRef.current.srcObject = null;
if (options?.clearFrameVersion) {
setViewerFrameVersion(0);
}
setViewerConnected(false);
}, [cameraActive]);
setViewerError("");
}, []);
const releaseRuntime = useCallback(async (phase: RuntimeSnapshot["phase"]) => {
if (!runtimeIdRef.current) return;
@@ -1076,10 +1181,12 @@ export default function LiveCamera() {
useEffect(() => {
if (runtimeRole !== "viewer" || !runtimeSession?.mediaSessionId) {
if (!cameraActive) {
closeViewerPeer();
closeViewerPeer({
clearFrameVersion: !cameraActive,
});
if (streamRef.current) {
void bindLocalPreview();
}
setViewerError("");
return;
}
@@ -1102,6 +1209,7 @@ export default function LiveCamera() {
}
};
}, [
bindLocalPreview,
cameraActive,
closeViewerPeer,
runtimeRole,
@@ -1162,7 +1270,8 @@ export default function LiveCamera() {
preferredZoom = zoomTargetRef.current,
preset: CameraQualityPreset = qualityPreset,
) => {
if (runtimeRole === "viewer") {
const latest = await refreshRuntimeState();
if (latest.role === "viewer") {
toast.error("当前账号已有其他设备正在实时分析,请切换到同步观看模式");
return;
}
@@ -1176,15 +1285,13 @@ export default function LiveCamera() {
preset,
});
streamRef.current = stream;
closeViewerPeer();
if (appliedFacingMode !== nextFacing) {
setFacing(appliedFacingMode);
}
if (videoRef.current) {
videoRef.current.srcObject = stream;
await videoRef.current.play();
}
await syncZoomState(preferredZoom, stream.getVideoTracks()[0] || null);
await bindLocalPreview(stream);
setCameraActive(true);
await syncZoomState(preferredZoom, stream.getVideoTracks()[0] || null);
if (usedFallback) {
toast.info("当前设备已自动切换到兼容摄像头模式");
}
@@ -1192,7 +1299,7 @@ export default function LiveCamera() {
} catch (error: any) {
toast.error(`摄像头启动失败: ${error?.message || "未知错误"}`);
}
}, [facing, mobile, qualityPreset, runtimeRole, syncZoomState]);
}, [bindLocalPreview, closeViewerPeer, facing, mobile, qualityPreset, refreshRuntimeState, syncZoomState]);
const switchCamera = useCallback(async () => {
const nextFacing: CameraFacing = facing === "user" ? "environment" : "user";
@@ -1417,12 +1524,13 @@ export default function LiveCamera() {
}, [flushSegment, liveScore, mobile, saveLiveSessionMutation, sessionMode, stopSessionRecorder]);
const startAnalysis = useCallback(async () => {
const latest = await refreshRuntimeState();
if (!cameraActive || !videoRef.current || !streamRef.current) {
toast.error("请先启动摄像头");
return;
}
if (analyzingRef.current || saving) return;
if (runtimeRole === "viewer") {
if (latest.role === "viewer") {
toast.error("当前设备处于同步观看模式,不能重复开启分析");
return;
}
@@ -1570,10 +1678,10 @@ export default function LiveCamera() {
appendFrameToSegment,
cameraActive,
closeBroadcastPeer,
refreshRuntimeState,
releaseRuntime,
runtimeAcquireMutation,
runtimeQuery,
runtimeRole,
saving,
sessionMode,
startBroadcastSession,
@@ -1632,9 +1740,15 @@ export default function LiveCamera() {
}, [analyzing, saving]);
const handleSetupComplete = useCallback(async () => {
const latest = await refreshRuntimeState();
if (latest.role === "viewer") {
setShowSetupGuide(false);
toast.error("当前账号已有其他设备正在实时分析,请切换到同步观看模式");
return;
}
setShowSetupGuide(false);
await startCamera(facing, zoomTargetRef.current, qualityPreset);
}, [facing, qualityPreset, startCamera]);
}, [facing, qualityPreset, refreshRuntimeState, startCamera]);
const displayLeaveStatus = runtimeRole === "viewer" ? (runtimeSnapshot?.phase ?? "idle") : leaveStatus;
const displayAction = runtimeRole === "viewer" ? (runtimeSnapshot?.currentAction ?? "unknown") : currentAction;
@@ -1673,12 +1787,12 @@ export default function LiveCamera() {
const runtimeSyncDelayMs = runtimeRole === "viewer" ? getRuntimeSyncDelayMs(runtimeSession?.lastHeartbeatAt) : null;
const runtimeSyncLabel = runtimeRole === "viewer" ? formatRuntimeSyncDelay(runtimeSyncDelayMs) : "";
const displayRuntimeTitle = runtimeRole === "viewer"
? (runtimeSnapshot?.title ?? runtimeSession?.title ?? "其他设备实时分析")
: (runtimeSession?.title ?? `实时分析 ${ACTION_META[currentAction].label}`);
? (normalizedSnapshotTitle || normalizedRuntimeTitle || "其他设备实时分析")
: (normalizedRuntimeTitle || `实时分析 ${ACTION_META[currentAction].label}`);
const viewerFrameSrc = runtimeRole === "viewer" && runtimeSession?.mediaSessionId
? getMediaAssetUrl(`/assets/sessions/${runtimeSession.mediaSessionId}/live-frame.jpg?ts=${viewerFrameVersion || runtimeSnapshot?.updatedAt || Date.now()}`)
: "";
const hasVideoFeed = cameraActive || viewerConnected;
const hasVideoFeed = runtimeRole === "viewer" ? viewerConnected : cameraActive;
const heroAction = ACTION_META[displayAction];
const rawActionMeta = ACTION_META[displayRawAction];
const pendingActionMeta = displayStabilityMeta.pendingAction ? ACTION_META[displayStabilityMeta.pendingAction] : null;
@@ -1698,7 +1812,7 @@ export default function LiveCamera() {
? "准备开始实时分析"
: "摄像头待启动";
const viewerModeLabel = runtimeSession?.title || "其他设备正在实时分析";
const viewerModeLabel = normalizedSnapshotTitle || normalizedRuntimeTitle || "其他设备正在实时分析";
const renderPrimaryActions = (rail = false) => {
const buttonClass = rail
@@ -1738,7 +1852,7 @@ export default function LiveCamera() {
<Button
data-testid={rail ? undefined : "live-camera-toolbar-start-button"}
className={buttonClass}
onClick={() => setShowSetupGuide(true)}
onClick={() => void openSetupGuide()}
>
<Camera className={rail ? "h-5 w-5" : "mr-2 h-4 w-4"} />
{!rail && "启动摄像头"}
@@ -2120,7 +2234,7 @@ export default function LiveCamera() {
{viewerConnected ? "刷新同步" : "获取同步画面"}
</Button>
) : (
<Button data-testid="live-camera-start-button" onClick={() => setShowSetupGuide(true)} className="rounded-2xl">
<Button data-testid="live-camera-start-button" onClick={() => void openSetupGuide()} className="rounded-2xl">
<Camera className="mr-2 h-4 w-4" />
</Button>

查看文件

@@ -1,5 +1,60 @@
# Tennis Training Hub - 变更日志
## 2026.03.17-live-camera-preview-recovery (2026-03-17)
### 功能更新
- `/live-camera` 的 runtime 标题恢复逻辑新增更严格的乱码筛除与二次 UTF-8 解码兜底,`服...` 这类异常标题会优先恢复为正常中文;无法恢复时会自动回退到稳定默认标题,避免继续显示脏字符串
- 同步观看退出时会完整重置 viewer 轮询、连接标记和帧版本,不再把旧的 viewer 状态带回 owner 或空闲态,修复退出同步后仍黑屏、仍显示“等待同步画面”的问题
- 本地摄像头预览增加独立重绑流程和多次 watchdog 重试,即使浏览器首帧没有及时绑定 `srcObject` 或 `play()` 被短暂中断,也会继续自动恢复本地预览
- 视频区域是否显示画面改为按当前 runtime 角色分别判断,避免 viewer 旧连接状态误导 owner 模式,导致本地没有预览时仍错误隐藏占位提示
### 测试
- `pnpm check`
- `pnpm vitest run client/src/lib/liveCamera.test.ts`
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"`
- `pnpm build`
- 线上 smoke`curl -I https://te.hao.work/`
- 线上 smoke`curl -I https://te.hao.work/assets/index-BJ7rV3xe.js`
- 线上 smoke`curl -I https://te.hao.work/assets/index-tNGuStgv.css`
- 线上 smoke`curl -I https://te.hao.work/assets/pose-CZKsH31a.js`
### 线上 smoke
- `https://te.hao.work/` 已切换到本次新构建
- 当前公开站点前端资源 revision`assets/index-BJ7rV3xe.js``assets/index-tNGuStgv.css``assets/pose-CZKsH31a.js`
- 已确认 `index`、`css`、`pose` 模块均返回 `200`,且 MIME 分别为 `application/javascript`、`text/css`、`application/javascript`,不再出现此前的模块脚本和样式被当成 `text/html` 返回的问题
### 仓库版本
- `06b9701`
## 2026.03.16-live-camera-runtime-refresh (2026-03-16)
### 功能更新
- `/live-camera` 在打开拍摄引导、启用摄像头、开始分析前,都会先向服务端强制刷新 runtime 状态,避免旧的同步观看锁残留导致本机明明已释放却仍无法启动
- 新增 runtime 标题乱码恢复逻辑,可自动把 UTF-8 被误按 Latin-1 显示的标题恢复成正常中文,避免出现 `服...` 一类异常标题
- 摄像头启动链路改为以 `getUserMedia` 成功为准;即使本地预览 `<video>` 的 `srcObject` 或 `play()` 在当前浏览器中短暂失败,也不会直接把整次启动判死
- e2e mock 的媒体流补齐为带假视频轨道的流对象,并把 viewer 回归改为校验“服务端 relay、无 viewer-signal”行为,避免继续按旧 P2P 逻辑断言
### 测试
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera page exposes camera startup controls|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active"`
- `pnpm build`
- 部署后线上 smoke登录 `H1` 后访问 `https://te.hao.work/live-camera`,确认空闲态“启动摄像头”入口可见,不再被残留 viewer 锁卡住
### 线上 smoke
- `https://te.hao.work/` 已切换到本次新构建
- 当前公开站点前端资源 revision`assets/index-33wVjC4p.js``assets/index-tNGuStgv.css`
- 真实验证已通过:登录 `H1` 后访问 `https://te.hao.work/live-camera`,页面会正常显示“摄像头未启动 / 启动摄像头”,说明旧的 viewer 锁残留不会再把空闲设备卡在同步观看模式
### 仓库版本
- `8e9e491`
## 2026.03.16-live-viewer-server-relay (2026-03-16)
### 功能更新

查看文件

@@ -82,7 +82,23 @@ test("live camera switches into viewer mode when another device already owns ana
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
});
test("live camera retries viewer stream when owner track is not ready on first attempt", async ({ page }) => {
test("live camera recovers mojibake viewer titles before rendering", async ({ page }) => {
  // Seed the mocked runtime session with a title whose UTF-8 bytes were
  // mis-decoded as Latin-1, then verify the page renders the repaired title.
  const state = await installAppMocks(page, { authenticated: true, liveViewerMode: true });
  const corruptedTitle = Buffer.from("服务端同步烟雾测试", "utf8").toString("latin1");
  const session = state.liveRuntime.runtimeSession;
  if (session) {
    session.title = corruptedTitle;
    session.snapshot = { ...session.snapshot, title: corruptedTitle };
  }
  await page.goto("/live-camera");
  // The recovered Chinese heading is visible; the raw mojibake never renders.
  await expect(page.getByRole("heading", { name: "服务端同步烟雾测试" })).toBeVisible();
  await expect(page.getByText(corruptedTitle)).toHaveCount(0);
});
test("live camera no longer opens viewer peer retries when server relay is active", async ({ page }) => {
const state = await installAppMocks(page, {
authenticated: true,
liveViewerMode: true,
@@ -91,9 +107,9 @@ test("live camera retries viewer stream when owner track is not ready on first a
await page.goto("/live-camera");
await expect(page.getByText("同步观看模式")).toBeVisible();
await expect.poll(() => state.viewerSignalConflictRemaining).toBe(0);
await expect.poll(() => state.mediaSession?.viewerCount ?? 0).toBe(1);
await expect(page.getByText(/同步观看中|重新同步/).first()).toBeVisible();
await expect.poll(() => state.viewerSignalConflictRemaining).toBe(1);
await expect.poll(() => state.mediaSession?.viewerCount ?? 0).toBe(0);
await expect(page.locator('img[alt="同步中的实时分析画面"]')).toBeVisible();
});
test("live camera archives overlay videos into the library after analysis stops", async ({ page }) => {

查看文件

@@ -866,6 +866,73 @@ export async function installAppMocks(
return points;
};
// Minimal stand-in for a video MediaStreamTrack so camera-startup code that
// reads track settings/capabilities runs in e2e tests without real hardware.
class FakeVideoTrack {
  kind = "video";
  enabled = true;
  muted = false;
  readyState = "live";
  id = "fake-video-track";
  label = "Fake Camera";
  // No-op: there is no underlying device to release.
  stop() {}
  getSettings() {
    // Fixed 720p@30 rear-facing profile, matching a typical mobile default.
    return {
      facingMode: "environment",
      width: 1280,
      height: 720,
      frameRate: 30,
    };
  }
  getCapabilities() {
    // Empty capabilities: callers treat this as "no adjustable controls".
    return {};
  }
  async applyConstraints() {
    // Accept any constraints without applying them.
    return undefined;
  }
}
// Minimal stand-in for an audio MediaStreamTrack, attached to fake streams
// only when the code under test requests audio.
class FakeAudioTrack {
  kind = "audio";
  enabled = true;
  muted = false;
  readyState = "live";
  id = "fake-audio-track";
  label = "Fake Mic";
  // No-op: there is no underlying device to release.
  stop() {}
  getSettings() {
    return {};
  }
  getCapabilities() {
    return {};
  }
  async applyConstraints() {
    // Accept any constraints without applying them.
    return undefined;
  }
}
const createFakeMediaStream = (withAudio = false) => {
const videoTrack = new FakeVideoTrack();
const audioTrack = withAudio ? new FakeAudioTrack() : null;
const tracks = audioTrack ? [videoTrack, audioTrack] : [videoTrack];
return {
active: true,
id: `fake-stream-${Math.random().toString(36).slice(2)}`,
getTracks: () => tracks,
getVideoTracks: () => [videoTrack],
getAudioTracks: () => (audioTrack ? [audioTrack] : []),
addTrack: () => undefined,
removeTrack: () => undefined,
clone: () => createFakeMediaStream(withAudio),
} as unknown as MediaStream;
};
class FakePose {
callback = null;
@@ -894,9 +961,19 @@ export async function installAppMocks(
value: async () => undefined,
});
Object.defineProperty(HTMLMediaElement.prototype, "srcObject", {
configurable: true,
get() {
return (this as HTMLMediaElement & { __srcObject?: MediaStream }).__srcObject ?? null;
},
set(value) {
(this as HTMLMediaElement & { __srcObject?: MediaStream }).__srcObject = value as MediaStream;
},
});
Object.defineProperty(HTMLCanvasElement.prototype, "captureStream", {
configurable: true,
value: () => new MediaStream(),
value: () => createFakeMediaStream(),
});
class FakeMediaRecorder extends EventTarget {
@@ -961,7 +1038,7 @@ export async function installAppMocks(
async setRemoteDescription(description: { type: string; sdp: string }) {
this.remoteDescription = description;
this.connectionState = "connected";
this.ontrack?.({ streams: [new MediaStream()] });
this.ontrack?.({ streams: [createFakeMediaStream()] });
this.onconnectionstatechange?.();
}
@@ -984,7 +1061,7 @@ export async function installAppMocks(
Object.defineProperty(navigator, "mediaDevices", {
configurable: true,
value: {
getUserMedia: async () => new MediaStream(),
getUserMedia: async (constraints?: { audio?: unknown }) => createFakeMediaStream(Boolean(constraints?.audio)),
enumerateDevices: async () => [
{ deviceId: "cam-1", kind: "videoinput", label: "Front Camera", groupId: "g1" },
{ deviceId: "cam-2", kind: "videoinput", label: "Back Camera", groupId: "g1" },