比较提交

..

20 次代码提交

作者 SHA1 备注 提交日期
cryptocommuniums-afk
f3f7e1982c Improve live camera relay buffering 2026-03-17 09:51:47 +08:00
cryptocommuniums-afk
63dbfd2787 fix live camera preview recovery 2026-03-17 07:39:22 +08:00
cryptocommuniums-afk
06b9701e03 docs record live camera runtime refresh rollout 2026-03-16 23:55:10 +08:00
cryptocommuniums-afk
8e9e4915e2 fix live camera runtime refresh and title recovery 2026-03-16 23:53:10 +08:00
cryptocommuniums-afk
634a4704c7 docs record live viewer relay rollout 2026-03-16 23:02:30 +08:00
cryptocommuniums-afk
bb46d26c0e feat relay live viewer frames through media service 2026-03-16 22:43:08 +08:00
cryptocommuniums-afk
bacd712dbc docs record camera fallback rollout 2026-03-16 22:26:52 +08:00
cryptocommuniums-afk
78a7c755e3 docs add camera startup fallback changelog 2026-03-16 22:24:50 +08:00
cryptocommuniums-afk
a211562860 fix camera startup fallbacks 2026-03-16 22:23:58 +08:00
cryptocommuniums-afk
09b1b95e2c docs record live viewer sync rollout 2026-03-16 19:31:40 +08:00
cryptocommuniums-afk
922a9fb63f feat sync live analysis viewer state 2026-03-16 19:19:46 +08:00
cryptocommuniums-afk
31bead3452 docs finalize rollout verification 2026-03-16 18:09:49 +08:00
cryptocommuniums-afk
a5103685fb docs record live analysis rollout 2026-03-16 18:08:03 +08:00
cryptocommuniums-afk
f9db6ef590 fix live analysis multi-device lock 2026-03-16 18:05:58 +08:00
cryptocommuniums-afk
13e59b8e8a docs: sync live analysis runtime changelog 2026-03-16 17:29:30 +08:00
cryptocommuniums-afk
2b72ef9200 fix: restore live analysis runtime migration 2026-03-16 17:29:06 +08:00
cryptocommuniums-afk
09cd5b4d85 docs: finalize live camera release notes 2026-03-16 16:44:18 +08:00
cryptocommuniums-afk
7aba508247 docs: refresh deployed asset revision 2026-03-16 16:41:35 +08:00
cryptocommuniums-afk
cf06de944f docs: sync changelog repo version 2026-03-16 16:39:47 +08:00
cryptocommuniums-afk
4e4122d758 feat: add live camera multi-device viewer mode 2026-03-16 16:39:14 +08:00
修改 20 个文件,包含 4756 行新增、966 行删除

查看文件

@@ -9,6 +9,13 @@ export type CameraZoomState = {
focusMode: string; focusMode: string;
}; };
// Outcome of requestCameraStream: the granted stream plus metadata about
// which fallbacks were actually applied while acquiring it.
export type CameraRequestResult = {
  // The MediaStream returned by getUserMedia.
  stream: MediaStream;
  // Lens direction actually in effect; may differ from the requested one
  // when the device had to fall back to the opposite camera.
  appliedFacingMode: "user" | "environment";
  // True when the stream carries at least one audio track.
  audioEnabled: boolean;
  // True when a degraded candidate was used (looser video constraints,
  // opposite lens, or audio dropped) instead of the preferred request.
  usedFallback: boolean;
};
type NumericRange = { type NumericRange = {
min: number; min: number;
max: number; max: number;
@@ -66,6 +73,98 @@ export function getCameraVideoConstraints(
} }
} }
/**
 * Tag a getUserMedia video-constraint candidate with a stable string label
 * so duplicate fallback candidates can later be deduplicated by label.
 * `true` (unconstrained video) gets the fixed label "camera-any"; concrete
 * constraint objects are labeled by their JSON serialization.
 */
function normalizeVideoConstraintCandidate(candidate: MediaTrackConstraints | true) {
  return candidate === true
    ? { label: "camera-any", video: true as const }
    : { label: JSON.stringify(candidate), video: candidate };
}
/**
 * Build the ordered getUserMedia fallback ladder for video constraints:
 * the preferred preset first, then the same preset without a frame-rate
 * cap, then low-resolution variants (requested lens, opposite lens, any
 * lens), and finally an unconstrained `video: true`. Candidates whose
 * serialized label repeats are kept only once, preserving first-seen order.
 */
function createFallbackVideoCandidates(
  facingMode: "user" | "environment",
  isMobile: boolean,
  preset: CameraQualityPreset,
) {
  const preferred = getCameraVideoConstraints(facingMode, isMobile, preset);
  const oppositeFacing = facingMode === "environment" ? "user" : "environment";
  const lowResSameLens = {
    facingMode,
    width: { ideal: isMobile ? 640 : 960 },
    height: { ideal: isMobile ? 360 : 540 },
  } satisfies MediaTrackConstraints;
  const lowResOppositeLens = {
    facingMode: oppositeFacing,
    width: { ideal: isMobile ? 640 : 960 },
    height: { ideal: isMobile ? 360 : 540 },
  } satisfies MediaTrackConstraints;
  const lowResAnyLens = {
    width: { ideal: isMobile ? 640 : 960 },
    height: { ideal: isMobile ? 360 : 540 },
  } satisfies MediaTrackConstraints;
  const ladder = [
    normalizeVideoConstraintCandidate(preferred),
    // Same preset, but drop the frame-rate cap (undefined keys vanish from
    // both the request and the JSON label).
    normalizeVideoConstraintCandidate({ ...preferred, frameRate: undefined }),
    normalizeVideoConstraintCandidate(lowResSameLens),
    normalizeVideoConstraintCandidate(lowResOppositeLens),
    normalizeVideoConstraintCandidate(lowResAnyLens),
    normalizeVideoConstraintCandidate(true),
  ];
  const byLabel = new Map<string, { video: MediaTrackConstraints | true }>();
  for (const candidate of ladder) {
    if (!byLabel.has(candidate.label)) {
      byLabel.set(candidate.label, { video: candidate.video });
    }
  }
  return [...byLabel.values()];
}
/**
 * Request a camera stream with graceful degradation.
 *
 * Tries each video-constraint candidate (strictest first, down to
 * `video: true`) with the requested audio constraints, then retries the
 * whole ladder with audio disabled, so a broken microphone cannot block
 * video capture.
 *
 * @param options.facingMode preferred lens direction
 * @param options.isMobile   whether mobile-sized fallback resolutions apply
 * @param options.preset     quality preset used for the preferred candidate
 * @param options.audio      audio constraints, or false/omitted for video-only
 * @returns the first granted stream, the facing mode actually applied,
 *   whether audio was captured, and whether any fallback was used.
 * @throws the last getUserMedia error when every candidate fails.
 */
export async function requestCameraStream(options: {
  facingMode: "user" | "environment";
  isMobile: boolean;
  preset: CameraQualityPreset;
  audio?: false | MediaTrackConstraints;
}): Promise<CameraRequestResult> {
  const videoCandidates = createFallbackVideoCandidates(options.facingMode, options.isMobile, options.preset);
  // When audio is requested, retry the whole video ladder without audio before giving up.
  const audioCandidates: Array<false | MediaTrackConstraints> = options.audio ? [options.audio, false] : [false];
  let lastError: unknown = null;
  for (const audio of audioCandidates) {
    for (let index = 0; index < videoCandidates.length; index += 1) {
      const video = videoCandidates[index]?.video ?? true;
      try {
        const stream = await navigator.mediaDevices.getUserMedia({ video, audio });
        const videoTrack = stream.getVideoTracks()[0] ?? null;
        // getSettings may be missing in some embedded/mocked environments.
        const settings = (
          videoTrack && typeof videoTrack.getSettings === "function" ? videoTrack.getSettings() : {}
        ) as Record<string, unknown>;
        const appliedFacingMode =
          settings.facingMode === "user"
            ? "user"
            : settings.facingMode === "environment"
              ? "environment"
              : options.facingMode;
        return {
          stream,
          appliedFacingMode,
          audioEnabled: stream.getAudioTracks().length > 0,
          // Fallback means a looser video candidate was used, or audio was
          // requested but had to be dropped.
          usedFallback: index > 0 || (audio === false && Boolean(options.audio)),
        } satisfies CameraRequestResult;
      } catch (error) {
        lastError = error;
      }
    }
  }
  throw lastError instanceof Error ? lastError : new Error("无法访问摄像头");
}
export function getLiveAnalysisBitrate(preset: CameraQualityPreset, isMobile: boolean) { export function getLiveAnalysisBitrate(preset: CameraQualityPreset, isMobile: boolean) {
switch (preset) { switch (preset) {
case "economy": case "economy":

查看文件

@@ -8,11 +8,191 @@ export type ChangeLogEntry = {
}; };
export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [ export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
{
version: "2026.03.17-live-camera-relay-buffer",
releaseDate: "2026-03-17",
repoVersion: "63dbfd2+relay-buffer",
summary:
"实时分析同步观看改为服务端滚动视频缓存,观看端不再轮询单帧图片;media 服务同时新增最近 60 秒缓冲和 30 分钟缓存清理。",
features: [
"live-camera owner 端的 60 秒合成录像分段现在会额外上传到 media relay 会话,观看端改为播放服务端生成的滚动 preview 视频,不再依赖 `live-frame.jpg` 单帧轮询",
"relay 会话只保留最近 60 秒分段,worker 会在新分段到达后按最新窗口重建 `preview.webm`,避免观看端继续看到旧一分钟缓存",
"超过 30 分钟无活动的 relay 会话、分段目录和公开缓存文件会自动清理,避免多端同步长期堆积无用缓存",
"实时分析 viewer 文案和占位提示同步调整为“缓冲最近 60 秒视频 / 加载缓存回放”,更贴近现在的服务端缓存播放行为",
"media preview 非归档阶段跳过 mp4 转码,Chrome 观看直接使用 webm,降低 worker 处理时延和 CPU 消耗",
],
tests: [
"cd media && go test ./...",
"pnpm vitest run client/src/lib/liveCamera.test.ts",
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera page exposes camera startup controls|live camera starts analysis and produces scores|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active"',
"pnpm check",
"pnpm build",
"线上 smoke: 部署后确认 `https://te.hao.work/` 已提供新构建而不是旧资源版本,`/live-camera` viewer 端进入“服务端缓存同步”路径并返回正确的 JS/CSS MIME",
],
},
{
version: "2026.03.17-live-camera-preview-recovery",
releaseDate: "2026-03-17",
repoVersion: "06b9701",
summary:
"修复实时分析页标题乱码、同步观看残留状态导致的黑屏,以及切回本机摄像头后预览无法恢复的问题。",
features: [
"runtime 标题恢复逻辑新增更严格的乱码筛除与二次 UTF-8 解码兜底,`服...`、带替换字符的脏标题现在会优先恢复为正常中文,无法恢复时会安全回退到稳定默认标题",
"同步观看退出时会完整重置 viewer 轮询、连接标记和帧版本,不再把旧 viewer 状态残留到 owner 或空闲态,避免页面继续停留在黑屏或“等待同步画面”",
"本地摄像头预览新增独立重绑流程和多次 watchdog 重试,即使浏览器在首帧时没有及时绑定 `srcObject` 或 `play()` 被短暂打断,也会自动恢复预览",
"视频区域是否显示画面改为按当前 runtime 角色分别判断,避免 viewer 的旧连接状态误导 owner 模式,导致本地没有预览时仍隐藏占位提示",
],
tests: [
"pnpm check",
"pnpm vitest run client/src/lib/liveCamera.test.ts",
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"',
"pnpm build",
"线上 smoke: `curl -I https://te.hao.work/`,并检查页面源码中的 `/assets/index-*.js`、`/assets/index-*.css`、`/assets/pose-*.js` 已切换到新构建且返回正确 MIME",
],
},
{
version: "2026.03.16-live-camera-runtime-refresh",
releaseDate: "2026-03-16",
repoVersion: "8e9e491",
summary:
"修复实时分析页偶发残留在同步观看状态、标题乱码,以及摄像头预览绑定波动导致的启动失败。",
features: [
"live-camera 在打开拍摄引导、启用摄像头、开始分析前,都会先向服务端强制刷新 runtime 状态,避免旧的 viewer 锁残留导致本机明明已释放却仍无法启动",
"同步观看标题新增乱码恢复逻辑,可自动把 UTF-8 被误按 Latin-1 显示的标题恢复成正常中文,避免出现 `服...` 一类异常标题",
"摄像头启动链路改为以 `getUserMedia` 成功为准;即使本地预览 `<video>` 的 `srcObject` 或 `play()` 在当前浏览器里短暂失败,也不会直接把整次启动判死",
"e2e mock 的媒体流补齐为带假视频轨道的流对象,并把 viewer 回归改为校验“服务端 relay、无 viewer-signal”行为,减少和旧 P2P 逻辑混淆",
],
tests: [
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera page exposes camera startup controls|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active"',
"pnpm build",
"部署后线上 smoke: `https://te.hao.work/live-camera` 登录 H1 后可见空闲态“启动摄像头”入口,确认不再被残留 viewer 锁卡住;公开站点前端资源为 `assets/index-33wVjC4p.js` 与 `assets/index-tNGuStgv.css`",
],
},
{
version: "2026.03.16-live-viewer-server-relay",
releaseDate: "2026-03-16",
repoVersion: "bb46d26",
summary:
"实时分析同步观看改为由 media 服务中转帧图,不再依赖浏览器之间的 P2P 视频连接。",
features: [
"owner 端现在会把带骨架、关键点和虚拟形象叠层的合成画布压缩成 JPEG 并持续上传到 media 服务",
"viewer 端改为直接拉取 media 服务中的最新同步帧图,不再建立 WebRTC viewer peer 连接,因此跨网络和多端观看更稳定",
"同步观看模式文案改为明确提示“通过 media 服务中转”,等待同步时也会自动轮询最新画面",
"media 服务新增 live-frame 上传与静态分发能力,并记录最近同步帧的更新时间,方便后续扩展成更高频的服务端中转流",
],
tests: [
"cd media && go test ./...",
"pnpm build",
"playwright-skill 线上 smoke: 先用 media 服务创建 relay session、上传 live-frame,并把 H1 的 `live_analysis_runtime` 注入为 active viewer 场景;随后访问 `https://te.hao.work/live-camera`,确认页面进入“同步观看模式”、同步帧来自 `/media/assets/sessions/.../live-frame.jpg`,且 `viewer-signal` 请求数为 0",
],
},
{
version: "2026.03.16-camera-startup-fallbacks",
releaseDate: "2026-03-16",
repoVersion: "a211562",
summary:
"修复部分设备上摄像头因后置镜头约束、分辨率约束或麦克风不可用而直接启动失败的问题。",
features: [
"live-camera 与 recorder 改为共用分级降级的摄像头请求流程,会在当前画质失败时自动降分辨率、降约束并回退到兼容镜头",
"当设备不支持默认后置摄像头或当前镜头不可用时,页面会自动切换到实际可用的镜头方向,避免直接报错后卡死在未启动状态",
"recorder 预览启动不再被麦克风权限或麦克风设备异常整体拖死;麦克风不可用时会自动回退到仅视频模式",
"兼容模式命中时前端会给出明确提示,方便区分“已自动降级成功”与“仍然无法访问摄像头”的场景",
],
tests: [
"pnpm build",
"部署后线上 smoke: `https://te.hao.work/` 已提供 `assets/index-CRxtWK07.js` 与 `assets/index-tNGuStgv.css`;通过注入 `getUserMedia` 回归验证 `/live-camera` 首轮高约束失败后会自动切到兼容摄像头模式,`/recorder` 在麦克风不可用时会自动回退到仅视频模式并继续启动预览",
],
},
{
version: "2026.03.16-live-analysis-viewer-full-sync",
releaseDate: "2026-03-16",
repoVersion: "922a9fb",
summary:
"多端同步观看改为按持有端快照完整渲染,另一设备可同步看到视频状态、模式、画质、虚拟形象和保存阶段信息。",
features: [
"viewer 端现在同步显示持有端的会话标题、训练模式、设备端、拍摄视角、画质模式、虚拟形象状态和最近同步时间",
"同步观看时的分析阶段、保存阶段、已完成状态也会跟随主端刷新,不再只显示本地默认状态",
"viewer 页面会自动关闭拍摄校准弹窗,避免同步观看时被“启用摄像头”流程遮挡",
"新增 viewer 同步信息卡,明确允许 1 秒级延迟,并持续显示最近心跳时间",
],
tests: [
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera switches into viewer mode|viewer stream|recorder blocks"',
"pnpm build",
"部署后线上 smoke: `https://te.hao.work/` 已提供 `assets/index-HRdM3fxq.js` 与 `assets/index-tNGuStgv.css`;同账号 H1 双端登录后,移动端 owner 可开始实时分析,桌面端 `/live-camera` 自动进入同步观看并显示主端信息、同步视频流,owner 点击结束分析后 viewer 会同步进入保存阶段",
],
},
{
version: "2026.03.16-live-analysis-lock-hardening",
releaseDate: "2026-03-16",
repoVersion: "f9db6ef",
summary:
"修复同账号多端实时分析在旧登录态下仍可重复占用摄像头的问题,补强同步观看重试、录制页占用锁,并修复部署后启动阶段长时间 502。",
features: [
"旧用户名登录 token 即使缺少 `sid`,现在也会按 token 本身派生唯一会话标识,不再把不同设备错误识别成同一持有端",
"同步观看模式新增 viewer 自动重试当持有端刚启动推流、viewer 首次连接返回 `viewer stream not ready` 时,会自动重连而不是一直黑屏",
"在线录制页接入实时分析占用锁;当其他设备正在 `/live-camera` 分析时,本页会禁止再次启动摄像头和录制",
"应用启动改为先监听 HTTP 端口、再后台串行执行教程图同步和标准库预热,修复新容器上线时公网长时间返回 502 的问题",
"线上 smoke 已确认 `https://te.hao.work/live-camera` 与 `/recorder` 都已切换到本次新构建,公开站点不再返回 502",
],
tests: [
"curl -I https://te.hao.work/",
"pnpm check",
"pnpm exec vitest run server/_core/sdk.test.ts server/features.test.ts",
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "viewer mode|viewer stream|recorder blocks"',
"pnpm build",
"线上 smoke: H1 手机端开启实时分析后,PC 端 `/live-camera` 自动进入同步观看并显示同步画面,`/recorder` 禁止启动摄像头;结束分析后会话可正常释放",
],
},
{
version: "2026.03.16-live-analysis-runtime-migration",
releaseDate: "2026-03-16",
repoVersion: "2b72ef9",
summary:
"修复实时分析因缺失 `live_analysis_runtime` 表导致的启动失败,并补齐迁移记录避免后续部署再次漏表。",
features: [
"生产库补建 `live_analysis_runtime` 表,并补写 `__drizzle_migrations` 中缺失的 `0011_live_analysis_runtime` 记录",
"仓库内 Drizzle migration journal 补齐 `0011_live_analysis_runtime` 条目,后续 `docker compose` 部署可正确感知该迁移",
"实时分析启动链路恢复,`/live-camera` 再次可以读取 runtime 锁并正常进入分析准备流程",
"线上 smoke 已确认 `https://te.hao.work/` 正在提供本次新构建,当前前端资源为 `assets/index-B3BN5hY-.js` 与 `assets/index-BL6GQzUF.css`",
],
tests: [
"pnpm check",
"pnpm exec vitest run server/features.test.ts",
"pnpm build",
"docker compose exec -T db mysql ... SHOW TABLES LIKE 'live_analysis_runtime'",
"curl -I https://te.hao.work/live-camera",
"Playwright smoke: 登录 `H1` 后访问 `/live-camera`,`analysis.runtimeGet` / `analysis.runtimeAcquire` / `analysis.runtimeRelease` 全部返回 200",
],
},
{
version: "2026.03.16-live-camera-multidevice-viewer",
releaseDate: "2026-03-16",
repoVersion: "4e4122d",
summary:
"实时分析新增同账号多端互斥和同步观看模式,分析持有端独占摄像头,其它端只能查看同步画面与核心识别结果。",
features: [
"同一账号在 `/live-camera` 进入实时分析后,会写入按用户维度的 runtime 锁,其他设备不能重复启动摄像头或分析",
"其他设备会自动进入“同步观看模式”,可订阅持有端的实时画面,并同步看到动作、评分、反馈、最近片段和归档段数",
"同步观看复用 media 服务的 WebRTC viewer 通道,传输的是带骨架、关键点和虚拟形象覆盖后的合成画面",
"runtime 锁按 session sid 区分持有端,兼容缺少 sid 的旧 token,超过 15 秒无心跳会自动判定为陈旧并释放",
"线上 smoke 已确认 `https://te.hao.work/live-camera` 已切换到本次新构建,公开站点正在提供这次发布的最新前端资源",
],
tests: [
"pnpm check",
"pnpm exec vitest run server/features.test.ts",
"go test ./... && go build ./... (media)",
"pnpm build",
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"',
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "recorder flow archives a session and exposes it in videos"',
"curl -I https://te.hao.work/live-camera",
],
},
{ {
version: "2026.03.16-live-analysis-overlay-archive", version: "2026.03.16-live-analysis-overlay-archive",
releaseDate: "2026-03-16", releaseDate: "2026-03-16",
repoVersion: "4fb2d09", repoVersion: "4fb2d09",
summary: "实时分析新增 60 秒自动归档录像,录制内容会保留骨架、关键点和虚拟形象叠层,并同步进入视频库。", summary:
"实时分析新增 60 秒自动归档录像,录制内容会保留骨架、关键点和虚拟形象叠层,并同步进入视频库。",
features: [ features: [
"实时分析开始后会自动录制合成画布,每 60 秒自动切段归档", "实时分析开始后会自动录制合成画布,每 60 秒自动切段归档",
"归档录像会保留原视频、骨架线、关键点和当前虚拟形象覆盖效果", "归档录像会保留原视频、骨架线、关键点和当前虚拟形象覆盖效果",
@@ -32,17 +212,15 @@ export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
version: "2026.03.15-live-analysis-leave-hint", version: "2026.03.15-live-analysis-leave-hint",
releaseDate: "2026-03-15", releaseDate: "2026-03-15",
repoVersion: "5c2dcf2", repoVersion: "5c2dcf2",
summary: "实时分析结束后增加离开提示,明确何时必须停留、何时可以安全关闭或切页。", summary:
"实时分析结束后增加离开提示,明确何时必须停留、何时可以安全关闭或切页。",
features: [ features: [
"分析进行中显示“不要关闭或切走页面”提示", "分析进行中显示“不要关闭或切走页面”提示",
"结束分析后保存阶段显示“请暂时停留当前页面”提示", "结束分析后保存阶段显示“请暂时停留当前页面”提示",
"保存成功后明确提示“现在可以关闭浏览器或切换到其他页面”", "保存成功后明确提示“现在可以关闭浏览器或切换到其他页面”",
"分析中和保存中挂接 beforeunload 提醒,减少误关页面导致的数据丢失", "分析中和保存中挂接 beforeunload 提醒,减少误关页面导致的数据丢失",
], ],
tests: [ tests: ["pnpm check", "pnpm build"],
"pnpm check",
"pnpm build",
],
}, },
{ {
version: "2026.03.15-training-generator-collapse", version: "2026.03.15-training-generator-collapse",
@@ -55,10 +233,7 @@ export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
"移动端继续直接展示完整生成器,避免隐藏关键操作", "移动端继续直接展示完整生成器,避免隐藏关键操作",
"未生成计划时点击“前往生成训练计划”会自动展开并滚动到生成面板", "未生成计划时点击“前往生成训练计划”会自动展开并滚动到生成面板",
], ],
tests: [ tests: ["pnpm check", "pnpm build"],
"pnpm check",
"pnpm build",
],
}, },
{ {
version: "2026.03.15-progress-time-actions", version: "2026.03.15-progress-time-actions",
@@ -71,10 +246,7 @@ export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
"展开态动作明细统一用中文动作标签展示", "展开态动作明细统一用中文动作标签展示",
"提醒页通知时间统一切换为 Asia/Shanghai", "提醒页通知时间统一切换为 Asia/Shanghai",
], ],
tests: [ tests: ["pnpm check", "pnpm build"],
"pnpm check",
"pnpm build",
],
}, },
{ {
version: "2026.03.15-session-changelog", version: "2026.03.15-session-changelog",
@@ -126,7 +298,7 @@ export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
], ],
tests: [ tests: [
"pnpm check", "pnpm check",
"pnpm exec vitest run server/features.test.ts -t \"video\\\\.\"", 'pnpm exec vitest run server/features.test.ts -t "video\\\\."',
"Playwright 真实站点完成 /videos 新增-编辑-删除全链路", "Playwright 真实站点完成 /videos 新增-编辑-删除全链路",
], ],
}, },
@@ -141,8 +313,6 @@ export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
"训练提醒通知", "训练提醒通知",
"通知历史管理", "通知历史管理",
], ],
tests: [ tests: ["教程库、提醒、通知相关测试通过"],
"教程库、提醒、通知相关测试通过",
],
}, },
]; ];

查看文件

@@ -14,11 +14,7 @@ export type ArchiveStatus =
| "completed" | "completed"
| "failed"; | "failed";
export type PreviewStatus = export type PreviewStatus = "idle" | "processing" | "ready" | "failed";
| "idle"
| "processing"
| "ready"
| "failed";
export type MediaMarker = { export type MediaMarker = {
id: string; id: string;
@@ -33,6 +29,7 @@ export type MediaSession = {
id: string; id: string;
userId: string; userId: string;
title: string; title: string;
purpose?: "recording" | "relay";
status: MediaSessionStatus; status: MediaSessionStatus;
archiveStatus: ArchiveStatus; archiveStatus: ArchiveStatus;
previewStatus: PreviewStatus; previewStatus: PreviewStatus;
@@ -50,6 +47,9 @@ export type MediaSession = {
previewUpdatedAt?: string; previewUpdatedAt?: string;
streamConnected: boolean; streamConnected: boolean;
lastStreamAt?: string; lastStreamAt?: string;
viewerCount?: number;
liveFrameUrl?: string;
liveFrameUpdatedAt?: string;
playback: { playback: {
webmUrl?: string; webmUrl?: string;
mp4Url?: string; mp4Url?: string;
@@ -61,11 +61,14 @@ export type MediaSession = {
markers: MediaMarker[]; markers: MediaMarker[];
}; };
const MEDIA_BASE = (import.meta.env.VITE_MEDIA_BASE_URL || "/media").replace(/\/$/, ""); const MEDIA_BASE = (import.meta.env.VITE_MEDIA_BASE_URL || "/media").replace(
/\/$/,
""
);
const RETRYABLE_STATUS = new Set([502, 503, 504]); const RETRYABLE_STATUS = new Set([502, 503, 504]);
function sleep(ms: number) { function sleep(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms)); return new Promise(resolve => setTimeout(resolve, ms));
} }
async function request<T>(path: string, init?: RequestInit): Promise<T> { async function request<T>(path: string, init?: RequestInit): Promise<T> {
@@ -76,7 +79,11 @@ async function request<T>(path: string, init?: RequestInit): Promise<T> {
const response = await fetch(`${MEDIA_BASE}${path}`, init); const response = await fetch(`${MEDIA_BASE}${path}`, init);
if (!response.ok) { if (!response.ok) {
const errorBody = await response.json().catch(() => ({})); const errorBody = await response.json().catch(() => ({}));
const error = new Error(errorBody.error || errorBody.message || `Media service error (${response.status})`); const error = new Error(
errorBody.error ||
errorBody.message ||
`Media service error (${response.status})`
);
if (RETRYABLE_STATUS.has(response.status) && attempt < 2) { if (RETRYABLE_STATUS.has(response.status) && attempt < 2) {
lastError = error; lastError = error;
await sleep(400 * (attempt + 1)); await sleep(400 * (attempt + 1));
@@ -86,7 +93,8 @@ async function request<T>(path: string, init?: RequestInit): Promise<T> {
} }
return response.json() as Promise<T>; return response.json() as Promise<T>;
} catch (error) { } catch (error) {
lastError = error instanceof Error ? error : new Error("Media request failed"); lastError =
error instanceof Error ? error : new Error("Media request failed");
if (attempt < 2) { if (attempt < 2) {
await sleep(400 * (attempt + 1)); await sleep(400 * (attempt + 1));
continue; continue;
@@ -106,6 +114,7 @@ export async function createMediaSession(payload: {
qualityPreset: string; qualityPreset: string;
facingMode: string; facingMode: string;
deviceKind: string; deviceKind: string;
purpose?: "recording" | "relay";
}) { }) {
return request<{ session: MediaSession }>("/sessions", { return request<{ session: MediaSession }>("/sessions", {
method: "POST", method: "POST",
@@ -114,12 +123,43 @@ export async function createMediaSession(payload: {
}); });
} }
export async function signalMediaSession(sessionId: string, payload: { sdp: string; type: string }) { export async function signalMediaSession(
return request<{ sdp: string; type: string }>(`/sessions/${sessionId}/signal`, { sessionId: string,
payload: { sdp: string; type: string }
) {
return request<{ sdp: string; type: string }>(
`/sessions/${sessionId}/signal`,
{
method: "POST", method: "POST",
headers: { "Content-Type": "application/json" }, headers: { "Content-Type": "application/json" },
body: JSON.stringify(payload), body: JSON.stringify(payload),
}); }
);
}
/**
 * Exchange a viewer SDP offer/answer with the media service for the given
 * session. Resolves with the assigned viewer id and the remote description.
 */
export async function signalMediaViewerSession(
  sessionId: string,
  payload: { sdp: string; type: string }
) {
  const path = `/sessions/${sessionId}/viewer-signal`;
  const init: RequestInit = {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload),
  };
  return request<{ viewerId: string; sdp: string; type: string }>(path, init);
}
export async function uploadMediaLiveFrame(sessionId: string, blob: Blob) {
return request<{ session: MediaSession }>(
`/sessions/${sessionId}/live-frame`,
{
method: "POST",
headers: { "Content-Type": blob.type || "image/jpeg" },
body: blob,
}
);
} }
export async function uploadMediaSegment( export async function uploadMediaSegment(
@@ -140,7 +180,12 @@ export async function uploadMediaSegment(
export async function createMediaMarker( export async function createMediaMarker(
sessionId: string, sessionId: string,
payload: { type: string; label: string; timestampMs: number; confidence?: number } payload: {
type: string;
label: string;
timestampMs: number;
confidence?: number;
}
) { ) {
return request<{ session: MediaSession }>(`/sessions/${sessionId}/markers`, { return request<{ session: MediaSession }>(`/sessions/${sessionId}/markers`, {
method: "POST", method: "POST",
@@ -164,6 +209,10 @@ export async function getMediaSession(sessionId: string) {
return request<{ session: MediaSession }>(`/sessions/${sessionId}`); return request<{ session: MediaSession }>(`/sessions/${sessionId}`);
} }
// Join a media-service asset path onto the configured media base URL,
// tolerating paths given with or without a leading slash.
export function getMediaAssetUrl(path: string) {
  const normalized = path.startsWith("/") ? path : `/${path}`;
  return `${MEDIA_BASE}${normalized}`;
}
export function formatRecordingTime(milliseconds: number) { export function formatRecordingTime(milliseconds: number) {
const totalSeconds = Math.max(0, Math.floor(milliseconds / 1000)); const totalSeconds = Math.max(0, Math.floor(milliseconds / 1000));
const minutes = Math.floor(totalSeconds / 60); const minutes = Math.floor(totalSeconds / 60);
@@ -178,7 +227,11 @@ export function pickRecorderMimeType() {
"video/webm;codecs=h264,opus", "video/webm;codecs=h264,opus",
"video/webm", "video/webm",
]; ];
return candidates.find((candidate) => window.MediaRecorder?.isTypeSupported(candidate)) || "video/webm"; return (
candidates.find(candidate =>
window.MediaRecorder?.isTypeSupported(candidate)
) || "video/webm"
);
} }
export function pickBitrate(preset: string, isMobile: boolean) { export function pickBitrate(preset: string, isMobile: boolean) {

文件差异内容过多而无法显示 加载差异

查看文件

@@ -31,7 +31,7 @@ import {
recognizeActionFrame, recognizeActionFrame,
stabilizeActionFrame, stabilizeActionFrame,
} from "@/lib/actionRecognition"; } from "@/lib/actionRecognition";
import { applyTrackZoom, getCameraVideoConstraints, readTrackZoomState } from "@/lib/camera"; import { applyTrackZoom, readTrackZoomState, requestCameraStream } from "@/lib/camera";
import { formatDateTimeShanghai } from "@/lib/time"; import { formatDateTimeShanghai } from "@/lib/time";
import { import {
Activity, Activity,
@@ -189,6 +189,10 @@ function summarizeActions(actionSummary: Record<ActionType, number>) {
export default function Recorder() { export default function Recorder() {
const { user } = useAuth(); const { user } = useAuth();
const utils = trpc.useUtils(); const utils = trpc.useUtils();
const runtimeQuery = trpc.analysis.runtimeGet.useQuery(undefined, {
refetchInterval: 1000,
refetchIntervalInBackground: true,
});
const finalizeTaskMutation = trpc.task.createMediaFinalize.useMutation({ const finalizeTaskMutation = trpc.task.createMediaFinalize.useMutation({
onSuccess: (data) => { onSuccess: (data) => {
setArchiveTaskId(data.taskId); setArchiveTaskId(data.taskId);
@@ -262,6 +266,9 @@ export default function Recorder() {
const mobile = useMemo(() => isMobileDevice(), []); const mobile = useMemo(() => isMobileDevice(), []);
const mimeType = useMemo(() => pickRecorderMimeType(), []); const mimeType = useMemo(() => pickRecorderMimeType(), []);
const runtimeRole = runtimeQuery.data?.role ?? "idle";
const liveAnalysisRuntime = runtimeQuery.data?.runtimeSession;
const liveAnalysisOccupied = runtimeRole === "viewer" && liveAnalysisRuntime?.status === "active";
const currentPlaybackUrl = mediaSession?.playback.mp4Url || mediaSession?.playback.webmUrl || ""; const currentPlaybackUrl = mediaSession?.playback.mp4Url || mediaSession?.playback.webmUrl || "";
const archiveTaskQuery = useBackgroundTask(archiveTaskId); const archiveTaskQuery = useBackgroundTask(archiveTaskId);
const archiveProgress = archiveTaskQuery.data?.progress ?? getArchiveProgress(mediaSession); const archiveProgress = archiveTaskQuery.data?.progress ?? getArchiveProgress(mediaSession);
@@ -402,14 +409,21 @@ export default function Recorder() {
preferredZoom = zoomTargetRef.current, preferredZoom = zoomTargetRef.current,
preset: keyof typeof QUALITY_PRESETS = qualityPreset, preset: keyof typeof QUALITY_PRESETS = qualityPreset,
) => { ) => {
if (liveAnalysisOccupied) {
const title = liveAnalysisRuntime?.title || "其他设备正在实时分析";
toast.error(`${title},当前设备不能再开启录制摄像头`);
throw new Error("当前账号已有其他设备正在实时分析");
}
try { try {
if (streamRef.current) { if (streamRef.current) {
streamRef.current.getTracks().forEach((track) => track.stop()); streamRef.current.getTracks().forEach((track) => track.stop());
streamRef.current = null; streamRef.current = null;
} }
const stream = await navigator.mediaDevices.getUserMedia({ const { stream, appliedFacingMode, audioEnabled, usedFallback } = await requestCameraStream({
video: getCameraVideoConstraints(nextFacingMode, mobile, preset), facingMode: nextFacingMode,
isMobile: mobile,
preset,
audio: { audio: {
echoCancellation: true, echoCancellation: true,
noiseSuppression: true, noiseSuppression: true,
@@ -426,6 +440,9 @@ export default function Recorder() {
suppressTrackEndedRef.current = false; suppressTrackEndedRef.current = false;
streamRef.current = stream; streamRef.current = stream;
if (appliedFacingMode !== nextFacingMode) {
setFacingMode(appliedFacingMode);
}
if (liveVideoRef.current) { if (liveVideoRef.current) {
liveVideoRef.current.srcObject = stream; liveVideoRef.current.srcObject = stream;
await liveVideoRef.current.play(); await liveVideoRef.current.play();
@@ -433,6 +450,12 @@ export default function Recorder() {
await syncZoomState(preferredZoom, stream.getVideoTracks()[0] || null); await syncZoomState(preferredZoom, stream.getVideoTracks()[0] || null);
setCameraError(""); setCameraError("");
setCameraActive(true); setCameraActive(true);
if (usedFallback) {
toast.info("当前设备已自动切换到兼容摄像头模式");
}
if (!audioEnabled) {
toast.warning("麦克风不可用,已切换为仅视频模式");
}
return stream; return stream;
} catch (error: any) { } catch (error: any) {
const message = error?.message || "无法访问摄像头"; const message = error?.message || "无法访问摄像头";
@@ -440,7 +463,7 @@ export default function Recorder() {
toast.error(`摄像头启动失败: ${message}`); toast.error(`摄像头启动失败: ${message}`);
throw error; throw error;
} }
}), [facingMode, mobile, qualityPreset, syncZoomState]); }), [facingMode, liveAnalysisOccupied, liveAnalysisRuntime?.title, mobile, qualityPreset, syncZoomState]);
const ensurePreviewStream = useCallback(async () => { const ensurePreviewStream = useCallback(async () => {
if (streamRef.current) { if (streamRef.current) {
@@ -849,6 +872,11 @@ export default function Recorder() {
toast.error("请先登录后再开始录制"); toast.error("请先登录后再开始录制");
return; return;
} }
if (liveAnalysisOccupied) {
const title = liveAnalysisRuntime?.title || "其他设备正在实时分析";
toast.error(`${title},当前设备不能同时开始录制`);
return;
}
try { try {
setMode("preparing"); setMode("preparing");
@@ -898,7 +926,21 @@ export default function Recorder() {
setMode("idle"); setMode("idle");
toast.error(`启动录制失败: ${error?.message || "未知错误"}`); toast.error(`启动录制失败: ${error?.message || "未知错误"}`);
} }
}, [ensurePreviewStream, facingMode, mimeType, mobile, qualityPreset, startActionSampling, startRealtimePush, startRecorderLoop, syncSessionState, title, user]); }, [
ensurePreviewStream,
facingMode,
liveAnalysisOccupied,
liveAnalysisRuntime?.title,
mimeType,
mobile,
qualityPreset,
startActionSampling,
startRealtimePush,
startRecorderLoop,
syncSessionState,
title,
user,
]);
const finishRecording = useCallback(async () => { const finishRecording = useCallback(async () => {
const session = currentSessionRef.current; const session = currentSessionRef.current;
@@ -1140,9 +1182,10 @@ export default function Recorder() {
data-testid="recorder-start-camera-button" data-testid="recorder-start-camera-button"
onClick={() => void startCamera()} onClick={() => void startCamera()}
className={buttonClass()} className={buttonClass()}
disabled={liveAnalysisOccupied}
> >
<Camera className={iconClass} /> <Camera className={iconClass} />
{labelFor("启动摄像头", "启动")} {labelFor(liveAnalysisOccupied ? "实时分析占用中" : "启动摄像头", liveAnalysisOccupied ? "占用" : "启动")}
</Button> </Button>
) : ( ) : (
<> <>
@@ -1150,9 +1193,10 @@ export default function Recorder() {
data-testid="recorder-start-recording-button" data-testid="recorder-start-recording-button"
onClick={() => void beginRecording()} onClick={() => void beginRecording()}
className={buttonClass("record")} className={buttonClass("record")}
disabled={liveAnalysisOccupied}
> >
<Circle className={`${iconClass} ${rail ? "fill-current" : "fill-current"}`} /> <Circle className={`${iconClass} ${rail ? "fill-current" : "fill-current"}`} />
{labelFor("开始录制", "录制")} {labelFor(liveAnalysisOccupied ? "实时分析占用中" : "开始录制", liveAnalysisOccupied ? "占用" : "录制")}
</Button> </Button>
<Button variant="outline" onClick={stopCamera} className={buttonClass("outline")}> <Button variant="outline" onClick={stopCamera} className={buttonClass("outline")}>
<VideoOff className={iconClass} /> <VideoOff className={iconClass} />
@@ -1362,6 +1406,23 @@ export default function Recorder() {
</Alert> </Alert>
) : null} ) : null}
{liveAnalysisOccupied ? (
<Alert className="border-amber-300/70 bg-amber-50 text-amber-950">
<ShieldAlert className="h-4 w-4" />
<AlertTitle></AlertTitle>
<AlertDescription>
{liveAnalysisRuntime?.title || "其他设备正在实时分析"}
{" "}
<a href="/live-camera" className="font-medium underline underline-offset-4">
</a>
{" "}
</AlertDescription>
</Alert>
) : null}
<div className="grid gap-4 xl:grid-cols-[minmax(0,1.7fr)_minmax(340px,0.9fr)]"> <div className="grid gap-4 xl:grid-cols-[minmax(0,1.7fr)_minmax(340px,0.9fr)]">
<section className="space-y-4"> <section className="space-y-4">
<Card className="overflow-hidden border-0 shadow-lg"> <Card className="overflow-hidden border-0 shadow-lg">

查看文件

@@ -1,5 +1,252 @@
# Tennis Training Hub - 变更日志 # Tennis Training Hub - 变更日志
## 2026.03.17-live-camera-relay-buffer (2026-03-17)
### 功能更新
- `/live-camera` 的同步观看改为播放 media 服务生成的滚动缓存视频,不再轮询 `live-frame.jpg` 单帧图片,因此观看端的画面会按最近 60 秒缓存视频平滑播放
- owner 端每个 60 秒的合成录像分段现在会额外上传到 `relay` 会话,worker 会在收到新分段后自动重建最近窗口的 `preview.webm`
- `relay` 会话只保留最近 60 秒视频分段,旧分段会从会话元数据和磁盘同步清理,避免观看端继续读到旧一分钟之前的缓存
- media worker 会自动清理超过 30 分钟无活动的 relay 会话、分段目录和公开缓存文件,降低磁盘堆积风险
- viewer 页面文案、加载提示和按钮文案已同步更新为“缓存视频 / 缓存回放”语义;预览阶段跳过 mp4 转码,Chrome 直接使用 webm,降低处理时延
### 测试
- `cd media && go test ./...`
- `pnpm vitest run client/src/lib/liveCamera.test.ts`
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera page exposes camera startup controls|live camera starts analysis and produces scores|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active"`
- `pnpm check`
- `pnpm build`
- 线上 smoke:部署后确认 `https://te.hao.work/` 已提供新构建而不是旧资源版本,`/live-camera` viewer 端进入“服务端缓存同步”路径,首页与资源文件返回正确 MIME
### 线上 smoke
- 部署完成后已确认 `https://te.hao.work/` 提供的是本次新构建,而不是旧资源版本
- `https://te.hao.work/live-camera` 的 viewer 端会走“服务端缓存同步”路径,不再请求旧的 `live-frame.jpg` 单帧同步
- 首页、主 JS、主 CSS 与 `pose` 模块均返回 `200` 和正确 MIME,未再出现脚本/样式被回退成 `text/html` 的问题
### 仓库版本
- `63dbfd2+relay-buffer`
## 2026.03.17-live-camera-preview-recovery (2026-03-17)
### 功能更新
- `/live-camera` 的 runtime 标题恢复逻辑新增更严格的乱码筛除与二次 UTF-8 解码兜底,`服...` 这类异常标题会优先恢复为正常中文;无法恢复时会自动回退到稳定默认标题,避免继续显示脏字符串
- 同步观看退出时会完整重置 viewer 轮询、连接标记和帧版本,不再把旧的 viewer 状态带回 owner 或空闲态,修复退出同步后仍黑屏、仍显示“等待同步画面”的问题
- 本地摄像头预览增加独立重绑流程和多次 watchdog 重试,即使浏览器首帧没有及时绑定 `srcObject``play()` 被短暂中断,也会继续自动恢复本地预览
- 视频区域是否显示画面改为按当前 runtime 角色分别判断,避免 viewer 旧连接状态误导 owner 模式,导致本地没有预览时仍错误隐藏占位提示
### 测试
- `pnpm check`
- `pnpm vitest run client/src/lib/liveCamera.test.ts`
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"`
- `pnpm build`
- 线上 smoke:`curl -I https://te.hao.work/`
- 线上 smoke:`curl -I https://te.hao.work/assets/index-BJ7rV3xe.js`
- 线上 smoke:`curl -I https://te.hao.work/assets/index-tNGuStgv.css`
- 线上 smoke:`curl -I https://te.hao.work/assets/pose-CZKsH31a.js`
### 线上 smoke
- `https://te.hao.work/` 已切换到本次新构建
- 当前公开站点前端资源 revision:`assets/index-BJ7rV3xe.js`、`assets/index-tNGuStgv.css`、`assets/pose-CZKsH31a.js`
- 已确认 `index``css``pose` 模块均返回 `200`,且 MIME 分别为 `application/javascript``text/css``application/javascript`,不再出现此前的模块脚本和样式被当成 `text/html` 返回的问题
### 仓库版本
- `06b9701`
## 2026.03.16-live-camera-runtime-refresh (2026-03-16)
### 功能更新
- `/live-camera` 在打开拍摄引导、启用摄像头、开始分析前,都会先向服务端强制刷新 runtime 状态,避免旧的同步观看锁残留导致本机明明已释放却仍无法启动
- 新增 runtime 标题乱码恢复逻辑,可自动把 UTF-8 被误按 Latin-1 显示的标题恢复成正常中文,避免出现 `服...` 一类异常标题
- 摄像头启动链路改为以 `getUserMedia` 成功为准;即使本地预览 `<video>``srcObject``play()` 在当前浏览器中短暂失败,也不会直接把整次启动判死
- e2e mock 的媒体流补齐为带假视频轨道的流对象,并把 viewer 回归改为校验“服务端 relay、无 viewer-signal”行为,避免继续按旧 P2P 逻辑断言
### 测试
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera page exposes camera startup controls|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active"`
- `pnpm build`
- 部署后线上 smoke:登录 `H1` 后访问 `https://te.hao.work/live-camera`,确认空闲态“启动摄像头”入口可见,不再被残留 viewer 锁卡住
### 线上 smoke
- `https://te.hao.work/` 已切换到本次新构建
- 当前公开站点前端资源 revision:`assets/index-33wVjC4p.js`、`assets/index-tNGuStgv.css`
- 真实验证已通过:登录 `H1` 后访问 `https://te.hao.work/live-camera`,页面会正常显示“摄像头未启动 / 启动摄像头”,说明旧的 viewer 锁残留不会再把空闲设备卡在同步观看模式
### 仓库版本
- `8e9e491`
## 2026.03.16-live-viewer-server-relay (2026-03-16)
### 功能更新
- `/live-camera` 的同步观看改为由 media 服务中转最新合成帧图,不再依赖浏览器之间的 P2P WebRTC viewer 连接
- owner 端会把“原视频 + 骨架/关键点 + 虚拟形象”的合成画布压缩成 JPEG 并持续上传到 media 服务
- viewer 端改为自动轮询 media 服务中的最新同步帧图,因此即使浏览器之间无法直连,也能继续看到同步画面和状态
- 同步观看模式文案已调整为明确提示“通过 media 服务中转”,等待阶段会继续自动刷新,而不是停留在 P2P 连接失败状态
- media 服务新增 live-frame 上传与静态分发能力,并记录最近同步帧时间,方便后续继续扩展更高频的服务端 relay
### 测试
- `cd media && go test ./...`
- `pnpm build`
- `playwright-skill` 线上 smoke:先用 media 服务创建 relay session、上传 live-frame,并把 `H1` 的 `live_analysis_runtime` 注入为 active viewer 场景;随后访问 `https://te.hao.work/live-camera`,确认页面进入“同步观看模式”、同步帧来自 `/media/assets/sessions/.../live-frame.jpg`,且 `viewer-signal` 请求数为 `0`
### 线上 smoke
- `https://te.hao.work/` 已切换到本次新构建
- 当前公开站点前端资源 revision`assets/index-BC-IupO8.js``assets/index-tNGuStgv.css`
- 真实验证已通过viewer 端进入“同步观看模式”后,画面由 media 服务静态分发的 `live-frame.jpg` 提供,已确认不再触发 `/viewer-signal` P2P 观看请求
### 仓库版本
- `bb46d26`
## 2026.03.16-camera-startup-fallbacks (2026-03-16)
### 功能更新
- 修复部分设备在 `/live-camera``/recorder` 中因默认后置镜头、分辨率或帧率约束不兼容而直接启动摄像头失败的问题
- 摄像头请求现在会自动按当前画质、去掉高约束、低分辨率、备用镜头、任意可用镜头依次降级重试
- `/recorder` 在麦克风不可用或麦克风权限未给出时,会自动回退到仅视频模式,不再让整次预览启动失败
- 如果实际启用的是兼容镜头或降级模式,页面会显示提示,帮助区分“自动修复成功”与“仍然无法访问摄像头”
### 测试
- `pnpm build`
- `playwright-skill` 线上 smoke通过注入 `getUserMedia` 回归验证 `/live-camera` 首轮高约束失败后会自动降级到兼容摄像头模式,`/recorder` 在麦克风不可用时会自动回退到仅视频模式并继续启动预览
### 线上 smoke
- `https://te.hao.work/` 已切换到本次新构建
- 当前公开站点前端资源 revision`assets/index-CRxtWK07.js``assets/index-tNGuStgv.css`
- 真实回归已通过:模拟高约束失败时,`/live-camera` 会提示“当前设备已自动切换到兼容摄像头模式”并继续启动;模拟麦克风不可用时,`/recorder` 会提示“麦克风不可用,已切换为仅视频模式”并继续显示录制入口
### 仓库版本
- `a211562`
## 2026.03.16-live-analysis-viewer-full-sync (2026-03-16)
### 功能更新
- 同账号多端同步观看时,viewer 端现在会按持有端 runtime snapshot 完整渲染,不再混用本地默认状态
- `/live-camera` viewer 端新增主端同步信息卡,可看到当前会话标题、训练模式、设备端、拍摄视角、画质模式、虚拟形象状态和最近同步时间
- viewer 端现在会同步显示主端当前处于“分析中 / 保存中 / 已保存 / 保存失败”的阶段状态
- viewer 页面在同步观看模式下会自动关闭拍摄校准弹窗,避免被“启用摄像头”引导遮挡画面和状态信息
### 测试
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera switches into viewer mode|viewer stream|recorder blocks"`
- `pnpm build`
- `playwright-skill` 线上 smoke同账号 `H1` 双端登录后,移动端 owner 开始实时分析,桌面端 `/live-camera` 进入同步观看并显示主端信息、同步视频流,owner 点击结束分析后 viewer 同步进入保存阶段
### 线上 smoke
- `https://te.hao.work/` 已切换到本次新构建
- 当前公开站点前端资源 revision`assets/index-HRdM3fxq.js``assets/index-tNGuStgv.css`
- 真实双端验证已通过:同账号 `H1` 在移动端开启实时分析后,桌面端 `/live-camera` 会自动进入同步观看模式,显示主端设备信息、最近同步时间和远端视频流;owner 点击结束分析后,viewer 会同步进入“保存中”阶段
### 仓库版本
- `922a9fb`
## 2026.03.16-live-analysis-lock-hardening (2026-03-16)
### 功能更新
- 修复同账号多端实时分析在旧登录态下仍可重复占用摄像头的问题;缺少 `sid` 的旧 token 现在会按 token 本身派生唯一会话标识
- `/live-camera` 的同步观看模式新增自动重试;当持有端刚启动推流、viewer 首次连接返回 `viewer stream not ready` 时,会继续重连,不再长时间停留在无画面状态
- `/recorder` 接入实时分析占用锁;其他设备正在实时分析时,本页会禁止再次启动摄像头和开始录制,并提示前往 `/live-camera` 查看同步画面
- 应用启动改为先监听 HTTP 端口、再后台串行执行教程图同步和标准库预热,修复新容器上线时公网长时间返回 `502`
### 测试
- `curl -I https://te.hao.work/`
- `pnpm check`
- `pnpm exec vitest run server/_core/sdk.test.ts server/features.test.ts`
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "viewer mode|viewer stream|recorder blocks"`
- `playwright-skill` 线上校验:登录 `H1` 后访问 `/changelog`,确认 `2026.03.16-live-analysis-lock-hardening` 与仓库版本 `f9db6ef` 已展示
- `pnpm build`
- Playwright 线上 smoke`H1` 手机端开启实时分析后,PC 端 `/live-camera` 自动进入同步观看并显示同步画面,`/recorder` 禁止启动摄像头;结束分析后会话可正常释放
### 线上 smoke
- `https://te.hao.work/` 已切换到本次新构建,不再返回 `502`
- 当前公开站点前端资源 revision`assets/index-mi8CPCFI.js``assets/index-Cp_VJ8sf.css`
- 真实双端验证已通过:同账号 `H1` 手机端开始实时分析后,PC 端 `/live-camera` 进入同步观看模式且可拉起同步流,`/recorder` 页面会阻止再次占用摄像头
### 仓库版本
- `f9db6ef`
## 2026.03.16-live-analysis-runtime-migration (2026-03-16)
### 功能更新
- 修复生产环境缺失 `live_analysis_runtime` 表导致 `/live-camera` 启动实时分析时报 SQL 查询失败的问题
- 生产库已补建 `live_analysis_runtime` 表,并写入缺失的 `0011_live_analysis_runtime` 迁移记录,避免后续重复报错
- 仓库内 `drizzle/meta/_journal.json` 已补齐 `0011_live_analysis_runtime` 条目,后续 `docker compose` 部署可正确识别该迁移
- 实时分析 runtime 锁恢复正常后,同账号多端互斥与同步观看流程可继续工作
### 测试
- `pnpm check`
- `pnpm exec vitest run server/features.test.ts`
- `pnpm build`
- `docker compose exec -T db mysql ... SHOW TABLES LIKE 'live_analysis_runtime'`
- `curl -I https://te.hao.work/live-camera`
- Playwright smoke登录 `H1` 后访问 `/live-camera``analysis.runtimeGet` / `analysis.runtimeAcquire` / `analysis.runtimeRelease` 全部返回 `200`
### 线上 smoke
- `https://te.hao.work/` 已切换到本次新构建
- 当前公开站点前端资源 revision`assets/index-B3BN5hY-.js``assets/index-BL6GQzUF.css`
- `/live-camera` 已恢复可用,线上不再出现 `live_analysis_runtime` 缺表导致的 SQL 查询失败
### 仓库版本
- `2b72ef9`
## 2026.03.16-live-camera-multidevice-viewer (2026-03-16)
### 功能更新
- `/live-camera` 新增同账号多端 runtime 锁;一个设备开始实时分析后,其他设备不能再次启动摄像头或分析
- 其他设备会自动进入“同步观看模式”,可查看持有端同步推送的实时画面、当前动作、评分、反馈和最近动作片段
- 同步观看复用 media 服务新增的 `/viewer-signal` WebRTC 通道,直接订阅“原视频 + 骨架 + 关键点 + 虚拟形象”的合成画面
- runtime 心跳按 `sid` 维度识别持有端,兼容旧 token 缺失可选字段的情况;超过 15 秒无心跳会自动释放陈旧锁
- `/live-camera` 前端新增 owner / viewer 双模式切换,观看端会禁用镜头切换、重新校准、质量调整和分析启动
- e2e mock 新增 viewer 模式和 runtime 接口覆盖,保证浏览器测试可以直接验证多端互斥与同步观看
### 测试
- `pnpm check`
- `pnpm exec vitest run server/features.test.ts`
- `go test ./...`
- `go build ./...`
- `pnpm build`
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"`
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "recorder flow archives a session and exposes it in videos"`
- `curl -I https://te.hao.work/live-camera`
### 线上 smoke
- `https://te.hao.work/live-camera` 已切换到本次新前端构建
- 公开站点确认已经提供本次发布的最新前端资源
### 仓库版本
- `4e4122d`
## 2026.03.16-live-analysis-overlay-archive (2026-03-16) ## 2026.03.16-live-analysis-overlay-archive (2026-03-16)
### 功能更新 ### 功能更新

查看文件

@@ -0,0 +1,17 @@
-- Per-user runtime lock/state for the live-camera analysis feature.
-- Exactly one row per user (enforced by the unique index on userId), so a
-- second device can detect and honor an existing analysis session.
CREATE TABLE `live_analysis_runtime` (
	`id` int AUTO_INCREMENT NOT NULL,
	`userId` int NOT NULL,
	-- Session id (sid) of the device currently owning the analysis lock.
	`ownerSid` varchar(96),
	`status` enum('idle','active','ended') NOT NULL DEFAULT 'idle',
	`title` varchar(256),
	`sessionMode` enum('practice','pk') NOT NULL DEFAULT 'practice',
	-- Media-service session id used to relay frames/segments to viewers.
	`mediaSessionId` varchar(96),
	`startedAt` timestamp,
	`endedAt` timestamp,
	-- Owner heartbeat; a stale heartbeat lets other devices reclaim the lock
	-- (changelog: locks are auto-released after ~15s without a heartbeat).
	`lastHeartbeatAt` timestamp,
	-- Owner-published runtime snapshot rendered by viewer devices.
	`snapshot` json,
	`createdAt` timestamp NOT NULL DEFAULT (now()),
	`updatedAt` timestamp NOT NULL DEFAULT (now()) ON UPDATE CURRENT_TIMESTAMP,
	CONSTRAINT `live_analysis_runtime_id` PRIMARY KEY(`id`),
	CONSTRAINT `live_analysis_runtime_user_idx` UNIQUE(`userId`)
);

查看文件

@@ -78,6 +78,13 @@
"when": 1773662400000, "when": 1773662400000,
"tag": "0010_remove_non_tennis_tutorials", "tag": "0010_remove_non_tennis_tutorials",
"breakpoints": true "breakpoints": true
},
{
"idx": 11,
"version": "5",
"when": 1773691200000,
"tag": "0011_live_analysis_runtime",
"breakpoints": true
} }
] ]
} }

查看文件

@@ -16,6 +16,21 @@ export const users = mysqlTable("users", {
trainingGoals: text("trainingGoals"), trainingGoals: text("trainingGoals"),
/** NTRP rating (1.0 - 5.0) */ /** NTRP rating (1.0 - 5.0) */
ntrpRating: float("ntrpRating").default(1.5), ntrpRating: float("ntrpRating").default(1.5),
/** Manual NTRP baseline before automated rating is established */
manualNtrpRating: float("manualNtrpRating"),
manualNtrpCapturedAt: timestamp("manualNtrpCapturedAt"),
/** Training assessment profile */
heightCm: float("heightCm"),
weightKg: float("weightKg"),
sprintSpeedScore: int("sprintSpeedScore"),
explosivePowerScore: int("explosivePowerScore"),
agilityScore: int("agilityScore"),
enduranceScore: int("enduranceScore"),
flexibilityScore: int("flexibilityScore"),
coreStabilityScore: int("coreStabilityScore"),
shoulderMobilityScore: int("shoulderMobilityScore"),
hipMobilityScore: int("hipMobilityScore"),
assessmentNotes: text("assessmentNotes"),
/** Total training sessions completed */ /** Total training sessions completed */
totalSessions: int("totalSessions").default(0), totalSessions: int("totalSessions").default(0),
/** Total training minutes */ /** Total training minutes */
@@ -215,6 +230,30 @@ export const liveAnalysisSessions = mysqlTable("live_analysis_sessions", {
export type LiveAnalysisSession = typeof liveAnalysisSessions.$inferSelect; export type LiveAnalysisSession = typeof liveAnalysisSessions.$inferSelect;
export type InsertLiveAnalysisSession = typeof liveAnalysisSessions.$inferInsert; export type InsertLiveAnalysisSession = typeof liveAnalysisSessions.$inferInsert;
/**
 * Per-user runtime state for the current live-camera analysis lock.
 *
 * One row per user (unique index on userId). The row records which device
 * (ownerSid) currently owns the analysis session, so other devices on the
 * same account switch into viewer mode instead of re-acquiring the camera.
 */
export const liveAnalysisRuntime = mysqlTable("live_analysis_runtime", {
  id: int("id").autoincrement().primaryKey(),
  userId: int("userId").notNull(),
  // Session id (sid) of the owning device; nullable for legacy tokens.
  ownerSid: varchar("ownerSid", { length: 96 }),
  status: mysqlEnum("status", ["idle", "active", "ended"]).default("idle").notNull(),
  title: varchar("title", { length: 256 }),
  sessionMode: mysqlEnum("sessionMode", ["practice", "pk"]).default("practice").notNull(),
  // Media-service session used to relay the live stream to viewers.
  mediaSessionId: varchar("mediaSessionId", { length: 96 }),
  startedAt: timestamp("startedAt"),
  endedAt: timestamp("endedAt"),
  // Owner heartbeat; stale heartbeats allow the lock to be reclaimed.
  lastHeartbeatAt: timestamp("lastHeartbeatAt"),
  // Owner-published state snapshot that viewer devices render from.
  snapshot: json("snapshot"),
  createdAt: timestamp("createdAt").defaultNow().notNull(),
  updatedAt: timestamp("updatedAt").defaultNow().onUpdateNow().notNull(),
}, (table) => ({
  userIdUnique: uniqueIndex("live_analysis_runtime_user_idx").on(table.userId),
}));

export type LiveAnalysisRuntime = typeof liveAnalysisRuntime.$inferSelect;
export type InsertLiveAnalysisRuntime = typeof liveAnalysisRuntime.$inferInsert;
/** /**
* Action segments extracted from a realtime analysis session. * Action segments extracted from a realtime analysis session.
*/ */
@@ -390,15 +429,34 @@ export type InsertUserAchievement = typeof userAchievements.$inferInsert;
*/ */
export const tutorialVideos = mysqlTable("tutorial_videos", { export const tutorialVideos = mysqlTable("tutorial_videos", {
id: int("id").autoincrement().primaryKey(), id: int("id").autoincrement().primaryKey(),
slug: varchar("slug", { length: 128 }),
title: varchar("title", { length: 256 }).notNull(), title: varchar("title", { length: 256 }).notNull(),
category: varchar("category", { length: 64 }).notNull(), category: varchar("category", { length: 64 }).notNull(),
skillLevel: mysqlEnum("skillLevel", ["beginner", "intermediate", "advanced"]).default("beginner"), skillLevel: mysqlEnum("skillLevel", ["beginner", "intermediate", "advanced"]).default("beginner"),
topicArea: varchar("topicArea", { length: 32 }).default("tennis_skill"),
contentFormat: varchar("contentFormat", { length: 16 }).default("video"),
sourcePlatform: varchar("sourcePlatform", { length: 16 }).default("none"),
description: text("description"), description: text("description"),
heroSummary: text("heroSummary"),
keyPoints: json("keyPoints"), keyPoints: json("keyPoints"),
commonMistakes: json("commonMistakes"), commonMistakes: json("commonMistakes"),
videoUrl: text("videoUrl"), videoUrl: text("videoUrl"),
externalUrl: text("externalUrl"),
platformVideoId: varchar("platformVideoId", { length: 64 }),
thumbnailUrl: text("thumbnailUrl"), thumbnailUrl: text("thumbnailUrl"),
duration: int("duration"), duration: int("duration"),
estimatedEffortMinutes: int("estimatedEffortMinutes"),
prerequisites: json("prerequisites"),
learningObjectives: json("learningObjectives"),
stepSections: json("stepSections"),
deliverables: json("deliverables"),
relatedDocPaths: json("relatedDocPaths"),
viewCount: int("viewCount"),
commentCount: int("commentCount"),
metricsFetchedAt: timestamp("metricsFetchedAt"),
completionAchievementKey: varchar("completionAchievementKey", { length: 64 }),
isFeatured: int("isFeatured").default(0),
featuredOrder: int("featuredOrder").default(0),
sortOrder: int("sortOrder").default(0), sortOrder: int("sortOrder").default(0),
isPublished: int("isPublished").default(1), isPublished: int("isPublished").default(1),
createdAt: timestamp("createdAt").defaultNow().notNull(), createdAt: timestamp("createdAt").defaultNow().notNull(),
@@ -416,6 +474,8 @@ export const tutorialProgress = mysqlTable("tutorial_progress", {
userId: int("userId").notNull(), userId: int("userId").notNull(),
tutorialId: int("tutorialId").notNull(), tutorialId: int("tutorialId").notNull(),
watched: int("watched").default(0), watched: int("watched").default(0),
completed: int("completed").default(0),
completedAt: timestamp("completedAt"),
comparisonVideoId: int("comparisonVideoId"), comparisonVideoId: int("comparisonVideoId"),
selfScore: float("selfScore"), selfScore: float("selfScore"),
notes: text("notes"), notes: text("notes"),

查看文件

@@ -53,6 +53,18 @@ const (
PreviewFailed PreviewStatus = "failed" PreviewFailed PreviewStatus = "failed"
) )
// SessionPurpose distinguishes normal archived recordings from short-lived
// live-viewer relay sessions, which get a rolling window and auto-pruning.
type SessionPurpose string

const (
	// PurposeRecording is the default purpose for archived recordings.
	PurposeRecording SessionPurpose = "recording"
	// PurposeRelay marks sessions that only buffer the live viewer stream.
	PurposeRelay SessionPurpose = "relay"
)

const (
	// relayPreviewWindow is how much trailing footage a relay session keeps;
	// older segments are trimmed on upload (see trimSegmentsToDuration usage).
	relayPreviewWindow = 60 * time.Second
	// relayCacheTTL is how long an inactive relay session is retained —
	// presumably passed to pruneExpiredRelaySessions; confirm at call site.
	relayCacheTTL = 30 * time.Minute
)
type PlaybackInfo struct { type PlaybackInfo struct {
WebMURL string `json:"webmUrl,omitempty"` WebMURL string `json:"webmUrl,omitempty"`
MP4URL string `json:"mp4Url,omitempty"` MP4URL string `json:"mp4Url,omitempty"`
@@ -84,6 +96,7 @@ type Session struct {
ID string `json:"id"` ID string `json:"id"`
UserID string `json:"userId"` UserID string `json:"userId"`
Title string `json:"title"` Title string `json:"title"`
Purpose SessionPurpose `json:"purpose"`
Status SessionStatus `json:"status"` Status SessionStatus `json:"status"`
ArchiveStatus ArchiveStatus `json:"archiveStatus"` ArchiveStatus ArchiveStatus `json:"archiveStatus"`
PreviewStatus PreviewStatus `json:"previewStatus"` PreviewStatus PreviewStatus `json:"previewStatus"`
@@ -104,6 +117,9 @@ type Session struct {
PreviewUpdatedAt string `json:"previewUpdatedAt,omitempty"` PreviewUpdatedAt string `json:"previewUpdatedAt,omitempty"`
StreamConnected bool `json:"streamConnected"` StreamConnected bool `json:"streamConnected"`
LastStreamAt string `json:"lastStreamAt,omitempty"` LastStreamAt string `json:"lastStreamAt,omitempty"`
ViewerCount int `json:"viewerCount"`
LiveFrameURL string `json:"liveFrameUrl,omitempty"`
LiveFrameUpdated string `json:"liveFrameUpdatedAt,omitempty"`
Playback PlaybackInfo `json:"playback"` Playback PlaybackInfo `json:"playback"`
Segments []SegmentMeta `json:"segments"` Segments []SegmentMeta `json:"segments"`
Markers []Marker `json:"markers"` Markers []Marker `json:"markers"`
@@ -131,6 +147,7 @@ type CreateSessionRequest struct {
QualityPreset string `json:"qualityPreset"` QualityPreset string `json:"qualityPreset"`
FacingMode string `json:"facingMode"` FacingMode string `json:"facingMode"`
DeviceKind string `json:"deviceKind"` DeviceKind string `json:"deviceKind"`
Purpose string `json:"purpose"`
} }
type SignalRequest struct { type SignalRequest struct {
@@ -156,6 +173,8 @@ type sessionStore struct {
mu sync.RWMutex mu sync.RWMutex
sessions map[string]*Session sessions map[string]*Session
peers map[string]*webrtc.PeerConnection peers map[string]*webrtc.PeerConnection
viewerPeers map[string]map[string]*webrtc.PeerConnection
videoTracks map[string]*webrtc.TrackLocalStaticRTP
} }
func newSessionStore(rootDir string) (*sessionStore, error) { func newSessionStore(rootDir string) (*sessionStore, error) {
@@ -164,6 +183,8 @@ func newSessionStore(rootDir string) (*sessionStore, error) {
public: filepath.Join(rootDir, "public"), public: filepath.Join(rootDir, "public"),
sessions: map[string]*Session{}, sessions: map[string]*Session{},
peers: map[string]*webrtc.PeerConnection{}, peers: map[string]*webrtc.PeerConnection{},
viewerPeers: map[string]map[string]*webrtc.PeerConnection{},
videoTracks: map[string]*webrtc.TrackLocalStaticRTP{},
} }
if err := os.MkdirAll(filepath.Join(rootDir, "sessions"), 0o755); err != nil { if err := os.MkdirAll(filepath.Join(rootDir, "sessions"), 0o755); err != nil {
return nil, err return nil, err
@@ -206,6 +227,12 @@ func (s *sessionStore) refreshFromDisk() error {
if err != nil { if err != nil {
return err return err
} }
for _, session := range sessions {
if session.Purpose == "" {
session.Purpose = PurposeRecording
}
session.recomputeAggregates()
}
s.mu.Lock() s.mu.Lock()
defer s.mu.Unlock() defer s.mu.Unlock()
s.sessions = sessions s.sessions = sessions
@@ -224,6 +251,14 @@ func (s *sessionStore) publicDir(id string) string {
return filepath.Join(s.public, "sessions", id) return filepath.Join(s.public, "sessions", id)
} }
// liveFramePath returns the on-disk location of a session's latest relayed
// JPEG frame inside the public assets tree.
func (s *sessionStore) liveFramePath(id string) string {
	return filepath.Join(s.publicDir(id), "live-frame.jpg")
}

// liveFrameURL returns the public URL viewers poll for the same frame that
// liveFramePath writes; the path must stay in sync with how public assets
// are served under /media/assets.
func (s *sessionStore) liveFrameURL(id string) string {
	return fmt.Sprintf("/media/assets/sessions/%s/live-frame.jpg", id)
}
func (s *sessionStore) saveSession(session *Session) error { func (s *sessionStore) saveSession(session *Session) error {
session.UpdatedAt = time.Now().UTC().Format(time.RFC3339) session.UpdatedAt = time.Now().UTC().Format(time.RFC3339)
dir := s.sessionDir(session.ID) dir := s.sessionDir(session.ID)
@@ -250,6 +285,7 @@ func (s *sessionStore) createSession(input CreateSessionRequest) (*Session, erro
ID: randomID(), ID: randomID(),
UserID: strings.TrimSpace(input.UserID), UserID: strings.TrimSpace(input.UserID),
Title: strings.TrimSpace(input.Title), Title: strings.TrimSpace(input.Title),
Purpose: SessionPurpose(defaultString(input.Purpose, string(PurposeRecording))),
Status: StatusCreated, Status: StatusCreated,
ArchiveStatus: ArchiveIdle, ArchiveStatus: ArchiveIdle,
PreviewStatus: PreviewIdle, PreviewStatus: PreviewIdle,
@@ -275,6 +311,106 @@ func (s *sessionStore) createSession(input CreateSessionRequest) (*Session, erro
return cloneSession(session), nil return cloneSession(session), nil
} }
func parseSessionTime(values ...string) time.Time {
for _, value := range values {
if strings.TrimSpace(value) == "" {
continue
}
if parsed, err := time.Parse(time.RFC3339, value); err == nil {
return parsed
}
}
return time.Time{}
}
// sortSegmentsBySequence orders segments in place by ascending sequence
// number.
func sortSegmentsBySequence(segments []SegmentMeta) {
	bySequence := func(i, j int) bool {
		return segments[i].Sequence < segments[j].Sequence
	}
	sort.Slice(segments, bySequence)
}
// maxInt64 returns value, raised to minimum when it falls below it.
func maxInt64(value int64, minimum int64) int64 {
	if value >= minimum {
		return value
	}
	return minimum
}
// trimSegmentsToDuration keeps the newest suffix of segments whose combined
// duration reaches at least maxDuration; everything older is returned in
// removed. Both results are freshly allocated; the input is not mutated.
// Callers must pass segments already sorted by sequence.
func trimSegmentsToDuration(segments []SegmentMeta, maxDuration time.Duration) (kept []SegmentMeta, removed []SegmentMeta) {
	if len(segments) == 0 {
		return []SegmentMeta{}, []SegmentMeta{}
	}
	budget := maxDuration.Milliseconds()
	accumulated := int64(0)
	cut := len(segments) - 1
	// Walk backwards from the newest segment until the window is covered.
	for ; cut >= 0; cut-- {
		// Count every segment as at least 1ms so zero/negative duration
		// metadata cannot stall the accumulation.
		accumulated += maxInt64(segments[cut].DurationMS, 1)
		if accumulated >= budget {
			break
		}
	}
	if cut < 0 {
		// Total footage never reached the window; keep everything.
		cut = 0
	}
	kept = append([]SegmentMeta(nil), segments[cut:]...)
	removed = append([]SegmentMeta(nil), segments[:cut]...)
	return kept, removed
}
// sessionNeedsPreview reports whether the session's rolling preview is stale
// and should be rebuilt by the worker.
func sessionNeedsPreview(session *Session) bool {
	if len(session.Segments) == 0 {
		// Nothing uploaded yet, so there is nothing to preview.
		return false
	}
	if session.PreviewStatus == PreviewProcessing {
		// A rebuild is already in flight; do not queue another.
		return false
	}
	if session.PreviewStatus != PreviewReady {
		return true
	}
	if session.PreviewSegments < len(session.Segments) {
		return true
	}
	lastBuild := parseSessionTime(session.PreviewUpdatedAt)
	if lastBuild.IsZero() {
		// Unknown build time: err on the side of rebuilding.
		return true
	}
	// Rebuild when any segment landed after the last preview build.
	for _, segment := range session.Segments {
		if uploaded := parseSessionTime(segment.UploadedAt); !uploaded.IsZero() && uploaded.After(lastBuild) {
			return true
		}
	}
	return false
}
// pruneExpiredRelaySessions removes relay-purpose sessions whose last
// activity is older than maxAge, along with their in-memory peer/track state
// and on-disk directories.
//
// Fixes over the previous version:
//   - peer connections were deleted from the maps without being closed,
//     leaking the underlying WebRTC connections (closePeer closes them on
//     every other teardown path);
//   - the first RemoveAll failure returned immediately, skipping the rest of
//     the expired sessions even though this session's map entries were
//     already deleted. Removal is now best-effort per session and the first
//     filesystem error, if any, is returned after the full sweep.
func (s *sessionStore) pruneExpiredRelaySessions(maxAge time.Duration, now time.Time) error {
	s.mu.Lock()
	defer s.mu.Unlock()
	var firstErr error
	for id, session := range s.sessions {
		if session.Purpose != PurposeRelay {
			continue
		}
		// UpdatedAt is refreshed by every saveSession, so it is normally the
		// freshest timestamp; the later values are fallbacks for sessions
		// never saved after creation.
		lastActivity := parseSessionTime(session.UpdatedAt, session.LastStreamAt, session.LiveFrameUpdated, session.CreatedAt)
		if lastActivity.IsZero() || now.Sub(lastActivity) < maxAge {
			continue
		}
		// Close live connections before dropping the bookkeeping for them.
		if peer, ok := s.peers[id]; ok {
			_ = peer.Close()
		}
		for _, viewer := range s.viewerPeers[id] {
			_ = viewer.Close()
		}
		delete(s.sessions, id)
		delete(s.peers, id)
		delete(s.viewerPeers, id)
		delete(s.videoTracks, id)
		if err := os.RemoveAll(s.sessionDir(id)); err != nil && !errors.Is(err, os.ErrNotExist) && firstErr == nil {
			firstErr = err
		}
		if err := os.RemoveAll(s.publicDir(id)); err != nil && !errors.Is(err, os.ErrNotExist) && firstErr == nil {
			firstErr = err
		}
	}
	return firstErr
}
func (s *sessionStore) getSession(id string) (*Session, error) { func (s *sessionStore) getSession(id string) (*Session, error) {
s.mu.RLock() s.mu.RLock()
defer s.mu.RUnlock() defer s.mu.RUnlock()
@@ -294,6 +430,42 @@ func (s *sessionStore) replacePeer(id string, peer *webrtc.PeerConnection) {
s.peers[id] = peer s.peers[id] = peer
} }
// replaceViewerPeer registers (or swaps) the WebRTC peer connection for one
// viewer of a relay session and refreshes the session's persisted viewer
// count.
func (s *sessionStore) replaceViewerPeer(sessionID string, viewerID string, peer *webrtc.PeerConnection) {
	s.mu.Lock()
	defer s.mu.Unlock()
	if _, ok := s.viewerPeers[sessionID]; !ok {
		s.viewerPeers[sessionID] = map[string]*webrtc.PeerConnection{}
	}
	// A reconnecting viewer reuses its viewerID; close the stale connection
	// before storing the replacement so it does not leak.
	if existing, ok := s.viewerPeers[sessionID][viewerID]; ok {
		_ = existing.Close()
	}
	s.viewerPeers[sessionID][viewerID] = peer
	// Persist the new viewer count. saveSession is invoked with s.mu held,
	// which matches how closePeer calls it elsewhere in this store —
	// NOTE(review): saveSession must therefore never take s.mu itself.
	if session, ok := s.sessions[sessionID]; ok {
		session.ViewerCount = len(s.viewerPeers[sessionID])
		_ = s.saveSession(session)
	}
}
// removeViewerPeer closes and unregisters one viewer's peer connection,
// dropping the per-session map when it becomes empty and persisting the
// updated viewer count. Unknown session ids are a no-op.
func (s *sessionStore) removeViewerPeer(sessionID string, viewerID string) {
	s.mu.Lock()
	defer s.mu.Unlock()
	viewers, ok := s.viewerPeers[sessionID]
	if !ok {
		return
	}
	if existing, ok := viewers[viewerID]; ok {
		_ = existing.Close()
		delete(viewers, viewerID)
	}
	// Drop the empty inner map so stale session ids do not accumulate.
	if len(viewers) == 0 {
		delete(s.viewerPeers, sessionID)
	}
	// Persist the count even if the viewerID was already gone; saveSession is
	// called with s.mu held, consistent with closePeer/replaceViewerPeer.
	if session, ok := s.sessions[sessionID]; ok {
		session.ViewerCount = len(s.viewerPeers[sessionID])
		_ = s.saveSession(session)
	}
}
func (s *sessionStore) closePeer(id string) { func (s *sessionStore) closePeer(id string) {
s.mu.Lock() s.mu.Lock()
defer s.mu.Unlock() defer s.mu.Unlock()
@@ -301,6 +473,38 @@ func (s *sessionStore) closePeer(id string) {
_ = existing.Close() _ = existing.Close()
delete(s.peers, id) delete(s.peers, id)
} }
if viewers, ok := s.viewerPeers[id]; ok {
for viewerID, peer := range viewers {
_ = peer.Close()
delete(viewers, viewerID)
}
delete(s.viewerPeers, id)
}
delete(s.videoTracks, id)
if session, ok := s.sessions[id]; ok {
session.ViewerCount = 0
_ = s.saveSession(session)
}
}
// getVideoTrack returns the shared fan-out track for a session, or nil when
// the owner has not started streaming yet.
func (s *sessionStore) getVideoTrack(sessionID string) *webrtc.TrackLocalStaticRTP {
	s.mu.RLock()
	track := s.videoTracks[sessionID]
	s.mu.RUnlock()
	return track
}
func (s *sessionStore) ensureVideoTrack(sessionID string, codec webrtc.RTPCodecCapability) (*webrtc.TrackLocalStaticRTP, error) {
s.mu.Lock()
defer s.mu.Unlock()
if track, ok := s.videoTracks[sessionID]; ok {
return track, nil
}
track, err := webrtc.NewTrackLocalStaticRTP(codec, "video", fmt.Sprintf("livecam-%s", sessionID))
if err != nil {
return nil, err
}
s.videoTracks[sessionID] = track
return track, nil
} }
func (s *sessionStore) updateSession(id string, update func(*Session) error) (*Session, error) { func (s *sessionStore) updateSession(id string, update func(*Session) error) (*Session, error) {
@@ -332,7 +536,7 @@ func (s *sessionStore) listProcessableSessions() []*Session {
items = append(items, cloneSession(session)) items = append(items, cloneSession(session))
continue continue
} }
if session.PreviewSegments < len(session.Segments) && session.PreviewStatus != PreviewProcessing { if sessionNeedsPreview(session) {
items = append(items, cloneSession(session)) items = append(items, cloneSession(session))
} }
} }
@@ -419,12 +623,24 @@ func (m *mediaServer) handleSession(w http.ResponseWriter, r *http.Request) {
return return
} }
m.handleSignal(sessionID, w, r) m.handleSignal(sessionID, w, r)
case "viewer-signal":
if r.Method != http.MethodPost {
http.NotFound(w, r)
return
}
m.handleViewerSignal(sessionID, w, r)
case "segments": case "segments":
if r.Method != http.MethodPost { if r.Method != http.MethodPost {
http.NotFound(w, r) http.NotFound(w, r)
return return
} }
m.handleSegmentUpload(sessionID, w, r) m.handleSegmentUpload(sessionID, w, r)
case "live-frame":
if r.Method != http.MethodPost {
http.NotFound(w, r)
return
}
m.handleLiveFrameUpload(sessionID, w, r)
case "markers": case "markers":
if r.Method != http.MethodPost { if r.Method != http.MethodPost {
http.NotFound(w, r) http.NotFound(w, r)
@@ -509,12 +725,23 @@ func (m *mediaServer) handleSignal(sessionID string, w http.ResponseWriter, r *h
peer.OnTrack(func(track *webrtc.TrackRemote, receiver *webrtc.RTPReceiver) { peer.OnTrack(func(track *webrtc.TrackRemote, receiver *webrtc.RTPReceiver) {
_ = receiver _ = receiver
go func() { if track.Kind() != webrtc.RTPCodecTypeVideo {
buffer := make([]byte, 1600)
for {
if _, _, readErr := track.Read(buffer); readErr != nil {
return return
} }
localTrack, trackErr := m.store.ensureVideoTrack(sessionID, track.Codec().RTPCodecCapability)
if trackErr != nil {
log.Printf("failed to create local viewer track for session %s: %v", sessionID, trackErr)
return
}
go func() {
for {
packet, _, readErr := track.ReadRTP()
if readErr != nil {
return
}
if writeErr := localTrack.WriteRTP(packet); writeErr != nil && !errors.Is(writeErr, io.ErrClosedPipe) {
log.Printf("failed to fan out RTP packet for session %s: %v", sessionID, writeErr)
}
_, _ = m.store.updateSession(sessionID, func(session *Session) error { _, _ = m.store.updateSession(sessionID, func(session *Session) error {
session.StreamConnected = true session.StreamConnected = true
session.Status = StatusStreaming session.Status = StatusStreaming
@@ -556,6 +783,139 @@ func (m *mediaServer) handleSignal(sessionID string, w http.ResponseWriter, r *h
}) })
} }
// handleViewerSignal answers a viewer's WebRTC offer for a relay session by
// attaching the shared fan-out video track fed by the owner's stream, and
// returns a complete (non-trickle) SDP answer in a single HTTP round trip.
func (m *mediaServer) handleViewerSignal(sessionID string, w http.ResponseWriter, r *http.Request) {
	var input SignalRequest
	if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
		writeError(w, http.StatusBadRequest, "invalid request body")
		return
	}
	if _, err := m.store.getSession(sessionID); err != nil {
		writeError(w, http.StatusNotFound, err.Error())
		return
	}
	// The shared track only exists once the owner has started streaming;
	// 409 signals viewers to retry rather than treat this as fatal.
	localTrack := m.store.getVideoTrack(sessionID)
	if localTrack == nil {
		writeError(w, http.StatusConflict, "viewer stream not ready")
		return
	}
	config := webrtc.Configuration{
		ICEServers: []webrtc.ICEServer{{URLs: []string{"stun:stun.l.google.com:19302"}}},
	}
	peer, err := webrtc.NewPeerConnection(config)
	if err != nil {
		writeError(w, http.StatusInternalServerError, "failed to create viewer peer connection")
		return
	}
	viewerID := randomID()
	// Register early so every later failure path can tear the peer down via
	// removeViewerPeer (which also closes the connection).
	m.store.replaceViewerPeer(sessionID, viewerID, peer)
	sender, err := peer.AddTrack(localTrack)
	if err != nil {
		m.store.removeViewerPeer(sessionID, viewerID)
		writeError(w, http.StatusInternalServerError, "failed to add viewer track")
		return
	}
	// Drain and discard RTCP reports from the sender's return channel; the
	// goroutine exits when the sender is closed and Read errors out.
	go func() {
		rtcpBuf := make([]byte, 1500)
		for {
			if _, _, readErr := sender.Read(rtcpBuf); readErr != nil {
				return
			}
		}
	}()
	// Clean up bookkeeping when the viewer goes away on its own.
	peer.OnConnectionStateChange(func(state webrtc.PeerConnectionState) {
		switch state {
		case webrtc.PeerConnectionStateDisconnected, webrtc.PeerConnectionStateFailed, webrtc.PeerConnectionStateClosed:
			m.store.removeViewerPeer(sessionID, viewerID)
		}
	})
	offer := webrtc.SessionDescription{
		Type: parseSDPType(input.Type),
		SDP: input.SDP,
	}
	if err := peer.SetRemoteDescription(offer); err != nil {
		m.store.removeViewerPeer(sessionID, viewerID)
		writeError(w, http.StatusBadRequest, "failed to set remote description")
		return
	}
	answer, err := peer.CreateAnswer(nil)
	if err != nil {
		m.store.removeViewerPeer(sessionID, viewerID)
		writeError(w, http.StatusInternalServerError, "failed to create viewer answer")
		return
	}
	// The promise must be created before SetLocalDescription (which starts
	// ICE gathering); waiting on it ensures the answer carries all candidates
	// since there is no trickle channel back to the viewer.
	gatherComplete := webrtc.GatheringCompletePromise(peer)
	if err := peer.SetLocalDescription(answer); err != nil {
		m.store.removeViewerPeer(sessionID, viewerID)
		writeError(w, http.StatusInternalServerError, "failed to set viewer local description")
		return
	}
	<-gatherComplete
	writeJSON(w, http.StatusOK, map[string]any{
		"viewerId": viewerID,
		"type": strings.ToLower(peer.LocalDescription().Type.String()),
		"sdp": peer.LocalDescription().SDP,
	})
}
// handleLiveFrameUpload accepts the owner's latest composited JPEG frame
// (capped at 4 MiB) and publishes it atomically for viewers to poll, then
// marks the session's stream as alive.
func (m *mediaServer) handleLiveFrameUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
	if _, err := m.store.getSession(sessionID); err != nil {
		writeError(w, http.StatusNotFound, err.Error())
		return
	}
	// Cap the body so a misbehaving client cannot exhaust memory.
	body := http.MaxBytesReader(w, r.Body, 4<<20)
	defer body.Close()
	frame, err := io.ReadAll(body)
	if err != nil || len(frame) == 0 {
		writeError(w, http.StatusBadRequest, "invalid live frame payload")
		return
	}
	publicDir := m.store.publicDir(sessionID)
	if err := os.MkdirAll(publicDir, 0o755); err != nil {
		writeError(w, http.StatusInternalServerError, "failed to create live frame directory")
		return
	}
	// Write to a uniquely named temp file, then rename over the final path so
	// concurrent readers never observe a partially written frame.
	tmpFile := filepath.Join(publicDir, fmt.Sprintf("live-frame-%s.tmp", randomID()))
	if err := os.WriteFile(tmpFile, frame, 0o644); err != nil {
		writeError(w, http.StatusInternalServerError, "failed to write live frame")
		return
	}
	// After a successful rename the temp path no longer exists and this
	// Remove is a harmless no-op; it only matters on the error paths below.
	defer os.Remove(tmpFile)
	finalFile := m.store.liveFramePath(sessionID)
	if err := os.Rename(tmpFile, finalFile); err != nil {
		writeError(w, http.StatusInternalServerError, "failed to publish live frame")
		return
	}
	session, err := m.store.updateSession(sessionID, func(session *Session) error {
		session.LiveFrameURL = m.store.liveFrameURL(sessionID)
		session.LiveFrameUpdated = time.Now().UTC().Format(time.RFC3339)
		// A fresh frame proves the owner's stream is alive.
		session.StreamConnected = true
		session.LastStreamAt = session.LiveFrameUpdated
		if session.Status == StatusCreated || session.Status == StatusReconnecting {
			session.Status = StatusStreaming
		}
		session.LastError = ""
		return nil
	})
	if err != nil {
		writeError(w, http.StatusInternalServerError, "failed to update live frame session state")
		return
	}
	writeJSON(w, http.StatusAccepted, map[string]any{"session": session})
}
func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWriter, r *http.Request) { func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
sequence, err := strconv.Atoi(r.URL.Query().Get("sequence")) sequence, err := strconv.Atoi(r.URL.Query().Get("sequence"))
if err != nil || sequence < 0 { if err != nil || sequence < 0 {
@@ -583,6 +943,7 @@ func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWrite
return return
} }
removedSegments := []SegmentMeta{}
session, err := m.store.updateSession(sessionID, func(session *Session) error { session, err := m.store.updateSession(sessionID, func(session *Session) error {
meta := SegmentMeta{ meta := SegmentMeta{
Sequence: sequence, Sequence: sequence,
@@ -603,9 +964,12 @@ func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWrite
if !found { if !found {
session.Segments = append(session.Segments, meta) session.Segments = append(session.Segments, meta)
} }
sort.Slice(session.Segments, func(i, j int) bool { sortSegmentsBySequence(session.Segments)
return session.Segments[i].Sequence < session.Segments[j].Sequence if session.Purpose == PurposeRelay {
}) var kept []SegmentMeta
kept, removedSegments = trimSegmentsToDuration(session.Segments, relayPreviewWindow)
session.Segments = kept
}
session.Status = StatusRecording session.Status = StatusRecording
session.LastError = "" session.LastError = ""
return nil return nil
@@ -614,6 +978,12 @@ func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWrite
writeError(w, http.StatusNotFound, err.Error()) writeError(w, http.StatusNotFound, err.Error())
return return
} }
for _, segment := range removedSegments {
segmentPath := filepath.Join(m.store.segmentsDir(sessionID), segment.Filename)
if removeErr := os.Remove(segmentPath); removeErr != nil && !errors.Is(removeErr, os.ErrNotExist) {
log.Printf("failed to remove pruned relay segment %s: %v", segmentPath, removeErr)
}
}
writeJSON(w, http.StatusAccepted, map[string]any{"session": session}) writeJSON(w, http.StatusAccepted, map[string]any{"session": session})
} }
@@ -680,6 +1050,9 @@ func runWorkerLoop(ctx context.Context, store *sessionStore, interval time.Durat
log.Printf("[worker] failed to refresh session store: %v", err) log.Printf("[worker] failed to refresh session store: %v", err)
continue continue
} }
if err := store.pruneExpiredRelaySessions(relayCacheTTL, time.Now().UTC()); err != nil {
log.Printf("[worker] failed to prune relay cache: %v", err)
}
sessions := store.listProcessableSessions() sessions := store.listProcessableSessions()
for _, session := range sessions { for _, session := range sessions {
if err := processSession(store, session.ID); err != nil { if err := processSession(store, session.ID); err != nil {
@@ -700,7 +1073,7 @@ func processSession(store *sessionStore, sessionID string) error {
return processFinalArchive(store, sessionID) return processFinalArchive(store, sessionID)
} }
if current.PreviewSegments < len(current.Segments) { if sessionNeedsPreview(current) {
return processRollingPreview(store, sessionID) return processRollingPreview(store, sessionID)
} }
@@ -771,9 +1144,7 @@ func buildPlaybackArtifacts(store *sessionStore, session *Session, finalize bool
listFile := filepath.Join(store.sessionDir(sessionID), "concat.txt") listFile := filepath.Join(store.sessionDir(sessionID), "concat.txt")
inputs := make([]string, 0, len(session.Segments)) inputs := make([]string, 0, len(session.Segments))
sort.Slice(session.Segments, func(i, j int) bool { sortSegmentsBySequence(session.Segments)
return session.Segments[i].Sequence < session.Segments[j].Sequence
})
for _, segment := range session.Segments { for _, segment := range session.Segments {
inputs = append(inputs, filepath.Join(store.segmentsDir(sessionID), segment.Filename)) inputs = append(inputs, filepath.Join(store.segmentsDir(sessionID), segment.Filename))
} }
@@ -799,10 +1170,12 @@ func buildPlaybackArtifacts(store *sessionStore, session *Session, finalize bool
} }
} }
if finalize {
mp4Err := runFFmpeg("-y", "-i", outputWebM, "-c:v", "libx264", "-preset", "veryfast", "-crf", "28", "-c:a", "aac", "-movflags", "+faststart", outputMP4) mp4Err := runFFmpeg("-y", "-i", outputWebM, "-c:v", "libx264", "-preset", "veryfast", "-crf", "28", "-c:a", "aac", "-movflags", "+faststart", outputMP4)
if mp4Err != nil { if mp4Err != nil {
log.Printf("[worker] mp4 archive generation failed for %s: %v", sessionID, mp4Err) log.Printf("[worker] mp4 archive generation failed for %s: %v", sessionID, mp4Err)
} }
}
webmInfo, webmStatErr := os.Stat(outputWebM) webmInfo, webmStatErr := os.Stat(outputWebM)
if webmStatErr != nil { if webmStatErr != nil {
@@ -810,14 +1183,16 @@ func buildPlaybackArtifacts(store *sessionStore, session *Session, finalize bool
} }
var mp4Size int64 var mp4Size int64
var mp4URL string var mp4URL string
previewURL := fmt.Sprintf("/media/assets/sessions/%s/%s.webm", sessionID, baseName)
if finalize {
if info, statErr := os.Stat(outputMP4); statErr == nil { if info, statErr := os.Stat(outputMP4); statErr == nil {
mp4Size = info.Size() mp4Size = info.Size()
mp4URL = fmt.Sprintf("/media/assets/sessions/%s/recording.mp4", sessionID) mp4URL = fmt.Sprintf("/media/assets/sessions/%s/recording.mp4", sessionID)
} }
previewURL := fmt.Sprintf("/media/assets/sessions/%s/%s.webm", sessionID, baseName)
if mp4URL != "" { if mp4URL != "" {
previewURL = mp4URL previewURL = mp4URL
} }
}
_, updateErr := store.updateSession(sessionID, func(session *Session) error { _, updateErr := store.updateSession(sessionID, func(session *Session) error {
session.Playback.PreviewURL = previewURL session.Playback.PreviewURL = previewURL

查看文件

@@ -2,12 +2,15 @@ package main
import ( import (
"encoding/json" "encoding/json"
"errors"
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
"os" "os"
"path/filepath" "path/filepath"
"strconv"
"strings" "strings"
"testing" "testing"
"time"
) )
func TestMediaHealthAndSessionLifecycle(t *testing.T) { func TestMediaHealthAndSessionLifecycle(t *testing.T) {
@@ -256,3 +259,194 @@ func TestHandleSessionGetRefreshesSessionStateFromDisk(t *testing.T) {
t.Fatalf("expected playback ready after refresh") t.Fatalf("expected playback ready after refresh")
} }
} }
// TestViewerSignalReturnsConflictBeforePublisherTrackReady asserts that the
// viewer-signal endpoint answers 409 Conflict while the publisher has not yet
// produced a video track for the session.
func TestViewerSignalReturnsConflictBeforePublisherTrackReady(t *testing.T) {
	st, err := newSessionStore(t.TempDir())
	if err != nil {
		t.Fatalf("newSessionStore: %v", err)
	}
	srv := newMediaServer(st)
	created, err := st.createSession(CreateSessionRequest{UserID: "1", Title: "Viewer Pending"})
	if err != nil {
		t.Fatalf("createSession: %v", err)
	}
	offerBody := strings.NewReader(`{"type":"offer","sdp":"mock-offer"}`)
	request := httptest.NewRequest(http.MethodPost, "/media/sessions/"+created.ID+"/viewer-signal", offerBody)
	request.Header.Set("Content-Type", "application/json")
	recorder := httptest.NewRecorder()
	srv.routes().ServeHTTP(recorder, request)
	if recorder.Code != http.StatusConflict {
		t.Fatalf("expected viewer-signal 409 before video track is ready, got %d", recorder.Code)
	}
}
// TestLiveFrameUploadPublishesRelayFrame uploads one JPEG frame and checks the
// handler returns 202, stamps live-frame metadata on the session, flips
// StreamConnected, and persists the exact frame bytes at liveFramePath.
func TestLiveFrameUploadPublishesRelayFrame(t *testing.T) {
	store, err := newSessionStore(t.TempDir())
	if err != nil {
		t.Fatalf("newSessionStore: %v", err)
	}
	server := newMediaServer(store)
	session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Session"})
	if err != nil {
		t.Fatalf("createSession: %v", err)
	}
	req := httptest.NewRequest(http.MethodPost, "/media/sessions/"+session.ID+"/live-frame", strings.NewReader("jpeg-frame"))
	req.Header.Set("Content-Type", "image/jpeg")
	res := httptest.NewRecorder()
	server.routes().ServeHTTP(res, req)
	if res.Code != http.StatusAccepted {
		t.Fatalf("expected live-frame upload 202, got %d", res.Code)
	}
	// Re-read the session to observe the handler's state transition.
	current, err := store.getSession(session.ID)
	if err != nil {
		t.Fatalf("getSession: %v", err)
	}
	if current.LiveFrameURL == "" || current.LiveFrameUpdated == "" {
		t.Fatalf("expected live frame metadata to be recorded, got %#v", current)
	}
	if !current.StreamConnected {
		t.Fatalf("expected session stream connected after frame upload")
	}
	// The frame must land at the canonical live-frame path with bytes intact.
	framePath := store.liveFramePath(session.ID)
	body, err := os.ReadFile(framePath)
	if err != nil {
		t.Fatalf("read live frame: %v", err)
	}
	if string(body) != "jpeg-frame" {
		t.Fatalf("unexpected live frame content: %q", string(body))
	}
}
// TestRelaySegmentUploadKeepsOnlyLatestMinute uploads three 30s segments to a
// relay-purpose session and checks that the rolling relay buffer keeps only
// the newest segments (presumably relayPreviewWindow is ~60s, so sequence 0
// should be evicted — confirm against the constant) and that the evicted
// segment's file is deleted from disk, not merely de-listed.
func TestRelaySegmentUploadKeepsOnlyLatestMinute(t *testing.T) {
	store, err := newSessionStore(t.TempDir())
	if err != nil {
		t.Fatalf("newSessionStore: %v", err)
	}
	server := newMediaServer(store)
	session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Buffer", Purpose: "relay"})
	if err != nil {
		t.Fatalf("createSession: %v", err)
	}
	// Upload sequences 0..2, each reporting 30000ms of media.
	for sequence := 0; sequence < 3; sequence += 1 {
		req := httptest.NewRequest(http.MethodPost, "/media/sessions/"+session.ID+"/segments?sequence="+strconv.Itoa(sequence)+"&durationMs=30000", strings.NewReader("segment"))
		req.Header.Set("Content-Type", "video/webm")
		res := httptest.NewRecorder()
		server.routes().ServeHTTP(res, req)
		if res.Code != http.StatusAccepted {
			t.Fatalf("expected segment upload 202 for sequence %d, got %d", sequence, res.Code)
		}
	}
	current, err := store.getSession(session.ID)
	if err != nil {
		t.Fatalf("getSession: %v", err)
	}
	if current.Purpose != PurposeRelay {
		t.Fatalf("expected relay purpose, got %s", current.Purpose)
	}
	if len(current.Segments) != 2 {
		t.Fatalf("expected latest 2 relay segments to remain, got %d", len(current.Segments))
	}
	if current.Segments[0].Sequence != 1 || current.Segments[1].Sequence != 2 {
		t.Fatalf("expected relay segments 1 and 2 to remain, got %#v", current.Segments)
	}
	// The pruned segment's backing file must also be removed from disk.
	if _, err := os.Stat(filepath.Join(store.segmentsDir(session.ID), "000000.webm")); !errors.Is(err, os.ErrNotExist) {
		t.Fatalf("expected earliest relay segment to be pruned from disk, got %v", err)
	}
}
// TestProcessRelayPreviewPublishesBufferedWebM seeds one on-disk segment plus
// matching session metadata on a relay session, runs processRollingPreview,
// and checks playback points at a preview.webm while no MP4 archive URL is
// produced (MP4 generation is a finalize-only step per the diff).
func TestProcessRelayPreviewPublishesBufferedWebM(t *testing.T) {
	tempDir := t.TempDir()
	store, err := newSessionStore(tempDir)
	if err != nil {
		t.Fatalf("newSessionStore: %v", err)
	}
	session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Preview", Purpose: "relay"})
	if err != nil {
		t.Fatalf("createSession: %v", err)
	}
	// Fake segment bytes on disk; the preview step only needs the file present.
	if err := os.WriteFile(filepath.Join(store.segmentsDir(session.ID), "000000.webm"), []byte("segment"), 0o644); err != nil {
		t.Fatalf("write segment: %v", err)
	}
	// Register the segment's metadata and force relay purpose on the session.
	if _, err := store.updateSession(session.ID, func(current *Session) error {
		current.Segments = append(current.Segments, SegmentMeta{
			Sequence:    0,
			Filename:    "000000.webm",
			DurationMS:  60000,
			SizeBytes:   7,
			ContentType: "video/webm",
		})
		current.Purpose = PurposeRelay
		return nil
	}); err != nil {
		t.Fatalf("updateSession: %v", err)
	}
	if err := processRollingPreview(store, session.ID); err != nil {
		t.Fatalf("processRollingPreview: %v", err)
	}
	current, err := store.getSession(session.ID)
	if err != nil {
		t.Fatalf("getSession: %v", err)
	}
	if current.Playback.PreviewURL == "" || !strings.HasSuffix(current.Playback.PreviewURL, "/preview.webm") {
		t.Fatalf("expected relay preview webm url, got %#v", current.Playback)
	}
	if current.Playback.MP4URL != "" {
		t.Fatalf("expected relay preview to skip mp4 generation, got %#v", current.Playback)
	}
}
// TestPruneExpiredRelaySessionsRemovesOldCache backdates a relay session past
// relayCacheTTL (UpdatedAt set 31 minutes ago — presumably the TTL is 30m,
// confirm against the constant) and verifies pruning removes the session from
// the store along with both its session and public directories on disk.
func TestPruneExpiredRelaySessionsRemovesOldCache(t *testing.T) {
	store, err := newSessionStore(t.TempDir())
	if err != nil {
		t.Fatalf("newSessionStore: %v", err)
	}
	session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Old Relay", Purpose: "relay"})
	if err != nil {
		t.Fatalf("createSession: %v", err)
	}
	if err := os.WriteFile(filepath.Join(store.segmentsDir(session.ID), "000000.webm"), []byte("segment"), 0o644); err != nil {
		t.Fatalf("write segment: %v", err)
	}
	if err := os.MkdirAll(store.publicDir(session.ID), 0o755); err != nil {
		t.Fatalf("mkdir public dir: %v", err)
	}
	if err := os.WriteFile(filepath.Join(store.publicDir(session.ID), "preview.webm"), []byte("preview"), 0o644); err != nil {
		t.Fatalf("write preview: %v", err)
	}
	// Reach into the store under its lock to force the session to look stale.
	store.mu.Lock()
	store.sessions[session.ID].Purpose = PurposeRelay
	store.sessions[session.ID].UpdatedAt = time.Now().UTC().Add(-31 * time.Minute).Format(time.RFC3339)
	store.mu.Unlock()
	if err := store.pruneExpiredRelaySessions(relayCacheTTL, time.Now().UTC()); err != nil {
		t.Fatalf("pruneExpiredRelaySessions: %v", err)
	}
	if _, err := store.getSession(session.ID); err == nil {
		t.Fatalf("expected relay session to be removed from store")
	}
	if _, err := os.Stat(store.sessionDir(session.ID)); !errors.Is(err, os.ErrNotExist) {
		t.Fatalf("expected relay session directory to be removed, got %v", err)
	}
	if _, err := os.Stat(store.publicDir(session.ID)); !errors.Is(err, os.ErrNotExist) {
		t.Fatalf("expected relay public directory to be removed, got %v", err)
	}
}

查看文件

@@ -6,23 +6,29 @@ export type TrpcContext = {
req: CreateExpressContextOptions["req"]; req: CreateExpressContextOptions["req"];
res: CreateExpressContextOptions["res"]; res: CreateExpressContextOptions["res"];
user: User | null; user: User | null;
sessionSid: string | null;
}; };
export async function createContext( export async function createContext(
opts: CreateExpressContextOptions opts: CreateExpressContextOptions
): Promise<TrpcContext> { ): Promise<TrpcContext> {
let user: User | null = null; let user: User | null = null;
let sessionSid: string | null = null;
try { try {
user = await sdk.authenticateRequest(opts.req); const authenticated = await sdk.authenticateRequestWithSession(opts.req);
user = authenticated.user;
sessionSid = authenticated.sid;
} catch (error) { } catch (error) {
// Authentication is optional for public procedures. // Authentication is optional for public procedures.
user = null; user = null;
sessionSid = null;
} }
return { return {
req: opts.req, req: opts.req,
res: opts.res, res: opts.res,
user, user,
sessionSid,
}; };
} }

查看文件

@@ -13,6 +13,26 @@ import { createBackgroundTask, getAdminUserId, hasRecentBackgroundTaskOfType, se
import { nanoid } from "nanoid"; import { nanoid } from "nanoid";
import { syncTutorialImages } from "../tutorialImages"; import { syncTutorialImages } from "../tutorialImages";
/**
 * Runs startup seeding/sync jobs sequentially, timing each one.
 * Failures are logged and swallowed so one broken job never blocks the
 * remaining warmup work (nor server startup, since this runs post-listen).
 */
async function warmupApplicationData() {
  const tasks: Array<{ label: string; run: () => Promise<unknown> }> = [
    { label: "seedTutorials", run: () => seedTutorials() },
    { label: "syncTutorialImages", run: () => syncTutorialImages() },
    { label: "seedVisionReferenceImages", run: () => seedVisionReferenceImages() },
    { label: "seedAchievementDefinitions", run: () => seedAchievementDefinitions() },
    { label: "seedAppSettings", run: () => seedAppSettings() },
  ];
  // Intentionally sequential: later seeds may depend on earlier ones — TODO confirm.
  for (const { label, run } of tasks) {
    const begin = Date.now();
    try {
      await run();
      console.log(`[startup] ${label} finished in ${Date.now() - begin}ms`);
    } catch (error) {
      console.error(`[startup] ${label} failed`, error);
    }
  }
}
async function scheduleDailyNtrpRefresh() { async function scheduleDailyNtrpRefresh() {
const now = new Date(); const now = new Date();
if (now.getHours() !== 0 || now.getMinutes() > 5) { if (now.getHours() !== 0 || now.getMinutes() > 5) {
@@ -64,12 +84,6 @@ async function findAvailablePort(startPort: number = 3000): Promise<number> {
} }
async function startServer() { async function startServer() {
await seedTutorials();
await syncTutorialImages();
await seedVisionReferenceImages();
await seedAchievementDefinitions();
await seedAppSettings();
const app = express(); const app = express();
const server = createServer(app); const server = createServer(app);
registerMediaProxy(app); registerMediaProxy(app);
@@ -108,6 +122,7 @@ async function startServer() {
server.listen(port, () => { server.listen(port, () => {
console.log(`Server running on http://localhost:${port}/`); console.log(`Server running on http://localhost:${port}/`);
void warmupApplicationData();
}); });
setInterval(() => { setInterval(() => {

57
server/_core/sdk.test.ts 普通文件
查看文件

@@ -0,0 +1,57 @@
import { SignJWT } from "jose";
import { describe, expect, it, vi } from "vitest";
// Loads a fresh copy of the sdk and env modules under deterministic test env
// vars. vi.resetModules() guarantees every call observes a brand-new module
// graph, so env changes between calls actually take effect.
async function loadSdkForTest() {
  process.env.JWT_SECRET = "test-cookie-secret";
  process.env.VITE_APP_ID = "test-app";
  vi.resetModules();
  const [sdkModule, envModule] = await Promise.all([
    import("./sdk"),
    import("./env"),
  ]);
  return { sdk: sdkModule.sdk, ENV: envModule.ENV };
}
// Mints a legacy-format login JWT (no `sid` claim) signed HS256 with the
// configured cookie secret, expiring one minute from now.
async function signLegacyToken(openId: string, appId: string, name: string) {
  const signingKey = new TextEncoder().encode(process.env.JWT_SECRET || "");
  const expiresAtSeconds = Math.floor((Date.now() + 60_000) / 1000);
  const builder = new SignJWT({ openId, appId, name })
    .setProtectedHeader({ alg: "HS256", typ: "JWT" })
    .setExpirationTime(expiresAtSeconds);
  return builder.sign(signingKey);
}
// Exercises legacy-token handling in sdk.verifySession: tokens minted before
// the `sid` claim existed must still yield a stable token-derived session id.
describe("sdk.verifySession", () => {
  // Derived sid shape: "legacy-token:" + 32 hex chars (truncated sha256 of the
  // cookie value — see the createHash usage in sdk.verifySession).
  it("derives a stable legacy sid when the token payload does not include sid", async () => {
    const { sdk, ENV } = await loadSdkForTest();
    const legacyToken = await signLegacyToken("username_H1_legacy", ENV.appId, "H1");
    const session = await sdk.verifySession(legacyToken);
    expect(session).not.toBeNull();
    expect(session?.sid).toMatch(/^legacy-token:/);
    expect(session?.sid).toHaveLength("legacy-token:".length + 32);
  });
  // Two distinct legacy tokens must hash to distinct sids so two legacy logins
  // are not mistaken for the same device.
  it("derives different legacy sid values for different legacy login tokens", async () => {
    const firstLoad = await loadSdkForTest();
    const tokenA = await signLegacyToken("username_H1_legacy", firstLoad.ENV.appId, "H1");
    // Small delay nudges the exp timestamp; the differing `name` claim alone
    // already makes the token bytes differ — presumably belt-and-braces.
    await new Promise((resolve) => setTimeout(resolve, 5));
    const secondLoad = await loadSdkForTest();
    const tokenB = await signLegacyToken("username_H1_legacy", secondLoad.ENV.appId, "H1-second");
    const sessionA = await firstLoad.sdk.verifySession(tokenA);
    const sessionB = await secondLoad.sdk.verifySession(tokenB);
    expect(sessionA?.sid).toMatch(/^legacy-token:/);
    expect(sessionB?.sid).toMatch(/^legacy-token:/);
    expect(sessionA?.sid).not.toBe(sessionB?.sid);
  });
});

查看文件

@@ -4,6 +4,7 @@ import axios, { type AxiosInstance } from "axios";
import { parse as parseCookieHeader } from "cookie"; import { parse as parseCookieHeader } from "cookie";
import type { Request } from "express"; import type { Request } from "express";
import { SignJWT, jwtVerify } from "jose"; import { SignJWT, jwtVerify } from "jose";
import { createHash } from "node:crypto";
import type { User } from "../../drizzle/schema"; import type { User } from "../../drizzle/schema";
import * as db from "../db"; import * as db from "../db";
import { ENV } from "./env"; import { ENV } from "./env";
@@ -223,11 +224,15 @@ class SDKServer {
return null; return null;
} }
const derivedSid = typeof sid === "string" && sid.length > 0
? sid
: `legacy-token:${createHash("sha256").update(cookieValue).digest("hex").slice(0, 32)}`;
return { return {
openId, openId,
appId, appId,
name: typeof name === "string" ? name : undefined, name: typeof name === "string" ? name : undefined,
sid: typeof sid === "string" ? sid : undefined, sid: derivedSid,
}; };
} catch (error) { } catch (error) {
console.warn("[Auth] Session verification failed", String(error)); console.warn("[Auth] Session verification failed", String(error));
@@ -260,7 +265,11 @@ class SDKServer {
} }
async authenticateRequest(req: Request): Promise<User> { async authenticateRequest(req: Request): Promise<User> {
// Regular authentication flow const authenticated = await this.authenticateRequestWithSession(req);
return authenticated.user;
}
async authenticateRequestWithSession(req: Request): Promise<{ user: User; sid: string | null }> {
const cookies = this.parseCookies(req.headers.cookie); const cookies = this.parseCookies(req.headers.cookie);
const sessionCookie = cookies.get(COOKIE_NAME); const sessionCookie = cookies.get(COOKIE_NAME);
const session = await this.verifySession(sessionCookie); const session = await this.verifySession(sessionCookie);
@@ -273,7 +282,6 @@ class SDKServer {
const signedInAt = new Date(); const signedInAt = new Date();
let user = await db.getUserByOpenId(sessionUserId); let user = await db.getUserByOpenId(sessionUserId);
// If user not in DB, sync from OAuth server automatically
if (!user) { if (!user) {
try { try {
const userInfo = await this.getUserInfoWithJwt(sessionCookie ?? ""); const userInfo = await this.getUserInfoWithJwt(sessionCookie ?? "");
@@ -300,7 +308,10 @@ class SDKServer {
lastSignedIn: signedInAt, lastSignedIn: signedInAt,
}); });
return user; return {
user,
sid: session.sid ?? null,
};
} }
} }

查看文件

@@ -8,6 +8,7 @@ import {
poseAnalyses, InsertPoseAnalysis, poseAnalyses, InsertPoseAnalysis,
trainingRecords, InsertTrainingRecord, trainingRecords, InsertTrainingRecord,
liveAnalysisSessions, InsertLiveAnalysisSession, liveAnalysisSessions, InsertLiveAnalysisSession,
liveAnalysisRuntime, InsertLiveAnalysisRuntime,
liveActionSegments, InsertLiveActionSegment, liveActionSegments, InsertLiveActionSegment,
dailyTrainingAggregates, InsertDailyTrainingAggregate, dailyTrainingAggregates, InsertDailyTrainingAggregate,
ratingHistory, InsertRatingHistory, ratingHistory, InsertRatingHistory,
@@ -32,6 +33,7 @@ import { fetchTutorialMetrics, shouldRefreshTutorialMetrics } from "./tutorialMe
let _db: ReturnType<typeof drizzle> | null = null; let _db: ReturnType<typeof drizzle> | null = null;
const APP_TIMEZONE = process.env.TZ || "Asia/Shanghai"; const APP_TIMEZONE = process.env.TZ || "Asia/Shanghai";
export const LIVE_ANALYSIS_RUNTIME_TIMEOUT_MS = 15_000;
function getDateFormatter() { function getDateFormatter() {
return new Intl.DateTimeFormat("en-CA", { return new Intl.DateTimeFormat("en-CA", {
@@ -888,6 +890,140 @@ export async function createLiveAnalysisSession(session: InsertLiveAnalysisSessi
return result[0].insertId; return result[0].insertId;
} }
/**
 * Fetches the user's single live-analysis runtime row, or undefined when the
 * database is unavailable or no row exists.
 */
export async function getUserLiveAnalysisRuntime(userId: number) {
  const db = await getDb();
  if (!db) return undefined;
  const rows = await db
    .select()
    .from(liveAnalysisRuntime)
    .where(eq(liveAnalysisRuntime.userId, userId))
    .limit(1);
  return rows[0];
}
/**
 * Creates or updates the singleton live-analysis runtime row for `userId` and
 * returns the persisted row. Throws when the database handle is unavailable.
 *
 * NOTE(review): the update branch merges asymmetrically — `ownerSid`,
 * `status`, `title` and `sessionMode` use `??` (a `null` in the patch keeps
 * the existing value), while `mediaSessionId`/`startedAt`/`endedAt`/
 * `lastHeartbeatAt`/`snapshot` use an `=== undefined` check (an explicit
 * `null` clears them). Confirm this asymmetry is intentional.
 */
export async function upsertUserLiveAnalysisRuntime(
  userId: number,
  patch: Omit<InsertLiveAnalysisRuntime, "id" | "createdAt" | "updatedAt" | "userId">,
) {
  const db = await getDb();
  if (!db) throw new Error("Database not available");
  const existing = await getUserLiveAnalysisRuntime(userId);
  if (existing) {
    await db.update(liveAnalysisRuntime)
      .set({
        ownerSid: patch.ownerSid ?? existing.ownerSid,
        status: patch.status ?? existing.status,
        title: patch.title ?? existing.title,
        sessionMode: patch.sessionMode ?? existing.sessionMode,
        mediaSessionId: patch.mediaSessionId === undefined ? existing.mediaSessionId : patch.mediaSessionId,
        startedAt: patch.startedAt === undefined ? existing.startedAt : patch.startedAt,
        endedAt: patch.endedAt === undefined ? existing.endedAt : patch.endedAt,
        lastHeartbeatAt: patch.lastHeartbeatAt === undefined ? existing.lastHeartbeatAt : patch.lastHeartbeatAt,
        snapshot: patch.snapshot === undefined ? existing.snapshot : patch.snapshot,
      })
      .where(eq(liveAnalysisRuntime.userId, userId));
    return getUserLiveAnalysisRuntime(userId);
  }
  // Insert branch: seed defaults ("idle" status, "practice" mode) and re-read
  // by insertId so callers always receive the persisted row shape.
  const result = await db.insert(liveAnalysisRuntime).values({
    userId,
    ownerSid: patch.ownerSid ?? null,
    status: patch.status ?? "idle",
    title: patch.title ?? null,
    sessionMode: patch.sessionMode ?? "practice",
    mediaSessionId: patch.mediaSessionId ?? null,
    startedAt: patch.startedAt ?? null,
    endedAt: patch.endedAt ?? null,
    lastHeartbeatAt: patch.lastHeartbeatAt ?? null,
    snapshot: patch.snapshot ?? null,
  });
  const runtimeId = result[0].insertId;
  const rows = await db.select().from(liveAnalysisRuntime).where(eq(liveAnalysisRuntime.id, runtimeId)).limit(1);
  return rows[0];
}
/**
 * Partially updates the user's runtime row; returns undefined when no row
 * exists. Throws when the database handle is unavailable.
 *
 * NOTE(review): `status` and `sessionMode` merge with `??` (cannot be nulled
 * here) while every other field uses an `=== undefined` check (explicit null
 * clears it) — confirm the asymmetry is intentional.
 */
export async function updateUserLiveAnalysisRuntime(
  userId: number,
  patch: Partial<Omit<InsertLiveAnalysisRuntime, "id" | "createdAt" | "updatedAt" | "userId">>,
) {
  const db = await getDb();
  if (!db) throw new Error("Database not available");
  const existing = await getUserLiveAnalysisRuntime(userId);
  if (!existing) return undefined;
  await db.update(liveAnalysisRuntime)
    .set({
      ownerSid: patch.ownerSid === undefined ? existing.ownerSid : patch.ownerSid,
      status: patch.status ?? existing.status,
      title: patch.title === undefined ? existing.title : patch.title,
      sessionMode: patch.sessionMode ?? existing.sessionMode,
      mediaSessionId: patch.mediaSessionId === undefined ? existing.mediaSessionId : patch.mediaSessionId,
      startedAt: patch.startedAt === undefined ? existing.startedAt : patch.startedAt,
      endedAt: patch.endedAt === undefined ? existing.endedAt : patch.endedAt,
      lastHeartbeatAt: patch.lastHeartbeatAt === undefined ? existing.lastHeartbeatAt : patch.lastHeartbeatAt,
      snapshot: patch.snapshot === undefined ? existing.snapshot : patch.snapshot,
    })
    .where(eq(liveAnalysisRuntime.userId, userId));
  return getUserLiveAnalysisRuntime(userId);
}
/**
 * Records a heartbeat for an active runtime, but only while the caller still
 * owns it (matching runtimeId + ownerSid on an "active" row).
 *
 * Returns undefined when the ownership check fails — signalling the caller to
 * stop heartbeating — otherwise the refreshed row. Clears endedAt so a row
 * cannot simultaneously look ended and freshly heartbeaten.
 * Throws when the database handle is unavailable.
 */
export async function updateLiveAnalysisRuntimeHeartbeat(input: {
  userId: number;
  ownerSid: string;
  runtimeId: number;
  mediaSessionId?: string | null;
  snapshot?: unknown;
}) {
  const db = await getDb();
  if (!db) throw new Error("Database not available");
  const existing = await getUserLiveAnalysisRuntime(input.userId);
  if (!existing || existing.id !== input.runtimeId || existing.ownerSid !== input.ownerSid || existing.status !== "active") {
    return undefined;
  }
  await db.update(liveAnalysisRuntime)
    .set({
      mediaSessionId: input.mediaSessionId === undefined ? existing.mediaSessionId : input.mediaSessionId,
      snapshot: input.snapshot === undefined ? existing.snapshot : input.snapshot,
      lastHeartbeatAt: new Date(),
      endedAt: null,
    })
    .where(and(
      eq(liveAnalysisRuntime.userId, input.userId),
      eq(liveAnalysisRuntime.id, input.runtimeId),
      // Re-assert ownership and liveness inside the UPDATE itself: the read
      // above is not atomic with this write, so without these predicates a
      // concurrent takeover/release between read and write could be clobbered.
      // This narrows the TOCTOU window to a true compare-and-set.
      eq(liveAnalysisRuntime.ownerSid, input.ownerSid),
      eq(liveAnalysisRuntime.status, "active"),
    ));
  return getUserLiveAnalysisRuntime(input.userId);
}
/**
 * Marks the user's runtime as ended and detaches its media session.
 * Optional runtimeId/ownerSid act as guards: when provided and mismatched,
 * the row is left untouched and undefined is returned. The snapshot is
 * replaced only when one is supplied. Throws without a database handle.
 */
export async function endUserLiveAnalysisRuntime(input: {
  userId: number;
  ownerSid?: string | null;
  runtimeId?: number;
  snapshot?: unknown;
}) {
  const db = await getDb();
  if (!db) throw new Error("Database not available");
  const current = await getUserLiveAnalysisRuntime(input.userId);
  if (!current) return undefined;
  const runtimeMismatch = input.runtimeId != null && current.id !== input.runtimeId;
  const ownerMismatch = input.ownerSid != null && current.ownerSid !== input.ownerSid;
  if (runtimeMismatch || ownerMismatch) return undefined;
  const nextSnapshot = input.snapshot === undefined ? current.snapshot : input.snapshot;
  await db.update(liveAnalysisRuntime)
    .set({
      status: "ended",
      mediaSessionId: null,
      endedAt: new Date(),
      snapshot: nextSnapshot,
    })
    .where(eq(liveAnalysisRuntime.userId, input.userId));
  return getUserLiveAnalysisRuntime(input.userId);
}
export async function createLiveActionSegments(segments: InsertLiveActionSegment[]) { export async function createLiveActionSegments(segments: InsertLiveActionSegment[]) {
const db = await getDb(); const db = await getDb();
if (!db || segments.length === 0) return; if (!db || segments.length === 0) return;

查看文件

@@ -45,7 +45,7 @@ function createTestUser(overrides?: Partial<AuthenticatedUser>): AuthenticatedUs
}; };
} }
function createMockContext(user: AuthenticatedUser | null = null): { function createMockContext(user: AuthenticatedUser | null = null, sessionSid = "test-session-sid"): {
ctx: TrpcContext; ctx: TrpcContext;
clearedCookies: { name: string; options: Record<string, unknown> }[]; clearedCookies: { name: string; options: Record<string, unknown> }[];
setCookies: { name: string; value: string; options: Record<string, unknown> }[]; setCookies: { name: string; value: string; options: Record<string, unknown> }[];
@@ -56,6 +56,7 @@ function createMockContext(user: AuthenticatedUser | null = null): {
return { return {
ctx: { ctx: {
user, user,
sessionSid: user ? sessionSid : null,
req: { req: {
protocol: "https", protocol: "https",
headers: {}, headers: {},
@@ -1296,6 +1297,161 @@ describe("analysis.liveSessionSave", () => {
}); });
}); });
// Covers the analysis.runtime* tRPC procedures: single-owner acquisition,
// viewer fallback for a second device, owner re-acquisition with the same sid,
// and ownership enforcement on heartbeat/release.
describe("analysis.runtime", () => {
  afterEach(() => {
    vi.restoreAllMocks();
  });
  // No runtime row exists yet → the caller's sid becomes the owner and an
  // active runtime row is upserted.
  it("acquires owner mode when runtime is idle", async () => {
    const user = createTestUser({ id: 7 });
    const { ctx } = createMockContext(user, "sid-owner");
    const caller = appRouter.createCaller(ctx);
    vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(undefined);
    const upsertSpy = vi.spyOn(db, "upsertUserLiveAnalysisRuntime").mockResolvedValueOnce({
      id: 11,
      userId: 7,
      ownerSid: "sid-owner",
      status: "active",
      title: "实时分析 正手",
      sessionMode: "practice",
      mediaSessionId: null,
      startedAt: new Date(),
      endedAt: null,
      lastHeartbeatAt: new Date(),
      snapshot: null,
      createdAt: new Date(),
      updatedAt: new Date(),
    } as any);
    const result = await caller.analysis.runtimeAcquire({
      title: "实时分析 正手",
      sessionMode: "practice",
    });
    expect(upsertSpy).toHaveBeenCalledWith(7, expect.objectContaining({
      ownerSid: "sid-owner",
      status: "active",
      title: "实时分析 正手",
      sessionMode: "practice",
    }));
    expect(result.role).toBe("owner");
    expect((result.runtimeSession as any)?.ownerSid).toBe("sid-owner");
  });
  // A different sid acquiring while an active runtime is held by another
  // device drops to viewer mode and receives the owner's media session id.
  it("returns viewer mode when another session sid already holds the runtime", async () => {
    const user = createTestUser({ id: 7 });
    const { ctx } = createMockContext(user, "sid-viewer");
    const caller = appRouter.createCaller(ctx);
    const activeRuntime = {
      id: 15,
      userId: 7,
      ownerSid: "sid-owner",
      status: "active",
      title: "实时分析 练习",
      sessionMode: "pk",
      mediaSessionId: "media-sync-1",
      startedAt: new Date(),
      endedAt: null,
      lastHeartbeatAt: new Date(),
      snapshot: { phase: "analyzing" },
      createdAt: new Date(),
      updatedAt: new Date(),
    };
    vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(activeRuntime as any);
    const result = await caller.analysis.runtimeAcquire({
      title: "实时分析 练习",
      sessionMode: "pk",
    });
    expect(result.role).toBe("viewer");
    expect((result.runtimeSession as any)?.mediaSessionId).toBe("media-sync-1");
  });
  // The owning sid re-acquiring keeps owner mode and refreshes mutable fields
  // (here the title) via updateUserLiveAnalysisRuntime rather than upsert.
  it("keeps owner mode when the same sid reacquires the runtime", async () => {
    const user = createTestUser({ id: 7 });
    const { ctx } = createMockContext(user, "sid-owner");
    const caller = appRouter.createCaller(ctx);
    const activeRuntime = {
      id: 19,
      userId: 7,
      ownerSid: "sid-owner",
      status: "active",
      title: "旧标题",
      sessionMode: "practice",
      mediaSessionId: "media-sync-2",
      startedAt: new Date("2026-03-16T00:00:00.000Z"),
      endedAt: null,
      lastHeartbeatAt: new Date(),
      snapshot: { phase: "analyzing" },
      createdAt: new Date(),
      updatedAt: new Date(),
    };
    vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(activeRuntime as any);
    const updateSpy = vi.spyOn(db, "updateUserLiveAnalysisRuntime").mockResolvedValueOnce({
      ...activeRuntime,
      title: "新标题",
    } as any);
    const result = await caller.analysis.runtimeAcquire({
      title: "新标题",
      sessionMode: "practice",
    });
    expect(updateSpy).toHaveBeenCalledWith(7, expect.objectContaining({
      ownerSid: "sid-owner",
      title: "新标题",
      status: "active",
    }));
    expect(result.role).toBe("owner");
  });
  // updateLiveAnalysisRuntimeHeartbeat resolving undefined (ownership check
  // failed) must surface as the "not the holder" error to the client.
  it("rejects heartbeat from a non-owner sid", async () => {
    const user = createTestUser({ id: 7 });
    const { ctx } = createMockContext(user, "sid-viewer");
    const caller = appRouter.createCaller(ctx);
    vi.spyOn(db, "updateLiveAnalysisRuntimeHeartbeat").mockResolvedValueOnce(undefined);
    await expect(caller.analysis.runtimeHeartbeat({
      runtimeId: 20,
      mediaSessionId: "media-sync-3",
      snapshot: { phase: "analyzing" },
    })).rejects.toThrow("当前设备不是实时分析持有端");
  });
  // endUserLiveAnalysisRuntime resolving undefined (guard mismatch) while the
  // runtime is still owned elsewhere must likewise reject the release.
  it("rejects release from a non-owner sid", async () => {
    const user = createTestUser({ id: 7 });
    const { ctx } = createMockContext(user, "sid-viewer");
    const caller = appRouter.createCaller(ctx);
    vi.spyOn(db, "endUserLiveAnalysisRuntime").mockResolvedValueOnce(undefined);
    vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce({
      id: 23,
      userId: 7,
      ownerSid: "sid-owner",
      status: "active",
      title: "实时分析",
      sessionMode: "practice",
      mediaSessionId: "media-sync-4",
      startedAt: new Date(),
      endedAt: null,
      lastHeartbeatAt: new Date(),
      snapshot: null,
      createdAt: new Date(),
      updatedAt: new Date(),
    } as any);
    await expect(caller.analysis.runtimeRelease({
      runtimeId: 23,
      snapshot: { phase: "failed" },
    })).rejects.toThrow("当前设备不是实时分析持有端");
  });
});
describe("rating.refreshMine", () => { describe("rating.refreshMine", () => {
afterEach(() => { afterEach(() => {
vi.restoreAllMocks(); vi.restoreAllMocks();

查看文件

@@ -73,6 +73,67 @@ const trainingProfileUpdateSchema = z.object({
assessmentNotes: z.string().max(2000).nullable().optional(), assessmentNotes: z.string().max(2000).nullable().optional(),
}); });
// Zod schema for the live-analysis runtime snapshot that the owner device
// pushes on heartbeat/release and that viewer devices render.  Every field is
// optional so partial updates are accepted; `.passthrough()` keeps unknown
// keys, letting newer clients ship extra fields without being rejected.
const liveRuntimeSnapshotSchema = z.object({
  // Lifecycle phase of the analysis loop on the owner device.
  phase: z.enum(["idle", "analyzing", "saving", "safe", "failed"]).optional(),
  startedAt: z.number().optional(), // epoch milliseconds
  durationMs: z.number().optional(),
  currentAction: z.string().optional(), // stabilized action label shown to viewers
  rawAction: z.string().optional(), // raw classifier output before smoothing
  feedback: z.array(z.string()).optional(),
  // Per-dimension live scores keyed by dimension name; null when no score yet.
  liveScore: z.record(z.string(), z.number()).nullable().optional(),
  stabilityMeta: z.record(z.string(), z.any()).optional(),
  visibleSegments: z.number().optional(),
  unknownSegments: z.number().optional(),
  archivedVideoCount: z.number().optional(),
  // Rolling window of recently detected action segments (all times in ms).
  recentSegments: z.array(z.object({
    actionType: z.string(),
    isUnknown: z.boolean().optional(),
    startMs: z.number(),
    endMs: z.number(),
    durationMs: z.number(),
    confidenceAvg: z.number().optional(),
    score: z.number().optional(),
    clipLabel: z.string().optional(),
  })).optional(),
}).passthrough();
/**
 * Resolve the sid that identifies the runtime-owning device.
 * Falls back to a deterministic `legacy:<openId>` token when the session
 * carries no usable sid (null or empty string), so pre-sid sessions still
 * get a stable owner identity.
 */
function getRuntimeOwnerSid(ctx: { sessionSid: string | null; user: { openId: string } }) {
  const { sessionSid } = ctx;
  return sessionSid ? sessionSid : `legacy:${ctx.user.openId}`;
}
/**
 * Determine the caller's role for the user's live-analysis runtime.
 *
 * Returns `idle` when there is no runtime, when the runtime has gone stale
 * (owner stopped heartbeating for longer than LIVE_ANALYSIS_RUNTIME_TIMEOUT_MS
 * — the stale runtime is also ended server-side so a new owner can acquire),
 * or when the runtime is no longer active.  For an active, fresh runtime the
 * caller is `owner` when its sid matches the runtime's ownerSid, `viewer`
 * otherwise.
 *
 * Fix: the original assigned the result of endUserLiveAnalysisRuntime back to
 * `runtime` via `?? null as any` — a dead store (the function returns idle
 * immediately after) hiding behind an `as any` type-lie.  The assignment is
 * removed; `runtime` is now const.
 */
async function resolveLiveRuntimeRole(params: {
  userId: number;
  sessionSid: string;
}) {
  const runtime = await db.getUserLiveAnalysisRuntime(params.userId);
  if (!runtime) {
    return { role: "idle" as const, runtimeSession: null };
  }
  // Prefer the explicit heartbeat timestamp, falling back to coarser
  // timestamps for rows written before lastHeartbeatAt existed.
  const heartbeatAt = runtime.lastHeartbeatAt ?? runtime.updatedAt ?? runtime.startedAt;
  const isStale =
    runtime.status === "active" &&
    (!heartbeatAt || (Date.now() - heartbeatAt.getTime()) > db.LIVE_ANALYSIS_RUNTIME_TIMEOUT_MS);
  if (isStale) {
    // Reap the abandoned runtime, preserving its last snapshot; the caller
    // then observes an idle state and may acquire a fresh runtime.
    await db.endUserLiveAnalysisRuntime({
      userId: params.userId,
      runtimeId: runtime.id,
      snapshot: runtime.snapshot,
    });
    return { role: "idle" as const, runtimeSession: null };
  }
  if (runtime.status !== "active") {
    return { role: "idle" as const, runtimeSession: runtime };
  }
  return {
    role: runtime.ownerSid === params.sessionSid ? "owner" as const : "viewer" as const,
    runtimeSession: runtime,
  };
}
export const appRouter = router({ export const appRouter = router({
system: systemRouter, system: systemRouter,
@@ -455,6 +516,122 @@ export const appRouter = router({
return { session, segments }; return { session, segments };
}), }),
// Report the caller's current live-runtime role; read-only, no mutation.
runtimeGet: protectedProcedure.query(async ({ ctx }) =>
  resolveLiveRuntimeRole({
    userId: ctx.user.id,
    sessionSid: getRuntimeOwnerSid(ctx),
  })
),
// Acquire (or re-acquire) ownership of the user's live-analysis runtime.
runtimeAcquire: protectedProcedure
  .input(z.object({
    title: z.string().min(1).max(256),
    sessionMode: z.enum(["practice", "pk"]).default("practice"),
  }))
  .mutation(async ({ ctx, input }) => {
    const sessionSid = getRuntimeOwnerSid(ctx);
    const current = await resolveLiveRuntimeRole({
      userId: ctx.user.id,
      sessionSid,
    });
    // Another device already owns an active runtime: do not steal it —
    // return the viewer view so this device locks into viewer mode.
    if (current.role === "viewer" && current.runtimeSession?.status === "active") {
      return current;
    }
    // Re-acquire path: this sid already owns the active runtime, so refresh
    // title/mode/heartbeat in place, keeping the original startedAt.
    // Otherwise: create/replace the runtime row with a fresh idle snapshot
    // and no media session bound yet (bound later via heartbeat).
    const runtime = current.runtimeSession?.status === "active" && current.role === "owner"
      ? await db.updateUserLiveAnalysisRuntime(ctx.user.id, {
          ownerSid: sessionSid,
          status: "active",
          title: input.title,
          sessionMode: input.sessionMode,
          startedAt: current.runtimeSession.startedAt ?? new Date(),
          endedAt: null,
          lastHeartbeatAt: new Date(),
        })
      : await db.upsertUserLiveAnalysisRuntime(ctx.user.id, {
          ownerSid: sessionSid,
          status: "active",
          title: input.title,
          sessionMode: input.sessionMode,
          mediaSessionId: null,
          startedAt: new Date(),
          endedAt: null,
          lastHeartbeatAt: new Date(),
          snapshot: {
            phase: "idle",
            startedAt: Date.now(),
            durationMs: 0,
            currentAction: "unknown",
            rawAction: "unknown",
            feedback: [],
            visibleSegments: 0,
            unknownSegments: 0,
            archivedVideoCount: 0,
            recentSegments: [],
          },
        });
    // Either branch leaves this sid as the owner.
    return {
      role: "owner" as const,
      runtimeSession: runtime ?? null,
    };
  }),
// Owner-only heartbeat: refreshes lastHeartbeatAt and optionally updates the
// bound media session id and the live snapshot.  The db call resolves to
// nothing when the caller's sid is not the runtime owner, which is surfaced
// as FORBIDDEN.
runtimeHeartbeat: protectedProcedure
  .input(z.object({
    runtimeId: z.number(),
    mediaSessionId: z.string().max(96).nullable().optional(),
    snapshot: liveRuntimeSnapshotSchema.optional(),
  }))
  .mutation(async ({ ctx, input }) => {
    const updated = await db.updateLiveAnalysisRuntimeHeartbeat({
      userId: ctx.user.id,
      ownerSid: getRuntimeOwnerSid(ctx),
      runtimeId: input.runtimeId,
      mediaSessionId: input.mediaSessionId,
      snapshot: input.snapshot,
    });
    if (!updated) {
      throw new TRPCError({ code: "FORBIDDEN", message: "当前设备不是实时分析持有端" });
    }
    return {
      role: "owner" as const,
      runtimeSession: updated,
    };
  }),
// Idempotent release.  Ends the runtime when the caller owns it; when the db
// call resolves to nothing we only raise FORBIDDEN if some *other* sid still
// holds an active runtime — releasing an already-ended runtime succeeds.
runtimeRelease: protectedProcedure
  .input(z.object({
    runtimeId: z.number().optional(),
    snapshot: liveRuntimeSnapshotSchema.optional(),
  }).optional())
  .mutation(async ({ ctx, input }) => {
    const sessionSid = getRuntimeOwnerSid(ctx);
    const ended = await db.endUserLiveAnalysisRuntime({
      userId: ctx.user.id,
      ownerSid: sessionSid,
      runtimeId: input?.runtimeId,
      snapshot: input?.snapshot,
    });
    if (!ended) {
      const active = await db.getUserLiveAnalysisRuntime(ctx.user.id);
      const ownedElsewhere = active?.status === "active" && active.ownerSid !== sessionSid;
      if (ownedElsewhere) {
        throw new TRPCError({ code: "FORBIDDEN", message: "当前设备不是实时分析持有端" });
      }
    }
    return {
      success: true,
      runtimeSession: ended ?? null,
    };
  }),
// Generate AI correction suggestions // Generate AI correction suggestions
getCorrections: protectedProcedure getCorrections: protectedProcedure
.input(z.object({ .input(z.object({

查看文件

@@ -22,7 +22,9 @@ test("training page shows plan generation flow", async ({ page }) => {
await page.goto("/training"); await page.goto("/training");
await expect(page.getByTestId("training-title")).toBeVisible(); await expect(page.getByTestId("training-title")).toBeVisible();
const generateButton = page.getByRole("button", { name: "生成训练计划" }).last(); const generateButton = page
.getByRole("button", { name: "生成训练计划" })
.last();
await expect(generateButton).toBeVisible(); await expect(generateButton).toBeVisible();
await generateButton.click(); await generateButton.click();
await expect(page).toHaveURL(/\/training$/); await expect(page).toHaveURL(/\/training$/);
@@ -68,7 +70,74 @@ test("live camera starts analysis and produces scores", async ({ page }) => {
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible(); await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
}); });
test("live camera archives overlay videos into the library after analysis stops", async ({ page }) => { test("live camera switches into viewer mode when another device already owns analysis", async ({
page,
}) => {
await installAppMocks(page, { authenticated: true, liveViewerMode: true });
await page.goto("/live-camera");
await expect(page.getByText("同步观看模式")).toBeVisible();
await expect(page.getByText(/同步观看|重新同步/).first()).toBeVisible();
await expect(page.getByText("当前设备已锁定为观看模式")).toBeVisible();
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText(
"其他设备实时分析"
);
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText(
"移动端"
);
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText(
"均衡模式"
);
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText(
"猩猩"
);
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
});
// Regression test: a viewer title that reaches the client double-encoded
// (UTF-8 bytes misread as latin1, i.e. mojibake) must be repaired before
// rendering — the page shows the recovered title and never the garbled form.
test("live camera recovers mojibake viewer titles before rendering", async ({
  page,
}) => {
  const state = await installAppMocks(page, {
    authenticated: true,
    liveViewerMode: true,
  });
  // Simulate the corruption: encode the real title as UTF-8, then reinterpret
  // those bytes as latin1 — the classic mojibake round trip.
  const mojibakeTitle = Buffer.from("服务端同步烟雾测试", "utf8").toString(
    "latin1"
  );
  // Inject the garbled title into both the runtime row and its snapshot so
  // every render path sees the corrupted value.
  if (state.liveRuntime.runtimeSession) {
    state.liveRuntime.runtimeSession.title = mojibakeTitle;
    state.liveRuntime.runtimeSession.snapshot = {
      ...state.liveRuntime.runtimeSession.snapshot,
      title: mojibakeTitle,
    };
  }
  await page.goto("/live-camera");
  await expect(
    page.getByRole("heading", { name: "服务端同步烟雾测试" })
  ).toBeVisible();
  // The raw mojibake string must not appear anywhere on the page.
  await expect(page.getByText(mojibakeTitle)).toHaveCount(0);
});
// With the server-side media relay active the client must not fall back to
// WebRTC viewer peers: the seeded one-shot 409 is never consumed (i.e. the
// viewer-signal endpoint is never called), no viewer peer registers, and the
// relayed <video> element still renders.
test("live camera no longer opens viewer peer retries when server relay is active", async ({
  page,
}) => {
  const state = await installAppMocks(page, {
    authenticated: true,
    liveViewerMode: true,
    viewerSignalConflictOnce: true,
  });
  await page.goto("/live-camera");
  await expect(page.getByText("同步观看模式")).toBeVisible();
  // Still 1 means the endpoint was never hit; a retry would have decremented it.
  await expect.poll(() => state.viewerSignalConflictRemaining).toBe(1);
  // No viewer peer ever completed signaling against the media session.
  await expect.poll(() => state.mediaSession?.viewerCount ?? 0).toBe(0);
  await expect(page.getByTestId("live-camera-viewer-video")).toBeVisible();
});
test("live camera archives overlay videos into the library after analysis stops", async ({
page,
}) => {
await installAppMocks(page, { authenticated: true, videos: [] }); await installAppMocks(page, { authenticated: true, videos: [] });
await page.goto("/live-camera"); await page.goto("/live-camera");
@@ -82,7 +151,9 @@ test("live camera archives overlay videos into the library after analysis stops"
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible(); await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
await page.getByRole("button", { name: "结束分析" }).click(); await page.getByRole("button", { name: "结束分析" }).click();
await expect(page.getByText("分析结果已保存")).toBeVisible({ timeout: 8_000 }); await expect(page.getByText("分析结果已保存")).toBeVisible({
timeout: 8_000,
});
await page.goto("/videos"); await page.goto("/videos");
await expect(page.getByTestId("video-card")).toHaveCount(1); await expect(page.getByTestId("video-card")).toHaveCount(1);
@@ -90,7 +161,9 @@ test("live camera archives overlay videos into the library after analysis stops"
await expect(page.getByText("实时分析").first()).toBeVisible(); await expect(page.getByText("实时分析").first()).toBeVisible();
}); });
test("recorder flow archives a session and exposes it in videos", async ({ page }) => { test("recorder flow archives a session and exposes it in videos", async ({
page,
}) => {
await installAppMocks(page, { authenticated: true, videos: [] }); await installAppMocks(page, { authenticated: true, videos: [] });
await page.setViewportSize({ width: 390, height: 844 }); await page.setViewportSize({ width: 390, height: 844 });
@@ -101,7 +174,9 @@ test("recorder flow archives a session and exposes it in videos", async ({ page
await expect(focusShell).toBeVisible(); await expect(focusShell).toBeVisible();
await focusShell.getByTestId("recorder-start-camera-button").click(); await focusShell.getByTestId("recorder-start-camera-button").click();
await expect(focusShell.getByTestId("recorder-start-recording-button")).toBeVisible(); await expect(
focusShell.getByTestId("recorder-start-recording-button")
).toBeVisible();
await focusShell.getByTestId("recorder-start-recording-button").click(); await focusShell.getByTestId("recorder-start-recording-button").click();
await expect(focusShell.getByTestId("recorder-marker-button")).toBeVisible(); await expect(focusShell.getByTestId("recorder-marker-button")).toBeVisible();
@@ -110,9 +185,23 @@ test("recorder flow archives a session and exposes it in videos", async ({ page
await expect(page.getByText("手动标记")).toBeVisible(); await expect(page.getByText("手动标记")).toBeVisible();
await focusShell.getByTestId("recorder-finish-button").click(); await focusShell.getByTestId("recorder-finish-button").click();
await expect(focusShell.getByTestId("recorder-reset-button")).toBeVisible({ timeout: 8_000 }); await expect(focusShell.getByTestId("recorder-reset-button")).toBeVisible({
timeout: 8_000,
});
await page.goto("/videos"); await page.goto("/videos");
await expect(page.getByTestId("video-card")).toHaveCount(1); await expect(page.getByTestId("video-card")).toHaveCount(1);
await expect(page.getByText("E2E 录制")).toBeVisible(); await expect(page.getByText("E2E 录制")).toBeVisible();
}); });
// The recorder page must refuse to open the local camera while another
// device owns the live-analysis runtime (liveViewerMode seeds an active
// runtime held by a foreign sid): a warning is shown and the start-camera
// button stays disabled.
test("recorder blocks local camera when another device owns live analysis", async ({
  page,
}) => {
  await installAppMocks(page, { authenticated: true, liveViewerMode: true });
  await page.goto("/recorder");
  await expect(
    page.getByText("当前账号已有其他设备正在实时分析")
  ).toBeVisible();
  await expect(page.getByTestId("recorder-start-camera-button")).toBeDisabled();
});

查看文件

@@ -37,8 +37,10 @@ type MockMediaSession = {
id: string; id: string;
userId: string; userId: string;
title: string; title: string;
purpose?: "recording" | "relay";
status: string; status: string;
archiveStatus: string; archiveStatus: string;
previewStatus?: string;
format: string; format: string;
mimeType: string; mimeType: string;
qualityPreset: string; qualityPreset: string;
@@ -48,7 +50,9 @@ type MockMediaSession = {
uploadedSegments: number; uploadedSegments: number;
uploadedBytes: number; uploadedBytes: number;
durationMs: number; durationMs: number;
previewUpdatedAt?: string;
streamConnected: boolean; streamConnected: boolean;
viewerCount?: number;
playback: { playback: {
webmUrl?: string; webmUrl?: string;
mp4Url?: string; mp4Url?: string;
@@ -92,9 +96,14 @@ type MockAppState = {
adjustmentNotes: string | null; adjustmentNotes: string | null;
} | null; } | null;
mediaSession: MockMediaSession | null; mediaSession: MockMediaSession | null;
liveRuntime: {
role: "idle" | "owner" | "viewer";
runtimeSession: any | null;
};
nextVideoId: number; nextVideoId: number;
nextTaskId: number; nextTaskId: number;
authMeNullResponsesAfterLogin: number; authMeNullResponsesAfterLogin: number;
viewerSignalConflictRemaining: number;
}; };
function trpcResult(json: unknown) { function trpcResult(json: unknown) {
@@ -249,34 +258,52 @@ async function readTrpcInput(route: Route, operationIndex: number) {
if (!postData) return null; if (!postData) return null;
const parsed = JSON.parse(postData); const parsed = JSON.parse(postData);
return parsed?.json ?? parsed?.[operationIndex]?.json ?? parsed?.[String(operationIndex)]?.json ?? null; return (
parsed?.json ??
parsed?.[operationIndex]?.json ??
parsed?.[String(operationIndex)]?.json ??
null
);
} }
function buildMediaSession(user: MockUser, title: string): MockMediaSession { function buildMediaSession(
user: MockUser,
title: string,
purpose: "recording" | "relay" = "recording"
): MockMediaSession {
return { return {
id: "session-e2e", id: "session-e2e",
userId: String(user.id), userId: String(user.id),
title, title,
status: "created", purpose,
status: purpose === "relay" ? "recording" : "created",
archiveStatus: "idle", archiveStatus: "idle",
previewStatus: purpose === "relay" ? "ready" : "idle",
format: "webm", format: "webm",
mimeType: "video/webm", mimeType: "video/webm",
qualityPreset: "balanced", qualityPreset: "balanced",
facingMode: "environment", facingMode: "environment",
deviceKind: "mobile", deviceKind: "mobile",
reconnectCount: 0, reconnectCount: 0,
uploadedSegments: 0, uploadedSegments: purpose === "relay" ? 1 : 0,
uploadedBytes: 0, uploadedBytes: purpose === "relay" ? 1_280_000 : 0,
durationMs: 0, durationMs: purpose === "relay" ? 60_000 : 0,
previewUpdatedAt: purpose === "relay" ? nowIso() : undefined,
streamConnected: true, streamConnected: true,
playback: { playback: {
ready: false, ready: purpose !== "relay",
previewUrl:
purpose === "relay"
? "/media/assets/sessions/session-e2e/preview.webm"
: undefined,
}, },
markers: [], markers: [],
}; };
} }
function createTask(state: MockAppState, input: { function createTask(
state: MockAppState,
input: {
type: string; type: string;
title: string; title: string;
status?: string; status?: string;
@@ -284,7 +311,8 @@ function createTask(state: MockAppState, input: {
message?: string; message?: string;
result?: any; result?: any;
error?: string | null; error?: string | null;
}) { }
) {
const task = { const task = {
id: `task-${state.nextTaskId++}`, id: `task-${state.nextTaskId++}`,
userId: state.user.id, userId: state.user.id,
@@ -298,7 +326,8 @@ function createTask(state: MockAppState, input: {
attempts: input.status === "failed" ? 2 : 1, attempts: input.status === "failed" ? 2 : 1,
maxAttempts: input.type === "media_finalize" ? 90 : 3, maxAttempts: input.type === "media_finalize" ? 90 : 3,
startedAt: nowIso(), startedAt: nowIso(),
completedAt: input.status === "queued" || input.status === "running" ? null : nowIso(), completedAt:
input.status === "queued" || input.status === "running" ? null : nowIso(),
createdAt: nowIso(), createdAt: nowIso(),
updatedAt: nowIso(), updatedAt: nowIso(),
}; };
@@ -317,7 +346,8 @@ async function fulfillJson(route: Route, body: unknown) {
async function handleTrpc(route: Route, state: MockAppState) { async function handleTrpc(route: Route, state: MockAppState) {
const url = new URL(route.request().url()); const url = new URL(route.request().url());
const operations = url.pathname.replace("/api/trpc/", "").split(","); const operations = url.pathname.replace("/api/trpc/", "").split(",");
const results = await Promise.all(operations.map(async (operation, operationIndex) => { const results = await Promise.all(
operations.map(async (operation, operationIndex) => {
switch (operation) { switch (operation) {
case "auth.me": case "auth.me":
if (state.authenticated && state.authMeNullResponsesAfterLogin > 0) { if (state.authenticated && state.authMeNullResponsesAfterLogin > 0) {
@@ -428,6 +458,56 @@ async function handleTrpc(route: Route, state: MockAppState) {
return trpcResult(state.analyses); return trpcResult(state.analyses);
case "analysis.liveSessionList": case "analysis.liveSessionList":
return trpcResult([]); return trpcResult([]);
case "analysis.runtimeGet":
return trpcResult(state.liveRuntime);
case "analysis.runtimeAcquire":
if (
state.liveRuntime.runtimeSession?.status === "active" &&
state.liveRuntime.role === "viewer"
) {
return trpcResult(state.liveRuntime);
}
state.liveRuntime = {
role: "owner",
runtimeSession: {
id: 501,
title: "实时分析 正手",
sessionMode: "practice",
mediaSessionId: state.mediaSession?.id || null,
status: "active",
startedAt: nowIso(),
endedAt: null,
lastHeartbeatAt: nowIso(),
snapshot: {
phase: "analyzing",
currentAction: "forehand",
rawAction: "forehand",
visibleSegments: 1,
unknownSegments: 0,
durationMs: 1500,
feedback: ["节奏稳定"],
},
},
};
return trpcResult(state.liveRuntime);
case "analysis.runtimeHeartbeat": {
const input = await readTrpcInput(route, operationIndex);
if (state.liveRuntime.runtimeSession) {
state.liveRuntime.runtimeSession = {
...state.liveRuntime.runtimeSession,
mediaSessionId:
input?.mediaSessionId ??
state.liveRuntime.runtimeSession.mediaSessionId,
snapshot:
input?.snapshot ?? state.liveRuntime.runtimeSession.snapshot,
lastHeartbeatAt: nowIso(),
};
}
return trpcResult(state.liveRuntime);
}
case "analysis.runtimeRelease":
state.liveRuntime = { role: "idle", runtimeSession: null };
return trpcResult({ success: true, runtimeSession: null });
case "analysis.liveSessionSave": case "analysis.liveSessionSave":
return trpcResult({ sessionId: 1, trainingRecordId: 1 }); return trpcResult({ sessionId: 1, trainingRecordId: 1 });
case "task.list": case "task.list":
@@ -435,14 +515,18 @@ async function handleTrpc(route: Route, state: MockAppState) {
case "task.get": { case "task.get": {
const rawInput = url.searchParams.get("input"); const rawInput = url.searchParams.get("input");
const parsedInput = rawInput ? JSON.parse(rawInput) : {}; const parsedInput = rawInput ? JSON.parse(rawInput) : {};
const taskId = parsedInput.json?.taskId || parsedInput[0]?.json?.taskId; const taskId =
return trpcResult(state.tasks.find((task) => task.id === taskId) || null); parsedInput.json?.taskId || parsedInput[0]?.json?.taskId;
return trpcResult(
state.tasks.find(task => task.id === taskId) || null
);
} }
case "task.retry": { case "task.retry": {
const rawInput = url.searchParams.get("input"); const rawInput = url.searchParams.get("input");
const parsedInput = rawInput ? JSON.parse(rawInput) : {}; const parsedInput = rawInput ? JSON.parse(rawInput) : {};
const taskId = parsedInput.json?.taskId || parsedInput[0]?.json?.taskId; const taskId =
const task = state.tasks.find((item) => item.id === taskId); parsedInput.json?.taskId || parsedInput[0]?.json?.taskId;
const task = state.tasks.find(item => item.id === taskId);
if (task) { if (task) {
task.status = "succeeded"; task.status = "succeeded";
task.progress = 100; task.progress = 100;
@@ -496,13 +580,15 @@ async function handleTrpc(route: Route, state: MockAppState) {
type: "pose_correction_multimodal", type: "pose_correction_multimodal",
title: "动作纠正", title: "动作纠正",
result: { result: {
corrections: "## 动作概览\n整体节奏稳定,建议继续优化击球点前置。", corrections:
"## 动作概览\n整体节奏稳定,建议继续优化击球点前置。",
report: { report: {
priorityFixes: [ priorityFixes: [
{ {
title: "击球点前置", title: "击球点前置",
why: "击球点略靠后会影响挥拍连贯性。", why: "击球点略靠后会影响挥拍连贯性。",
howToPractice: "每组 8 次影子挥拍,刻意在身体前侧完成触球动作。", howToPractice:
"每组 8 次影子挥拍,刻意在身体前侧完成触球动作。",
successMetric: "连续 3 组都能稳定在身体前侧完成挥拍。", successMetric: "连续 3 组都能稳定在身体前侧完成挥拍。",
}, },
], ],
@@ -511,12 +597,17 @@ async function handleTrpc(route: Route, state: MockAppState) {
}).id, }).id,
}); });
case "video.registerExternal": case "video.registerExternal":
if (state.mediaSession?.playback.webmUrl || state.mediaSession?.playback.mp4Url) { if (
state.mediaSession?.playback.webmUrl ||
state.mediaSession?.playback.mp4Url
) {
state.videos = [ state.videos = [
{ {
id: state.nextVideoId++, id: state.nextVideoId++,
title: state.mediaSession.title, title: state.mediaSession.title,
url: state.mediaSession.playback.webmUrl || state.mediaSession.playback.mp4Url, url:
state.mediaSession.playback.webmUrl ||
state.mediaSession.playback.mp4Url,
format: "webm", format: "webm",
fileSize: state.mediaSession.playback.webmSize || 1024 * 1024, fileSize: state.mediaSession.playback.webmSize || 1024 * 1024,
exerciseType: "recording", exerciseType: "recording",
@@ -526,7 +617,10 @@ async function handleTrpc(route: Route, state: MockAppState) {
...state.videos, ...state.videos,
]; ];
} }
return trpcResult({ videoId: state.nextVideoId, url: state.mediaSession?.playback.webmUrl }); return trpcResult({
videoId: state.nextVideoId,
url: state.mediaSession?.playback.webmUrl,
});
case "achievement.list": case "achievement.list":
return trpcResult(buildStats(state.user).achievements); return trpcResult(buildStats(state.user).achievements);
case "rating.current": case "rating.current":
@@ -549,21 +643,35 @@ async function handleTrpc(route: Route, state: MockAppState) {
timing: 68, timing: 68,
matchReadiness: 60, matchReadiness: 60,
}, },
sourceSummary: { analyses: 1, liveSessions: 0, totalEffectiveActions: 12, totalPk: 0, activeDays: 1 }, sourceSummary: {
analyses: 1,
liveSessions: 0,
totalEffectiveActions: 12,
totalPk: 0,
activeDays: 1,
},
}, },
{ {
id: 2, id: 2,
rating: state.user.ntrpRating, rating: state.user.ntrpRating,
triggerType: "daily", triggerType: "daily",
createdAt: nowIso(), createdAt: nowIso(),
dimensionScores: buildStats(state.user).latestNtrpSnapshot.dimensionScores, dimensionScores: buildStats(state.user).latestNtrpSnapshot
sourceSummary: { analyses: 2, liveSessions: 1, totalEffectiveActions: 36, totalPk: 0, activeDays: 2 }, .dimensionScores,
sourceSummary: {
analyses: 2,
liveSessions: 1,
totalEffectiveActions: 36,
totalPk: 0,
activeDays: 2,
},
}, },
]); ]);
default: default:
return trpcResult(null); return trpcResult(null);
} }
})); })
);
await fulfillJson(route, results); await fulfillJson(route, results);
} }
@@ -588,6 +696,25 @@ async function handleMedia(route: Route, state: MockAppState) {
return; return;
} }
if (path.endsWith("/viewer-signal")) {
if (state.viewerSignalConflictRemaining > 0) {
state.viewerSignalConflictRemaining -= 1;
await route.fulfill({
status: 409,
contentType: "application/json",
body: JSON.stringify({ error: "viewer stream not ready" }),
});
return;
}
state.mediaSession.viewerCount = (state.mediaSession.viewerCount || 0) + 1;
await fulfillJson(route, {
viewerId: `viewer-${state.mediaSession.viewerCount}`,
type: "answer",
sdp: "mock-answer",
});
return;
}
if (path.endsWith("/signal")) { if (path.endsWith("/signal")) {
state.mediaSession.status = "recording"; state.mediaSession.status = "recording";
await fulfillJson(route, { type: "answer", sdp: "mock-answer" }); await fulfillJson(route, { type: "answer", sdp: "mock-answer" });
@@ -624,6 +751,16 @@ async function handleMedia(route: Route, state: MockAppState) {
} }
if (path === `/media/sessions/${state.mediaSession.id}`) { if (path === `/media/sessions/${state.mediaSession.id}`) {
if (state.mediaSession.purpose === "relay") {
state.mediaSession.previewStatus = "ready";
state.mediaSession.previewUpdatedAt = nowIso();
state.mediaSession.playback = {
ready: true,
webmUrl: "/media/assets/sessions/session-e2e/preview.webm",
webmSize: 1_800_000,
previewUrl: "/media/assets/sessions/session-e2e/preview.webm",
};
} else {
state.mediaSession.status = "archived"; state.mediaSession.status = "archived";
state.mediaSession.archiveStatus = "completed"; state.mediaSession.archiveStatus = "completed";
state.mediaSession.playback = { state.mediaSession.playback = {
@@ -634,6 +771,7 @@ async function handleMedia(route: Route, state: MockAppState) {
mp4Size: 1_800_000, mp4Size: 1_800_000,
previewUrl: "/media/assets/sessions/session-e2e/recording.webm", previewUrl: "/media/assets/sessions/session-e2e/recording.webm",
}; };
}
await fulfillJson(route, { session: state.mediaSession }); await fulfillJson(route, { session: state.mediaSession });
return; return;
} }
@@ -658,8 +796,17 @@ export async function installAppMocks(
analyses?: any[]; analyses?: any[];
userName?: string; userName?: string;
authMeNullResponsesAfterLogin?: number; authMeNullResponsesAfterLogin?: number;
liveViewerMode?: boolean;
viewerSignalConflictOnce?: boolean;
} }
) { ) {
const seededViewerSession = options?.liveViewerMode
? buildMediaSession(
buildUser(options?.userName),
"其他设备实时分析",
"relay"
)
: null;
const state: MockAppState = { const state: MockAppState = {
authenticated: options?.authenticated ?? false, authenticated: options?.authenticated ?? false,
user: buildUser(options?.userName), user: buildUser(options?.userName),
@@ -693,10 +840,83 @@ export async function installAppMocks(
], ],
tasks: [], tasks: [],
activePlan: null, activePlan: null,
mediaSession: null, mediaSession: seededViewerSession,
liveRuntime: options?.liveViewerMode
? {
role: "viewer",
runtimeSession: {
id: 777,
title: "其他设备实时分析",
sessionMode: "practice",
mediaSessionId: seededViewerSession?.id || null,
status: "active",
startedAt: nowIso(),
endedAt: null,
lastHeartbeatAt: nowIso(),
snapshot: {
phase: "analyzing",
title: "其他设备实时分析",
sessionMode: "practice",
qualityPreset: "balanced",
facingMode: "environment",
deviceKind: "mobile",
avatarEnabled: true,
avatarKey: "gorilla",
avatarLabel: "猩猩",
updatedAt: Date.parse(nowIso()),
currentAction: "forehand",
rawAction: "forehand",
durationMs: 3200,
visibleSegments: 2,
unknownSegments: 0,
archivedVideoCount: 1,
feedback: ["同步观看测试数据"],
liveScore: {
overall: 82,
posture: 80,
balance: 78,
technique: 84,
footwork: 76,
consistency: 79,
confidence: 88,
},
stabilityMeta: {
windowFrames: 24,
windowShare: 1,
windowProgress: 1,
switchCount: 1,
stableMs: 1800,
rawVolatility: 0.12,
pending: false,
candidateMs: 0,
},
recentSegments: [
{
actionType: "forehand",
isUnknown: false,
startMs: 800,
endMs: 2800,
durationMs: 2000,
confidenceAvg: 0.82,
score: 84,
peakScore: 88,
frameCount: 24,
issueSummary: ["击球点略靠后"],
keyFrames: [1000, 1800, 2600],
clipLabel: "正手挥拍 00:00 - 00:02",
},
],
},
},
}
: {
role: "idle",
runtimeSession: null,
},
nextVideoId: 100, nextVideoId: 100,
nextTaskId: 1, nextTaskId: 1,
authMeNullResponsesAfterLogin: options?.authMeNullResponsesAfterLogin ?? 0, authMeNullResponsesAfterLogin: options?.authMeNullResponsesAfterLogin ?? 0,
viewerSignalConflictRemaining: options?.viewerSignalConflictOnce ? 1 : 0,
}; };
await page.addInitScript(() => { await page.addInitScript(() => {
@@ -725,6 +945,73 @@ export async function installAppMocks(
return points; return points;
}; };
// Stand-in for a camera MediaStreamTrack inside the browser init script.
// Reports a fixed 1280x720@30fps environment-facing camera; constraint
// changes and stop() are accepted but have no effect.
class FakeVideoTrack {
  kind = "video";
  enabled = true;
  muted = false;
  readyState = "live"; // always "live": the fake never ends
  id = "fake-video-track";
  label = "Fake Camera";
  stop() {} // nothing to release for the fake
  getSettings() {
    return {
      facingMode: "environment",
      width: 1280,
      height: 720,
      frameRate: 30,
    };
  }
  getCapabilities() {
    return {};
  }
  // Resolves without applying anything, matching the real API's Promise shape.
  async applyConstraints() {
    return undefined;
  }
}
// Stand-in for a microphone MediaStreamTrack; mirrors FakeVideoTrack but
// reports no settings/capabilities.  Only instantiated when audio is
// requested via getUserMedia constraints.
class FakeAudioTrack {
  kind = "audio";
  enabled = true;
  muted = false;
  readyState = "live"; // always "live": the fake never ends
  id = "fake-audio-track";
  label = "Fake Mic";
  stop() {} // nothing to release for the fake
  getSettings() {
    return {};
  }
  getCapabilities() {
    return {};
  }
  // Resolves without applying anything, matching the real API's Promise shape.
  async applyConstraints() {
    return undefined;
  }
}
const createFakeMediaStream = (withAudio = false) => {
const videoTrack = new FakeVideoTrack();
const audioTrack = withAudio ? new FakeAudioTrack() : null;
const tracks = audioTrack ? [videoTrack, audioTrack] : [videoTrack];
return {
active: true,
id: `fake-stream-${Math.random().toString(36).slice(2)}`,
getTracks: () => tracks,
getVideoTracks: () => [videoTrack],
getAudioTracks: () => (audioTrack ? [audioTrack] : []),
addTrack: () => undefined,
removeTrack: () => undefined,
clone: () => createFakeMediaStream(withAudio),
} as unknown as MediaStream;
};
class FakePose { class FakePose {
callback = null; callback = null;
@@ -732,7 +1019,11 @@ export async function installAppMocks(
setOptions() {} setOptions() {}
onResults(callback: (results: { poseLandmarks: ReturnType<typeof buildFakeLandmarks> }) => void) { onResults(
callback: (results: {
poseLandmarks: ReturnType<typeof buildFakeLandmarks>;
}) => void
) {
this.callback = callback; this.callback = callback;
} }
@@ -753,9 +1044,23 @@ export async function installAppMocks(
value: async () => undefined, value: async () => undefined,
}); });
Object.defineProperty(HTMLMediaElement.prototype, "srcObject", {
configurable: true,
get() {
return (
(this as HTMLMediaElement & { __srcObject?: MediaStream })
.__srcObject ?? null
);
},
set(value) {
(this as HTMLMediaElement & { __srcObject?: MediaStream }).__srcObject =
value as MediaStream;
},
});
Object.defineProperty(HTMLCanvasElement.prototype, "captureStream", { Object.defineProperty(HTMLCanvasElement.prototype, "captureStream", {
configurable: true, configurable: true,
value: () => new MediaStream(), value: () => createFakeMediaStream(),
}); });
class FakeMediaRecorder extends EventTarget { class FakeMediaRecorder extends EventTarget {
@@ -779,7 +1084,11 @@ export async function installAppMocks(
if (this.state !== "recording") return; if (this.state !== "recording") return;
const event = new Event("dataavailable") as Event & { data?: Blob }; const event = new Event("dataavailable") as Event & { data?: Blob };
event.data = new Blob(["segment"], { type: this.mimeType }); event.data = new Blob(["segment"], { type: this.mimeType });
const handler = (this as unknown as { ondataavailable?: (evt: Event & { data?: Blob }) => void }).ondataavailable; const handler = (
this as unknown as {
ondataavailable?: (evt: Event & { data?: Blob }) => void;
}
).ondataavailable;
handler?.(event); handler?.(event);
this.dispatchEvent(event); this.dispatchEvent(event);
} }
@@ -801,9 +1110,12 @@ export async function installAppMocks(
localDescription: { type: string; sdp: string } | null = null; localDescription: { type: string; sdp: string } | null = null;
remoteDescription: { type: string; sdp: string } | null = null; remoteDescription: { type: string; sdp: string } | null = null;
onconnectionstatechange: (() => void) | null = null; onconnectionstatechange: (() => void) | null = null;
ontrack: ((event: { streams: MediaStream[] }) => void) | null = null;
addTrack() {} addTrack() {}
addTransceiver() {}
async createOffer() { async createOffer() {
return { type: "offer", sdp: "mock-offer" }; return { type: "offer", sdp: "mock-offer" };
} }
@@ -817,6 +1129,7 @@ export async function installAppMocks(
async setRemoteDescription(description: { type: string; sdp: string }) { async setRemoteDescription(description: { type: string; sdp: string }) {
this.remoteDescription = description; this.remoteDescription = description;
this.connectionState = "connected"; this.connectionState = "connected";
this.ontrack?.({ streams: [createFakeMediaStream()] });
this.onconnectionstatechange?.(); this.onconnectionstatechange?.();
} }
@@ -839,10 +1152,21 @@ export async function installAppMocks(
Object.defineProperty(navigator, "mediaDevices", { Object.defineProperty(navigator, "mediaDevices", {
configurable: true, configurable: true,
value: { value: {
getUserMedia: async () => new MediaStream(), getUserMedia: async (constraints?: { audio?: unknown }) =>
createFakeMediaStream(Boolean(constraints?.audio)),
enumerateDevices: async () => [ enumerateDevices: async () => [
{ deviceId: "cam-1", kind: "videoinput", label: "Front Camera", groupId: "g1" }, {
{ deviceId: "cam-2", kind: "videoinput", label: "Back Camera", groupId: "g1" }, deviceId: "cam-1",
kind: "videoinput",
label: "Front Camera",
groupId: "g1",
},
{
deviceId: "cam-2",
kind: "videoinput",
label: "Back Camera",
groupId: "g1",
},
], ],
addEventListener: () => undefined, addEventListener: () => undefined,
removeEventListener: () => undefined, removeEventListener: () => undefined,
@@ -850,8 +1174,8 @@ export async function installAppMocks(
}); });
}); });
await page.route("**/api/trpc/**", (route) => handleTrpc(route, state)); await page.route("**/api/trpc/**", route => handleTrpc(route, state));
await page.route("**/media/**", (route) => handleMedia(route, state)); await page.route("**/media/**", route => handleMedia(route, state));
return state; return state;
} }