比较提交
18 次代码提交
f0bbe4c82f
...
main
| 作者 | SHA1 | 提交日期 | |
|---|---|---|---|
|
|
06b9701e03 | ||
|
|
8e9e4915e2 | ||
|
|
634a4704c7 | ||
|
|
bb46d26c0e | ||
|
|
bacd712dbc | ||
|
|
78a7c755e3 | ||
|
|
a211562860 | ||
|
|
09b1b95e2c | ||
|
|
922a9fb63f | ||
|
|
31bead3452 | ||
|
|
a5103685fb | ||
|
|
f9db6ef590 | ||
|
|
13e59b8e8a | ||
|
|
2b72ef9200 | ||
|
|
09cd5b4d85 | ||
|
|
7aba508247 | ||
|
|
cf06de944f | ||
|
|
4e4122d758 |
@@ -9,6 +9,13 @@ export type CameraZoomState = {
|
||||
focusMode: string;
|
||||
};
|
||||
|
||||
/**
 * Result of a successful camera acquisition via requestCameraStream:
 * the stream itself plus metadata about how it was actually obtained.
 */
export type CameraRequestResult = {
  stream: MediaStream;
  // Facing mode actually in effect; may differ from the requested one when a
  // fallback candidate with the alternate lens succeeded.
  appliedFacingMode: "user" | "environment";
  // Whether the acquired stream carries at least one audio track.
  audioEnabled: boolean;
  // True when anything other than the first-choice video candidate succeeded,
  // or when requested audio had to be dropped.
  usedFallback: boolean;
};
|
||||
|
||||
type NumericRange = {
|
||||
min: number;
|
||||
max: number;
|
||||
@@ -66,6 +73,98 @@ export function getCameraVideoConstraints(
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeVideoConstraintCandidate(candidate: MediaTrackConstraints | true) {
|
||||
if (candidate === true) {
|
||||
return { label: "camera-any", video: true as const };
|
||||
}
|
||||
|
||||
return {
|
||||
label: JSON.stringify(candidate),
|
||||
video: candidate,
|
||||
};
|
||||
}
|
||||
|
||||
function createFallbackVideoCandidates(
|
||||
facingMode: "user" | "environment",
|
||||
isMobile: boolean,
|
||||
preset: CameraQualityPreset,
|
||||
) {
|
||||
const base = getCameraVideoConstraints(facingMode, isMobile, preset);
|
||||
const alternateFacing = facingMode === "environment" ? "user" : "environment";
|
||||
const lowRes = {
|
||||
facingMode,
|
||||
width: { ideal: isMobile ? 640 : 960 },
|
||||
height: { ideal: isMobile ? 360 : 540 },
|
||||
} satisfies MediaTrackConstraints;
|
||||
const lowResAlternate = {
|
||||
facingMode: alternateFacing,
|
||||
width: { ideal: isMobile ? 640 : 960 },
|
||||
height: { ideal: isMobile ? 360 : 540 },
|
||||
} satisfies MediaTrackConstraints;
|
||||
const anyCamera = {
|
||||
width: { ideal: isMobile ? 640 : 960 },
|
||||
height: { ideal: isMobile ? 360 : 540 },
|
||||
} satisfies MediaTrackConstraints;
|
||||
|
||||
const candidates = [
|
||||
normalizeVideoConstraintCandidate(base),
|
||||
normalizeVideoConstraintCandidate({
|
||||
...base,
|
||||
frameRate: undefined,
|
||||
}),
|
||||
normalizeVideoConstraintCandidate(lowRes),
|
||||
normalizeVideoConstraintCandidate(lowResAlternate),
|
||||
normalizeVideoConstraintCandidate(anyCamera),
|
||||
normalizeVideoConstraintCandidate(true),
|
||||
];
|
||||
|
||||
const deduped = new Map<string, { video: MediaTrackConstraints | true }>();
|
||||
candidates.forEach((candidate) => {
|
||||
if (!deduped.has(candidate.label)) {
|
||||
deduped.set(candidate.label, { video: candidate.video });
|
||||
}
|
||||
});
|
||||
return Array.from(deduped.values());
|
||||
}
|
||||
|
||||
export async function requestCameraStream(options: {
|
||||
facingMode: "user" | "environment";
|
||||
isMobile: boolean;
|
||||
preset: CameraQualityPreset;
|
||||
audio?: false | MediaTrackConstraints;
|
||||
}) {
|
||||
const videoCandidates = createFallbackVideoCandidates(options.facingMode, options.isMobile, options.preset);
|
||||
const audioCandidates = options.audio ? [options.audio, false] : [false];
|
||||
let lastError: unknown = null;
|
||||
|
||||
for (const audio of audioCandidates) {
|
||||
for (let index = 0; index < videoCandidates.length; index += 1) {
|
||||
const video = videoCandidates[index]?.video ?? true;
|
||||
try {
|
||||
const stream = await navigator.mediaDevices.getUserMedia({ video, audio });
|
||||
const videoTrack = stream.getVideoTracks()[0] || null;
|
||||
const settings = (
|
||||
videoTrack && typeof (videoTrack as MediaStreamTrack & { getSettings?: () => unknown }).getSettings === "function"
|
||||
? (videoTrack as MediaStreamTrack & { getSettings: () => unknown }).getSettings()
|
||||
: {}
|
||||
) as Record<string, unknown>;
|
||||
const appliedFacingMode = settings.facingMode === "user" ? "user" : settings.facingMode === "environment" ? "environment" : options.facingMode;
|
||||
|
||||
return {
|
||||
stream,
|
||||
appliedFacingMode,
|
||||
audioEnabled: stream.getAudioTracks().length > 0,
|
||||
usedFallback: index > 0 || audio === false && Boolean(options.audio),
|
||||
} satisfies CameraRequestResult;
|
||||
} catch (error) {
|
||||
lastError = error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw lastError instanceof Error ? lastError : new Error("无法访问摄像头");
|
||||
}
|
||||
|
||||
export function getLiveAnalysisBitrate(preset: CameraQualityPreset, isMobile: boolean) {
|
||||
switch (preset) {
|
||||
case "economy":
|
||||
|
||||
@@ -8,6 +8,136 @@ export type ChangeLogEntry = {
|
||||
};
|
||||
|
||||
export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
|
||||
{
|
||||
version: "2026.03.16-live-camera-runtime-refresh",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "8e9e491",
|
||||
summary: "修复实时分析页偶发残留在同步观看状态、标题乱码,以及摄像头预览绑定波动导致的启动失败。",
|
||||
features: [
|
||||
"live-camera 在打开拍摄引导、启用摄像头、开始分析前,都会先向服务端强制刷新 runtime 状态,避免旧的 viewer 锁残留导致本机明明已释放却仍无法启动",
|
||||
"同步观看标题新增乱码恢复逻辑,可自动把 UTF-8 被误按 Latin-1 显示的标题恢复成正常中文,避免出现 `æœ...` 一类异常标题",
|
||||
"摄像头启动链路改为以 `getUserMedia` 成功为准;即使本地预览 `<video>` 的 `srcObject` 或 `play()` 在当前浏览器里短暂失败,也不会直接把整次启动判死",
|
||||
"e2e mock 的媒体流补齐为带假视频轨道的流对象,并把 viewer 回归改为校验“服务端 relay、无 viewer-signal”行为,减少和旧 P2P 逻辑混淆",
|
||||
],
|
||||
tests: [
|
||||
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"live camera page exposes camera startup controls|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active\"",
|
||||
"pnpm build",
|
||||
"部署后线上 smoke: `https://te.hao.work/live-camera` 登录 H1 后可见空闲态“启动摄像头”入口,确认不再被残留 viewer 锁卡住;公开站点前端资源为 `assets/index-33wVjC4p.js` 与 `assets/index-tNGuStgv.css`",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-viewer-server-relay",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "bb46d26",
|
||||
summary: "实时分析同步观看改为由 media 服务中转帧图,不再依赖浏览器之间的 P2P 视频连接。",
|
||||
features: [
|
||||
"owner 端现在会把带骨架、关键点和虚拟形象叠层的合成画布压缩成 JPEG 并持续上传到 media 服务",
|
||||
"viewer 端改为直接拉取 media 服务中的最新同步帧图,不再建立 WebRTC viewer peer 连接,因此跨网络和多端观看更稳定",
|
||||
"同步观看模式文案改为明确提示“通过 media 服务中转”,等待同步时也会自动轮询最新画面",
|
||||
"media 服务新增 live-frame 上传与静态分发能力,并记录最近同步帧的更新时间,方便后续扩展成更高频的服务端中转流",
|
||||
],
|
||||
tests: [
|
||||
"cd media && go test ./...",
|
||||
"pnpm build",
|
||||
"playwright-skill 线上 smoke: 先用 media 服务创建 relay session、上传 live-frame,并把 H1 的 `live_analysis_runtime` 注入为 active viewer 场景;随后访问 `https://te.hao.work/live-camera`,确认页面进入“同步观看模式”、同步帧来自 `/media/assets/sessions/.../live-frame.jpg`,且 `viewer-signal` 请求数为 0",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-camera-startup-fallbacks",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "a211562",
|
||||
summary: "修复部分设备上摄像头因后置镜头约束、分辨率约束或麦克风不可用而直接启动失败的问题。",
|
||||
features: [
|
||||
"live-camera 与 recorder 改为共用分级降级的摄像头请求流程,会在当前画质失败时自动降分辨率、降约束并回退到兼容镜头",
|
||||
"当设备不支持默认后置摄像头或当前镜头不可用时,页面会自动切换到实际可用的镜头方向,避免直接报错后卡死在未启动状态",
|
||||
"recorder 预览启动不再被麦克风权限或麦克风设备异常整体拖死;麦克风不可用时会自动回退到仅视频模式",
|
||||
"兼容模式命中时前端会给出明确提示,方便区分“已自动降级成功”与“仍然无法访问摄像头”的场景",
|
||||
],
|
||||
tests: [
|
||||
"pnpm build",
|
||||
"部署后线上 smoke: `https://te.hao.work/` 已提供 `assets/index-CRxtWK07.js` 与 `assets/index-tNGuStgv.css`;通过注入 `getUserMedia` 回归验证 `/live-camera` 首轮高约束失败后会自动切到兼容摄像头模式,`/recorder` 在麦克风不可用时会自动回退到仅视频模式并继续启动预览",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-analysis-viewer-full-sync",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "922a9fb",
|
||||
summary: "多端同步观看改为按持有端快照完整渲染,另一设备可同步看到视频状态、模式、画质、虚拟形象和保存阶段信息。",
|
||||
features: [
|
||||
"viewer 端现在同步显示持有端的会话标题、训练模式、设备端、拍摄视角、画质模式、虚拟形象状态和最近同步时间",
|
||||
"同步观看时的分析阶段、保存阶段、已完成状态也会跟随主端刷新,不再只显示本地默认状态",
|
||||
"viewer 页面会自动关闭拍摄校准弹窗,避免同步观看时被“启用摄像头”流程遮挡",
|
||||
"新增 viewer 同步信息卡,明确允许 1 秒级延迟,并持续显示最近心跳时间",
|
||||
],
|
||||
tests: [
|
||||
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"live camera switches into viewer mode|viewer stream|recorder blocks\"",
|
||||
"pnpm build",
|
||||
"部署后线上 smoke: `https://te.hao.work/` 已提供 `assets/index-HRdM3fxq.js` 与 `assets/index-tNGuStgv.css`;同账号 H1 双端登录后,移动端 owner 可开始实时分析,桌面端 `/live-camera` 自动进入同步观看并显示主端信息、同步视频流,owner 点击结束分析后 viewer 会同步进入保存阶段",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-analysis-lock-hardening",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "f9db6ef",
|
||||
summary: "修复同账号多端实时分析在旧登录态下仍可重复占用摄像头的问题,补强同步观看重试、录制页占用锁,并修复部署后启动阶段长时间 502。",
|
||||
features: [
|
||||
"旧用户名登录 token 即使缺少 `sid`,现在也会按 token 本身派生唯一会话标识,不再把不同设备错误识别成同一持有端",
|
||||
"同步观看模式新增 viewer 自动重试:当持有端刚启动推流、viewer 首次连接返回 `viewer stream not ready` 时,会自动重连而不是一直黑屏",
|
||||
"在线录制页接入实时分析占用锁;当其他设备正在 `/live-camera` 分析时,本页会禁止再次启动摄像头和录制",
|
||||
"应用启动改为先监听 HTTP 端口、再后台串行执行教程图同步和标准库预热,修复新容器上线时公网长时间返回 502 的问题",
|
||||
"线上 smoke 已确认 `https://te.hao.work/live-camera` 与 `/recorder` 都已切换到本次新构建,公开站点不再返回 502",
|
||||
],
|
||||
tests: [
|
||||
"curl -I https://te.hao.work/",
|
||||
"pnpm check",
|
||||
"pnpm exec vitest run server/_core/sdk.test.ts server/features.test.ts",
|
||||
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"viewer mode|viewer stream|recorder blocks\"",
|
||||
"pnpm build",
|
||||
"线上 smoke: H1 手机端开启实时分析后,PC 端 `/live-camera` 自动进入同步观看并显示同步画面,`/recorder` 禁止启动摄像头;结束分析后会话可正常释放",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-analysis-runtime-migration",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "2b72ef9",
|
||||
summary: "修复实时分析因缺失 `live_analysis_runtime` 表导致的启动失败,并补齐迁移记录避免后续部署再次漏表。",
|
||||
features: [
|
||||
"生产库补建 `live_analysis_runtime` 表,并补写 `__drizzle_migrations` 中缺失的 `0011_live_analysis_runtime` 记录",
|
||||
"仓库内 Drizzle migration journal 补齐 `0011_live_analysis_runtime` 条目,后续 `docker compose` 部署可正确感知该迁移",
|
||||
"实时分析启动链路恢复,`/live-camera` 再次可以读取 runtime 锁并正常进入分析准备流程",
|
||||
"线上 smoke 已确认 `https://te.hao.work/` 正在提供本次新构建,当前前端资源为 `assets/index-B3BN5hY-.js` 与 `assets/index-BL6GQzUF.css`",
|
||||
],
|
||||
tests: [
|
||||
"pnpm check",
|
||||
"pnpm exec vitest run server/features.test.ts",
|
||||
"pnpm build",
|
||||
"docker compose exec -T db mysql ... SHOW TABLES LIKE 'live_analysis_runtime'",
|
||||
"curl -I https://te.hao.work/live-camera",
|
||||
"Playwright smoke: 登录 `H1` 后访问 `/live-camera`,`analysis.runtimeGet` / `analysis.runtimeAcquire` / `analysis.runtimeRelease` 全部返回 200",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-camera-multidevice-viewer",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "4e4122d",
|
||||
summary: "实时分析新增同账号多端互斥和同步观看模式,分析持有端独占摄像头,其它端只能查看同步画面与核心识别结果。",
|
||||
features: [
|
||||
"同一账号在 `/live-camera` 进入实时分析后,会写入按用户维度的 runtime 锁,其他设备不能重复启动摄像头或分析",
|
||||
"其他设备会自动进入“同步观看模式”,可订阅持有端的实时画面,并同步看到动作、评分、反馈、最近片段和归档段数",
|
||||
"同步观看复用 media 服务的 WebRTC viewer 通道,传输的是带骨架、关键点和虚拟形象覆盖后的合成画面",
|
||||
"runtime 锁按 session sid 区分持有端,兼容缺少 sid 的旧 token,超过 15 秒无心跳会自动判定为陈旧并释放",
|
||||
"线上 smoke 已确认 `https://te.hao.work/live-camera` 已切换到本次新构建,公开站点正在提供这次发布的最新前端资源",
|
||||
],
|
||||
tests: [
|
||||
"pnpm check",
|
||||
"pnpm exec vitest run server/features.test.ts",
|
||||
"go test ./... && go build ./... (media)",
|
||||
"pnpm build",
|
||||
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"live camera\"",
|
||||
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"recorder flow archives a session and exposes it in videos\"",
|
||||
"curl -I https://te.hao.work/live-camera",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-analysis-overlay-archive",
|
||||
releaseDate: "2026-03-16",
|
||||
|
||||
@@ -50,6 +50,9 @@ export type MediaSession = {
|
||||
previewUpdatedAt?: string;
|
||||
streamConnected: boolean;
|
||||
lastStreamAt?: string;
|
||||
viewerCount?: number;
|
||||
liveFrameUrl?: string;
|
||||
liveFrameUpdatedAt?: string;
|
||||
playback: {
|
||||
webmUrl?: string;
|
||||
mp4Url?: string;
|
||||
@@ -122,6 +125,22 @@ export async function signalMediaSession(sessionId: string, payload: { sdp: stri
|
||||
});
|
||||
}
|
||||
|
||||
export async function signalMediaViewerSession(sessionId: string, payload: { sdp: string; type: string }) {
|
||||
return request<{ viewerId: string; sdp: string; type: string }>(`/sessions/${sessionId}/viewer-signal`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(payload),
|
||||
});
|
||||
}
|
||||
|
||||
export async function uploadMediaLiveFrame(sessionId: string, blob: Blob) {
|
||||
return request<{ session: MediaSession }>(`/sessions/${sessionId}/live-frame`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": blob.type || "image/jpeg" },
|
||||
body: blob,
|
||||
});
|
||||
}
|
||||
|
||||
export async function uploadMediaSegment(
|
||||
sessionId: string,
|
||||
sequence: number,
|
||||
@@ -164,6 +183,10 @@ export async function getMediaSession(sessionId: string) {
|
||||
return request<{ session: MediaSession }>(`/sessions/${sessionId}`);
|
||||
}
|
||||
|
||||
export function getMediaAssetUrl(path: string) {
|
||||
return `${MEDIA_BASE}${path.startsWith("/") ? path : `/${path}`}`;
|
||||
}
|
||||
|
||||
export function formatRecordingTime(milliseconds: number) {
|
||||
const totalSeconds = Math.max(0, Math.floor(milliseconds / 1000));
|
||||
const minutes = Math.floor(totalSeconds / 60);
|
||||
|
||||
文件差异内容过多而无法显示
加载差异
@@ -31,7 +31,7 @@ import {
|
||||
recognizeActionFrame,
|
||||
stabilizeActionFrame,
|
||||
} from "@/lib/actionRecognition";
|
||||
import { applyTrackZoom, getCameraVideoConstraints, readTrackZoomState } from "@/lib/camera";
|
||||
import { applyTrackZoom, readTrackZoomState, requestCameraStream } from "@/lib/camera";
|
||||
import { formatDateTimeShanghai } from "@/lib/time";
|
||||
import {
|
||||
Activity,
|
||||
@@ -189,6 +189,10 @@ function summarizeActions(actionSummary: Record<ActionType, number>) {
|
||||
export default function Recorder() {
|
||||
const { user } = useAuth();
|
||||
const utils = trpc.useUtils();
|
||||
const runtimeQuery = trpc.analysis.runtimeGet.useQuery(undefined, {
|
||||
refetchInterval: 1000,
|
||||
refetchIntervalInBackground: true,
|
||||
});
|
||||
const finalizeTaskMutation = trpc.task.createMediaFinalize.useMutation({
|
||||
onSuccess: (data) => {
|
||||
setArchiveTaskId(data.taskId);
|
||||
@@ -262,6 +266,9 @@ export default function Recorder() {
|
||||
|
||||
const mobile = useMemo(() => isMobileDevice(), []);
|
||||
const mimeType = useMemo(() => pickRecorderMimeType(), []);
|
||||
const runtimeRole = runtimeQuery.data?.role ?? "idle";
|
||||
const liveAnalysisRuntime = runtimeQuery.data?.runtimeSession;
|
||||
const liveAnalysisOccupied = runtimeRole === "viewer" && liveAnalysisRuntime?.status === "active";
|
||||
const currentPlaybackUrl = mediaSession?.playback.mp4Url || mediaSession?.playback.webmUrl || "";
|
||||
const archiveTaskQuery = useBackgroundTask(archiveTaskId);
|
||||
const archiveProgress = archiveTaskQuery.data?.progress ?? getArchiveProgress(mediaSession);
|
||||
@@ -402,14 +409,21 @@ export default function Recorder() {
|
||||
preferredZoom = zoomTargetRef.current,
|
||||
preset: keyof typeof QUALITY_PRESETS = qualityPreset,
|
||||
) => {
|
||||
if (liveAnalysisOccupied) {
|
||||
const title = liveAnalysisRuntime?.title || "其他设备正在实时分析";
|
||||
toast.error(`${title},当前设备不能再开启录制摄像头`);
|
||||
throw new Error("当前账号已有其他设备正在实时分析");
|
||||
}
|
||||
try {
|
||||
if (streamRef.current) {
|
||||
streamRef.current.getTracks().forEach((track) => track.stop());
|
||||
streamRef.current = null;
|
||||
}
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({
|
||||
video: getCameraVideoConstraints(nextFacingMode, mobile, preset),
|
||||
const { stream, appliedFacingMode, audioEnabled, usedFallback } = await requestCameraStream({
|
||||
facingMode: nextFacingMode,
|
||||
isMobile: mobile,
|
||||
preset,
|
||||
audio: {
|
||||
echoCancellation: true,
|
||||
noiseSuppression: true,
|
||||
@@ -426,6 +440,9 @@ export default function Recorder() {
|
||||
|
||||
suppressTrackEndedRef.current = false;
|
||||
streamRef.current = stream;
|
||||
if (appliedFacingMode !== nextFacingMode) {
|
||||
setFacingMode(appliedFacingMode);
|
||||
}
|
||||
if (liveVideoRef.current) {
|
||||
liveVideoRef.current.srcObject = stream;
|
||||
await liveVideoRef.current.play();
|
||||
@@ -433,6 +450,12 @@ export default function Recorder() {
|
||||
await syncZoomState(preferredZoom, stream.getVideoTracks()[0] || null);
|
||||
setCameraError("");
|
||||
setCameraActive(true);
|
||||
if (usedFallback) {
|
||||
toast.info("当前设备已自动切换到兼容摄像头模式");
|
||||
}
|
||||
if (!audioEnabled) {
|
||||
toast.warning("麦克风不可用,已切换为仅视频模式");
|
||||
}
|
||||
return stream;
|
||||
} catch (error: any) {
|
||||
const message = error?.message || "无法访问摄像头";
|
||||
@@ -440,7 +463,7 @@ export default function Recorder() {
|
||||
toast.error(`摄像头启动失败: ${message}`);
|
||||
throw error;
|
||||
}
|
||||
}), [facingMode, mobile, qualityPreset, syncZoomState]);
|
||||
}), [facingMode, liveAnalysisOccupied, liveAnalysisRuntime?.title, mobile, qualityPreset, syncZoomState]);
|
||||
|
||||
const ensurePreviewStream = useCallback(async () => {
|
||||
if (streamRef.current) {
|
||||
@@ -849,6 +872,11 @@ export default function Recorder() {
|
||||
toast.error("请先登录后再开始录制");
|
||||
return;
|
||||
}
|
||||
if (liveAnalysisOccupied) {
|
||||
const title = liveAnalysisRuntime?.title || "其他设备正在实时分析";
|
||||
toast.error(`${title},当前设备不能同时开始录制`);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
setMode("preparing");
|
||||
@@ -898,7 +926,21 @@ export default function Recorder() {
|
||||
setMode("idle");
|
||||
toast.error(`启动录制失败: ${error?.message || "未知错误"}`);
|
||||
}
|
||||
}, [ensurePreviewStream, facingMode, mimeType, mobile, qualityPreset, startActionSampling, startRealtimePush, startRecorderLoop, syncSessionState, title, user]);
|
||||
}, [
|
||||
ensurePreviewStream,
|
||||
facingMode,
|
||||
liveAnalysisOccupied,
|
||||
liveAnalysisRuntime?.title,
|
||||
mimeType,
|
||||
mobile,
|
||||
qualityPreset,
|
||||
startActionSampling,
|
||||
startRealtimePush,
|
||||
startRecorderLoop,
|
||||
syncSessionState,
|
||||
title,
|
||||
user,
|
||||
]);
|
||||
|
||||
const finishRecording = useCallback(async () => {
|
||||
const session = currentSessionRef.current;
|
||||
@@ -1140,9 +1182,10 @@ export default function Recorder() {
|
||||
data-testid="recorder-start-camera-button"
|
||||
onClick={() => void startCamera()}
|
||||
className={buttonClass()}
|
||||
disabled={liveAnalysisOccupied}
|
||||
>
|
||||
<Camera className={iconClass} />
|
||||
{labelFor("启动摄像头", "启动")}
|
||||
{labelFor(liveAnalysisOccupied ? "实时分析占用中" : "启动摄像头", liveAnalysisOccupied ? "占用" : "启动")}
|
||||
</Button>
|
||||
) : (
|
||||
<>
|
||||
@@ -1150,9 +1193,10 @@ export default function Recorder() {
|
||||
data-testid="recorder-start-recording-button"
|
||||
onClick={() => void beginRecording()}
|
||||
className={buttonClass("record")}
|
||||
disabled={liveAnalysisOccupied}
|
||||
>
|
||||
<Circle className={`${iconClass} ${rail ? "fill-current" : "fill-current"}`} />
|
||||
{labelFor("开始录制", "录制")}
|
||||
{labelFor(liveAnalysisOccupied ? "实时分析占用中" : "开始录制", liveAnalysisOccupied ? "占用" : "录制")}
|
||||
</Button>
|
||||
<Button variant="outline" onClick={stopCamera} className={buttonClass("outline")}>
|
||||
<VideoOff className={iconClass} />
|
||||
@@ -1362,6 +1406,23 @@ export default function Recorder() {
|
||||
</Alert>
|
||||
) : null}
|
||||
|
||||
{liveAnalysisOccupied ? (
|
||||
<Alert className="border-amber-300/70 bg-amber-50 text-amber-950">
|
||||
<ShieldAlert className="h-4 w-4" />
|
||||
<AlertTitle>当前账号已有其他设备正在实时分析</AlertTitle>
|
||||
<AlertDescription>
|
||||
{liveAnalysisRuntime?.title || "其他设备正在实时分析"},本页已禁止再次启动摄像头和录制,避免同账号多端同时占用镜头。
|
||||
你可以前往
|
||||
{" "}
|
||||
<a href="/live-camera" className="font-medium underline underline-offset-4">
|
||||
实时分析页
|
||||
</a>
|
||||
{" "}
|
||||
查看同步画面与动作识别结果。
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
) : null}
|
||||
|
||||
<div className="grid gap-4 xl:grid-cols-[minmax(0,1.7fr)_minmax(340px,0.9fr)]">
|
||||
<section className="space-y-4">
|
||||
<Card className="overflow-hidden border-0 shadow-lg">
|
||||
|
||||
@@ -1,5 +1,193 @@
|
||||
# Tennis Training Hub - 变更日志
|
||||
|
||||
## 2026.03.16-live-camera-runtime-refresh (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- `/live-camera` 在打开拍摄引导、启用摄像头、开始分析前,都会先向服务端强制刷新 runtime 状态,避免旧的同步观看锁残留导致本机明明已释放却仍无法启动
|
||||
- 新增 runtime 标题乱码恢复逻辑,可自动把 UTF-8 被误按 Latin-1 显示的标题恢复成正常中文,避免出现 `æœ...` 一类异常标题
|
||||
- 摄像头启动链路改为以 `getUserMedia` 成功为准;即使本地预览 `<video>` 的 `srcObject` 或 `play()` 在当前浏览器中短暂失败,也不会直接把整次启动判死
|
||||
- e2e mock 的媒体流补齐为带假视频轨道的流对象,并把 viewer 回归改为校验“服务端 relay、无 viewer-signal”行为,避免继续按旧 P2P 逻辑断言
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera page exposes camera startup controls|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active"`
|
||||
- `pnpm build`
|
||||
- 部署后线上 smoke:登录 `H1` 后访问 `https://te.hao.work/live-camera`,确认空闲态“启动摄像头”入口可见,不再被残留 viewer 锁卡住
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建
|
||||
- 当前公开站点前端资源 revision:`assets/index-33wVjC4p.js` 与 `assets/index-tNGuStgv.css`
|
||||
- 真实验证已通过:登录 `H1` 后访问 `https://te.hao.work/live-camera`,页面会正常显示“摄像头未启动 / 启动摄像头”,说明旧的 viewer 锁残留不会再把空闲设备卡在同步观看模式
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `8e9e491`
|
||||
|
||||
## 2026.03.16-live-viewer-server-relay (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- `/live-camera` 的同步观看改为由 media 服务中转最新合成帧图,不再依赖浏览器之间的 P2P WebRTC viewer 连接
|
||||
- owner 端会把“原视频 + 骨架/关键点 + 虚拟形象”的合成画布压缩成 JPEG 并持续上传到 media 服务
|
||||
- viewer 端改为自动轮询 media 服务中的最新同步帧图,因此即使浏览器之间无法直连,也能继续看到同步画面和状态
|
||||
- 同步观看模式文案已调整为明确提示“通过 media 服务中转”,等待阶段会继续自动刷新,而不是停留在 P2P 连接失败状态
|
||||
- media 服务新增 live-frame 上传与静态分发能力,并记录最近同步帧时间,方便后续继续扩展更高频的服务端 relay
|
||||
|
||||
### 测试
|
||||
|
||||
- `cd media && go test ./...`
|
||||
- `pnpm build`
|
||||
- `playwright-skill` 线上 smoke:先用 media 服务创建 relay session、上传 live-frame,并把 `H1` 的 `live_analysis_runtime` 注入为 active viewer 场景;随后访问 `https://te.hao.work/live-camera`,确认页面进入“同步观看模式”、同步帧来自 `/media/assets/sessions/.../live-frame.jpg`,且 `viewer-signal` 请求数为 `0`
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建
|
||||
- 当前公开站点前端资源 revision:`assets/index-BC-IupO8.js` 与 `assets/index-tNGuStgv.css`
|
||||
- 真实验证已通过:viewer 端进入“同步观看模式”后,画面由 media 服务静态分发的 `live-frame.jpg` 提供,已确认不再触发 `/viewer-signal` P2P 观看请求
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `bb46d26`
|
||||
|
||||
## 2026.03.16-camera-startup-fallbacks (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 修复部分设备在 `/live-camera` 和 `/recorder` 中因默认后置镜头、分辨率或帧率约束不兼容而直接启动摄像头失败的问题
|
||||
- 摄像头请求现在会自动按当前画质、去掉高约束、低分辨率、备用镜头、任意可用镜头依次降级重试
|
||||
- `/recorder` 在麦克风不可用或麦克风权限未给出时,会自动回退到仅视频模式,不再让整次预览启动失败
|
||||
- 如果实际启用的是兼容镜头或降级模式,页面会显示提示,帮助区分“自动修复成功”与“仍然无法访问摄像头”
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm build`
|
||||
- `playwright-skill` 线上 smoke:通过注入 `getUserMedia` 回归验证 `/live-camera` 首轮高约束失败后会自动降级到兼容摄像头模式,`/recorder` 在麦克风不可用时会自动回退到仅视频模式并继续启动预览
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建
|
||||
- 当前公开站点前端资源 revision:`assets/index-CRxtWK07.js` 与 `assets/index-tNGuStgv.css`
|
||||
- 真实回归已通过:模拟高约束失败时,`/live-camera` 会提示“当前设备已自动切换到兼容摄像头模式”并继续启动;模拟麦克风不可用时,`/recorder` 会提示“麦克风不可用,已切换为仅视频模式”并继续显示录制入口
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `a211562`
|
||||
|
||||
## 2026.03.16-live-analysis-viewer-full-sync (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 同账号多端同步观看时,viewer 端现在会按持有端 runtime snapshot 完整渲染,不再混用本地默认状态
|
||||
- `/live-camera` viewer 端新增主端同步信息卡,可看到当前会话标题、训练模式、设备端、拍摄视角、画质模式、虚拟形象状态和最近同步时间
|
||||
- viewer 端现在会同步显示主端当前处于“分析中 / 保存中 / 已保存 / 保存失败”的阶段状态
|
||||
- viewer 页面在同步观看模式下会自动关闭拍摄校准弹窗,避免被“启用摄像头”引导遮挡画面和状态信息
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera switches into viewer mode|viewer stream|recorder blocks"`
|
||||
- `pnpm build`
|
||||
- `playwright-skill` 线上 smoke:同账号 `H1` 双端登录后,移动端 owner 开始实时分析,桌面端 `/live-camera` 进入同步观看并显示主端信息、同步视频流,owner 点击结束分析后 viewer 同步进入保存阶段
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建
|
||||
- 当前公开站点前端资源 revision:`assets/index-HRdM3fxq.js` 与 `assets/index-tNGuStgv.css`
|
||||
- 真实双端验证已通过:同账号 `H1` 在移动端开启实时分析后,桌面端 `/live-camera` 会自动进入同步观看模式,显示主端设备信息、最近同步时间和远端视频流;owner 点击结束分析后,viewer 会同步进入“保存中”阶段
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `922a9fb`
|
||||
|
||||
## 2026.03.16-live-analysis-lock-hardening (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 修复同账号多端实时分析在旧登录态下仍可重复占用摄像头的问题;缺少 `sid` 的旧 token 现在会按 token 本身派生唯一会话标识
|
||||
- `/live-camera` 的同步观看模式新增自动重试;当持有端刚启动推流、viewer 首次连接返回 `viewer stream not ready` 时,会继续重连,不再长时间停留在无画面状态
|
||||
- `/recorder` 接入实时分析占用锁;其他设备正在实时分析时,本页会禁止再次启动摄像头和开始录制,并提示前往 `/live-camera` 查看同步画面
|
||||
- 应用启动改为先监听 HTTP 端口、再后台串行执行教程图同步和标准库预热,修复新容器上线时公网长时间返回 `502`
|
||||
|
||||
### 测试
|
||||
|
||||
- `curl -I https://te.hao.work/`
|
||||
- `pnpm check`
|
||||
- `pnpm exec vitest run server/_core/sdk.test.ts server/features.test.ts`
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "viewer mode|viewer stream|recorder blocks"`
|
||||
- `playwright-skill` 线上校验:登录 `H1` 后访问 `/changelog`,确认 `2026.03.16-live-analysis-lock-hardening` 与仓库版本 `f9db6ef` 已展示
|
||||
- `pnpm build`
|
||||
- Playwright 线上 smoke:`H1` 手机端开启实时分析后,PC 端 `/live-camera` 自动进入同步观看并显示同步画面,`/recorder` 禁止启动摄像头;结束分析后会话可正常释放
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建,不再返回 `502`
|
||||
- 当前公开站点前端资源 revision:`assets/index-mi8CPCFI.js` 与 `assets/index-Cp_VJ8sf.css`
|
||||
- 真实双端验证已通过:同账号 `H1` 手机端开始实时分析后,PC 端 `/live-camera` 进入同步观看模式且可拉起同步流,`/recorder` 页面会阻止再次占用摄像头
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `f9db6ef`
|
||||
|
||||
## 2026.03.16-live-analysis-runtime-migration (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 修复生产环境缺失 `live_analysis_runtime` 表导致 `/live-camera` 启动实时分析时报 SQL 查询失败的问题
|
||||
- 生产库已补建 `live_analysis_runtime` 表,并写入缺失的 `0011_live_analysis_runtime` 迁移记录,避免后续重复报错
|
||||
- 仓库内 `drizzle/meta/_journal.json` 已补齐 `0011_live_analysis_runtime` 条目,后续 `docker compose` 部署可正确识别该迁移
|
||||
- 实时分析 runtime 锁恢复正常后,同账号多端互斥与同步观看流程可继续工作
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm check`
|
||||
- `pnpm exec vitest run server/features.test.ts`
|
||||
- `pnpm build`
|
||||
- `docker compose exec -T db mysql ... SHOW TABLES LIKE 'live_analysis_runtime'`
|
||||
- `curl -I https://te.hao.work/live-camera`
|
||||
- Playwright smoke:登录 `H1` 后访问 `/live-camera`,`analysis.runtimeGet` / `analysis.runtimeAcquire` / `analysis.runtimeRelease` 全部返回 `200`
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建
|
||||
- 当前公开站点前端资源 revision:`assets/index-B3BN5hY-.js` 与 `assets/index-BL6GQzUF.css`
|
||||
- `/live-camera` 已恢复可用,线上不再出现 `live_analysis_runtime` 缺表导致的 SQL 查询失败
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `2b72ef9`
|
||||
|
||||
## 2026.03.16-live-camera-multidevice-viewer (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- `/live-camera` 新增同账号多端 runtime 锁;一个设备开始实时分析后,其他设备不能再次启动摄像头或分析
|
||||
- 其他设备会自动进入“同步观看模式”,可查看持有端同步推送的实时画面、当前动作、评分、反馈和最近动作片段
|
||||
- 同步观看复用 media 服务新增的 `/viewer-signal` WebRTC 通道,直接订阅“原视频 + 骨架 + 关键点 + 虚拟形象”的合成画面
|
||||
- runtime 心跳按 `sid` 维度识别持有端,兼容旧 token 缺失可选字段的情况;超过 15 秒无心跳会自动释放陈旧锁
|
||||
- `/live-camera` 前端新增 owner / viewer 双模式切换,观看端会禁用镜头切换、重新校准、质量调整和分析启动
|
||||
- e2e mock 新增 viewer 模式和 runtime 接口覆盖,保证浏览器测试可以直接验证多端互斥与同步观看
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm check`
|
||||
- `pnpm exec vitest run server/features.test.ts`
|
||||
- `go test ./...`
|
||||
- `go build ./...`
|
||||
- `pnpm build`
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"`
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "recorder flow archives a session and exposes it in videos"`
|
||||
- `curl -I https://te.hao.work/live-camera`
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/live-camera` 已切换到本次新前端构建
|
||||
- 公开站点确认已经提供本次发布的最新前端资源
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `4e4122d`
|
||||
|
||||
## 2026.03.16-live-analysis-overlay-archive (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
@@ -0,0 +1,17 @@
|
||||
-- Per-user runtime lock for the live-camera analysis feature: at most one row
-- per user (enforced by the unique constraint below) records which session
-- currently owns live analysis.
CREATE TABLE `live_analysis_runtime` (
  `id` int AUTO_INCREMENT NOT NULL,
  `userId` int NOT NULL,
  -- Owning session sid; nullable for legacy tokens that carry no sid claim.
  `ownerSid` varchar(96),
  `status` enum('idle','active','ended') NOT NULL DEFAULT 'idle',
  `title` varchar(256),
  `sessionMode` enum('practice','pk') NOT NULL DEFAULT 'practice',
  `mediaSessionId` varchar(96),
  `startedAt` timestamp,
  `endedAt` timestamp,
  -- Heartbeat timestamp used for stale-lock detection (locks without a recent
  -- heartbeat are released by the application).
  `lastHeartbeatAt` timestamp,
  `snapshot` json,
  `createdAt` timestamp NOT NULL DEFAULT (now()),
  `updatedAt` timestamp NOT NULL DEFAULT (now()) ON UPDATE CURRENT_TIMESTAMP,
  CONSTRAINT `live_analysis_runtime_id` PRIMARY KEY(`id`),
  -- One runtime row per user implements the single-analysis-per-account lock.
  CONSTRAINT `live_analysis_runtime_user_idx` UNIQUE(`userId`)
);
|
||||
@@ -78,6 +78,13 @@
|
||||
"when": 1773662400000,
|
||||
"tag": "0010_remove_non_tennis_tutorials",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 11,
|
||||
"version": "5",
|
||||
"when": 1773691200000,
|
||||
"tag": "0011_live_analysis_runtime",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -16,6 +16,21 @@ export const users = mysqlTable("users", {
|
||||
trainingGoals: text("trainingGoals"),
|
||||
/** NTRP rating (1.0 - 5.0) */
|
||||
ntrpRating: float("ntrpRating").default(1.5),
|
||||
/** Manual NTRP baseline before automated rating is established */
|
||||
manualNtrpRating: float("manualNtrpRating"),
|
||||
manualNtrpCapturedAt: timestamp("manualNtrpCapturedAt"),
|
||||
/** Training assessment profile */
|
||||
heightCm: float("heightCm"),
|
||||
weightKg: float("weightKg"),
|
||||
sprintSpeedScore: int("sprintSpeedScore"),
|
||||
explosivePowerScore: int("explosivePowerScore"),
|
||||
agilityScore: int("agilityScore"),
|
||||
enduranceScore: int("enduranceScore"),
|
||||
flexibilityScore: int("flexibilityScore"),
|
||||
coreStabilityScore: int("coreStabilityScore"),
|
||||
shoulderMobilityScore: int("shoulderMobilityScore"),
|
||||
hipMobilityScore: int("hipMobilityScore"),
|
||||
assessmentNotes: text("assessmentNotes"),
|
||||
/** Total training sessions completed */
|
||||
totalSessions: int("totalSessions").default(0),
|
||||
/** Total training minutes */
|
||||
@@ -215,6 +230,30 @@ export const liveAnalysisSessions = mysqlTable("live_analysis_sessions", {
|
||||
export type LiveAnalysisSession = typeof liveAnalysisSessions.$inferSelect;
|
||||
export type InsertLiveAnalysisSession = typeof liveAnalysisSessions.$inferInsert;
|
||||
|
||||
/**
 * Per-user runtime state for the current live-camera analysis lock.
 * One row per user (unique index on userId) records which session currently
 * owns live analysis; the heartbeat timestamp lets stale locks be released.
 */
export const liveAnalysisRuntime = mysqlTable("live_analysis_runtime", {
  id: int("id").autoincrement().primaryKey(),
  userId: int("userId").notNull(),
  // Owning session sid; may be null for legacy tokens lacking a sid claim.
  ownerSid: varchar("ownerSid", { length: 96 }),
  status: mysqlEnum("status", ["idle", "active", "ended"]).default("idle").notNull(),
  title: varchar("title", { length: 256 }),
  sessionMode: mysqlEnum("sessionMode", ["practice", "pk"]).default("practice").notNull(),
  // Media-service session id the owner is streaming/relaying through.
  mediaSessionId: varchar("mediaSessionId", { length: 96 }),
  startedAt: timestamp("startedAt"),
  endedAt: timestamp("endedAt"),
  // Stale-lock detection: a missing/old heartbeat marks the lock as abandoned.
  lastHeartbeatAt: timestamp("lastHeartbeatAt"),
  // Opaque owner-side state snapshot rendered by viewer clients.
  snapshot: json("snapshot"),
  createdAt: timestamp("createdAt").defaultNow().notNull(),
  updatedAt: timestamp("updatedAt").defaultNow().onUpdateNow().notNull(),
}, (table) => ({
  // Enforces at most one runtime lock row per user.
  userIdUnique: uniqueIndex("live_analysis_runtime_user_idx").on(table.userId),
}));

export type LiveAnalysisRuntime = typeof liveAnalysisRuntime.$inferSelect;
export type InsertLiveAnalysisRuntime = typeof liveAnalysisRuntime.$inferInsert;
|
||||
|
||||
/**
|
||||
* Action segments extracted from a realtime analysis session.
|
||||
*/
|
||||
@@ -390,15 +429,34 @@ export type InsertUserAchievement = typeof userAchievements.$inferInsert;
|
||||
*/
|
||||
export const tutorialVideos = mysqlTable("tutorial_videos", {
|
||||
id: int("id").autoincrement().primaryKey(),
|
||||
slug: varchar("slug", { length: 128 }),
|
||||
title: varchar("title", { length: 256 }).notNull(),
|
||||
category: varchar("category", { length: 64 }).notNull(),
|
||||
skillLevel: mysqlEnum("skillLevel", ["beginner", "intermediate", "advanced"]).default("beginner"),
|
||||
topicArea: varchar("topicArea", { length: 32 }).default("tennis_skill"),
|
||||
contentFormat: varchar("contentFormat", { length: 16 }).default("video"),
|
||||
sourcePlatform: varchar("sourcePlatform", { length: 16 }).default("none"),
|
||||
description: text("description"),
|
||||
heroSummary: text("heroSummary"),
|
||||
keyPoints: json("keyPoints"),
|
||||
commonMistakes: json("commonMistakes"),
|
||||
videoUrl: text("videoUrl"),
|
||||
externalUrl: text("externalUrl"),
|
||||
platformVideoId: varchar("platformVideoId", { length: 64 }),
|
||||
thumbnailUrl: text("thumbnailUrl"),
|
||||
duration: int("duration"),
|
||||
estimatedEffortMinutes: int("estimatedEffortMinutes"),
|
||||
prerequisites: json("prerequisites"),
|
||||
learningObjectives: json("learningObjectives"),
|
||||
stepSections: json("stepSections"),
|
||||
deliverables: json("deliverables"),
|
||||
relatedDocPaths: json("relatedDocPaths"),
|
||||
viewCount: int("viewCount"),
|
||||
commentCount: int("commentCount"),
|
||||
metricsFetchedAt: timestamp("metricsFetchedAt"),
|
||||
completionAchievementKey: varchar("completionAchievementKey", { length: 64 }),
|
||||
isFeatured: int("isFeatured").default(0),
|
||||
featuredOrder: int("featuredOrder").default(0),
|
||||
sortOrder: int("sortOrder").default(0),
|
||||
isPublished: int("isPublished").default(1),
|
||||
createdAt: timestamp("createdAt").defaultNow().notNull(),
|
||||
@@ -416,6 +474,8 @@ export const tutorialProgress = mysqlTable("tutorial_progress", {
|
||||
userId: int("userId").notNull(),
|
||||
tutorialId: int("tutorialId").notNull(),
|
||||
watched: int("watched").default(0),
|
||||
completed: int("completed").default(0),
|
||||
completedAt: timestamp("completedAt"),
|
||||
comparisonVideoId: int("comparisonVideoId"),
|
||||
selfScore: float("selfScore"),
|
||||
notes: text("notes"),
|
||||
|
||||
261
media/main.go
261
media/main.go
@@ -104,6 +104,9 @@ type Session struct {
|
||||
PreviewUpdatedAt string `json:"previewUpdatedAt,omitempty"`
|
||||
StreamConnected bool `json:"streamConnected"`
|
||||
LastStreamAt string `json:"lastStreamAt,omitempty"`
|
||||
ViewerCount int `json:"viewerCount"`
|
||||
LiveFrameURL string `json:"liveFrameUrl,omitempty"`
|
||||
LiveFrameUpdated string `json:"liveFrameUpdatedAt,omitempty"`
|
||||
Playback PlaybackInfo `json:"playback"`
|
||||
Segments []SegmentMeta `json:"segments"`
|
||||
Markers []Marker `json:"markers"`
|
||||
@@ -151,19 +154,23 @@ type FinalizeRequest struct {
|
||||
}
|
||||
|
||||
type sessionStore struct {
|
||||
rootDir string
|
||||
public string
|
||||
mu sync.RWMutex
|
||||
sessions map[string]*Session
|
||||
peers map[string]*webrtc.PeerConnection
|
||||
rootDir string
|
||||
public string
|
||||
mu sync.RWMutex
|
||||
sessions map[string]*Session
|
||||
peers map[string]*webrtc.PeerConnection
|
||||
viewerPeers map[string]map[string]*webrtc.PeerConnection
|
||||
videoTracks map[string]*webrtc.TrackLocalStaticRTP
|
||||
}
|
||||
|
||||
func newSessionStore(rootDir string) (*sessionStore, error) {
|
||||
store := &sessionStore{
|
||||
rootDir: rootDir,
|
||||
public: filepath.Join(rootDir, "public"),
|
||||
sessions: map[string]*Session{},
|
||||
peers: map[string]*webrtc.PeerConnection{},
|
||||
rootDir: rootDir,
|
||||
public: filepath.Join(rootDir, "public"),
|
||||
sessions: map[string]*Session{},
|
||||
peers: map[string]*webrtc.PeerConnection{},
|
||||
viewerPeers: map[string]map[string]*webrtc.PeerConnection{},
|
||||
videoTracks: map[string]*webrtc.TrackLocalStaticRTP{},
|
||||
}
|
||||
if err := os.MkdirAll(filepath.Join(rootDir, "sessions"), 0o755); err != nil {
|
||||
return nil, err
|
||||
@@ -224,6 +231,14 @@ func (s *sessionStore) publicDir(id string) string {
|
||||
return filepath.Join(s.public, "sessions", id)
|
||||
}
|
||||
|
||||
func (s *sessionStore) liveFramePath(id string) string {
|
||||
return filepath.Join(s.publicDir(id), "live-frame.jpg")
|
||||
}
|
||||
|
||||
func (s *sessionStore) liveFrameURL(id string) string {
|
||||
return fmt.Sprintf("/media/assets/sessions/%s/live-frame.jpg", id)
|
||||
}
|
||||
|
||||
func (s *sessionStore) saveSession(session *Session) error {
|
||||
session.UpdatedAt = time.Now().UTC().Format(time.RFC3339)
|
||||
dir := s.sessionDir(session.ID)
|
||||
@@ -294,6 +309,42 @@ func (s *sessionStore) replacePeer(id string, peer *webrtc.PeerConnection) {
|
||||
s.peers[id] = peer
|
||||
}
|
||||
|
||||
func (s *sessionStore) replaceViewerPeer(sessionID string, viewerID string, peer *webrtc.PeerConnection) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if _, ok := s.viewerPeers[sessionID]; !ok {
|
||||
s.viewerPeers[sessionID] = map[string]*webrtc.PeerConnection{}
|
||||
}
|
||||
if existing, ok := s.viewerPeers[sessionID][viewerID]; ok {
|
||||
_ = existing.Close()
|
||||
}
|
||||
s.viewerPeers[sessionID][viewerID] = peer
|
||||
if session, ok := s.sessions[sessionID]; ok {
|
||||
session.ViewerCount = len(s.viewerPeers[sessionID])
|
||||
_ = s.saveSession(session)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *sessionStore) removeViewerPeer(sessionID string, viewerID string) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
viewers, ok := s.viewerPeers[sessionID]
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
if existing, ok := viewers[viewerID]; ok {
|
||||
_ = existing.Close()
|
||||
delete(viewers, viewerID)
|
||||
}
|
||||
if len(viewers) == 0 {
|
||||
delete(s.viewerPeers, sessionID)
|
||||
}
|
||||
if session, ok := s.sessions[sessionID]; ok {
|
||||
session.ViewerCount = len(s.viewerPeers[sessionID])
|
||||
_ = s.saveSession(session)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *sessionStore) closePeer(id string) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
@@ -301,6 +352,38 @@ func (s *sessionStore) closePeer(id string) {
|
||||
_ = existing.Close()
|
||||
delete(s.peers, id)
|
||||
}
|
||||
if viewers, ok := s.viewerPeers[id]; ok {
|
||||
for viewerID, peer := range viewers {
|
||||
_ = peer.Close()
|
||||
delete(viewers, viewerID)
|
||||
}
|
||||
delete(s.viewerPeers, id)
|
||||
}
|
||||
delete(s.videoTracks, id)
|
||||
if session, ok := s.sessions[id]; ok {
|
||||
session.ViewerCount = 0
|
||||
_ = s.saveSession(session)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *sessionStore) getVideoTrack(sessionID string) *webrtc.TrackLocalStaticRTP {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
return s.videoTracks[sessionID]
|
||||
}
|
||||
|
||||
func (s *sessionStore) ensureVideoTrack(sessionID string, codec webrtc.RTPCodecCapability) (*webrtc.TrackLocalStaticRTP, error) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if track, ok := s.videoTracks[sessionID]; ok {
|
||||
return track, nil
|
||||
}
|
||||
track, err := webrtc.NewTrackLocalStaticRTP(codec, "video", fmt.Sprintf("livecam-%s", sessionID))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
s.videoTracks[sessionID] = track
|
||||
return track, nil
|
||||
}
|
||||
|
||||
func (s *sessionStore) updateSession(id string, update func(*Session) error) (*Session, error) {
|
||||
@@ -419,12 +502,24 @@ func (m *mediaServer) handleSession(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
m.handleSignal(sessionID, w, r)
|
||||
case "viewer-signal":
|
||||
if r.Method != http.MethodPost {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
m.handleViewerSignal(sessionID, w, r)
|
||||
case "segments":
|
||||
if r.Method != http.MethodPost {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
m.handleSegmentUpload(sessionID, w, r)
|
||||
case "live-frame":
|
||||
if r.Method != http.MethodPost {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
m.handleLiveFrameUpload(sessionID, w, r)
|
||||
case "markers":
|
||||
if r.Method != http.MethodPost {
|
||||
http.NotFound(w, r)
|
||||
@@ -509,12 +604,23 @@ func (m *mediaServer) handleSignal(sessionID string, w http.ResponseWriter, r *h
|
||||
|
||||
peer.OnTrack(func(track *webrtc.TrackRemote, receiver *webrtc.RTPReceiver) {
|
||||
_ = receiver
|
||||
if track.Kind() != webrtc.RTPCodecTypeVideo {
|
||||
return
|
||||
}
|
||||
localTrack, trackErr := m.store.ensureVideoTrack(sessionID, track.Codec().RTPCodecCapability)
|
||||
if trackErr != nil {
|
||||
log.Printf("failed to create local viewer track for session %s: %v", sessionID, trackErr)
|
||||
return
|
||||
}
|
||||
go func() {
|
||||
buffer := make([]byte, 1600)
|
||||
for {
|
||||
if _, _, readErr := track.Read(buffer); readErr != nil {
|
||||
packet, _, readErr := track.ReadRTP()
|
||||
if readErr != nil {
|
||||
return
|
||||
}
|
||||
if writeErr := localTrack.WriteRTP(packet); writeErr != nil && !errors.Is(writeErr, io.ErrClosedPipe) {
|
||||
log.Printf("failed to fan out RTP packet for session %s: %v", sessionID, writeErr)
|
||||
}
|
||||
_, _ = m.store.updateSession(sessionID, func(session *Session) error {
|
||||
session.StreamConnected = true
|
||||
session.Status = StatusStreaming
|
||||
@@ -556,6 +662,139 @@ func (m *mediaServer) handleSignal(sessionID string, w http.ResponseWriter, r *h
|
||||
})
|
||||
}
|
||||
|
||||
func (m *mediaServer) handleViewerSignal(sessionID string, w http.ResponseWriter, r *http.Request) {
|
||||
var input SignalRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid request body")
|
||||
return
|
||||
}
|
||||
if _, err := m.store.getSession(sessionID); err != nil {
|
||||
writeError(w, http.StatusNotFound, err.Error())
|
||||
return
|
||||
}
|
||||
localTrack := m.store.getVideoTrack(sessionID)
|
||||
if localTrack == nil {
|
||||
writeError(w, http.StatusConflict, "viewer stream not ready")
|
||||
return
|
||||
}
|
||||
|
||||
config := webrtc.Configuration{
|
||||
ICEServers: []webrtc.ICEServer{{URLs: []string{"stun:stun.l.google.com:19302"}}},
|
||||
}
|
||||
peer, err := webrtc.NewPeerConnection(config)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to create viewer peer connection")
|
||||
return
|
||||
}
|
||||
viewerID := randomID()
|
||||
m.store.replaceViewerPeer(sessionID, viewerID, peer)
|
||||
|
||||
sender, err := peer.AddTrack(localTrack)
|
||||
if err != nil {
|
||||
m.store.removeViewerPeer(sessionID, viewerID)
|
||||
writeError(w, http.StatusInternalServerError, "failed to add viewer track")
|
||||
return
|
||||
}
|
||||
go func() {
|
||||
rtcpBuf := make([]byte, 1500)
|
||||
for {
|
||||
if _, _, readErr := sender.Read(rtcpBuf); readErr != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
peer.OnConnectionStateChange(func(state webrtc.PeerConnectionState) {
|
||||
switch state {
|
||||
case webrtc.PeerConnectionStateDisconnected, webrtc.PeerConnectionStateFailed, webrtc.PeerConnectionStateClosed:
|
||||
m.store.removeViewerPeer(sessionID, viewerID)
|
||||
}
|
||||
})
|
||||
|
||||
offer := webrtc.SessionDescription{
|
||||
Type: parseSDPType(input.Type),
|
||||
SDP: input.SDP,
|
||||
}
|
||||
if err := peer.SetRemoteDescription(offer); err != nil {
|
||||
m.store.removeViewerPeer(sessionID, viewerID)
|
||||
writeError(w, http.StatusBadRequest, "failed to set remote description")
|
||||
return
|
||||
}
|
||||
|
||||
answer, err := peer.CreateAnswer(nil)
|
||||
if err != nil {
|
||||
m.store.removeViewerPeer(sessionID, viewerID)
|
||||
writeError(w, http.StatusInternalServerError, "failed to create viewer answer")
|
||||
return
|
||||
}
|
||||
gatherComplete := webrtc.GatheringCompletePromise(peer)
|
||||
if err := peer.SetLocalDescription(answer); err != nil {
|
||||
m.store.removeViewerPeer(sessionID, viewerID)
|
||||
writeError(w, http.StatusInternalServerError, "failed to set viewer local description")
|
||||
return
|
||||
}
|
||||
<-gatherComplete
|
||||
|
||||
writeJSON(w, http.StatusOK, map[string]any{
|
||||
"viewerId": viewerID,
|
||||
"type": strings.ToLower(peer.LocalDescription().Type.String()),
|
||||
"sdp": peer.LocalDescription().SDP,
|
||||
})
|
||||
}
|
||||
|
||||
func (m *mediaServer) handleLiveFrameUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
|
||||
if _, err := m.store.getSession(sessionID); err != nil {
|
||||
writeError(w, http.StatusNotFound, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
body := http.MaxBytesReader(w, r.Body, 4<<20)
|
||||
defer body.Close()
|
||||
|
||||
frame, err := io.ReadAll(body)
|
||||
if err != nil || len(frame) == 0 {
|
||||
writeError(w, http.StatusBadRequest, "invalid live frame payload")
|
||||
return
|
||||
}
|
||||
|
||||
publicDir := m.store.publicDir(sessionID)
|
||||
if err := os.MkdirAll(publicDir, 0o755); err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to create live frame directory")
|
||||
return
|
||||
}
|
||||
|
||||
tmpFile := filepath.Join(publicDir, fmt.Sprintf("live-frame-%s.tmp", randomID()))
|
||||
if err := os.WriteFile(tmpFile, frame, 0o644); err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to write live frame")
|
||||
return
|
||||
}
|
||||
defer os.Remove(tmpFile)
|
||||
|
||||
finalFile := m.store.liveFramePath(sessionID)
|
||||
if err := os.Rename(tmpFile, finalFile); err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to publish live frame")
|
||||
return
|
||||
}
|
||||
|
||||
session, err := m.store.updateSession(sessionID, func(session *Session) error {
|
||||
session.LiveFrameURL = m.store.liveFrameURL(sessionID)
|
||||
session.LiveFrameUpdated = time.Now().UTC().Format(time.RFC3339)
|
||||
session.StreamConnected = true
|
||||
session.LastStreamAt = session.LiveFrameUpdated
|
||||
if session.Status == StatusCreated || session.Status == StatusReconnecting {
|
||||
session.Status = StatusStreaming
|
||||
}
|
||||
session.LastError = ""
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to update live frame session state")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusAccepted, map[string]any{"session": session})
|
||||
}
|
||||
|
||||
func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
|
||||
sequence, err := strconv.Atoi(r.URL.Query().Get("sequence"))
|
||||
if err != nil || sequence < 0 {
|
||||
|
||||
@@ -256,3 +256,67 @@ func TestHandleSessionGetRefreshesSessionStateFromDisk(t *testing.T) {
|
||||
t.Fatalf("expected playback ready after refresh")
|
||||
}
|
||||
}
|
||||
|
||||
func TestViewerSignalReturnsConflictBeforePublisherTrackReady(t *testing.T) {
|
||||
store, err := newSessionStore(t.TempDir())
|
||||
if err != nil {
|
||||
t.Fatalf("newSessionStore: %v", err)
|
||||
}
|
||||
|
||||
server := newMediaServer(store)
|
||||
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Viewer Pending"})
|
||||
if err != nil {
|
||||
t.Fatalf("createSession: %v", err)
|
||||
}
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/media/sessions/"+session.ID+"/viewer-signal", strings.NewReader(`{"type":"offer","sdp":"mock-offer"}`))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
res := httptest.NewRecorder()
|
||||
server.routes().ServeHTTP(res, req)
|
||||
|
||||
if res.Code != http.StatusConflict {
|
||||
t.Fatalf("expected viewer-signal 409 before video track is ready, got %d", res.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLiveFrameUploadPublishesRelayFrame(t *testing.T) {
|
||||
store, err := newSessionStore(t.TempDir())
|
||||
if err != nil {
|
||||
t.Fatalf("newSessionStore: %v", err)
|
||||
}
|
||||
|
||||
server := newMediaServer(store)
|
||||
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Session"})
|
||||
if err != nil {
|
||||
t.Fatalf("createSession: %v", err)
|
||||
}
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/media/sessions/"+session.ID+"/live-frame", strings.NewReader("jpeg-frame"))
|
||||
req.Header.Set("Content-Type", "image/jpeg")
|
||||
res := httptest.NewRecorder()
|
||||
server.routes().ServeHTTP(res, req)
|
||||
|
||||
if res.Code != http.StatusAccepted {
|
||||
t.Fatalf("expected live-frame upload 202, got %d", res.Code)
|
||||
}
|
||||
|
||||
current, err := store.getSession(session.ID)
|
||||
if err != nil {
|
||||
t.Fatalf("getSession: %v", err)
|
||||
}
|
||||
if current.LiveFrameURL == "" || current.LiveFrameUpdated == "" {
|
||||
t.Fatalf("expected live frame metadata to be recorded, got %#v", current)
|
||||
}
|
||||
if !current.StreamConnected {
|
||||
t.Fatalf("expected session stream connected after frame upload")
|
||||
}
|
||||
|
||||
framePath := store.liveFramePath(session.ID)
|
||||
body, err := os.ReadFile(framePath)
|
||||
if err != nil {
|
||||
t.Fatalf("read live frame: %v", err)
|
||||
}
|
||||
if string(body) != "jpeg-frame" {
|
||||
t.Fatalf("unexpected live frame content: %q", string(body))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,23 +6,29 @@ export type TrpcContext = {
|
||||
req: CreateExpressContextOptions["req"];
|
||||
res: CreateExpressContextOptions["res"];
|
||||
user: User | null;
|
||||
sessionSid: string | null;
|
||||
};
|
||||
|
||||
export async function createContext(
|
||||
opts: CreateExpressContextOptions
|
||||
): Promise<TrpcContext> {
|
||||
let user: User | null = null;
|
||||
let sessionSid: string | null = null;
|
||||
|
||||
try {
|
||||
user = await sdk.authenticateRequest(opts.req);
|
||||
const authenticated = await sdk.authenticateRequestWithSession(opts.req);
|
||||
user = authenticated.user;
|
||||
sessionSid = authenticated.sid;
|
||||
} catch (error) {
|
||||
// Authentication is optional for public procedures.
|
||||
user = null;
|
||||
sessionSid = null;
|
||||
}
|
||||
|
||||
return {
|
||||
req: opts.req,
|
||||
res: opts.res,
|
||||
user,
|
||||
sessionSid,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -13,6 +13,26 @@ import { createBackgroundTask, getAdminUserId, hasRecentBackgroundTaskOfType, se
|
||||
import { nanoid } from "nanoid";
|
||||
import { syncTutorialImages } from "../tutorialImages";
|
||||
|
||||
async function warmupApplicationData() {
|
||||
const tasks: Array<{ label: string; run: () => Promise<unknown> }> = [
|
||||
{ label: "seedTutorials", run: () => seedTutorials() },
|
||||
{ label: "syncTutorialImages", run: () => syncTutorialImages() },
|
||||
{ label: "seedVisionReferenceImages", run: () => seedVisionReferenceImages() },
|
||||
{ label: "seedAchievementDefinitions", run: () => seedAchievementDefinitions() },
|
||||
{ label: "seedAppSettings", run: () => seedAppSettings() },
|
||||
];
|
||||
|
||||
for (const task of tasks) {
|
||||
const startedAt = Date.now();
|
||||
try {
|
||||
await task.run();
|
||||
console.log(`[startup] ${task.label} finished in ${Date.now() - startedAt}ms`);
|
||||
} catch (error) {
|
||||
console.error(`[startup] ${task.label} failed`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function scheduleDailyNtrpRefresh() {
|
||||
const now = new Date();
|
||||
if (now.getHours() !== 0 || now.getMinutes() > 5) {
|
||||
@@ -64,12 +84,6 @@ async function findAvailablePort(startPort: number = 3000): Promise<number> {
|
||||
}
|
||||
|
||||
async function startServer() {
|
||||
await seedTutorials();
|
||||
await syncTutorialImages();
|
||||
await seedVisionReferenceImages();
|
||||
await seedAchievementDefinitions();
|
||||
await seedAppSettings();
|
||||
|
||||
const app = express();
|
||||
const server = createServer(app);
|
||||
registerMediaProxy(app);
|
||||
@@ -108,6 +122,7 @@ async function startServer() {
|
||||
|
||||
server.listen(port, () => {
|
||||
console.log(`Server running on http://localhost:${port}/`);
|
||||
void warmupApplicationData();
|
||||
});
|
||||
|
||||
setInterval(() => {
|
||||
|
||||
57
server/_core/sdk.test.ts
普通文件
57
server/_core/sdk.test.ts
普通文件
@@ -0,0 +1,57 @@
|
||||
import { SignJWT } from "jose";
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
async function loadSdkForTest() {
|
||||
process.env.JWT_SECRET = "test-cookie-secret";
|
||||
process.env.VITE_APP_ID = "test-app";
|
||||
vi.resetModules();
|
||||
|
||||
const [{ sdk }, { ENV }] = await Promise.all([
|
||||
import("./sdk"),
|
||||
import("./env"),
|
||||
]);
|
||||
|
||||
return { sdk, ENV };
|
||||
}
|
||||
|
||||
async function signLegacyToken(openId: string, appId: string, name: string) {
|
||||
const secret = new TextEncoder().encode(process.env.JWT_SECRET || "");
|
||||
return new SignJWT({
|
||||
openId,
|
||||
appId,
|
||||
name,
|
||||
})
|
||||
.setProtectedHeader({ alg: "HS256", typ: "JWT" })
|
||||
.setExpirationTime(Math.floor((Date.now() + 60_000) / 1000))
|
||||
.sign(secret);
|
||||
}
|
||||
|
||||
describe("sdk.verifySession", () => {
|
||||
it("derives a stable legacy sid when the token payload does not include sid", async () => {
|
||||
const { sdk, ENV } = await loadSdkForTest();
|
||||
const legacyToken = await signLegacyToken("username_H1_legacy", ENV.appId, "H1");
|
||||
|
||||
const session = await sdk.verifySession(legacyToken);
|
||||
|
||||
expect(session).not.toBeNull();
|
||||
expect(session?.sid).toMatch(/^legacy-token:/);
|
||||
expect(session?.sid).toHaveLength("legacy-token:".length + 32);
|
||||
});
|
||||
|
||||
it("derives different legacy sid values for different legacy login tokens", async () => {
|
||||
const firstLoad = await loadSdkForTest();
|
||||
const tokenA = await signLegacyToken("username_H1_legacy", firstLoad.ENV.appId, "H1");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 5));
|
||||
|
||||
const secondLoad = await loadSdkForTest();
|
||||
const tokenB = await signLegacyToken("username_H1_legacy", secondLoad.ENV.appId, "H1-second");
|
||||
|
||||
const sessionA = await firstLoad.sdk.verifySession(tokenA);
|
||||
const sessionB = await secondLoad.sdk.verifySession(tokenB);
|
||||
|
||||
expect(sessionA?.sid).toMatch(/^legacy-token:/);
|
||||
expect(sessionB?.sid).toMatch(/^legacy-token:/);
|
||||
expect(sessionA?.sid).not.toBe(sessionB?.sid);
|
||||
});
|
||||
});
|
||||
@@ -4,6 +4,7 @@ import axios, { type AxiosInstance } from "axios";
|
||||
import { parse as parseCookieHeader } from "cookie";
|
||||
import type { Request } from "express";
|
||||
import { SignJWT, jwtVerify } from "jose";
|
||||
import { createHash } from "node:crypto";
|
||||
import type { User } from "../../drizzle/schema";
|
||||
import * as db from "../db";
|
||||
import { ENV } from "./env";
|
||||
@@ -223,11 +224,15 @@ class SDKServer {
|
||||
return null;
|
||||
}
|
||||
|
||||
const derivedSid = typeof sid === "string" && sid.length > 0
|
||||
? sid
|
||||
: `legacy-token:${createHash("sha256").update(cookieValue).digest("hex").slice(0, 32)}`;
|
||||
|
||||
return {
|
||||
openId,
|
||||
appId,
|
||||
name: typeof name === "string" ? name : undefined,
|
||||
sid: typeof sid === "string" ? sid : undefined,
|
||||
sid: derivedSid,
|
||||
};
|
||||
} catch (error) {
|
||||
console.warn("[Auth] Session verification failed", String(error));
|
||||
@@ -260,7 +265,11 @@ class SDKServer {
|
||||
}
|
||||
|
||||
async authenticateRequest(req: Request): Promise<User> {
|
||||
// Regular authentication flow
|
||||
const authenticated = await this.authenticateRequestWithSession(req);
|
||||
return authenticated.user;
|
||||
}
|
||||
|
||||
async authenticateRequestWithSession(req: Request): Promise<{ user: User; sid: string | null }> {
|
||||
const cookies = this.parseCookies(req.headers.cookie);
|
||||
const sessionCookie = cookies.get(COOKIE_NAME);
|
||||
const session = await this.verifySession(sessionCookie);
|
||||
@@ -273,7 +282,6 @@ class SDKServer {
|
||||
const signedInAt = new Date();
|
||||
let user = await db.getUserByOpenId(sessionUserId);
|
||||
|
||||
// If user not in DB, sync from OAuth server automatically
|
||||
if (!user) {
|
||||
try {
|
||||
const userInfo = await this.getUserInfoWithJwt(sessionCookie ?? "");
|
||||
@@ -300,7 +308,10 @@ class SDKServer {
|
||||
lastSignedIn: signedInAt,
|
||||
});
|
||||
|
||||
return user;
|
||||
return {
|
||||
user,
|
||||
sid: session.sid ?? null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
136
server/db.ts
136
server/db.ts
@@ -8,6 +8,7 @@ import {
|
||||
poseAnalyses, InsertPoseAnalysis,
|
||||
trainingRecords, InsertTrainingRecord,
|
||||
liveAnalysisSessions, InsertLiveAnalysisSession,
|
||||
liveAnalysisRuntime, InsertLiveAnalysisRuntime,
|
||||
liveActionSegments, InsertLiveActionSegment,
|
||||
dailyTrainingAggregates, InsertDailyTrainingAggregate,
|
||||
ratingHistory, InsertRatingHistory,
|
||||
@@ -32,6 +33,7 @@ import { fetchTutorialMetrics, shouldRefreshTutorialMetrics } from "./tutorialMe
|
||||
let _db: ReturnType<typeof drizzle> | null = null;
|
||||
|
||||
const APP_TIMEZONE = process.env.TZ || "Asia/Shanghai";
|
||||
export const LIVE_ANALYSIS_RUNTIME_TIMEOUT_MS = 15_000;
|
||||
|
||||
function getDateFormatter() {
|
||||
return new Intl.DateTimeFormat("en-CA", {
|
||||
@@ -888,6 +890,140 @@ export async function createLiveAnalysisSession(session: InsertLiveAnalysisSessi
|
||||
return result[0].insertId;
|
||||
}
|
||||
|
||||
export async function getUserLiveAnalysisRuntime(userId: number) {
|
||||
const db = await getDb();
|
||||
if (!db) return undefined;
|
||||
const result = await db.select().from(liveAnalysisRuntime)
|
||||
.where(eq(liveAnalysisRuntime.userId, userId))
|
||||
.limit(1);
|
||||
return result[0];
|
||||
}
|
||||
|
||||
export async function upsertUserLiveAnalysisRuntime(
|
||||
userId: number,
|
||||
patch: Omit<InsertLiveAnalysisRuntime, "id" | "createdAt" | "updatedAt" | "userId">,
|
||||
) {
|
||||
const db = await getDb();
|
||||
if (!db) throw new Error("Database not available");
|
||||
|
||||
const existing = await getUserLiveAnalysisRuntime(userId);
|
||||
if (existing) {
|
||||
await db.update(liveAnalysisRuntime)
|
||||
.set({
|
||||
ownerSid: patch.ownerSid ?? existing.ownerSid,
|
||||
status: patch.status ?? existing.status,
|
||||
title: patch.title ?? existing.title,
|
||||
sessionMode: patch.sessionMode ?? existing.sessionMode,
|
||||
mediaSessionId: patch.mediaSessionId === undefined ? existing.mediaSessionId : patch.mediaSessionId,
|
||||
startedAt: patch.startedAt === undefined ? existing.startedAt : patch.startedAt,
|
||||
endedAt: patch.endedAt === undefined ? existing.endedAt : patch.endedAt,
|
||||
lastHeartbeatAt: patch.lastHeartbeatAt === undefined ? existing.lastHeartbeatAt : patch.lastHeartbeatAt,
|
||||
snapshot: patch.snapshot === undefined ? existing.snapshot : patch.snapshot,
|
||||
})
|
||||
.where(eq(liveAnalysisRuntime.userId, userId));
|
||||
return getUserLiveAnalysisRuntime(userId);
|
||||
}
|
||||
|
||||
const result = await db.insert(liveAnalysisRuntime).values({
|
||||
userId,
|
||||
ownerSid: patch.ownerSid ?? null,
|
||||
status: patch.status ?? "idle",
|
||||
title: patch.title ?? null,
|
||||
sessionMode: patch.sessionMode ?? "practice",
|
||||
mediaSessionId: patch.mediaSessionId ?? null,
|
||||
startedAt: patch.startedAt ?? null,
|
||||
endedAt: patch.endedAt ?? null,
|
||||
lastHeartbeatAt: patch.lastHeartbeatAt ?? null,
|
||||
snapshot: patch.snapshot ?? null,
|
||||
});
|
||||
|
||||
const runtimeId = result[0].insertId;
|
||||
const rows = await db.select().from(liveAnalysisRuntime).where(eq(liveAnalysisRuntime.id, runtimeId)).limit(1);
|
||||
return rows[0];
|
||||
}
|
||||
|
||||
export async function updateUserLiveAnalysisRuntime(
|
||||
userId: number,
|
||||
patch: Partial<Omit<InsertLiveAnalysisRuntime, "id" | "createdAt" | "updatedAt" | "userId">>,
|
||||
) {
|
||||
const db = await getDb();
|
||||
if (!db) throw new Error("Database not available");
|
||||
const existing = await getUserLiveAnalysisRuntime(userId);
|
||||
if (!existing) return undefined;
|
||||
|
||||
await db.update(liveAnalysisRuntime)
|
||||
.set({
|
||||
ownerSid: patch.ownerSid === undefined ? existing.ownerSid : patch.ownerSid,
|
||||
status: patch.status ?? existing.status,
|
||||
title: patch.title === undefined ? existing.title : patch.title,
|
||||
sessionMode: patch.sessionMode ?? existing.sessionMode,
|
||||
mediaSessionId: patch.mediaSessionId === undefined ? existing.mediaSessionId : patch.mediaSessionId,
|
||||
startedAt: patch.startedAt === undefined ? existing.startedAt : patch.startedAt,
|
||||
endedAt: patch.endedAt === undefined ? existing.endedAt : patch.endedAt,
|
||||
lastHeartbeatAt: patch.lastHeartbeatAt === undefined ? existing.lastHeartbeatAt : patch.lastHeartbeatAt,
|
||||
snapshot: patch.snapshot === undefined ? existing.snapshot : patch.snapshot,
|
||||
})
|
||||
.where(eq(liveAnalysisRuntime.userId, userId));
|
||||
|
||||
return getUserLiveAnalysisRuntime(userId);
|
||||
}
|
||||
|
||||
export async function updateLiveAnalysisRuntimeHeartbeat(input: {
|
||||
userId: number;
|
||||
ownerSid: string;
|
||||
runtimeId: number;
|
||||
mediaSessionId?: string | null;
|
||||
snapshot?: unknown;
|
||||
}) {
|
||||
const db = await getDb();
|
||||
if (!db) throw new Error("Database not available");
|
||||
|
||||
const existing = await getUserLiveAnalysisRuntime(input.userId);
|
||||
if (!existing || existing.id !== input.runtimeId || existing.ownerSid !== input.ownerSid || existing.status !== "active") {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
await db.update(liveAnalysisRuntime)
|
||||
.set({
|
||||
mediaSessionId: input.mediaSessionId === undefined ? existing.mediaSessionId : input.mediaSessionId,
|
||||
snapshot: input.snapshot === undefined ? existing.snapshot : input.snapshot,
|
||||
lastHeartbeatAt: new Date(),
|
||||
endedAt: null,
|
||||
})
|
||||
.where(and(
|
||||
eq(liveAnalysisRuntime.userId, input.userId),
|
||||
eq(liveAnalysisRuntime.id, input.runtimeId),
|
||||
));
|
||||
|
||||
return getUserLiveAnalysisRuntime(input.userId);
|
||||
}
|
||||
|
||||
export async function endUserLiveAnalysisRuntime(input: {
|
||||
userId: number;
|
||||
ownerSid?: string | null;
|
||||
runtimeId?: number;
|
||||
snapshot?: unknown;
|
||||
}) {
|
||||
const db = await getDb();
|
||||
if (!db) throw new Error("Database not available");
|
||||
|
||||
const existing = await getUserLiveAnalysisRuntime(input.userId);
|
||||
if (!existing) return undefined;
|
||||
if (input.runtimeId != null && existing.id !== input.runtimeId) return undefined;
|
||||
if (input.ownerSid != null && existing.ownerSid !== input.ownerSid) return undefined;
|
||||
|
||||
await db.update(liveAnalysisRuntime)
|
||||
.set({
|
||||
status: "ended",
|
||||
mediaSessionId: null,
|
||||
endedAt: new Date(),
|
||||
snapshot: input.snapshot === undefined ? existing.snapshot : input.snapshot,
|
||||
})
|
||||
.where(eq(liveAnalysisRuntime.userId, input.userId));
|
||||
|
||||
return getUserLiveAnalysisRuntime(input.userId);
|
||||
}
|
||||
|
||||
export async function createLiveActionSegments(segments: InsertLiveActionSegment[]) {
|
||||
const db = await getDb();
|
||||
if (!db || segments.length === 0) return;
|
||||
|
||||
@@ -45,7 +45,7 @@ function createTestUser(overrides?: Partial<AuthenticatedUser>): AuthenticatedUs
|
||||
};
|
||||
}
|
||||
|
||||
function createMockContext(user: AuthenticatedUser | null = null): {
|
||||
function createMockContext(user: AuthenticatedUser | null = null, sessionSid = "test-session-sid"): {
|
||||
ctx: TrpcContext;
|
||||
clearedCookies: { name: string; options: Record<string, unknown> }[];
|
||||
setCookies: { name: string; value: string; options: Record<string, unknown> }[];
|
||||
@@ -56,6 +56,7 @@ function createMockContext(user: AuthenticatedUser | null = null): {
|
||||
return {
|
||||
ctx: {
|
||||
user,
|
||||
sessionSid: user ? sessionSid : null,
|
||||
req: {
|
||||
protocol: "https",
|
||||
headers: {},
|
||||
@@ -1296,6 +1297,161 @@ describe("analysis.liveSessionSave", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("analysis.runtime", () => {
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it("acquires owner mode when runtime is idle", async () => {
|
||||
const user = createTestUser({ id: 7 });
|
||||
const { ctx } = createMockContext(user, "sid-owner");
|
||||
const caller = appRouter.createCaller(ctx);
|
||||
|
||||
vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(undefined);
|
||||
const upsertSpy = vi.spyOn(db, "upsertUserLiveAnalysisRuntime").mockResolvedValueOnce({
|
||||
id: 11,
|
||||
userId: 7,
|
||||
ownerSid: "sid-owner",
|
||||
status: "active",
|
||||
title: "实时分析 正手",
|
||||
sessionMode: "practice",
|
||||
mediaSessionId: null,
|
||||
startedAt: new Date(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
snapshot: null,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
} as any);
|
||||
|
||||
const result = await caller.analysis.runtimeAcquire({
|
||||
title: "实时分析 正手",
|
||||
sessionMode: "practice",
|
||||
});
|
||||
|
||||
expect(upsertSpy).toHaveBeenCalledWith(7, expect.objectContaining({
|
||||
ownerSid: "sid-owner",
|
||||
status: "active",
|
||||
title: "实时分析 正手",
|
||||
sessionMode: "practice",
|
||||
}));
|
||||
expect(result.role).toBe("owner");
|
||||
expect((result.runtimeSession as any)?.ownerSid).toBe("sid-owner");
|
||||
});
|
||||
|
||||
it("returns viewer mode when another session sid already holds the runtime", async () => {
|
||||
const user = createTestUser({ id: 7 });
|
||||
const { ctx } = createMockContext(user, "sid-viewer");
|
||||
const caller = appRouter.createCaller(ctx);
|
||||
const activeRuntime = {
|
||||
id: 15,
|
||||
userId: 7,
|
||||
ownerSid: "sid-owner",
|
||||
status: "active",
|
||||
title: "实时分析 练习",
|
||||
sessionMode: "pk",
|
||||
mediaSessionId: "media-sync-1",
|
||||
startedAt: new Date(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
snapshot: { phase: "analyzing" },
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(activeRuntime as any);
|
||||
|
||||
const result = await caller.analysis.runtimeAcquire({
|
||||
title: "实时分析 练习",
|
||||
sessionMode: "pk",
|
||||
});
|
||||
|
||||
expect(result.role).toBe("viewer");
|
||||
expect((result.runtimeSession as any)?.mediaSessionId).toBe("media-sync-1");
|
||||
});
|
||||
|
||||
it("keeps owner mode when the same sid reacquires the runtime", async () => {
|
||||
const user = createTestUser({ id: 7 });
|
||||
const { ctx } = createMockContext(user, "sid-owner");
|
||||
const caller = appRouter.createCaller(ctx);
|
||||
const activeRuntime = {
|
||||
id: 19,
|
||||
userId: 7,
|
||||
ownerSid: "sid-owner",
|
||||
status: "active",
|
||||
title: "旧标题",
|
||||
sessionMode: "practice",
|
||||
mediaSessionId: "media-sync-2",
|
||||
startedAt: new Date("2026-03-16T00:00:00.000Z"),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
snapshot: { phase: "analyzing" },
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(activeRuntime as any);
|
||||
const updateSpy = vi.spyOn(db, "updateUserLiveAnalysisRuntime").mockResolvedValueOnce({
|
||||
...activeRuntime,
|
||||
title: "新标题",
|
||||
} as any);
|
||||
|
||||
const result = await caller.analysis.runtimeAcquire({
|
||||
title: "新标题",
|
||||
sessionMode: "practice",
|
||||
});
|
||||
|
||||
expect(updateSpy).toHaveBeenCalledWith(7, expect.objectContaining({
|
||||
ownerSid: "sid-owner",
|
||||
title: "新标题",
|
||||
status: "active",
|
||||
}));
|
||||
expect(result.role).toBe("owner");
|
||||
});
|
||||
|
||||
it("rejects heartbeat from a non-owner sid", async () => {
|
||||
const user = createTestUser({ id: 7 });
|
||||
const { ctx } = createMockContext(user, "sid-viewer");
|
||||
const caller = appRouter.createCaller(ctx);
|
||||
|
||||
vi.spyOn(db, "updateLiveAnalysisRuntimeHeartbeat").mockResolvedValueOnce(undefined);
|
||||
|
||||
await expect(caller.analysis.runtimeHeartbeat({
|
||||
runtimeId: 20,
|
||||
mediaSessionId: "media-sync-3",
|
||||
snapshot: { phase: "analyzing" },
|
||||
})).rejects.toThrow("当前设备不是实时分析持有端");
|
||||
});
|
||||
|
||||
it("rejects release from a non-owner sid", async () => {
|
||||
const user = createTestUser({ id: 7 });
|
||||
const { ctx } = createMockContext(user, "sid-viewer");
|
||||
const caller = appRouter.createCaller(ctx);
|
||||
|
||||
vi.spyOn(db, "endUserLiveAnalysisRuntime").mockResolvedValueOnce(undefined);
|
||||
vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce({
|
||||
id: 23,
|
||||
userId: 7,
|
||||
ownerSid: "sid-owner",
|
||||
status: "active",
|
||||
title: "实时分析",
|
||||
sessionMode: "practice",
|
||||
mediaSessionId: "media-sync-4",
|
||||
startedAt: new Date(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
snapshot: null,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
} as any);
|
||||
|
||||
await expect(caller.analysis.runtimeRelease({
|
||||
runtimeId: 23,
|
||||
snapshot: { phase: "failed" },
|
||||
})).rejects.toThrow("当前设备不是实时分析持有端");
|
||||
});
|
||||
});
|
||||
|
||||
describe("rating.refreshMine", () => {
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
|
||||
@@ -73,6 +73,67 @@ const trainingProfileUpdateSchema = z.object({
|
||||
assessmentNotes: z.string().max(2000).nullable().optional(),
|
||||
});
|
||||
|
||||
const liveRuntimeSnapshotSchema = z.object({
|
||||
phase: z.enum(["idle", "analyzing", "saving", "safe", "failed"]).optional(),
|
||||
startedAt: z.number().optional(),
|
||||
durationMs: z.number().optional(),
|
||||
currentAction: z.string().optional(),
|
||||
rawAction: z.string().optional(),
|
||||
feedback: z.array(z.string()).optional(),
|
||||
liveScore: z.record(z.string(), z.number()).nullable().optional(),
|
||||
stabilityMeta: z.record(z.string(), z.any()).optional(),
|
||||
visibleSegments: z.number().optional(),
|
||||
unknownSegments: z.number().optional(),
|
||||
archivedVideoCount: z.number().optional(),
|
||||
recentSegments: z.array(z.object({
|
||||
actionType: z.string(),
|
||||
isUnknown: z.boolean().optional(),
|
||||
startMs: z.number(),
|
||||
endMs: z.number(),
|
||||
durationMs: z.number(),
|
||||
confidenceAvg: z.number().optional(),
|
||||
score: z.number().optional(),
|
||||
clipLabel: z.string().optional(),
|
||||
})).optional(),
|
||||
}).passthrough();
|
||||
|
||||
function getRuntimeOwnerSid(ctx: { sessionSid: string | null; user: { openId: string } }) {
|
||||
return ctx.sessionSid || `legacy:${ctx.user.openId}`;
|
||||
}
|
||||
|
||||
async function resolveLiveRuntimeRole(params: {
|
||||
userId: number;
|
||||
sessionSid: string;
|
||||
}) {
|
||||
let runtime = await db.getUserLiveAnalysisRuntime(params.userId);
|
||||
if (!runtime) {
|
||||
return { role: "idle" as const, runtimeSession: null };
|
||||
}
|
||||
|
||||
const heartbeatAt = runtime.lastHeartbeatAt ?? runtime.updatedAt ?? runtime.startedAt;
|
||||
const isStale =
|
||||
runtime.status === "active" &&
|
||||
(!heartbeatAt || (Date.now() - heartbeatAt.getTime()) > db.LIVE_ANALYSIS_RUNTIME_TIMEOUT_MS);
|
||||
|
||||
if (isStale) {
|
||||
runtime = await db.endUserLiveAnalysisRuntime({
|
||||
userId: params.userId,
|
||||
runtimeId: runtime.id,
|
||||
snapshot: runtime.snapshot,
|
||||
}) ?? null as any;
|
||||
return { role: "idle" as const, runtimeSession: null };
|
||||
}
|
||||
|
||||
if (runtime.status !== "active") {
|
||||
return { role: "idle" as const, runtimeSession: runtime };
|
||||
}
|
||||
|
||||
return {
|
||||
role: runtime.ownerSid === params.sessionSid ? "owner" as const : "viewer" as const,
|
||||
runtimeSession: runtime,
|
||||
};
|
||||
}
|
||||
|
||||
export const appRouter = router({
|
||||
system: systemRouter,
|
||||
|
||||
@@ -455,6 +516,122 @@ export const appRouter = router({
|
||||
return { session, segments };
|
||||
}),
|
||||
|
||||
runtimeGet: protectedProcedure.query(async ({ ctx }) => {
|
||||
const sessionSid = getRuntimeOwnerSid(ctx);
|
||||
return resolveLiveRuntimeRole({
|
||||
userId: ctx.user.id,
|
||||
sessionSid,
|
||||
});
|
||||
}),
|
||||
|
||||
runtimeAcquire: protectedProcedure
|
||||
.input(z.object({
|
||||
title: z.string().min(1).max(256),
|
||||
sessionMode: z.enum(["practice", "pk"]).default("practice"),
|
||||
}))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const sessionSid = getRuntimeOwnerSid(ctx);
|
||||
const current = await resolveLiveRuntimeRole({
|
||||
userId: ctx.user.id,
|
||||
sessionSid,
|
||||
});
|
||||
|
||||
if (current.role === "viewer" && current.runtimeSession?.status === "active") {
|
||||
return current;
|
||||
}
|
||||
|
||||
const runtime = current.runtimeSession?.status === "active" && current.role === "owner"
|
||||
? await db.updateUserLiveAnalysisRuntime(ctx.user.id, {
|
||||
ownerSid: sessionSid,
|
||||
status: "active",
|
||||
title: input.title,
|
||||
sessionMode: input.sessionMode,
|
||||
startedAt: current.runtimeSession.startedAt ?? new Date(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
})
|
||||
: await db.upsertUserLiveAnalysisRuntime(ctx.user.id, {
|
||||
ownerSid: sessionSid,
|
||||
status: "active",
|
||||
title: input.title,
|
||||
sessionMode: input.sessionMode,
|
||||
mediaSessionId: null,
|
||||
startedAt: new Date(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
snapshot: {
|
||||
phase: "idle",
|
||||
startedAt: Date.now(),
|
||||
durationMs: 0,
|
||||
currentAction: "unknown",
|
||||
rawAction: "unknown",
|
||||
feedback: [],
|
||||
visibleSegments: 0,
|
||||
unknownSegments: 0,
|
||||
archivedVideoCount: 0,
|
||||
recentSegments: [],
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
role: "owner" as const,
|
||||
runtimeSession: runtime ?? null,
|
||||
};
|
||||
}),
|
||||
|
||||
runtimeHeartbeat: protectedProcedure
|
||||
.input(z.object({
|
||||
runtimeId: z.number(),
|
||||
mediaSessionId: z.string().max(96).nullable().optional(),
|
||||
snapshot: liveRuntimeSnapshotSchema.optional(),
|
||||
}))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const sessionSid = getRuntimeOwnerSid(ctx);
|
||||
const runtime = await db.updateLiveAnalysisRuntimeHeartbeat({
|
||||
userId: ctx.user.id,
|
||||
ownerSid: sessionSid,
|
||||
runtimeId: input.runtimeId,
|
||||
mediaSessionId: input.mediaSessionId,
|
||||
snapshot: input.snapshot,
|
||||
});
|
||||
|
||||
if (!runtime) {
|
||||
throw new TRPCError({ code: "FORBIDDEN", message: "当前设备不是实时分析持有端" });
|
||||
}
|
||||
|
||||
return {
|
||||
role: "owner" as const,
|
||||
runtimeSession: runtime,
|
||||
};
|
||||
}),
|
||||
|
||||
runtimeRelease: protectedProcedure
|
||||
.input(z.object({
|
||||
runtimeId: z.number().optional(),
|
||||
snapshot: liveRuntimeSnapshotSchema.optional(),
|
||||
}).optional())
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const sessionSid = getRuntimeOwnerSid(ctx);
|
||||
const runtime = await db.endUserLiveAnalysisRuntime({
|
||||
userId: ctx.user.id,
|
||||
ownerSid: sessionSid,
|
||||
runtimeId: input?.runtimeId,
|
||||
snapshot: input?.snapshot,
|
||||
});
|
||||
|
||||
if (!runtime) {
|
||||
const current = await db.getUserLiveAnalysisRuntime(ctx.user.id);
|
||||
if (current?.status === "active" && current.ownerSid !== sessionSid) {
|
||||
throw new TRPCError({ code: "FORBIDDEN", message: "当前设备不是实时分析持有端" });
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
runtimeSession: runtime ?? null,
|
||||
};
|
||||
}),
|
||||
|
||||
// Generate AI correction suggestions
|
||||
getCorrections: protectedProcedure
|
||||
.input(z.object({
|
||||
|
||||
@@ -68,6 +68,50 @@ test("live camera starts analysis and produces scores", async ({ page }) => {
|
||||
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
|
||||
});
|
||||
|
||||
test("live camera switches into viewer mode when another device already owns analysis", async ({ page }) => {
|
||||
await installAppMocks(page, { authenticated: true, liveViewerMode: true });
|
||||
|
||||
await page.goto("/live-camera");
|
||||
await expect(page.getByText("同步观看模式")).toBeVisible();
|
||||
await expect(page.getByText(/同步观看|重新同步/).first()).toBeVisible();
|
||||
await expect(page.getByText("当前设备已锁定为观看模式")).toBeVisible();
|
||||
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText("其他设备实时分析");
|
||||
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText("移动端");
|
||||
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText("均衡模式");
|
||||
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText("猩猩");
|
||||
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
|
||||
});
|
||||
|
||||
test("live camera recovers mojibake viewer titles before rendering", async ({ page }) => {
|
||||
const state = await installAppMocks(page, { authenticated: true, liveViewerMode: true });
|
||||
const mojibakeTitle = Buffer.from("服务端同步烟雾测试", "utf8").toString("latin1");
|
||||
if (state.liveRuntime.runtimeSession) {
|
||||
state.liveRuntime.runtimeSession.title = mojibakeTitle;
|
||||
state.liveRuntime.runtimeSession.snapshot = {
|
||||
...state.liveRuntime.runtimeSession.snapshot,
|
||||
title: mojibakeTitle,
|
||||
};
|
||||
}
|
||||
|
||||
await page.goto("/live-camera");
|
||||
await expect(page.getByRole("heading", { name: "服务端同步烟雾测试" })).toBeVisible();
|
||||
await expect(page.getByText(mojibakeTitle)).toHaveCount(0);
|
||||
});
|
||||
|
||||
test("live camera no longer opens viewer peer retries when server relay is active", async ({ page }) => {
|
||||
const state = await installAppMocks(page, {
|
||||
authenticated: true,
|
||||
liveViewerMode: true,
|
||||
viewerSignalConflictOnce: true,
|
||||
});
|
||||
|
||||
await page.goto("/live-camera");
|
||||
await expect(page.getByText("同步观看模式")).toBeVisible();
|
||||
await expect.poll(() => state.viewerSignalConflictRemaining).toBe(1);
|
||||
await expect.poll(() => state.mediaSession?.viewerCount ?? 0).toBe(0);
|
||||
await expect(page.locator('img[alt="同步中的实时分析画面"]')).toBeVisible();
|
||||
});
|
||||
|
||||
test("live camera archives overlay videos into the library after analysis stops", async ({ page }) => {
|
||||
await installAppMocks(page, { authenticated: true, videos: [] });
|
||||
|
||||
@@ -116,3 +160,11 @@ test("recorder flow archives a session and exposes it in videos", async ({ page
|
||||
await expect(page.getByTestId("video-card")).toHaveCount(1);
|
||||
await expect(page.getByText("E2E 录制")).toBeVisible();
|
||||
});
|
||||
|
||||
test("recorder blocks local camera when another device owns live analysis", async ({ page }) => {
|
||||
await installAppMocks(page, { authenticated: true, liveViewerMode: true });
|
||||
|
||||
await page.goto("/recorder");
|
||||
await expect(page.getByText("当前账号已有其他设备正在实时分析")).toBeVisible();
|
||||
await expect(page.getByTestId("recorder-start-camera-button")).toBeDisabled();
|
||||
});
|
||||
|
||||
@@ -49,6 +49,7 @@ type MockMediaSession = {
|
||||
uploadedBytes: number;
|
||||
durationMs: number;
|
||||
streamConnected: boolean;
|
||||
viewerCount?: number;
|
||||
playback: {
|
||||
webmUrl?: string;
|
||||
mp4Url?: string;
|
||||
@@ -92,9 +93,14 @@ type MockAppState = {
|
||||
adjustmentNotes: string | null;
|
||||
} | null;
|
||||
mediaSession: MockMediaSession | null;
|
||||
liveRuntime: {
|
||||
role: "idle" | "owner" | "viewer";
|
||||
runtimeSession: any | null;
|
||||
};
|
||||
nextVideoId: number;
|
||||
nextTaskId: number;
|
||||
authMeNullResponsesAfterLogin: number;
|
||||
viewerSignalConflictRemaining: number;
|
||||
};
|
||||
|
||||
function trpcResult(json: unknown) {
|
||||
@@ -428,6 +434,50 @@ async function handleTrpc(route: Route, state: MockAppState) {
|
||||
return trpcResult(state.analyses);
|
||||
case "analysis.liveSessionList":
|
||||
return trpcResult([]);
|
||||
case "analysis.runtimeGet":
|
||||
return trpcResult(state.liveRuntime);
|
||||
case "analysis.runtimeAcquire":
|
||||
if (state.liveRuntime.runtimeSession?.status === "active" && state.liveRuntime.role === "viewer") {
|
||||
return trpcResult(state.liveRuntime);
|
||||
}
|
||||
state.liveRuntime = {
|
||||
role: "owner",
|
||||
runtimeSession: {
|
||||
id: 501,
|
||||
title: "实时分析 正手",
|
||||
sessionMode: "practice",
|
||||
mediaSessionId: state.mediaSession?.id || null,
|
||||
status: "active",
|
||||
startedAt: nowIso(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: nowIso(),
|
||||
snapshot: {
|
||||
phase: "analyzing",
|
||||
currentAction: "forehand",
|
||||
rawAction: "forehand",
|
||||
visibleSegments: 1,
|
||||
unknownSegments: 0,
|
||||
durationMs: 1500,
|
||||
feedback: ["节奏稳定"],
|
||||
},
|
||||
},
|
||||
};
|
||||
return trpcResult(state.liveRuntime);
|
||||
case "analysis.runtimeHeartbeat": {
|
||||
const input = await readTrpcInput(route, operationIndex);
|
||||
if (state.liveRuntime.runtimeSession) {
|
||||
state.liveRuntime.runtimeSession = {
|
||||
...state.liveRuntime.runtimeSession,
|
||||
mediaSessionId: input?.mediaSessionId ?? state.liveRuntime.runtimeSession.mediaSessionId,
|
||||
snapshot: input?.snapshot ?? state.liveRuntime.runtimeSession.snapshot,
|
||||
lastHeartbeatAt: nowIso(),
|
||||
};
|
||||
}
|
||||
return trpcResult(state.liveRuntime);
|
||||
}
|
||||
case "analysis.runtimeRelease":
|
||||
state.liveRuntime = { role: "idle", runtimeSession: null };
|
||||
return trpcResult({ success: true, runtimeSession: null });
|
||||
case "analysis.liveSessionSave":
|
||||
return trpcResult({ sessionId: 1, trainingRecordId: 1 });
|
||||
case "task.list":
|
||||
@@ -588,6 +638,21 @@ async function handleMedia(route: Route, state: MockAppState) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (path.endsWith("/viewer-signal")) {
|
||||
if (state.viewerSignalConflictRemaining > 0) {
|
||||
state.viewerSignalConflictRemaining -= 1;
|
||||
await route.fulfill({
|
||||
status: 409,
|
||||
contentType: "application/json",
|
||||
body: JSON.stringify({ error: "viewer stream not ready" }),
|
||||
});
|
||||
return;
|
||||
}
|
||||
state.mediaSession.viewerCount = (state.mediaSession.viewerCount || 0) + 1;
|
||||
await fulfillJson(route, { viewerId: `viewer-${state.mediaSession.viewerCount}`, type: "answer", sdp: "mock-answer" });
|
||||
return;
|
||||
}
|
||||
|
||||
if (path.endsWith("/signal")) {
|
||||
state.mediaSession.status = "recording";
|
||||
await fulfillJson(route, { type: "answer", sdp: "mock-answer" });
|
||||
@@ -658,8 +723,11 @@ export async function installAppMocks(
|
||||
analyses?: any[];
|
||||
userName?: string;
|
||||
authMeNullResponsesAfterLogin?: number;
|
||||
liveViewerMode?: boolean;
|
||||
viewerSignalConflictOnce?: boolean;
|
||||
}
|
||||
) {
|
||||
const seededViewerSession = options?.liveViewerMode ? buildMediaSession(buildUser(options?.userName), "其他设备实时分析") : null;
|
||||
const state: MockAppState = {
|
||||
authenticated: options?.authenticated ?? false,
|
||||
user: buildUser(options?.userName),
|
||||
@@ -693,10 +761,83 @@ export async function installAppMocks(
|
||||
],
|
||||
tasks: [],
|
||||
activePlan: null,
|
||||
mediaSession: null,
|
||||
mediaSession: seededViewerSession,
|
||||
liveRuntime: options?.liveViewerMode
|
||||
? {
|
||||
role: "viewer",
|
||||
runtimeSession: {
|
||||
id: 777,
|
||||
title: "其他设备实时分析",
|
||||
sessionMode: "practice",
|
||||
mediaSessionId: seededViewerSession?.id || null,
|
||||
status: "active",
|
||||
startedAt: nowIso(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: nowIso(),
|
||||
snapshot: {
|
||||
phase: "analyzing",
|
||||
title: "其他设备实时分析",
|
||||
sessionMode: "practice",
|
||||
qualityPreset: "balanced",
|
||||
facingMode: "environment",
|
||||
deviceKind: "mobile",
|
||||
avatarEnabled: true,
|
||||
avatarKey: "gorilla",
|
||||
avatarLabel: "猩猩",
|
||||
updatedAt: Date.parse(nowIso()),
|
||||
currentAction: "forehand",
|
||||
rawAction: "forehand",
|
||||
durationMs: 3200,
|
||||
visibleSegments: 2,
|
||||
unknownSegments: 0,
|
||||
archivedVideoCount: 1,
|
||||
feedback: ["同步观看测试数据"],
|
||||
liveScore: {
|
||||
overall: 82,
|
||||
posture: 80,
|
||||
balance: 78,
|
||||
technique: 84,
|
||||
footwork: 76,
|
||||
consistency: 79,
|
||||
confidence: 88,
|
||||
},
|
||||
stabilityMeta: {
|
||||
windowFrames: 24,
|
||||
windowShare: 1,
|
||||
windowProgress: 1,
|
||||
switchCount: 1,
|
||||
stableMs: 1800,
|
||||
rawVolatility: 0.12,
|
||||
pending: false,
|
||||
candidateMs: 0,
|
||||
},
|
||||
recentSegments: [
|
||||
{
|
||||
actionType: "forehand",
|
||||
isUnknown: false,
|
||||
startMs: 800,
|
||||
endMs: 2800,
|
||||
durationMs: 2000,
|
||||
confidenceAvg: 0.82,
|
||||
score: 84,
|
||||
peakScore: 88,
|
||||
frameCount: 24,
|
||||
issueSummary: ["击球点略靠后"],
|
||||
keyFrames: [1000, 1800, 2600],
|
||||
clipLabel: "正手挥拍 00:00 - 00:02",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
: {
|
||||
role: "idle",
|
||||
runtimeSession: null,
|
||||
},
|
||||
nextVideoId: 100,
|
||||
nextTaskId: 1,
|
||||
authMeNullResponsesAfterLogin: options?.authMeNullResponsesAfterLogin ?? 0,
|
||||
viewerSignalConflictRemaining: options?.viewerSignalConflictOnce ? 1 : 0,
|
||||
};
|
||||
|
||||
await page.addInitScript(() => {
|
||||
@@ -725,6 +866,73 @@ export async function installAppMocks(
|
||||
return points;
|
||||
};
|
||||
|
||||
class FakeVideoTrack {
|
||||
kind = "video";
|
||||
enabled = true;
|
||||
muted = false;
|
||||
readyState = "live";
|
||||
id = "fake-video-track";
|
||||
label = "Fake Camera";
|
||||
|
||||
stop() {}
|
||||
|
||||
getSettings() {
|
||||
return {
|
||||
facingMode: "environment",
|
||||
width: 1280,
|
||||
height: 720,
|
||||
frameRate: 30,
|
||||
};
|
||||
}
|
||||
|
||||
getCapabilities() {
|
||||
return {};
|
||||
}
|
||||
|
||||
async applyConstraints() {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
class FakeAudioTrack {
|
||||
kind = "audio";
|
||||
enabled = true;
|
||||
muted = false;
|
||||
readyState = "live";
|
||||
id = "fake-audio-track";
|
||||
label = "Fake Mic";
|
||||
|
||||
stop() {}
|
||||
|
||||
getSettings() {
|
||||
return {};
|
||||
}
|
||||
|
||||
getCapabilities() {
|
||||
return {};
|
||||
}
|
||||
|
||||
async applyConstraints() {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
const createFakeMediaStream = (withAudio = false) => {
|
||||
const videoTrack = new FakeVideoTrack();
|
||||
const audioTrack = withAudio ? new FakeAudioTrack() : null;
|
||||
const tracks = audioTrack ? [videoTrack, audioTrack] : [videoTrack];
|
||||
return {
|
||||
active: true,
|
||||
id: `fake-stream-${Math.random().toString(36).slice(2)}`,
|
||||
getTracks: () => tracks,
|
||||
getVideoTracks: () => [videoTrack],
|
||||
getAudioTracks: () => (audioTrack ? [audioTrack] : []),
|
||||
addTrack: () => undefined,
|
||||
removeTrack: () => undefined,
|
||||
clone: () => createFakeMediaStream(withAudio),
|
||||
} as unknown as MediaStream;
|
||||
};
|
||||
|
||||
class FakePose {
|
||||
callback = null;
|
||||
|
||||
@@ -753,9 +961,19 @@ export async function installAppMocks(
|
||||
value: async () => undefined,
|
||||
});
|
||||
|
||||
Object.defineProperty(HTMLMediaElement.prototype, "srcObject", {
|
||||
configurable: true,
|
||||
get() {
|
||||
return (this as HTMLMediaElement & { __srcObject?: MediaStream }).__srcObject ?? null;
|
||||
},
|
||||
set(value) {
|
||||
(this as HTMLMediaElement & { __srcObject?: MediaStream }).__srcObject = value as MediaStream;
|
||||
},
|
||||
});
|
||||
|
||||
Object.defineProperty(HTMLCanvasElement.prototype, "captureStream", {
|
||||
configurable: true,
|
||||
value: () => new MediaStream(),
|
||||
value: () => createFakeMediaStream(),
|
||||
});
|
||||
|
||||
class FakeMediaRecorder extends EventTarget {
|
||||
@@ -801,9 +1019,12 @@ export async function installAppMocks(
|
||||
localDescription: { type: string; sdp: string } | null = null;
|
||||
remoteDescription: { type: string; sdp: string } | null = null;
|
||||
onconnectionstatechange: (() => void) | null = null;
|
||||
ontrack: ((event: { streams: MediaStream[] }) => void) | null = null;
|
||||
|
||||
addTrack() {}
|
||||
|
||||
addTransceiver() {}
|
||||
|
||||
async createOffer() {
|
||||
return { type: "offer", sdp: "mock-offer" };
|
||||
}
|
||||
@@ -817,6 +1038,7 @@ export async function installAppMocks(
|
||||
async setRemoteDescription(description: { type: string; sdp: string }) {
|
||||
this.remoteDescription = description;
|
||||
this.connectionState = "connected";
|
||||
this.ontrack?.({ streams: [createFakeMediaStream()] });
|
||||
this.onconnectionstatechange?.();
|
||||
}
|
||||
|
||||
@@ -839,7 +1061,7 @@ export async function installAppMocks(
|
||||
Object.defineProperty(navigator, "mediaDevices", {
|
||||
configurable: true,
|
||||
value: {
|
||||
getUserMedia: async () => new MediaStream(),
|
||||
getUserMedia: async (constraints?: { audio?: unknown }) => createFakeMediaStream(Boolean(constraints?.audio)),
|
||||
enumerateDevices: async () => [
|
||||
{ deviceId: "cam-1", kind: "videoinput", label: "Front Camera", groupId: "g1" },
|
||||
{ deviceId: "cam-2", kind: "videoinput", label: "Back Camera", groupId: "g1" },
|
||||
|
||||
在新工单中引用
屏蔽一个用户