Add auto archived overlay recordings for live analysis

这个提交包含在:
cryptocommuniums-afk
2026-03-16 11:59:51 +08:00
父节点 e3fe9a8e7b
当前提交 4fb2d092d7
修改 7 个文件,包含 377 行新增、60 行删除

查看文件

@@ -8,6 +8,26 @@ export type ChangeLogEntry = {
};
export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
{
version: "2026.03.16-live-analysis-overlay-archive",
releaseDate: "2026-03-16",
repoVersion: "e3fe9a8 + local changes",
summary: "实时分析新增 60 秒自动归档录像,录制内容会保留骨架、关键点和虚拟形象叠层,并同步进入视频库。",
features: [
"实时分析开始后会自动录制合成画布,每 60 秒自动切段归档",
"归档录像会保留原视频、骨架线、关键点和当前虚拟形象覆盖效果",
"归档片段会自动写入视频库,标签显示为“实时分析”",
"删除视频库中的实时分析录像时,不会删除已写入的实时分析数据和训练记录",
"线上 smoke 已确认 `https://te.hao.work/` 已切换到本次新构建,`/live-camera`、`/videos`、`/changelog` 页面均可正常访问",
],
tests: [
"pnpm check",
"pnpm test",
"pnpm build",
"pnpm test:e2e",
"Playwright smoke: 真实站点登录 H1,完成 /live-camera 引导、开始/结束分析,并确认 /videos 可见实时分析条目",
],
},
{
version: "2026.03.15-live-analysis-leave-hint",
releaseDate: "2026-03-15",

查看文件

@@ -662,18 +662,22 @@ function drawFullFigureAvatar(
drawLimbs(ctx, anchors, visual.limbStroke);
}
export function drawLiveCameraOverlay(
canvas: HTMLCanvasElement | null,
export function renderLiveCameraOverlayToContext(
ctx: CanvasRenderingContext2D | null,
width: number,
height: number,
landmarks: PosePoint[] | undefined,
avatarState?: AvatarRenderState,
options?: { clear?: boolean },
) {
const ctx = canvas?.getContext("2d");
if (!canvas || !ctx) return;
ctx.clearRect(0, 0, canvas.width, canvas.height);
if (!ctx) return;
if (options?.clear !== false) {
ctx.clearRect(0, 0, width, height);
}
if (!landmarks) return;
if (avatarState?.enabled) {
const anchors = getAvatarAnchors(landmarks, canvas.width, canvas.height);
const anchors = getAvatarAnchors(landmarks, width, height);
if (anchors) {
const sprite = getAvatarImage(avatarState.avatarKey);
const visual = AVATAR_VISUALS[avatarState.avatarKey];
@@ -715,8 +719,8 @@ export function drawLiveCameraOverlay(
const end = landmarks[to];
if (!start || !end || (start.visibility ?? 1) < 0.25 || (end.visibility ?? 1) < 0.25) return;
ctx.beginPath();
ctx.moveTo(start.x * canvas.width, start.y * canvas.height);
ctx.lineTo(end.x * canvas.width, end.y * canvas.height);
ctx.moveTo(start.x * width, start.y * height);
ctx.lineTo(end.x * width, end.y * height);
ctx.stroke();
});
@@ -724,7 +728,17 @@ export function drawLiveCameraOverlay(
if ((point.visibility ?? 1) < 0.25) return;
ctx.fillStyle = index >= 11 && index <= 16 ? "rgba(253, 224, 71, 0.95)" : "rgba(255,255,255,0.88)";
ctx.beginPath();
ctx.arc(point.x * canvas.width, point.y * canvas.height, index >= 11 && index <= 16 ? 5 : 4, 0, Math.PI * 2);
ctx.arc(point.x * width, point.y * height, index >= 11 && index <= 16 ? 5 : 4, 0, Math.PI * 2);
ctx.fill();
});
}
export function drawLiveCameraOverlay(
canvas: HTMLCanvasElement | null,
landmarks: PosePoint[] | undefined,
avatarState?: AvatarRenderState,
) {
const ctx = canvas?.getContext("2d");
if (!canvas || !ctx) return;
renderLiveCameraOverlayToContext(ctx, canvas.width, canvas.height, landmarks, avatarState, { clear: true });
}

查看文件

@@ -20,6 +20,7 @@ import {
createStableActionState,
drawLiveCameraOverlay,
getAvatarPreset,
renderLiveCameraOverlayToContext,
resolveAvatarKeyFromPrompt,
stabilizeActionStream,
type AvatarKey,
@@ -80,6 +81,14 @@ type ActionSegment = {
clipLabel: string;
};
type ArchivedAnalysisVideo = {
videoId: number;
url: string;
sequence: number;
durationMs: number;
title: string;
};
type Point = {
x: number;
y: number;
@@ -122,6 +131,7 @@ const SETUP_STEPS = [
const SEGMENT_MAX_MS = 10_000;
const MERGE_GAP_MS = 900;
const MIN_SEGMENT_MS = 1_200;
const ANALYSIS_RECORDING_SEGMENT_MS = 60_000;
const CAMERA_QUALITY_PRESETS: Record<CameraQualityPreset, { label: string; subtitle: string; description: string }> = {
economy: {
label: "节省流量",
@@ -482,10 +492,17 @@ export default function LiveCamera() {
const canvasRef = useRef<HTMLCanvasElement>(null);
const streamRef = useRef<MediaStream | null>(null);
const poseRef = useRef<any>(null);
const compositeCanvasRef = useRef<HTMLCanvasElement | null>(null);
const recorderRef = useRef<MediaRecorder | null>(null);
const recorderStreamRef = useRef<MediaStream | null>(null);
const recorderMimeTypeRef = useRef("video/webm");
const recorderChunksRef = useRef<Blob[]>([]);
const recorderStopPromiseRef = useRef<Promise<Blob | null> | null>(null);
const recorderStopPromiseRef = useRef<Promise<void> | null>(null);
const recorderSegmentStartedAtRef = useRef<number>(0);
const recorderSequenceRef = useRef(0);
const recorderRotateTimerRef = useRef<number>(0);
const recorderUploadQueueRef = useRef(Promise.resolve());
const archivedVideosRef = useRef<ArchivedAnalysisVideo[]>([]);
const analyzingRef = useRef(false);
const animationRef = useRef<number>(0);
const sessionStartedAtRef = useRef<number>(0);
@@ -525,6 +542,7 @@ export default function LiveCamera() {
const [avatarEnabled, setAvatarEnabled] = useState(false);
const [avatarKey, setAvatarKey] = useState<AvatarKey>("gorilla");
const [avatarPrompt, setAvatarPrompt] = useState("");
const [archivedVideoCount, setArchivedVideoCount] = useState(0);
const resolvedAvatarKey = useMemo(
() => resolveAvatarKeyFromPrompt(avatarPrompt, avatarKey),
@@ -536,6 +554,7 @@ export default function LiveCamera() {
onSuccess: () => {
utils.profile.stats.invalidate();
utils.analysis.liveSessionList.invalidate();
utils.video.list.invalidate();
utils.record.list.invalidate();
utils.achievement.list.invalidate();
utils.rating.current.invalidate();
@@ -621,16 +640,94 @@ export default function LiveCamera() {
}
}, [cameraActive, immersivePreview]);
const ensureCompositeCanvas = useCallback(() => {
if (typeof document === "undefined") {
return null;
}
if (!compositeCanvasRef.current) {
compositeCanvasRef.current = document.createElement("canvas");
}
return compositeCanvasRef.current;
}, []);
const renderCompositeFrame = useCallback((landmarks?: Point[]) => {
const video = videoRef.current;
const compositeCanvas = ensureCompositeCanvas();
if (!video || !compositeCanvas || video.videoWidth <= 0 || video.videoHeight <= 0) {
return;
}
if (compositeCanvas.width !== video.videoWidth || compositeCanvas.height !== video.videoHeight) {
compositeCanvas.width = video.videoWidth;
compositeCanvas.height = video.videoHeight;
}
const ctx = compositeCanvas.getContext("2d");
if (!ctx) return;
ctx.clearRect(0, 0, compositeCanvas.width, compositeCanvas.height);
ctx.drawImage(video, 0, 0, compositeCanvas.width, compositeCanvas.height);
renderLiveCameraOverlayToContext(
ctx,
compositeCanvas.width,
compositeCanvas.height,
landmarks,
avatarRenderRef.current,
{ clear: false },
);
}, [ensureCompositeCanvas]);
const queueArchivedVideoUpload = useCallback(async (blob: Blob, sequence: number, durationMs: number) => {
const format = recorderMimeTypeRef.current.includes("mp4") ? "mp4" : "webm";
const title = `实时分析录像 ${formatDateTimeShanghai(new Date(), {
year: undefined,
second: undefined,
})} · 第 ${sequence}`;
recorderUploadQueueRef.current = recorderUploadQueueRef.current
.then(async () => {
const fileBase64 = await blobToBase64(blob);
const uploaded = await uploadMutation.mutateAsync({
title,
format,
fileSize: blob.size,
duration: Math.max(1, Math.round(durationMs / 1000)),
exerciseType: "live_analysis",
fileBase64,
});
const nextVideo: ArchivedAnalysisVideo = {
videoId: uploaded.videoId,
url: uploaded.url,
sequence,
durationMs,
title,
};
archivedVideosRef.current = [...archivedVideosRef.current, nextVideo].sort((a, b) => a.sequence - b.sequence);
setArchivedVideoCount(archivedVideosRef.current.length);
})
.catch((error: any) => {
toast.error(`分析录像第 ${sequence} 段归档失败: ${error?.message || "未知错误"}`);
});
return recorderUploadQueueRef.current;
}, [uploadMutation]);
const stopSessionRecorder = useCallback(async () => {
const recorder = recorderRef.current;
if (!recorder) return null;
if (recorderRotateTimerRef.current) {
window.clearTimeout(recorderRotateTimerRef.current);
recorderRotateTimerRef.current = 0;
}
if (!recorder) {
await recorderUploadQueueRef.current;
return;
}
const stopPromise = recorderStopPromiseRef.current;
if (recorder.state !== "inactive") {
recorder.stop();
}
recorderRef.current = null;
recorderStopPromiseRef.current = null;
return stopPromise ?? null;
await (stopPromise ?? Promise.resolve());
await recorderUploadQueueRef.current;
}, []);
const stopCamera = useCallback(() => {
@@ -659,6 +756,9 @@ export default function LiveCamera() {
setRawAction("unknown");
setStabilityMeta(createEmptyStabilizedActionMeta());
setZoomState(readTrackZoomState(null));
archivedVideosRef.current = [];
recorderSequenceRef.current = 0;
setArchivedVideoCount(0);
setCameraActive(false);
}, [stopSessionRecorder]);
@@ -796,21 +896,35 @@ export default function LiveCamera() {
currentSegmentRef.current = createSegment(frame.action, elapsedMs, frame);
}, [flushSegment]);
const startSessionRecorder = useCallback((stream: MediaStream) => {
const startSessionRecorder = useCallback(function startSessionRecorderInternal() {
if (typeof MediaRecorder === "undefined") {
recorderRef.current = null;
recorderStopPromiseRef.current = Promise.resolve(null);
recorderStopPromiseRef.current = Promise.resolve();
return;
}
const compositeCanvas = ensureCompositeCanvas();
if (!compositeCanvas || typeof compositeCanvas.captureStream !== "function") {
recorderRef.current = null;
recorderStopPromiseRef.current = Promise.resolve();
return;
}
renderCompositeFrame();
recorderChunksRef.current = [];
const mimeType = pickRecorderMimeType();
recorderMimeTypeRef.current = mimeType;
const recorder = new MediaRecorder(stream, {
if (!recorderStreamRef.current) {
recorderStreamRef.current = compositeCanvas.captureStream(mobile ? 24 : 30);
}
const recorder = new MediaRecorder(recorderStreamRef.current, {
mimeType,
videoBitsPerSecond: getLiveAnalysisBitrate(qualityPreset, mobile),
});
recorderRef.current = recorder;
const sequence = recorderSequenceRef.current + 1;
recorderSequenceRef.current = sequence;
recorderSegmentStartedAtRef.current = Date.now();
recorder.ondataavailable = (event) => {
if (event.data && event.data.size > 0) {
@@ -820,14 +934,32 @@ export default function LiveCamera() {
recorderStopPromiseRef.current = new Promise((resolve) => {
recorder.onstop = () => {
const durationMs = Math.max(0, Date.now() - recorderSegmentStartedAtRef.current);
const type = recorderMimeTypeRef.current.includes("mp4") ? "video/mp4" : "video/webm";
const blob = recorderChunksRef.current.length > 0 ? new Blob(recorderChunksRef.current, { type }) : null;
resolve(blob);
recorderChunksRef.current = [];
recorderRef.current = null;
recorderStopPromiseRef.current = null;
if (blob && blob.size > 0 && durationMs > 0) {
void queueArchivedVideoUpload(blob, sequence, durationMs);
}
if (analyzingRef.current) {
startSessionRecorderInternal();
} else if (recorderStreamRef.current) {
recorderStreamRef.current.getTracks().forEach((track) => track.stop());
recorderStreamRef.current = null;
}
resolve();
};
});
recorder.start(1000);
}, [mobile, qualityPreset]);
recorder.start();
recorderRotateTimerRef.current = window.setTimeout(() => {
if (recorder.state === "recording") {
recorder.stop();
}
}, ANALYSIS_RECORDING_SEGMENT_MS);
}, [ensureCompositeCanvas, mobile, qualityPreset, queueArchivedVideoUpload, renderCompositeFrame]);
const persistSession = useCallback(async () => {
const endedAt = Date.now();
@@ -871,27 +1003,9 @@ export default function LiveCamera() {
? volatilitySamplesRef.current.reduce((sum, value) => sum + value, 0) / volatilitySamplesRef.current.length
: 0;
const avatarState = avatarRenderRef.current;
let uploadedVideo: { videoId: number; url: string } | null = null;
const recordedBlob = await stopSessionRecorder();
if (recordedBlob && recordedBlob.size > 0) {
const format = recorderMimeTypeRef.current.includes("mp4") ? "mp4" : "webm";
const fileBase64 = await blobToBase64(recordedBlob);
uploadedVideo = await uploadMutation.mutateAsync({
title: `实时分析 ${formatDateTimeShanghai(new Date(), {
year: undefined,
second: undefined,
})}`,
format,
fileSize: recordedBlob.size,
exerciseType: dominantAction,
fileBase64,
});
}
if (finalSegments.length === 0) {
return;
}
await stopSessionRecorder();
const archivedVideos = [...archivedVideosRef.current].sort((a, b) => a.sequence - b.sequence);
const primaryArchivedVideo = archivedVideos[0] ?? null;
await saveLiveSessionMutation.mutateAsync({
title: `实时分析 ${ACTION_META[dominantAction].label}`,
@@ -921,6 +1035,9 @@ export default function LiveCamera() {
rawActionVolatility: Number(averageRawVolatility.toFixed(4)),
avatarEnabled: avatarState.enabled,
avatarKey: avatarState.enabled ? avatarState.avatarKey : null,
autoRecordingEnabled: true,
autoRecordingSegmentMs: ANALYSIS_RECORDING_SEGMENT_MS,
archivedVideos,
mobile,
},
segments: finalSegments.map((segment) => ({
@@ -937,10 +1054,10 @@ export default function LiveCamera() {
keyFrames: segment.keyFrames,
clipLabel: segment.clipLabel,
})),
videoId: uploadedVideo?.videoId,
videoUrl: uploadedVideo?.url,
videoId: primaryArchivedVideo?.videoId,
videoUrl: primaryArchivedVideo?.url,
});
}, [flushSegment, liveScore, mobile, saveLiveSessionMutation, sessionMode, stopSessionRecorder, uploadMutation]);
}, [flushSegment, liveScore, mobile, saveLiveSessionMutation, sessionMode, stopSessionRecorder]);
const startAnalysis = useCallback(async () => {
if (!cameraActive || !videoRef.current || !streamRef.current) {
@@ -961,6 +1078,9 @@ export default function LiveCamera() {
stableActionStateRef.current = createStableActionState();
frameSamplesRef.current = [];
volatilitySamplesRef.current = [];
archivedVideosRef.current = [];
recorderSequenceRef.current = 0;
setArchivedVideoCount(0);
sessionStartedAtRef.current = Date.now();
setCurrentAction("unknown");
setRawAction("unknown");
@@ -968,7 +1088,7 @@ export default function LiveCamera() {
setFeedback([]);
setStabilityMeta(createEmptyStabilizedActionMeta());
setDurationMs(0);
startSessionRecorder(streamRef.current);
startSessionRecorder();
try {
const testFactory = (
@@ -1002,6 +1122,7 @@ export default function LiveCamera() {
}
drawLiveCameraOverlay(canvas, results.poseLandmarks, avatarRenderRef.current);
renderCompositeFrame(results.poseLandmarks);
if (!results.poseLandmarks) return;
const frameTimestamp = performance.now();
@@ -1063,7 +1184,7 @@ export default function LiveCamera() {
await stopSessionRecorder();
toast.error(`实时分析启动失败: ${error?.message || "未知错误"}`);
}
}, [appendFrameToSegment, cameraActive, saving, startSessionRecorder, stopSessionRecorder]);
}, [appendFrameToSegment, cameraActive, renderCompositeFrame, saving, startSessionRecorder, stopSessionRecorder]);
const stopAnalysis = useCallback(async () => {
if (!analyzingRef.current) return;
@@ -1084,7 +1205,7 @@ export default function LiveCamera() {
}
await persistSession();
setLeaveStatus("safe");
toast.success("实时分析已保存,并同步写入训练记录");
toast.success(`实时分析已保存,并同步写入训练记录${archivedVideosRef.current.length > 0 ? `;已归档 ${archivedVideosRef.current.length} 段分析录像` : ""}`);
await liveSessionsQuery.refetch();
} catch (error: any) {
setLeaveStatus("failed");
@@ -1345,7 +1466,7 @@ export default function LiveCamera() {
<Activity className="h-4 w-4" />
<AlertTitle></AlertTitle>
<AlertDescription>
</AlertDescription>
</Alert>
) : null}
@@ -1382,6 +1503,10 @@ export default function LiveCamera() {
<Video className="h-3.5 w-3.5" />
+
</Badge>
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
<PlayCircle className="h-3.5 w-3.5" />
60
</Badge>
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
<Camera className="h-3.5 w-3.5" />
{avatarEnabled ? `虚拟形象 ${resolvedAvatarLabel}` : "骨架叠加"}
@@ -1398,7 +1523,7 @@ export default function LiveCamera() {
<div>
<h1 className="text-3xl font-semibold tracking-tight"></h1>
<p className="mt-2 max-w-2xl text-sm leading-6 text-white/70">
24 10 4 3D Avatar
24 + / 60 10 4 3D Avatar
</p>
</div>
</div>
@@ -1527,13 +1652,36 @@ export default function LiveCamera() {
{renderPrimaryActions()}
</div>
</div>
<div className="mt-4 grid gap-3 rounded-[24px] border border-border/60 bg-muted/15 p-4 md:grid-cols-3">
<div className="rounded-2xl border border-border/60 bg-background/90 p-4">
<div className="text-[11px] uppercase tracking-[0.16em] text-muted-foreground"></div>
<div className="mt-2 text-lg font-semibold"> 60 </div>
<div className="mt-2 text-xs leading-5 text-muted-foreground">
线
</div>
</div>
<div className="rounded-2xl border border-border/60 bg-background/90 p-4">
<div className="text-[11px] uppercase tracking-[0.16em] text-muted-foreground"></div>
<div className="mt-2 text-lg font-semibold">{archivedVideoCount}</div>
<div className="mt-2 text-xs leading-5 text-muted-foreground">
</div>
</div>
<div className="rounded-2xl border border-border/60 bg-background/90 p-4">
<div className="text-[11px] uppercase tracking-[0.16em] text-muted-foreground"></div>
<div className="mt-2 text-lg font-semibold"></div>
<div className="mt-2 text-xs leading-5 text-muted-foreground">
使
</div>
</div>
</div>
<div className="mt-4 grid gap-3 rounded-[24px] border border-border/60 bg-muted/20 p-4 lg:grid-cols-[minmax(0,1.1fr)_180px_220px]">
<div className="space-y-3">
<div className="flex items-center justify-between gap-3">
<div>
<div className="text-sm font-medium"></div>
<div className="mt-1 text-xs text-muted-foreground">
使 10 4 3D Avatar
使 10 4 3D Avatar
</div>
</div>
<Switch

查看文件

@@ -1,11 +1,14 @@
# Tennis Training Hub - 变更日志
## 2026.03.15-live-camera-avatar-smoothing (2026-03-15)
## 2026.03.16-live-analysis-overlay-archive (2026-03-16)
### 功能更新
- `/live-camera` 新增 10 个免费动物虚拟形象,可将主体实时替换为猩猩、猴子、狗、猪、猫、狐狸、熊猫、狮子、老虎、兔子
- `/live-camera` 再新增 4 个免费的全身 3D Avatar 示例,可直接覆盖人物轮廓,并提供对应的 CC0 模型源链接
- `/live-camera` 新增实时分析自动录像,按 60 秒自动切段归档;归档视频写入视频库并标记为“实时分析”
- 实时分析录像改为录制“视频画面 + 骨架线 + 关键点 + 虚拟形象覆盖”的合成画布,回放中可直接看到分析叠层
- 实时分析记录与视频库解耦,用户删除视频库中的“实时分析”录像后,不会删除已保存的分析数据和训练记录
- 增加形象别名输入,当前可按输入内容自动映射到内置形象
- 实时分析动作稳定器从短窗口切换为 24 帧时间窗口,降低 1-2 秒内频繁跳动作的问题
- 动作切换新增确认阶段与延迟入库逻辑,连续动作区间改为只按稳定动作聚合
@@ -20,10 +23,15 @@
- `pnpm test`
- `pnpm build`
- `pnpm test:e2e`
- Playwright 线上 smoke
- `https://te.hao.work/live-camera` 真实登录 `H1` 后可完成引导、启用摄像头、开始分析、结束分析
- `https://te.hao.work/videos` 可见“实时分析”录像条目
- `https://te.hao.work/changelog` 已展示 `2026.03.16-live-analysis-overlay-archive` 条目与对应摘要
- 最终线上资源 revision`assets/index-BBOKkDaa.js``assets/index-BL6GQzUF.css`
### 仓库版本
- `264d494 + local changes`
- `e3fe9a8 + local changes`
## 2026.03.15-live-analysis-leave-hint (2026-03-15)

查看文件

@@ -187,6 +187,7 @@ export const appRouter = router({
title: z.string(),
format: z.string(),
fileSize: z.number(),
duration: z.number().optional(),
exerciseType: z.string().optional(),
fileBase64: z.string(),
}))
@@ -204,8 +205,9 @@ export const appRouter = router({
url: publicUrl,
format: input.format,
fileSize: input.fileSize,
duration: input.duration ?? null,
exerciseType: input.exerciseType || null,
analysisStatus: "pending",
analysisStatus: input.exerciseType === "live_analysis" ? "completed" : "pending",
});
return { videoId, url: publicUrl };

查看文件

@@ -68,6 +68,28 @@ test("live camera starts analysis and produces scores", async ({ page }) => {
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
});
test("live camera archives overlay videos into the library after analysis stops", async ({ page }) => {
await installAppMocks(page, { authenticated: true, videos: [] });
await page.goto("/live-camera");
await page.getByRole("button", { name: "下一步" }).click();
await page.getByRole("button", { name: "下一步" }).click();
await page.getByRole("button", { name: "下一步" }).click();
await page.getByRole("button", { name: /启用摄像头/ }).click();
await expect(page.getByTestId("live-camera-analyze-button")).toBeVisible();
await page.getByTestId("live-camera-analyze-button").click();
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
await page.getByRole("button", { name: "结束分析" }).click();
await expect(page.getByText("分析结果已保存")).toBeVisible({ timeout: 8_000 });
await page.goto("/videos");
await expect(page.getByTestId("video-card")).toHaveCount(1);
await expect(page.getByText("实时分析录像").first()).toBeVisible();
await expect(page.getByText("实时分析").first()).toBeVisible();
});
test("recorder flow archives a session and exposes it in videos", async ({ page }) => {
await installAppMocks(page, { authenticated: true, videos: [] });

查看文件

@@ -10,6 +10,19 @@ type MockUser = {
skillLevel: string;
trainingGoals: string | null;
ntrpRating: number;
manualNtrpRating: number | null;
manualNtrpCapturedAt: string | null;
heightCm: number | null;
weightKg: number | null;
sprintSpeedScore: number | null;
explosivePowerScore: number | null;
agilityScore: number | null;
enduranceScore: number | null;
flexibilityScore: number | null;
coreStabilityScore: number | null;
shoulderMobilityScore: number | null;
hipMobilityScore: number | null;
assessmentNotes: string | null;
totalSessions: number;
totalMinutes: number;
totalShots: number;
@@ -103,6 +116,19 @@ function buildUser(name = "TestPlayer"): MockUser {
skillLevel: "beginner",
trainingGoals: null,
ntrpRating: 2.8,
manualNtrpRating: 2.5,
manualNtrpCapturedAt: nowIso(),
heightCm: 178,
weightKg: 68,
sprintSpeedScore: 4,
explosivePowerScore: 4,
agilityScore: 4,
enduranceScore: 3,
flexibilityScore: 3,
coreStabilityScore: 4,
shoulderMobilityScore: 3,
hipMobilityScore: 4,
assessmentNotes: "每周可练 3 次,右肩偶尔偏紧。",
totalSessions: 12,
totalMinutes: 320,
totalShots: 280,
@@ -115,6 +141,7 @@ function buildUser(name = "TestPlayer"): MockUser {
}
function buildStats(user: MockUser) {
const hasSystemNtrp = user.ntrpRating != null;
return {
ntrpRating: user.ntrpRating,
totalSessions: user.totalSessions,
@@ -186,9 +213,45 @@ function buildStats(user: MockUser) {
matchReadiness: 70,
},
},
trainingProfileStatus: {
hasSystemNtrp,
isComplete: true,
missingFields: [],
effectiveNtrp: user.ntrpRating,
ntrpSource: hasSystemNtrp ? "system" : "manual",
assessmentSnapshot: {
heightCm: user.heightCm,
weightKg: user.weightKg,
sprintSpeedScore: user.sprintSpeedScore,
explosivePowerScore: user.explosivePowerScore,
agilityScore: user.agilityScore,
enduranceScore: user.enduranceScore,
flexibilityScore: user.flexibilityScore,
coreStabilityScore: user.coreStabilityScore,
shoulderMobilityScore: user.shoulderMobilityScore,
hipMobilityScore: user.hipMobilityScore,
assessmentNotes: user.assessmentNotes,
},
},
};
}
async function readTrpcInput(route: Route, operationIndex: number) {
const url = new URL(route.request().url());
const rawSearchInput = url.searchParams.get("input");
if (rawSearchInput) {
const parsed = JSON.parse(rawSearchInput);
return parsed?.json ?? parsed?.[operationIndex]?.json ?? null;
}
const postData = route.request().postData();
if (!postData) return null;
const parsed = JSON.parse(postData);
return parsed?.json ?? parsed?.[operationIndex]?.json ?? parsed?.[String(operationIndex)]?.json ?? null;
}
function buildMediaSession(user: MockUser, title: string): MockMediaSession {
return {
id: "session-e2e",
@@ -254,7 +317,7 @@ async function fulfillJson(route: Route, body: unknown) {
async function handleTrpc(route: Route, state: MockAppState) {
const url = new URL(route.request().url());
const operations = url.pathname.replace("/api/trpc/", "").split(",");
const results = operations.map((operation) => {
const results = await Promise.all(operations.map(async (operation, operationIndex) => {
switch (operation) {
case "auth.me":
if (state.authenticated && state.authMeNullResponsesAfterLogin > 0) {
@@ -267,16 +330,34 @@ async function handleTrpc(route: Route, state: MockAppState) {
return trpcResult({ user: state.user, isNew: false });
case "profile.stats":
return trpcResult(buildStats(state.user));
case "profile.update": {
const input = await readTrpcInput(route, operationIndex);
state.user = {
...state.user,
...input,
updatedAt: nowIso(),
manualNtrpCapturedAt:
input?.manualNtrpRating !== undefined
? input.manualNtrpRating == null
? null
: nowIso()
: state.user.manualNtrpCapturedAt,
};
return trpcResult({ success: true });
}
case "plan.active":
return trpcResult(state.activePlan);
case "plan.list":
return trpcResult(state.activePlan ? [state.activePlan] : []);
case "plan.generate":
case "plan.generate": {
const input = await readTrpcInput(route, operationIndex);
const durationDays = Number(input?.durationDays ?? 7);
const skillLevel = input?.skillLevel ?? state.user.skillLevel;
state.activePlan = {
id: 200,
title: `${state.user.name} 的训练计划`,
skillLevel: "beginner",
durationDays: 7,
skillLevel,
durationDays,
version: 1,
adjustmentNotes: null,
exercises: [
@@ -305,7 +386,7 @@ async function handleTrpc(route: Route, state: MockAppState) {
return trpcResult({
taskId: createTask(state, {
type: "training_plan_generate",
title: "7天训练计划生成",
title: `${durationDays}天训练计划生成`,
result: {
kind: "training_plan_generate",
planId: state.activePlan.id,
@@ -313,6 +394,7 @@ async function handleTrpc(route: Route, state: MockAppState) {
},
}).id,
});
}
case "plan.adjust":
return trpcResult({
taskId: createTask(state, {
@@ -326,6 +408,22 @@ async function handleTrpc(route: Route, state: MockAppState) {
});
case "video.list":
return trpcResult(state.videos);
case "video.upload": {
const input = await readTrpcInput(route, operationIndex);
const video = {
id: state.nextVideoId++,
title: input?.title || `实时分析录像 ${state.nextVideoId}`,
url: `/uploads/${state.nextVideoId}.${input?.format || "webm"}`,
format: input?.format || "webm",
fileSize: input?.fileSize || 1024 * 1024,
duration: input?.duration || 60,
exerciseType: input?.exerciseType || "live_analysis",
analysisStatus: "completed",
createdAt: nowIso(),
};
state.videos = [video, ...state.videos];
return trpcResult({ videoId: video.id, url: video.url });
}
case "analysis.list":
return trpcResult(state.analyses);
case "analysis.liveSessionList":
@@ -465,7 +563,7 @@ async function handleTrpc(route: Route, state: MockAppState) {
default:
return trpcResult(null);
}
});
}));
await fulfillJson(route, results);
}
@@ -655,6 +753,11 @@ export async function installAppMocks(
value: async () => undefined,
});
Object.defineProperty(HTMLCanvasElement.prototype, "captureStream", {
configurable: true,
value: () => new MediaStream(),
});
class FakeMediaRecorder extends EventTarget {
state = "inactive";
mimeType = "video/webm";