feat: add live camera multi-device viewer mode

这个提交包含在:
cryptocommuniums-afk
2026-03-16 16:39:14 +08:00
父节点 f0bbe4c82f
当前提交 4e4122d758
修改 15 个文件,包含 1523 行新增,110 行删除

查看文件

@@ -8,6 +8,28 @@ export type ChangeLogEntry = {
};
export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
{
version: "2026.03.16-live-camera-multidevice-viewer",
releaseDate: "2026-03-16",
repoVersion: "f0bbe4c",
summary: "实时分析新增同账号多端互斥和同步观看模式,分析持有端独占摄像头,其它端只能查看同步画面与核心识别结果。",
features: [
"同一账号在 `/live-camera` 进入实时分析后,会写入按用户维度的 runtime 锁,其他设备不能重复启动摄像头或分析",
"其他设备会自动进入“同步观看模式”,可订阅持有端的实时画面,并同步看到动作、评分、反馈、最近片段和归档段数",
"同步观看复用 media 服务的 WebRTC viewer 通道,传输的是带骨架、关键点和虚拟形象覆盖后的合成画面",
"runtime 锁按 session sid 区分持有端,兼容缺少 sid 的旧 token,超过 15 秒无心跳会自动判定为陈旧并释放",
"线上 smoke 已确认 `https://te.hao.work/live-camera` 当前仍在旧前端构建,公开站点资源 revision 还是 `index-BWEXNszf.js` / `index-BL6GQzUF.css`,本地新构建产物为 `index-BmsO49OJ.js`",
],
tests: [
"pnpm check",
"pnpm exec vitest run server/features.test.ts",
"go test ./... && go build ./... (media)",
"pnpm build",
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"live camera\"",
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"recorder flow archives a session and exposes it in videos\"",
"curl -I https://te.hao.work/live-camera",
],
},
{
version: "2026.03.16-live-analysis-overlay-archive",
releaseDate: "2026-03-16",

查看文件

@@ -50,6 +50,7 @@ export type MediaSession = {
previewUpdatedAt?: string;
streamConnected: boolean;
lastStreamAt?: string;
viewerCount?: number;
playback: {
webmUrl?: string;
mp4Url?: string;
@@ -122,6 +123,14 @@ export async function signalMediaSession(sessionId: string, payload: { sdp: stri
});
}
/**
 * Posts a viewer-side SDP offer for the given media session and resolves
 * with the media service's answer SDP plus the allocated viewer id.
 */
export async function signalMediaViewerSession(sessionId: string, payload: { sdp: string; type: string }) {
const endpoint = `/sessions/${sessionId}/viewer-signal`;
const init = {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(payload),
};
return request<{ viewerId: string; sdp: string; type: string }>(endpoint, init);
}
export async function uploadMediaSegment(
sessionId: string,
sequence: number,

查看文件

@@ -1,5 +1,10 @@
import { useAuth } from "@/_core/hooks/useAuth";
import { trpc } from "@/lib/trpc";
import {
createMediaSession,
signalMediaSession,
signalMediaViewerSession,
} from "@/lib/media";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
@@ -110,6 +115,35 @@ type AnalyzedFrame = {
feedback: string[];
};
// Role of this device under the per-user runtime lock: "owner" holds the
// camera/analysis, "viewer" only mirrors the owner's synced state.
type RuntimeRole = "idle" | "owner" | "viewer";
// Lightweight state snapshot the owner publishes via heartbeat so viewer
// devices can render the core analysis readouts without a local camera.
type RuntimeSnapshot = {
phase?: "idle" | "analyzing" | "saving" | "safe" | "failed";
startedAt?: number;
durationMs?: number;
currentAction?: ActionType;
rawAction?: ActionType;
feedback?: string[];
liveScore?: PoseScore | null;
stabilityMeta?: Partial<StabilizedActionMeta>;
// Segment counts are sent as totals; only the tail of the segment list
// itself is included (see recentSegments).
visibleSegments?: number;
unknownSegments?: number;
archivedVideoCount?: number;
recentSegments?: ActionSegment[];
};
// Server-side record of the active runtime lock session for this user.
type RuntimeSession = {
id: number;
title: string | null;
sessionMode: SessionMode;
// Media session id of the owner's broadcast stream, if one is live.
mediaSessionId: string | null;
status: "idle" | "active" | "ended";
startedAt: string | null;
endedAt: string | null;
lastHeartbeatAt: string | null;
snapshot: RuntimeSnapshot | null;
};
const ACTION_META: Record<ActionType, { label: string; tone: string; accent: string }> = {
forehand: { label: "正手挥拍", tone: "bg-emerald-500/10 text-emerald-700", accent: "bg-emerald-500" },
backhand: { label: "反手挥拍", tone: "bg-sky-500/10 text-sky-700", accent: "bg-sky-500" },
@@ -150,6 +184,23 @@ const CAMERA_QUALITY_PRESETS: Record<CameraQualityPreset, { label: string; subti
},
};
// Resolves once the peer has finished gathering ICE candidates; resolves
// immediately when gathering is already complete. The listener removes
// itself on completion to avoid leaking handlers.
function waitForIceGathering(peer: RTCPeerConnection) {
return new Promise<void>((resolve) => {
if (peer.iceGatheringState === "complete") {
resolve();
return;
}
const settle = () => {
if (peer.iceGatheringState !== "complete") {
return;
}
peer.removeEventListener("icegatheringstatechange", settle);
resolve();
};
peer.addEventListener("icegatheringstatechange", settle);
});
}
// Restricts value to the inclusive [min, max] range.
function clamp(value: number, min: number, max: number) {
const upperBounded = Math.min(max, value);
return Math.max(min, upperBounded);
}
@@ -484,7 +535,7 @@ function getSessionBand(input: { overallScore: number; knownRatio: number; effec
}
export default function LiveCamera() {
useAuth();
const { user } = useAuth();
const utils = trpc.useUtils();
const mobile = useMemo(() => isMobileDevice(), []);
@@ -493,6 +544,13 @@ export default function LiveCamera() {
const streamRef = useRef<MediaStream | null>(null);
const poseRef = useRef<any>(null);
const compositeCanvasRef = useRef<HTMLCanvasElement | null>(null);
const broadcastPeerRef = useRef<RTCPeerConnection | null>(null);
const broadcastStreamRef = useRef<MediaStream | null>(null);
const broadcastSessionIdRef = useRef<string | null>(null);
const viewerPeerRef = useRef<RTCPeerConnection | null>(null);
const viewerSessionIdRef = useRef<string | null>(null);
const runtimeIdRef = useRef<number | null>(null);
const heartbeatTimerRef = useRef<number>(0);
const recorderRef = useRef<MediaRecorder | null>(null);
const recorderStreamRef = useRef<MediaStream | null>(null);
const recorderMimeTypeRef = useRef("video/webm");
@@ -513,6 +571,14 @@ export default function LiveCamera() {
const segmentsRef = useRef<ActionSegment[]>([]);
const frameSamplesRef = useRef<PoseScore[]>([]);
const volatilitySamplesRef = useRef<number[]>([]);
const currentActionRef = useRef<ActionType>("unknown");
const rawActionRef = useRef<ActionType>("unknown");
const liveScoreRef = useRef<PoseScore | null>(null);
const feedbackRef = useRef<string[]>([]);
const durationMsRef = useRef(0);
const leaveStatusRef = useRef<"idle" | "analyzing" | "saving" | "safe" | "failed">("idle");
const sessionModeRef = useRef<SessionMode>("practice");
const stabilityMetaRef = useRef<StabilizedActionMeta>(createEmptyStabilizedActionMeta());
const zoomTargetRef = useRef(1);
const avatarRenderRef = useRef<AvatarRenderState>({
enabled: false,
@@ -543,6 +609,8 @@ export default function LiveCamera() {
const [avatarKey, setAvatarKey] = useState<AvatarKey>("gorilla");
const [avatarPrompt, setAvatarPrompt] = useState("");
const [archivedVideoCount, setArchivedVideoCount] = useState(0);
const [viewerConnected, setViewerConnected] = useState(false);
const [viewerError, setViewerError] = useState("");
const resolvedAvatarKey = useMemo(
() => resolveAvatarKeyFromPrompt(avatarPrompt, avatarKey),
@@ -562,6 +630,17 @@ export default function LiveCamera() {
},
});
const liveSessionsQuery = trpc.analysis.liveSessionList.useQuery({ limit: 8 });
const runtimeQuery = trpc.analysis.runtimeGet.useQuery(undefined, {
refetchInterval: 1000,
refetchIntervalInBackground: true,
});
const runtimeAcquireMutation = trpc.analysis.runtimeAcquire.useMutation();
const runtimeHeartbeatMutation = trpc.analysis.runtimeHeartbeat.useMutation();
const runtimeReleaseMutation = trpc.analysis.runtimeRelease.useMutation();
const runtimeRole = (runtimeQuery.data?.role ?? "idle") as RuntimeRole;
const runtimeSession = (runtimeQuery.data?.runtimeSession ?? null) as RuntimeSession | null;
const runtimeSnapshot = runtimeSession?.snapshot ?? null;
useEffect(() => {
avatarRenderRef.current = {
@@ -571,6 +650,38 @@ export default function LiveCamera() {
};
}, [avatarEnabled, avatarPrompt, resolvedAvatarKey]);
// Mirror the latest state values into refs so stable callbacks (heartbeat
// snapshot building) can read current values without re-subscribing.
useEffect(() => {
currentActionRef.current = currentAction;
}, [currentAction]);
useEffect(() => {
rawActionRef.current = rawAction;
}, [rawAction]);
useEffect(() => {
liveScoreRef.current = liveScore;
}, [liveScore]);
useEffect(() => {
feedbackRef.current = feedback;
}, [feedback]);
useEffect(() => {
durationMsRef.current = durationMs;
}, [durationMs]);
useEffect(() => {
leaveStatusRef.current = leaveStatus;
}, [leaveStatus]);
useEffect(() => {
sessionModeRef.current = sessionMode;
}, [sessionMode]);
useEffect(() => {
stabilityMetaRef.current = stabilityMeta;
}, [stabilityMeta]);
const visibleSegments = useMemo(
() => segments.filter((segment) => !segment.isUnknown).sort((a, b) => b.startMs - a.startMs),
[segments],
@@ -580,10 +691,20 @@ export default function LiveCamera() {
() => segmentFilter === "all" ? visibleSegments : visibleSegments.filter((segment) => segment.actionType === segmentFilter),
[segmentFilter, visibleSegments],
);
// Viewer devices only receive the owner's most recent segments via the
// runtime snapshot; drop "unknown" ones to match the owner-side display.
const viewerRecentSegments = useMemo(
() => (runtimeSnapshot?.recentSegments ?? []).filter((segment) => !segment.isUnknown),
[runtimeSnapshot?.recentSegments],
);
// Select viewer-synced vs locally-tracked segments based on runtime role.
const displayVisibleSegments = runtimeRole === "viewer" ? viewerRecentSegments : visibleSegments;
const displayFilteredSegments = runtimeRole === "viewer"
? (segmentFilter === "all"
? viewerRecentSegments
: viewerRecentSegments.filter((segment) => segment.actionType === segmentFilter))
: filteredVisibleSegments;
const actionStats = useMemo(() => {
const totals = new Map<ActionType, { count: number; durationMs: number; averageScore: number; averageConfidence: number }>();
visibleSegments.forEach((segment) => {
displayVisibleSegments.forEach((segment) => {
const current = totals.get(segment.actionType) ?? {
count: 0,
durationMs: 0,
@@ -599,7 +720,7 @@ export default function LiveCamera() {
});
});
const totalDuration = Math.max(1, visibleSegments.reduce((sum, segment) => sum + segment.durationMs, 0));
const totalDuration = Math.max(1, displayVisibleSegments.reduce((sum, segment) => sum + segment.durationMs, 0));
return Array.from(totals.entries())
.map(([actionType, value]) => ({
actionType,
@@ -607,22 +728,25 @@ export default function LiveCamera() {
sharePct: Math.round((value.durationMs / totalDuration) * 100),
}))
.sort((a, b) => b.durationMs - a.durationMs);
}, [visibleSegments]);
}, [displayVisibleSegments]);
const bestSegment = useMemo(
() => visibleSegments.reduce<ActionSegment | null>((best, segment) => {
() => displayVisibleSegments.reduce<ActionSegment | null>((best, segment) => {
if (!best) return segment;
return segment.score > best.score ? segment : best;
}, null),
[visibleSegments],
[displayVisibleSegments],
);
const knownRatio = segments.length > 0 ? visibleSegments.length / segments.length : 0;
const totalDisplaySegments = runtimeRole === "viewer"
? (runtimeSnapshot?.visibleSegments ?? displayVisibleSegments.length) + (runtimeSnapshot?.unknownSegments ?? 0)
: segments.length;
const knownRatio = totalDisplaySegments > 0 ? displayVisibleSegments.length / totalDisplaySegments : 0;
const sessionBand = useMemo(
() => getSessionBand({
overallScore: liveScore?.overall || 0,
overallScore: (runtimeRole === "viewer" ? runtimeSnapshot?.liveScore?.overall : liveScore?.overall) || 0,
knownRatio,
effectiveSegments: visibleSegments.length,
effectiveSegments: displayVisibleSegments.length,
}),
[knownRatio, liveScore?.overall, visibleSegments.length],
[displayVisibleSegments.length, knownRatio, liveScore?.overall, runtimeRole, runtimeSnapshot?.liveScore?.overall],
);
useEffect(() => {
@@ -730,6 +854,189 @@ export default function LiveCamera() {
await recorderUploadQueueRef.current;
}, []);
// Build the snapshot sent with heartbeats and on release. Reads from refs
// (not state) so the callback identity stays stable while values are current.
const buildRuntimeSnapshot = useCallback((phase?: RuntimeSnapshot["phase"]): RuntimeSnapshot => ({
phase: phase ?? leaveStatusRef.current,
startedAt: sessionStartedAtRef.current || undefined,
durationMs: durationMsRef.current,
currentAction: currentActionRef.current,
rawAction: rawActionRef.current,
feedback: feedbackRef.current,
liveScore: liveScoreRef.current,
stabilityMeta: stabilityMetaRef.current,
visibleSegments: segmentsRef.current.filter((segment) => !segment.isUnknown).length,
unknownSegments: segmentsRef.current.filter((segment) => segment.isUnknown).length,
archivedVideoCount: archivedVideosRef.current.length,
// Only the last 5 segments cross the wire to keep heartbeats small.
recentSegments: segmentsRef.current.slice(-5),
}), []);
// Tear down the owner-side broadcast: drop the session id, close the
// RTCPeerConnection, then stop the canvas capture tracks.
const closeBroadcastPeer = useCallback(() => {
broadcastSessionIdRef.current = null;
if (broadcastPeerRef.current) {
// Detach the handler first so closing does not trigger state callbacks.
broadcastPeerRef.current.onconnectionstatechange = null;
broadcastPeerRef.current.close();
broadcastPeerRef.current = null;
}
if (broadcastStreamRef.current) {
broadcastStreamRef.current.getTracks().forEach((track) => track.stop());
broadcastStreamRef.current = null;
}
}, []);
// Tear down the viewer-side subscription peer and mark the viewer feed
// as disconnected.
const closeViewerPeer = useCallback(() => {
viewerSessionIdRef.current = null;
if (viewerPeerRef.current) {
// Detach handlers before close so teardown does not re-enter setState.
viewerPeerRef.current.ontrack = null;
viewerPeerRef.current.onconnectionstatechange = null;
viewerPeerRef.current.close();
viewerPeerRef.current = null;
}
setViewerConnected(false);
}, []);
// Release the per-user runtime lock, sending a final snapshot tagged with
// the terminal phase. Local runtime state (id, broadcast session id,
// heartbeat interval) is always cleared, even if the release RPC fails.
const releaseRuntime = useCallback(async (phase: RuntimeSnapshot["phase"]) => {
if (!runtimeIdRef.current) return;
try {
await runtimeReleaseMutation.mutateAsync({
runtimeId: runtimeIdRef.current,
snapshot: buildRuntimeSnapshot(phase),
});
} catch {
// Ignore runtime release errors and let the server-side stale timeout recover.
} finally {
runtimeIdRef.current = null;
broadcastSessionIdRef.current = null;
// Stop the heartbeat loop so the released runtime is not kept alive.
if (heartbeatTimerRef.current) {
window.clearInterval(heartbeatTimerRef.current);
heartbeatTimerRef.current = 0;
}
// Refresh role/runtime state so the UI reflects the release promptly.
void runtimeQuery.refetch();
}
}, [buildRuntimeSnapshot, runtimeQuery, runtimeReleaseMutation]);
// Start (or restart) the 1s heartbeat that keeps the runtime lock fresh and
// publishes the latest snapshot. Optionally records the broadcast media
// session id so viewer devices can locate the stream.
const startRuntimeHeartbeatLoop = useCallback((mediaSessionId?: string | null) => {
if (!runtimeIdRef.current) return;
if (typeof mediaSessionId === "string") {
broadcastSessionIdRef.current = mediaSessionId;
}
// Clear any previous interval so only one heartbeat loop ever runs.
if (heartbeatTimerRef.current) {
window.clearInterval(heartbeatTimerRef.current);
heartbeatTimerRef.current = 0;
}
const sendHeartbeat = () => {
if (!runtimeIdRef.current) return;
runtimeHeartbeatMutation.mutate({
runtimeId: runtimeIdRef.current,
mediaSessionId: broadcastSessionIdRef.current,
snapshot: buildRuntimeSnapshot(),
});
};
// Fire immediately so the lock is refreshed before the first interval tick.
sendHeartbeat();
heartbeatTimerRef.current = window.setInterval(sendHeartbeat, 1000);
}, [buildRuntimeSnapshot, runtimeHeartbeatMutation]);
// Create a media session and publish the composited canvas (video with
// overlays baked in) as a WebRTC broadcast so viewer devices can subscribe.
// Returns the media session id; throws when the user or captureStream API
// is unavailable.
const startBroadcastSession = useCallback(async () => {
if (!user?.id) {
throw new Error("当前用户信息未就绪");
}
const compositeCanvas = ensureCompositeCanvas();
// captureStream is not available in every browser — bail out clearly.
if (!compositeCanvas || typeof compositeCanvas.captureStream !== "function") {
throw new Error("当前浏览器不支持同步观看推流");
}
// Draw one frame first so the captured stream does not start blank.
renderCompositeFrame();
const sessionResponse = await createMediaSession({
userId: String(user.id),
title: `实时分析同步 ${formatDateTimeShanghai(new Date(), {
year: undefined,
second: undefined,
})}`,
format: "webm",
mimeType: "video/webm",
qualityPreset,
facingMode: facing,
deviceKind: mobile ? "mobile" : "desktop",
});
const sessionId = sessionResponse.session.id;
// Lower frame rate on mobile to reduce encode/upload load.
const stream = compositeCanvas.captureStream(mobile ? 24 : 30);
broadcastStreamRef.current = stream;
const peer = new RTCPeerConnection({
iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
});
broadcastPeerRef.current = peer;
stream.getTracks().forEach((track) => peer.addTrack(track, stream));
const offer = await peer.createOffer();
await peer.setLocalDescription(offer);
// Wait for full ICE gathering: signaling is a single offer/answer round
// trip with the media service, so all candidates must be in the SDP.
await waitForIceGathering(peer);
const answer = await signalMediaSession(sessionId, {
sdp: peer.localDescription?.sdp || "",
type: peer.localDescription?.type || "offer",
});
await peer.setRemoteDescription({
type: answer.type as RTCSdpType,
sdp: answer.sdp,
});
return sessionId;
}, [ensureCompositeCanvas, facing, mobile, qualityPreset, renderCompositeFrame, user?.id]);
// Connect a recv-only WebRTC peer to the owner's composited broadcast and
// attach the remote stream to the main <video> element.
const startViewerStream = useCallback(async (mediaSessionId: string) => {
// Reuse a live connection for the same session, but allow a manual
// reconnect when the previous peer has already died. Without the state
// check, a failed/closed peer left in the refs made every "重新同步"
// retry for the same session id a silent no-op.
const existingPeer = viewerPeerRef.current;
if (
viewerSessionIdRef.current === mediaSessionId &&
existingPeer &&
existingPeer.connectionState !== "failed" &&
existingPeer.connectionState !== "closed" &&
existingPeer.connectionState !== "disconnected"
) {
return;
}
closeViewerPeer();
setViewerError("");
const peer = new RTCPeerConnection({
iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
});
viewerPeerRef.current = peer;
viewerSessionIdRef.current = mediaSessionId;
// Receive-only: the viewer never sends media back.
peer.addTransceiver("video", { direction: "recvonly" });
peer.ontrack = (event) => {
const nextStream = event.streams[0] ?? new MediaStream([event.track]);
if (videoRef.current) {
videoRef.current.srcObject = nextStream;
// Autoplay can be rejected by the browser; ignore and rely on user action.
void videoRef.current.play().catch(() => undefined);
}
setViewerConnected(true);
};
peer.onconnectionstatechange = () => {
if (peer.connectionState === "failed" || peer.connectionState === "closed" || peer.connectionState === "disconnected") {
setViewerConnected(false);
}
};
const offer = await peer.createOffer();
await peer.setLocalDescription(offer);
// Single offer/answer round trip with the media service — wait for all
// ICE candidates so the posted SDP is complete (no trickle ICE).
await waitForIceGathering(peer);
const answer = await signalMediaViewerSession(mediaSessionId, {
sdp: peer.localDescription?.sdp || "",
type: peer.localDescription?.type || "offer",
});
await peer.setRemoteDescription({
type: answer.type as RTCSdpType,
sdp: answer.sdp,
});
}, [closeViewerPeer]);
const stopCamera = useCallback(() => {
if (animationRef.current) {
cancelAnimationFrame(animationRef.current);
@@ -742,11 +1049,12 @@ export default function LiveCamera() {
analyzingRef.current = false;
setAnalyzing(false);
void stopSessionRecorder();
const localStream = streamRef.current;
if (streamRef.current) {
streamRef.current.getTracks().forEach((track) => track.stop());
streamRef.current = null;
}
if (videoRef.current) {
if (videoRef.current && localStream && videoRef.current.srcObject === localStream) {
videoRef.current.srcObject = null;
}
actionHistoryRef.current = [];
@@ -762,11 +1070,36 @@ export default function LiveCamera() {
setCameraActive(false);
}, [stopSessionRecorder]);
// A viewer device must never hold the camera — release it if the role
// flips while the camera is active.
useEffect(() => {
if (runtimeRole === "viewer" && cameraActive) {
stopCamera();
}
}, [cameraActive, runtimeRole, stopCamera]);
// Manage the viewer-side WebRTC subscription based on the runtime role
// and the owner's advertised media session id.
useEffect(() => {
if (runtimeRole !== "viewer" || !runtimeSession?.mediaSessionId) {
if (!cameraActive) {
closeViewerPeer();
}
setViewerError("");
return;
}
void startViewerStream(runtimeSession.mediaSessionId).catch((error: any) => {
const message = error?.message || "同步画面连接失败";
// NOTE(review): 409 looks like a transient viewer-slot conflict that the
// next poll retries, so it is not surfaced — confirm against media service.
if (!/409/.test(message)) {
setViewerError(message);
}
});
}, [cameraActive, closeViewerPeer, runtimeRole, runtimeSession?.mediaSessionId, startViewerStream]);
useEffect(() => {
return () => {
stopCamera();
closeBroadcastPeer();
closeViewerPeer();
};
}, [stopCamera]);
}, [closeBroadcastPeer, closeViewerPeer, stopCamera]);
const syncZoomState = useCallback(async (preferredZoom?: number, providedTrack?: MediaStreamTrack | null) => {
const track = providedTrack || streamRef.current?.getVideoTracks()[0] || null;
@@ -813,6 +1146,10 @@ export default function LiveCamera() {
preferredZoom = zoomTargetRef.current,
preset: CameraQualityPreset = qualityPreset,
) => {
if (runtimeRole === "viewer") {
toast.error("当前账号已有其他设备正在实时分析,请切换到同步观看模式");
return;
}
try {
if (streamRef.current) {
streamRef.current.getTracks().forEach((track) => track.stop());
@@ -835,7 +1172,7 @@ export default function LiveCamera() {
} catch (error: any) {
toast.error(`摄像头启动失败: ${error?.message || "未知错误"}`);
}
}, [facing, mobile, qualityPreset, syncZoomState]);
}, [facing, mobile, qualityPreset, runtimeRole, syncZoomState]);
const switchCamera = useCallback(async () => {
const nextFacing: CameraFacing = facing === "user" ? "environment" : "user";
@@ -1065,6 +1402,27 @@ export default function LiveCamera() {
return;
}
if (analyzingRef.current || saving) return;
if (runtimeRole === "viewer") {
toast.error("当前设备处于同步观看模式,不能重复开启分析");
return;
}
try {
const title = `实时分析 ${ACTION_META[currentActionRef.current].label}`;
const runtime = await runtimeAcquireMutation.mutateAsync({
title,
sessionMode,
});
if (runtime.role === "viewer") {
runtimeIdRef.current = null;
toast.error("同一账号已有其他设备正在实时分析,本机已切换为同步观看模式");
await runtimeQuery.refetch();
return;
}
runtimeIdRef.current = runtime.runtimeSession?.id ?? null;
setViewerError("");
analyzingRef.current = true;
setAnalyzing(true);
@@ -1090,7 +1448,9 @@ export default function LiveCamera() {
setDurationMs(0);
startSessionRecorder();
try {
const mediaSessionId = await startBroadcastSession();
startRuntimeHeartbeatLoop(mediaSessionId);
const testFactory = (
window as typeof window & {
__TEST_MEDIAPIPE_FACTORY__?: () => Promise<{ Pose: any }>;
@@ -1182,9 +1542,25 @@ export default function LiveCamera() {
setAnalyzing(false);
setLeaveStatus("idle");
await stopSessionRecorder();
closeBroadcastPeer();
await releaseRuntime("failed");
toast.error(`实时分析启动失败: ${error?.message || "未知错误"}`);
}
}, [appendFrameToSegment, cameraActive, renderCompositeFrame, saving, startSessionRecorder, stopSessionRecorder]);
}, [
appendFrameToSegment,
cameraActive,
closeBroadcastPeer,
releaseRuntime,
runtimeAcquireMutation,
runtimeQuery,
runtimeRole,
saving,
sessionMode,
startBroadcastSession,
startRuntimeHeartbeatLoop,
startSessionRecorder,
stopSessionRecorder,
]);
const stopAnalysis = useCallback(async () => {
if (!analyzingRef.current) return;
@@ -1192,6 +1568,7 @@ export default function LiveCamera() {
setAnalyzing(false);
setSaving(true);
setLeaveStatus("saving");
let releasePhase: RuntimeSnapshot["phase"] = "safe";
if (animationRef.current) {
cancelAnimationFrame(animationRef.current);
@@ -1205,15 +1582,19 @@ export default function LiveCamera() {
}
await persistSession();
setLeaveStatus("safe");
releasePhase = "safe";
toast.success(`实时分析已保存,并同步写入训练记录${archivedVideosRef.current.length > 0 ? `;已归档 ${archivedVideosRef.current.length} 段分析录像` : ""}`);
await liveSessionsQuery.refetch();
} catch (error: any) {
setLeaveStatus("failed");
releasePhase = "failed";
toast.error(`保存实时分析失败: ${error?.message || "未知错误"}`);
} finally {
closeBroadcastPeer();
await releaseRuntime(releasePhase);
setSaving(false);
}
}, [liveSessionsQuery, persistSession]);
}, [closeBroadcastPeer, liveSessionsQuery, persistSession, releaseRuntime]);
useEffect(() => {
if (!analyzing && !saving) {
@@ -1235,26 +1616,72 @@ export default function LiveCamera() {
await startCamera(facing, zoomTargetRef.current, qualityPreset);
}, [facing, qualityPreset, startCamera]);
const heroAction = ACTION_META[currentAction];
const rawActionMeta = ACTION_META[rawAction];
const pendingActionMeta = stabilityMeta.pendingAction ? ACTION_META[stabilityMeta.pendingAction] : null;
const displayAction = runtimeRole === "viewer" ? (runtimeSnapshot?.currentAction ?? "unknown") : currentAction;
const displayRawAction = runtimeRole === "viewer" ? (runtimeSnapshot?.rawAction ?? "unknown") : rawAction;
const displayScore = runtimeRole === "viewer" ? (runtimeSnapshot?.liveScore ?? null) : liveScore;
const displayFeedback = runtimeRole === "viewer" ? (runtimeSnapshot?.feedback ?? []) : feedback;
const displayDurationMs = runtimeRole === "viewer" ? (runtimeSnapshot?.durationMs ?? 0) : durationMs;
const displayStabilityMeta = runtimeRole === "viewer"
? {
...createEmptyStabilizedActionMeta(),
...runtimeSnapshot?.stabilityMeta,
}
: stabilityMeta;
const hasVideoFeed = cameraActive || viewerConnected;
const heroAction = ACTION_META[displayAction];
const rawActionMeta = ACTION_META[displayRawAction];
const pendingActionMeta = displayStabilityMeta.pendingAction ? ACTION_META[displayStabilityMeta.pendingAction] : null;
const resolvedAvatarPreset = getAvatarPreset(resolvedAvatarKey);
const resolvedAvatarLabel = resolvedAvatarPreset?.label || "猩猩";
const animalAvatarPresets = AVATAR_PRESETS.filter((preset) => preset.category === "animal");
const fullBodyAvatarPresets = AVATAR_PRESETS.filter((preset) => preset.category === "full-body-3d");
const previewTitle = analyzing
? stabilityMeta.pending && pendingActionMeta
const previewTitle = runtimeRole === "viewer"
? viewerConnected
? "同步观看中"
: "正在连接同步画面"
: analyzing
? displayStabilityMeta.pending && pendingActionMeta
? `${pendingActionMeta.label} 切换确认中`
: `${heroAction.label} 识别中`
: cameraActive
? "准备开始实时分析"
: "摄像头待启动";
const viewerModeLabel = runtimeSession?.title || "其他设备正在实时分析";
const renderPrimaryActions = (rail = false) => {
const buttonClass = rail
? "h-14 w-14 rounded-2xl border border-white/10 bg-white/10 text-white hover:bg-white/20"
: "h-11 rounded-2xl px-4";
if (runtimeRole === "viewer") {
return (
<>
<Button
variant={rail ? "secondary" : "default"}
className={buttonClass}
onClick={() => {
if (runtimeSession?.mediaSessionId) {
void startViewerStream(runtimeSession.mediaSessionId).catch((error: any) => {
toast.error(`同步观看连接失败: ${error?.message || "未知错误"}`);
});
}
}}
disabled={!runtimeSession?.mediaSessionId}
>
<Monitor className={rail ? "h-5 w-5" : "mr-2 h-4 w-4"} />
{!rail && (viewerConnected ? "重新同步" : "同步观看")}
</Button>
{!rail ? (
<Button variant="outline" className={buttonClass} disabled>
<CameraOff className="mr-2 h-4 w-4" />
</Button>
) : null}
</>
);
}
if (!cameraActive) {
return (
<Button
@@ -1491,6 +1918,24 @@ export default function LiveCamera() {
</Alert>
) : null}
{runtimeRole === "viewer" ? (
<Alert>
<Monitor className="h-4 w-4" />
<AlertTitle></AlertTitle>
<AlertDescription>
{viewerModeLabel}
</AlertDescription>
</Alert>
) : null}
{viewerError ? (
<Alert>
<Activity className="h-4 w-4" />
<AlertTitle></AlertTitle>
<AlertDescription>{viewerError}</AlertDescription>
</Alert>
) : null}
<section className="rounded-[28px] border border-border/60 bg-[radial-gradient(circle_at_top_left,_rgba(249,115,22,0.16),_transparent_32%),linear-gradient(135deg,rgba(12,18,24,0.98),rgba(26,31,43,0.96))] p-5 text-white shadow-xl shadow-black/10 md:p-7">
<div className="flex flex-col gap-4 lg:flex-row lg:items-end lg:justify-between">
<div className="space-y-3">
@@ -1513,7 +1958,7 @@ export default function LiveCamera() {
</Badge>
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
<PlayCircle className="h-3.5 w-3.5" />
{sessionMode === "practice" ? "练习会话" : "训练 PK"}
{(runtimeRole === "viewer" ? runtimeSession?.sessionMode : sessionMode) === "practice" ? "练习会话" : "训练 PK"}
</Badge>
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
<Video className="h-3.5 w-3.5" />
@@ -1539,11 +1984,11 @@ export default function LiveCamera() {
</div>
<div className="rounded-xl bg-black/15 px-3 py-3">
<div className="text-[11px] uppercase tracking-[0.18em] text-white/45"></div>
<div className="mt-2 text-lg font-semibold text-white">{formatDuration(durationMs)}</div>
<div className="mt-2 text-lg font-semibold text-white">{formatDuration(displayDurationMs)}</div>
</div>
<div className="rounded-xl bg-black/15 px-3 py-3">
<div className="text-[11px] uppercase tracking-[0.18em] text-white/45"></div>
<div className="mt-2 text-lg font-semibold text-white">{stabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
<div className="mt-2 text-lg font-semibold text-white">{displayStabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
</div>
</div>
</div>
@@ -1563,20 +2008,40 @@ export default function LiveCamera() {
/>
<canvas
ref={canvasRef}
className={`pointer-events-none absolute inset-0 h-full w-full object-contain ${analyzing ? "" : "opacity-70"}`}
className={`pointer-events-none absolute inset-0 h-full w-full object-contain ${runtimeRole === "viewer" ? "hidden" : analyzing ? "" : "opacity-70"}`}
/>
{!cameraActive ? (
{!hasVideoFeed ? (
<div className="absolute inset-0 flex flex-col items-center justify-center gap-4 bg-[radial-gradient(circle_at_center,_rgba(249,115,22,0.12),_rgba(0,0,0,0.78))] px-6 text-center text-white/75">
<CameraOff className="h-14 w-14" />
<div className="space-y-1">
<div className="text-xl font-medium"></div>
<div className="text-sm text-white/60"></div>
<div className="text-xl font-medium">{runtimeRole === "viewer" ? "等待同步画面" : "摄像头未启动"}</div>
<div className="text-sm text-white/60">
{runtimeRole === "viewer" ? `${viewerModeLabel},当前设备只能观看同步内容。` : "先完成拍摄校准,再开启自动动作识别。"}
</div>
</div>
{runtimeRole === "viewer" ? (
<Button
data-testid="live-camera-viewer-button"
onClick={() => {
if (runtimeSession?.mediaSessionId) {
void startViewerStream(runtimeSession.mediaSessionId).catch((error: any) => {
toast.error(`同步观看连接失败: ${error?.message || "未知错误"}`);
});
}
}}
className="rounded-2xl"
disabled={!runtimeSession?.mediaSessionId}
>
<Monitor className="mr-2 h-4 w-4" />
{viewerConnected ? "重新同步" : "同步观看"}
</Button>
) : (
<Button data-testid="live-camera-start-button" onClick={() => setShowSetupGuide(true)} className="rounded-2xl">
<Camera className="mr-2 h-4 w-4" />
</Button>
)}
</div>
) : null}
@@ -1587,7 +2052,7 @@ export default function LiveCamera() {
</Badge>
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
<Target className="h-3.5 w-3.5" />
{visibleSegments.length}
{displayVisibleSegments.length}
</Badge>
{avatarEnabled ? (
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
@@ -1609,9 +2074,9 @@ export default function LiveCamera() {
</Button>
) : null}
{cameraActive && zoomState.supported ? renderZoomOverlay() : null}
{cameraActive && zoomState.supported && runtimeRole !== "viewer" ? renderZoomOverlay() : null}
{(cameraActive || saving) ? (
{(hasVideoFeed || saving) ? (
<div className="absolute bottom-3 left-3 right-20 rounded-[24px] border border-white/10 bg-black/65 px-3 py-3 text-white shadow-lg backdrop-blur-sm sm:right-[112px]">
<div className="grid gap-2 sm:grid-cols-2">
<div>
@@ -1622,14 +2087,14 @@ export default function LiveCamera() {
<div>
<div className="text-[10px] uppercase tracking-[0.18em] text-white/45"></div>
<div className="mt-1 text-sm font-semibold">
{stabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES} · {Math.round(stabilityMeta.windowShare * 100)}%
{displayStabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES} · {Math.round(displayStabilityMeta.windowShare * 100)}%
</div>
<div className="mt-1 text-xs text-white/60">
{saving
? "正在保存会话..."
: stabilityMeta.pending && pendingActionMeta
? `切换确认中 · ${pendingActionMeta.label} · ${Math.max(0, stabilityMeta.candidateMs / 1000).toFixed(1)}s`
: `已稳定 ${Math.max(0, stabilityMeta.stableMs / 1000).toFixed(1)}s · 波动 ${Math.round(stabilityMeta.rawVolatility * 100)}%`}
: displayStabilityMeta.pending && pendingActionMeta
? `切换确认中 · ${pendingActionMeta.label} · ${Math.max(0, displayStabilityMeta.candidateMs / 1000).toFixed(1)}s`
: `已稳定 ${Math.max(0, displayStabilityMeta.stableMs / 1000).toFixed(1)}s · 波动 ${Math.round(displayStabilityMeta.rawVolatility * 100)}%`}
</div>
</div>
</div>
@@ -1639,7 +2104,11 @@ export default function LiveCamera() {
<div className="border-t border-border/60 bg-card/80 p-4">
<div className="grid gap-3 md:grid-cols-[180px_minmax(0,1fr)]">
<Select value={sessionMode} onValueChange={(value) => setSessionMode(value as SessionMode)} disabled={analyzing || saving}>
<Select
value={runtimeRole === "viewer" ? (runtimeSession?.sessionMode ?? sessionMode) : sessionMode}
onValueChange={(value) => setSessionMode(value as SessionMode)}
disabled={analyzing || saving || runtimeRole === "viewer"}
>
<SelectTrigger className="h-12 rounded-2xl border-border/60">
<SelectValue />
</SelectTrigger>
@@ -1662,7 +2131,7 @@ export default function LiveCamera() {
</div>
<div className="rounded-2xl border border-border/60 bg-background/90 p-4">
<div className="text-[11px] uppercase tracking-[0.16em] text-muted-foreground"></div>
<div className="mt-2 text-lg font-semibold">{archivedVideoCount}</div>
<div className="mt-2 text-lg font-semibold">{runtimeRole === "viewer" ? (runtimeSnapshot?.archivedVideoCount ?? 0) : archivedVideoCount}</div>
<div className="mt-2 text-xs leading-5 text-muted-foreground">
</div>
@@ -1687,7 +2156,7 @@ export default function LiveCamera() {
<Switch
checked={avatarEnabled}
onCheckedChange={setAvatarEnabled}
disabled={!cameraActive && !analyzing}
disabled={runtimeRole === "viewer" || (!cameraActive && !analyzing)}
data-testid="live-camera-avatar-switch"
/>
</div>
@@ -1698,7 +2167,7 @@ export default function LiveCamera() {
</div>
<div>
<div className="mb-2 text-xs uppercase tracking-[0.18em] text-muted-foreground"></div>
<Select value={avatarKey} onValueChange={(value) => setAvatarKey(value as AvatarKey)}>
<Select value={avatarKey} onValueChange={(value) => setAvatarKey(value as AvatarKey)} disabled={runtimeRole === "viewer"}>
<SelectTrigger className="h-12 rounded-2xl border-border/60">
<SelectValue />
</SelectTrigger>
@@ -1719,6 +2188,7 @@ export default function LiveCamera() {
onChange={(event) => setAvatarPrompt(event.target.value)}
placeholder="例如 狐狸 / panda coach / BeachKing / Juanita"
className="h-12 rounded-2xl border-border/60"
disabled={runtimeRole === "viewer"}
/>
</div>
</div>
@@ -1749,7 +2219,7 @@ export default function LiveCamera() {
<div className="grid gap-3 lg:grid-cols-3">
{Object.entries(CAMERA_QUALITY_PRESETS).map(([key, preset]) => {
const active = qualityPreset === key;
const disabled = analyzing || saving;
const disabled = analyzing || saving || runtimeRole === "viewer";
return (
<button
key={key}
@@ -1840,12 +2310,12 @@ export default function LiveCamera() {
</div>
) : null}
{filteredVisibleSegments.length === 0 ? (
{displayFilteredSegments.length === 0 ? (
<div className="rounded-2xl border border-dashed border-border/60 px-4 py-8 text-center text-sm text-muted-foreground">
{runtimeRole === "viewer" ? "当前会同步最近识别到的动作片段,持有端开始分析后会自动刷新。" : "开始分析后,这里会按时间区间显示识别出的动作片段。"}
</div>
) : (
filteredVisibleSegments.map((segment) => {
displayFilteredSegments.map((segment) => {
const meta = ACTION_META[segment.actionType];
return (
<div key={`${segment.actionType}-${segment.startMs}`} className="rounded-2xl border border-border/60 bg-muted/25 p-4">
@@ -1884,25 +2354,25 @@ export default function LiveCamera() {
<CardTitle className="text-base"></CardTitle>
</CardHeader>
<CardContent className="space-y-4">
{liveScore ? (
{displayScore ? (
<>
<div className="rounded-3xl border border-border/60 bg-muted/20 p-5 text-center">
<div className="text-xs uppercase tracking-[0.18em] text-muted-foreground"></div>
<div data-testid="live-camera-score-overall" className="mt-3 text-5xl font-semibold tracking-tight">
{liveScore.overall}
{displayScore.overall}
</div>
<div className="mt-3 flex items-center justify-center gap-2">
<Badge className={heroAction.tone}>{heroAction.label}</Badge>
<Badge variant="outline"> {liveScore.confidence}%</Badge>
<Badge variant="outline"> {displayScore.confidence}%</Badge>
<Badge className={sessionBand.tone}>{sessionBand.label}</Badge>
</div>
</div>
<div className="space-y-3">
<ScoreBar label="姿态" value={liveScore.posture} accent="bg-emerald-500" />
<ScoreBar label="平衡" value={liveScore.balance} accent="bg-sky-500" />
<ScoreBar label="技术" value={liveScore.technique} accent="bg-amber-500" />
<ScoreBar label="脚步" value={liveScore.footwork} accent="bg-indigo-500" />
<ScoreBar label="连贯性" value={liveScore.consistency} accent="bg-rose-500" />
<ScoreBar label="姿态" value={displayScore.posture} accent="bg-emerald-500" />
<ScoreBar label="平衡" value={displayScore.balance} accent="bg-sky-500" />
<ScoreBar label="技术" value={displayScore.technique} accent="bg-amber-500" />
<ScoreBar label="脚步" value={displayScore.footwork} accent="bg-indigo-500" />
<ScoreBar label="连贯性" value={displayScore.consistency} accent="bg-rose-500" />
</div>
</>
) : (
@@ -1958,19 +2428,19 @@ export default function LiveCamera() {
</div>
<div className="mt-3 grid grid-cols-2 gap-2 text-xs text-muted-foreground">
<div> {rawActionMeta.label}</div>
<div> {stabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
<div> {Math.round(stabilityMeta.windowShare * 100)}%</div>
<div> {stabilityMeta.switchCount} </div>
<div> {displayStabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
<div> {Math.round(displayStabilityMeta.windowShare * 100)}%</div>
<div> {displayStabilityMeta.switchCount} </div>
</div>
<Progress value={stabilityMeta.windowProgress * 100} className="mt-3 h-2" />
<Progress value={displayStabilityMeta.windowProgress * 100} className="mt-3 h-2" />
<div className="mt-2 text-xs text-muted-foreground">
{stabilityMeta.pending && pendingActionMeta
{displayStabilityMeta.pending && pendingActionMeta
? `当前正在确认 ${pendingActionMeta.label},确认后才会切段入库。`
: "当前区间只会按稳定动作聚合,短时抖动不会直接切换动作。"}
</div>
</div>
{feedback.length > 0 ? feedback.map((item) => (
{displayFeedback.length > 0 ? displayFeedback.map((item) => (
<div key={item} className="rounded-2xl border border-border/60 bg-muted/25 px-4 py-3 text-sm">
{item}
</div>
@@ -2059,7 +2529,7 @@ export default function LiveCamera() {
<div className="grid h-full grid-cols-[minmax(0,1fr)_72px] gap-3">
<div className="relative min-h-0 overflow-hidden rounded-[32px] border border-white/10 bg-black shadow-2xl shadow-black/40">
<video ref={videoRef} className="absolute inset-0 h-full w-full object-contain" playsInline muted autoPlay />
<canvas ref={canvasRef} className="pointer-events-none absolute inset-0 h-full w-full object-contain" />
<canvas ref={canvasRef} className={`pointer-events-none absolute inset-0 h-full w-full object-contain ${runtimeRole === "viewer" ? "hidden" : ""}`} />
<div className="pointer-events-none absolute left-3 top-3 flex flex-wrap gap-2">
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
@@ -2090,12 +2560,12 @@ export default function LiveCamera() {
</div>
<div>
<div className="uppercase tracking-[0.18em] text-white/45"></div>
<div className="mt-1">{stabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
<div className="mt-1">{displayStabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
</div>
<div>
<div className="uppercase tracking-[0.18em] text-white/45"></div>
<div className="mt-1">
{stabilityMeta.pending && pendingActionMeta ? `确认 ${pendingActionMeta.label}` : "稳定跟踪中"}
{displayStabilityMeta.pending && pendingActionMeta ? `确认 ${pendingActionMeta.label}` : runtimeRole === "viewer" ? "同步观看中" : "稳定跟踪中"}
</div>
</div>
</div>
@@ -2111,7 +2581,7 @@ export default function LiveCamera() {
<Minimize2 className="h-4 w-4" />
</Button>
{cameraActive && zoomState.supported ? renderZoomOverlay() : null}
{cameraActive && zoomState.supported && runtimeRole !== "viewer" ? renderZoomOverlay() : null}
</div>
<div className="flex flex-col items-center justify-center gap-3">

查看文件

@@ -1,5 +1,37 @@
# Tennis Training Hub - 变更日志
## 2026.03.16-live-camera-multidevice-viewer (2026-03-16)
### 功能更新
- `/live-camera` 新增同账号多端 runtime 锁;一个设备开始实时分析后,其他设备不能再次启动摄像头或分析
- 其他设备会自动进入“同步观看模式”,可查看持有端同步推送的实时画面、当前动作、评分、反馈和最近动作片段
- 同步观看复用 media 服务新增的 `/viewer-signal` WebRTC 通道,直接订阅“原视频 + 骨架 + 关键点 + 虚拟形象”的合成画面
- runtime 心跳按 `sid` 维度识别持有端,兼容旧 token 缺失可选字段的情况;超过 15 秒无心跳会自动释放陈旧锁
- `/live-camera` 前端新增 owner / viewer 双模式切换,观看端会禁用镜头切换、重新校准、质量调整和分析启动
- e2e mock 新增 viewer 模式和 runtime 接口覆盖,保证浏览器测试可以直接验证多端互斥与同步观看
### 测试
- `pnpm check`
- `pnpm exec vitest run server/features.test.ts`
- `go test ./...`
- `go build ./...`
- `pnpm build`
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"`
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "recorder flow archives a session and exposes it in videos"`
- `curl -I https://te.hao.work/live-camera`
### 线上 smoke
- `https://te.hao.work/live-camera` 当前公开站点仍在旧前端构建
- 当前线上资源 revision:`assets/index-BWEXNszf.js` 与 `assets/index-BL6GQzUF.css`
- 本地本次构建产物 revision:`assets/index-BmsO49OJ.js`
### 仓库版本
- `f0bbe4c`
## 2026.03.16-live-analysis-overlay-archive (2026-03-16)
### 功能更新

查看文件

@@ -0,0 +1,17 @@
-- Per-user lock/state row for the live-camera analysis runtime.
-- One row per user (userId is UNIQUE): the device holding the analysis
-- lock ("owner") is identified by ownerSid, and other devices read this
-- row to enter viewer mode instead of starting a second analysis.
CREATE TABLE `live_analysis_runtime` (
`id` int AUTO_INCREMENT NOT NULL,
-- owning user; one row per user enforced by the unique constraint below
`userId` int NOT NULL,
-- session sid of the device currently holding the analysis lock
`ownerSid` varchar(96),
`status` enum('idle','active','ended') NOT NULL DEFAULT 'idle',
`title` varchar(256),
`sessionMode` enum('practice','pk') NOT NULL DEFAULT 'practice',
-- media-service session id that viewer devices subscribe to
`mediaSessionId` varchar(96),
`startedAt` timestamp,
`endedAt` timestamp,
-- owner heartbeat; a stale heartbeat lets the server reclaim the lock
`lastHeartbeatAt` timestamp,
-- latest runtime state (score/feedback/segments) pushed by the owner
`snapshot` json,
`createdAt` timestamp NOT NULL DEFAULT (now()),
`updatedAt` timestamp NOT NULL DEFAULT (now()) ON UPDATE CURRENT_TIMESTAMP,
CONSTRAINT `live_analysis_runtime_id` PRIMARY KEY(`id`),
CONSTRAINT `live_analysis_runtime_user_idx` UNIQUE(`userId`)
);

查看文件

@@ -16,6 +16,21 @@ export const users = mysqlTable("users", {
trainingGoals: text("trainingGoals"),
/** NTRP rating (1.0 - 5.0) */
ntrpRating: float("ntrpRating").default(1.5),
/** Manual NTRP baseline before automated rating is established */
manualNtrpRating: float("manualNtrpRating"),
manualNtrpCapturedAt: timestamp("manualNtrpCapturedAt"),
/** Training assessment profile */
heightCm: float("heightCm"),
weightKg: float("weightKg"),
sprintSpeedScore: int("sprintSpeedScore"),
explosivePowerScore: int("explosivePowerScore"),
agilityScore: int("agilityScore"),
enduranceScore: int("enduranceScore"),
flexibilityScore: int("flexibilityScore"),
coreStabilityScore: int("coreStabilityScore"),
shoulderMobilityScore: int("shoulderMobilityScore"),
hipMobilityScore: int("hipMobilityScore"),
assessmentNotes: text("assessmentNotes"),
/** Total training sessions completed */
totalSessions: int("totalSessions").default(0),
/** Total training minutes */
@@ -215,6 +230,30 @@ export const liveAnalysisSessions = mysqlTable("live_analysis_sessions", {
export type LiveAnalysisSession = typeof liveAnalysisSessions.$inferSelect;
export type InsertLiveAnalysisSession = typeof liveAnalysisSessions.$inferInsert;
/**
 * Per-user runtime state for the current live-camera analysis lock.
 *
 * The device that acquires this row ("owner", identified by ownerSid)
 * runs the camera and analysis; other devices of the same user read it
 * to enter viewer mode. One row per user, enforced by the unique index.
 */
export const liveAnalysisRuntime = mysqlTable("live_analysis_runtime", {
  id: int("id").autoincrement().primaryKey(),
  // Owning user — unique per user via live_analysis_runtime_user_idx.
  userId: int("userId").notNull(),
  // Session sid of the device currently holding the analysis lock.
  ownerSid: varchar("ownerSid", { length: 96 }),
  status: mysqlEnum("status", ["idle", "active", "ended"]).default("idle").notNull(),
  title: varchar("title", { length: 256 }),
  sessionMode: mysqlEnum("sessionMode", ["practice", "pk"]).default("practice").notNull(),
  // Media-service session id that viewer devices subscribe to.
  mediaSessionId: varchar("mediaSessionId", { length: 96 }),
  startedAt: timestamp("startedAt"),
  endedAt: timestamp("endedAt"),
  // Owner heartbeat; a stale heartbeat lets the server reclaim the lock.
  lastHeartbeatAt: timestamp("lastHeartbeatAt"),
  // Latest runtime state (score/feedback/segments) pushed by the owner.
  snapshot: json("snapshot"),
  createdAt: timestamp("createdAt").defaultNow().notNull(),
  updatedAt: timestamp("updatedAt").defaultNow().onUpdateNow().notNull(),
}, (table) => ({
  userIdUnique: uniqueIndex("live_analysis_runtime_user_idx").on(table.userId),
}));
export type LiveAnalysisRuntime = typeof liveAnalysisRuntime.$inferSelect;
export type InsertLiveAnalysisRuntime = typeof liveAnalysisRuntime.$inferInsert;
/**
* Action segments extracted from a realtime analysis session.
*/
@@ -390,15 +429,34 @@ export type InsertUserAchievement = typeof userAchievements.$inferInsert;
*/
export const tutorialVideos = mysqlTable("tutorial_videos", {
id: int("id").autoincrement().primaryKey(),
slug: varchar("slug", { length: 128 }),
title: varchar("title", { length: 256 }).notNull(),
category: varchar("category", { length: 64 }).notNull(),
skillLevel: mysqlEnum("skillLevel", ["beginner", "intermediate", "advanced"]).default("beginner"),
topicArea: varchar("topicArea", { length: 32 }).default("tennis_skill"),
contentFormat: varchar("contentFormat", { length: 16 }).default("video"),
sourcePlatform: varchar("sourcePlatform", { length: 16 }).default("none"),
description: text("description"),
heroSummary: text("heroSummary"),
keyPoints: json("keyPoints"),
commonMistakes: json("commonMistakes"),
videoUrl: text("videoUrl"),
externalUrl: text("externalUrl"),
platformVideoId: varchar("platformVideoId", { length: 64 }),
thumbnailUrl: text("thumbnailUrl"),
duration: int("duration"),
estimatedEffortMinutes: int("estimatedEffortMinutes"),
prerequisites: json("prerequisites"),
learningObjectives: json("learningObjectives"),
stepSections: json("stepSections"),
deliverables: json("deliverables"),
relatedDocPaths: json("relatedDocPaths"),
viewCount: int("viewCount"),
commentCount: int("commentCount"),
metricsFetchedAt: timestamp("metricsFetchedAt"),
completionAchievementKey: varchar("completionAchievementKey", { length: 64 }),
isFeatured: int("isFeatured").default(0),
featuredOrder: int("featuredOrder").default(0),
sortOrder: int("sortOrder").default(0),
isPublished: int("isPublished").default(1),
createdAt: timestamp("createdAt").defaultNow().notNull(),
@@ -416,6 +474,8 @@ export const tutorialProgress = mysqlTable("tutorial_progress", {
userId: int("userId").notNull(),
tutorialId: int("tutorialId").notNull(),
watched: int("watched").default(0),
completed: int("completed").default(0),
completedAt: timestamp("completedAt"),
comparisonVideoId: int("comparisonVideoId"),
selfScore: float("selfScore"),
notes: text("notes"),

查看文件

@@ -104,6 +104,7 @@ type Session struct {
PreviewUpdatedAt string `json:"previewUpdatedAt,omitempty"`
StreamConnected bool `json:"streamConnected"`
LastStreamAt string `json:"lastStreamAt,omitempty"`
ViewerCount int `json:"viewerCount"`
Playback PlaybackInfo `json:"playback"`
Segments []SegmentMeta `json:"segments"`
Markers []Marker `json:"markers"`
@@ -156,6 +157,8 @@ type sessionStore struct {
mu sync.RWMutex
sessions map[string]*Session
peers map[string]*webrtc.PeerConnection
viewerPeers map[string]map[string]*webrtc.PeerConnection
videoTracks map[string]*webrtc.TrackLocalStaticRTP
}
func newSessionStore(rootDir string) (*sessionStore, error) {
@@ -164,6 +167,8 @@ func newSessionStore(rootDir string) (*sessionStore, error) {
public: filepath.Join(rootDir, "public"),
sessions: map[string]*Session{},
peers: map[string]*webrtc.PeerConnection{},
viewerPeers: map[string]map[string]*webrtc.PeerConnection{},
videoTracks: map[string]*webrtc.TrackLocalStaticRTP{},
}
if err := os.MkdirAll(filepath.Join(rootDir, "sessions"), 0o755); err != nil {
return nil, err
@@ -294,6 +299,42 @@ func (s *sessionStore) replacePeer(id string, peer *webrtc.PeerConnection) {
s.peers[id] = peer
}
// replaceViewerPeer registers (or swaps) the WebRTC peer connection for a
// single viewer of the given session. Any previous connection registered
// under the same viewerID is closed first so a reconnecting viewer does
// not leak a peer. The session's persisted ViewerCount is refreshed.
//
// NOTE(review): existing.Close() and saveSession() both run while s.mu is
// held; if either blocks, all other store operations stall — confirm this
// is acceptable for the expected viewer counts.
func (s *sessionStore) replaceViewerPeer(sessionID string, viewerID string, peer *webrtc.PeerConnection) {
	s.mu.Lock()
	defer s.mu.Unlock()
	// Lazily create the per-session viewer map.
	if _, ok := s.viewerPeers[sessionID]; !ok {
		s.viewerPeers[sessionID] = map[string]*webrtc.PeerConnection{}
	}
	if existing, ok := s.viewerPeers[sessionID][viewerID]; ok {
		_ = existing.Close()
	}
	s.viewerPeers[sessionID][viewerID] = peer
	// Keep the persisted session's viewer count in sync with the live map.
	if session, ok := s.sessions[sessionID]; ok {
		session.ViewerCount = len(s.viewerPeers[sessionID])
		_ = s.saveSession(session)
	}
}
// removeViewerPeer drops one viewer connection from a session, closing the
// underlying peer connection. When the last viewer leaves, the per-session
// map itself is discarded. The session's persisted ViewerCount is kept in
// sync with the remaining connections.
func (s *sessionStore) removeViewerPeer(sessionID string, viewerID string) {
	s.mu.Lock()
	defer s.mu.Unlock()
	viewers, found := s.viewerPeers[sessionID]
	if !found {
		return
	}
	if peer, active := viewers[viewerID]; active {
		_ = peer.Close()
		delete(viewers, viewerID)
	}
	// Drop the empty map so abandoned sessions don't accumulate entries.
	if len(viewers) == 0 {
		delete(s.viewerPeers, sessionID)
	}
	if session, tracked := s.sessions[sessionID]; tracked {
		session.ViewerCount = len(s.viewerPeers[sessionID])
		_ = s.saveSession(session)
	}
}
func (s *sessionStore) closePeer(id string) {
s.mu.Lock()
defer s.mu.Unlock()
@@ -301,6 +342,38 @@ func (s *sessionStore) closePeer(id string) {
_ = existing.Close()
delete(s.peers, id)
}
if viewers, ok := s.viewerPeers[id]; ok {
for viewerID, peer := range viewers {
_ = peer.Close()
delete(viewers, viewerID)
}
delete(s.viewerPeers, id)
}
delete(s.videoTracks, id)
if session, ok := s.sessions[id]; ok {
session.ViewerCount = 0
_ = s.saveSession(session)
}
}
// getVideoTrack returns the fan-out RTP track for a session, or nil when
// the publisher has not produced a video track yet.
func (s *sessionStore) getVideoTrack(sessionID string) *webrtc.TrackLocalStaticRTP {
	s.mu.RLock()
	track := s.videoTracks[sessionID]
	s.mu.RUnlock()
	return track
}
// ensureVideoTrack returns the session's shared fan-out track, creating it
// on first use with the publisher's codec capability. A creation error is
// returned without caching anything, so the next call retries.
func (s *sessionStore) ensureVideoTrack(sessionID string, codec webrtc.RTPCodecCapability) (*webrtc.TrackLocalStaticRTP, error) {
	s.mu.Lock()
	defer s.mu.Unlock()
	if existing, ok := s.videoTracks[sessionID]; ok {
		return existing, nil
	}
	created, err := webrtc.NewTrackLocalStaticRTP(codec, "video", fmt.Sprintf("livecam-%s", sessionID))
	if err != nil {
		return nil, err
	}
	s.videoTracks[sessionID] = created
	return created, nil
}
func (s *sessionStore) updateSession(id string, update func(*Session) error) (*Session, error) {
@@ -419,6 +492,12 @@ func (m *mediaServer) handleSession(w http.ResponseWriter, r *http.Request) {
return
}
m.handleSignal(sessionID, w, r)
case "viewer-signal":
if r.Method != http.MethodPost {
http.NotFound(w, r)
return
}
m.handleViewerSignal(sessionID, w, r)
case "segments":
if r.Method != http.MethodPost {
http.NotFound(w, r)
@@ -509,12 +588,23 @@ func (m *mediaServer) handleSignal(sessionID string, w http.ResponseWriter, r *h
peer.OnTrack(func(track *webrtc.TrackRemote, receiver *webrtc.RTPReceiver) {
_ = receiver
go func() {
buffer := make([]byte, 1600)
for {
if _, _, readErr := track.Read(buffer); readErr != nil {
if track.Kind() != webrtc.RTPCodecTypeVideo {
return
}
localTrack, trackErr := m.store.ensureVideoTrack(sessionID, track.Codec().RTPCodecCapability)
if trackErr != nil {
log.Printf("failed to create local viewer track for session %s: %v", sessionID, trackErr)
return
}
go func() {
for {
packet, _, readErr := track.ReadRTP()
if readErr != nil {
return
}
if writeErr := localTrack.WriteRTP(packet); writeErr != nil && !errors.Is(writeErr, io.ErrClosedPipe) {
log.Printf("failed to fan out RTP packet for session %s: %v", sessionID, writeErr)
}
_, _ = m.store.updateSession(sessionID, func(session *Session) error {
session.StreamConnected = true
session.Status = StatusStreaming
@@ -556,6 +646,86 @@ func (m *mediaServer) handleSignal(sessionID string, w http.ResponseWriter, r *h
})
}
// handleViewerSignal answers a WebRTC offer from a read-only viewer of an
// existing session.
//
// Flow: decode the offer → require the session to exist → require the
// publisher's fan-out video track to already exist (otherwise 409 so the
// client can retry) → build a send-only peer carrying that shared track →
// answer once ICE gathering completes (non-trickle signaling).
//
// NOTE(review): the handler blocks on <-gatherComplete with no timeout; a
// peer that never finishes gathering would pin this HTTP request — confirm
// an upstream request timeout exists.
func (m *mediaServer) handleViewerSignal(sessionID string, w http.ResponseWriter, r *http.Request) {
	var input SignalRequest
	if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
		writeError(w, http.StatusBadRequest, "invalid request body")
		return
	}
	if _, err := m.store.getSession(sessionID); err != nil {
		writeError(w, http.StatusNotFound, err.Error())
		return
	}
	// The shared track is created by the publisher's OnTrack handler; until
	// then there is nothing to subscribe to.
	localTrack := m.store.getVideoTrack(sessionID)
	if localTrack == nil {
		writeError(w, http.StatusConflict, "viewer stream not ready")
		return
	}
	config := webrtc.Configuration{
		ICEServers: []webrtc.ICEServer{{URLs: []string{"stun:stun.l.google.com:19302"}}},
	}
	peer, err := webrtc.NewPeerConnection(config)
	if err != nil {
		writeError(w, http.StatusInternalServerError, "failed to create viewer peer connection")
		return
	}
	viewerID := randomID()
	// Register before wiring so every error path below can clean up via
	// removeViewerPeer (which also closes the peer).
	m.store.replaceViewerPeer(sessionID, viewerID, peer)
	sender, err := peer.AddTrack(localTrack)
	if err != nil {
		m.store.removeViewerPeer(sessionID, viewerID)
		writeError(w, http.StatusInternalServerError, "failed to add viewer track")
		return
	}
	// Drain incoming RTCP packets; the loop exits once the sender's
	// underlying transport closes.
	go func() {
		rtcpBuf := make([]byte, 1500)
		for {
			if _, _, readErr := sender.Read(rtcpBuf); readErr != nil {
				return
			}
		}
	}()
	// Deregister the viewer when its connection goes away.
	peer.OnConnectionStateChange(func(state webrtc.PeerConnectionState) {
		switch state {
		case webrtc.PeerConnectionStateDisconnected, webrtc.PeerConnectionStateFailed, webrtc.PeerConnectionStateClosed:
			m.store.removeViewerPeer(sessionID, viewerID)
		}
	})
	offer := webrtc.SessionDescription{
		Type: parseSDPType(input.Type),
		SDP:  input.SDP,
	}
	if err := peer.SetRemoteDescription(offer); err != nil {
		m.store.removeViewerPeer(sessionID, viewerID)
		writeError(w, http.StatusBadRequest, "failed to set remote description")
		return
	}
	answer, err := peer.CreateAnswer(nil)
	if err != nil {
		m.store.removeViewerPeer(sessionID, viewerID)
		writeError(w, http.StatusInternalServerError, "failed to create viewer answer")
		return
	}
	// Wait for ICE gathering so the answer already carries all candidates.
	gatherComplete := webrtc.GatheringCompletePromise(peer)
	if err := peer.SetLocalDescription(answer); err != nil {
		m.store.removeViewerPeer(sessionID, viewerID)
		writeError(w, http.StatusInternalServerError, "failed to set viewer local description")
		return
	}
	<-gatherComplete
	writeJSON(w, http.StatusOK, map[string]any{
		"viewerId": viewerID,
		"type":     strings.ToLower(peer.LocalDescription().Type.String()),
		"sdp":      peer.LocalDescription().SDP,
	})
}
func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
sequence, err := strconv.Atoi(r.URL.Query().Get("sequence"))
if err != nil || sequence < 0 {

查看文件

@@ -256,3 +256,25 @@ func TestHandleSessionGetRefreshesSessionStateFromDisk(t *testing.T) {
t.Fatalf("expected playback ready after refresh")
}
}
// TestViewerSignalReturnsConflictBeforePublisherTrackReady exercises the
// /viewer-signal endpoint against a freshly created session: until the
// publisher has produced the fan-out video track, a viewer offer must be
// rejected with 409 Conflict so the client knows to retry later.
func TestViewerSignalReturnsConflictBeforePublisherTrackReady(t *testing.T) {
	store, err := newSessionStore(t.TempDir())
	if err != nil {
		t.Fatalf("newSessionStore: %v", err)
	}
	server := newMediaServer(store)
	session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Viewer Pending"})
	if err != nil {
		t.Fatalf("createSession: %v", err)
	}
	// Minimal well-formed payload; the handler bails out on the missing
	// video track before the SDP would ever be used.
	req := httptest.NewRequest(http.MethodPost, "/media/sessions/"+session.ID+"/viewer-signal", strings.NewReader(`{"type":"offer","sdp":"mock-offer"}`))
	req.Header.Set("Content-Type", "application/json")
	res := httptest.NewRecorder()
	server.routes().ServeHTTP(res, req)
	if res.Code != http.StatusConflict {
		t.Fatalf("expected viewer-signal 409 before video track is ready, got %d", res.Code)
	}
}

查看文件

@@ -6,23 +6,29 @@ export type TrpcContext = {
req: CreateExpressContextOptions["req"];
res: CreateExpressContextOptions["res"];
user: User | null;
sessionSid: string | null;
};
/**
 * Builds the per-request tRPC context. Authentication is best-effort:
 * when the request carries no valid session, `user` and `sessionSid`
 * remain null so public procedures can still run.
 */
export async function createContext(
  opts: CreateExpressContextOptions
): Promise<TrpcContext> {
  let resolvedUser: User | null = null;
  let resolvedSid: string | null = null;
  try {
    const auth = await sdk.authenticateRequestWithSession(opts.req);
    resolvedUser = auth.user;
    resolvedSid = auth.sid;
  } catch (error) {
    // Authentication is optional for public procedures; fall through with
    // an anonymous context.
    resolvedUser = null;
    resolvedSid = null;
  }
  return {
    req: opts.req,
    res: opts.res,
    user: resolvedUser,
    sessionSid: resolvedSid,
  };
}

查看文件

@@ -260,7 +260,11 @@ class SDKServer {
}
  /**
   * Authenticates a request and returns only the user.
   *
   * Thin wrapper around `authenticateRequestWithSession` kept for callers
   * that do not need the session sid; it propagates whatever that method
   * throws on authentication failure.
   */
  async authenticateRequest(req: Request): Promise<User> {
    // Regular authentication flow
    const authenticated = await this.authenticateRequestWithSession(req);
    return authenticated.user;
  }
async authenticateRequestWithSession(req: Request): Promise<{ user: User; sid: string | null }> {
const cookies = this.parseCookies(req.headers.cookie);
const sessionCookie = cookies.get(COOKIE_NAME);
const session = await this.verifySession(sessionCookie);
@@ -273,7 +277,6 @@ class SDKServer {
const signedInAt = new Date();
let user = await db.getUserByOpenId(sessionUserId);
// If user not in DB, sync from OAuth server automatically
if (!user) {
try {
const userInfo = await this.getUserInfoWithJwt(sessionCookie ?? "");
@@ -300,7 +303,10 @@ class SDKServer {
lastSignedIn: signedInAt,
});
return user;
return {
user,
sid: session.sid ?? null,
};
}
}

查看文件

@@ -8,6 +8,7 @@ import {
poseAnalyses, InsertPoseAnalysis,
trainingRecords, InsertTrainingRecord,
liveAnalysisSessions, InsertLiveAnalysisSession,
liveAnalysisRuntime, InsertLiveAnalysisRuntime,
liveActionSegments, InsertLiveActionSegment,
dailyTrainingAggregates, InsertDailyTrainingAggregate,
ratingHistory, InsertRatingHistory,
@@ -32,6 +33,7 @@ import { fetchTutorialMetrics, shouldRefreshTutorialMetrics } from "./tutorialMe
let _db: ReturnType<typeof drizzle> | null = null;
const APP_TIMEZONE = process.env.TZ || "Asia/Shanghai";
export const LIVE_ANALYSIS_RUNTIME_TIMEOUT_MS = 15_000;
function getDateFormatter() {
return new Intl.DateTimeFormat("en-CA", {
@@ -888,6 +890,140 @@ export async function createLiveAnalysisSession(session: InsertLiveAnalysisSessi
return result[0].insertId;
}
/**
 * Loads the live-analysis runtime row for a user. Returns undefined when
 * the database is unavailable or when the user has no runtime row yet.
 */
export async function getUserLiveAnalysisRuntime(userId: number) {
  const db = await getDb();
  if (!db) {
    return undefined;
  }
  const rows = await db
    .select()
    .from(liveAnalysisRuntime)
    .where(eq(liveAnalysisRuntime.userId, userId))
    .limit(1);
  return rows[0];
}
/**
 * Creates or updates the per-user live-analysis runtime row.
 *
 * For an existing row, only fields present in `patch` are changed. All
 * nullable columns now use an explicit `=== undefined` check — matching
 * `updateUserLiveAnalysisRuntime` — so callers can clear a value by
 * passing null. The previous `??` handling for `ownerSid`/`title`
 * silently kept the old value when null was passed, which made the two
 * functions behave differently for the same patch. `status` and
 * `sessionMode` are non-nullable enum columns, so `??` remains correct
 * for them.
 *
 * Returns the row as re-read from the database; throws when the database
 * is unavailable.
 */
export async function upsertUserLiveAnalysisRuntime(
  userId: number,
  patch: Omit<InsertLiveAnalysisRuntime, "id" | "createdAt" | "updatedAt" | "userId">,
) {
  const db = await getDb();
  if (!db) throw new Error("Database not available");
  const existing = await getUserLiveAnalysisRuntime(userId);
  if (existing) {
    await db.update(liveAnalysisRuntime)
      .set({
        ownerSid: patch.ownerSid === undefined ? existing.ownerSid : patch.ownerSid,
        status: patch.status ?? existing.status,
        title: patch.title === undefined ? existing.title : patch.title,
        sessionMode: patch.sessionMode ?? existing.sessionMode,
        mediaSessionId: patch.mediaSessionId === undefined ? existing.mediaSessionId : patch.mediaSessionId,
        startedAt: patch.startedAt === undefined ? existing.startedAt : patch.startedAt,
        endedAt: patch.endedAt === undefined ? existing.endedAt : patch.endedAt,
        lastHeartbeatAt: patch.lastHeartbeatAt === undefined ? existing.lastHeartbeatAt : patch.lastHeartbeatAt,
        snapshot: patch.snapshot === undefined ? existing.snapshot : patch.snapshot,
      })
      .where(eq(liveAnalysisRuntime.userId, userId));
    return getUserLiveAnalysisRuntime(userId);
  }
  // Insert path: normalize absent optional fields to explicit defaults.
  const result = await db.insert(liveAnalysisRuntime).values({
    userId,
    ownerSid: patch.ownerSid ?? null,
    status: patch.status ?? "idle",
    title: patch.title ?? null,
    sessionMode: patch.sessionMode ?? "practice",
    mediaSessionId: patch.mediaSessionId ?? null,
    startedAt: patch.startedAt ?? null,
    endedAt: patch.endedAt ?? null,
    lastHeartbeatAt: patch.lastHeartbeatAt ?? null,
    snapshot: patch.snapshot ?? null,
  });
  const runtimeId = result[0].insertId;
  const rows = await db.select().from(liveAnalysisRuntime).where(eq(liveAnalysisRuntime.id, runtimeId)).limit(1);
  return rows[0];
}
/**
 * Partially updates the runtime row for a user. Fields omitted from
 * `patch` keep their current value; nullable fields can be cleared by
 * passing null. Returns the refreshed row, or undefined when no row
 * exists; throws when the database is unavailable.
 */
export async function updateUserLiveAnalysisRuntime(
  userId: number,
  patch: Partial<Omit<InsertLiveAnalysisRuntime, "id" | "createdAt" | "updatedAt" | "userId">>,
) {
  const db = await getDb();
  if (!db) throw new Error("Database not available");
  const existing = await getUserLiveAnalysisRuntime(userId);
  if (!existing) return undefined;
  // "undefined means keep current value" — centralized for readability.
  const keep = <T>(next: T | undefined, current: T): T =>
    next === undefined ? current : next;
  await db.update(liveAnalysisRuntime)
    .set({
      ownerSid: keep(patch.ownerSid, existing.ownerSid),
      status: patch.status ?? existing.status,
      title: keep(patch.title, existing.title),
      sessionMode: patch.sessionMode ?? existing.sessionMode,
      mediaSessionId: keep(patch.mediaSessionId, existing.mediaSessionId),
      startedAt: keep(patch.startedAt, existing.startedAt),
      endedAt: keep(patch.endedAt, existing.endedAt),
      lastHeartbeatAt: keep(patch.lastHeartbeatAt, existing.lastHeartbeatAt),
      snapshot: keep(patch.snapshot, existing.snapshot),
    })
    .where(eq(liveAnalysisRuntime.userId, userId));
  return getUserLiveAnalysisRuntime(userId);
}
/**
 * Records an owner heartbeat for an active runtime.
 *
 * Validates that the caller's sid still owns the runtime and that it is
 * active; otherwise returns undefined so the router can reject the
 * heartbeat. The UPDATE's WHERE clause now re-asserts ownerSid in
 * addition to userId + runtimeId: previously a lock takeover between the
 * read and the write could let a stale owner refresh the new owner's
 * heartbeat. Throws when the database is unavailable.
 */
export async function updateLiveAnalysisRuntimeHeartbeat(input: {
  userId: number;
  ownerSid: string;
  runtimeId: number;
  mediaSessionId?: string | null;
  snapshot?: unknown;
}) {
  const db = await getDb();
  if (!db) throw new Error("Database not available");
  const existing = await getUserLiveAnalysisRuntime(input.userId);
  if (!existing || existing.id !== input.runtimeId || existing.ownerSid !== input.ownerSid || existing.status !== "active") {
    return undefined;
  }
  await db.update(liveAnalysisRuntime)
    .set({
      mediaSessionId: input.mediaSessionId === undefined ? existing.mediaSessionId : input.mediaSessionId,
      snapshot: input.snapshot === undefined ? existing.snapshot : input.snapshot,
      lastHeartbeatAt: new Date(),
      // An active heartbeat implies the runtime has not ended.
      endedAt: null,
    })
    .where(and(
      eq(liveAnalysisRuntime.userId, input.userId),
      eq(liveAnalysisRuntime.id, input.runtimeId),
      // Guard against a concurrent takeover between the read above and
      // this write.
      eq(liveAnalysisRuntime.ownerSid, input.ownerSid),
    ));
  return getUserLiveAnalysisRuntime(input.userId);
}
export async function endUserLiveAnalysisRuntime(input: {
userId: number;
ownerSid?: string | null;
runtimeId?: number;
snapshot?: unknown;
}) {
const db = await getDb();
if (!db) throw new Error("Database not available");
const existing = await getUserLiveAnalysisRuntime(input.userId);
if (!existing) return undefined;
if (input.runtimeId != null && existing.id !== input.runtimeId) return undefined;
if (input.ownerSid != null && existing.ownerSid !== input.ownerSid) return undefined;
await db.update(liveAnalysisRuntime)
.set({
status: "ended",
mediaSessionId: null,
endedAt: new Date(),
snapshot: input.snapshot === undefined ? existing.snapshot : input.snapshot,
})
.where(eq(liveAnalysisRuntime.userId, input.userId));
return getUserLiveAnalysisRuntime(input.userId);
}
export async function createLiveActionSegments(segments: InsertLiveActionSegment[]) {
const db = await getDb();
if (!db || segments.length === 0) return;

查看文件

@@ -45,7 +45,7 @@ function createTestUser(overrides?: Partial<AuthenticatedUser>): AuthenticatedUs
};
}
function createMockContext(user: AuthenticatedUser | null = null): {
function createMockContext(user: AuthenticatedUser | null = null, sessionSid = "test-session-sid"): {
ctx: TrpcContext;
clearedCookies: { name: string; options: Record<string, unknown> }[];
setCookies: { name: string; value: string; options: Record<string, unknown> }[];
@@ -56,6 +56,7 @@ function createMockContext(user: AuthenticatedUser | null = null): {
return {
ctx: {
user,
sessionSid: user ? sessionSid : null,
req: {
protocol: "https",
headers: {},
@@ -1296,6 +1297,161 @@ describe("analysis.liveSessionSave", () => {
});
});
// Covers the /live-camera multi-device runtime lock: acquiring owner mode,
// falling back to viewer mode when another sid holds the lock, owner
// re-acquisition by the same sid, and sid-based rejection of heartbeat /
// release calls from non-owner devices. All db-layer functions are mocked.
describe("analysis.runtime", () => {
  afterEach(() => {
    vi.restoreAllMocks();
  });
  // No runtime row yet → first device to call runtimeAcquire becomes the
  // owner and an "active" row is upserted with its sid.
  it("acquires owner mode when runtime is idle", async () => {
    const user = createTestUser({ id: 7 });
    const { ctx } = createMockContext(user, "sid-owner");
    const caller = appRouter.createCaller(ctx);
    vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(undefined);
    const upsertSpy = vi.spyOn(db, "upsertUserLiveAnalysisRuntime").mockResolvedValueOnce({
      id: 11,
      userId: 7,
      ownerSid: "sid-owner",
      status: "active",
      title: "实时分析 正手",
      sessionMode: "practice",
      mediaSessionId: null,
      startedAt: new Date(),
      endedAt: null,
      lastHeartbeatAt: new Date(),
      snapshot: null,
      createdAt: new Date(),
      updatedAt: new Date(),
    } as any);
    const result = await caller.analysis.runtimeAcquire({
      title: "实时分析 正手",
      sessionMode: "practice",
    });
    expect(upsertSpy).toHaveBeenCalledWith(7, expect.objectContaining({
      ownerSid: "sid-owner",
      status: "active",
      title: "实时分析 正手",
      sessionMode: "practice",
    }));
    expect(result.role).toBe("owner");
    expect((result.runtimeSession as any)?.ownerSid).toBe("sid-owner");
  });
  // A fresh (non-stale) active runtime held by a different sid demotes the
  // caller to viewer and exposes the media session id to subscribe to.
  it("returns viewer mode when another session sid already holds the runtime", async () => {
    const user = createTestUser({ id: 7 });
    const { ctx } = createMockContext(user, "sid-viewer");
    const caller = appRouter.createCaller(ctx);
    const activeRuntime = {
      id: 15,
      userId: 7,
      ownerSid: "sid-owner",
      status: "active",
      title: "实时分析 练习",
      sessionMode: "pk",
      mediaSessionId: "media-sync-1",
      startedAt: new Date(),
      endedAt: null,
      lastHeartbeatAt: new Date(),
      snapshot: { phase: "analyzing" },
      createdAt: new Date(),
      updatedAt: new Date(),
    };
    vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(activeRuntime as any);
    const result = await caller.analysis.runtimeAcquire({
      title: "实时分析 练习",
      sessionMode: "pk",
    });
    expect(result.role).toBe("viewer");
    expect((result.runtimeSession as any)?.mediaSessionId).toBe("media-sync-1");
  });
  // Re-acquiring with the owning sid keeps ownership and refreshes the
  // stored title via update rather than a second upsert.
  it("keeps owner mode when the same sid reacquires the runtime", async () => {
    const user = createTestUser({ id: 7 });
    const { ctx } = createMockContext(user, "sid-owner");
    const caller = appRouter.createCaller(ctx);
    const activeRuntime = {
      id: 19,
      userId: 7,
      ownerSid: "sid-owner",
      status: "active",
      title: "旧标题",
      sessionMode: "practice",
      mediaSessionId: "media-sync-2",
      startedAt: new Date("2026-03-16T00:00:00.000Z"),
      endedAt: null,
      lastHeartbeatAt: new Date(),
      snapshot: { phase: "analyzing" },
      createdAt: new Date(),
      updatedAt: new Date(),
    };
    vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(activeRuntime as any);
    const updateSpy = vi.spyOn(db, "updateUserLiveAnalysisRuntime").mockResolvedValueOnce({
      ...activeRuntime,
      title: "新标题",
    } as any);
    const result = await caller.analysis.runtimeAcquire({
      title: "新标题",
      sessionMode: "practice",
    });
    expect(updateSpy).toHaveBeenCalledWith(7, expect.objectContaining({
      ownerSid: "sid-owner",
      title: "新标题",
      status: "active",
    }));
    expect(result.role).toBe("owner");
  });
  // The db layer returns undefined for a heartbeat from a non-owner sid
  // and the router surfaces it as an error.
  it("rejects heartbeat from a non-owner sid", async () => {
    const user = createTestUser({ id: 7 });
    const { ctx } = createMockContext(user, "sid-viewer");
    const caller = appRouter.createCaller(ctx);
    vi.spyOn(db, "updateLiveAnalysisRuntimeHeartbeat").mockResolvedValueOnce(undefined);
    await expect(caller.analysis.runtimeHeartbeat({
      runtimeId: 20,
      mediaSessionId: "media-sync-3",
      snapshot: { phase: "analyzing" },
    })).rejects.toThrow("当前设备不是实时分析持有端");
  });
  // Release is likewise owner-only; a viewer sid cannot end the runtime.
  it("rejects release from a non-owner sid", async () => {
    const user = createTestUser({ id: 7 });
    const { ctx } = createMockContext(user, "sid-viewer");
    const caller = appRouter.createCaller(ctx);
    vi.spyOn(db, "endUserLiveAnalysisRuntime").mockResolvedValueOnce(undefined);
    vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce({
      id: 23,
      userId: 7,
      ownerSid: "sid-owner",
      status: "active",
      title: "实时分析",
      sessionMode: "practice",
      mediaSessionId: "media-sync-4",
      startedAt: new Date(),
      endedAt: null,
      lastHeartbeatAt: new Date(),
      snapshot: null,
      createdAt: new Date(),
      updatedAt: new Date(),
    } as any);
    await expect(caller.analysis.runtimeRelease({
      runtimeId: 23,
      snapshot: { phase: "failed" },
    })).rejects.toThrow("当前设备不是实时分析持有端");
  });
});
describe("rating.refreshMine", () => {
afterEach(() => {
vi.restoreAllMocks();

查看文件

@@ -73,6 +73,67 @@ const trainingProfileUpdateSchema = z.object({
assessmentNotes: z.string().max(2000).nullable().optional(),
});
// One recently archived action segment mirrored inside the live snapshot.
const liveRuntimeSegmentSchema = z.object({
  actionType: z.string(),
  isUnknown: z.boolean().optional(),
  startMs: z.number(),
  endMs: z.number(),
  durationMs: z.number(),
  confidenceAvg: z.number().optional(),
  score: z.number().optional(),
  clipLabel: z.string().optional(),
});

// Snapshot payload the owner device heartbeats up and viewers read back.
// Every field is optional, and `.passthrough()` keeps unrecognized extra
// keys intact so older/newer clients can round-trip each other's snapshots.
const liveRuntimeSnapshotSchema = z.object({
  phase: z.enum(["idle", "analyzing", "saving", "safe", "failed"]).optional(),
  startedAt: z.number().optional(),
  durationMs: z.number().optional(),
  currentAction: z.string().optional(),
  rawAction: z.string().optional(),
  feedback: z.array(z.string()).optional(),
  liveScore: z.record(z.string(), z.number()).nullable().optional(),
  stabilityMeta: z.record(z.string(), z.any()).optional(),
  visibleSegments: z.number().optional(),
  unknownSegments: z.number().optional(),
  archivedVideoCount: z.number().optional(),
  recentSegments: z.array(liveRuntimeSegmentSchema).optional(),
}).passthrough();
/**
 * Resolve the sid used to identify which device owns the live-analysis runtime.
 * Tokens minted before session sids existed have no sid, so we fall back to a
 * stable per-user pseudo-sid (an empty-string sid also takes the fallback).
 */
function getRuntimeOwnerSid(ctx: { sessionSid: string | null; user: { openId: string } }) {
  const { sessionSid, user } = ctx;
  if (sessionSid) {
    return sessionSid;
  }
  return `legacy:${user.openId}`;
}
/**
 * Classify the calling device's relationship to the user's live-analysis
 * runtime lock: "owner" (its sid holds the active runtime), "viewer"
 * (another sid holds it), or "idle" (no active runtime).
 *
 * An "active" runtime whose last heartbeat is older than
 * db.LIVE_ANALYSIS_RUNTIME_TIMEOUT_MS is treated as stale: it is ended
 * (preserving its last snapshot) and reported as idle so another device
 * can acquire ownership.
 */
async function resolveLiveRuntimeRole(params: {
  userId: number;
  sessionSid: string;
}) {
  const runtime = await db.getUserLiveAnalysisRuntime(params.userId);
  if (!runtime) {
    return { role: "idle" as const, runtimeSession: null };
  }
  // Best-available liveness signal: heartbeat, else row update, else start time.
  const heartbeatAt = runtime.lastHeartbeatAt ?? runtime.updatedAt ?? runtime.startedAt;
  const isStale =
    runtime.status === "active" &&
    (!heartbeatAt || Date.now() - heartbeatAt.getTime() > db.LIVE_ANALYSIS_RUNTIME_TIMEOUT_MS);
  if (isStale) {
    // Reap the orphaned runtime; the original code assigned the result back
    // into `runtime` (as `?? null as any`) but never read it before returning,
    // so the assignment was dead code — just await the cleanup.
    await db.endUserLiveAnalysisRuntime({
      userId: params.userId,
      runtimeId: runtime.id,
      snapshot: runtime.snapshot,
    });
    return { role: "idle" as const, runtimeSession: null };
  }
  if (runtime.status !== "active") {
    return { role: "idle" as const, runtimeSession: runtime };
  }
  return {
    role: runtime.ownerSid === params.sessionSid ? "owner" as const : "viewer" as const,
    runtimeSession: runtime,
  };
}
export const appRouter = router({
system: systemRouter,
@@ -455,6 +516,122 @@ export const appRouter = router({
return { session, segments };
}),
// Report whether this device is the runtime owner, a sync viewer, or idle.
runtimeGet: protectedProcedure.query(async ({ ctx }) =>
  resolveLiveRuntimeRole({
    userId: ctx.user.id,
    sessionSid: getRuntimeOwnerSid(ctx),
  })
),
// Acquire (or retain) ownership of the per-user live-analysis runtime lock.
// If another device already holds an active runtime, this does NOT take over:
// the caller simply receives the current viewer-role state.
runtimeAcquire: protectedProcedure
.input(z.object({
title: z.string().min(1).max(256),
sessionMode: z.enum(["practice", "pk"]).default("practice"),
}))
.mutation(async ({ ctx, input }) => {
const sessionSid = getRuntimeOwnerSid(ctx);
const current = await resolveLiveRuntimeRole({
userId: ctx.user.id,
sessionSid,
});
// Another sid owns an active runtime: return the viewer state unchanged.
if (current.role === "viewer" && current.runtimeSession?.status === "active") {
return current;
}
// Already the owner of an active runtime: refresh title/mode/heartbeat in
// place, keeping the original startedAt when available. Otherwise upsert a
// brand-new runtime row seeded with an empty "idle" snapshot.
const runtime = current.runtimeSession?.status === "active" && current.role === "owner"
? await db.updateUserLiveAnalysisRuntime(ctx.user.id, {
ownerSid: sessionSid,
status: "active",
title: input.title,
sessionMode: input.sessionMode,
startedAt: current.runtimeSession.startedAt ?? new Date(),
endedAt: null,
lastHeartbeatAt: new Date(),
})
: await db.upsertUserLiveAnalysisRuntime(ctx.user.id, {
ownerSid: sessionSid,
status: "active",
title: input.title,
sessionMode: input.sessionMode,
// media session is attached later via runtimeHeartbeat
mediaSessionId: null,
startedAt: new Date(),
endedAt: null,
lastHeartbeatAt: new Date(),
snapshot: {
phase: "idle",
startedAt: Date.now(),
durationMs: 0,
currentAction: "unknown",
rawAction: "unknown",
feedback: [],
visibleSegments: 0,
unknownSegments: 0,
archivedVideoCount: 0,
recentSegments: [],
},
});
return {
role: "owner" as const,
runtimeSession: runtime ?? null,
};
}),
// Owner-only heartbeat: keeps the runtime lock fresh and mirrors the latest
// media session id + analysis snapshot for sync viewers.
runtimeHeartbeat: protectedProcedure
.input(z.object({
runtimeId: z.number(),
mediaSessionId: z.string().max(96).nullable().optional(),
snapshot: liveRuntimeSnapshotSchema.optional(),
}))
.mutation(async ({ ctx, input }) => {
// NOTE(review): the db helper presumably matches on ownerSid + runtimeId;
// a null result is treated as "this device is not the owner" — confirm.
const updated = await db.updateLiveAnalysisRuntimeHeartbeat({
userId: ctx.user.id,
ownerSid: getRuntimeOwnerSid(ctx),
runtimeId: input.runtimeId,
mediaSessionId: input.mediaSessionId,
snapshot: input.snapshot,
});
if (!updated) {
throw new TRPCError({ code: "FORBIDDEN", message: "当前设备不是实时分析持有端" });
}
return {
role: "owner" as const,
runtimeSession: updated,
};
}),
// Release the runtime lock. Idempotent for the owner (and for anyone when no
// active runtime exists); rejected only when another device still owns it.
runtimeRelease: protectedProcedure
.input(z.object({
runtimeId: z.number().optional(),
snapshot: liveRuntimeSnapshotSchema.optional(),
}).optional())
.mutation(async ({ ctx, input }) => {
const sessionSid = getRuntimeOwnerSid(ctx);
const ended = await db.endUserLiveAnalysisRuntime({
userId: ctx.user.id,
ownerSid: sessionSid,
runtimeId: input?.runtimeId,
snapshot: input?.snapshot,
});
if (!ended) {
// Nothing matched this sid — only fail if an active runtime is held
// by a different sid; otherwise treat the release as a no-op success.
const current = await db.getUserLiveAnalysisRuntime(ctx.user.id);
const ownedByAnotherDevice = current?.status === "active" && current.ownerSid !== sessionSid;
if (ownedByAnotherDevice) {
throw new TRPCError({ code: "FORBIDDEN", message: "当前设备不是实时分析持有端" });
}
}
return {
success: true,
runtimeSession: ended ?? null,
};
}),
// Generate AI correction suggestions
getCorrections: protectedProcedure
.input(z.object({

查看文件

@@ -68,6 +68,16 @@ test("live camera starts analysis and produces scores", async ({ page }) => {
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
});
test("live camera switches into viewer mode when another device already owns analysis", async ({ page }) => {
await installAppMocks(page, { authenticated: true, liveViewerMode: true });
await page.goto("/live-camera");
await expect(page.getByText("同步观看模式")).toBeVisible();
await expect(page.getByText(/同步观看|重新同步/).first()).toBeVisible();
await expect(page.getByText("当前设备已锁定为观看模式")).toBeVisible();
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
});
test("live camera archives overlay videos into the library after analysis stops", async ({ page }) => {
await installAppMocks(page, { authenticated: true, videos: [] });

查看文件

@@ -49,6 +49,7 @@ type MockMediaSession = {
uploadedBytes: number;
durationMs: number;
streamConnected: boolean;
viewerCount?: number;
playback: {
webmUrl?: string;
mp4Url?: string;
@@ -92,6 +93,10 @@ type MockAppState = {
adjustmentNotes: string | null;
} | null;
mediaSession: MockMediaSession | null;
liveRuntime: {
role: "idle" | "owner" | "viewer";
runtimeSession: any | null;
};
nextVideoId: number;
nextTaskId: number;
authMeNullResponsesAfterLogin: number;
@@ -428,6 +433,50 @@ async function handleTrpc(route: Route, state: MockAppState) {
return trpcResult(state.analyses);
case "analysis.liveSessionList":
return trpcResult([]);
case "analysis.runtimeGet":
return trpcResult(state.liveRuntime);
case "analysis.runtimeAcquire":
if (state.liveRuntime.runtimeSession?.status === "active" && state.liveRuntime.role === "viewer") {
return trpcResult(state.liveRuntime);
}
state.liveRuntime = {
role: "owner",
runtimeSession: {
id: 501,
title: "实时分析 正手",
sessionMode: "practice",
mediaSessionId: state.mediaSession?.id || null,
status: "active",
startedAt: nowIso(),
endedAt: null,
lastHeartbeatAt: nowIso(),
snapshot: {
phase: "analyzing",
currentAction: "forehand",
rawAction: "forehand",
visibleSegments: 1,
unknownSegments: 0,
durationMs: 1500,
feedback: ["节奏稳定"],
},
},
};
return trpcResult(state.liveRuntime);
case "analysis.runtimeHeartbeat": {
const input = await readTrpcInput(route, operationIndex);
if (state.liveRuntime.runtimeSession) {
state.liveRuntime.runtimeSession = {
...state.liveRuntime.runtimeSession,
mediaSessionId: input?.mediaSessionId ?? state.liveRuntime.runtimeSession.mediaSessionId,
snapshot: input?.snapshot ?? state.liveRuntime.runtimeSession.snapshot,
lastHeartbeatAt: nowIso(),
};
}
return trpcResult(state.liveRuntime);
}
case "analysis.runtimeRelease":
state.liveRuntime = { role: "idle", runtimeSession: null };
return trpcResult({ success: true, runtimeSession: null });
case "analysis.liveSessionSave":
return trpcResult({ sessionId: 1, trainingRecordId: 1 });
case "task.list":
@@ -594,6 +643,12 @@ async function handleMedia(route: Route, state: MockAppState) {
return;
}
if (path.endsWith("/viewer-signal")) {
state.mediaSession.viewerCount = (state.mediaSession.viewerCount || 0) + 1;
await fulfillJson(route, { viewerId: `viewer-${state.mediaSession.viewerCount}`, type: "answer", sdp: "mock-answer" });
return;
}
if (path.endsWith("/segments")) {
const buffer = (await route.request().postDataBuffer()) || Buffer.from("");
state.mediaSession.uploadedSegments += 1;
@@ -658,8 +713,10 @@ export async function installAppMocks(
analyses?: any[];
userName?: string;
authMeNullResponsesAfterLogin?: number;
liveViewerMode?: boolean;
}
) {
const seededViewerSession = options?.liveViewerMode ? buildMediaSession(buildUser(options?.userName), "其他设备实时分析") : null;
const state: MockAppState = {
authenticated: options?.authenticated ?? false,
user: buildUser(options?.userName),
@@ -693,7 +750,70 @@ export async function installAppMocks(
],
tasks: [],
activePlan: null,
mediaSession: null,
mediaSession: seededViewerSession,
liveRuntime: options?.liveViewerMode
? {
role: "viewer",
runtimeSession: {
id: 777,
title: "其他设备实时分析",
sessionMode: "practice",
mediaSessionId: seededViewerSession?.id || null,
status: "active",
startedAt: nowIso(),
endedAt: null,
lastHeartbeatAt: nowIso(),
snapshot: {
phase: "analyzing",
currentAction: "forehand",
rawAction: "forehand",
durationMs: 3200,
visibleSegments: 2,
unknownSegments: 0,
archivedVideoCount: 1,
feedback: ["同步观看测试数据"],
liveScore: {
overall: 82,
posture: 80,
balance: 78,
technique: 84,
footwork: 76,
consistency: 79,
confidence: 88,
},
stabilityMeta: {
windowFrames: 24,
windowShare: 1,
windowProgress: 1,
switchCount: 1,
stableMs: 1800,
rawVolatility: 0.12,
pending: false,
candidateMs: 0,
},
recentSegments: [
{
actionType: "forehand",
isUnknown: false,
startMs: 800,
endMs: 2800,
durationMs: 2000,
confidenceAvg: 0.82,
score: 84,
peakScore: 88,
frameCount: 24,
issueSummary: ["击球点略靠后"],
keyFrames: [1000, 1800, 2600],
clipLabel: "正手挥拍 00:00 - 00:02",
},
],
},
},
}
: {
role: "idle",
runtimeSession: null,
},
nextVideoId: 100,
nextTaskId: 1,
authMeNullResponsesAfterLogin: options?.authMeNullResponsesAfterLogin ?? 0,