feat: add live camera multi-device viewer mode

这个提交包含在:
cryptocommuniums-afk
2026-03-16 16:39:14 +08:00
父节点 f0bbe4c82f
当前提交 4e4122d758
修改 15 个文件,包含 1523 行新增、110 行删除

查看文件

@@ -8,6 +8,28 @@ export type ChangeLogEntry = {
};
export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
{
version: "2026.03.16-live-camera-multidevice-viewer",
releaseDate: "2026-03-16",
repoVersion: "f0bbe4c",
summary: "实时分析新增同账号多端互斥和同步观看模式,分析持有端独占摄像头,其它端只能查看同步画面与核心识别结果。",
features: [
"同一账号在 `/live-camera` 进入实时分析后,会写入按用户维度的 runtime 锁,其他设备不能重复启动摄像头或分析",
"其他设备会自动进入“同步观看模式”,可订阅持有端的实时画面,并同步看到动作、评分、反馈、最近片段和归档段数",
"同步观看复用 media 服务的 WebRTC viewer 通道,传输的是带骨架、关键点和虚拟形象覆盖后的合成画面",
"runtime 锁按 session sid 区分持有端,兼容缺少 sid 的旧 token,超过 15 秒无心跳会自动判定为陈旧并释放",
"线上 smoke 已确认 `https://te.hao.work/live-camera` 当前仍在旧前端构建,公开站点资源 revision 还是 `index-BWEXNszf.js` / `index-BL6GQzUF.css`,本地新构建产物为 `index-BmsO49OJ.js`",
],
tests: [
"pnpm check",
"pnpm exec vitest run server/features.test.ts",
"go test ./... && go build ./... (media)",
"pnpm build",
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"live camera\"",
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"recorder flow archives a session and exposes it in videos\"",
"curl -I https://te.hao.work/live-camera",
],
},
{
version: "2026.03.16-live-analysis-overlay-archive",
releaseDate: "2026-03-16",

查看文件

@@ -50,6 +50,7 @@ export type MediaSession = {
previewUpdatedAt?: string;
streamConnected: boolean;
lastStreamAt?: string;
viewerCount?: number;
playback: {
webmUrl?: string;
mp4Url?: string;
@@ -122,6 +123,14 @@ export async function signalMediaSession(sessionId: string, payload: { sdp: stri
});
}
/**
 * Exchange viewer-side WebRTC SDP with the media service for an existing session.
 * Posts the local offer/answer payload and resolves with the assigned viewer id
 * plus the remote SDP returned by the server.
 */
export async function signalMediaViewerSession(sessionId: string, payload: { sdp: string; type: string }) {
const endpoint = `/sessions/${sessionId}/viewer-signal`;
const init = {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(payload),
};
return request<{ viewerId: string; sdp: string; type: string }>(endpoint, init);
}
export async function uploadMediaSegment(
sessionId: string,
sequence: number,

查看文件

@@ -1,5 +1,10 @@
import { useAuth } from "@/_core/hooks/useAuth";
import { trpc } from "@/lib/trpc";
import {
createMediaSession,
signalMediaSession,
signalMediaViewerSession,
} from "@/lib/media";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
@@ -110,6 +115,35 @@ type AnalyzedFrame = {
feedback: string[];
};
type RuntimeRole = "idle" | "owner" | "viewer";
type RuntimeSnapshot = {
phase?: "idle" | "analyzing" | "saving" | "safe" | "failed";
startedAt?: number;
durationMs?: number;
currentAction?: ActionType;
rawAction?: ActionType;
feedback?: string[];
liveScore?: PoseScore | null;
stabilityMeta?: Partial<StabilizedActionMeta>;
visibleSegments?: number;
unknownSegments?: number;
archivedVideoCount?: number;
recentSegments?: ActionSegment[];
};
type RuntimeSession = {
id: number;
title: string | null;
sessionMode: SessionMode;
mediaSessionId: string | null;
status: "idle" | "active" | "ended";
startedAt: string | null;
endedAt: string | null;
lastHeartbeatAt: string | null;
snapshot: RuntimeSnapshot | null;
};
const ACTION_META: Record<ActionType, { label: string; tone: string; accent: string }> = {
forehand: { label: "正手挥拍", tone: "bg-emerald-500/10 text-emerald-700", accent: "bg-emerald-500" },
backhand: { label: "反手挥拍", tone: "bg-sky-500/10 text-sky-700", accent: "bg-sky-500" },
@@ -150,6 +184,23 @@ const CAMERA_QUALITY_PRESETS: Record<CameraQualityPreset, { label: string; subti
},
};
/**
 * Resolve once the peer's ICE gathering reaches "complete".
 *
 * ICE gathering can stall indefinitely on some networks, and the original
 * implementation would then leave callers (broadcast/viewer signaling)
 * awaiting forever. `timeoutMs` bounds the wait: after it elapses we resolve
 * with whatever candidates have been gathered so far, which is the standard
 * fallback for non-trickle SDP exchange.
 *
 * @param peer      The RTCPeerConnection whose gathering state is observed.
 * @param timeoutMs Upper bound on the wait before resolving anyway (default 2s).
 */
function waitForIceGathering(peer: RTCPeerConnection, timeoutMs = 2000) {
if (peer.iceGatheringState === "complete") {
return Promise.resolve();
}
return new Promise<void>((resolve) => {
let timer: ReturnType<typeof setTimeout> | undefined;
const handleStateChange = () => {
if (peer.iceGatheringState === "complete") {
finish();
}
};
// Single exit path: clear the fallback timer, detach the listener, resolve.
const finish = () => {
if (timer !== undefined) {
clearTimeout(timer);
}
peer.removeEventListener("icegatheringstatechange", handleStateChange);
resolve();
};
peer.addEventListener("icegatheringstatechange", handleStateChange);
// Fallback so signaling never hangs if "complete" is never reported.
timer = setTimeout(finish, timeoutMs);
});
}
/** Constrain `value` to the inclusive range [min, max]. */
function clamp(value: number, min: number, max: number) {
const upperBounded = Math.min(max, value);
return Math.max(min, upperBounded);
}
@@ -484,7 +535,7 @@ function getSessionBand(input: { overallScore: number; knownRatio: number; effec
}
export default function LiveCamera() {
useAuth();
const { user } = useAuth();
const utils = trpc.useUtils();
const mobile = useMemo(() => isMobileDevice(), []);
@@ -493,6 +544,13 @@ export default function LiveCamera() {
const streamRef = useRef<MediaStream | null>(null);
const poseRef = useRef<any>(null);
const compositeCanvasRef = useRef<HTMLCanvasElement | null>(null);
const broadcastPeerRef = useRef<RTCPeerConnection | null>(null);
const broadcastStreamRef = useRef<MediaStream | null>(null);
const broadcastSessionIdRef = useRef<string | null>(null);
const viewerPeerRef = useRef<RTCPeerConnection | null>(null);
const viewerSessionIdRef = useRef<string | null>(null);
const runtimeIdRef = useRef<number | null>(null);
const heartbeatTimerRef = useRef<number>(0);
const recorderRef = useRef<MediaRecorder | null>(null);
const recorderStreamRef = useRef<MediaStream | null>(null);
const recorderMimeTypeRef = useRef("video/webm");
@@ -513,6 +571,14 @@ export default function LiveCamera() {
const segmentsRef = useRef<ActionSegment[]>([]);
const frameSamplesRef = useRef<PoseScore[]>([]);
const volatilitySamplesRef = useRef<number[]>([]);
const currentActionRef = useRef<ActionType>("unknown");
const rawActionRef = useRef<ActionType>("unknown");
const liveScoreRef = useRef<PoseScore | null>(null);
const feedbackRef = useRef<string[]>([]);
const durationMsRef = useRef(0);
const leaveStatusRef = useRef<"idle" | "analyzing" | "saving" | "safe" | "failed">("idle");
const sessionModeRef = useRef<SessionMode>("practice");
const stabilityMetaRef = useRef<StabilizedActionMeta>(createEmptyStabilizedActionMeta());
const zoomTargetRef = useRef(1);
const avatarRenderRef = useRef<AvatarRenderState>({
enabled: false,
@@ -543,6 +609,8 @@ export default function LiveCamera() {
const [avatarKey, setAvatarKey] = useState<AvatarKey>("gorilla");
const [avatarPrompt, setAvatarPrompt] = useState("");
const [archivedVideoCount, setArchivedVideoCount] = useState(0);
const [viewerConnected, setViewerConnected] = useState(false);
const [viewerError, setViewerError] = useState("");
const resolvedAvatarKey = useMemo(
() => resolveAvatarKeyFromPrompt(avatarPrompt, avatarKey),
@@ -562,6 +630,17 @@ export default function LiveCamera() {
},
});
const liveSessionsQuery = trpc.analysis.liveSessionList.useQuery({ limit: 8 });
const runtimeQuery = trpc.analysis.runtimeGet.useQuery(undefined, {
refetchInterval: 1000,
refetchIntervalInBackground: true,
});
const runtimeAcquireMutation = trpc.analysis.runtimeAcquire.useMutation();
const runtimeHeartbeatMutation = trpc.analysis.runtimeHeartbeat.useMutation();
const runtimeReleaseMutation = trpc.analysis.runtimeRelease.useMutation();
const runtimeRole = (runtimeQuery.data?.role ?? "idle") as RuntimeRole;
const runtimeSession = (runtimeQuery.data?.runtimeSession ?? null) as RuntimeSession | null;
const runtimeSnapshot = runtimeSession?.snapshot ?? null;
useEffect(() => {
avatarRenderRef.current = {
@@ -571,6 +650,38 @@ export default function LiveCamera() {
};
}, [avatarEnabled, avatarPrompt, resolvedAvatarKey]);
useEffect(() => {
currentActionRef.current = currentAction;
}, [currentAction]);
useEffect(() => {
rawActionRef.current = rawAction;
}, [rawAction]);
useEffect(() => {
liveScoreRef.current = liveScore;
}, [liveScore]);
useEffect(() => {
feedbackRef.current = feedback;
}, [feedback]);
useEffect(() => {
durationMsRef.current = durationMs;
}, [durationMs]);
useEffect(() => {
leaveStatusRef.current = leaveStatus;
}, [leaveStatus]);
useEffect(() => {
sessionModeRef.current = sessionMode;
}, [sessionMode]);
useEffect(() => {
stabilityMetaRef.current = stabilityMeta;
}, [stabilityMeta]);
const visibleSegments = useMemo(
() => segments.filter((segment) => !segment.isUnknown).sort((a, b) => b.startMs - a.startMs),
[segments],
@@ -580,10 +691,20 @@ export default function LiveCamera() {
() => segmentFilter === "all" ? visibleSegments : visibleSegments.filter((segment) => segment.actionType === segmentFilter),
[segmentFilter, visibleSegments],
);
const viewerRecentSegments = useMemo(
() => (runtimeSnapshot?.recentSegments ?? []).filter((segment) => !segment.isUnknown),
[runtimeSnapshot?.recentSegments],
);
const displayVisibleSegments = runtimeRole === "viewer" ? viewerRecentSegments : visibleSegments;
const displayFilteredSegments = runtimeRole === "viewer"
? (segmentFilter === "all"
? viewerRecentSegments
: viewerRecentSegments.filter((segment) => segment.actionType === segmentFilter))
: filteredVisibleSegments;
const actionStats = useMemo(() => {
const totals = new Map<ActionType, { count: number; durationMs: number; averageScore: number; averageConfidence: number }>();
visibleSegments.forEach((segment) => {
displayVisibleSegments.forEach((segment) => {
const current = totals.get(segment.actionType) ?? {
count: 0,
durationMs: 0,
@@ -599,7 +720,7 @@ export default function LiveCamera() {
});
});
const totalDuration = Math.max(1, visibleSegments.reduce((sum, segment) => sum + segment.durationMs, 0));
const totalDuration = Math.max(1, displayVisibleSegments.reduce((sum, segment) => sum + segment.durationMs, 0));
return Array.from(totals.entries())
.map(([actionType, value]) => ({
actionType,
@@ -607,22 +728,25 @@ export default function LiveCamera() {
sharePct: Math.round((value.durationMs / totalDuration) * 100),
}))
.sort((a, b) => b.durationMs - a.durationMs);
}, [visibleSegments]);
}, [displayVisibleSegments]);
const bestSegment = useMemo(
() => visibleSegments.reduce<ActionSegment | null>((best, segment) => {
() => displayVisibleSegments.reduce<ActionSegment | null>((best, segment) => {
if (!best) return segment;
return segment.score > best.score ? segment : best;
}, null),
[visibleSegments],
[displayVisibleSegments],
);
const knownRatio = segments.length > 0 ? visibleSegments.length / segments.length : 0;
const totalDisplaySegments = runtimeRole === "viewer"
? (runtimeSnapshot?.visibleSegments ?? displayVisibleSegments.length) + (runtimeSnapshot?.unknownSegments ?? 0)
: segments.length;
const knownRatio = totalDisplaySegments > 0 ? displayVisibleSegments.length / totalDisplaySegments : 0;
const sessionBand = useMemo(
() => getSessionBand({
overallScore: liveScore?.overall || 0,
overallScore: (runtimeRole === "viewer" ? runtimeSnapshot?.liveScore?.overall : liveScore?.overall) || 0,
knownRatio,
effectiveSegments: visibleSegments.length,
effectiveSegments: displayVisibleSegments.length,
}),
[knownRatio, liveScore?.overall, visibleSegments.length],
[displayVisibleSegments.length, knownRatio, liveScore?.overall, runtimeRole, runtimeSnapshot?.liveScore?.overall],
);
useEffect(() => {
@@ -730,6 +854,189 @@ export default function LiveCamera() {
await recorderUploadQueueRef.current;
}, []);
// Capture the owner's current analysis state as a serializable snapshot that
// viewer devices render. Values are read from refs rather than React state so
// interval/heartbeat callbacks always see the latest data instead of a stale
// render closure. `phase` overrides the stored leave status (e.g. to report
// "safe"/"failed" at release time).
const buildRuntimeSnapshot = useCallback((phase?: RuntimeSnapshot["phase"]): RuntimeSnapshot => ({
phase: phase ?? leaveStatusRef.current,
startedAt: sessionStartedAtRef.current || undefined,
durationMs: durationMsRef.current,
currentAction: currentActionRef.current,
rawAction: rawActionRef.current,
feedback: feedbackRef.current,
liveScore: liveScoreRef.current,
stabilityMeta: stabilityMetaRef.current,
// Counts only — viewers derive the known-action ratio from these without
// shipping the full segment list.
visibleSegments: segmentsRef.current.filter((segment) => !segment.isUnknown).length,
unknownSegments: segmentsRef.current.filter((segment) => segment.isUnknown).length,
archivedVideoCount: archivedVideosRef.current.length,
// Only the last five segments are synced to keep the payload small.
recentSegments: segmentsRef.current.slice(-5),
}), []);
// Tear down the owner-side WebRTC broadcast: forget the media session id,
// close the peer connection, and stop the canvas-capture tracks.
const closeBroadcastPeer = useCallback(() => {
broadcastSessionIdRef.current = null;
if (broadcastPeerRef.current) {
// Detach the handler first so close() cannot fire a state-change callback.
broadcastPeerRef.current.onconnectionstatechange = null;
broadcastPeerRef.current.close();
broadcastPeerRef.current = null;
}
if (broadcastStreamRef.current) {
// Stop every captured track so the composite-canvas capture is released.
broadcastStreamRef.current.getTracks().forEach((track) => track.stop());
broadcastStreamRef.current = null;
}
}, []);
// Tear down the viewer-side WebRTC connection and reset the connected flag.
// Handlers are detached before close() so no late callbacks fire.
const closeViewerPeer = useCallback(() => {
viewerSessionIdRef.current = null;
if (viewerPeerRef.current) {
viewerPeerRef.current.ontrack = null;
viewerPeerRef.current.onconnectionstatechange = null;
viewerPeerRef.current.close();
viewerPeerRef.current = null;
}
setViewerConnected(false);
}, []);
// Release the per-user runtime lock, attaching a final snapshot tagged with
// the terminal `phase`. Network failures are deliberately swallowed: the
// server-side stale-heartbeat timeout will reclaim the lock on its own.
const releaseRuntime = useCallback(async (phase: RuntimeSnapshot["phase"]) => {
if (!runtimeIdRef.current) return;
try {
await runtimeReleaseMutation.mutateAsync({
runtimeId: runtimeIdRef.current,
snapshot: buildRuntimeSnapshot(phase),
});
} catch {
// Ignore runtime release errors and let the server-side stale timeout recover.
} finally {
// Always drop local ownership state and stop the heartbeat interval, even
// if the release request failed.
runtimeIdRef.current = null;
broadcastSessionIdRef.current = null;
if (heartbeatTimerRef.current) {
window.clearInterval(heartbeatTimerRef.current);
heartbeatTimerRef.current = 0;
}
void runtimeQuery.refetch();
}
}, [buildRuntimeSnapshot, runtimeQuery, runtimeReleaseMutation]);
// Start (or restart) the 1s heartbeat loop that keeps the runtime lock fresh
// and pushes the latest snapshot — and optionally the broadcast media session
// id — so other devices can attach to the synced stream.
const startRuntimeHeartbeatLoop = useCallback((mediaSessionId?: string | null) => {
if (!runtimeIdRef.current) return;
if (typeof mediaSessionId === "string") {
broadcastSessionIdRef.current = mediaSessionId;
}
// Replace any previous interval so at most one heartbeat loop runs.
if (heartbeatTimerRef.current) {
window.clearInterval(heartbeatTimerRef.current);
heartbeatTimerRef.current = 0;
}
const sendHeartbeat = () => {
// Guard: ownership may have been released between ticks.
if (!runtimeIdRef.current) return;
runtimeHeartbeatMutation.mutate({
runtimeId: runtimeIdRef.current,
mediaSessionId: broadcastSessionIdRef.current,
snapshot: buildRuntimeSnapshot(),
});
};
// Send one heartbeat immediately, then every second.
sendHeartbeat();
heartbeatTimerRef.current = window.setInterval(sendHeartbeat, 1000);
}, [buildRuntimeSnapshot, runtimeHeartbeatMutation]);
// Owner side: create a media session and push the composite canvas (video +
// overlays) to the media service over WebRTC so other devices can watch.
// Returns the new media session id. Throws if the user is not ready or the
// browser lacks canvas captureStream support.
const startBroadcastSession = useCallback(async () => {
if (!user?.id) {
throw new Error("当前用户信息未就绪");
}
const compositeCanvas = ensureCompositeCanvas();
// captureStream is not available in every browser; fail fast with a clear error.
if (!compositeCanvas || typeof compositeCanvas.captureStream !== "function") {
throw new Error("当前浏览器不支持同步观看推流");
}
// Paint one frame first so the captured stream does not start blank.
renderCompositeFrame();
const sessionResponse = await createMediaSession({
userId: String(user.id),
title: `实时分析同步 ${formatDateTimeShanghai(new Date(), {
year: undefined,
second: undefined,
})}`,
format: "webm",
mimeType: "video/webm",
qualityPreset,
facingMode: facing,
deviceKind: mobile ? "mobile" : "desktop",
});
const sessionId = sessionResponse.session.id;
// Lower capture rate on mobile to save CPU/bandwidth.
const stream = compositeCanvas.captureStream(mobile ? 24 : 30);
broadcastStreamRef.current = stream;
const peer = new RTCPeerConnection({
iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
});
broadcastPeerRef.current = peer;
stream.getTracks().forEach((track) => peer.addTrack(track, stream));
// Non-trickle exchange: wait for ICE gathering before posting the offer.
const offer = await peer.createOffer();
await peer.setLocalDescription(offer);
await waitForIceGathering(peer);
const answer = await signalMediaSession(sessionId, {
sdp: peer.localDescription?.sdp || "",
type: peer.localDescription?.type || "offer",
});
await peer.setRemoteDescription({
type: answer.type as RTCSdpType,
sdp: answer.sdp,
});
return sessionId;
}, [ensureCompositeCanvas, facing, mobile, qualityPreset, renderCompositeFrame, user?.id]);
// Viewer side: open a recv-only WebRTC connection to the owner's broadcast
// and route the incoming track into the main <video> element.
// NOTE(review): the refs below are set before signaling completes, so if the
// exchange throws, the early-return guard may suppress retries until
// closeViewerPeer runs — confirm this is intended.
const startViewerStream = useCallback(async (mediaSessionId: string) => {
// Already connected (or connecting) to this session — nothing to do.
if (viewerSessionIdRef.current === mediaSessionId && viewerPeerRef.current) {
return;
}
closeViewerPeer();
setViewerError("");
const peer = new RTCPeerConnection({
iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
});
viewerPeerRef.current = peer;
viewerSessionIdRef.current = mediaSessionId;
// Receive-only: the viewer never sends media.
peer.addTransceiver("video", { direction: "recvonly" });
peer.ontrack = (event) => {
const nextStream = event.streams[0] ?? new MediaStream([event.track]);
if (videoRef.current) {
videoRef.current.srcObject = nextStream;
// Autoplay may be blocked; swallow the rejection silently.
void videoRef.current.play().catch(() => undefined);
}
setViewerConnected(true);
};
peer.onconnectionstatechange = () => {
if (peer.connectionState === "failed" || peer.connectionState === "closed" || peer.connectionState === "disconnected") {
setViewerConnected(false);
}
};
// Non-trickle exchange: gather ICE before posting the offer to the server.
const offer = await peer.createOffer();
await peer.setLocalDescription(offer);
await waitForIceGathering(peer);
const answer = await signalMediaViewerSession(mediaSessionId, {
sdp: peer.localDescription?.sdp || "",
type: peer.localDescription?.type || "offer",
});
await peer.setRemoteDescription({
type: answer.type as RTCSdpType,
sdp: answer.sdp,
});
}, [closeViewerPeer]);
const stopCamera = useCallback(() => {
if (animationRef.current) {
cancelAnimationFrame(animationRef.current);
@@ -742,11 +1049,12 @@ export default function LiveCamera() {
analyzingRef.current = false;
setAnalyzing(false);
void stopSessionRecorder();
const localStream = streamRef.current;
if (streamRef.current) {
streamRef.current.getTracks().forEach((track) => track.stop());
streamRef.current = null;
}
if (videoRef.current) {
if (videoRef.current && localStream && videoRef.current.srcObject === localStream) {
videoRef.current.srcObject = null;
}
actionHistoryRef.current = [];
@@ -762,11 +1070,36 @@ export default function LiveCamera() {
setCameraActive(false);
}, [stopSessionRecorder]);
useEffect(() => {
if (runtimeRole === "viewer" && cameraActive) {
stopCamera();
}
}, [cameraActive, runtimeRole, stopCamera]);
useEffect(() => {
if (runtimeRole !== "viewer" || !runtimeSession?.mediaSessionId) {
if (!cameraActive) {
closeViewerPeer();
}
setViewerError("");
return;
}
void startViewerStream(runtimeSession.mediaSessionId).catch((error: any) => {
const message = error?.message || "同步画面连接失败";
if (!/409/.test(message)) {
setViewerError(message);
}
});
}, [cameraActive, closeViewerPeer, runtimeRole, runtimeSession?.mediaSessionId, startViewerStream]);
useEffect(() => {
return () => {
stopCamera();
closeBroadcastPeer();
closeViewerPeer();
};
}, [stopCamera]);
}, [closeBroadcastPeer, closeViewerPeer, stopCamera]);
const syncZoomState = useCallback(async (preferredZoom?: number, providedTrack?: MediaStreamTrack | null) => {
const track = providedTrack || streamRef.current?.getVideoTracks()[0] || null;
@@ -813,6 +1146,10 @@ export default function LiveCamera() {
preferredZoom = zoomTargetRef.current,
preset: CameraQualityPreset = qualityPreset,
) => {
if (runtimeRole === "viewer") {
toast.error("当前账号已有其他设备正在实时分析,请切换到同步观看模式");
return;
}
try {
if (streamRef.current) {
streamRef.current.getTracks().forEach((track) => track.stop());
@@ -835,7 +1172,7 @@ export default function LiveCamera() {
} catch (error: any) {
toast.error(`摄像头启动失败: ${error?.message || "未知错误"}`);
}
}, [facing, mobile, qualityPreset, syncZoomState]);
}, [facing, mobile, qualityPreset, runtimeRole, syncZoomState]);
const switchCamera = useCallback(async () => {
const nextFacing: CameraFacing = facing === "user" ? "environment" : "user";
@@ -1065,32 +1402,55 @@ export default function LiveCamera() {
return;
}
if (analyzingRef.current || saving) return;
analyzingRef.current = true;
setAnalyzing(true);
setSaving(false);
setLeaveStatus("analyzing");
setSegments([]);
segmentsRef.current = [];
currentSegmentRef.current = null;
trackingRef.current = {};
actionHistoryRef.current = [];
stableActionStateRef.current = createStableActionState();
frameSamplesRef.current = [];
volatilitySamplesRef.current = [];
archivedVideosRef.current = [];
recorderSequenceRef.current = 0;
setArchivedVideoCount(0);
sessionStartedAtRef.current = Date.now();
setCurrentAction("unknown");
setRawAction("unknown");
setLiveScore(null);
setFeedback([]);
setStabilityMeta(createEmptyStabilizedActionMeta());
setDurationMs(0);
startSessionRecorder();
if (runtimeRole === "viewer") {
toast.error("当前设备处于同步观看模式,不能重复开启分析");
return;
}
try {
const title = `实时分析 ${ACTION_META[currentActionRef.current].label}`;
const runtime = await runtimeAcquireMutation.mutateAsync({
title,
sessionMode,
});
if (runtime.role === "viewer") {
runtimeIdRef.current = null;
toast.error("同一账号已有其他设备正在实时分析,本机已切换为同步观看模式");
await runtimeQuery.refetch();
return;
}
runtimeIdRef.current = runtime.runtimeSession?.id ?? null;
setViewerError("");
analyzingRef.current = true;
setAnalyzing(true);
setSaving(false);
setLeaveStatus("analyzing");
setSegments([]);
segmentsRef.current = [];
currentSegmentRef.current = null;
trackingRef.current = {};
actionHistoryRef.current = [];
stableActionStateRef.current = createStableActionState();
frameSamplesRef.current = [];
volatilitySamplesRef.current = [];
archivedVideosRef.current = [];
recorderSequenceRef.current = 0;
setArchivedVideoCount(0);
sessionStartedAtRef.current = Date.now();
setCurrentAction("unknown");
setRawAction("unknown");
setLiveScore(null);
setFeedback([]);
setStabilityMeta(createEmptyStabilizedActionMeta());
setDurationMs(0);
startSessionRecorder();
const mediaSessionId = await startBroadcastSession();
startRuntimeHeartbeatLoop(mediaSessionId);
const testFactory = (
window as typeof window & {
__TEST_MEDIAPIPE_FACTORY__?: () => Promise<{ Pose: any }>;
@@ -1182,9 +1542,25 @@ export default function LiveCamera() {
setAnalyzing(false);
setLeaveStatus("idle");
await stopSessionRecorder();
closeBroadcastPeer();
await releaseRuntime("failed");
toast.error(`实时分析启动失败: ${error?.message || "未知错误"}`);
}
}, [appendFrameToSegment, cameraActive, renderCompositeFrame, saving, startSessionRecorder, stopSessionRecorder]);
}, [
appendFrameToSegment,
cameraActive,
closeBroadcastPeer,
releaseRuntime,
runtimeAcquireMutation,
runtimeQuery,
runtimeRole,
saving,
sessionMode,
startBroadcastSession,
startRuntimeHeartbeatLoop,
startSessionRecorder,
stopSessionRecorder,
]);
const stopAnalysis = useCallback(async () => {
if (!analyzingRef.current) return;
@@ -1192,6 +1568,7 @@ export default function LiveCamera() {
setAnalyzing(false);
setSaving(true);
setLeaveStatus("saving");
let releasePhase: RuntimeSnapshot["phase"] = "safe";
if (animationRef.current) {
cancelAnimationFrame(animationRef.current);
@@ -1205,15 +1582,19 @@ export default function LiveCamera() {
}
await persistSession();
setLeaveStatus("safe");
releasePhase = "safe";
toast.success(`实时分析已保存,并同步写入训练记录${archivedVideosRef.current.length > 0 ? `;已归档 ${archivedVideosRef.current.length} 段分析录像` : ""}`);
await liveSessionsQuery.refetch();
} catch (error: any) {
setLeaveStatus("failed");
releasePhase = "failed";
toast.error(`保存实时分析失败: ${error?.message || "未知错误"}`);
} finally {
closeBroadcastPeer();
await releaseRuntime(releasePhase);
setSaving(false);
}
}, [liveSessionsQuery, persistSession]);
}, [closeBroadcastPeer, liveSessionsQuery, persistSession, releaseRuntime]);
useEffect(() => {
if (!analyzing && !saving) {
@@ -1235,26 +1616,72 @@ export default function LiveCamera() {
await startCamera(facing, zoomTargetRef.current, qualityPreset);
}, [facing, qualityPreset, startCamera]);
const heroAction = ACTION_META[currentAction];
const rawActionMeta = ACTION_META[rawAction];
const pendingActionMeta = stabilityMeta.pendingAction ? ACTION_META[stabilityMeta.pendingAction] : null;
const displayAction = runtimeRole === "viewer" ? (runtimeSnapshot?.currentAction ?? "unknown") : currentAction;
const displayRawAction = runtimeRole === "viewer" ? (runtimeSnapshot?.rawAction ?? "unknown") : rawAction;
const displayScore = runtimeRole === "viewer" ? (runtimeSnapshot?.liveScore ?? null) : liveScore;
const displayFeedback = runtimeRole === "viewer" ? (runtimeSnapshot?.feedback ?? []) : feedback;
const displayDurationMs = runtimeRole === "viewer" ? (runtimeSnapshot?.durationMs ?? 0) : durationMs;
const displayStabilityMeta = runtimeRole === "viewer"
? {
...createEmptyStabilizedActionMeta(),
...runtimeSnapshot?.stabilityMeta,
}
: stabilityMeta;
const hasVideoFeed = cameraActive || viewerConnected;
const heroAction = ACTION_META[displayAction];
const rawActionMeta = ACTION_META[displayRawAction];
const pendingActionMeta = displayStabilityMeta.pendingAction ? ACTION_META[displayStabilityMeta.pendingAction] : null;
const resolvedAvatarPreset = getAvatarPreset(resolvedAvatarKey);
const resolvedAvatarLabel = resolvedAvatarPreset?.label || "猩猩";
const animalAvatarPresets = AVATAR_PRESETS.filter((preset) => preset.category === "animal");
const fullBodyAvatarPresets = AVATAR_PRESETS.filter((preset) => preset.category === "full-body-3d");
const previewTitle = analyzing
? stabilityMeta.pending && pendingActionMeta
? `${pendingActionMeta.label} 切换确认中`
: `${heroAction.label} 识别中`
: cameraActive
? "准备开始实时分析"
: "摄像头待启动";
const previewTitle = runtimeRole === "viewer"
? viewerConnected
? "同步观看中"
: "正在连接同步画面"
: analyzing
? displayStabilityMeta.pending && pendingActionMeta
? `${pendingActionMeta.label} 切换确认中`
: `${heroAction.label} 识别中`
: cameraActive
? "准备开始实时分析"
: "摄像头待启动";
const viewerModeLabel = runtimeSession?.title || "其他设备正在实时分析";
const renderPrimaryActions = (rail = false) => {
const buttonClass = rail
? "h-14 w-14 rounded-2xl border border-white/10 bg-white/10 text-white hover:bg-white/20"
: "h-11 rounded-2xl px-4";
if (runtimeRole === "viewer") {
return (
<>
<Button
variant={rail ? "secondary" : "default"}
className={buttonClass}
onClick={() => {
if (runtimeSession?.mediaSessionId) {
void startViewerStream(runtimeSession.mediaSessionId).catch((error: any) => {
toast.error(`同步观看连接失败: ${error?.message || "未知错误"}`);
});
}
}}
disabled={!runtimeSession?.mediaSessionId}
>
<Monitor className={rail ? "h-5 w-5" : "mr-2 h-4 w-4"} />
{!rail && (viewerConnected ? "重新同步" : "同步观看")}
</Button>
{!rail ? (
<Button variant="outline" className={buttonClass} disabled>
<CameraOff className="mr-2 h-4 w-4" />
</Button>
) : null}
</>
);
}
if (!cameraActive) {
return (
<Button
@@ -1491,6 +1918,24 @@ export default function LiveCamera() {
</Alert>
) : null}
{runtimeRole === "viewer" ? (
<Alert>
<Monitor className="h-4 w-4" />
<AlertTitle></AlertTitle>
<AlertDescription>
{viewerModeLabel}
</AlertDescription>
</Alert>
) : null}
{viewerError ? (
<Alert>
<Activity className="h-4 w-4" />
<AlertTitle></AlertTitle>
<AlertDescription>{viewerError}</AlertDescription>
</Alert>
) : null}
<section className="rounded-[28px] border border-border/60 bg-[radial-gradient(circle_at_top_left,_rgba(249,115,22,0.16),_transparent_32%),linear-gradient(135deg,rgba(12,18,24,0.98),rgba(26,31,43,0.96))] p-5 text-white shadow-xl shadow-black/10 md:p-7">
<div className="flex flex-col gap-4 lg:flex-row lg:items-end lg:justify-between">
<div className="space-y-3">
@@ -1513,7 +1958,7 @@ export default function LiveCamera() {
</Badge>
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
<PlayCircle className="h-3.5 w-3.5" />
{sessionMode === "practice" ? "练习会话" : "训练 PK"}
{(runtimeRole === "viewer" ? runtimeSession?.sessionMode : sessionMode) === "practice" ? "练习会话" : "训练 PK"}
</Badge>
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
<Video className="h-3.5 w-3.5" />
@@ -1539,11 +1984,11 @@ export default function LiveCamera() {
</div>
<div className="rounded-xl bg-black/15 px-3 py-3">
<div className="text-[11px] uppercase tracking-[0.18em] text-white/45"></div>
<div className="mt-2 text-lg font-semibold text-white">{formatDuration(durationMs)}</div>
<div className="mt-2 text-lg font-semibold text-white">{formatDuration(displayDurationMs)}</div>
</div>
<div className="rounded-xl bg-black/15 px-3 py-3">
<div className="text-[11px] uppercase tracking-[0.18em] text-white/45"></div>
<div className="mt-2 text-lg font-semibold text-white">{stabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
<div className="mt-2 text-lg font-semibold text-white">{displayStabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
</div>
</div>
</div>
@@ -1563,20 +2008,40 @@ export default function LiveCamera() {
/>
<canvas
ref={canvasRef}
className={`pointer-events-none absolute inset-0 h-full w-full object-contain ${analyzing ? "" : "opacity-70"}`}
className={`pointer-events-none absolute inset-0 h-full w-full object-contain ${runtimeRole === "viewer" ? "hidden" : analyzing ? "" : "opacity-70"}`}
/>
{!cameraActive ? (
{!hasVideoFeed ? (
<div className="absolute inset-0 flex flex-col items-center justify-center gap-4 bg-[radial-gradient(circle_at_center,_rgba(249,115,22,0.12),_rgba(0,0,0,0.78))] px-6 text-center text-white/75">
<CameraOff className="h-14 w-14" />
<div className="space-y-1">
<div className="text-xl font-medium"></div>
<div className="text-sm text-white/60"></div>
<div className="text-xl font-medium">{runtimeRole === "viewer" ? "等待同步画面" : "摄像头未启动"}</div>
<div className="text-sm text-white/60">
{runtimeRole === "viewer" ? `${viewerModeLabel},当前设备只能观看同步内容。` : "先完成拍摄校准,再开启自动动作识别。"}
</div>
</div>
<Button data-testid="live-camera-start-button" onClick={() => setShowSetupGuide(true)} className="rounded-2xl">
<Camera className="mr-2 h-4 w-4" />
</Button>
{runtimeRole === "viewer" ? (
<Button
data-testid="live-camera-viewer-button"
onClick={() => {
if (runtimeSession?.mediaSessionId) {
void startViewerStream(runtimeSession.mediaSessionId).catch((error: any) => {
toast.error(`同步观看连接失败: ${error?.message || "未知错误"}`);
});
}
}}
className="rounded-2xl"
disabled={!runtimeSession?.mediaSessionId}
>
<Monitor className="mr-2 h-4 w-4" />
{viewerConnected ? "重新同步" : "同步观看"}
</Button>
) : (
<Button data-testid="live-camera-start-button" onClick={() => setShowSetupGuide(true)} className="rounded-2xl">
<Camera className="mr-2 h-4 w-4" />
</Button>
)}
</div>
) : null}
@@ -1587,7 +2052,7 @@ export default function LiveCamera() {
</Badge>
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
<Target className="h-3.5 w-3.5" />
{visibleSegments.length}
{displayVisibleSegments.length}
</Badge>
{avatarEnabled ? (
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
@@ -1609,9 +2074,9 @@ export default function LiveCamera() {
</Button>
) : null}
{cameraActive && zoomState.supported ? renderZoomOverlay() : null}
{cameraActive && zoomState.supported && runtimeRole !== "viewer" ? renderZoomOverlay() : null}
{(cameraActive || saving) ? (
{(hasVideoFeed || saving) ? (
<div className="absolute bottom-3 left-3 right-20 rounded-[24px] border border-white/10 bg-black/65 px-3 py-3 text-white shadow-lg backdrop-blur-sm sm:right-[112px]">
<div className="grid gap-2 sm:grid-cols-2">
<div>
@@ -1622,14 +2087,14 @@ export default function LiveCamera() {
<div>
<div className="text-[10px] uppercase tracking-[0.18em] text-white/45"></div>
<div className="mt-1 text-sm font-semibold">
{stabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES} · {Math.round(stabilityMeta.windowShare * 100)}%
{displayStabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES} · {Math.round(displayStabilityMeta.windowShare * 100)}%
</div>
<div className="mt-1 text-xs text-white/60">
{saving
? "正在保存会话..."
: stabilityMeta.pending && pendingActionMeta
? `切换确认中 · ${pendingActionMeta.label} · ${Math.max(0, stabilityMeta.candidateMs / 1000).toFixed(1)}s`
: `已稳定 ${Math.max(0, stabilityMeta.stableMs / 1000).toFixed(1)}s · 波动 ${Math.round(stabilityMeta.rawVolatility * 100)}%`}
: displayStabilityMeta.pending && pendingActionMeta
? `切换确认中 · ${pendingActionMeta.label} · ${Math.max(0, displayStabilityMeta.candidateMs / 1000).toFixed(1)}s`
: `已稳定 ${Math.max(0, displayStabilityMeta.stableMs / 1000).toFixed(1)}s · 波动 ${Math.round(displayStabilityMeta.rawVolatility * 100)}%`}
</div>
</div>
</div>
@@ -1639,7 +2104,11 @@ export default function LiveCamera() {
<div className="border-t border-border/60 bg-card/80 p-4">
<div className="grid gap-3 md:grid-cols-[180px_minmax(0,1fr)]">
<Select value={sessionMode} onValueChange={(value) => setSessionMode(value as SessionMode)} disabled={analyzing || saving}>
<Select
value={runtimeRole === "viewer" ? (runtimeSession?.sessionMode ?? sessionMode) : sessionMode}
onValueChange={(value) => setSessionMode(value as SessionMode)}
disabled={analyzing || saving || runtimeRole === "viewer"}
>
<SelectTrigger className="h-12 rounded-2xl border-border/60">
<SelectValue />
</SelectTrigger>
@@ -1662,7 +2131,7 @@ export default function LiveCamera() {
</div>
<div className="rounded-2xl border border-border/60 bg-background/90 p-4">
<div className="text-[11px] uppercase tracking-[0.16em] text-muted-foreground"></div>
<div className="mt-2 text-lg font-semibold">{archivedVideoCount}</div>
<div className="mt-2 text-lg font-semibold">{runtimeRole === "viewer" ? (runtimeSnapshot?.archivedVideoCount ?? 0) : archivedVideoCount}</div>
<div className="mt-2 text-xs leading-5 text-muted-foreground">
</div>
@@ -1687,7 +2156,7 @@ export default function LiveCamera() {
<Switch
checked={avatarEnabled}
onCheckedChange={setAvatarEnabled}
disabled={!cameraActive && !analyzing}
disabled={runtimeRole === "viewer" || (!cameraActive && !analyzing)}
data-testid="live-camera-avatar-switch"
/>
</div>
@@ -1698,7 +2167,7 @@ export default function LiveCamera() {
</div>
<div>
<div className="mb-2 text-xs uppercase tracking-[0.18em] text-muted-foreground"></div>
<Select value={avatarKey} onValueChange={(value) => setAvatarKey(value as AvatarKey)}>
<Select value={avatarKey} onValueChange={(value) => setAvatarKey(value as AvatarKey)} disabled={runtimeRole === "viewer"}>
<SelectTrigger className="h-12 rounded-2xl border-border/60">
<SelectValue />
</SelectTrigger>
@@ -1719,6 +2188,7 @@ export default function LiveCamera() {
onChange={(event) => setAvatarPrompt(event.target.value)}
placeholder="例如 狐狸 / panda coach / BeachKing / Juanita"
className="h-12 rounded-2xl border-border/60"
disabled={runtimeRole === "viewer"}
/>
</div>
</div>
@@ -1749,7 +2219,7 @@ export default function LiveCamera() {
<div className="grid gap-3 lg:grid-cols-3">
{Object.entries(CAMERA_QUALITY_PRESETS).map(([key, preset]) => {
const active = qualityPreset === key;
const disabled = analyzing || saving;
const disabled = analyzing || saving || runtimeRole === "viewer";
return (
<button
key={key}
@@ -1840,12 +2310,12 @@ export default function LiveCamera() {
</div>
) : null}
{filteredVisibleSegments.length === 0 ? (
{displayFilteredSegments.length === 0 ? (
<div className="rounded-2xl border border-dashed border-border/60 px-4 py-8 text-center text-sm text-muted-foreground">
{runtimeRole === "viewer" ? "当前会同步最近识别到的动作片段,持有端开始分析后会自动刷新。" : "开始分析后,这里会按时间区间显示识别出的动作片段。"}
</div>
) : (
filteredVisibleSegments.map((segment) => {
displayFilteredSegments.map((segment) => {
const meta = ACTION_META[segment.actionType];
return (
<div key={`${segment.actionType}-${segment.startMs}`} className="rounded-2xl border border-border/60 bg-muted/25 p-4">
@@ -1884,25 +2354,25 @@ export default function LiveCamera() {
<CardTitle className="text-base"></CardTitle>
</CardHeader>
<CardContent className="space-y-4">
{liveScore ? (
{displayScore ? (
<>
<div className="rounded-3xl border border-border/60 bg-muted/20 p-5 text-center">
<div className="text-xs uppercase tracking-[0.18em] text-muted-foreground"></div>
<div data-testid="live-camera-score-overall" className="mt-3 text-5xl font-semibold tracking-tight">
{liveScore.overall}
{displayScore.overall}
</div>
<div className="mt-3 flex items-center justify-center gap-2">
<Badge className={heroAction.tone}>{heroAction.label}</Badge>
<Badge variant="outline"> {liveScore.confidence}%</Badge>
<Badge variant="outline"> {displayScore.confidence}%</Badge>
<Badge className={sessionBand.tone}>{sessionBand.label}</Badge>
</div>
</div>
<div className="space-y-3">
<ScoreBar label="姿态" value={liveScore.posture} accent="bg-emerald-500" />
<ScoreBar label="平衡" value={liveScore.balance} accent="bg-sky-500" />
<ScoreBar label="技术" value={liveScore.technique} accent="bg-amber-500" />
<ScoreBar label="脚步" value={liveScore.footwork} accent="bg-indigo-500" />
<ScoreBar label="连贯性" value={liveScore.consistency} accent="bg-rose-500" />
<ScoreBar label="姿态" value={displayScore.posture} accent="bg-emerald-500" />
<ScoreBar label="平衡" value={displayScore.balance} accent="bg-sky-500" />
<ScoreBar label="技术" value={displayScore.technique} accent="bg-amber-500" />
<ScoreBar label="脚步" value={displayScore.footwork} accent="bg-indigo-500" />
<ScoreBar label="连贯性" value={displayScore.consistency} accent="bg-rose-500" />
</div>
</>
) : (
@@ -1958,19 +2428,19 @@ export default function LiveCamera() {
</div>
<div className="mt-3 grid grid-cols-2 gap-2 text-xs text-muted-foreground">
<div> {rawActionMeta.label}</div>
<div> {stabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
<div> {Math.round(stabilityMeta.windowShare * 100)}%</div>
<div> {stabilityMeta.switchCount} </div>
<div> {displayStabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
<div> {Math.round(displayStabilityMeta.windowShare * 100)}%</div>
<div> {displayStabilityMeta.switchCount} </div>
</div>
<Progress value={stabilityMeta.windowProgress * 100} className="mt-3 h-2" />
<Progress value={displayStabilityMeta.windowProgress * 100} className="mt-3 h-2" />
<div className="mt-2 text-xs text-muted-foreground">
{stabilityMeta.pending && pendingActionMeta
{displayStabilityMeta.pending && pendingActionMeta
? `当前正在确认 ${pendingActionMeta.label},确认后才会切段入库。`
: "当前区间只会按稳定动作聚合,短时抖动不会直接切换动作。"}
</div>
</div>
{feedback.length > 0 ? feedback.map((item) => (
{displayFeedback.length > 0 ? displayFeedback.map((item) => (
<div key={item} className="rounded-2xl border border-border/60 bg-muted/25 px-4 py-3 text-sm">
{item}
</div>
@@ -2059,7 +2529,7 @@ export default function LiveCamera() {
<div className="grid h-full grid-cols-[minmax(0,1fr)_72px] gap-3">
<div className="relative min-h-0 overflow-hidden rounded-[32px] border border-white/10 bg-black shadow-2xl shadow-black/40">
<video ref={videoRef} className="absolute inset-0 h-full w-full object-contain" playsInline muted autoPlay />
<canvas ref={canvasRef} className="pointer-events-none absolute inset-0 h-full w-full object-contain" />
<canvas ref={canvasRef} className={`pointer-events-none absolute inset-0 h-full w-full object-contain ${runtimeRole === "viewer" ? "hidden" : ""}`} />
<div className="pointer-events-none absolute left-3 top-3 flex flex-wrap gap-2">
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
@@ -2090,12 +2560,12 @@ export default function LiveCamera() {
</div>
<div>
<div className="uppercase tracking-[0.18em] text-white/45"></div>
<div className="mt-1">{stabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
<div className="mt-1">{displayStabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}</div>
</div>
<div>
<div className="uppercase tracking-[0.18em] text-white/45"></div>
<div className="mt-1">
{stabilityMeta.pending && pendingActionMeta ? `确认 ${pendingActionMeta.label}` : "稳定跟踪中"}
{displayStabilityMeta.pending && pendingActionMeta ? `确认 ${pendingActionMeta.label}` : runtimeRole === "viewer" ? "同步观看中" : "稳定跟踪中"}
</div>
</div>
</div>
@@ -2111,7 +2581,7 @@ export default function LiveCamera() {
<Minimize2 className="h-4 w-4" />
</Button>
{cameraActive && zoomState.supported ? renderZoomOverlay() : null}
{cameraActive && zoomState.supported && runtimeRole !== "viewer" ? renderZoomOverlay() : null}
</div>
<div className="flex flex-col items-center justify-center gap-3">