// NOTE(review): this module's source was damaged by text extraction — JSX elements
// and some generic type arguments ("<...>") were stripped and the file was collapsed
// onto a few physical lines. This section restores formatting and the type arguments
// that are unambiguously recoverable from usage. Spans whose original content is
// unrecoverable (SETUP_STEPS icons, ScoreBar's JSX) are kept verbatim and flagged.

import { useAuth } from "@/_core/hooks/useAuth";
import { trpc } from "@/lib/trpc";
import { createMediaSession, getMediaAssetUrl, uploadMediaLiveFrame } from "@/lib/media";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Dialog, DialogContent, DialogDescription, DialogFooter, DialogHeader, DialogTitle } from "@/components/ui/dialog";
import { Input } from "@/components/ui/input";
import { Progress } from "@/components/ui/progress";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
import { Slider } from "@/components/ui/slider";
import { Switch } from "@/components/ui/switch";
import { formatDateTimeShanghai } from "@/lib/time";
import { toast } from "sonner";
import {
  applyTrackZoom,
  type CameraQualityPreset,
  getLiveAnalysisBitrate,
  readTrackZoomState,
  requestCameraStream,
} from "@/lib/camera";
import {
  ACTION_WINDOW_FRAMES,
  AVATAR_PRESETS,
  createEmptyStabilizedActionMeta,
  createStableActionState,
  drawLiveCameraOverlay,
  getAvatarPreset,
  renderLiveCameraOverlayToContext,
  resolveAvatarKeyFromPrompt,
  stabilizeActionStream,
  type AvatarKey,
  type AvatarPreset,
  type AvatarRenderState,
  type FrameActionSample,
  type LiveActionType,
  type StabilizedActionMeta,
} from "@/lib/liveCamera";
import {
  Activity,
  Camera,
  CameraOff,
  CheckCircle2,
  ExternalLink,
  FlipHorizontal,
  Maximize2,
  Minus,
  Minimize2,
  Monitor,
  PlayCircle,
  Plus,
  RotateCcw,
  Smartphone,
  Sparkles,
  Target,
  Video,
  Zap,
} from "lucide-react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";

/** Which camera the user selected ("user" = front, "environment" = rear). */
type CameraFacing = "user" | "environment";

/** Practice vs player-versus-player session. */
type SessionMode = "practice" | "pk";

/** Local alias for the action labels produced by the live-camera classifier. */
type ActionType = LiveActionType;

/** Per-frame / per-session score breakdown (all values 0-100). */
type PoseScore = {
  overall: number;
  posture: number;
  balance: number;
  technique: number;
  footwork: number;
  consistency: number;
  confidence: number;
};

/** A contiguous run of frames classified as the same action. */
type ActionSegment = {
  actionType: ActionType;
  isUnknown: boolean;
  startMs: number;
  endMs: number;
  durationMs: number;
  confidenceAvg: number;
  score: number;
  peakScore: number;
  frameCount: number;
  issueSummary: string[];
  keyFrames: number[];
  clipLabel: string;
};

/** One uploaded recording chunk of the analysis session. */
type ArchivedAnalysisVideo = {
  videoId: number;
  url: string;
  sequence: number;
  durationMs: number;
  title: string;
};

/** Normalized (0-1) landmark coordinate with optional visibility score. */
type Point = {
  x: number;
  y: number;
  visibility?: number;
};

/** Mutable cross-frame state used to derive wrist/hip speeds between frames. */
type TrackingState = {
  prevTimestamp?: number;
  prevRightWrist?: Point;
  prevLeftWrist?: Point;
  prevHipCenter?: Point;
  lastAction?: ActionType;
};

/** Result of analyzing one pose frame. */
type AnalyzedFrame = {
  action: ActionType;
  confidence: number;
  score: PoseScore;
  feedback: string[];
};

/** Role of this device relative to the account's single live runtime session. */
type RuntimeRole = "idle" | "owner" | "viewer";

/** Heartbeat snapshot the owner device publishes for viewer devices. */
type RuntimeSnapshot = {
  phase?: "idle" | "analyzing" | "saving" | "safe" | "failed";
  startedAt?: number;
  durationMs?: number;
  title?: string;
  sessionMode?: SessionMode;
  qualityPreset?: CameraQualityPreset;
  facingMode?: CameraFacing;
  deviceKind?: "mobile" | "desktop";
  avatarEnabled?: boolean;
  avatarKey?: AvatarKey;
  avatarLabel?: string;
  updatedAt?: number;
  currentAction?: ActionType;
  rawAction?: ActionType;
  feedback?: string[];
  liveScore?: PoseScore | null;
  // NOTE(review): the type argument was stripped by extraction; reconstructed as
  // StabilizedActionMeta because stabilityMetaRef/setStabilityMeta hold
  // createEmptyStabilizedActionMeta() values — confirm against the original file.
  stabilityMeta?: Partial<StabilizedActionMeta>;
  visibleSegments?: number;
  unknownSegments?: number;
  archivedVideoCount?: number;
  recentSegments?: ActionSegment[];
};

/** Server-side record of the account's live analysis runtime. */
type RuntimeSession = {
  id: number;
  title: string | null;
  sessionMode: SessionMode;
  mediaSessionId: string | null;
  status: "idle" | "active" | "ended";
  startedAt: string | null;
  endedAt: string | null;
  lastHeartbeatAt: string | null;
  snapshot: RuntimeSnapshot | null;
};

// Display metadata per action type. NOTE(review): the Record type arguments were
// stripped by extraction; reconstructed from the literal and from the
// ACTION_META[action] usages below — confirm against the original file.
const ACTION_META: Record<ActionType, { label: string; tone: string; accent: string }> = {
  forehand: { label: "正手挥拍", tone: "bg-emerald-500/10 text-emerald-700", accent: "bg-emerald-500" },
  backhand: { label: "反手挥拍", tone: "bg-sky-500/10 text-sky-700", accent: "bg-sky-500" },
  serve: { label: "发球", tone: "bg-amber-500/10 text-amber-700", accent: "bg-amber-500" },
  volley: { label: "截击", tone: "bg-indigo-500/10 text-indigo-700", accent: "bg-indigo-500" },
  overhead: { label: "高压", tone: "bg-rose-500/10 text-rose-700", accent: "bg-rose-500" },
  slice: { label: "切削", tone: "bg-orange-500/10 text-orange-700", accent: "bg-orange-500" },
  lob: { label: "挑高球", tone: "bg-fuchsia-500/10 text-fuchsia-700", accent: "bg-fuchsia-500" },
  unknown: { label: "未知动作", tone: "bg-slate-500/10 text-slate-700", accent: "bg-slate-500" },
};

// NOTE(review): each step originally carried a JSX icon element (e.g. a lucide
// icon); the elements were destroyed by extraction, leaving "icon: }" below.
// They are kept verbatim rather than guessed — restore from version control.
const SETUP_STEPS = [
  { title: "固定设备", desc: "手机或平板保持稳定,避免分析阶段发生晃动", icon: },
  { title: "保留全身", desc: "画面尽量覆盖从头到脚,便于识别重心和脚步", icon: },
  { title: "确认视角", desc: "后置摄像头优先,横屏更适合完整挥拍追踪", icon: },
  { title: "开始分析", desc: "动作会先经过 24 帧稳定窗口确认,再按连续区间聚合保存", icon: },
];

// Segment aggregation tuning (milliseconds).
const SEGMENT_MAX_MS = 10_000; // hard cap on a single segment's length
const MERGE_GAP_MS = 900; // gaps shorter than this merge adjacent segments
const MIN_SEGMENT_MS = 1_200; // segments shorter than this are discarded
const ANALYSIS_RECORDING_SEGMENT_MS = 60_000; // recorder rotation interval

// Quality preset display copy. NOTE(review): Record type argument reconstructed —
// assumes CameraQualityPreset is exactly "economy" | "balanced" | "clarity";
// confirm against @/lib/camera.
const CAMERA_QUALITY_PRESETS: Record<CameraQualityPreset, { label: string; subtitle: string; description: string }> = {
  economy: {
    label: "节省流量",
    subtitle: "540p-720p · 低码率",
    description: "默认模式,优先减少本地录制文件大小与移动网络流量。",
  },
  balanced: {
    label: "均衡模式",
    subtitle: "720p-900p · 中码率",
    description: "兼顾动作识别稳定度与录制体积。",
  },
  clarity: {
    label: "清晰优先",
    subtitle: "720p-1080p · 高码率",
    description: "适合 Wi-Fi 和需要保留更多回放细节的场景。",
  },
};

/** Clamp `value` into the inclusive range [min, max]. */
function clamp(value: number, min: number, max: number) {
  return Math.max(min, Math.min(max, value));
}

/** Euclidean distance between two landmarks; 0 when either is missing. */
function distance(a?: Point, b?: Point) {
  if (!a || !b) return 0;
  const dx = a.x - b.x;
  const dy = a.y - b.y;
  return Math.sqrt(dx * dx + dy * dy);
}

/**
 * Interior angle (degrees, 0-180) at vertex `b` formed by segments b→a and b→c.
 * Returns 0 when any landmark is missing.
 */
function getAngle(a?: Point, b?: Point, c?: Point) {
  if (!a || !b || !c) return 0;
  const radians = Math.atan2(c.y - b.y, c.x - b.x) - Math.atan2(a.y - b.y, a.x - b.x);
  let angle = Math.abs((radians * 180) / Math.PI);
  if (angle > 180) angle = 360 - angle;
  return angle;
}

/** Format a millisecond duration as zero-padded "MM:SS" (rounded, floored at 0). */
function formatDuration(ms: number) {
  const totalSeconds = Math.max(0, Math.round(ms / 1000));
  const minutes = Math.floor(totalSeconds / 60);
  const seconds = totalSeconds % 60;
  return `${minutes.toString().padStart(2, "0")}:${seconds.toString().padStart(2, "0")}`;
}

/**
 * Repair a runtime title that may have been stored as UTF-8 bytes misread as
 * Latin-1 (mojibake). If the title contains suspicious Latin-1 artifacts, it is
 * re-decoded as UTF-8 and the variant with more CJK characters and fewer
 * artifacts wins; otherwise the trimmed input is returned unchanged.
 */
function normalizeRuntimeTitle(value: string | null | undefined) {
  if (typeof value !== "string") return "";
  const trimmed = value.trim();
  if (!trimmed) return "";
  const suspicious = /[ÃÂÆÐÑØæåçéèêëïîôöûüœŠŽƒ€¦]/;
  if (!suspicious.test(trimmed)) {
    return trimmed;
  }
  try {
    // Reinterpret each code unit's low byte as raw UTF-8 and decode.
    const bytes = Uint8Array.from(Array.from(trimmed).map((char) => char.charCodeAt(0) & 0xff));
    const decoded = new TextDecoder("utf-8").decode(bytes).trim();
    if (!decoded || decoded === trimmed) {
      return trimmed;
    }
    // Heuristic: CJK characters count double, mojibake artifacts count against.
    const score = (text: string) => {
      const cjkCount = text.match(/[\u3400-\u9fff]/g)?.length ?? 0;
      const badCount = text.match(/[ÃÂÆÐÑØæåçéèêëïîôöûüœŠŽƒ€¦]/g)?.length ?? 0;
      return (cjkCount * 2) - badCount;
    };
    return score(decoded) > score(trimmed) ? decoded : trimmed;
  } catch {
    return trimmed;
  }
}

/** True when running on a mobile UA or a viewport at most 768px wide. */
function isMobileDevice() {
  if (typeof window === "undefined") return false;
  return /Android|iPhone|iPad|iPod/i.test(navigator.userAgent) || window.matchMedia("(max-width: 768px)").matches;
}

/**
 * Pick the best MediaRecorder MIME type this browser supports:
 * MP4 (H.264/AAC) first, then VP9 and VP8 WebM, falling back to plain WebM.
 */
function pickRecorderMimeType() {
  const supported = typeof MediaRecorder !== "undefined" && typeof MediaRecorder.isTypeSupported === "function";
  if (supported && MediaRecorder.isTypeSupported("video/mp4;codecs=avc1.42E01E,mp4a.40.2")) {
    return "video/mp4";
  }
  if (supported && MediaRecorder.isTypeSupported("video/webm;codecs=vp9,opus")) {
    return "video/webm;codecs=vp9,opus";
  }
  if (supported && MediaRecorder.isTypeSupported("video/webm;codecs=vp8,opus")) {
    return "video/webm;codecs=vp8,opus";
  }
  return "video/webm";
}

/**
 * Read a Blob as a base64 string (data-URL payload without the prefix).
 * Rejects when the FileReader fails or yields a non-string result.
 */
function blobToBase64(blob: Blob): Promise<string> {
  return new Promise<string>((resolve, reject) => {
    const reader = new FileReader();
    reader.onloadend = () => {
      const result = reader.result;
      if (typeof result !== "string") {
        reject(new Error("无法读取录制文件"));
        return;
      }
      // Strip the "data:<mime>;base64," prefix; default to "" if absent.
      const [, base64 = ""] = result.split(",");
      resolve(base64);
    };
    reader.onerror = () => reject(reader.error || new Error("文件读取失败"));
    reader.readAsDataURL(blob);
  });
}

/** Start a new single-frame segment at `elapsedMs` seeded from `frame`. */
function createSegment(action: ActionType, elapsedMs: number, frame: AnalyzedFrame): ActionSegment {
  return {
    actionType: action,
    isUnknown: action === "unknown",
    startMs: elapsedMs,
    endMs: elapsedMs,
    durationMs: 0,
    confidenceAvg: frame.confidence,
    score: frame.score.overall,
    peakScore: frame.score.overall,
    frameCount: 1,
    issueSummary: frame.feedback.slice(0, 3),
    keyFrames: [elapsedMs],
    clipLabel: `${ACTION_META[action].label} ${formatDuration(elapsedMs)}`,
  };
}

/**
 * Heuristically classify one pose frame into a tennis action and score it.
 *
 * Reads body landmarks (indices follow the 33-point pose layout used by the
 * upstream pose model — nose=0, shoulders=11/12, ..., ankles=27/28; confirm
 * against the pose provider), derives speeds from `tracking`'s previous frame,
 * scores posture/balance/footwork/consistency, ranks per-action confidence
 * heuristics, and returns the winner (or "unknown" below 0.52 confidence) with
 * up to three feedback strings. Mutates `tracking` with this frame's state.
 */
function analyzePoseFrame(landmarks: Point[], tracking: TrackingState, timestamp: number): AnalyzedFrame {
  const nose = landmarks[0];
  const leftShoulder = landmarks[11];
  const rightShoulder = landmarks[12];
  const leftElbow = landmarks[13];
  const rightElbow = landmarks[14];
  const leftWrist = landmarks[15];
  const rightWrist = landmarks[16];
  const leftHip = landmarks[23];
  const rightHip = landmarks[24];
  const leftKnee = landmarks[25];
  const rightKnee = landmarks[26];
  const leftAnkle = landmarks[27];
  const rightAnkle = landmarks[28];
  const hipCenter = {
    x: ((leftHip?.x ?? 0.5) + (rightHip?.x ?? 0.5)) / 2,
    y: ((leftHip?.y ?? 0.7) + (rightHip?.y ?? 0.7)) / 2,
  };

  // Frame delta (clamped to >=16ms; assume 33ms on the first frame) and speeds
  // in normalized units per second.
  const dtMs = tracking.prevTimestamp ? Math.max(16, timestamp - tracking.prevTimestamp) : 33;
  const rightSpeed = distance(rightWrist, tracking.prevRightWrist) * (1000 / dtMs);
  const leftSpeed = distance(leftWrist, tracking.prevLeftWrist) * (1000 / dtMs);
  const hipSpeed = distance(hipCenter, tracking.prevHipCenter) * (1000 / dtMs);
  // Positive when the right wrist moved upward since the previous frame.
  const rightVerticalMotion = tracking.prevRightWrist
    ? tracking.prevRightWrist.y - (rightWrist?.y ?? tracking.prevRightWrist.y)
    : 0;

  // Static geometry features (missing landmarks fall back to neutral values).
  const shoulderTilt = Math.abs((leftShoulder?.y ?? 0.3) - (rightShoulder?.y ?? 0.3));
  const hipTilt = Math.abs((leftHip?.y ?? 0.55) - (rightHip?.y ?? 0.55));
  const headOffset = Math.abs((nose?.x ?? 0.5) - (((leftShoulder?.x ?? 0.45) + (rightShoulder?.x ?? 0.55)) / 2));
  const kneeBend = ((getAngle(leftHip, leftKnee, leftAnkle) || 165) + (getAngle(rightHip, rightKnee, rightAnkle) || 165)) / 2;
  const rightElbowAngle = getAngle(rightShoulder, rightElbow, rightWrist) || 145;
  const leftElbowAngle = getAngle(leftShoulder, leftElbow, leftWrist) || 145;
  const footSpread = Math.abs((leftAnkle?.x ?? 0.42) - (rightAnkle?.x ?? 0.58));
  const shoulderSpan = Math.abs((rightShoulder?.x ?? 0.56) - (leftShoulder?.x ?? 0.44));
  const wristSpread = Math.abs((rightWrist?.x ?? 0.62) - (leftWrist?.x ?? 0.38));
  const shoulderCenterX = ((leftShoulder?.x ?? 0.45) + (rightShoulder?.x ?? 0.55)) / 2;
  const torsoOffset = Math.abs(shoulderCenterX - hipCenter.x);
  const rightForward = (rightWrist?.x ?? shoulderCenterX) - hipCenter.x;
  const leftForward = hipCenter.x - (leftWrist?.x ?? shoulderCenterX);
  const contactHeight = hipCenter.y - (rightWrist?.y ?? hipCenter.y);
  const visibility = landmarks.reduce((sum, point) => sum + (point.visibility ?? 0.95), 0) / Math.max(1, landmarks.length);

  // Bail out early when the body is barely visible or implausibly narrow;
  // still advance the tracking state so speeds stay meaningful next frame.
  if (visibility < 0.42 || shoulderSpan < 0.08) {
    tracking.prevTimestamp = timestamp;
    tracking.prevRightWrist = rightWrist;
    tracking.prevLeftWrist = leftWrist;
    tracking.prevHipCenter = hipCenter;
    tracking.lastAction = "unknown";
    return {
      action: "unknown",
      confidence: 0.2,
      score: {
        overall: 48,
        posture: 50,
        balance: 48,
        technique: 45,
        footwork: 42,
        consistency: 40,
        confidence: 20,
      },
      feedback: ["当前画面人体可见度不足,请尽量让头肩和双脚都留在画面内。"],
    };
  }

  // Sub-scores, each clamped to 0-100.
  const posture = clamp(100 - shoulderTilt * 780 - headOffset * 640, 0, 100);
  const balance = clamp(100 - hipTilt * 900 - Math.max(0, 0.16 - footSpread) * 260, 0, 100);
  const footwork = clamp(45 + Math.min(36, hipSpeed * 120) + Math.max(0, 165 - kneeBend) * 0.35, 0, 100);
  const consistency = clamp(visibility * 100 - Math.abs(rightSpeed - leftSpeed) * 10, 0, 100);

  // Per-action confidence heuristics; the highest-confidence candidate wins.
  const candidates: Array<{ action: ActionType; confidence: number }> = [
    {
      action: "serve",
      confidence: clamp(
        (rightWrist && nose && rightWrist.y < nose.y ? 0.45 : 0.1) +
          (rightElbow && rightShoulder && rightElbow.y < rightShoulder.y ? 0.18 : 0.04) +
          clamp(contactHeight * 1.4, 0, 0.14) +
          clamp((0.24 - footSpread) * 1.2, 0, 0.08) +
          clamp((rightElbowAngle - 135) / 55, 0, 0.22) +
          clamp(rightVerticalMotion * 4.5, 0, 0.15),
        0,
        0.98,
      ),
    },
    {
      action: "overhead",
      confidence: clamp(
        (rightWrist && rightShoulder && rightWrist.y < rightShoulder.y - 0.1 ? 0.34 : 0.08) +
          clamp(rightSpeed * 0.08, 0, 0.28) +
          clamp((rightElbowAngle - 125) / 70, 0, 0.18),
        0,
        0.92,
      ),
    },
    {
      action: "forehand",
      confidence: clamp(
        (rightWrist && nose && rightWrist.x > nose.x ? 0.24 : 0.08) +
          (rightForward > 0.11 ? 0.16 : 0.04) +
          clamp((wristSpread - 0.2) * 0.8, 0, 0.16) +
          clamp((0.08 - torsoOffset) * 1.8, 0, 0.08) +
          clamp(rightSpeed * 0.12, 0, 0.28) +
          clamp((rightElbowAngle - 85) / 70, 0, 0.2),
        0,
        0.94,
      ),
    },
    {
      action: "backhand",
      confidence: clamp(
        ((leftWrist && nose && leftWrist.x < nose.x) || (rightWrist && nose && rightWrist.x < nose.x) ? 0.2 : 0.06) +
          (leftForward > 0.1 ? 0.16 : 0.04) +
          (rightWrist && hipCenter && rightWrist.x < hipCenter.x ? 0.12 : 0.02) +
          clamp((wristSpread - 0.22) * 0.75, 0, 0.14) +
          clamp(Math.max(leftSpeed, rightSpeed) * 0.1, 0, 0.22) +
          clamp((leftElbowAngle - 85) / 70, 0, 0.18),
        0,
        0.92,
      ),
    },
    {
      action: "volley",
      confidence: clamp(
        (rightWrist && rightShoulder && Math.abs(rightWrist.y - rightShoulder.y) < 0.12 ? 0.3 : 0.08) +
          clamp((0.16 - Math.abs(contactHeight - 0.08)) * 1.2, 0, 0.1) +
          clamp((0.22 - Math.abs((rightWrist?.x ?? 0.5) - hipCenter.x)) * 1.5, 0, 0.18) +
          clamp((1.8 - rightSpeed) * 0.14, 0, 0.18),
        0,
        0.88,
      ),
    },
    {
      action: "slice",
      confidence: clamp(
        (rightWrist && rightShoulder && rightWrist.y > rightShoulder.y ? 0.18 : 0.06) +
          clamp((contactHeight + 0.06) * 0.7, 0, 0.08) +
          clamp((tracking.prevRightWrist && rightWrist && rightWrist.y > tracking.prevRightWrist.y ? 0.18 : 0.04), 0, 0.18) +
          clamp(rightSpeed * 0.08, 0, 0.24),
        0,
        0.82,
      ),
    },
    {
      action: "lob",
      confidence: clamp(
        (rightWrist && nose && rightWrist.y < nose.y + 0.1 ? 0.22 : 0.08) +
          clamp((0.18 - Math.abs(rightForward)) * 1.2, 0, 0.08) +
          clamp(rightVerticalMotion * 4.2, 0, 0.28) +
          clamp((0.18 - Math.abs((rightWrist?.x ?? 0.5) - hipCenter.x)) * 1.4, 0, 0.18),
        0,
        0.86,
      ),
    },
  ];
  candidates.sort((a, b) => b.confidence - a.confidence);
  const topCandidate = candidates[0] ?? { action: "unknown" as ActionType, confidence: 0.2 };
  // Require 0.52 confidence before committing to a concrete action label.
  const action = topCandidate.confidence >= 0.52 ? topCandidate.action : "unknown";

  // Technique targets an ideal elbow angle per action family (160° for
  // serve/overhead, 118° for groundstrokes), plus a confidence bonus.
  const techniqueBase =
    action === "serve" || action === "overhead"
      ? clamp(100 - Math.abs(rightElbowAngle - 160) * 0.9, 0, 100)
      : action === "backhand"
        ? clamp(100 - Math.abs(leftElbowAngle - 118) * 0.9, 0, 100)
        : clamp(100 - Math.abs(rightElbowAngle - 118) * 0.85, 0, 100);
  const technique = clamp(techniqueBase + topCandidate.confidence * 8, 0, 100);
  const overall = clamp(
    posture * 0.22 + balance * 0.18 + technique * 0.28 + footwork * 0.16 + consistency * 0.16,
    0,
    100,
  );

  // Build coaching feedback; only the first three entries are returned.
  const feedback: string[] = [];
  if (action === "unknown") {
    feedback.push("当前片段缺少完整挥拍特征,系统已归为未知动作。");
  }
  if (visibility < 0.65) {
    feedback.push("人体关键点可见度偏低,建议调整机位让双臂和双脚完全入镜。");
  }
  if (posture < 72) {
    feedback.push("上体轴线偏移较明显,击球准备时保持头肩稳定。");
  }
  if (balance < 70) {
    feedback.push("重心波动偏大,建议扩大支撑面并缩短恢复时间。");
  }
  if (footwork < 68) {
    feedback.push("脚步启动不足,击球前先完成小碎步调整。");
  }
  if ((action === "serve" || action === "overhead") && technique < 75) {
    feedback.push("抬臂延展不够,击球点再高一些会更完整。");
  }
  if ((action === "forehand" || action === "backhand") && technique < 75) {
    feedback.push("肘腕角度偏紧,击球点前移并完成收拍。");
  }
  if (feedback.length === 0) {
    feedback.push("节奏稳定,可以继续累积高质量动作片段。");
  }

  // Persist this frame's state for the next frame's speed calculations.
  tracking.prevTimestamp = timestamp;
  tracking.prevRightWrist = rightWrist;
  tracking.prevLeftWrist = leftWrist;
  tracking.prevHipCenter = hipCenter;
  tracking.lastAction = action;

  return {
    action,
    confidence: clamp(topCandidate.confidence, 0, 1),
    score: {
      overall: Math.round(overall),
      posture: Math.round(posture),
      balance: Math.round(balance),
      technique: Math.round(technique),
      footwork: Math.round(footwork),
      consistency: Math.round(consistency),
      confidence: Math.round(clamp(topCandidate.confidence * 100, 0, 100)),
    },
    feedback: feedback.slice(0, 3),
  };
}

// NOTE(review): ScoreBar's JSX body was destroyed by extraction (the markup
// elements are gone, leaving bare expressions). The fragment below is kept
// verbatim — restore the markup from version control.
function ScoreBar({ label, value, accent }: { label: string; value: number; accent?: string }) { return (
{label} {Math.round(value)}
); } function getSessionBand(input: { overallScore: number; knownRatio: number; effectiveSegments: number }) { if (input.overallScore >= 85 && input.knownRatio >= 0.72 && input.effectiveSegments >= 4) { return { label: "高质量", tone: "bg-emerald-500/10 text-emerald-700" }; } if (input.overallScore >= 72 && input.knownRatio >= 0.55 && input.effectiveSegments >= 2) { return { label: "稳定", tone: "bg-sky-500/10 text-sky-700" }; } return { label: "待加强", tone: "bg-amber-500/10 text-amber-700" }; } function getRuntimeSyncDelayMs(lastHeartbeatAt?: string | null) { if (!lastHeartbeatAt) return null; const heartbeatMs = new Date(lastHeartbeatAt).getTime(); if (Number.isNaN(heartbeatMs)) return null; return Math.max(0, Date.now() - heartbeatMs); } function formatRuntimeSyncDelay(delayMs: number | null) { if (delayMs == null) return "等待同步"; if (delayMs < 1500) return "同步中"; if (delayMs < 10_000) return `${(delayMs / 1000).toFixed(1)}s 延迟`; return "同步较慢"; } export default function LiveCamera() { const { user } = useAuth(); const utils = trpc.useUtils(); const mobile = useMemo(() => isMobileDevice(), []); const videoRef = useRef(null); const canvasRef = useRef(null); const streamRef = useRef(null); const poseRef = useRef(null); const compositeCanvasRef = useRef(null); const broadcastSessionIdRef = useRef(null); const viewerSessionIdRef = useRef(null); const viewerRetryTimerRef = useRef(0); const frameRelayTimerRef = useRef(0); const frameRelayInFlightRef = useRef(false); const runtimeIdRef = useRef(null); const heartbeatTimerRef = useRef(0); const recorderRef = useRef(null); const recorderStreamRef = useRef(null); const recorderMimeTypeRef = useRef("video/webm"); const recorderChunksRef = useRef([]); const recorderStopPromiseRef = useRef | null>(null); const recorderSegmentStartedAtRef = useRef(0); const recorderSequenceRef = useRef(0); const recorderRotateTimerRef = useRef(0); const recorderUploadQueueRef = useRef(Promise.resolve()); const archivedVideosRef = useRef([]); const 
analyzingRef = useRef(false); const animationRef = useRef(0); const sessionStartedAtRef = useRef(0); const trackingRef = useRef({}); const actionHistoryRef = useRef([]); const stableActionStateRef = useRef(createStableActionState()); const currentSegmentRef = useRef(null); const segmentsRef = useRef([]); const frameSamplesRef = useRef([]); const volatilitySamplesRef = useRef([]); const currentActionRef = useRef("unknown"); const rawActionRef = useRef("unknown"); const liveScoreRef = useRef(null); const feedbackRef = useRef([]); const durationMsRef = useRef(0); const leaveStatusRef = useRef<"idle" | "analyzing" | "saving" | "safe" | "failed">("idle"); const sessionModeRef = useRef("practice"); const stabilityMetaRef = useRef(createEmptyStabilizedActionMeta()); const zoomTargetRef = useRef(1); const avatarRenderRef = useRef({ enabled: false, avatarKey: "gorilla", }); const [cameraActive, setCameraActive] = useState(false); const [facing, setFacing] = useState("environment"); const [hasMultipleCameras, setHasMultipleCameras] = useState(false); const [showSetupGuide, setShowSetupGuide] = useState(true); const [setupStep, setSetupStep] = useState(0); const [sessionMode, setSessionMode] = useState("practice"); const [analyzing, setAnalyzing] = useState(false); const [saving, setSaving] = useState(false); const [leaveStatus, setLeaveStatus] = useState<"idle" | "analyzing" | "saving" | "safe" | "failed">("idle"); const [immersivePreview, setImmersivePreview] = useState(false); const [liveScore, setLiveScore] = useState(null); const [currentAction, setCurrentAction] = useState("unknown"); const [rawAction, setRawAction] = useState("unknown"); const [feedback, setFeedback] = useState([]); const [segments, setSegments] = useState([]); const [durationMs, setDurationMs] = useState(0); const [segmentFilter, setSegmentFilter] = useState("all"); const [qualityPreset, setQualityPreset] = useState("economy"); const [zoomState, setZoomState] = useState(() => 
readTrackZoomState(null)); const [stabilityMeta, setStabilityMeta] = useState(() => createEmptyStabilizedActionMeta()); const [avatarEnabled, setAvatarEnabled] = useState(false); const [avatarKey, setAvatarKey] = useState("gorilla"); const [avatarPrompt, setAvatarPrompt] = useState(""); const [archivedVideoCount, setArchivedVideoCount] = useState(0); const [viewerConnected, setViewerConnected] = useState(false); const [viewerError, setViewerError] = useState(""); const [viewerFrameVersion, setViewerFrameVersion] = useState(0); const resolvedAvatarKey = useMemo( () => resolveAvatarKeyFromPrompt(avatarPrompt, avatarKey), [avatarKey, avatarPrompt], ); const uploadMutation = trpc.video.upload.useMutation(); const saveLiveSessionMutation = trpc.analysis.liveSessionSave.useMutation({ onSuccess: () => { utils.profile.stats.invalidate(); utils.analysis.liveSessionList.invalidate(); utils.video.list.invalidate(); utils.record.list.invalidate(); utils.achievement.list.invalidate(); utils.rating.current.invalidate(); utils.rating.history.invalidate(); }, }); const liveSessionsQuery = trpc.analysis.liveSessionList.useQuery({ limit: 8 }); const runtimeQuery = trpc.analysis.runtimeGet.useQuery(undefined, { refetchInterval: 1000, refetchIntervalInBackground: true, }); const runtimeAcquireMutation = trpc.analysis.runtimeAcquire.useMutation(); const runtimeHeartbeatMutation = trpc.analysis.runtimeHeartbeat.useMutation(); const runtimeReleaseMutation = trpc.analysis.runtimeRelease.useMutation(); const runtimeRole = (runtimeQuery.data?.role ?? "idle") as RuntimeRole; const runtimeSession = (runtimeQuery.data?.runtimeSession ?? null) as RuntimeSession | null; const runtimeSnapshot = runtimeSession?.snapshot ?? 
null; const normalizedRuntimeTitle = normalizeRuntimeTitle(runtimeSession?.title); const normalizedSnapshotTitle = normalizeRuntimeTitle(runtimeSnapshot?.title); useEffect(() => { avatarRenderRef.current = { enabled: avatarEnabled, avatarKey: resolvedAvatarKey, customLabel: avatarPrompt.trim() || undefined, }; }, [avatarEnabled, avatarPrompt, resolvedAvatarKey]); useEffect(() => { currentActionRef.current = currentAction; }, [currentAction]); useEffect(() => { rawActionRef.current = rawAction; }, [rawAction]); useEffect(() => { liveScoreRef.current = liveScore; }, [liveScore]); useEffect(() => { feedbackRef.current = feedback; }, [feedback]); useEffect(() => { durationMsRef.current = durationMs; }, [durationMs]); useEffect(() => { leaveStatusRef.current = leaveStatus; }, [leaveStatus]); useEffect(() => { if (runtimeRole === "viewer") { setShowSetupGuide(false); setSetupStep(0); } }, [runtimeRole]); useEffect(() => { sessionModeRef.current = sessionMode; }, [sessionMode]); useEffect(() => { stabilityMetaRef.current = stabilityMeta; }, [stabilityMeta]); const visibleSegments = useMemo( () => segments.filter((segment) => !segment.isUnknown).sort((a, b) => b.startMs - a.startMs), [segments], ); const unknownSegments = useMemo(() => segments.filter((segment) => segment.isUnknown), [segments]); const filteredVisibleSegments = useMemo( () => segmentFilter === "all" ? visibleSegments : visibleSegments.filter((segment) => segment.actionType === segmentFilter), [segmentFilter, visibleSegments], ); const viewerRecentSegments = useMemo( () => (runtimeSnapshot?.recentSegments ?? []).filter((segment) => !segment.isUnknown), [runtimeSnapshot?.recentSegments], ); const displayVisibleSegments = runtimeRole === "viewer" ? viewerRecentSegments : visibleSegments; const displayFilteredSegments = runtimeRole === "viewer" ? (segmentFilter === "all" ? 
viewerRecentSegments : viewerRecentSegments.filter((segment) => segment.actionType === segmentFilter)) : filteredVisibleSegments; const actionStats = useMemo(() => { const totals = new Map(); displayVisibleSegments.forEach((segment) => { const current = totals.get(segment.actionType) ?? { count: 0, durationMs: 0, averageScore: 0, averageConfidence: 0, }; const nextCount = current.count + 1; totals.set(segment.actionType, { count: nextCount, durationMs: current.durationMs + segment.durationMs, averageScore: ((current.averageScore * current.count) + segment.score) / nextCount, averageConfidence: ((current.averageConfidence * current.count) + segment.confidenceAvg) / nextCount, }); }); const totalDuration = Math.max(1, displayVisibleSegments.reduce((sum, segment) => sum + segment.durationMs, 0)); return Array.from(totals.entries()) .map(([actionType, value]) => ({ actionType, ...value, sharePct: Math.round((value.durationMs / totalDuration) * 100), })) .sort((a, b) => b.durationMs - a.durationMs); }, [displayVisibleSegments]); const bestSegment = useMemo( () => displayVisibleSegments.reduce((best, segment) => { if (!best) return segment; return segment.score > best.score ? segment : best; }, null), [displayVisibleSegments], ); const totalDisplaySegments = runtimeRole === "viewer" ? (runtimeSnapshot?.visibleSegments ?? displayVisibleSegments.length) + (runtimeSnapshot?.unknownSegments ?? 0) : segments.length; const knownRatio = totalDisplaySegments > 0 ? displayVisibleSegments.length / totalDisplaySegments : 0; const sessionBand = useMemo( () => getSessionBand({ overallScore: (runtimeRole === "viewer" ? 
runtimeSnapshot?.liveScore?.overall : liveScore?.overall) || 0, knownRatio, effectiveSegments: displayVisibleSegments.length, }), [displayVisibleSegments.length, knownRatio, liveScore?.overall, runtimeRole, runtimeSnapshot?.liveScore?.overall], ); const refreshRuntimeState = useCallback(async () => { const result = await runtimeQuery.refetch(); return { role: (result.data?.role ?? runtimeRole) as RuntimeRole, runtimeSession: (result.data?.runtimeSession ?? runtimeSession) as RuntimeSession | null, }; }, [runtimeQuery, runtimeRole, runtimeSession]); useEffect(() => { navigator.mediaDevices?.enumerateDevices().then((devices) => { const cameras = devices.filter((device) => device.kind === "videoinput"); setHasMultipleCameras(cameras.length > 1); }).catch(() => undefined); }, []); useEffect(() => { if (!cameraActive || !streamRef.current || !videoRef.current) return; if (videoRef.current.srcObject !== streamRef.current) { videoRef.current.srcObject = streamRef.current; void videoRef.current.play().catch(() => undefined); } }, [cameraActive, immersivePreview]); const ensureCompositeCanvas = useCallback(() => { if (typeof document === "undefined") { return null; } if (!compositeCanvasRef.current) { compositeCanvasRef.current = document.createElement("canvas"); } return compositeCanvasRef.current; }, []); const renderCompositeFrame = useCallback((landmarks?: Point[]) => { const video = videoRef.current; const compositeCanvas = ensureCompositeCanvas(); if (!video || !compositeCanvas || video.videoWidth <= 0 || video.videoHeight <= 0) { return; } if (compositeCanvas.width !== video.videoWidth || compositeCanvas.height !== video.videoHeight) { compositeCanvas.width = video.videoWidth; compositeCanvas.height = video.videoHeight; } const ctx = compositeCanvas.getContext("2d"); if (!ctx) return; ctx.clearRect(0, 0, compositeCanvas.width, compositeCanvas.height); ctx.drawImage(video, 0, 0, compositeCanvas.width, compositeCanvas.height); renderLiveCameraOverlayToContext( ctx, 
compositeCanvas.width, compositeCanvas.height, landmarks, avatarRenderRef.current, { clear: false }, ); }, [ensureCompositeCanvas]); const queueArchivedVideoUpload = useCallback(async (blob: Blob, sequence: number, durationMs: number) => { const format = recorderMimeTypeRef.current.includes("mp4") ? "mp4" : "webm"; const title = `实时分析录像 ${formatDateTimeShanghai(new Date(), { year: undefined, second: undefined, })} · 第 ${sequence} 段`; recorderUploadQueueRef.current = recorderUploadQueueRef.current .then(async () => { const fileBase64 = await blobToBase64(blob); const uploaded = await uploadMutation.mutateAsync({ title, format, fileSize: blob.size, duration: Math.max(1, Math.round(durationMs / 1000)), exerciseType: "live_analysis", fileBase64, }); const nextVideo: ArchivedAnalysisVideo = { videoId: uploaded.videoId, url: uploaded.url, sequence, durationMs, title, }; archivedVideosRef.current = [...archivedVideosRef.current, nextVideo].sort((a, b) => a.sequence - b.sequence); setArchivedVideoCount(archivedVideosRef.current.length); }) .catch((error: any) => { toast.error(`分析录像第 ${sequence} 段归档失败: ${error?.message || "未知错误"}`); }); return recorderUploadQueueRef.current; }, [uploadMutation]); const stopSessionRecorder = useCallback(async () => { const recorder = recorderRef.current; if (recorderRotateTimerRef.current) { window.clearTimeout(recorderRotateTimerRef.current); recorderRotateTimerRef.current = 0; } if (!recorder) { await recorderUploadQueueRef.current; return; } const stopPromise = recorderStopPromiseRef.current; if (recorder.state !== "inactive") { recorder.stop(); } await (stopPromise ?? Promise.resolve()); await recorderUploadQueueRef.current; }, []); const buildRuntimeSnapshot = useCallback((phase?: RuntimeSnapshot["phase"]): RuntimeSnapshot => ({ phase: phase ?? 
// --- Runtime broadcast / sync callbacks (interior of the page component; the component and
// --- several refs/hooks begin above this chunk) ---
// NOTE(review): this extraction collapsed the file onto very long lines; original formatting lost.
// Continues buildRuntimeSnapshot: assembles the RuntimeSnapshot payload (status phase, session
// timing/title, capture settings, avatar overlay state, live action/score/feedback, segment
// counters, last 5 segments) that heartbeats and releases send to the server. Then defines
// openSetupGuide (re-checks runtime role and blocks viewer-mode devices from opening the setup
// dialog) and uploadLiveFrame (renders the composite canvas, encodes it to a JPEG blob and
// relays it via uploadMediaLiveFrame; frameRelayInFlightRef is a single-flight guard so uploads
// never overlap).
leaveStatusRef.current, startedAt: sessionStartedAtRef.current || undefined, durationMs: durationMsRef.current, title: normalizedRuntimeTitle || `实时分析 ${ACTION_META[currentActionRef.current].label}`, sessionMode: sessionModeRef.current, qualityPreset, facingMode: facing, deviceKind: mobile ? "mobile" : "desktop", avatarEnabled: avatarRenderRef.current.enabled, avatarKey: avatarRenderRef.current.avatarKey, avatarLabel: getAvatarPreset(avatarRenderRef.current.avatarKey)?.label || "猩猩", updatedAt: Date.now(), currentAction: currentActionRef.current, rawAction: rawActionRef.current, feedback: feedbackRef.current, liveScore: liveScoreRef.current, stabilityMeta: stabilityMetaRef.current, visibleSegments: segmentsRef.current.filter((segment) => !segment.isUnknown).length, unknownSegments: segmentsRef.current.filter((segment) => segment.isUnknown).length, archivedVideoCount: archivedVideosRef.current.length, recentSegments: segmentsRef.current.slice(-5), }), [facing, mobile, normalizedRuntimeTitle, qualityPreset]); const openSetupGuide = useCallback(async () => { const latest = await refreshRuntimeState(); if (latest.role === "viewer") { setShowSetupGuide(false); toast.error("当前账号已有其他设备正在实时分析,请先切换到同步观看模式"); return; } setShowSetupGuide(true); }, [refreshRuntimeState]); const uploadLiveFrame = useCallback(async (sessionId: string) => { const compositeCanvas = ensureCompositeCanvas(); if (!compositeCanvas || frameRelayInFlightRef.current) { return; } renderCompositeFrame(); frameRelayInFlightRef.current = true; try { const blob = await new Promise((resolve) => { compositeCanvas.toBlob(resolve, "image/jpeg", mobile ?
// (JPEG quality: 0.7 on mobile, 0.76 on desktop.) startFrameRelayLoop uploads one frame
// immediately then every 900 ms; closeBroadcastPeer / closeViewerPeer tear the relay state
// down (timers, session ids, in-flight flag, detached preview). releaseRuntime posts the final
// snapshot with the runtime release mutation.
// NOTE(review): closeViewerPeer clears viewerRetryTimerRef with window.clearTimeout while the
// viewer effect below creates it with window.setInterval — interchangeable in browsers, but
// inconsistent; confirm which was intended.
0.7 : 0.76); }); if (!blob) { return; } await uploadMediaLiveFrame(sessionId, blob); } finally { frameRelayInFlightRef.current = false; } }, [ensureCompositeCanvas, mobile, renderCompositeFrame]); const startFrameRelayLoop = useCallback((sessionId: string) => { broadcastSessionIdRef.current = sessionId; if (frameRelayTimerRef.current) { window.clearInterval(frameRelayTimerRef.current); frameRelayTimerRef.current = 0; } void uploadLiveFrame(sessionId); frameRelayTimerRef.current = window.setInterval(() => { void uploadLiveFrame(sessionId); }, 900); }, [uploadLiveFrame]); const closeBroadcastPeer = useCallback(() => { broadcastSessionIdRef.current = null; if (frameRelayTimerRef.current) { window.clearInterval(frameRelayTimerRef.current); frameRelayTimerRef.current = 0; } frameRelayInFlightRef.current = false; }, []); const closeViewerPeer = useCallback(() => { if (viewerRetryTimerRef.current) { window.clearTimeout(viewerRetryTimerRef.current); viewerRetryTimerRef.current = 0; } viewerSessionIdRef.current = null; if (videoRef.current && !cameraActive) { videoRef.current.srcObject = null; } setViewerConnected(false); }, [cameraActive]); const releaseRuntime = useCallback(async (phase: RuntimeSnapshot["phase"]) => { if (!runtimeIdRef.current) return; try { await runtimeReleaseMutation.mutateAsync({ runtimeId: runtimeIdRef.current, snapshot: buildRuntimeSnapshot(phase), }); } catch { // Ignore runtime release errors and let the server-side stale timeout recover. 
// releaseRuntime (continued): the finally block always clears the runtime id and heartbeat
// timer and refetches the runtime query, even when the release mutation throws.
// startRuntimeHeartbeatLoop sends a snapshot heartbeat immediately and then every 1000 ms
// while a runtime id is held (optionally re-binding the broadcast media session id first).
} finally { runtimeIdRef.current = null; broadcastSessionIdRef.current = null; if (heartbeatTimerRef.current) { window.clearInterval(heartbeatTimerRef.current); heartbeatTimerRef.current = 0; } void runtimeQuery.refetch(); } }, [buildRuntimeSnapshot, runtimeQuery, runtimeReleaseMutation]); const startRuntimeHeartbeatLoop = useCallback((mediaSessionId?: string | null) => { if (!runtimeIdRef.current) return; if (typeof mediaSessionId === "string") { broadcastSessionIdRef.current = mediaSessionId; } if (heartbeatTimerRef.current) { window.clearInterval(heartbeatTimerRef.current); heartbeatTimerRef.current = 0; } const sendHeartbeat = () => { if (!runtimeIdRef.current) return; runtimeHeartbeatMutation.mutate({ runtimeId: runtimeIdRef.current, mediaSessionId: broadcastSessionIdRef.current, snapshot: buildRuntimeSnapshot(), }); }; sendHeartbeat(); heartbeatTimerRef.current = window.setInterval(sendHeartbeat, 1000); }, [buildRuntimeSnapshot, runtimeHeartbeatMutation]); const startBroadcastSession = useCallback(async () => { if (!user?.id) { throw new Error("当前用户信息未就绪"); } const compositeCanvas = ensureCompositeCanvas(); if (!compositeCanvas) { throw new Error("当前浏览器不支持同步观看画面"); } renderCompositeFrame(); const sessionResponse = await createMediaSession({ userId: String(user.id), title: `实时分析同步 ${formatDateTimeShanghai(new Date(), { year: undefined, second: undefined, })}`, format: "webm", mimeType: "video/webm", qualityPreset, facingMode: facing, deviceKind: mobile ? 
// (deviceKind resolves to "mobile" / "desktop".) startBroadcastSession creates the media
// session for the composite canvas and kicks off the frame relay loop, returning the session id.
// startViewerStream switches this device to polling the relayed frames (by bumping the frame
// version); stopCamera cancels the RAF loop, closes the pose instance, stops the recorder and
// stream tracks, and resets all per-session refs and UI state. The two effects below force
// viewer-mode devices to release the local camera and (re)connect the synced viewer stream.
"mobile" : "desktop", }); const sessionId = sessionResponse.session.id; startFrameRelayLoop(sessionId); return sessionId; }, [ensureCompositeCanvas, facing, mobile, qualityPreset, renderCompositeFrame, startFrameRelayLoop, user?.id]); const startViewerStream = useCallback(async (mediaSessionId: string) => { if (viewerSessionIdRef.current === mediaSessionId && viewerConnected) { setViewerFrameVersion(Date.now()); return; } closeViewerPeer(); setViewerError(""); viewerSessionIdRef.current = mediaSessionId; setViewerFrameVersion(Date.now()); }, [closeViewerPeer, viewerConnected]); const stopCamera = useCallback(() => { if (animationRef.current) { cancelAnimationFrame(animationRef.current); animationRef.current = 0; } if (poseRef.current?.close) { poseRef.current.close(); poseRef.current = null; } analyzingRef.current = false; setAnalyzing(false); void stopSessionRecorder(); const localStream = streamRef.current; if (streamRef.current) { streamRef.current.getTracks().forEach((track) => track.stop()); streamRef.current = null; } if (videoRef.current && localStream && videoRef.current.srcObject === localStream) { videoRef.current.srcObject = null; } actionHistoryRef.current = []; stableActionStateRef.current = createStableActionState(); volatilitySamplesRef.current = []; setCurrentAction("unknown"); setRawAction("unknown"); setStabilityMeta(createEmptyStabilizedActionMeta()); setZoomState(readTrackZoomState(null)); archivedVideosRef.current = []; recorderSequenceRef.current = 0; setArchivedVideoCount(0); setCameraActive(false); }, [stopSessionRecorder]); useEffect(() => { if (runtimeRole === "viewer" && cameraActive) { stopCamera(); } }, [cameraActive, runtimeRole, stopCamera]); useEffect(() => { if (runtimeRole !== "viewer" || !runtimeSession?.mediaSessionId) { if (!cameraActive) { closeViewerPeer(); } setViewerError(""); return; } void startViewerStream(runtimeSession.mediaSessionId).catch((error: any) => { setViewerError(error?.message || "同步画面连接失败"); }); if 
// Viewer retry effect (continued): bumps the frame version every 900 ms so the polled
// live-frame image refreshes; cleanup clears the interval. The unmount effect stops the camera
// and closes both broadcast and viewer relay state. syncZoomState / updateZoom / stepZoom wrap
// applyTrackZoom / readTrackZoomState on the active video track, keeping zoomTargetRef and the
// zoomState UI in sync (stepZoom moves by one zoomState.step, clamped to [min, max]).
(viewerRetryTimerRef.current) { window.clearInterval(viewerRetryTimerRef.current); viewerRetryTimerRef.current = 0; } viewerRetryTimerRef.current = window.setInterval(() => { setViewerFrameVersion(Date.now()); }, 900); return () => { if (viewerRetryTimerRef.current) { window.clearInterval(viewerRetryTimerRef.current); viewerRetryTimerRef.current = 0; } }; }, [ cameraActive, closeViewerPeer, runtimeRole, runtimeSession?.mediaSessionId, startViewerStream, ]); useEffect(() => { return () => { stopCamera(); closeBroadcastPeer(); closeViewerPeer(); }; }, [closeBroadcastPeer, closeViewerPeer, stopCamera]); const syncZoomState = useCallback(async (preferredZoom?: number, providedTrack?: MediaStreamTrack | null) => { const track = providedTrack || streamRef.current?.getVideoTracks()[0] || null; if (!track) { zoomTargetRef.current = 1; setZoomState(readTrackZoomState(null)); return; } let nextState = readTrackZoomState(track); if (nextState.supported && preferredZoom != null && Math.abs(preferredZoom - nextState.current) > nextState.step / 2) { try { nextState = await applyTrackZoom(track, preferredZoom); } catch { nextState = readTrackZoomState(track); } } zoomTargetRef.current = nextState.current; setZoomState(nextState); }, []); const updateZoom = useCallback(async (nextZoom: number) => { const track = streamRef.current?.getVideoTracks()[0] || null; if (!track) return; try { const nextState = await applyTrackZoom(track, nextZoom); zoomTargetRef.current = nextState.current; setZoomState(nextState); } catch (error: any) { toast.error(`镜头缩放调整失败: ${error?.message || "当前设备不支持"}`); } }, []); const stepZoom = useCallback((direction: -1 | 1) => { if (!zoomState.supported) return; const nextZoom = clamp(zoomState.current + zoomState.step * direction, zoomState.min, zoomState.max); void updateZoom(nextZoom); }, [updateZoom, zoomState]); const startCamera = useCallback(async ( nextFacing: CameraFacing = facing, preferredZoom = zoomTargetRef.current, preset: CameraQualityPreset = 
// startCamera (continued): refuses to start while this account is in viewer mode, stops any
// previous tracks, requests a new stream via requestCameraStream (facing/mobile/preset hints),
// keeps the session alive even if binding the preview <video> fails, re-syncs zoom on the new
// track, and toasts when a compatibility fallback mode was applied.
qualityPreset, ) => { const latest = await refreshRuntimeState(); if (latest.role === "viewer") { toast.error("当前账号已有其他设备正在实时分析,请切换到同步观看模式"); return; } try { if (streamRef.current) { streamRef.current.getTracks().forEach((track) => track.stop()); } const { stream, appliedFacingMode, usedFallback } = await requestCameraStream({ facingMode: nextFacing, isMobile: mobile, preset, }); streamRef.current = stream; if (appliedFacingMode !== nextFacing) { setFacing(appliedFacingMode); } setCameraActive(true); if (videoRef.current) { try { videoRef.current.srcObject = stream; await videoRef.current.play().catch(() => undefined); } catch { // Keep the camera session alive even if preview binding is flaky on the current browser. } } await syncZoomState(preferredZoom, stream.getVideoTracks()[0] || null); if (usedFallback) { toast.info("当前设备已自动切换到兼容摄像头模式"); } toast.success("摄像头已启动"); } catch (error: any) { toast.error(`摄像头启动失败: ${error?.message || "未知错误"}`); } }, [facing, mobile, qualityPreset, refreshRuntimeState, syncZoomState]); const switchCamera = useCallback(async () => { const nextFacing: CameraFacing = facing === "user" ? 
// switchCamera flips facing (user <-> environment) and, when active, restarts the stream after
// a 250 ms settle delay. handleQualityPresetChange restarts the camera with the new preset only
// while idle (not analyzing/saving). flushSegment finalizes a segment — dropping those shorter
// than MIN_SEGMENT_MS, capping key frames to the last 4 and issues to 4 — and appends it to
// segmentsRef/state. appendFrameToSegment merges a same-action frame into the open segment when
// the gap <= MERGE_GAP_MS and total duration <= SEGMENT_MAX_MS, maintaining running
// confidence/score averages; otherwise it flushes and opens a new segment.
"environment" : "user"; setFacing(nextFacing); if (!cameraActive) return; stopCamera(); await new Promise((resolve) => setTimeout(resolve, 250)); await startCamera(nextFacing, zoomTargetRef.current); }, [cameraActive, facing, startCamera, stopCamera]); const handleQualityPresetChange = useCallback(async (nextPreset: CameraQualityPreset) => { setQualityPreset(nextPreset); if (cameraActive && !analyzing && !saving) { await startCamera(facing, zoomTargetRef.current, nextPreset); } }, [analyzing, cameraActive, facing, saving, startCamera]); const flushSegment = useCallback((segment: ActionSegment | null) => { if (!segment || segment.durationMs < MIN_SEGMENT_MS) { return; } const finalized: ActionSegment = { ...segment, durationMs: Math.max(segment.durationMs, segment.endMs - segment.startMs), clipLabel: `${ACTION_META[segment.actionType].label} ${formatDuration(segment.startMs)} - ${formatDuration(segment.endMs)}`, keyFrames: Array.from(new Set(segment.keyFrames)).slice(-4), issueSummary: segment.issueSummary.slice(0, 4), }; segmentsRef.current = [...segmentsRef.current, finalized]; setSegments(segmentsRef.current); }, []); const appendFrameToSegment = useCallback((frame: AnalyzedFrame, elapsedMs: number) => { const current = currentSegmentRef.current; if (!current) { currentSegmentRef.current = createSegment(frame.action, elapsedMs, frame); return; } const sameAction = current.actionType === frame.action; const gap = elapsedMs - current.endMs; const nextDuration = elapsedMs - current.startMs; if (sameAction && gap <= MERGE_GAP_MS && nextDuration <= SEGMENT_MAX_MS) { const nextFrameCount = current.frameCount + 1; current.endMs = elapsedMs; current.durationMs = current.endMs - current.startMs; current.frameCount = nextFrameCount; current.confidenceAvg = ((current.confidenceAvg * (nextFrameCount - 1)) + frame.confidence) / nextFrameCount; current.score = ((current.score * (nextFrameCount - 1)) + frame.score.overall) / nextFrameCount; current.peakScore = 
// (peak score update.) startSessionRecorder records the composite canvas via captureStream
// (24 fps mobile / 30 fps desktop) into a MediaRecorder whose bitrate comes from
// getLiveAnalysisBitrate; it bails out gracefully when MediaRecorder or captureStream is
// unavailable. Each recorder instance owns one rotated segment (sequence counter + start time).
Math.max(current.peakScore, frame.score.overall); current.issueSummary = Array.from(new Set([...current.issueSummary, ...frame.feedback])).slice(0, 4); current.keyFrames = [...current.keyFrames.slice(-3), elapsedMs]; return; } flushSegment(current); currentSegmentRef.current = createSegment(frame.action, elapsedMs, frame); }, [flushSegment]); const startSessionRecorder = useCallback(function startSessionRecorderInternal() { if (typeof MediaRecorder === "undefined") { recorderRef.current = null; recorderStopPromiseRef.current = Promise.resolve(); return; } const compositeCanvas = ensureCompositeCanvas(); if (!compositeCanvas || typeof compositeCanvas.captureStream !== "function") { recorderRef.current = null; recorderStopPromiseRef.current = Promise.resolve(); return; } renderCompositeFrame(); recorderChunksRef.current = []; const mimeType = pickRecorderMimeType(); recorderMimeTypeRef.current = mimeType; if (!recorderStreamRef.current) { recorderStreamRef.current = compositeCanvas.captureStream(mobile ? 24 : 30); } const recorder = new MediaRecorder(recorderStreamRef.current, { mimeType, videoBitsPerSecond: getLiveAnalysisBitrate(qualityPreset, mobile), }); recorderRef.current = recorder; const sequence = recorderSequenceRef.current + 1; recorderSequenceRef.current = sequence; recorderSegmentStartedAtRef.current = Date.now(); recorder.ondataavailable = (event) => { if (event.data && event.data.size > 0) { recorderChunksRef.current.push(event.data); } }; recorderStopPromiseRef.current = new Promise((resolve) => { recorder.onstop = () => { const durationMs = Math.max(0, Date.now() - recorderSegmentStartedAtRef.current); const type = recorderMimeTypeRef.current.includes("mp4") ? "video/mp4" : "video/webm"; const blob = recorderChunksRef.current.length > 0 ? 
// onstop: assemble the buffered chunks into a Blob, queue the archived-video upload, and
// auto-restart the recorder (self-recursion) while analysis is still running — otherwise stop
// the capture stream. A timeout stops the recorder after ANALYSIS_RECORDING_SEGMENT_MS to
// rotate segments. persistSession (starting here) then flushes the trailing segment and
// aggregates per-action durations across all finalized segments.
new Blob(recorderChunksRef.current, { type }) : null; recorderChunksRef.current = []; recorderRef.current = null; recorderStopPromiseRef.current = null; if (blob && blob.size > 0 && durationMs > 0) { void queueArchivedVideoUpload(blob, sequence, durationMs); } if (analyzingRef.current) { startSessionRecorderInternal(); } else if (recorderStreamRef.current) { recorderStreamRef.current.getTracks().forEach((track) => track.stop()); recorderStreamRef.current = null; } resolve(); }; }); recorder.start(); recorderRotateTimerRef.current = window.setTimeout(() => { if (recorder.state === "recording") { recorder.stop(); } }, ANALYSIS_RECORDING_SEGMENT_MS); }, [ensureCompositeCanvas, mobile, qualityPreset, queueArchivedVideoUpload, renderCompositeFrame]); const persistSession = useCallback(async () => { const endedAt = Date.now(); const sessionDuration = Math.max(0, endedAt - sessionStartedAtRef.current); const currentSegment = currentSegmentRef.current; if (currentSegment) { currentSegment.endMs = sessionDuration; currentSegment.durationMs = currentSegment.endMs - currentSegment.startMs; flushSegment(currentSegment); currentSegmentRef.current = null; } const scoreSamples = frameSamplesRef.current; const finalSegments = [...segmentsRef.current]; const segmentDurations = finalSegments.reduce>((acc, segment) => { acc[segment.actionType] = (acc[segment.actionType] || 0) + segment.durationMs; return acc; }, { forehand: 0, backhand: 0, serve: 0, volley: 0, overhead: 0, slice: 0, lob: 0, unknown: 0, }); const dominantAction = (Object.entries(segmentDurations).sort((a, b) => b[1] - a[1])[0]?.[0] || "unknown") as ActionType; const effectiveSegments = finalSegments.filter((segment) => !segment.isUnknown); const unknownCount = finalSegments.length - effectiveSegments.length; const averageScore = scoreSamples.length > 0 ? scoreSamples.reduce((sum, item) => sum + item.overall, 0) / scoreSamples.length : liveScore?.overall || 0; const averagePosture = scoreSamples.length > 0 ? 
// persistSession (continued): per-dimension averages (posture/balance/technique/footwork/
// consistency) fall back to the last live score when no frame samples exist; session feedback
// dedupes issue summaries (top 5); raw-action volatility is averaged across samples. The
// recorder is stopped (awaiting its onstop) before archived videos are sorted by sequence.
scoreSamples.reduce((sum, item) => sum + item.posture, 0) / scoreSamples.length : liveScore?.posture || 0; const averageBalance = scoreSamples.length > 0 ? scoreSamples.reduce((sum, item) => sum + item.balance, 0) / scoreSamples.length : liveScore?.balance || 0; const averageTechnique = scoreSamples.length > 0 ? scoreSamples.reduce((sum, item) => sum + item.technique, 0) / scoreSamples.length : liveScore?.technique || 0; const averageFootwork = scoreSamples.length > 0 ? scoreSamples.reduce((sum, item) => sum + item.footwork, 0) / scoreSamples.length : liveScore?.footwork || 0; const averageConsistency = scoreSamples.length > 0 ? scoreSamples.reduce((sum, item) => sum + item.consistency, 0) / scoreSamples.length : liveScore?.consistency || 0; const sessionFeedback = Array.from(new Set(finalSegments.flatMap((segment) => segment.issueSummary))).slice(0, 5); const averageRawVolatility = volatilitySamplesRef.current.length > 0 ? volatilitySamplesRef.current.reduce((sum, value) => sum + value, 0) / volatilitySamplesRef.current.length : 0; const avatarState = avatarRenderRef.current; await stopSessionRecorder(); const archivedVideos = [...archivedVideosRef.current].sort((a, b) => a.sequence - b.sequence); const primaryArchivedVideo = archivedVideos[0] ?? 
// Saves the session via saveLiveSessionMutation: rounded per-dimension scores, segment counts,
// and a metrics object (per-action durations, average confidence, stability window size, action
// switch count, raw volatility, avatar + auto-recording settings, archived video list). The
// first archived clip (lowest sequence) is used as the primary videoId/videoUrl.
null; await saveLiveSessionMutation.mutateAsync({ title: `实时分析 ${ACTION_META[dominantAction].label}`, sessionMode, startedAt: sessionStartedAtRef.current, endedAt, durationMs: sessionDuration, dominantAction, overallScore: Math.round(averageScore), postureScore: Math.round(averagePosture), balanceScore: Math.round(averageBalance), techniqueScore: Math.round(averageTechnique), footworkScore: Math.round(averageFootwork), consistencyScore: Math.round(averageConsistency), totalActionCount: effectiveSegments.length, effectiveSegments: effectiveSegments.length, totalSegments: finalSegments.length, unknownSegments: unknownCount, feedback: sessionFeedback, metrics: { actionDurations: segmentDurations, stabilizedActionDurations: segmentDurations, averageConfidence: Math.round((scoreSamples.reduce((sum, item) => sum + item.confidence, 0) / Math.max(1, scoreSamples.length)) * 10) / 10, sampleCount: scoreSamples.length, stableWindowFrames: ACTION_WINDOW_FRAMES, actionSwitchCount: stableActionStateRef.current.switchCount, rawActionVolatility: Number(averageRawVolatility.toFixed(4)), avatarEnabled: avatarState.enabled, avatarKey: avatarState.enabled ? 
// (avatarKey only persisted while the avatar overlay is enabled.) Segments are serialized with
// rounded scores and 4-decimal confidences. startAnalysis (starting here) re-checks the runtime
// role, validates camera state, acquires the runtime session, and bails out into viewer mode
// when another device already holds the runtime.
avatarState.avatarKey : null, autoRecordingEnabled: true, autoRecordingSegmentMs: ANALYSIS_RECORDING_SEGMENT_MS, archivedVideos, mobile, }, segments: finalSegments.map((segment) => ({ actionType: segment.actionType, isUnknown: segment.isUnknown, startMs: segment.startMs, endMs: segment.endMs, durationMs: segment.durationMs, confidenceAvg: Number(segment.confidenceAvg.toFixed(4)), score: Math.round(segment.score), peakScore: Math.round(segment.peakScore), frameCount: segment.frameCount, issueSummary: segment.issueSummary, keyFrames: segment.keyFrames, clipLabel: segment.clipLabel, })), videoId: primaryArchivedVideo?.videoId, videoUrl: primaryArchivedVideo?.url, }); }, [flushSegment, liveScore, mobile, saveLiveSessionMutation, sessionMode, stopSessionRecorder]); const startAnalysis = useCallback(async () => { const latest = await refreshRuntimeState(); if (!cameraActive || !videoRef.current || !streamRef.current) { toast.error("请先启动摄像头"); return; } if (analyzingRef.current || saving) return; if (latest.role === "viewer") { toast.error("当前设备处于同步观看模式,不能重复开启分析"); return; } try { const title = `实时分析 ${ACTION_META[currentActionRef.current].label}`; const runtime = await runtimeAcquireMutation.mutateAsync({ title, sessionMode, }); if (runtime.role === "viewer") { runtimeIdRef.current = null; toast.error("同一账号已有其他设备正在实时分析,本机已切换为同步观看模式"); await runtimeQuery.refetch(); return; } runtimeIdRef.current = runtime.runtimeSession?.id ?? 
// Resets all per-session refs/state, then starts the recorder, broadcast session, and heartbeat
// loop. MediaPipe Pose is loaded dynamically (a window.__TEST_MEDIAPIPE_FACTORY__ hook lets
// tests stub the import) with model files served from the jsdelivr CDN. onResults sizes the
// overlay canvas to the video, draws the skeleton/avatar overlay, renders the composite frame,
// and feeds landmarks through analyzePoseFrame + stabilizeActionStream.
null; setViewerError(""); analyzingRef.current = true; setAnalyzing(true); setSaving(false); setLeaveStatus("analyzing"); setSegments([]); segmentsRef.current = []; currentSegmentRef.current = null; trackingRef.current = {}; actionHistoryRef.current = []; stableActionStateRef.current = createStableActionState(); frameSamplesRef.current = []; volatilitySamplesRef.current = []; archivedVideosRef.current = []; recorderSequenceRef.current = 0; setArchivedVideoCount(0); sessionStartedAtRef.current = Date.now(); setCurrentAction("unknown"); setRawAction("unknown"); setLiveScore(null); setFeedback([]); setStabilityMeta(createEmptyStabilizedActionMeta()); setDurationMs(0); startSessionRecorder(); const mediaSessionId = await startBroadcastSession(); startRuntimeHeartbeatLoop(mediaSessionId); const testFactory = ( window as typeof window & { __TEST_MEDIAPIPE_FACTORY__?: () => Promise<{ Pose: any }>; } ).__TEST_MEDIAPIPE_FACTORY__; const { Pose } = testFactory ? await testFactory() : await import("@mediapipe/pose"); const pose = new Pose({ locateFile: (file: string) => `https://cdn.jsdelivr.net/npm/@mediapipe/pose/${file}`, }); poseRef.current = pose; pose.setOptions({ modelComplexity: 1, smoothLandmarks: true, enableSegmentation: false, minDetectionConfidence: 0.5, minTrackingConfidence: 0.5, }); pose.onResults((results: { poseLandmarks?: Point[] }) => { const video = videoRef.current; const canvas = canvasRef.current; if (!video || !canvas) return; if (video.videoWidth > 0 && video.videoHeight > 0) { canvas.width = video.videoWidth; canvas.height = video.videoHeight; } drawLiveCameraOverlay(canvas, results.poseLandmarks, avatarRenderRef.current); renderCompositeFrame(results.poseLandmarks); if (!results.poseLandmarks) return; const frameTimestamp = performance.now(); const analyzed = analyzePoseFrame(results.poseLandmarks, trackingRef.current, frameTimestamp); const nextStabilityMeta = stabilizeActionStream( { action: analyzed.action, confidence: analyzed.confidence, 
// Builds user-facing stability feedback (pending-switch confirmation, 24-frame warm-up window,
// raw-vs-stable action divergence), composes the stabilized frame (stable action + confidence,
// trimmed to 3 feedback lines), appends it to the current segment, records score/volatility
// samples, and pushes everything into live UI state. processFrame pumps video frames into the
// pose model on requestAnimationFrame (readyState gate bypassed under the test factory). On any
// startup failure, session state is rolled back and the runtime released with phase "failed".
timestamp: frameTimestamp, }, actionHistoryRef.current, stableActionStateRef.current, ); const elapsedMs = Date.now() - sessionStartedAtRef.current; const stabilityLabel = nextStabilityMeta.pendingAction ?? nextStabilityMeta.windowAction; const stabilityFeedback = nextStabilityMeta.pending && stabilityLabel !== "unknown" ? [`正在确认 ${ACTION_META[stabilityLabel].label},需要持续约 0.7 秒后再切换。`, ...analyzed.feedback] : nextStabilityMeta.stableAction === "unknown" ? ["系统正在积累 24 帧动作窗口,当前先作为观察片段处理。", ...analyzed.feedback] : analyzed.action !== nextStabilityMeta.stableAction ? [`原始候选为 ${ACTION_META[analyzed.action].label},当前保持 ${ACTION_META[nextStabilityMeta.stableAction].label}。`, ...analyzed.feedback] : analyzed.feedback; const displayedScore: PoseScore = { ...analyzed.score, confidence: Math.round(nextStabilityMeta.stableConfidence * 100), }; const stabilizedFrame: AnalyzedFrame = { ...analyzed, action: nextStabilityMeta.stableAction, confidence: nextStabilityMeta.stableConfidence, score: displayedScore, feedback: stabilityFeedback.slice(0, 3), }; appendFrameToSegment(stabilizedFrame, elapsedMs); frameSamplesRef.current.push(displayedScore); volatilitySamplesRef.current.push(nextStabilityMeta.rawVolatility); setLiveScore(displayedScore); setCurrentAction(nextStabilityMeta.stableAction); setRawAction(analyzed.action); setStabilityMeta(nextStabilityMeta); setFeedback(stabilizedFrame.feedback); setDurationMs(elapsedMs); }); const processFrame = async () => { if (!analyzingRef.current || !videoRef.current || !poseRef.current) return; if (videoRef.current.readyState >= 2 || testFactory) { await poseRef.current.send({ image: videoRef.current }); } animationRef.current = requestAnimationFrame(processFrame); }; toast.success("动作识别已启动"); processFrame(); } catch (error: any) { analyzingRef.current = false; setAnalyzing(false); setLeaveStatus("idle"); await stopSessionRecorder(); closeBroadcastPeer(); await releaseRuntime("failed"); toast.error(`实时分析启动失败: ${error?.message || "未知错误"}`); } 
// (Dependency list for startAnalysis.) stopAnalysis persists the session, refetches the history
// query, toasts the archived-clip count, and always releases the runtime with phase "safe" or
// "failed". A beforeunload guard warns the user while analyzing/saving. handleSetupComplete
// closes the setup guide and starts the camera (unless this device became a viewer meanwhile).
}, [ appendFrameToSegment, cameraActive, closeBroadcastPeer, refreshRuntimeState, releaseRuntime, runtimeAcquireMutation, runtimeQuery, saving, sessionMode, startBroadcastSession, startRuntimeHeartbeatLoop, startSessionRecorder, stopSessionRecorder, ]); const stopAnalysis = useCallback(async () => { if (!analyzingRef.current) return; analyzingRef.current = false; setAnalyzing(false); setSaving(true); setLeaveStatus("saving"); let releasePhase: RuntimeSnapshot["phase"] = "safe"; if (animationRef.current) { cancelAnimationFrame(animationRef.current); animationRef.current = 0; } try { if (poseRef.current?.close) { poseRef.current.close(); poseRef.current = null; } await persistSession(); setLeaveStatus("safe"); releasePhase = "safe"; toast.success(`实时分析已保存,并同步写入训练记录${archivedVideosRef.current.length > 0 ? `;已归档 ${archivedVideosRef.current.length} 段分析录像` : ""}`); await liveSessionsQuery.refetch(); } catch (error: any) { setLeaveStatus("failed"); releasePhase = "failed"; toast.error(`保存实时分析失败: ${error?.message || "未知错误"}`); } finally { closeBroadcastPeer(); await releaseRuntime(releasePhase); setSaving(false); } }, [closeBroadcastPeer, liveSessionsQuery, persistSession, releaseRuntime]); useEffect(() => { if (!analyzing && !saving) { return; } const handleBeforeUnload = (event: BeforeUnloadEvent) => { event.preventDefault(); event.returnValue = "实时分析数据仍在处理中,请先等待保存完成。"; return event.returnValue; }; window.addEventListener("beforeunload", handleBeforeUnload); return () => window.removeEventListener("beforeunload", handleBeforeUnload); }, [analyzing, saving]); const handleSetupComplete = useCallback(async () => { const latest = await refreshRuntimeState(); if (latest.role === "viewer") { setShowSetupGuide(false); toast.error("当前账号已有其他设备正在实时分析,请切换到同步观看模式"); return; } setShowSetupGuide(false); await startCamera(facing, zoomTargetRef.current, qualityPreset); }, [facing, qualityPreset, refreshRuntimeState, startCamera]); const displayLeaveStatus = runtimeRole === "viewer" ? 
// Viewer/owner display selection: while in viewer mode, every displayed value (phase, action,
// raw action, score, feedback, duration, stability meta, session mode, quality preset, facing,
// device kind, avatar) is sourced from the synced runtime snapshot with fallbacks; the owner
// path uses local state directly. Sync delay labels are only computed for viewers.
(runtimeSnapshot?.phase ?? "idle") : leaveStatus; const displayAction = runtimeRole === "viewer" ? (runtimeSnapshot?.currentAction ?? "unknown") : currentAction; const displayRawAction = runtimeRole === "viewer" ? (runtimeSnapshot?.rawAction ?? "unknown") : rawAction; const displayScore = runtimeRole === "viewer" ? (runtimeSnapshot?.liveScore ?? null) : liveScore; const displayFeedback = runtimeRole === "viewer" ? (runtimeSnapshot?.feedback ?? []) : feedback; const displayDurationMs = runtimeRole === "viewer" ? (runtimeSnapshot?.durationMs ?? 0) : durationMs; const displayStabilityMeta = runtimeRole === "viewer" ? { ...createEmptyStabilizedActionMeta(), ...runtimeSnapshot?.stabilityMeta, } : stabilityMeta; const displaySessionMode = runtimeRole === "viewer" ? (runtimeSnapshot?.sessionMode ?? runtimeSession?.sessionMode ?? sessionMode) : sessionMode; const displayQualityPreset = runtimeRole === "viewer" ? (runtimeSnapshot?.qualityPreset ?? qualityPreset) : qualityPreset; const displayFacing = runtimeRole === "viewer" ? (runtimeSnapshot?.facingMode ?? facing) : facing; const displayDeviceKind = runtimeRole === "viewer" ? (runtimeSnapshot?.deviceKind ?? (mobile ? "mobile" : "desktop")) : (mobile ? "mobile" : "desktop"); const displayAvatarEnabled = runtimeRole === "viewer" ? Boolean(runtimeSnapshot?.avatarEnabled) : avatarEnabled; const displayAvatarKey = runtimeRole === "viewer" ? ((runtimeSnapshot?.avatarKey as AvatarKey | undefined) ?? resolvedAvatarKey) : resolvedAvatarKey; const displayAvatarPreset = getAvatarPreset(displayAvatarKey); const displayAvatarLabel = runtimeRole === "viewer" ? (runtimeSnapshot?.avatarLabel ?? displayAvatarPreset?.label ?? "猩猩") : (displayAvatarPreset?.label || "猩猩"); const runtimeSyncDelayMs = runtimeRole === "viewer" ? getRuntimeSyncDelayMs(runtimeSession?.lastHeartbeatAt) : null; const runtimeSyncLabel = runtimeRole === "viewer" ? formatRuntimeSyncDelay(runtimeSyncDelayMs) : ""; const displayRuntimeTitle = runtimeRole === "viewer" ? 
// displayRuntimeTitle falls back through snapshot/runtime titles; viewerFrameSrc polls the
// relayed live-frame JPEG with a cache-busting ts query param. renderPrimaryActions renders the
// main camera/analysis controls, with a compact "rail" variant for the mobile immersive overlay.
// NOTE(review): from here down the JSX element tags appear stripped by the extraction (e.g.
// bare `( )` returns and dangling text fragments) — the markup below is incomplete and should
// not be treated as compilable; restore from the original file before editing.
(normalizedSnapshotTitle || normalizedRuntimeTitle || "其他设备实时分析") : (normalizedRuntimeTitle || `实时分析 ${ACTION_META[currentAction].label}`); const viewerFrameSrc = runtimeRole === "viewer" && runtimeSession?.mediaSessionId ? getMediaAssetUrl(`/assets/sessions/${runtimeSession.mediaSessionId}/live-frame.jpg?ts=${viewerFrameVersion || runtimeSnapshot?.updatedAt || Date.now()}`) : ""; const hasVideoFeed = cameraActive || viewerConnected; const heroAction = ACTION_META[displayAction]; const rawActionMeta = ACTION_META[displayRawAction]; const pendingActionMeta = displayStabilityMeta.pendingAction ? ACTION_META[displayStabilityMeta.pendingAction] : null; const resolvedAvatarPreset = getAvatarPreset(resolvedAvatarKey); const resolvedAvatarLabel = resolvedAvatarPreset?.label || "猩猩"; const animalAvatarPresets = AVATAR_PRESETS.filter((preset) => preset.category === "animal"); const fullBodyAvatarPresets = AVATAR_PRESETS.filter((preset) => preset.category === "full-body-3d"); const previewTitle = runtimeRole === "viewer" ? viewerConnected ? `${runtimeSyncLabel} · 服务端同步中` : "正在获取服务端同步画面" : analyzing ? displayStabilityMeta.pending && pendingActionMeta ? `${pendingActionMeta.label} 切换确认中` : `${heroAction.label} 识别中` : cameraActive ? "准备开始实时分析" : "摄像头待启动"; const viewerModeLabel = normalizedRuntimeTitle || "其他设备正在实时分析"; const renderPrimaryActions = (rail = false) => { const buttonClass = rail ? "h-14 w-14 rounded-2xl border border-white/10 bg-white/10 text-white hover:bg-white/20" : "h-11 rounded-2xl px-4"; if (runtimeRole === "viewer") { return ( <> {!rail ? ( ) : null} ); } if (!cameraActive) { return ( ); } return ( <> {hasMultipleCameras ? ( ) : null} {!analyzing ? ( ) : ( )} {!rail ? ( ) : null} ); }; const renderZoomOverlay = () => (
// --- Render tree, part 1 (JSX element tags stripped by extraction; only text/expression
// --- fragments remain): zoom overlay label, avatar showcase card, setup-guide dialog, leave-
// --- status alerts, viewer-mode banner, and the hero action/stat readouts. ---
焦距
{zoomState.supported ? `${zoomState.current.toFixed(1)}x` : "自动"}
// renderAvatarShowcaseCard: one selectable card per AvatarPreset, highlighting the active key
// and linking the VRM model source when present.
); const renderAvatarShowcaseCard = (preset: AvatarPreset) => { const active = resolvedAvatarKey === preset.key; return (
VRM 示例源
{preset.modelUrl ? ( 查看模型 ) : null}
); }; return (
// Setup-guide dialog: ordered calibration steps (SETUP_STEPS) with completed-step checkmarks
// and prev/next/finish controls driven by setupStep.
实时分析校准 按顺序确认拍摄位置,后续动作会自动识别并按区间保存。
{SETUP_STEPS.map((step, index) => (
{index < setupStep ? : step.icon}
{step.title}
{step.desc}
))}
{setupStep > 0 ? ( ) : null} {setupStep < SETUP_STEPS.length - 1 ? ( ) : ( )}
// Leave-status alerts (analyzing / saving / safe / failed) with owner vs viewer wording, plus
// the viewer-mode banner and viewer connection error alert.
{displayLeaveStatus === "analyzing" ? ( 分析进行中 {runtimeRole === "viewer" ? "持有端仍在采集和识别动作数据,本页会按会话心跳持续同步视频与动作信息。" : "当前仍在采集和识别动作数据,请先不要关闭浏览器或切走页面。"} ) : null} {displayLeaveStatus === "saving" ? ( 正在保存分析结果 {runtimeRole === "viewer" ? "持有端正在提交录像、动作区间和训练记录;本页会同步保存状态,可以稍后再刷新查看。" : "实时分析录像、动作区间和训练记录正在提交,请暂时停留当前页面;保存完成后会提示你可以离开。"} ) : null} {displayLeaveStatus === "safe" ? ( 分析结果已保存 {runtimeRole === "viewer" ? "持有端分析数据已经提交完成;本页显示的是同步结果,你现在可以离开,不会影响已保存的数据。" : "当前分析数据已经提交完成。现在可以关闭浏览器、返回上一页,或切换到其他页面,不会影响已保存的数据。"} ) : null} {displayLeaveStatus === "failed" ? ( 分析保存失败 {runtimeRole === "viewer" ? "持有端当前会话还没有完整写入,本页会继续显示最后一次同步状态。" : "当前会话还没有完整写入,请先留在本页并重新尝试结束分析或检查网络状态。"} ) : null} {runtimeRole === "viewer" ? ( 同步观看模式 {viewerModeLabel}。当前设备不会占用本地摄像头,也不能再次开启分析;同步画面会通过 media 服务中转,动作、评分与会话信息会按心跳自动同步,允许 1 秒级延迟。 ) : null} {viewerError ? ( 同步画面连接异常 {viewerError} ) : null}
// Feature badges + session title/description (owner vs viewer copy).
24 帧稳定识别 60 秒自动归档 {displayAvatarEnabled ? `虚拟形象 ${displayAvatarLabel}` : "骨架叠加"} {displaySessionMode === "practice" ? "练习会话" : "训练 PK"} {runtimeRole === "viewer" ? ( {runtimeSyncLabel} ) : null}

{displayRuntimeTitle}

{runtimeRole === "viewer" ? `当前正在通过服务端中转同步 ${displayDeviceKind === "mobile" ? "移动端" : "桌面端"} ${displayFacing === "environment" ? "后置/主摄视角" : "前置视角"} 画面。同步画面、动作、评分、最近区间、虚拟形象和会话状态会自动跟随持有端刷新,允许少量网络延迟。` : "摄像头启动后会持续识别正手、反手、发球、截击、高压、切削、挑高球与未知动作。系统会用 24 帧时间窗口统一动作,再把稳定动作写入片段、训练记录与评分;分析过程中会自动录制“视频画面 + 骨架/关键点叠层”的合成回放,并按 60 秒分段归档进视频库。开启虚拟形象后,画面中的人体可切换为 10 个轻量动物替身,或 4 个免费的全身 3D Avatar 示例覆盖显示。"}

// Hero stat tiles: stable action, raw candidate, elapsed duration, stability window progress,
// followed by the primary action buttons.
稳定动作
{heroAction.label}
原始候选
{rawActionMeta.label}
识别时长
{formatDuration(displayDurationMs)}
稳定窗口
{displayStabilityMeta.windowFrames}/{ACTION_WINDOW_FRAMES}
{renderPrimaryActions()}
// --- Render tree, part 2 (JSX tags likewise stripped): viewer info card, auto-recording info,
// --- avatar settings, quality presets, zoom slider, segment list, immersive mobile rail. ---
// Viewer-mode info card: synced owner title, device kind, facing, quality preset, avatar state,
// and last-heartbeat timestamp.
{runtimeRole === "viewer" ? (
同步中的主端信息
{displayRuntimeTitle}
设备端:{displayDeviceKind === "mobile" ? "移动端" : "桌面端"}
拍摄视角:{displayFacing === "environment" ? "后置 / 主摄" : "前置"}
画质模式:{CAMERA_QUALITY_PRESETS[displayQualityPreset].label}
虚拟形象:{displayAvatarEnabled ? displayAvatarLabel : "未开启"}
最近同步
{runtimeSyncLabel}
{runtimeSession?.lastHeartbeatAt ? formatDateTimeShanghai(runtimeSession.lastHeartbeatAt) : "等待首个心跳"}
) : null}
// Auto-recording explainer: 60-second rotation, archived count (snapshot-sourced for viewers),
// and the video/data decoupling note.
自动分析录像
每 60 秒自动切段
录到的是合成画布,包含原视频、骨架线、关键点和当前虚拟形象覆盖效果。
已归档段数
{runtimeRole === "viewer" ? (runtimeSnapshot?.archivedVideoCount ?? 0) : archivedVideoCount}
归档完成后会自动进入视频库,标签为“实时分析”,后续可单独删除,不影响分析数据。
分析数据保留
视频与数据解耦
即使用户在视频库删除录像,实时分析片段、评分和训练记录仍会继续保留。
// Avatar settings: toggle + alias input (disabled in viewer mode) and the free full-body 3D
// showcase grid rendered via renderAvatarShowcaseCard.
虚拟形象替换
开启后实时画面可使用 10 个免费动物替身,或 4 个免费的全身 3D Avatar 示例覆盖主体。该设置不会改变动作识别结果,但归档录像会保留当前叠加效果。
当前映射:{resolvedAvatarLabel} {avatarPrompt.trim() ? ` · 输入 ${avatarPrompt.trim()}` : " · 可输入猩猩、狐狸、熊猫、兔子,或 BeachKing、Juanita 等别名自动映射"}
形象预设
扩展别名
setAvatarPrompt(event.target.value)} placeholder="例如 狐狸 / panda coach / BeachKing / Juanita" className="h-12 rounded-2xl border-border/60" disabled={runtimeRole === "viewer"} />
免费 3D 全身范例
这 4 个示例来自 Open Source Avatars 的 CC0 集合,当前已处理成轻量透明素材用于实时覆盖;后续若切换到 VRM/three-vrm,可继续沿用同一批模型源。
CC0 · Open Source Avatars
{fullBodyAvatarPresets.map(renderAvatarShowcaseCard)}
// Capture settings: selectable quality presets (disabled while analyzing/saving/viewer) and the
// zoom slider, only shown when the active track supports zoom.
拍摄与流量设置 默认使用节省流量模式;动作切换会经过 24 帧稳定窗口确认后再入库。
{Object.entries(CAMERA_QUALITY_PRESETS).map(([key, preset]) => { const active = qualityPreset === key; const disabled = analyzing || saving || runtimeRole === "viewer"; return ( ); })}
当前采集规格
{CAMERA_QUALITY_PRESETS[qualityPreset].subtitle} · 分析录制码率会随模式同步切换,默认优先节省流量。
镜头焦距 / 放大缩小
{zoomState.supported ? `当前 ${zoomState.current.toFixed(1)}x,可在分析过程中直接微调取景;焦点模式为 ${zoomState.focusMode}。` : "当前设备或浏览器未开放镜头缩放能力,仍会保持自动对焦。Chrome 安卓和部分后置摄像头通常支持此能力。"}
{zoomState.supported ? (
{ if (typeof value[0] === "number") { void updateZoom(value[0]); } }} />
{zoomState.min.toFixed(1)}x 建议 1.0x-1.5x 保留完整挥拍 {zoomState.max.toFixed(1)}x
) : null}
// Segment list: per-action stats chips, empty-state copy (owner vs viewer), and one card per
// filtered segment with duration, key-frame count, issues, score, and confidence.
连续动作区间 只保留通过稳定窗口确认后的动作区间,单段最长 10 秒,方便后续查看和回放。 {actionStats.length > 0 ? (
{actionStats.map((item) => ( ))}
) : null} {displayFilteredSegments.length === 0 ? (
{runtimeRole === "viewer" ? "当前会同步最近识别到的动作片段,持有端开始分析后会自动刷新。" : "开始分析后,这里会按时间区间显示识别出的动作片段。"}
) : ( displayFilteredSegments.map((segment) => { const meta = ACTION_META[segment.actionType]; return (
{meta.label} {formatDuration(segment.startMs)} - {formatDuration(segment.endMs)} 时长 {formatDuration(segment.durationMs)} 关键帧 {segment.keyFrames.length}
{segment.issueSummary.join(" · ") || "当前片段节奏稳定"}
片段得分 {Math.round(segment.score)}
置信度 {Math.round(segment.confidenceAvg * 100)}%
); }) )}
// Mobile immersive overlay: compact vertical rail of primary actions; then the component's
// closing return/brace.
{mobile && immersivePreview ? (
{renderPrimaryActions(true)}
) : null}
); }