比较提交
13 次代码提交
13e59b8e8a
...
main
| 作者 | SHA1 | 提交日期 |
|---|---|---|
|  | 63dbfd2787 |  |
|  | 06b9701e03 |  |
|  | 8e9e4915e2 |  |
|  | 634a4704c7 |  |
|  | bb46d26c0e |  |
|  | bacd712dbc |  |
|  | 78a7c755e3 |  |
|  | a211562860 |  |
|  | 09b1b95e2c |  |
|  | 922a9fb63f |  |
|  | 31bead3452 |  |
|  | a5103685fb |  |
|  | f9db6ef590 |
@@ -9,6 +9,13 @@ export type CameraZoomState = {
|
|||||||
focusMode: string;
|
focusMode: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export type CameraRequestResult = {
|
||||||
|
stream: MediaStream;
|
||||||
|
appliedFacingMode: "user" | "environment";
|
||||||
|
audioEnabled: boolean;
|
||||||
|
usedFallback: boolean;
|
||||||
|
};
|
||||||
|
|
||||||
type NumericRange = {
|
type NumericRange = {
|
||||||
min: number;
|
min: number;
|
||||||
max: number;
|
max: number;
|
||||||
@@ -66,6 +73,98 @@ export function getCameraVideoConstraints(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function normalizeVideoConstraintCandidate(candidate: MediaTrackConstraints | true) {
|
||||||
|
if (candidate === true) {
|
||||||
|
return { label: "camera-any", video: true as const };
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
label: JSON.stringify(candidate),
|
||||||
|
video: candidate,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function createFallbackVideoCandidates(
|
||||||
|
facingMode: "user" | "environment",
|
||||||
|
isMobile: boolean,
|
||||||
|
preset: CameraQualityPreset,
|
||||||
|
) {
|
||||||
|
const base = getCameraVideoConstraints(facingMode, isMobile, preset);
|
||||||
|
const alternateFacing = facingMode === "environment" ? "user" : "environment";
|
||||||
|
const lowRes = {
|
||||||
|
facingMode,
|
||||||
|
width: { ideal: isMobile ? 640 : 960 },
|
||||||
|
height: { ideal: isMobile ? 360 : 540 },
|
||||||
|
} satisfies MediaTrackConstraints;
|
||||||
|
const lowResAlternate = {
|
||||||
|
facingMode: alternateFacing,
|
||||||
|
width: { ideal: isMobile ? 640 : 960 },
|
||||||
|
height: { ideal: isMobile ? 360 : 540 },
|
||||||
|
} satisfies MediaTrackConstraints;
|
||||||
|
const anyCamera = {
|
||||||
|
width: { ideal: isMobile ? 640 : 960 },
|
||||||
|
height: { ideal: isMobile ? 360 : 540 },
|
||||||
|
} satisfies MediaTrackConstraints;
|
||||||
|
|
||||||
|
const candidates = [
|
||||||
|
normalizeVideoConstraintCandidate(base),
|
||||||
|
normalizeVideoConstraintCandidate({
|
||||||
|
...base,
|
||||||
|
frameRate: undefined,
|
||||||
|
}),
|
||||||
|
normalizeVideoConstraintCandidate(lowRes),
|
||||||
|
normalizeVideoConstraintCandidate(lowResAlternate),
|
||||||
|
normalizeVideoConstraintCandidate(anyCamera),
|
||||||
|
normalizeVideoConstraintCandidate(true),
|
||||||
|
];
|
||||||
|
|
||||||
|
const deduped = new Map<string, { video: MediaTrackConstraints | true }>();
|
||||||
|
candidates.forEach((candidate) => {
|
||||||
|
if (!deduped.has(candidate.label)) {
|
||||||
|
deduped.set(candidate.label, { video: candidate.video });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return Array.from(deduped.values());
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function requestCameraStream(options: {
|
||||||
|
facingMode: "user" | "environment";
|
||||||
|
isMobile: boolean;
|
||||||
|
preset: CameraQualityPreset;
|
||||||
|
audio?: false | MediaTrackConstraints;
|
||||||
|
}) {
|
||||||
|
const videoCandidates = createFallbackVideoCandidates(options.facingMode, options.isMobile, options.preset);
|
||||||
|
const audioCandidates = options.audio ? [options.audio, false] : [false];
|
||||||
|
let lastError: unknown = null;
|
||||||
|
|
||||||
|
for (const audio of audioCandidates) {
|
||||||
|
for (let index = 0; index < videoCandidates.length; index += 1) {
|
||||||
|
const video = videoCandidates[index]?.video ?? true;
|
||||||
|
try {
|
||||||
|
const stream = await navigator.mediaDevices.getUserMedia({ video, audio });
|
||||||
|
const videoTrack = stream.getVideoTracks()[0] || null;
|
||||||
|
const settings = (
|
||||||
|
videoTrack && typeof (videoTrack as MediaStreamTrack & { getSettings?: () => unknown }).getSettings === "function"
|
||||||
|
? (videoTrack as MediaStreamTrack & { getSettings: () => unknown }).getSettings()
|
||||||
|
: {}
|
||||||
|
) as Record<string, unknown>;
|
||||||
|
const appliedFacingMode = settings.facingMode === "user" ? "user" : settings.facingMode === "environment" ? "environment" : options.facingMode;
|
||||||
|
|
||||||
|
return {
|
||||||
|
stream,
|
||||||
|
appliedFacingMode,
|
||||||
|
audioEnabled: stream.getAudioTracks().length > 0,
|
||||||
|
usedFallback: index > 0 || audio === false && Boolean(options.audio),
|
||||||
|
} satisfies CameraRequestResult;
|
||||||
|
} catch (error) {
|
||||||
|
lastError = error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw lastError instanceof Error ? lastError : new Error("无法访问摄像头");
|
||||||
|
}
|
||||||
|
|
||||||
export function getLiveAnalysisBitrate(preset: CameraQualityPreset, isMobile: boolean) {
|
export function getLiveAnalysisBitrate(preset: CameraQualityPreset, isMobile: boolean) {
|
||||||
switch (preset) {
|
switch (preset) {
|
||||||
case "economy":
|
case "economy":
|
||||||
|
|||||||
@@ -8,6 +8,113 @@ export type ChangeLogEntry = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
|
export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
|
||||||
|
{
|
||||||
|
version: "2026.03.17-live-camera-preview-recovery",
|
||||||
|
releaseDate: "2026-03-17",
|
||||||
|
repoVersion: "06b9701",
|
||||||
|
summary: "修复实时分析页标题乱码、同步观看残留状态导致的黑屏,以及切回本机摄像头后预览无法恢复的问题。",
|
||||||
|
features: [
|
||||||
|
"runtime 标题恢复逻辑新增更严格的乱码筛除与二次 UTF-8 解码兜底,`æœ...`、带替换字符的脏标题现在会优先恢复为正常中文,无法恢复时会安全回退到稳定默认标题",
|
||||||
|
"同步观看退出时会完整重置 viewer 轮询、连接标记和帧版本,不再把旧 viewer 状态残留到 owner 或空闲态,避免页面继续停留在黑屏或“等待同步画面”",
|
||||||
|
"本地摄像头预览新增独立重绑流程和多次 watchdog 重试,即使浏览器在首帧时没有及时绑定 `srcObject` 或 `play()` 被短暂打断,也会自动恢复预览",
|
||||||
|
"视频区域是否显示画面改为按当前 runtime 角色分别判断,避免 viewer 的旧连接状态误导 owner 模式,导致本地没有预览时仍隐藏占位提示",
|
||||||
|
],
|
||||||
|
tests: [
|
||||||
|
"pnpm check",
|
||||||
|
"pnpm vitest run client/src/lib/liveCamera.test.ts",
|
||||||
|
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"live camera\"",
|
||||||
|
"pnpm build",
|
||||||
|
"线上 smoke: `curl -I https://te.hao.work/`,并检查页面源码中的 `/assets/index-*.js`、`/assets/index-*.css`、`/assets/pose-*.js` 已切换到新构建且返回正确 MIME",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
version: "2026.03.16-live-camera-runtime-refresh",
|
||||||
|
releaseDate: "2026-03-16",
|
||||||
|
repoVersion: "8e9e491",
|
||||||
|
summary: "修复实时分析页偶发残留在同步观看状态、标题乱码,以及摄像头预览绑定波动导致的启动失败。",
|
||||||
|
features: [
|
||||||
|
"live-camera 在打开拍摄引导、启用摄像头、开始分析前,都会先向服务端强制刷新 runtime 状态,避免旧的 viewer 锁残留导致本机明明已释放却仍无法启动",
|
||||||
|
"同步观看标题新增乱码恢复逻辑,可自动把 UTF-8 被误按 Latin-1 显示的标题恢复成正常中文,避免出现 `æœ...` 一类异常标题",
|
||||||
|
"摄像头启动链路改为以 `getUserMedia` 成功为准;即使本地预览 `<video>` 的 `srcObject` 或 `play()` 在当前浏览器里短暂失败,也不会直接把整次启动判死",
|
||||||
|
"e2e mock 的媒体流补齐为带假视频轨道的流对象,并把 viewer 回归改为校验“服务端 relay、无 viewer-signal”行为,减少和旧 P2P 逻辑混淆",
|
||||||
|
],
|
||||||
|
tests: [
|
||||||
|
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"live camera page exposes camera startup controls|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active\"",
|
||||||
|
"pnpm build",
|
||||||
|
"部署后线上 smoke: `https://te.hao.work/live-camera` 登录 H1 后可见空闲态“启动摄像头”入口,确认不再被残留 viewer 锁卡住;公开站点前端资源为 `assets/index-33wVjC4p.js` 与 `assets/index-tNGuStgv.css`",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
version: "2026.03.16-live-viewer-server-relay",
|
||||||
|
releaseDate: "2026-03-16",
|
||||||
|
repoVersion: "bb46d26",
|
||||||
|
summary: "实时分析同步观看改为由 media 服务中转帧图,不再依赖浏览器之间的 P2P 视频连接。",
|
||||||
|
features: [
|
||||||
|
"owner 端现在会把带骨架、关键点和虚拟形象叠层的合成画布压缩成 JPEG 并持续上传到 media 服务",
|
||||||
|
"viewer 端改为直接拉取 media 服务中的最新同步帧图,不再建立 WebRTC viewer peer 连接,因此跨网络和多端观看更稳定",
|
||||||
|
"同步观看模式文案改为明确提示“通过 media 服务中转”,等待同步时也会自动轮询最新画面",
|
||||||
|
"media 服务新增 live-frame 上传与静态分发能力,并记录最近同步帧的更新时间,方便后续扩展成更高频的服务端中转流",
|
||||||
|
],
|
||||||
|
tests: [
|
||||||
|
"cd media && go test ./...",
|
||||||
|
"pnpm build",
|
||||||
|
"playwright-skill 线上 smoke: 先用 media 服务创建 relay session、上传 live-frame,并把 H1 的 `live_analysis_runtime` 注入为 active viewer 场景;随后访问 `https://te.hao.work/live-camera`,确认页面进入“同步观看模式”、同步帧来自 `/media/assets/sessions/.../live-frame.jpg`,且 `viewer-signal` 请求数为 0",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
version: "2026.03.16-camera-startup-fallbacks",
|
||||||
|
releaseDate: "2026-03-16",
|
||||||
|
repoVersion: "a211562",
|
||||||
|
summary: "修复部分设备上摄像头因后置镜头约束、分辨率约束或麦克风不可用而直接启动失败的问题。",
|
||||||
|
features: [
|
||||||
|
"live-camera 与 recorder 改为共用分级降级的摄像头请求流程,会在当前画质失败时自动降分辨率、降约束并回退到兼容镜头",
|
||||||
|
"当设备不支持默认后置摄像头或当前镜头不可用时,页面会自动切换到实际可用的镜头方向,避免直接报错后卡死在未启动状态",
|
||||||
|
"recorder 预览启动不再被麦克风权限或麦克风设备异常整体拖死;麦克风不可用时会自动回退到仅视频模式",
|
||||||
|
"兼容模式命中时前端会给出明确提示,方便区分“已自动降级成功”与“仍然无法访问摄像头”的场景",
|
||||||
|
],
|
||||||
|
tests: [
|
||||||
|
"pnpm build",
|
||||||
|
"部署后线上 smoke: `https://te.hao.work/` 已提供 `assets/index-CRxtWK07.js` 与 `assets/index-tNGuStgv.css`;通过注入 `getUserMedia` 回归验证 `/live-camera` 首轮高约束失败后会自动切到兼容摄像头模式,`/recorder` 在麦克风不可用时会自动回退到仅视频模式并继续启动预览",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
version: "2026.03.16-live-analysis-viewer-full-sync",
|
||||||
|
releaseDate: "2026-03-16",
|
||||||
|
repoVersion: "922a9fb",
|
||||||
|
summary: "多端同步观看改为按持有端快照完整渲染,另一设备可同步看到视频状态、模式、画质、虚拟形象和保存阶段信息。",
|
||||||
|
features: [
|
||||||
|
"viewer 端现在同步显示持有端的会话标题、训练模式、设备端、拍摄视角、画质模式、虚拟形象状态和最近同步时间",
|
||||||
|
"同步观看时的分析阶段、保存阶段、已完成状态也会跟随主端刷新,不再只显示本地默认状态",
|
||||||
|
"viewer 页面会自动关闭拍摄校准弹窗,避免同步观看时被“启用摄像头”流程遮挡",
|
||||||
|
"新增 viewer 同步信息卡,明确允许 1 秒级延迟,并持续显示最近心跳时间",
|
||||||
|
],
|
||||||
|
tests: [
|
||||||
|
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"live camera switches into viewer mode|viewer stream|recorder blocks\"",
|
||||||
|
"pnpm build",
|
||||||
|
"部署后线上 smoke: `https://te.hao.work/` 已提供 `assets/index-HRdM3fxq.js` 与 `assets/index-tNGuStgv.css`;同账号 H1 双端登录后,移动端 owner 可开始实时分析,桌面端 `/live-camera` 自动进入同步观看并显示主端信息、同步视频流,owner 点击结束分析后 viewer 会同步进入保存阶段",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
version: "2026.03.16-live-analysis-lock-hardening",
|
||||||
|
releaseDate: "2026-03-16",
|
||||||
|
repoVersion: "f9db6ef",
|
||||||
|
summary: "修复同账号多端实时分析在旧登录态下仍可重复占用摄像头的问题,补强同步观看重试、录制页占用锁,并修复部署后启动阶段长时间 502。",
|
||||||
|
features: [
|
||||||
|
"旧用户名登录 token 即使缺少 `sid`,现在也会按 token 本身派生唯一会话标识,不再把不同设备错误识别成同一持有端",
|
||||||
|
"同步观看模式新增 viewer 自动重试:当持有端刚启动推流、viewer 首次连接返回 `viewer stream not ready` 时,会自动重连而不是一直黑屏",
|
||||||
|
"在线录制页接入实时分析占用锁;当其他设备正在 `/live-camera` 分析时,本页会禁止再次启动摄像头和录制",
|
||||||
|
"应用启动改为先监听 HTTP 端口、再后台串行执行教程图同步和标准库预热,修复新容器上线时公网长时间返回 502 的问题",
|
||||||
|
"线上 smoke 已确认 `https://te.hao.work/live-camera` 与 `/recorder` 都已切换到本次新构建,公开站点不再返回 502",
|
||||||
|
],
|
||||||
|
tests: [
|
||||||
|
"curl -I https://te.hao.work/",
|
||||||
|
"pnpm check",
|
||||||
|
"pnpm exec vitest run server/_core/sdk.test.ts server/features.test.ts",
|
||||||
|
"pnpm exec playwright test tests/e2e/app.spec.ts --grep \"viewer mode|viewer stream|recorder blocks\"",
|
||||||
|
"pnpm build",
|
||||||
|
"线上 smoke: H1 手机端开启实时分析后,PC 端 `/live-camera` 自动进入同步观看并显示同步画面,`/recorder` 禁止启动摄像头;结束分析后会话可正常释放",
|
||||||
|
],
|
||||||
|
},
|
||||||
{
|
{
|
||||||
version: "2026.03.16-live-analysis-runtime-migration",
|
version: "2026.03.16-live-analysis-runtime-migration",
|
||||||
releaseDate: "2026-03-16",
|
releaseDate: "2026-03-16",
|
||||||
|
|||||||
@@ -51,6 +51,8 @@ export type MediaSession = {
|
|||||||
streamConnected: boolean;
|
streamConnected: boolean;
|
||||||
lastStreamAt?: string;
|
lastStreamAt?: string;
|
||||||
viewerCount?: number;
|
viewerCount?: number;
|
||||||
|
liveFrameUrl?: string;
|
||||||
|
liveFrameUpdatedAt?: string;
|
||||||
playback: {
|
playback: {
|
||||||
webmUrl?: string;
|
webmUrl?: string;
|
||||||
mp4Url?: string;
|
mp4Url?: string;
|
||||||
@@ -131,6 +133,14 @@ export async function signalMediaViewerSession(sessionId: string, payload: { sdp
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function uploadMediaLiveFrame(sessionId: string, blob: Blob) {
|
||||||
|
return request<{ session: MediaSession }>(`/sessions/${sessionId}/live-frame`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "Content-Type": blob.type || "image/jpeg" },
|
||||||
|
body: blob,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
export async function uploadMediaSegment(
|
export async function uploadMediaSegment(
|
||||||
sessionId: string,
|
sessionId: string,
|
||||||
sequence: number,
|
sequence: number,
|
||||||
@@ -173,6 +183,10 @@ export async function getMediaSession(sessionId: string) {
|
|||||||
return request<{ session: MediaSession }>(`/sessions/${sessionId}`);
|
return request<{ session: MediaSession }>(`/sessions/${sessionId}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function getMediaAssetUrl(path: string) {
|
||||||
|
return `${MEDIA_BASE}${path.startsWith("/") ? path : `/${path}`}`;
|
||||||
|
}
|
||||||
|
|
||||||
export function formatRecordingTime(milliseconds: number) {
|
export function formatRecordingTime(milliseconds: number) {
|
||||||
const totalSeconds = Math.max(0, Math.floor(milliseconds / 1000));
|
const totalSeconds = Math.max(0, Math.floor(milliseconds / 1000));
|
||||||
const minutes = Math.floor(totalSeconds / 60);
|
const minutes = Math.floor(totalSeconds / 60);
|
||||||
|
|||||||
@@ -2,8 +2,8 @@ import { useAuth } from "@/_core/hooks/useAuth";
|
|||||||
import { trpc } from "@/lib/trpc";
|
import { trpc } from "@/lib/trpc";
|
||||||
import {
|
import {
|
||||||
createMediaSession,
|
createMediaSession,
|
||||||
signalMediaSession,
|
getMediaAssetUrl,
|
||||||
signalMediaViewerSession,
|
uploadMediaLiveFrame,
|
||||||
} from "@/lib/media";
|
} from "@/lib/media";
|
||||||
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
|
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
|
||||||
import { Badge } from "@/components/ui/badge";
|
import { Badge } from "@/components/ui/badge";
|
||||||
@@ -17,7 +17,7 @@ import { Slider } from "@/components/ui/slider";
|
|||||||
import { Switch } from "@/components/ui/switch";
|
import { Switch } from "@/components/ui/switch";
|
||||||
import { formatDateTimeShanghai } from "@/lib/time";
|
import { formatDateTimeShanghai } from "@/lib/time";
|
||||||
import { toast } from "sonner";
|
import { toast } from "sonner";
|
||||||
import { applyTrackZoom, type CameraQualityPreset, getCameraVideoConstraints, getLiveAnalysisBitrate, readTrackZoomState } from "@/lib/camera";
|
import { applyTrackZoom, type CameraQualityPreset, getLiveAnalysisBitrate, readTrackZoomState, requestCameraStream } from "@/lib/camera";
|
||||||
import {
|
import {
|
||||||
ACTION_WINDOW_FRAMES,
|
ACTION_WINDOW_FRAMES,
|
||||||
AVATAR_PRESETS,
|
AVATAR_PRESETS,
|
||||||
@@ -121,6 +121,15 @@ type RuntimeSnapshot = {
|
|||||||
phase?: "idle" | "analyzing" | "saving" | "safe" | "failed";
|
phase?: "idle" | "analyzing" | "saving" | "safe" | "failed";
|
||||||
startedAt?: number;
|
startedAt?: number;
|
||||||
durationMs?: number;
|
durationMs?: number;
|
||||||
|
title?: string;
|
||||||
|
sessionMode?: SessionMode;
|
||||||
|
qualityPreset?: CameraQualityPreset;
|
||||||
|
facingMode?: CameraFacing;
|
||||||
|
deviceKind?: "mobile" | "desktop";
|
||||||
|
avatarEnabled?: boolean;
|
||||||
|
avatarKey?: AvatarKey;
|
||||||
|
avatarLabel?: string;
|
||||||
|
updatedAt?: number;
|
||||||
currentAction?: ActionType;
|
currentAction?: ActionType;
|
||||||
rawAction?: ActionType;
|
rawAction?: ActionType;
|
||||||
feedback?: string[];
|
feedback?: string[];
|
||||||
@@ -184,23 +193,6 @@ const CAMERA_QUALITY_PRESETS: Record<CameraQualityPreset, { label: string; subti
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
function waitForIceGathering(peer: RTCPeerConnection) {
|
|
||||||
if (peer.iceGatheringState === "complete") {
|
|
||||||
return Promise.resolve();
|
|
||||||
}
|
|
||||||
|
|
||||||
return new Promise<void>((resolve) => {
|
|
||||||
const handleStateChange = () => {
|
|
||||||
if (peer.iceGatheringState === "complete") {
|
|
||||||
peer.removeEventListener("icegatheringstatechange", handleStateChange);
|
|
||||||
resolve();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
peer.addEventListener("icegatheringstatechange", handleStateChange);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function clamp(value: number, min: number, max: number) {
|
function clamp(value: number, min: number, max: number) {
|
||||||
return Math.max(min, Math.min(max, value));
|
return Math.max(min, Math.min(max, value));
|
||||||
}
|
}
|
||||||
@@ -227,6 +219,59 @@ function formatDuration(ms: number) {
|
|||||||
return `${minutes.toString().padStart(2, "0")}:${seconds.toString().padStart(2, "0")}`;
|
return `${minutes.toString().padStart(2, "0")}:${seconds.toString().padStart(2, "0")}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function normalizeRuntimeTitle(value: string | null | undefined) {
|
||||||
|
if (typeof value !== "string") return "";
|
||||||
|
const trimmed = value.trim();
|
||||||
|
if (!trimmed) return "";
|
||||||
|
|
||||||
|
const suspicious = /[ÃÂÆÐÑØæåçéèêëïîôöûüœŠŽƒ€¦<E282AC>]/;
|
||||||
|
const control = /[\u0000-\u001f\u007f]/g;
|
||||||
|
const score = (text: string) => {
|
||||||
|
const cjkCount = text.match(/[\u3400-\u9fff]/g)?.length ?? 0;
|
||||||
|
const latinCount = text.match(/[A-Za-z0-9]/g)?.length ?? 0;
|
||||||
|
const whitespaceCount = text.match(/\s/g)?.length ?? 0;
|
||||||
|
const punctuationCount = text.match(/[()\-_:./]/g)?.length ?? 0;
|
||||||
|
const badCount = text.match(/[ÃÂÆÐÑØæåçéèêëïîôöûüœŠŽƒ€¦<E282AC>]/g)?.length ?? 0;
|
||||||
|
const controlCount = text.match(control)?.length ?? 0;
|
||||||
|
return (cjkCount * 3) + latinCount + whitespaceCount + punctuationCount - (badCount * 4) - (controlCount * 6);
|
||||||
|
};
|
||||||
|
const sanitize = (candidate: string) => {
|
||||||
|
const normalized = candidate.replace(control, "").trim();
|
||||||
|
if (!normalized || normalized.includes("<22>")) {
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
return score(normalized) > 0 ? normalized : "";
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!suspicious.test(trimmed)) {
|
||||||
|
return sanitize(trimmed);
|
||||||
|
}
|
||||||
|
|
||||||
|
const candidates = [trimmed];
|
||||||
|
|
||||||
|
try {
|
||||||
|
const bytes = Uint8Array.from(Array.from(trimmed).map((char) => char.charCodeAt(0) & 0xff));
|
||||||
|
const decoded = new TextDecoder("utf-8").decode(bytes).trim();
|
||||||
|
if (decoded && decoded !== trimmed) {
|
||||||
|
candidates.push(decoded);
|
||||||
|
if (suspicious.test(decoded)) {
|
||||||
|
const decodedBytes = Uint8Array.from(Array.from(decoded).map((char) => char.charCodeAt(0) & 0xff));
|
||||||
|
const twiceDecoded = new TextDecoder("utf-8").decode(decodedBytes).trim();
|
||||||
|
if (twiceDecoded && twiceDecoded !== decoded) {
|
||||||
|
candidates.push(twiceDecoded);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
return sanitize(trimmed);
|
||||||
|
}
|
||||||
|
|
||||||
|
return candidates
|
||||||
|
.map((candidate) => sanitize(candidate))
|
||||||
|
.filter(Boolean)
|
||||||
|
.sort((left, right) => score(right) - score(left))[0] || "";
|
||||||
|
}
|
||||||
|
|
||||||
function isMobileDevice() {
|
function isMobileDevice() {
|
||||||
if (typeof window === "undefined") return false;
|
if (typeof window === "undefined") return false;
|
||||||
return /Android|iPhone|iPad|iPod/i.test(navigator.userAgent) || window.matchMedia("(max-width: 768px)").matches;
|
return /Android|iPhone|iPad|iPod/i.test(navigator.userAgent) || window.matchMedia("(max-width: 768px)").matches;
|
||||||
@@ -534,6 +579,20 @@ function getSessionBand(input: { overallScore: number; knownRatio: number; effec
|
|||||||
return { label: "待加强", tone: "bg-amber-500/10 text-amber-700" };
|
return { label: "待加强", tone: "bg-amber-500/10 text-amber-700" };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function getRuntimeSyncDelayMs(lastHeartbeatAt?: string | null) {
|
||||||
|
if (!lastHeartbeatAt) return null;
|
||||||
|
const heartbeatMs = new Date(lastHeartbeatAt).getTime();
|
||||||
|
if (Number.isNaN(heartbeatMs)) return null;
|
||||||
|
return Math.max(0, Date.now() - heartbeatMs);
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatRuntimeSyncDelay(delayMs: number | null) {
|
||||||
|
if (delayMs == null) return "等待同步";
|
||||||
|
if (delayMs < 1500) return "同步中";
|
||||||
|
if (delayMs < 10_000) return `${(delayMs / 1000).toFixed(1)}s 延迟`;
|
||||||
|
return "同步较慢";
|
||||||
|
}
|
||||||
|
|
||||||
export default function LiveCamera() {
|
export default function LiveCamera() {
|
||||||
const { user } = useAuth();
|
const { user } = useAuth();
|
||||||
const utils = trpc.useUtils();
|
const utils = trpc.useUtils();
|
||||||
@@ -544,11 +603,11 @@ export default function LiveCamera() {
|
|||||||
const streamRef = useRef<MediaStream | null>(null);
|
const streamRef = useRef<MediaStream | null>(null);
|
||||||
const poseRef = useRef<any>(null);
|
const poseRef = useRef<any>(null);
|
||||||
const compositeCanvasRef = useRef<HTMLCanvasElement | null>(null);
|
const compositeCanvasRef = useRef<HTMLCanvasElement | null>(null);
|
||||||
const broadcastPeerRef = useRef<RTCPeerConnection | null>(null);
|
|
||||||
const broadcastStreamRef = useRef<MediaStream | null>(null);
|
|
||||||
const broadcastSessionIdRef = useRef<string | null>(null);
|
const broadcastSessionIdRef = useRef<string | null>(null);
|
||||||
const viewerPeerRef = useRef<RTCPeerConnection | null>(null);
|
|
||||||
const viewerSessionIdRef = useRef<string | null>(null);
|
const viewerSessionIdRef = useRef<string | null>(null);
|
||||||
|
const viewerRetryTimerRef = useRef<number>(0);
|
||||||
|
const frameRelayTimerRef = useRef<number>(0);
|
||||||
|
const frameRelayInFlightRef = useRef(false);
|
||||||
const runtimeIdRef = useRef<number | null>(null);
|
const runtimeIdRef = useRef<number | null>(null);
|
||||||
const heartbeatTimerRef = useRef<number>(0);
|
const heartbeatTimerRef = useRef<number>(0);
|
||||||
const recorderRef = useRef<MediaRecorder | null>(null);
|
const recorderRef = useRef<MediaRecorder | null>(null);
|
||||||
@@ -611,6 +670,7 @@ export default function LiveCamera() {
|
|||||||
const [archivedVideoCount, setArchivedVideoCount] = useState(0);
|
const [archivedVideoCount, setArchivedVideoCount] = useState(0);
|
||||||
const [viewerConnected, setViewerConnected] = useState(false);
|
const [viewerConnected, setViewerConnected] = useState(false);
|
||||||
const [viewerError, setViewerError] = useState("");
|
const [viewerError, setViewerError] = useState("");
|
||||||
|
const [viewerFrameVersion, setViewerFrameVersion] = useState(0);
|
||||||
|
|
||||||
const resolvedAvatarKey = useMemo(
|
const resolvedAvatarKey = useMemo(
|
||||||
() => resolveAvatarKeyFromPrompt(avatarPrompt, avatarKey),
|
() => resolveAvatarKeyFromPrompt(avatarPrompt, avatarKey),
|
||||||
@@ -641,6 +701,8 @@ export default function LiveCamera() {
|
|||||||
const runtimeRole = (runtimeQuery.data?.role ?? "idle") as RuntimeRole;
|
const runtimeRole = (runtimeQuery.data?.role ?? "idle") as RuntimeRole;
|
||||||
const runtimeSession = (runtimeQuery.data?.runtimeSession ?? null) as RuntimeSession | null;
|
const runtimeSession = (runtimeQuery.data?.runtimeSession ?? null) as RuntimeSession | null;
|
||||||
const runtimeSnapshot = runtimeSession?.snapshot ?? null;
|
const runtimeSnapshot = runtimeSession?.snapshot ?? null;
|
||||||
|
const normalizedRuntimeTitle = normalizeRuntimeTitle(runtimeSession?.title);
|
||||||
|
const normalizedSnapshotTitle = normalizeRuntimeTitle(runtimeSnapshot?.title);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
avatarRenderRef.current = {
|
avatarRenderRef.current = {
|
||||||
@@ -674,6 +736,13 @@ export default function LiveCamera() {
|
|||||||
leaveStatusRef.current = leaveStatus;
|
leaveStatusRef.current = leaveStatus;
|
||||||
}, [leaveStatus]);
|
}, [leaveStatus]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (runtimeRole === "viewer") {
|
||||||
|
setShowSetupGuide(false);
|
||||||
|
setSetupStep(0);
|
||||||
|
}
|
||||||
|
}, [runtimeRole]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
sessionModeRef.current = sessionMode;
|
sessionModeRef.current = sessionMode;
|
||||||
}, [sessionMode]);
|
}, [sessionMode]);
|
||||||
@@ -749,6 +818,14 @@ export default function LiveCamera() {
|
|||||||
[displayVisibleSegments.length, knownRatio, liveScore?.overall, runtimeRole, runtimeSnapshot?.liveScore?.overall],
|
[displayVisibleSegments.length, knownRatio, liveScore?.overall, runtimeRole, runtimeSnapshot?.liveScore?.overall],
|
||||||
);
|
);
|
||||||
|
|
||||||
|
const refreshRuntimeState = useCallback(async () => {
|
||||||
|
const result = await runtimeQuery.refetch();
|
||||||
|
return {
|
||||||
|
role: (result.data?.role ?? runtimeRole) as RuntimeRole,
|
||||||
|
runtimeSession: (result.data?.runtimeSession ?? runtimeSession) as RuntimeSession | null,
|
||||||
|
};
|
||||||
|
}, [runtimeQuery, runtimeRole, runtimeSession]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
navigator.mediaDevices?.enumerateDevices().then((devices) => {
|
navigator.mediaDevices?.enumerateDevices().then((devices) => {
|
||||||
const cameras = devices.filter((device) => device.kind === "videoinput");
|
const cameras = devices.filter((device) => device.kind === "videoinput");
|
||||||
@@ -756,13 +833,44 @@ export default function LiveCamera() {
|
|||||||
}).catch(() => undefined);
|
}).catch(() => undefined);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
useEffect(() => {
|
const bindLocalPreview = useCallback(async (providedStream?: MediaStream | null) => {
|
||||||
if (!cameraActive || !streamRef.current || !videoRef.current) return;
|
const stream = providedStream || streamRef.current;
|
||||||
if (videoRef.current.srcObject !== streamRef.current) {
|
const video = videoRef.current;
|
||||||
videoRef.current.srcObject = streamRef.current;
|
if (!stream || !video) {
|
||||||
void videoRef.current.play().catch(() => undefined);
|
return false;
|
||||||
}
|
}
|
||||||
}, [cameraActive, immersivePreview]);
|
|
||||||
|
if (video.srcObject !== stream) {
|
||||||
|
video.srcObject = stream;
|
||||||
|
}
|
||||||
|
video.muted = true;
|
||||||
|
video.defaultMuted = true;
|
||||||
|
video.playsInline = true;
|
||||||
|
await video.play().catch(() => undefined);
|
||||||
|
return video.srcObject === stream;
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!cameraActive || !streamRef.current || runtimeRole === "viewer") return;
|
||||||
|
|
||||||
|
let cancelled = false;
|
||||||
|
const ensurePreview = () => {
|
||||||
|
if (cancelled) return;
|
||||||
|
const video = videoRef.current;
|
||||||
|
const stream = streamRef.current;
|
||||||
|
if (!video || !stream) return;
|
||||||
|
if (video.srcObject !== stream || video.videoWidth === 0 || video.paused) {
|
||||||
|
void bindLocalPreview(stream);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
ensurePreview();
|
||||||
|
const timers = [300, 900, 1800].map((delay) => window.setTimeout(ensurePreview, delay));
|
||||||
|
return () => {
|
||||||
|
cancelled = true;
|
||||||
|
timers.forEach((timer) => window.clearTimeout(timer));
|
||||||
|
};
|
||||||
|
}, [bindLocalPreview, cameraActive, immersivePreview, runtimeRole]);
|
||||||
|
|
||||||
const ensureCompositeCanvas = useCallback(() => {
|
const ensureCompositeCanvas = useCallback(() => {
|
||||||
if (typeof document === "undefined") {
|
if (typeof document === "undefined") {
|
||||||
@@ -858,6 +966,15 @@ export default function LiveCamera() {
|
|||||||
phase: phase ?? leaveStatusRef.current,
|
phase: phase ?? leaveStatusRef.current,
|
||||||
startedAt: sessionStartedAtRef.current || undefined,
|
startedAt: sessionStartedAtRef.current || undefined,
|
||||||
durationMs: durationMsRef.current,
|
durationMs: durationMsRef.current,
|
||||||
|
title: normalizedRuntimeTitle || `实时分析 ${ACTION_META[currentActionRef.current].label}`,
|
||||||
|
sessionMode: sessionModeRef.current,
|
||||||
|
qualityPreset,
|
||||||
|
facingMode: facing,
|
||||||
|
deviceKind: mobile ? "mobile" : "desktop",
|
||||||
|
avatarEnabled: avatarRenderRef.current.enabled,
|
||||||
|
avatarKey: avatarRenderRef.current.avatarKey,
|
||||||
|
avatarLabel: getAvatarPreset(avatarRenderRef.current.avatarKey)?.label || "猩猩",
|
||||||
|
updatedAt: Date.now(),
|
||||||
currentAction: currentActionRef.current,
|
currentAction: currentActionRef.current,
|
||||||
rawAction: rawActionRef.current,
|
rawAction: rawActionRef.current,
|
||||||
feedback: feedbackRef.current,
|
feedback: feedbackRef.current,
|
||||||
@@ -867,30 +984,72 @@ export default function LiveCamera() {
|
|||||||
unknownSegments: segmentsRef.current.filter((segment) => segment.isUnknown).length,
|
unknownSegments: segmentsRef.current.filter((segment) => segment.isUnknown).length,
|
||||||
archivedVideoCount: archivedVideosRef.current.length,
|
archivedVideoCount: archivedVideosRef.current.length,
|
||||||
recentSegments: segmentsRef.current.slice(-5),
|
recentSegments: segmentsRef.current.slice(-5),
|
||||||
}), []);
|
}), [facing, mobile, normalizedRuntimeTitle, qualityPreset]);
|
||||||
|
|
||||||
|
const openSetupGuide = useCallback(async () => {
|
||||||
|
const latest = await refreshRuntimeState();
|
||||||
|
if (latest.role === "viewer") {
|
||||||
|
setShowSetupGuide(false);
|
||||||
|
toast.error("当前账号已有其他设备正在实时分析,请先切换到同步观看模式");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
setShowSetupGuide(true);
|
||||||
|
}, [refreshRuntimeState]);
|
||||||
|
|
||||||
|
const uploadLiveFrame = useCallback(async (sessionId: string) => {
|
||||||
|
const compositeCanvas = ensureCompositeCanvas();
|
||||||
|
if (!compositeCanvas || frameRelayInFlightRef.current) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
renderCompositeFrame();
|
||||||
|
frameRelayInFlightRef.current = true;
|
||||||
|
try {
|
||||||
|
const blob = await new Promise<Blob | null>((resolve) => {
|
||||||
|
compositeCanvas.toBlob(resolve, "image/jpeg", mobile ? 0.7 : 0.76);
|
||||||
|
});
|
||||||
|
if (!blob) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
await uploadMediaLiveFrame(sessionId, blob);
|
||||||
|
} finally {
|
||||||
|
frameRelayInFlightRef.current = false;
|
||||||
|
}
|
||||||
|
}, [ensureCompositeCanvas, mobile, renderCompositeFrame]);
|
||||||
|
|
||||||
|
const startFrameRelayLoop = useCallback((sessionId: string) => {
|
||||||
|
broadcastSessionIdRef.current = sessionId;
|
||||||
|
if (frameRelayTimerRef.current) {
|
||||||
|
window.clearInterval(frameRelayTimerRef.current);
|
||||||
|
frameRelayTimerRef.current = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
void uploadLiveFrame(sessionId);
|
||||||
|
frameRelayTimerRef.current = window.setInterval(() => {
|
||||||
|
void uploadLiveFrame(sessionId);
|
||||||
|
}, 900);
|
||||||
|
}, [uploadLiveFrame]);
|
||||||
|
|
||||||
const closeBroadcastPeer = useCallback(() => {
|
const closeBroadcastPeer = useCallback(() => {
|
||||||
broadcastSessionIdRef.current = null;
|
broadcastSessionIdRef.current = null;
|
||||||
if (broadcastPeerRef.current) {
|
if (frameRelayTimerRef.current) {
|
||||||
broadcastPeerRef.current.onconnectionstatechange = null;
|
window.clearInterval(frameRelayTimerRef.current);
|
||||||
broadcastPeerRef.current.close();
|
frameRelayTimerRef.current = 0;
|
||||||
broadcastPeerRef.current = null;
|
|
||||||
}
|
|
||||||
if (broadcastStreamRef.current) {
|
|
||||||
broadcastStreamRef.current.getTracks().forEach((track) => track.stop());
|
|
||||||
broadcastStreamRef.current = null;
|
|
||||||
}
|
}
|
||||||
|
frameRelayInFlightRef.current = false;
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const closeViewerPeer = useCallback(() => {
|
const closeViewerPeer = useCallback((options?: { clearFrameVersion?: boolean }) => {
|
||||||
|
if (viewerRetryTimerRef.current) {
|
||||||
|
window.clearTimeout(viewerRetryTimerRef.current);
|
||||||
|
viewerRetryTimerRef.current = 0;
|
||||||
|
}
|
||||||
viewerSessionIdRef.current = null;
|
viewerSessionIdRef.current = null;
|
||||||
if (viewerPeerRef.current) {
|
if (options?.clearFrameVersion) {
|
||||||
viewerPeerRef.current.ontrack = null;
|
setViewerFrameVersion(0);
|
||||||
viewerPeerRef.current.onconnectionstatechange = null;
|
|
||||||
viewerPeerRef.current.close();
|
|
||||||
viewerPeerRef.current = null;
|
|
||||||
}
|
}
|
||||||
setViewerConnected(false);
|
setViewerConnected(false);
|
||||||
|
setViewerError("");
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const releaseRuntime = useCallback(async (phase: RuntimeSnapshot["phase"]) => {
|
const releaseRuntime = useCallback(async (phase: RuntimeSnapshot["phase"]) => {
|
||||||
@@ -945,8 +1104,8 @@ export default function LiveCamera() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const compositeCanvas = ensureCompositeCanvas();
|
const compositeCanvas = ensureCompositeCanvas();
|
||||||
if (!compositeCanvas || typeof compositeCanvas.captureStream !== "function") {
|
if (!compositeCanvas) {
|
||||||
throw new Error("当前浏览器不支持同步观看推流");
|
throw new Error("当前浏览器不支持同步观看画面");
|
||||||
}
|
}
|
||||||
|
|
||||||
renderCompositeFrame();
|
renderCompositeFrame();
|
||||||
@@ -965,77 +1124,21 @@ export default function LiveCamera() {
|
|||||||
});
|
});
|
||||||
|
|
||||||
const sessionId = sessionResponse.session.id;
|
const sessionId = sessionResponse.session.id;
|
||||||
const stream = compositeCanvas.captureStream(mobile ? 24 : 30);
|
startFrameRelayLoop(sessionId);
|
||||||
broadcastStreamRef.current = stream;
|
|
||||||
|
|
||||||
const peer = new RTCPeerConnection({
|
|
||||||
iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
|
|
||||||
});
|
|
||||||
broadcastPeerRef.current = peer;
|
|
||||||
|
|
||||||
stream.getTracks().forEach((track) => peer.addTrack(track, stream));
|
|
||||||
|
|
||||||
const offer = await peer.createOffer();
|
|
||||||
await peer.setLocalDescription(offer);
|
|
||||||
await waitForIceGathering(peer);
|
|
||||||
|
|
||||||
const answer = await signalMediaSession(sessionId, {
|
|
||||||
sdp: peer.localDescription?.sdp || "",
|
|
||||||
type: peer.localDescription?.type || "offer",
|
|
||||||
});
|
|
||||||
|
|
||||||
await peer.setRemoteDescription({
|
|
||||||
type: answer.type as RTCSdpType,
|
|
||||||
sdp: answer.sdp,
|
|
||||||
});
|
|
||||||
|
|
||||||
return sessionId;
|
return sessionId;
|
||||||
}, [ensureCompositeCanvas, facing, mobile, qualityPreset, renderCompositeFrame, user?.id]);
|
}, [ensureCompositeCanvas, facing, mobile, qualityPreset, renderCompositeFrame, startFrameRelayLoop, user?.id]);
|
||||||
|
|
||||||
const startViewerStream = useCallback(async (mediaSessionId: string) => {
|
const startViewerStream = useCallback(async (mediaSessionId: string) => {
|
||||||
if (viewerSessionIdRef.current === mediaSessionId && viewerPeerRef.current) {
|
if (viewerSessionIdRef.current === mediaSessionId && viewerConnected) {
|
||||||
|
setViewerFrameVersion(Date.now());
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
closeViewerPeer();
|
closeViewerPeer();
|
||||||
setViewerError("");
|
setViewerError("");
|
||||||
|
|
||||||
const peer = new RTCPeerConnection({
|
|
||||||
iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
|
|
||||||
});
|
|
||||||
viewerPeerRef.current = peer;
|
|
||||||
viewerSessionIdRef.current = mediaSessionId;
|
viewerSessionIdRef.current = mediaSessionId;
|
||||||
peer.addTransceiver("video", { direction: "recvonly" });
|
setViewerFrameVersion(Date.now());
|
||||||
|
}, [closeViewerPeer, viewerConnected]);
|
||||||
peer.ontrack = (event) => {
|
|
||||||
const nextStream = event.streams[0] ?? new MediaStream([event.track]);
|
|
||||||
if (videoRef.current) {
|
|
||||||
videoRef.current.srcObject = nextStream;
|
|
||||||
void videoRef.current.play().catch(() => undefined);
|
|
||||||
}
|
|
||||||
setViewerConnected(true);
|
|
||||||
};
|
|
||||||
|
|
||||||
peer.onconnectionstatechange = () => {
|
|
||||||
if (peer.connectionState === "failed" || peer.connectionState === "closed" || peer.connectionState === "disconnected") {
|
|
||||||
setViewerConnected(false);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const offer = await peer.createOffer();
|
|
||||||
await peer.setLocalDescription(offer);
|
|
||||||
await waitForIceGathering(peer);
|
|
||||||
|
|
||||||
const answer = await signalMediaViewerSession(mediaSessionId, {
|
|
||||||
sdp: peer.localDescription?.sdp || "",
|
|
||||||
type: peer.localDescription?.type || "offer",
|
|
||||||
});
|
|
||||||
|
|
||||||
await peer.setRemoteDescription({
|
|
||||||
type: answer.type as RTCSdpType,
|
|
||||||
sdp: answer.sdp,
|
|
||||||
});
|
|
||||||
}, [closeViewerPeer]);
|
|
||||||
|
|
||||||
const stopCamera = useCallback(() => {
|
const stopCamera = useCallback(() => {
|
||||||
if (animationRef.current) {
|
if (animationRef.current) {
|
||||||
@@ -1078,20 +1181,41 @@ export default function LiveCamera() {
|
|||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (runtimeRole !== "viewer" || !runtimeSession?.mediaSessionId) {
|
if (runtimeRole !== "viewer" || !runtimeSession?.mediaSessionId) {
|
||||||
if (!cameraActive) {
|
closeViewerPeer({
|
||||||
closeViewerPeer();
|
clearFrameVersion: !cameraActive,
|
||||||
|
});
|
||||||
|
if (streamRef.current) {
|
||||||
|
void bindLocalPreview();
|
||||||
}
|
}
|
||||||
setViewerError("");
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
void startViewerStream(runtimeSession.mediaSessionId).catch((error: any) => {
|
void startViewerStream(runtimeSession.mediaSessionId).catch((error: any) => {
|
||||||
const message = error?.message || "同步画面连接失败";
|
setViewerError(error?.message || "同步画面连接失败");
|
||||||
if (!/409/.test(message)) {
|
|
||||||
setViewerError(message);
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
}, [cameraActive, closeViewerPeer, runtimeRole, runtimeSession?.mediaSessionId, startViewerStream]);
|
|
||||||
|
if (viewerRetryTimerRef.current) {
|
||||||
|
window.clearInterval(viewerRetryTimerRef.current);
|
||||||
|
viewerRetryTimerRef.current = 0;
|
||||||
|
}
|
||||||
|
viewerRetryTimerRef.current = window.setInterval(() => {
|
||||||
|
setViewerFrameVersion(Date.now());
|
||||||
|
}, 900);
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
if (viewerRetryTimerRef.current) {
|
||||||
|
window.clearInterval(viewerRetryTimerRef.current);
|
||||||
|
viewerRetryTimerRef.current = 0;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}, [
|
||||||
|
bindLocalPreview,
|
||||||
|
cameraActive,
|
||||||
|
closeViewerPeer,
|
||||||
|
runtimeRole,
|
||||||
|
runtimeSession?.mediaSessionId,
|
||||||
|
startViewerStream,
|
||||||
|
]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
return () => {
|
return () => {
|
||||||
@@ -1146,7 +1270,8 @@ export default function LiveCamera() {
|
|||||||
preferredZoom = zoomTargetRef.current,
|
preferredZoom = zoomTargetRef.current,
|
||||||
preset: CameraQualityPreset = qualityPreset,
|
preset: CameraQualityPreset = qualityPreset,
|
||||||
) => {
|
) => {
|
||||||
if (runtimeRole === "viewer") {
|
const latest = await refreshRuntimeState();
|
||||||
|
if (latest.role === "viewer") {
|
||||||
toast.error("当前账号已有其他设备正在实时分析,请切换到同步观看模式");
|
toast.error("当前账号已有其他设备正在实时分析,请切换到同步观看模式");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -1154,25 +1279,27 @@ export default function LiveCamera() {
|
|||||||
if (streamRef.current) {
|
if (streamRef.current) {
|
||||||
streamRef.current.getTracks().forEach((track) => track.stop());
|
streamRef.current.getTracks().forEach((track) => track.stop());
|
||||||
}
|
}
|
||||||
|
const { stream, appliedFacingMode, usedFallback } = await requestCameraStream({
|
||||||
const constraints: MediaStreamConstraints = {
|
facingMode: nextFacing,
|
||||||
video: getCameraVideoConstraints(nextFacing, mobile, preset),
|
isMobile: mobile,
|
||||||
audio: false,
|
preset,
|
||||||
};
|
});
|
||||||
|
|
||||||
const stream = await navigator.mediaDevices.getUserMedia(constraints);
|
|
||||||
streamRef.current = stream;
|
streamRef.current = stream;
|
||||||
if (videoRef.current) {
|
closeViewerPeer();
|
||||||
videoRef.current.srcObject = stream;
|
if (appliedFacingMode !== nextFacing) {
|
||||||
await videoRef.current.play();
|
setFacing(appliedFacingMode);
|
||||||
}
|
}
|
||||||
await syncZoomState(preferredZoom, stream.getVideoTracks()[0] || null);
|
await bindLocalPreview(stream);
|
||||||
setCameraActive(true);
|
setCameraActive(true);
|
||||||
|
await syncZoomState(preferredZoom, stream.getVideoTracks()[0] || null);
|
||||||
|
if (usedFallback) {
|
||||||
|
toast.info("当前设备已自动切换到兼容摄像头模式");
|
||||||
|
}
|
||||||
toast.success("摄像头已启动");
|
toast.success("摄像头已启动");
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
toast.error(`摄像头启动失败: ${error?.message || "未知错误"}`);
|
toast.error(`摄像头启动失败: ${error?.message || "未知错误"}`);
|
||||||
}
|
}
|
||||||
}, [facing, mobile, qualityPreset, runtimeRole, syncZoomState]);
|
}, [bindLocalPreview, closeViewerPeer, facing, mobile, qualityPreset, refreshRuntimeState, syncZoomState]);
|
||||||
|
|
||||||
const switchCamera = useCallback(async () => {
|
const switchCamera = useCallback(async () => {
|
||||||
const nextFacing: CameraFacing = facing === "user" ? "environment" : "user";
|
const nextFacing: CameraFacing = facing === "user" ? "environment" : "user";
|
||||||
@@ -1397,12 +1524,13 @@ export default function LiveCamera() {
|
|||||||
}, [flushSegment, liveScore, mobile, saveLiveSessionMutation, sessionMode, stopSessionRecorder]);
|
}, [flushSegment, liveScore, mobile, saveLiveSessionMutation, sessionMode, stopSessionRecorder]);
|
||||||
|
|
||||||
const startAnalysis = useCallback(async () => {
|
const startAnalysis = useCallback(async () => {
|
||||||
|
const latest = await refreshRuntimeState();
|
||||||
if (!cameraActive || !videoRef.current || !streamRef.current) {
|
if (!cameraActive || !videoRef.current || !streamRef.current) {
|
||||||
toast.error("请先启动摄像头");
|
toast.error("请先启动摄像头");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (analyzingRef.current || saving) return;
|
if (analyzingRef.current || saving) return;
|
||||||
if (runtimeRole === "viewer") {
|
if (latest.role === "viewer") {
|
||||||
toast.error("当前设备处于同步观看模式,不能重复开启分析");
|
toast.error("当前设备处于同步观看模式,不能重复开启分析");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -1550,10 +1678,10 @@ export default function LiveCamera() {
|
|||||||
appendFrameToSegment,
|
appendFrameToSegment,
|
||||||
cameraActive,
|
cameraActive,
|
||||||
closeBroadcastPeer,
|
closeBroadcastPeer,
|
||||||
|
refreshRuntimeState,
|
||||||
releaseRuntime,
|
releaseRuntime,
|
||||||
runtimeAcquireMutation,
|
runtimeAcquireMutation,
|
||||||
runtimeQuery,
|
runtimeQuery,
|
||||||
runtimeRole,
|
|
||||||
saving,
|
saving,
|
||||||
sessionMode,
|
sessionMode,
|
||||||
startBroadcastSession,
|
startBroadcastSession,
|
||||||
@@ -1612,10 +1740,17 @@ export default function LiveCamera() {
|
|||||||
}, [analyzing, saving]);
|
}, [analyzing, saving]);
|
||||||
|
|
||||||
const handleSetupComplete = useCallback(async () => {
|
const handleSetupComplete = useCallback(async () => {
|
||||||
|
const latest = await refreshRuntimeState();
|
||||||
|
if (latest.role === "viewer") {
|
||||||
|
setShowSetupGuide(false);
|
||||||
|
toast.error("当前账号已有其他设备正在实时分析,请切换到同步观看模式");
|
||||||
|
return;
|
||||||
|
}
|
||||||
setShowSetupGuide(false);
|
setShowSetupGuide(false);
|
||||||
await startCamera(facing, zoomTargetRef.current, qualityPreset);
|
await startCamera(facing, zoomTargetRef.current, qualityPreset);
|
||||||
}, [facing, qualityPreset, startCamera]);
|
}, [facing, qualityPreset, refreshRuntimeState, startCamera]);
|
||||||
|
|
||||||
|
const displayLeaveStatus = runtimeRole === "viewer" ? (runtimeSnapshot?.phase ?? "idle") : leaveStatus;
|
||||||
const displayAction = runtimeRole === "viewer" ? (runtimeSnapshot?.currentAction ?? "unknown") : currentAction;
|
const displayAction = runtimeRole === "viewer" ? (runtimeSnapshot?.currentAction ?? "unknown") : currentAction;
|
||||||
const displayRawAction = runtimeRole === "viewer" ? (runtimeSnapshot?.rawAction ?? "unknown") : rawAction;
|
const displayRawAction = runtimeRole === "viewer" ? (runtimeSnapshot?.rawAction ?? "unknown") : rawAction;
|
||||||
const displayScore = runtimeRole === "viewer" ? (runtimeSnapshot?.liveScore ?? null) : liveScore;
|
const displayScore = runtimeRole === "viewer" ? (runtimeSnapshot?.liveScore ?? null) : liveScore;
|
||||||
@@ -1627,7 +1762,37 @@ export default function LiveCamera() {
|
|||||||
...runtimeSnapshot?.stabilityMeta,
|
...runtimeSnapshot?.stabilityMeta,
|
||||||
}
|
}
|
||||||
: stabilityMeta;
|
: stabilityMeta;
|
||||||
const hasVideoFeed = cameraActive || viewerConnected;
|
const displaySessionMode = runtimeRole === "viewer"
|
||||||
|
? (runtimeSnapshot?.sessionMode ?? runtimeSession?.sessionMode ?? sessionMode)
|
||||||
|
: sessionMode;
|
||||||
|
const displayQualityPreset = runtimeRole === "viewer"
|
||||||
|
? (runtimeSnapshot?.qualityPreset ?? qualityPreset)
|
||||||
|
: qualityPreset;
|
||||||
|
const displayFacing = runtimeRole === "viewer"
|
||||||
|
? (runtimeSnapshot?.facingMode ?? facing)
|
||||||
|
: facing;
|
||||||
|
const displayDeviceKind = runtimeRole === "viewer"
|
||||||
|
? (runtimeSnapshot?.deviceKind ?? (mobile ? "mobile" : "desktop"))
|
||||||
|
: (mobile ? "mobile" : "desktop");
|
||||||
|
const displayAvatarEnabled = runtimeRole === "viewer"
|
||||||
|
? Boolean(runtimeSnapshot?.avatarEnabled)
|
||||||
|
: avatarEnabled;
|
||||||
|
const displayAvatarKey = runtimeRole === "viewer"
|
||||||
|
? ((runtimeSnapshot?.avatarKey as AvatarKey | undefined) ?? resolvedAvatarKey)
|
||||||
|
: resolvedAvatarKey;
|
||||||
|
const displayAvatarPreset = getAvatarPreset(displayAvatarKey);
|
||||||
|
const displayAvatarLabel = runtimeRole === "viewer"
|
||||||
|
? (runtimeSnapshot?.avatarLabel ?? displayAvatarPreset?.label ?? "猩猩")
|
||||||
|
: (displayAvatarPreset?.label || "猩猩");
|
||||||
|
const runtimeSyncDelayMs = runtimeRole === "viewer" ? getRuntimeSyncDelayMs(runtimeSession?.lastHeartbeatAt) : null;
|
||||||
|
const runtimeSyncLabel = runtimeRole === "viewer" ? formatRuntimeSyncDelay(runtimeSyncDelayMs) : "";
|
||||||
|
const displayRuntimeTitle = runtimeRole === "viewer"
|
||||||
|
? (normalizedSnapshotTitle || normalizedRuntimeTitle || "其他设备实时分析")
|
||||||
|
: (normalizedRuntimeTitle || `实时分析 ${ACTION_META[currentAction].label}`);
|
||||||
|
const viewerFrameSrc = runtimeRole === "viewer" && runtimeSession?.mediaSessionId
|
||||||
|
? getMediaAssetUrl(`/assets/sessions/${runtimeSession.mediaSessionId}/live-frame.jpg?ts=${viewerFrameVersion || runtimeSnapshot?.updatedAt || Date.now()}`)
|
||||||
|
: "";
|
||||||
|
const hasVideoFeed = runtimeRole === "viewer" ? viewerConnected : cameraActive;
|
||||||
const heroAction = ACTION_META[displayAction];
|
const heroAction = ACTION_META[displayAction];
|
||||||
const rawActionMeta = ACTION_META[displayRawAction];
|
const rawActionMeta = ACTION_META[displayRawAction];
|
||||||
const pendingActionMeta = displayStabilityMeta.pendingAction ? ACTION_META[displayStabilityMeta.pendingAction] : null;
|
const pendingActionMeta = displayStabilityMeta.pendingAction ? ACTION_META[displayStabilityMeta.pendingAction] : null;
|
||||||
@@ -1637,8 +1802,8 @@ export default function LiveCamera() {
|
|||||||
const fullBodyAvatarPresets = AVATAR_PRESETS.filter((preset) => preset.category === "full-body-3d");
|
const fullBodyAvatarPresets = AVATAR_PRESETS.filter((preset) => preset.category === "full-body-3d");
|
||||||
const previewTitle = runtimeRole === "viewer"
|
const previewTitle = runtimeRole === "viewer"
|
||||||
? viewerConnected
|
? viewerConnected
|
||||||
? "同步观看中"
|
? `${runtimeSyncLabel} · 服务端同步中`
|
||||||
: "正在连接同步画面"
|
: "正在获取服务端同步画面"
|
||||||
: analyzing
|
: analyzing
|
||||||
? displayStabilityMeta.pending && pendingActionMeta
|
? displayStabilityMeta.pending && pendingActionMeta
|
||||||
? `${pendingActionMeta.label} 切换确认中`
|
? `${pendingActionMeta.label} 切换确认中`
|
||||||
@@ -1647,7 +1812,7 @@ export default function LiveCamera() {
|
|||||||
? "准备开始实时分析"
|
? "准备开始实时分析"
|
||||||
: "摄像头待启动";
|
: "摄像头待启动";
|
||||||
|
|
||||||
const viewerModeLabel = runtimeSession?.title || "其他设备正在实时分析";
|
const viewerModeLabel = normalizedSnapshotTitle || normalizedRuntimeTitle || "其他设备正在实时分析";
|
||||||
|
|
||||||
const renderPrimaryActions = (rail = false) => {
|
const renderPrimaryActions = (rail = false) => {
|
||||||
const buttonClass = rail
|
const buttonClass = rail
|
||||||
@@ -1687,7 +1852,7 @@ export default function LiveCamera() {
|
|||||||
<Button
|
<Button
|
||||||
data-testid={rail ? undefined : "live-camera-toolbar-start-button"}
|
data-testid={rail ? undefined : "live-camera-toolbar-start-button"}
|
||||||
className={buttonClass}
|
className={buttonClass}
|
||||||
onClick={() => setShowSetupGuide(true)}
|
onClick={() => void openSetupGuide()}
|
||||||
>
|
>
|
||||||
<Camera className={rail ? "h-5 w-5" : "mr-2 h-4 w-4"} />
|
<Camera className={rail ? "h-5 w-5" : "mr-2 h-4 w-4"} />
|
||||||
{!rail && "启动摄像头"}
|
{!rail && "启动摄像头"}
|
||||||
@@ -1878,42 +2043,50 @@ export default function LiveCamera() {
|
|||||||
</DialogContent>
|
</DialogContent>
|
||||||
</Dialog>
|
</Dialog>
|
||||||
|
|
||||||
{leaveStatus === "analyzing" ? (
|
{displayLeaveStatus === "analyzing" ? (
|
||||||
<Alert>
|
<Alert>
|
||||||
<Activity className="h-4 w-4" />
|
<Activity className="h-4 w-4" />
|
||||||
<AlertTitle>分析进行中</AlertTitle>
|
<AlertTitle>分析进行中</AlertTitle>
|
||||||
<AlertDescription>
|
<AlertDescription>
|
||||||
当前仍在采集和识别动作数据,请先不要关闭浏览器或切走页面。
|
{runtimeRole === "viewer"
|
||||||
|
? "持有端仍在采集和识别动作数据,本页会按会话心跳持续同步视频与动作信息。"
|
||||||
|
: "当前仍在采集和识别动作数据,请先不要关闭浏览器或切走页面。"}
|
||||||
</AlertDescription>
|
</AlertDescription>
|
||||||
</Alert>
|
</Alert>
|
||||||
) : null}
|
) : null}
|
||||||
|
|
||||||
{leaveStatus === "saving" ? (
|
{displayLeaveStatus === "saving" ? (
|
||||||
<Alert>
|
<Alert>
|
||||||
<Activity className="h-4 w-4" />
|
<Activity className="h-4 w-4" />
|
||||||
<AlertTitle>正在保存分析结果</AlertTitle>
|
<AlertTitle>正在保存分析结果</AlertTitle>
|
||||||
<AlertDescription>
|
<AlertDescription>
|
||||||
实时分析录像、动作区间和训练记录正在提交,请暂时停留当前页面;保存完成后会提示你可以离开。
|
{runtimeRole === "viewer"
|
||||||
|
? "持有端正在提交录像、动作区间和训练记录;本页会同步保存状态,可以稍后再刷新查看。"
|
||||||
|
: "实时分析录像、动作区间和训练记录正在提交,请暂时停留当前页面;保存完成后会提示你可以离开。"}
|
||||||
</AlertDescription>
|
</AlertDescription>
|
||||||
</Alert>
|
</Alert>
|
||||||
) : null}
|
) : null}
|
||||||
|
|
||||||
{leaveStatus === "safe" ? (
|
{displayLeaveStatus === "safe" ? (
|
||||||
<Alert>
|
<Alert>
|
||||||
<CheckCircle2 className="h-4 w-4" />
|
<CheckCircle2 className="h-4 w-4" />
|
||||||
<AlertTitle>分析结果已保存</AlertTitle>
|
<AlertTitle>分析结果已保存</AlertTitle>
|
||||||
<AlertDescription>
|
<AlertDescription>
|
||||||
当前分析数据已经提交完成。现在可以关闭浏览器、返回上一页,或切换到其他页面,不会影响已保存的数据。
|
{runtimeRole === "viewer"
|
||||||
|
? "持有端分析数据已经提交完成;本页显示的是同步结果,你现在可以离开,不会影响已保存的数据。"
|
||||||
|
: "当前分析数据已经提交完成。现在可以关闭浏览器、返回上一页,或切换到其他页面,不会影响已保存的数据。"}
|
||||||
</AlertDescription>
|
</AlertDescription>
|
||||||
</Alert>
|
</Alert>
|
||||||
) : null}
|
) : null}
|
||||||
|
|
||||||
{leaveStatus === "failed" ? (
|
{displayLeaveStatus === "failed" ? (
|
||||||
<Alert>
|
<Alert>
|
||||||
<Activity className="h-4 w-4" />
|
<Activity className="h-4 w-4" />
|
||||||
<AlertTitle>分析保存失败</AlertTitle>
|
<AlertTitle>分析保存失败</AlertTitle>
|
||||||
<AlertDescription>
|
<AlertDescription>
|
||||||
当前会话还没有完整写入,请先留在本页并重新尝试结束分析或检查网络状态。
|
{runtimeRole === "viewer"
|
||||||
|
? "持有端当前会话还没有完整写入,本页会继续显示最后一次同步状态。"
|
||||||
|
: "当前会话还没有完整写入,请先留在本页并重新尝试结束分析或检查网络状态。"}
|
||||||
</AlertDescription>
|
</AlertDescription>
|
||||||
</Alert>
|
</Alert>
|
||||||
) : null}
|
) : null}
|
||||||
@@ -1923,7 +2096,7 @@ export default function LiveCamera() {
|
|||||||
<Monitor className="h-4 w-4" />
|
<Monitor className="h-4 w-4" />
|
||||||
<AlertTitle>同步观看模式</AlertTitle>
|
<AlertTitle>同步观看模式</AlertTitle>
|
||||||
<AlertDescription>
|
<AlertDescription>
|
||||||
{viewerModeLabel}。当前设备不会占用本地摄像头,也不能再次开启分析;如需查看同步画面,可直接点击“同步观看”。
|
{viewerModeLabel}。当前设备不会占用本地摄像头,也不能再次开启分析;同步画面会通过 media 服务中转,动作、评分与会话信息会按心跳自动同步,允许 1 秒级延迟。
|
||||||
</AlertDescription>
|
</AlertDescription>
|
||||||
</Alert>
|
</Alert>
|
||||||
) : null}
|
) : null}
|
||||||
@@ -1954,21 +2127,29 @@ export default function LiveCamera() {
|
|||||||
</Badge>
|
</Badge>
|
||||||
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
|
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
|
||||||
<Camera className="h-3.5 w-3.5" />
|
<Camera className="h-3.5 w-3.5" />
|
||||||
{avatarEnabled ? `虚拟形象 ${resolvedAvatarLabel}` : "骨架叠加"}
|
{displayAvatarEnabled ? `虚拟形象 ${displayAvatarLabel}` : "骨架叠加"}
|
||||||
</Badge>
|
</Badge>
|
||||||
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
|
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
|
||||||
<PlayCircle className="h-3.5 w-3.5" />
|
<PlayCircle className="h-3.5 w-3.5" />
|
||||||
{(runtimeRole === "viewer" ? runtimeSession?.sessionMode : sessionMode) === "practice" ? "练习会话" : "训练 PK"}
|
{displaySessionMode === "practice" ? "练习会话" : "训练 PK"}
|
||||||
</Badge>
|
</Badge>
|
||||||
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
|
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
|
||||||
<Video className="h-3.5 w-3.5" />
|
<Video className="h-3.5 w-3.5" />
|
||||||
默认 {CAMERA_QUALITY_PRESETS[qualityPreset].label}
|
默认 {CAMERA_QUALITY_PRESETS[displayQualityPreset].label}
|
||||||
</Badge>
|
</Badge>
|
||||||
|
{runtimeRole === "viewer" ? (
|
||||||
|
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10" data-testid="live-camera-viewer-delay-badge">
|
||||||
|
<Monitor className="h-3.5 w-3.5" />
|
||||||
|
{runtimeSyncLabel}
|
||||||
|
</Badge>
|
||||||
|
) : null}
|
||||||
</div>
|
</div>
|
||||||
<div>
|
<div>
|
||||||
<h1 className="text-3xl font-semibold tracking-tight">实时分析中枢</h1>
|
<h1 className="text-3xl font-semibold tracking-tight">{displayRuntimeTitle}</h1>
|
||||||
<p className="mt-2 max-w-2xl text-sm leading-6 text-white/70">
|
<p className="mt-2 max-w-2xl text-sm leading-6 text-white/70">
|
||||||
摄像头启动后会持续识别正手、反手、发球、截击、高压、切削、挑高球与未知动作。系统会用 24 帧时间窗口统一动作,再把稳定动作写入片段、训练记录与评分;分析过程中会自动录制“视频画面 + 骨架/关键点叠层”的合成回放,并按 60 秒分段归档进视频库。开启虚拟形象后,画面中的人体可切换为 10 个轻量动物替身,或 4 个免费的全身 3D Avatar 示例覆盖显示。
|
{runtimeRole === "viewer"
|
||||||
|
? `当前正在通过服务端中转同步 ${displayDeviceKind === "mobile" ? "移动端" : "桌面端"} ${displayFacing === "environment" ? "后置/主摄视角" : "前置视角"} 画面。同步画面、动作、评分、最近区间、虚拟形象和会话状态会自动跟随持有端刷新,允许少量网络延迟。`
|
||||||
|
: "摄像头启动后会持续识别正手、反手、发球、截击、高压、切削、挑高球与未知动作。系统会用 24 帧时间窗口统一动作,再把稳定动作写入片段、训练记录与评分;分析过程中会自动录制“视频画面 + 骨架/关键点叠层”的合成回放,并按 60 秒分段归档进视频库。开启虚拟形象后,画面中的人体可切换为 10 个轻量动物替身,或 4 个免费的全身 3D Avatar 示例覆盖显示。"}
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -2001,11 +2182,27 @@ export default function LiveCamera() {
|
|||||||
<div className="relative aspect-[16/10] overflow-hidden bg-black sm:aspect-video">
|
<div className="relative aspect-[16/10] overflow-hidden bg-black sm:aspect-video">
|
||||||
<video
|
<video
|
||||||
ref={videoRef}
|
ref={videoRef}
|
||||||
className={`absolute inset-0 h-full w-full object-contain ${immersivePreview ? "opacity-0" : ""}`}
|
className={`absolute inset-0 h-full w-full object-contain ${immersivePreview || runtimeRole === "viewer" ? "opacity-0" : ""}`}
|
||||||
playsInline
|
playsInline
|
||||||
muted
|
muted
|
||||||
autoPlay
|
autoPlay
|
||||||
/>
|
/>
|
||||||
|
{runtimeRole === "viewer" && viewerFrameSrc ? (
|
||||||
|
<img
|
||||||
|
key={viewerFrameSrc}
|
||||||
|
src={viewerFrameSrc}
|
||||||
|
alt="同步中的实时分析画面"
|
||||||
|
className="absolute inset-0 h-full w-full object-contain"
|
||||||
|
onLoad={() => {
|
||||||
|
setViewerConnected(true);
|
||||||
|
setViewerError("");
|
||||||
|
}}
|
||||||
|
onError={() => {
|
||||||
|
setViewerConnected(false);
|
||||||
|
setViewerError("持有端正在上传同步画面,正在自动重试...");
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
) : null}
|
||||||
<canvas
|
<canvas
|
||||||
ref={canvasRef}
|
ref={canvasRef}
|
||||||
className={`pointer-events-none absolute inset-0 h-full w-full object-contain ${runtimeRole === "viewer" ? "hidden" : analyzing ? "" : "opacity-70"}`}
|
className={`pointer-events-none absolute inset-0 h-full w-full object-contain ${runtimeRole === "viewer" ? "hidden" : analyzing ? "" : "opacity-70"}`}
|
||||||
@@ -2034,10 +2231,10 @@ export default function LiveCamera() {
|
|||||||
disabled={!runtimeSession?.mediaSessionId}
|
disabled={!runtimeSession?.mediaSessionId}
|
||||||
>
|
>
|
||||||
<Monitor className="mr-2 h-4 w-4" />
|
<Monitor className="mr-2 h-4 w-4" />
|
||||||
{viewerConnected ? "重新同步" : "同步观看"}
|
{viewerConnected ? "刷新同步" : "获取同步画面"}
|
||||||
</Button>
|
</Button>
|
||||||
) : (
|
) : (
|
||||||
<Button data-testid="live-camera-start-button" onClick={() => setShowSetupGuide(true)} className="rounded-2xl">
|
<Button data-testid="live-camera-start-button" onClick={() => void openSetupGuide()} className="rounded-2xl">
|
||||||
<Camera className="mr-2 h-4 w-4" />
|
<Camera className="mr-2 h-4 w-4" />
|
||||||
启动摄像头
|
启动摄像头
|
||||||
</Button>
|
</Button>
|
||||||
@@ -2054,10 +2251,10 @@ export default function LiveCamera() {
|
|||||||
<Target className="h-3.5 w-3.5" />
|
<Target className="h-3.5 w-3.5" />
|
||||||
非未知片段 {displayVisibleSegments.length}
|
非未知片段 {displayVisibleSegments.length}
|
||||||
</Badge>
|
</Badge>
|
||||||
{avatarEnabled ? (
|
{displayAvatarEnabled ? (
|
||||||
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
|
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
|
||||||
<Sparkles className="h-3.5 w-3.5" />
|
<Sparkles className="h-3.5 w-3.5" />
|
||||||
虚拟形象 {resolvedAvatarLabel}
|
虚拟形象 {displayAvatarLabel}
|
||||||
</Badge>
|
</Badge>
|
||||||
) : null}
|
) : null}
|
||||||
</div>
|
</div>
|
||||||
@@ -2105,7 +2302,7 @@ export default function LiveCamera() {
|
|||||||
<div className="border-t border-border/60 bg-card/80 p-4">
|
<div className="border-t border-border/60 bg-card/80 p-4">
|
||||||
<div className="grid gap-3 md:grid-cols-[180px_minmax(0,1fr)]">
|
<div className="grid gap-3 md:grid-cols-[180px_minmax(0,1fr)]">
|
||||||
<Select
|
<Select
|
||||||
value={runtimeRole === "viewer" ? (runtimeSession?.sessionMode ?? sessionMode) : sessionMode}
|
value={displaySessionMode}
|
||||||
onValueChange={(value) => setSessionMode(value as SessionMode)}
|
onValueChange={(value) => setSessionMode(value as SessionMode)}
|
||||||
disabled={analyzing || saving || runtimeRole === "viewer"}
|
disabled={analyzing || saving || runtimeRole === "viewer"}
|
||||||
>
|
>
|
||||||
@@ -2122,6 +2319,29 @@ export default function LiveCamera() {
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div className="mt-4 grid gap-3 rounded-[24px] border border-border/60 bg-muted/15 p-4 md:grid-cols-3">
|
<div className="mt-4 grid gap-3 rounded-[24px] border border-border/60 bg-muted/15 p-4 md:grid-cols-3">
|
||||||
|
{runtimeRole === "viewer" ? (
|
||||||
|
<div className="rounded-2xl border border-border/60 bg-background/90 p-4 md:col-span-3" data-testid="live-camera-viewer-sync-card">
|
||||||
|
<div className="flex flex-wrap items-start justify-between gap-3">
|
||||||
|
<div>
|
||||||
|
<div className="text-[11px] uppercase tracking-[0.16em] text-muted-foreground">同步中的主端信息</div>
|
||||||
|
<div className="mt-2 text-lg font-semibold">{displayRuntimeTitle}</div>
|
||||||
|
<div className="mt-2 grid gap-2 text-xs text-muted-foreground sm:grid-cols-2">
|
||||||
|
<div>设备端:{displayDeviceKind === "mobile" ? "移动端" : "桌面端"}</div>
|
||||||
|
<div>拍摄视角:{displayFacing === "environment" ? "后置 / 主摄" : "前置"}</div>
|
||||||
|
<div>画质模式:{CAMERA_QUALITY_PRESETS[displayQualityPreset].label}</div>
|
||||||
|
<div>虚拟形象:{displayAvatarEnabled ? displayAvatarLabel : "未开启"}</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div className="min-w-[150px] rounded-2xl border border-border/60 bg-muted/20 px-4 py-3 text-sm">
|
||||||
|
<div className="text-[11px] uppercase tracking-[0.16em] text-muted-foreground">最近同步</div>
|
||||||
|
<div className="mt-2 font-semibold">{runtimeSyncLabel}</div>
|
||||||
|
<div className="mt-1 text-xs text-muted-foreground">
|
||||||
|
{runtimeSession?.lastHeartbeatAt ? formatDateTimeShanghai(runtimeSession.lastHeartbeatAt) : "等待首个心跳"}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
) : null}
|
||||||
<div className="rounded-2xl border border-border/60 bg-background/90 p-4">
|
<div className="rounded-2xl border border-border/60 bg-background/90 p-4">
|
||||||
<div className="text-[11px] uppercase tracking-[0.16em] text-muted-foreground">自动分析录像</div>
|
<div className="text-[11px] uppercase tracking-[0.16em] text-muted-foreground">自动分析录像</div>
|
||||||
<div className="mt-2 text-lg font-semibold">每 60 秒自动切段</div>
|
<div className="mt-2 text-lg font-semibold">每 60 秒自动切段</div>
|
||||||
@@ -2454,11 +2674,11 @@ export default function LiveCamera() {
|
|||||||
<div className="flex items-center justify-between text-sm">
|
<div className="flex items-center justify-between text-sm">
|
||||||
<span>未知动作占比</span>
|
<span>未知动作占比</span>
|
||||||
<span className="font-medium">
|
<span className="font-medium">
|
||||||
{segments.length > 0 ? `${Math.round((unknownSegments.length / segments.length) * 100)}%` : "0%"}
|
{totalDisplaySegments > 0 ? `${Math.round(((runtimeRole === "viewer" ? (runtimeSnapshot?.unknownSegments ?? 0) : unknownSegments.length) / totalDisplaySegments) * 100)}%` : "0%"}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
<Progress
|
<Progress
|
||||||
value={segments.length > 0 ? (unknownSegments.length / segments.length) * 100 : 0}
|
value={totalDisplaySegments > 0 ? (((runtimeRole === "viewer" ? (runtimeSnapshot?.unknownSegments ?? 0) : unknownSegments.length) / totalDisplaySegments) * 100) : 0}
|
||||||
className="mt-3 h-2"
|
className="mt-3 h-2"
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
@@ -2536,10 +2756,10 @@ export default function LiveCamera() {
|
|||||||
<Sparkles className="h-3.5 w-3.5" />
|
<Sparkles className="h-3.5 w-3.5" />
|
||||||
{heroAction.label}
|
{heroAction.label}
|
||||||
</Badge>
|
</Badge>
|
||||||
{avatarEnabled ? (
|
{displayAvatarEnabled ? (
|
||||||
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
|
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
|
||||||
<Camera className="h-3.5 w-3.5" />
|
<Camera className="h-3.5 w-3.5" />
|
||||||
{resolvedAvatarLabel}
|
{displayAvatarLabel}
|
||||||
</Badge>
|
</Badge>
|
||||||
) : null}
|
) : null}
|
||||||
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
|
<Badge className="gap-1.5 bg-black/60 text-white shadow-sm">
|
||||||
|
|||||||
@@ -31,7 +31,7 @@ import {
|
|||||||
recognizeActionFrame,
|
recognizeActionFrame,
|
||||||
stabilizeActionFrame,
|
stabilizeActionFrame,
|
||||||
} from "@/lib/actionRecognition";
|
} from "@/lib/actionRecognition";
|
||||||
import { applyTrackZoom, getCameraVideoConstraints, readTrackZoomState } from "@/lib/camera";
|
import { applyTrackZoom, readTrackZoomState, requestCameraStream } from "@/lib/camera";
|
||||||
import { formatDateTimeShanghai } from "@/lib/time";
|
import { formatDateTimeShanghai } from "@/lib/time";
|
||||||
import {
|
import {
|
||||||
Activity,
|
Activity,
|
||||||
@@ -189,6 +189,10 @@ function summarizeActions(actionSummary: Record<ActionType, number>) {
|
|||||||
export default function Recorder() {
|
export default function Recorder() {
|
||||||
const { user } = useAuth();
|
const { user } = useAuth();
|
||||||
const utils = trpc.useUtils();
|
const utils = trpc.useUtils();
|
||||||
|
const runtimeQuery = trpc.analysis.runtimeGet.useQuery(undefined, {
|
||||||
|
refetchInterval: 1000,
|
||||||
|
refetchIntervalInBackground: true,
|
||||||
|
});
|
||||||
const finalizeTaskMutation = trpc.task.createMediaFinalize.useMutation({
|
const finalizeTaskMutation = trpc.task.createMediaFinalize.useMutation({
|
||||||
onSuccess: (data) => {
|
onSuccess: (data) => {
|
||||||
setArchiveTaskId(data.taskId);
|
setArchiveTaskId(data.taskId);
|
||||||
@@ -262,6 +266,9 @@ export default function Recorder() {
|
|||||||
|
|
||||||
const mobile = useMemo(() => isMobileDevice(), []);
|
const mobile = useMemo(() => isMobileDevice(), []);
|
||||||
const mimeType = useMemo(() => pickRecorderMimeType(), []);
|
const mimeType = useMemo(() => pickRecorderMimeType(), []);
|
||||||
|
const runtimeRole = runtimeQuery.data?.role ?? "idle";
|
||||||
|
const liveAnalysisRuntime = runtimeQuery.data?.runtimeSession;
|
||||||
|
const liveAnalysisOccupied = runtimeRole === "viewer" && liveAnalysisRuntime?.status === "active";
|
||||||
const currentPlaybackUrl = mediaSession?.playback.mp4Url || mediaSession?.playback.webmUrl || "";
|
const currentPlaybackUrl = mediaSession?.playback.mp4Url || mediaSession?.playback.webmUrl || "";
|
||||||
const archiveTaskQuery = useBackgroundTask(archiveTaskId);
|
const archiveTaskQuery = useBackgroundTask(archiveTaskId);
|
||||||
const archiveProgress = archiveTaskQuery.data?.progress ?? getArchiveProgress(mediaSession);
|
const archiveProgress = archiveTaskQuery.data?.progress ?? getArchiveProgress(mediaSession);
|
||||||
@@ -402,14 +409,21 @@ export default function Recorder() {
|
|||||||
preferredZoom = zoomTargetRef.current,
|
preferredZoom = zoomTargetRef.current,
|
||||||
preset: keyof typeof QUALITY_PRESETS = qualityPreset,
|
preset: keyof typeof QUALITY_PRESETS = qualityPreset,
|
||||||
) => {
|
) => {
|
||||||
|
if (liveAnalysisOccupied) {
|
||||||
|
const title = liveAnalysisRuntime?.title || "其他设备正在实时分析";
|
||||||
|
toast.error(`${title},当前设备不能再开启录制摄像头`);
|
||||||
|
throw new Error("当前账号已有其他设备正在实时分析");
|
||||||
|
}
|
||||||
try {
|
try {
|
||||||
if (streamRef.current) {
|
if (streamRef.current) {
|
||||||
streamRef.current.getTracks().forEach((track) => track.stop());
|
streamRef.current.getTracks().forEach((track) => track.stop());
|
||||||
streamRef.current = null;
|
streamRef.current = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
const stream = await navigator.mediaDevices.getUserMedia({
|
const { stream, appliedFacingMode, audioEnabled, usedFallback } = await requestCameraStream({
|
||||||
video: getCameraVideoConstraints(nextFacingMode, mobile, preset),
|
facingMode: nextFacingMode,
|
||||||
|
isMobile: mobile,
|
||||||
|
preset,
|
||||||
audio: {
|
audio: {
|
||||||
echoCancellation: true,
|
echoCancellation: true,
|
||||||
noiseSuppression: true,
|
noiseSuppression: true,
|
||||||
@@ -426,6 +440,9 @@ export default function Recorder() {
|
|||||||
|
|
||||||
suppressTrackEndedRef.current = false;
|
suppressTrackEndedRef.current = false;
|
||||||
streamRef.current = stream;
|
streamRef.current = stream;
|
||||||
|
if (appliedFacingMode !== nextFacingMode) {
|
||||||
|
setFacingMode(appliedFacingMode);
|
||||||
|
}
|
||||||
if (liveVideoRef.current) {
|
if (liveVideoRef.current) {
|
||||||
liveVideoRef.current.srcObject = stream;
|
liveVideoRef.current.srcObject = stream;
|
||||||
await liveVideoRef.current.play();
|
await liveVideoRef.current.play();
|
||||||
@@ -433,6 +450,12 @@ export default function Recorder() {
|
|||||||
await syncZoomState(preferredZoom, stream.getVideoTracks()[0] || null);
|
await syncZoomState(preferredZoom, stream.getVideoTracks()[0] || null);
|
||||||
setCameraError("");
|
setCameraError("");
|
||||||
setCameraActive(true);
|
setCameraActive(true);
|
||||||
|
if (usedFallback) {
|
||||||
|
toast.info("当前设备已自动切换到兼容摄像头模式");
|
||||||
|
}
|
||||||
|
if (!audioEnabled) {
|
||||||
|
toast.warning("麦克风不可用,已切换为仅视频模式");
|
||||||
|
}
|
||||||
return stream;
|
return stream;
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
const message = error?.message || "无法访问摄像头";
|
const message = error?.message || "无法访问摄像头";
|
||||||
@@ -440,7 +463,7 @@ export default function Recorder() {
|
|||||||
toast.error(`摄像头启动失败: ${message}`);
|
toast.error(`摄像头启动失败: ${message}`);
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
}), [facingMode, mobile, qualityPreset, syncZoomState]);
|
}), [facingMode, liveAnalysisOccupied, liveAnalysisRuntime?.title, mobile, qualityPreset, syncZoomState]);
|
||||||
|
|
||||||
const ensurePreviewStream = useCallback(async () => {
|
const ensurePreviewStream = useCallback(async () => {
|
||||||
if (streamRef.current) {
|
if (streamRef.current) {
|
||||||
@@ -849,6 +872,11 @@ export default function Recorder() {
|
|||||||
toast.error("请先登录后再开始录制");
|
toast.error("请先登录后再开始录制");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
if (liveAnalysisOccupied) {
|
||||||
|
const title = liveAnalysisRuntime?.title || "其他设备正在实时分析";
|
||||||
|
toast.error(`${title},当前设备不能同时开始录制`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
setMode("preparing");
|
setMode("preparing");
|
||||||
@@ -898,7 +926,21 @@ export default function Recorder() {
|
|||||||
setMode("idle");
|
setMode("idle");
|
||||||
toast.error(`启动录制失败: ${error?.message || "未知错误"}`);
|
toast.error(`启动录制失败: ${error?.message || "未知错误"}`);
|
||||||
}
|
}
|
||||||
}, [ensurePreviewStream, facingMode, mimeType, mobile, qualityPreset, startActionSampling, startRealtimePush, startRecorderLoop, syncSessionState, title, user]);
|
}, [
|
||||||
|
ensurePreviewStream,
|
||||||
|
facingMode,
|
||||||
|
liveAnalysisOccupied,
|
||||||
|
liveAnalysisRuntime?.title,
|
||||||
|
mimeType,
|
||||||
|
mobile,
|
||||||
|
qualityPreset,
|
||||||
|
startActionSampling,
|
||||||
|
startRealtimePush,
|
||||||
|
startRecorderLoop,
|
||||||
|
syncSessionState,
|
||||||
|
title,
|
||||||
|
user,
|
||||||
|
]);
|
||||||
|
|
||||||
const finishRecording = useCallback(async () => {
|
const finishRecording = useCallback(async () => {
|
||||||
const session = currentSessionRef.current;
|
const session = currentSessionRef.current;
|
||||||
@@ -1140,9 +1182,10 @@ export default function Recorder() {
|
|||||||
data-testid="recorder-start-camera-button"
|
data-testid="recorder-start-camera-button"
|
||||||
onClick={() => void startCamera()}
|
onClick={() => void startCamera()}
|
||||||
className={buttonClass()}
|
className={buttonClass()}
|
||||||
|
disabled={liveAnalysisOccupied}
|
||||||
>
|
>
|
||||||
<Camera className={iconClass} />
|
<Camera className={iconClass} />
|
||||||
{labelFor("启动摄像头", "启动")}
|
{labelFor(liveAnalysisOccupied ? "实时分析占用中" : "启动摄像头", liveAnalysisOccupied ? "占用" : "启动")}
|
||||||
</Button>
|
</Button>
|
||||||
) : (
|
) : (
|
||||||
<>
|
<>
|
||||||
@@ -1150,9 +1193,10 @@ export default function Recorder() {
|
|||||||
data-testid="recorder-start-recording-button"
|
data-testid="recorder-start-recording-button"
|
||||||
onClick={() => void beginRecording()}
|
onClick={() => void beginRecording()}
|
||||||
className={buttonClass("record")}
|
className={buttonClass("record")}
|
||||||
|
disabled={liveAnalysisOccupied}
|
||||||
>
|
>
|
||||||
<Circle className={`${iconClass} ${rail ? "fill-current" : "fill-current"}`} />
|
<Circle className={`${iconClass} ${rail ? "fill-current" : "fill-current"}`} />
|
||||||
{labelFor("开始录制", "录制")}
|
{labelFor(liveAnalysisOccupied ? "实时分析占用中" : "开始录制", liveAnalysisOccupied ? "占用" : "录制")}
|
||||||
</Button>
|
</Button>
|
||||||
<Button variant="outline" onClick={stopCamera} className={buttonClass("outline")}>
|
<Button variant="outline" onClick={stopCamera} className={buttonClass("outline")}>
|
||||||
<VideoOff className={iconClass} />
|
<VideoOff className={iconClass} />
|
||||||
@@ -1362,6 +1406,23 @@ export default function Recorder() {
|
|||||||
</Alert>
|
</Alert>
|
||||||
) : null}
|
) : null}
|
||||||
|
|
||||||
|
{liveAnalysisOccupied ? (
|
||||||
|
<Alert className="border-amber-300/70 bg-amber-50 text-amber-950">
|
||||||
|
<ShieldAlert className="h-4 w-4" />
|
||||||
|
<AlertTitle>当前账号已有其他设备正在实时分析</AlertTitle>
|
||||||
|
<AlertDescription>
|
||||||
|
{liveAnalysisRuntime?.title || "其他设备正在实时分析"},本页已禁止再次启动摄像头和录制,避免同账号多端同时占用镜头。
|
||||||
|
你可以前往
|
||||||
|
{" "}
|
||||||
|
<a href="/live-camera" className="font-medium underline underline-offset-4">
|
||||||
|
实时分析页
|
||||||
|
</a>
|
||||||
|
{" "}
|
||||||
|
查看同步画面与动作识别结果。
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
) : null}
|
||||||
|
|
||||||
<div className="grid gap-4 xl:grid-cols-[minmax(0,1.7fr)_minmax(340px,0.9fr)]">
|
<div className="grid gap-4 xl:grid-cols-[minmax(0,1.7fr)_minmax(340px,0.9fr)]">
|
||||||
<section className="space-y-4">
|
<section className="space-y-4">
|
||||||
<Card className="overflow-hidden border-0 shadow-lg">
|
<Card className="overflow-hidden border-0 shadow-lg">
|
||||||
|
|||||||
@@ -1,5 +1,164 @@
|
|||||||
# Tennis Training Hub - 变更日志
|
# Tennis Training Hub - 变更日志
|
||||||
|
|
||||||
|
## 2026.03.17-live-camera-preview-recovery (2026-03-17)
|
||||||
|
|
||||||
|
### 功能更新
|
||||||
|
|
||||||
|
- `/live-camera` 的 runtime 标题恢复逻辑新增更严格的乱码筛除与二次 UTF-8 解码兜底,`æœ...` 这类异常标题会优先恢复为正常中文;无法恢复时会自动回退到稳定默认标题,避免继续显示脏字符串
|
||||||
|
- 同步观看退出时会完整重置 viewer 轮询、连接标记和帧版本,不再把旧的 viewer 状态带回 owner 或空闲态,修复退出同步后仍黑屏、仍显示“等待同步画面”的问题
|
||||||
|
- 本地摄像头预览增加独立重绑流程和多次 watchdog 重试,即使浏览器首帧没有及时绑定 `srcObject` 或 `play()` 被短暂中断,也会继续自动恢复本地预览
|
||||||
|
- 视频区域是否显示画面改为按当前 runtime 角色分别判断,避免 viewer 旧连接状态误导 owner 模式,导致本地没有预览时仍错误隐藏占位提示
|
||||||
|
|
||||||
|
### 测试
|
||||||
|
|
||||||
|
- `pnpm check`
|
||||||
|
- `pnpm vitest run client/src/lib/liveCamera.test.ts`
|
||||||
|
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"`
|
||||||
|
- `pnpm build`
|
||||||
|
- 线上 smoke:`curl -I https://te.hao.work/`
|
||||||
|
- 线上 smoke:`curl -I https://te.hao.work/assets/index-BJ7rV3xe.js`
|
||||||
|
- 线上 smoke:`curl -I https://te.hao.work/assets/index-tNGuStgv.css`
|
||||||
|
- 线上 smoke:`curl -I https://te.hao.work/assets/pose-CZKsH31a.js`
|
||||||
|
|
||||||
|
### 线上 smoke
|
||||||
|
|
||||||
|
- `https://te.hao.work/` 已切换到本次新构建
|
||||||
|
- 当前公开站点前端资源 revision:`assets/index-BJ7rV3xe.js`、`assets/index-tNGuStgv.css`、`assets/pose-CZKsH31a.js`
|
||||||
|
- 已确认 `index`、`css` 与 `pose` 模块均返回 `200`,且 MIME 分别为 `application/javascript`、`text/css`、`application/javascript`,不再出现此前的模块脚本和样式被当成 `text/html` 返回的问题
|
||||||
|
|
||||||
|
### 仓库版本
|
||||||
|
|
||||||
|
- `06b9701`
|
||||||
|
|
||||||
|
## 2026.03.16-live-camera-runtime-refresh (2026-03-16)
|
||||||
|
|
||||||
|
### 功能更新
|
||||||
|
|
||||||
|
- `/live-camera` 在打开拍摄引导、启用摄像头、开始分析前,都会先向服务端强制刷新 runtime 状态,避免旧的同步观看锁残留导致本机明明已释放却仍无法启动
|
||||||
|
- 新增 runtime 标题乱码恢复逻辑,可自动把 UTF-8 被误按 Latin-1 显示的标题恢复成正常中文,避免出现 `æœ...` 一类异常标题
|
||||||
|
- 摄像头启动链路改为以 `getUserMedia` 成功为准;即使本地预览 `<video>` 的 `srcObject` 或 `play()` 在当前浏览器中短暂失败,也不会直接把整次启动判死
|
||||||
|
- e2e mock 的媒体流补齐为带假视频轨道的流对象,并把 viewer 回归改为校验“服务端 relay、无 viewer-signal”行为,避免继续按旧 P2P 逻辑断言
|
||||||
|
|
||||||
|
### 测试
|
||||||
|
|
||||||
|
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera page exposes camera startup controls|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active"`
|
||||||
|
- `pnpm build`
|
||||||
|
- 部署后线上 smoke:登录 `H1` 后访问 `https://te.hao.work/live-camera`,确认空闲态“启动摄像头”入口可见,不再被残留 viewer 锁卡住
|
||||||
|
|
||||||
|
### 线上 smoke
|
||||||
|
|
||||||
|
- `https://te.hao.work/` 已切换到本次新构建
|
||||||
|
- 当前公开站点前端资源 revision:`assets/index-33wVjC4p.js` 与 `assets/index-tNGuStgv.css`
|
||||||
|
- 真实验证已通过:登录 `H1` 后访问 `https://te.hao.work/live-camera`,页面会正常显示“摄像头未启动 / 启动摄像头”,说明旧的 viewer 锁残留不会再把空闲设备卡在同步观看模式
|
||||||
|
|
||||||
|
### 仓库版本
|
||||||
|
|
||||||
|
- `8e9e491`
|
||||||
|
|
||||||
|
## 2026.03.16-live-viewer-server-relay (2026-03-16)
|
||||||
|
|
||||||
|
### 功能更新
|
||||||
|
|
||||||
|
- `/live-camera` 的同步观看改为由 media 服务中转最新合成帧图,不再依赖浏览器之间的 P2P WebRTC viewer 连接
|
||||||
|
- owner 端会把“原视频 + 骨架/关键点 + 虚拟形象”的合成画布压缩成 JPEG 并持续上传到 media 服务
|
||||||
|
- viewer 端改为自动轮询 media 服务中的最新同步帧图,因此即使浏览器之间无法直连,也能继续看到同步画面和状态
|
||||||
|
- 同步观看模式文案已调整为明确提示“通过 media 服务中转”,等待阶段会继续自动刷新,而不是停留在 P2P 连接失败状态
|
||||||
|
- media 服务新增 live-frame 上传与静态分发能力,并记录最近同步帧时间,方便后续继续扩展更高频的服务端 relay
|
||||||
|
|
||||||
|
### 测试
|
||||||
|
|
||||||
|
- `cd media && go test ./...`
|
||||||
|
- `pnpm build`
|
||||||
|
- `playwright-skill` 线上 smoke:先用 media 服务创建 relay session、上传 live-frame,并把 `H1` 的 `live_analysis_runtime` 注入为 active viewer 场景;随后访问 `https://te.hao.work/live-camera`,确认页面进入“同步观看模式”、同步帧来自 `/media/assets/sessions/.../live-frame.jpg`,且 `viewer-signal` 请求数为 `0`
|
||||||
|
|
||||||
|
### 线上 smoke
|
||||||
|
|
||||||
|
- `https://te.hao.work/` 已切换到本次新构建
|
||||||
|
- 当前公开站点前端资源 revision:`assets/index-BC-IupO8.js` 与 `assets/index-tNGuStgv.css`
|
||||||
|
- 真实验证已通过:viewer 端进入“同步观看模式”后,画面由 media 服务静态分发的 `live-frame.jpg` 提供,已确认不再触发 `/viewer-signal` P2P 观看请求
|
||||||
|
|
||||||
|
### 仓库版本
|
||||||
|
|
||||||
|
- `bb46d26`
|
||||||
|
|
||||||
|
## 2026.03.16-camera-startup-fallbacks (2026-03-16)
|
||||||
|
|
||||||
|
### 功能更新
|
||||||
|
|
||||||
|
- 修复部分设备在 `/live-camera` 和 `/recorder` 中因默认后置镜头、分辨率或帧率约束不兼容而直接启动摄像头失败的问题
|
||||||
|
- 摄像头请求现在会自动按当前画质、去掉高约束、低分辨率、备用镜头、任意可用镜头依次降级重试
|
||||||
|
- `/recorder` 在麦克风不可用或麦克风权限未给出时,会自动回退到仅视频模式,不再让整次预览启动失败
|
||||||
|
- 如果实际启用的是兼容镜头或降级模式,页面会显示提示,帮助区分“自动修复成功”与“仍然无法访问摄像头”
|
||||||
|
|
||||||
|
### 测试
|
||||||
|
|
||||||
|
- `pnpm build`
|
||||||
|
- `playwright-skill` 线上 smoke:通过注入 `getUserMedia` 回归验证 `/live-camera` 首轮高约束失败后会自动降级到兼容摄像头模式,`/recorder` 在麦克风不可用时会自动回退到仅视频模式并继续启动预览
|
||||||
|
|
||||||
|
### 线上 smoke
|
||||||
|
|
||||||
|
- `https://te.hao.work/` 已切换到本次新构建
|
||||||
|
- 当前公开站点前端资源 revision:`assets/index-CRxtWK07.js` 与 `assets/index-tNGuStgv.css`
|
||||||
|
- 真实回归已通过:模拟高约束失败时,`/live-camera` 会提示“当前设备已自动切换到兼容摄像头模式”并继续启动;模拟麦克风不可用时,`/recorder` 会提示“麦克风不可用,已切换为仅视频模式”并继续显示录制入口
|
||||||
|
|
||||||
|
### 仓库版本
|
||||||
|
|
||||||
|
- `a211562`
|
||||||
|
|
||||||
|
## 2026.03.16-live-analysis-viewer-full-sync (2026-03-16)
|
||||||
|
|
||||||
|
### 功能更新
|
||||||
|
|
||||||
|
- 同账号多端同步观看时,viewer 端现在会按持有端 runtime snapshot 完整渲染,不再混用本地默认状态
|
||||||
|
- `/live-camera` viewer 端新增主端同步信息卡,可看到当前会话标题、训练模式、设备端、拍摄视角、画质模式、虚拟形象状态和最近同步时间
|
||||||
|
- viewer 端现在会同步显示主端当前处于“分析中 / 保存中 / 已保存 / 保存失败”的阶段状态
|
||||||
|
- viewer 页面在同步观看模式下会自动关闭拍摄校准弹窗,避免被“启用摄像头”引导遮挡画面和状态信息
|
||||||
|
|
||||||
|
### 测试
|
||||||
|
|
||||||
|
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera switches into viewer mode|viewer stream|recorder blocks"`
|
||||||
|
- `pnpm build`
|
||||||
|
- `playwright-skill` 线上 smoke:同账号 `H1` 双端登录后,移动端 owner 开始实时分析,桌面端 `/live-camera` 进入同步观看并显示主端信息、同步视频流,owner 点击结束分析后 viewer 同步进入保存阶段
|
||||||
|
|
||||||
|
### 线上 smoke
|
||||||
|
|
||||||
|
- `https://te.hao.work/` 已切换到本次新构建
|
||||||
|
- 当前公开站点前端资源 revision:`assets/index-HRdM3fxq.js` 与 `assets/index-tNGuStgv.css`
|
||||||
|
- 真实双端验证已通过:同账号 `H1` 在移动端开启实时分析后,桌面端 `/live-camera` 会自动进入同步观看模式,显示主端设备信息、最近同步时间和远端视频流;owner 点击结束分析后,viewer 会同步进入“保存中”阶段
|
||||||
|
|
||||||
|
### 仓库版本
|
||||||
|
|
||||||
|
- `922a9fb`
|
||||||
|
|
||||||
|
## 2026.03.16-live-analysis-lock-hardening (2026-03-16)
|
||||||
|
|
||||||
|
### 功能更新
|
||||||
|
|
||||||
|
- 修复同账号多端实时分析在旧登录态下仍可重复占用摄像头的问题;缺少 `sid` 的旧 token 现在会按 token 本身派生唯一会话标识
|
||||||
|
- `/live-camera` 的同步观看模式新增自动重试;当持有端刚启动推流、viewer 首次连接返回 `viewer stream not ready` 时,会继续重连,不再长时间停留在无画面状态
|
||||||
|
- `/recorder` 接入实时分析占用锁;其他设备正在实时分析时,本页会禁止再次启动摄像头和开始录制,并提示前往 `/live-camera` 查看同步画面
|
||||||
|
- 应用启动改为先监听 HTTP 端口、再后台串行执行教程图同步和标准库预热,修复新容器上线时公网长时间返回 `502`
|
||||||
|
|
||||||
|
### 测试
|
||||||
|
|
||||||
|
- `curl -I https://te.hao.work/`
|
||||||
|
- `pnpm check`
|
||||||
|
- `pnpm exec vitest run server/_core/sdk.test.ts server/features.test.ts`
|
||||||
|
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "viewer mode|viewer stream|recorder blocks"`
|
||||||
|
- `playwright-skill` 线上校验:登录 `H1` 后访问 `/changelog`,确认 `2026.03.16-live-analysis-lock-hardening` 与仓库版本 `f9db6ef` 已展示
|
||||||
|
- `pnpm build`
|
||||||
|
- Playwright 线上 smoke:`H1` 手机端开启实时分析后,PC 端 `/live-camera` 自动进入同步观看并显示同步画面,`/recorder` 禁止启动摄像头;结束分析后会话可正常释放
|
||||||
|
|
||||||
|
### 线上 smoke
|
||||||
|
|
||||||
|
- `https://te.hao.work/` 已切换到本次新构建,不再返回 `502`
|
||||||
|
- 当前公开站点前端资源 revision:`assets/index-mi8CPCFI.js` 与 `assets/index-Cp_VJ8sf.css`
|
||||||
|
- 真实双端验证已通过:同账号 `H1` 手机端开始实时分析后,PC 端 `/live-camera` 进入同步观看模式且可拉起同步流,`/recorder` 页面会阻止再次占用摄像头
|
||||||
|
|
||||||
|
### 仓库版本
|
||||||
|
|
||||||
|
- `f9db6ef`
|
||||||
|
|
||||||
## 2026.03.16-live-analysis-runtime-migration (2026-03-16)
|
## 2026.03.16-live-analysis-runtime-migration (2026-03-16)
|
||||||
|
|
||||||
### 功能更新
|
### 功能更新
|
||||||
|
|||||||
@@ -105,6 +105,8 @@ type Session struct {
|
|||||||
StreamConnected bool `json:"streamConnected"`
|
StreamConnected bool `json:"streamConnected"`
|
||||||
LastStreamAt string `json:"lastStreamAt,omitempty"`
|
LastStreamAt string `json:"lastStreamAt,omitempty"`
|
||||||
ViewerCount int `json:"viewerCount"`
|
ViewerCount int `json:"viewerCount"`
|
||||||
|
LiveFrameURL string `json:"liveFrameUrl,omitempty"`
|
||||||
|
LiveFrameUpdated string `json:"liveFrameUpdatedAt,omitempty"`
|
||||||
Playback PlaybackInfo `json:"playback"`
|
Playback PlaybackInfo `json:"playback"`
|
||||||
Segments []SegmentMeta `json:"segments"`
|
Segments []SegmentMeta `json:"segments"`
|
||||||
Markers []Marker `json:"markers"`
|
Markers []Marker `json:"markers"`
|
||||||
@@ -229,6 +231,14 @@ func (s *sessionStore) publicDir(id string) string {
|
|||||||
return filepath.Join(s.public, "sessions", id)
|
return filepath.Join(s.public, "sessions", id)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (s *sessionStore) liveFramePath(id string) string {
|
||||||
|
return filepath.Join(s.publicDir(id), "live-frame.jpg")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *sessionStore) liveFrameURL(id string) string {
|
||||||
|
return fmt.Sprintf("/media/assets/sessions/%s/live-frame.jpg", id)
|
||||||
|
}
|
||||||
|
|
||||||
func (s *sessionStore) saveSession(session *Session) error {
|
func (s *sessionStore) saveSession(session *Session) error {
|
||||||
session.UpdatedAt = time.Now().UTC().Format(time.RFC3339)
|
session.UpdatedAt = time.Now().UTC().Format(time.RFC3339)
|
||||||
dir := s.sessionDir(session.ID)
|
dir := s.sessionDir(session.ID)
|
||||||
@@ -504,6 +514,12 @@ func (m *mediaServer) handleSession(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
m.handleSegmentUpload(sessionID, w, r)
|
m.handleSegmentUpload(sessionID, w, r)
|
||||||
|
case "live-frame":
|
||||||
|
if r.Method != http.MethodPost {
|
||||||
|
http.NotFound(w, r)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
m.handleLiveFrameUpload(sessionID, w, r)
|
||||||
case "markers":
|
case "markers":
|
||||||
if r.Method != http.MethodPost {
|
if r.Method != http.MethodPost {
|
||||||
http.NotFound(w, r)
|
http.NotFound(w, r)
|
||||||
@@ -726,6 +742,59 @@ func (m *mediaServer) handleViewerSignal(sessionID string, w http.ResponseWriter
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (m *mediaServer) handleLiveFrameUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
|
||||||
|
if _, err := m.store.getSession(sessionID); err != nil {
|
||||||
|
writeError(w, http.StatusNotFound, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
body := http.MaxBytesReader(w, r.Body, 4<<20)
|
||||||
|
defer body.Close()
|
||||||
|
|
||||||
|
frame, err := io.ReadAll(body)
|
||||||
|
if err != nil || len(frame) == 0 {
|
||||||
|
writeError(w, http.StatusBadRequest, "invalid live frame payload")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
publicDir := m.store.publicDir(sessionID)
|
||||||
|
if err := os.MkdirAll(publicDir, 0o755); err != nil {
|
||||||
|
writeError(w, http.StatusInternalServerError, "failed to create live frame directory")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tmpFile := filepath.Join(publicDir, fmt.Sprintf("live-frame-%s.tmp", randomID()))
|
||||||
|
if err := os.WriteFile(tmpFile, frame, 0o644); err != nil {
|
||||||
|
writeError(w, http.StatusInternalServerError, "failed to write live frame")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer os.Remove(tmpFile)
|
||||||
|
|
||||||
|
finalFile := m.store.liveFramePath(sessionID)
|
||||||
|
if err := os.Rename(tmpFile, finalFile); err != nil {
|
||||||
|
writeError(w, http.StatusInternalServerError, "failed to publish live frame")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
session, err := m.store.updateSession(sessionID, func(session *Session) error {
|
||||||
|
session.LiveFrameURL = m.store.liveFrameURL(sessionID)
|
||||||
|
session.LiveFrameUpdated = time.Now().UTC().Format(time.RFC3339)
|
||||||
|
session.StreamConnected = true
|
||||||
|
session.LastStreamAt = session.LiveFrameUpdated
|
||||||
|
if session.Status == StatusCreated || session.Status == StatusReconnecting {
|
||||||
|
session.Status = StatusStreaming
|
||||||
|
}
|
||||||
|
session.LastError = ""
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
writeError(w, http.StatusInternalServerError, "failed to update live frame session state")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusAccepted, map[string]any{"session": session})
|
||||||
|
}
|
||||||
|
|
||||||
func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
|
func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
|
||||||
sequence, err := strconv.Atoi(r.URL.Query().Get("sequence"))
|
sequence, err := strconv.Atoi(r.URL.Query().Get("sequence"))
|
||||||
if err != nil || sequence < 0 {
|
if err != nil || sequence < 0 {
|
||||||
|
|||||||
@@ -278,3 +278,45 @@ func TestViewerSignalReturnsConflictBeforePublisherTrackReady(t *testing.T) {
|
|||||||
t.Fatalf("expected viewer-signal 409 before video track is ready, got %d", res.Code)
|
t.Fatalf("expected viewer-signal 409 before video track is ready, got %d", res.Code)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestLiveFrameUploadPublishesRelayFrame(t *testing.T) {
|
||||||
|
store, err := newSessionStore(t.TempDir())
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("newSessionStore: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
server := newMediaServer(store)
|
||||||
|
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Session"})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("createSession: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/media/sessions/"+session.ID+"/live-frame", strings.NewReader("jpeg-frame"))
|
||||||
|
req.Header.Set("Content-Type", "image/jpeg")
|
||||||
|
res := httptest.NewRecorder()
|
||||||
|
server.routes().ServeHTTP(res, req)
|
||||||
|
|
||||||
|
if res.Code != http.StatusAccepted {
|
||||||
|
t.Fatalf("expected live-frame upload 202, got %d", res.Code)
|
||||||
|
}
|
||||||
|
|
||||||
|
current, err := store.getSession(session.ID)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("getSession: %v", err)
|
||||||
|
}
|
||||||
|
if current.LiveFrameURL == "" || current.LiveFrameUpdated == "" {
|
||||||
|
t.Fatalf("expected live frame metadata to be recorded, got %#v", current)
|
||||||
|
}
|
||||||
|
if !current.StreamConnected {
|
||||||
|
t.Fatalf("expected session stream connected after frame upload")
|
||||||
|
}
|
||||||
|
|
||||||
|
framePath := store.liveFramePath(session.ID)
|
||||||
|
body, err := os.ReadFile(framePath)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("read live frame: %v", err)
|
||||||
|
}
|
||||||
|
if string(body) != "jpeg-frame" {
|
||||||
|
t.Fatalf("unexpected live frame content: %q", string(body))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -13,6 +13,26 @@ import { createBackgroundTask, getAdminUserId, hasRecentBackgroundTaskOfType, se
|
|||||||
import { nanoid } from "nanoid";
|
import { nanoid } from "nanoid";
|
||||||
import { syncTutorialImages } from "../tutorialImages";
|
import { syncTutorialImages } from "../tutorialImages";
|
||||||
|
|
||||||
|
async function warmupApplicationData() {
|
||||||
|
const tasks: Array<{ label: string; run: () => Promise<unknown> }> = [
|
||||||
|
{ label: "seedTutorials", run: () => seedTutorials() },
|
||||||
|
{ label: "syncTutorialImages", run: () => syncTutorialImages() },
|
||||||
|
{ label: "seedVisionReferenceImages", run: () => seedVisionReferenceImages() },
|
||||||
|
{ label: "seedAchievementDefinitions", run: () => seedAchievementDefinitions() },
|
||||||
|
{ label: "seedAppSettings", run: () => seedAppSettings() },
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const task of tasks) {
|
||||||
|
const startedAt = Date.now();
|
||||||
|
try {
|
||||||
|
await task.run();
|
||||||
|
console.log(`[startup] ${task.label} finished in ${Date.now() - startedAt}ms`);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`[startup] ${task.label} failed`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async function scheduleDailyNtrpRefresh() {
|
async function scheduleDailyNtrpRefresh() {
|
||||||
const now = new Date();
|
const now = new Date();
|
||||||
if (now.getHours() !== 0 || now.getMinutes() > 5) {
|
if (now.getHours() !== 0 || now.getMinutes() > 5) {
|
||||||
@@ -64,12 +84,6 @@ async function findAvailablePort(startPort: number = 3000): Promise<number> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async function startServer() {
|
async function startServer() {
|
||||||
await seedTutorials();
|
|
||||||
await syncTutorialImages();
|
|
||||||
await seedVisionReferenceImages();
|
|
||||||
await seedAchievementDefinitions();
|
|
||||||
await seedAppSettings();
|
|
||||||
|
|
||||||
const app = express();
|
const app = express();
|
||||||
const server = createServer(app);
|
const server = createServer(app);
|
||||||
registerMediaProxy(app);
|
registerMediaProxy(app);
|
||||||
@@ -108,6 +122,7 @@ async function startServer() {
|
|||||||
|
|
||||||
server.listen(port, () => {
|
server.listen(port, () => {
|
||||||
console.log(`Server running on http://localhost:${port}/`);
|
console.log(`Server running on http://localhost:${port}/`);
|
||||||
|
void warmupApplicationData();
|
||||||
});
|
});
|
||||||
|
|
||||||
setInterval(() => {
|
setInterval(() => {
|
||||||
|
|||||||
57
server/_core/sdk.test.ts
普通文件
57
server/_core/sdk.test.ts
普通文件
@@ -0,0 +1,57 @@
|
|||||||
|
import { SignJWT } from "jose";
|
||||||
|
import { describe, expect, it, vi } from "vitest";
|
||||||
|
|
||||||
|
async function loadSdkForTest() {
|
||||||
|
process.env.JWT_SECRET = "test-cookie-secret";
|
||||||
|
process.env.VITE_APP_ID = "test-app";
|
||||||
|
vi.resetModules();
|
||||||
|
|
||||||
|
const [{ sdk }, { ENV }] = await Promise.all([
|
||||||
|
import("./sdk"),
|
||||||
|
import("./env"),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return { sdk, ENV };
|
||||||
|
}
|
||||||
|
|
||||||
|
async function signLegacyToken(openId: string, appId: string, name: string) {
|
||||||
|
const secret = new TextEncoder().encode(process.env.JWT_SECRET || "");
|
||||||
|
return new SignJWT({
|
||||||
|
openId,
|
||||||
|
appId,
|
||||||
|
name,
|
||||||
|
})
|
||||||
|
.setProtectedHeader({ alg: "HS256", typ: "JWT" })
|
||||||
|
.setExpirationTime(Math.floor((Date.now() + 60_000) / 1000))
|
||||||
|
.sign(secret);
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("sdk.verifySession", () => {
|
||||||
|
it("derives a stable legacy sid when the token payload does not include sid", async () => {
|
||||||
|
const { sdk, ENV } = await loadSdkForTest();
|
||||||
|
const legacyToken = await signLegacyToken("username_H1_legacy", ENV.appId, "H1");
|
||||||
|
|
||||||
|
const session = await sdk.verifySession(legacyToken);
|
||||||
|
|
||||||
|
expect(session).not.toBeNull();
|
||||||
|
expect(session?.sid).toMatch(/^legacy-token:/);
|
||||||
|
expect(session?.sid).toHaveLength("legacy-token:".length + 32);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("derives different legacy sid values for different legacy login tokens", async () => {
|
||||||
|
const firstLoad = await loadSdkForTest();
|
||||||
|
const tokenA = await signLegacyToken("username_H1_legacy", firstLoad.ENV.appId, "H1");
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 5));
|
||||||
|
|
||||||
|
const secondLoad = await loadSdkForTest();
|
||||||
|
const tokenB = await signLegacyToken("username_H1_legacy", secondLoad.ENV.appId, "H1-second");
|
||||||
|
|
||||||
|
const sessionA = await firstLoad.sdk.verifySession(tokenA);
|
||||||
|
const sessionB = await secondLoad.sdk.verifySession(tokenB);
|
||||||
|
|
||||||
|
expect(sessionA?.sid).toMatch(/^legacy-token:/);
|
||||||
|
expect(sessionB?.sid).toMatch(/^legacy-token:/);
|
||||||
|
expect(sessionA?.sid).not.toBe(sessionB?.sid);
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -4,6 +4,7 @@ import axios, { type AxiosInstance } from "axios";
|
|||||||
import { parse as parseCookieHeader } from "cookie";
|
import { parse as parseCookieHeader } from "cookie";
|
||||||
import type { Request } from "express";
|
import type { Request } from "express";
|
||||||
import { SignJWT, jwtVerify } from "jose";
|
import { SignJWT, jwtVerify } from "jose";
|
||||||
|
import { createHash } from "node:crypto";
|
||||||
import type { User } from "../../drizzle/schema";
|
import type { User } from "../../drizzle/schema";
|
||||||
import * as db from "../db";
|
import * as db from "../db";
|
||||||
import { ENV } from "./env";
|
import { ENV } from "./env";
|
||||||
@@ -223,11 +224,15 @@ class SDKServer {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const derivedSid = typeof sid === "string" && sid.length > 0
|
||||||
|
? sid
|
||||||
|
: `legacy-token:${createHash("sha256").update(cookieValue).digest("hex").slice(0, 32)}`;
|
||||||
|
|
||||||
return {
|
return {
|
||||||
openId,
|
openId,
|
||||||
appId,
|
appId,
|
||||||
name: typeof name === "string" ? name : undefined,
|
name: typeof name === "string" ? name : undefined,
|
||||||
sid: typeof sid === "string" ? sid : undefined,
|
sid: derivedSid,
|
||||||
};
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.warn("[Auth] Session verification failed", String(error));
|
console.warn("[Auth] Session verification failed", String(error));
|
||||||
|
|||||||
@@ -75,9 +75,43 @@ test("live camera switches into viewer mode when another device already owns ana
|
|||||||
await expect(page.getByText("同步观看模式")).toBeVisible();
|
await expect(page.getByText("同步观看模式")).toBeVisible();
|
||||||
await expect(page.getByText(/同步观看|重新同步/).first()).toBeVisible();
|
await expect(page.getByText(/同步观看|重新同步/).first()).toBeVisible();
|
||||||
await expect(page.getByText("当前设备已锁定为观看模式")).toBeVisible();
|
await expect(page.getByText("当前设备已锁定为观看模式")).toBeVisible();
|
||||||
|
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText("其他设备实时分析");
|
||||||
|
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText("移动端");
|
||||||
|
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText("均衡模式");
|
||||||
|
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText("猩猩");
|
||||||
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
|
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("live camera recovers mojibake viewer titles before rendering", async ({ page }) => {
|
||||||
|
const state = await installAppMocks(page, { authenticated: true, liveViewerMode: true });
|
||||||
|
const mojibakeTitle = Buffer.from("服务端同步烟雾测试", "utf8").toString("latin1");
|
||||||
|
if (state.liveRuntime.runtimeSession) {
|
||||||
|
state.liveRuntime.runtimeSession.title = mojibakeTitle;
|
||||||
|
state.liveRuntime.runtimeSession.snapshot = {
|
||||||
|
...state.liveRuntime.runtimeSession.snapshot,
|
||||||
|
title: mojibakeTitle,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
await page.goto("/live-camera");
|
||||||
|
await expect(page.getByRole("heading", { name: "服务端同步烟雾测试" })).toBeVisible();
|
||||||
|
await expect(page.getByText(mojibakeTitle)).toHaveCount(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("live camera no longer opens viewer peer retries when server relay is active", async ({ page }) => {
|
||||||
|
const state = await installAppMocks(page, {
|
||||||
|
authenticated: true,
|
||||||
|
liveViewerMode: true,
|
||||||
|
viewerSignalConflictOnce: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
await page.goto("/live-camera");
|
||||||
|
await expect(page.getByText("同步观看模式")).toBeVisible();
|
||||||
|
await expect.poll(() => state.viewerSignalConflictRemaining).toBe(1);
|
||||||
|
await expect.poll(() => state.mediaSession?.viewerCount ?? 0).toBe(0);
|
||||||
|
await expect(page.locator('img[alt="同步中的实时分析画面"]')).toBeVisible();
|
||||||
|
});
|
||||||
|
|
||||||
test("live camera archives overlay videos into the library after analysis stops", async ({ page }) => {
|
test("live camera archives overlay videos into the library after analysis stops", async ({ page }) => {
|
||||||
await installAppMocks(page, { authenticated: true, videos: [] });
|
await installAppMocks(page, { authenticated: true, videos: [] });
|
||||||
|
|
||||||
@@ -126,3 +160,11 @@ test("recorder flow archives a session and exposes it in videos", async ({ page
|
|||||||
await expect(page.getByTestId("video-card")).toHaveCount(1);
|
await expect(page.getByTestId("video-card")).toHaveCount(1);
|
||||||
await expect(page.getByText("E2E 录制")).toBeVisible();
|
await expect(page.getByText("E2E 录制")).toBeVisible();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("recorder blocks local camera when another device owns live analysis", async ({ page }) => {
|
||||||
|
await installAppMocks(page, { authenticated: true, liveViewerMode: true });
|
||||||
|
|
||||||
|
await page.goto("/recorder");
|
||||||
|
await expect(page.getByText("当前账号已有其他设备正在实时分析")).toBeVisible();
|
||||||
|
await expect(page.getByTestId("recorder-start-camera-button")).toBeDisabled();
|
||||||
|
});
|
||||||
|
|||||||
@@ -100,6 +100,7 @@ type MockAppState = {
|
|||||||
nextVideoId: number;
|
nextVideoId: number;
|
||||||
nextTaskId: number;
|
nextTaskId: number;
|
||||||
authMeNullResponsesAfterLogin: number;
|
authMeNullResponsesAfterLogin: number;
|
||||||
|
viewerSignalConflictRemaining: number;
|
||||||
};
|
};
|
||||||
|
|
||||||
function trpcResult(json: unknown) {
|
function trpcResult(json: unknown) {
|
||||||
@@ -637,15 +638,24 @@ async function handleMedia(route: Route, state: MockAppState) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (path.endsWith("/signal")) {
|
if (path.endsWith("/viewer-signal")) {
|
||||||
state.mediaSession.status = "recording";
|
if (state.viewerSignalConflictRemaining > 0) {
|
||||||
await fulfillJson(route, { type: "answer", sdp: "mock-answer" });
|
state.viewerSignalConflictRemaining -= 1;
|
||||||
|
await route.fulfill({
|
||||||
|
status: 409,
|
||||||
|
contentType: "application/json",
|
||||||
|
body: JSON.stringify({ error: "viewer stream not ready" }),
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
state.mediaSession.viewerCount = (state.mediaSession.viewerCount || 0) + 1;
|
||||||
|
await fulfillJson(route, { viewerId: `viewer-${state.mediaSession.viewerCount}`, type: "answer", sdp: "mock-answer" });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (path.endsWith("/viewer-signal")) {
|
if (path.endsWith("/signal")) {
|
||||||
state.mediaSession.viewerCount = (state.mediaSession.viewerCount || 0) + 1;
|
state.mediaSession.status = "recording";
|
||||||
await fulfillJson(route, { viewerId: `viewer-${state.mediaSession.viewerCount}`, type: "answer", sdp: "mock-answer" });
|
await fulfillJson(route, { type: "answer", sdp: "mock-answer" });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -714,6 +724,7 @@ export async function installAppMocks(
|
|||||||
userName?: string;
|
userName?: string;
|
||||||
authMeNullResponsesAfterLogin?: number;
|
authMeNullResponsesAfterLogin?: number;
|
||||||
liveViewerMode?: boolean;
|
liveViewerMode?: boolean;
|
||||||
|
viewerSignalConflictOnce?: boolean;
|
||||||
}
|
}
|
||||||
) {
|
) {
|
||||||
const seededViewerSession = options?.liveViewerMode ? buildMediaSession(buildUser(options?.userName), "其他设备实时分析") : null;
|
const seededViewerSession = options?.liveViewerMode ? buildMediaSession(buildUser(options?.userName), "其他设备实时分析") : null;
|
||||||
@@ -765,6 +776,15 @@ export async function installAppMocks(
|
|||||||
lastHeartbeatAt: nowIso(),
|
lastHeartbeatAt: nowIso(),
|
||||||
snapshot: {
|
snapshot: {
|
||||||
phase: "analyzing",
|
phase: "analyzing",
|
||||||
|
title: "其他设备实时分析",
|
||||||
|
sessionMode: "practice",
|
||||||
|
qualityPreset: "balanced",
|
||||||
|
facingMode: "environment",
|
||||||
|
deviceKind: "mobile",
|
||||||
|
avatarEnabled: true,
|
||||||
|
avatarKey: "gorilla",
|
||||||
|
avatarLabel: "猩猩",
|
||||||
|
updatedAt: Date.parse(nowIso()),
|
||||||
currentAction: "forehand",
|
currentAction: "forehand",
|
||||||
rawAction: "forehand",
|
rawAction: "forehand",
|
||||||
durationMs: 3200,
|
durationMs: 3200,
|
||||||
@@ -817,6 +837,7 @@ export async function installAppMocks(
|
|||||||
nextVideoId: 100,
|
nextVideoId: 100,
|
||||||
nextTaskId: 1,
|
nextTaskId: 1,
|
||||||
authMeNullResponsesAfterLogin: options?.authMeNullResponsesAfterLogin ?? 0,
|
authMeNullResponsesAfterLogin: options?.authMeNullResponsesAfterLogin ?? 0,
|
||||||
|
viewerSignalConflictRemaining: options?.viewerSignalConflictOnce ? 1 : 0,
|
||||||
};
|
};
|
||||||
|
|
||||||
await page.addInitScript(() => {
|
await page.addInitScript(() => {
|
||||||
@@ -845,6 +866,73 @@ export async function installAppMocks(
|
|||||||
return points;
|
return points;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
class FakeVideoTrack {
|
||||||
|
kind = "video";
|
||||||
|
enabled = true;
|
||||||
|
muted = false;
|
||||||
|
readyState = "live";
|
||||||
|
id = "fake-video-track";
|
||||||
|
label = "Fake Camera";
|
||||||
|
|
||||||
|
stop() {}
|
||||||
|
|
||||||
|
getSettings() {
|
||||||
|
return {
|
||||||
|
facingMode: "environment",
|
||||||
|
width: 1280,
|
||||||
|
height: 720,
|
||||||
|
frameRate: 30,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
getCapabilities() {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
async applyConstraints() {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class FakeAudioTrack {
|
||||||
|
kind = "audio";
|
||||||
|
enabled = true;
|
||||||
|
muted = false;
|
||||||
|
readyState = "live";
|
||||||
|
id = "fake-audio-track";
|
||||||
|
label = "Fake Mic";
|
||||||
|
|
||||||
|
stop() {}
|
||||||
|
|
||||||
|
getSettings() {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
getCapabilities() {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
async applyConstraints() {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const createFakeMediaStream = (withAudio = false) => {
|
||||||
|
const videoTrack = new FakeVideoTrack();
|
||||||
|
const audioTrack = withAudio ? new FakeAudioTrack() : null;
|
||||||
|
const tracks = audioTrack ? [videoTrack, audioTrack] : [videoTrack];
|
||||||
|
return {
|
||||||
|
active: true,
|
||||||
|
id: `fake-stream-${Math.random().toString(36).slice(2)}`,
|
||||||
|
getTracks: () => tracks,
|
||||||
|
getVideoTracks: () => [videoTrack],
|
||||||
|
getAudioTracks: () => (audioTrack ? [audioTrack] : []),
|
||||||
|
addTrack: () => undefined,
|
||||||
|
removeTrack: () => undefined,
|
||||||
|
clone: () => createFakeMediaStream(withAudio),
|
||||||
|
} as unknown as MediaStream;
|
||||||
|
};
|
||||||
|
|
||||||
class FakePose {
|
class FakePose {
|
||||||
callback = null;
|
callback = null;
|
||||||
|
|
||||||
@@ -873,9 +961,19 @@ export async function installAppMocks(
|
|||||||
value: async () => undefined,
|
value: async () => undefined,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
Object.defineProperty(HTMLMediaElement.prototype, "srcObject", {
|
||||||
|
configurable: true,
|
||||||
|
get() {
|
||||||
|
return (this as HTMLMediaElement & { __srcObject?: MediaStream }).__srcObject ?? null;
|
||||||
|
},
|
||||||
|
set(value) {
|
||||||
|
(this as HTMLMediaElement & { __srcObject?: MediaStream }).__srcObject = value as MediaStream;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
Object.defineProperty(HTMLCanvasElement.prototype, "captureStream", {
|
Object.defineProperty(HTMLCanvasElement.prototype, "captureStream", {
|
||||||
configurable: true,
|
configurable: true,
|
||||||
value: () => new MediaStream(),
|
value: () => createFakeMediaStream(),
|
||||||
});
|
});
|
||||||
|
|
||||||
class FakeMediaRecorder extends EventTarget {
|
class FakeMediaRecorder extends EventTarget {
|
||||||
@@ -921,9 +1019,12 @@ export async function installAppMocks(
|
|||||||
localDescription: { type: string; sdp: string } | null = null;
|
localDescription: { type: string; sdp: string } | null = null;
|
||||||
remoteDescription: { type: string; sdp: string } | null = null;
|
remoteDescription: { type: string; sdp: string } | null = null;
|
||||||
onconnectionstatechange: (() => void) | null = null;
|
onconnectionstatechange: (() => void) | null = null;
|
||||||
|
ontrack: ((event: { streams: MediaStream[] }) => void) | null = null;
|
||||||
|
|
||||||
addTrack() {}
|
addTrack() {}
|
||||||
|
|
||||||
|
addTransceiver() {}
|
||||||
|
|
||||||
async createOffer() {
|
async createOffer() {
|
||||||
return { type: "offer", sdp: "mock-offer" };
|
return { type: "offer", sdp: "mock-offer" };
|
||||||
}
|
}
|
||||||
@@ -937,6 +1038,7 @@ export async function installAppMocks(
|
|||||||
async setRemoteDescription(description: { type: string; sdp: string }) {
|
async setRemoteDescription(description: { type: string; sdp: string }) {
|
||||||
this.remoteDescription = description;
|
this.remoteDescription = description;
|
||||||
this.connectionState = "connected";
|
this.connectionState = "connected";
|
||||||
|
this.ontrack?.({ streams: [createFakeMediaStream()] });
|
||||||
this.onconnectionstatechange?.();
|
this.onconnectionstatechange?.();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -959,7 +1061,7 @@ export async function installAppMocks(
|
|||||||
Object.defineProperty(navigator, "mediaDevices", {
|
Object.defineProperty(navigator, "mediaDevices", {
|
||||||
configurable: true,
|
configurable: true,
|
||||||
value: {
|
value: {
|
||||||
getUserMedia: async () => new MediaStream(),
|
getUserMedia: async (constraints?: { audio?: unknown }) => createFakeMediaStream(Boolean(constraints?.audio)),
|
||||||
enumerateDevices: async () => [
|
enumerateDevices: async () => [
|
||||||
{ deviceId: "cam-1", kind: "videoinput", label: "Front Camera", groupId: "g1" },
|
{ deviceId: "cam-1", kind: "videoinput", label: "Front Camera", groupId: "g1" },
|
||||||
{ deviceId: "cam-2", kind: "videoinput", label: "Back Camera", groupId: "g1" },
|
{ deviceId: "cam-2", kind: "videoinput", label: "Back Camera", groupId: "g1" },
|
||||||
|
|||||||
在新工单中引用
屏蔽一个用户