Fix live camera pose loading and relay buffer

这个提交包含在:
cryptocommuniums-afk
2026-03-17 12:31:12 +08:00
父节点 f3f7e1982c
当前提交 597f16d0b9
修改 7 个文件,包含 268 行新增、76 行删除

查看文件

@@ -8,6 +8,29 @@ export type ChangeLogEntry = {
};
export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
{
version: "2026.03.17-live-camera-pose-buffer-window",
releaseDate: "2026-03-17",
repoVersion: "f3f7e19+pose-buffer-window",
summary:
"修复实时分析启动时的 MediaPipe Pose 模块加载崩溃,并把多端同步缓存改为默认 2 分钟、可选 10 秒到 5 分钟。",
features: [
"live-camera 开始分析时不再直接解构 `import(\"@mediapipe/pose\")` 的返回值,而是兼容 `Pose`、`default.Pose` 和默认导出三种形态;模块缺失时会抛出明确错误,避免再次出现 `Cannot destructure property 'Pose' ... as it is undefined`",
"同步观看的 relay 缓存时长改为按会话配置,范围 10 秒到 5 分钟,默认 2 分钟;viewer 文案、徽标和设置面板都会实时显示当前缓存窗口",
"owner 端合成画布录制改为每 10 秒上传一次 relay 分片,同时继续维持每 60 秒一段的自动归档录像,因此观看端切到短缓存时不需要再等满 60 秒才出现平滑视频",
"media 服务会按各自 relay 会话的缓存窗口裁剪预览分段,并在从磁盘恢复旧会话时自动归一化缓存秒数,避免旧数据继续按固定 60 秒窗口工作",
"线上 smoke 已确认 `https://te.hao.work/` 已经提供本次新构建,而不是旧资源版本;首页、主样式和 `pose` 模块都已切到本次发布的最新资源 revision",
],
tests: [
"cd media && go test ./...",
"pnpm vitest run client/src/lib/liveCamera.test.ts",
"pnpm check",
"pnpm build",
"pnpm exec playwright test tests/e2e/app.spec.ts",
"playwright-skill 线上 smoke: 登录 `H1` 后访问 `https://te.hao.work/live-camera`,完成校准、启用假摄像头并点击“开始分析”,确认页面进入分析中状态、默认显示“缓存 2 分钟”、且无控制台与页面级错误",
"curl -I https://te.hao.work/,并确认首页、主样式与 `pose` 模块资源均返回 `200` 和正确 MIME",
],
},
{
version: "2026.03.17-live-camera-relay-buffer",
releaseDate: "2026-03-17",

查看文件

@@ -43,6 +43,7 @@ export type MediaSession = {
uploadedBytes: number;
previewSegments: number;
durationMs: number;
relayBufferSeconds?: number;
lastError?: string;
previewUpdatedAt?: string;
streamConnected: boolean;
@@ -115,6 +116,7 @@ export async function createMediaSession(payload: {
facingMode: string;
deviceKind: string;
purpose?: "recording" | "relay";
relayBufferSeconds?: number;
}) {
return request<{ session: MediaSession }>("/sessions", {
method: "POST",

查看文件

@@ -151,6 +151,7 @@ type RuntimeSnapshot = {
title?: string;
sessionMode?: SessionMode;
qualityPreset?: CameraQualityPreset;
relayBufferSeconds?: number;
facingMode?: CameraFacing;
deviceKind?: "mobile" | "desktop";
avatarEnabled?: boolean;
@@ -253,6 +254,15 @@ const SEGMENT_MAX_MS = 10_000;
const MERGE_GAP_MS = 900;
const MIN_SEGMENT_MS = 1_200;
const ANALYSIS_RECORDING_SEGMENT_MS = 60_000;
const RELAY_UPLOAD_SEGMENT_MS = 10_000;
const RELAY_BUFFER_OPTIONS = [
{ value: 10, label: "10 秒缓存" },
{ value: 30, label: "30 秒缓存" },
{ value: 60, label: "1 分钟缓存" },
{ value: 120, label: "2 分钟缓存" },
{ value: 180, label: "3 分钟缓存" },
{ value: 300, label: "5 分钟缓存" },
] as const;
const CAMERA_QUALITY_PRESETS: Record<
CameraQualityPreset,
{ label: string; subtitle: string; description: string }
@@ -798,6 +808,19 @@ function formatRuntimeSyncDelay(delayMs: number | null) {
return "同步较慢";
}
/**
 * Render a human-readable label for the relay buffer window.
 *
 * The input is clamped into the supported 10–300 second range; a missing
 * value falls back to the 120-second default. Whole minutes render as
 * "N 分钟"; other values render their numeric components.
 */
function formatRelayBufferLabel(seconds: number | null | undefined) {
  // Clamp into [10, 300], defaulting to 120 when unset.
  const clamped = Math.min(300, Math.max(10, seconds ?? 120));
  if (clamped >= 60) {
    if (clamped % 60 === 0) {
      // Exact number of minutes.
      return `${clamped / 60} 分钟`;
    }
    const wholeMinutes = Math.floor(clamped / 60);
    const leftoverSeconds = clamped % 60;
    return `${wholeMinutes}${leftoverSeconds}`;
  }
  // Sub-minute windows render the raw second count.
  return `${clamped}`;
}
export default function LiveCamera() {
const { user } = useAuth();
const utils = trpc.useUtils();
@@ -819,6 +842,8 @@ export default function LiveCamera() {
const recorderStopPromiseRef = useRef<Promise<void> | null>(null);
const recorderSegmentStartedAtRef = useRef<number>(0);
const recorderSequenceRef = useRef(0);
const relaySequenceRef = useRef(0);
const relayChunkStartedAtRef = useRef(0);
const recorderRotateTimerRef = useRef<number>(0);
const recorderUploadQueueRef = useRef(Promise.resolve());
const relayUploadQueueRef = useRef(Promise.resolve());
@@ -872,6 +897,7 @@ export default function LiveCamera() {
const [segmentFilter, setSegmentFilter] = useState<ActionType | "all">("all");
const [qualityPreset, setQualityPreset] =
useState<CameraQualityPreset>("economy");
const [relayBufferSeconds, setRelayBufferSeconds] = useState(120);
const [zoomState, setZoomState] = useState(() => readTrackZoomState(null));
const [stabilityMeta, setStabilityMeta] = useState<StabilizedActionMeta>(() =>
createEmptyStabilizedActionMeta()
@@ -936,6 +962,11 @@ export default function LiveCamera() {
rawActionRef.current = rawAction;
}, [rawAction]);
const relayBufferLabel = useMemo(
() => formatRelayBufferLabel(relayBufferSeconds),
[relayBufferSeconds]
);
useEffect(() => {
liveScoreRef.current = liveScore;
}, [liveScore]);
@@ -1253,6 +1284,7 @@ export default function LiveCamera() {
}
if (!recorder) {
await recorderUploadQueueRef.current;
await relayUploadQueueRef.current;
return;
}
const stopPromise = recorderStopPromiseRef.current;
@@ -1261,6 +1293,7 @@ export default function LiveCamera() {
}
await (stopPromise ?? Promise.resolve());
await recorderUploadQueueRef.current;
await relayUploadQueueRef.current;
}, []);
const buildRuntimeSnapshot = useCallback(
@@ -1273,6 +1306,7 @@ export default function LiveCamera() {
`实时分析 ${ACTION_META[currentActionRef.current].label}`,
sessionMode: sessionModeRef.current,
qualityPreset,
relayBufferSeconds,
facingMode: facing,
deviceKind: mobile ? "mobile" : "desktop",
avatarEnabled: avatarRenderRef.current.enabled,
@@ -1292,7 +1326,7 @@ export default function LiveCamera() {
archivedVideoCount: archivedVideosRef.current.length,
recentSegments: segmentsRef.current.slice(-5),
}),
[facing, mobile, normalizedRuntimeTitle, qualityPreset]
[facing, mobile, normalizedRuntimeTitle, qualityPreset, relayBufferSeconds]
);
const openSetupGuide = useCallback(async () => {
@@ -1416,12 +1450,13 @@ export default function LiveCamera() {
facingMode: facing,
deviceKind: mobile ? "mobile" : "desktop",
purpose: "relay",
relayBufferSeconds,
});
const sessionId = sessionResponse.session.id;
broadcastSessionIdRef.current = sessionId;
return sessionId;
}, [facing, mobile, qualityPreset, user?.id]);
}, [facing, mobile, qualityPreset, relayBufferSeconds, user?.id]);
const startViewerStream = useCallback(async (mediaSessionId: string) => {
const response = await getMediaSession(mediaSessionId);
@@ -1466,6 +1501,7 @@ export default function LiveCamera() {
setZoomState(readTrackZoomState(null));
archivedVideosRef.current = [];
recorderSequenceRef.current = 0;
relaySequenceRef.current = 0;
setArchivedVideoCount(0);
setCameraActive(false);
}, [stopSessionRecorder]);
@@ -1736,10 +1772,24 @@ export default function LiveCamera() {
const sequence = recorderSequenceRef.current + 1;
recorderSequenceRef.current = sequence;
recorderSegmentStartedAtRef.current = Date.now();
relayChunkStartedAtRef.current = recorderSegmentStartedAtRef.current;
recorder.ondataavailable = event => {
if (event.data && event.data.size > 0) {
recorderChunksRef.current.push(event.data);
const nextRelaySequence = relaySequenceRef.current + 1;
relaySequenceRef.current = nextRelaySequence;
const now = Date.now();
const relayDurationMs = Math.max(
1,
now - relayChunkStartedAtRef.current
);
relayChunkStartedAtRef.current = now;
void queueRelaySegmentUpload(
event.data,
nextRelaySequence,
relayDurationMs
);
}
};
@@ -1760,7 +1810,6 @@ export default function LiveCamera() {
recorderRef.current = null;
recorderStopPromiseRef.current = null;
if (blob && blob.size > 0 && durationMs > 0) {
void queueRelaySegmentUpload(blob, sequence, durationMs);
void queueArchivedVideoUpload(blob, sequence, durationMs);
}
if (analyzingRef.current) {
@@ -1775,7 +1824,7 @@ export default function LiveCamera() {
};
});
recorder.start();
recorder.start(RELAY_UPLOAD_SEGMENT_MS);
recorderRotateTimerRef.current = window.setTimeout(() => {
if (recorder.state === "recording") {
recorder.stop();
@@ -1983,6 +2032,7 @@ export default function LiveCamera() {
volatilitySamplesRef.current = [];
archivedVideosRef.current = [];
recorderSequenceRef.current = 0;
relaySequenceRef.current = 0;
setArchivedVideoCount(0);
sessionStartedAtRef.current = Date.now();
setCurrentAction("unknown");
@@ -1998,14 +2048,19 @@ export default function LiveCamera() {
const testFactory = (
window as typeof window & {
__TEST_MEDIAPIPE_FACTORY__?: () => Promise<{ Pose: any }>;
__TEST_MEDIAPIPE_FACTORY__?: () => Promise<any>;
}
).__TEST_MEDIAPIPE_FACTORY__;
const { Pose } = testFactory
const poseModule = testFactory
? await testFactory()
: await import("@mediapipe/pose");
const pose = new Pose({
const PoseConstructor =
poseModule?.Pose ?? poseModule?.default?.Pose ?? poseModule?.default;
if (typeof PoseConstructor !== "function") {
throw new Error("MediaPipe Pose 模块加载失败");
}
const pose = new PoseConstructor({
locateFile: (file: string) =>
`https://cdn.jsdelivr.net/npm/@mediapipe/pose/${file}`,
});
@@ -2225,6 +2280,15 @@ export default function LiveCamera() {
runtimeRole === "viewer"
? (runtimeSnapshot?.qualityPreset ?? qualityPreset)
: qualityPreset;
const displayRelayBufferSeconds =
runtimeRole === "viewer"
? (runtimeSnapshot?.relayBufferSeconds ??
viewerMediaSession?.relayBufferSeconds ??
relayBufferSeconds)
: relayBufferSeconds;
const displayRelayBufferLabel = formatRelayBufferLabel(
displayRelayBufferSeconds
);
const displayFacing =
runtimeRole === "viewer" ? (runtimeSnapshot?.facingMode ?? facing) : facing;
const displayDeviceKind =
@@ -2287,8 +2351,8 @@ export default function LiveCamera() {
? viewerConnected
? `${runtimeSyncLabel} · 服务端缓存同步中`
: viewerBufferReady
? "正在加载最近 60 秒缓存"
: "正在缓冲最近 60 秒视频"
? `正在加载最近 ${displayRelayBufferLabel} 缓存`
: `正在缓冲最近 ${displayRelayBufferLabel} 视频`
: analyzing
? displayStabilityMeta.pending && pendingActionMeta
? `${pendingActionMeta.label} 切换确认中`
@@ -2639,8 +2703,8 @@ export default function LiveCamera() {
<AlertDescription>
{viewerModeLabel}
media 60
media {displayRelayBufferLabel}
</AlertDescription>
</Alert>
) : null}
@@ -2683,6 +2747,10 @@ export default function LiveCamera() {
<Video className="h-3.5 w-3.5" />
{CAMERA_QUALITY_PRESETS[displayQualityPreset].label}
</Badge>
<Badge className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10">
<Monitor className="h-3.5 w-3.5" />
{displayRelayBufferLabel}
</Badge>
{runtimeRole === "viewer" ? (
<Badge
className="gap-1.5 border-white/10 bg-white/10 text-white hover:bg-white/10"
@@ -2699,7 +2767,7 @@ export default function LiveCamera() {
</h1>
<p className="mt-2 max-w-2xl text-sm leading-6 text-white/70">
{runtimeRole === "viewer"
? `当前正在通过服务端中转同步 ${displayDeviceKind === "mobile" ? "移动端" : "桌面端"} ${displayFacing === "environment" ? "后置/主摄视角" : "前置视角"} 画面。持有端会持续把最近 60 秒带骨架叠层的视频缓存到服务器,观看端按缓存视频平滑播放;缓存超过 30 分钟会自动清理。`
? `当前正在通过服务端中转同步 ${displayDeviceKind === "mobile" ? "移动端" : "桌面端"} ${displayFacing === "environment" ? "后置/主摄视角" : "前置视角"} 画面。持有端会持续把最近 ${displayRelayBufferLabel} 带骨架叠层的视频缓存到服务器,观看端按缓存视频平滑播放;缓存超过 30 分钟会自动清理。`
: "摄像头启动后会持续识别正手、反手、发球、截击、高压、切削、挑高球与未知动作。系统会用 24 帧时间窗口统一动作,再把稳定动作写入片段、训练记录与评分;分析过程中会自动录制“视频画面 + 骨架/关键点叠层”的合成回放,并按 60 秒分段归档进视频库。开启虚拟形象后,画面中的人体可切换为 10 个轻量动物替身,或 4 个免费的全身 3D Avatar 示例覆盖显示。"}
</p>
</div>
@@ -2792,8 +2860,8 @@ export default function LiveCamera() {
<div className="text-sm text-white/60">
{runtimeRole === "viewer"
? viewerBufferReady
? `${viewerModeLabel},当前设备只能观看同步内容;正在载入最近 60 秒缓存回放。`
: `${viewerModeLabel},当前设备只能观看同步内容;持有端累计满 60 秒缓存后会自动出现平滑视频。`
? `${viewerModeLabel},当前设备只能观看同步内容;正在载入最近 ${displayRelayBufferLabel} 缓存回放。`
: `${viewerModeLabel},当前设备只能观看同步内容;持有端累计出缓存片段后会自动出现平滑视频。`
: "先完成拍摄校准,再开启自动动作识别。"}
</div>
</div>
@@ -2954,6 +3022,7 @@ export default function LiveCamera() {
.label
}
</div>
<div>{displayRelayBufferLabel}</div>
<div>
{displayAvatarEnabled
@@ -3114,7 +3183,8 @@ export default function LiveCamera() {
<CardHeader className="pb-3">
<CardTitle className="text-base"></CardTitle>
<CardDescription>
使 24
使 2 24
</CardDescription>
</CardHeader>
<CardContent className="space-y-4">
@@ -3154,6 +3224,36 @@ export default function LiveCamera() {
</div>
<div className="grid gap-3 md:grid-cols-2">
<div className="rounded-2xl border border-border/60 bg-muted/25 p-4">
<div className="text-sm font-medium"></div>
<div className="mt-2 text-sm text-muted-foreground">
{relayBufferLabel} 10 5 2
</div>
<div className="mt-3">
<Select
value={String(relayBufferSeconds)}
onValueChange={value =>
setRelayBufferSeconds(Number(value))
}
disabled={analyzing || saving || runtimeRole === "viewer"}
>
<SelectTrigger className="h-12 rounded-2xl border-border/60">
<SelectValue />
</SelectTrigger>
<SelectContent>
{RELAY_BUFFER_OPTIONS.map(option => (
<SelectItem
key={option.value}
value={String(option.value)}
>
{option.label}
</SelectItem>
))}
</SelectContent>
</Select>
</div>
</div>
<div className="rounded-2xl border border-border/60 bg-muted/25 p-4">
<div className="text-sm font-medium"></div>
<div className="mt-2 text-sm text-muted-foreground">
@@ -3161,7 +3261,7 @@ export default function LiveCamera() {
</div>
</div>
<div className="rounded-2xl border border-border/60 bg-muted/25 p-4">
<div className="rounded-2xl border border-border/60 bg-muted/25 p-4 md:col-span-2">
<div className="text-sm font-medium"> / </div>
<div className="mt-2 text-sm text-muted-foreground">
{zoomState.supported

查看文件

@@ -1,5 +1,39 @@
# Tennis Training Hub - 变更日志
## 2026.03.17-live-camera-pose-buffer-window (2026-03-17)
### 功能更新
- 修复 `/live-camera` 开始分析时报错 `Cannot destructure property 'Pose' ... as it is undefined` 的问题;MediaPipe Pose 动态加载现在兼容 `Pose`、`default.Pose` 和默认导出三种模块形态
- 多端同步观看的 relay 缓存窗口改为按会话配置,默认 `2` 分钟,可选最短 `10` 秒、最长 `5` 分钟;viewer 页面、徽标和设置卡都会同步显示当前缓存时长
- owner 端分析录制在继续保持“每 `60` 秒自动归档”之外,会额外每 `10` 秒上传一次 relay 分片,因此短缓存模式下其他端不需要等待整整 `60` 秒才看到平滑同步视频
- media 服务会按各自 relay 会话的缓存秒数裁剪 preview 分段;从磁盘恢复旧 relay 会话时也会自动归一化到合法范围,避免旧会话继续沿用固定 `60` 秒窗口
- 线上 smoke 已确认 `https://te.hao.work/` 正在提供本次新构建,而不是旧资源版本;当前公开站点资源 revision 为 `assets/index-2-BhvFom.js`、`assets/index-BHHHsAWc.css` 与 `assets/pose-BPcIm7Xa.js`
### 测试
- `cd media && go test ./...`
- `pnpm vitest run client/src/lib/liveCamera.test.ts`
- `pnpm check`
- `pnpm build`
- `pnpm exec playwright test tests/e2e/app.spec.ts`
- `playwright-skill` 线上 smoke:登录 `H1` 后访问 `https://te.hao.work/live-camera`,完成校准、启用假摄像头并点击“开始分析”,确认页面进入分析中状态、默认显示“缓存 2 分钟”,且无控制台与页面级错误
- `curl -I https://te.hao.work/`
- `curl -I https://te.hao.work/assets/index-2-BhvFom.js`
- `curl -I https://te.hao.work/assets/index-BHHHsAWc.css`
- `curl -I https://te.hao.work/assets/pose-BPcIm7Xa.js`
### 线上 smoke
- `https://te.hao.work/` 已切换到本次新构建,而不是旧资源版本
- 当前公开站点前端资源 revision:`assets/index-2-BhvFom.js`、`assets/index-BHHHsAWc.css`、`assets/pose-BPcIm7Xa.js`
- 已确认首页、主 JS、主 CSS 与 `pose` 模块均返回 `200`,且 MIME 分别为 `text/html`、`application/javascript`、`text/css` 与 `application/javascript`
- 真实浏览器验证已通过:登录 `H1` 后进入 `/live-camera`,能够完成校准、启用摄像头并点击“开始分析”;页面会进入“分析进行中”状态,默认显示“缓存 2 分钟”,且未再出现 `Pose` 模块解构异常
### 仓库版本
- `f3f7e19+pose-buffer-window`
## 2026.03.17-live-camera-relay-buffer (2026-03-17)
### 功能更新

查看文件

@@ -61,8 +61,10 @@ const (
)
const (
relayPreviewWindow = 60 * time.Second
relayCacheTTL = 30 * time.Minute
defaultRelayBufferSeconds = 120
minRelayBufferSeconds = 10
maxRelayBufferSeconds = 300
relayCacheTTL = 30 * time.Minute
)
type PlaybackInfo struct {
@@ -93,36 +95,37 @@ type Marker struct {
}
type Session struct {
ID string `json:"id"`
UserID string `json:"userId"`
Title string `json:"title"`
Purpose SessionPurpose `json:"purpose"`
Status SessionStatus `json:"status"`
ArchiveStatus ArchiveStatus `json:"archiveStatus"`
PreviewStatus PreviewStatus `json:"previewStatus"`
Format string `json:"format"`
MimeType string `json:"mimeType"`
QualityPreset string `json:"qualityPreset"`
FacingMode string `json:"facingMode"`
DeviceKind string `json:"deviceKind"`
ReconnectCount int `json:"reconnectCount"`
UploadedSegments int `json:"uploadedSegments"`
UploadedBytes int64 `json:"uploadedBytes"`
PreviewSegments int `json:"previewSegments"`
DurationMS int64 `json:"durationMs"`
LastError string `json:"lastError,omitempty"`
CreatedAt string `json:"createdAt"`
UpdatedAt string `json:"updatedAt"`
FinalizedAt string `json:"finalizedAt,omitempty"`
PreviewUpdatedAt string `json:"previewUpdatedAt,omitempty"`
StreamConnected bool `json:"streamConnected"`
LastStreamAt string `json:"lastStreamAt,omitempty"`
ViewerCount int `json:"viewerCount"`
LiveFrameURL string `json:"liveFrameUrl,omitempty"`
LiveFrameUpdated string `json:"liveFrameUpdatedAt,omitempty"`
Playback PlaybackInfo `json:"playback"`
Segments []SegmentMeta `json:"segments"`
Markers []Marker `json:"markers"`
ID string `json:"id"`
UserID string `json:"userId"`
Title string `json:"title"`
Purpose SessionPurpose `json:"purpose"`
Status SessionStatus `json:"status"`
ArchiveStatus ArchiveStatus `json:"archiveStatus"`
PreviewStatus PreviewStatus `json:"previewStatus"`
Format string `json:"format"`
MimeType string `json:"mimeType"`
QualityPreset string `json:"qualityPreset"`
FacingMode string `json:"facingMode"`
DeviceKind string `json:"deviceKind"`
ReconnectCount int `json:"reconnectCount"`
UploadedSegments int `json:"uploadedSegments"`
UploadedBytes int64 `json:"uploadedBytes"`
PreviewSegments int `json:"previewSegments"`
DurationMS int64 `json:"durationMs"`
RelayBufferSeconds int `json:"relayBufferSeconds"`
LastError string `json:"lastError,omitempty"`
CreatedAt string `json:"createdAt"`
UpdatedAt string `json:"updatedAt"`
FinalizedAt string `json:"finalizedAt,omitempty"`
PreviewUpdatedAt string `json:"previewUpdatedAt,omitempty"`
StreamConnected bool `json:"streamConnected"`
LastStreamAt string `json:"lastStreamAt,omitempty"`
ViewerCount int `json:"viewerCount"`
LiveFrameURL string `json:"liveFrameUrl,omitempty"`
LiveFrameUpdated string `json:"liveFrameUpdatedAt,omitempty"`
Playback PlaybackInfo `json:"playback"`
Segments []SegmentMeta `json:"segments"`
Markers []Marker `json:"markers"`
}
func (s *Session) recomputeAggregates() {
@@ -140,14 +143,15 @@ func (s *Session) recomputeAggregates() {
}
type CreateSessionRequest struct {
UserID string `json:"userId"`
Title string `json:"title"`
Format string `json:"format"`
MimeType string `json:"mimeType"`
QualityPreset string `json:"qualityPreset"`
FacingMode string `json:"facingMode"`
DeviceKind string `json:"deviceKind"`
Purpose string `json:"purpose"`
UserID string `json:"userId"`
Title string `json:"title"`
Format string `json:"format"`
MimeType string `json:"mimeType"`
QualityPreset string `json:"qualityPreset"`
FacingMode string `json:"facingMode"`
DeviceKind string `json:"deviceKind"`
Purpose string `json:"purpose"`
RelayBufferSeconds int `json:"relayBufferSeconds"`
}
type SignalRequest struct {
@@ -231,6 +235,9 @@ func (s *sessionStore) refreshFromDisk() error {
if session.Purpose == "" {
session.Purpose = PurposeRecording
}
if session.Purpose == PurposeRelay {
session.RelayBufferSeconds = normalizeRelayBufferSeconds(session.RelayBufferSeconds)
}
session.recomputeAggregates()
}
s.mu.Lock()
@@ -281,23 +288,29 @@ func cloneSession(session *Session) *Session {
func (s *sessionStore) createSession(input CreateSessionRequest) (*Session, error) {
now := time.Now().UTC().Format(time.RFC3339)
purpose := SessionPurpose(defaultString(input.Purpose, string(PurposeRecording)))
relayBufferSeconds := 0
if purpose == PurposeRelay {
relayBufferSeconds = normalizeRelayBufferSeconds(input.RelayBufferSeconds)
}
session := &Session{
ID: randomID(),
UserID: strings.TrimSpace(input.UserID),
Title: strings.TrimSpace(input.Title),
Purpose: SessionPurpose(defaultString(input.Purpose, string(PurposeRecording))),
Status: StatusCreated,
ArchiveStatus: ArchiveIdle,
PreviewStatus: PreviewIdle,
Format: defaultString(input.Format, "webm"),
MimeType: defaultString(input.MimeType, "video/webm"),
QualityPreset: defaultString(input.QualityPreset, "balanced"),
FacingMode: defaultString(input.FacingMode, "environment"),
DeviceKind: defaultString(input.DeviceKind, "desktop"),
CreatedAt: now,
UpdatedAt: now,
Segments: []SegmentMeta{},
Markers: []Marker{},
ID: randomID(),
UserID: strings.TrimSpace(input.UserID),
Title: strings.TrimSpace(input.Title),
Purpose: purpose,
Status: StatusCreated,
ArchiveStatus: ArchiveIdle,
PreviewStatus: PreviewIdle,
Format: defaultString(input.Format, "webm"),
MimeType: defaultString(input.MimeType, "video/webm"),
QualityPreset: defaultString(input.QualityPreset, "balanced"),
FacingMode: defaultString(input.FacingMode, "environment"),
DeviceKind: defaultString(input.DeviceKind, "desktop"),
RelayBufferSeconds: relayBufferSeconds,
CreatedAt: now,
UpdatedAt: now,
Segments: []SegmentMeta{},
Markers: []Marker{},
}
s.mu.Lock()
defer s.mu.Unlock()
@@ -311,6 +324,23 @@ func (s *sessionStore) createSession(input CreateSessionRequest) (*Session, erro
return cloneSession(session), nil
}
// normalizeRelayBufferSeconds coerces a configured relay buffer length into
// the supported range. Non-positive values mean "unset" and map to the
// default; everything else is clamped to [minRelayBufferSeconds,
// maxRelayBufferSeconds].
func normalizeRelayBufferSeconds(value int) int {
	if value <= 0 {
		// Unset (zero) or invalid negative input: use the default window.
		return defaultRelayBufferSeconds
	}
	switch {
	case value < minRelayBufferSeconds:
		return minRelayBufferSeconds
	case value > maxRelayBufferSeconds:
		return maxRelayBufferSeconds
	default:
		return value
	}
}
// relayPreviewWindowForSession returns the preview-trim window for a relay
// session as a time.Duration. The stored seconds are normalized first, so
// sessions restored from disk with out-of-range (or zero) values still get a
// valid window.
func relayPreviewWindowForSession(session *Session) time.Duration {
	seconds := normalizeRelayBufferSeconds(session.RelayBufferSeconds)
	return time.Duration(seconds) * time.Second
}
func parseSessionTime(values ...string) time.Time {
for _, value := range values {
if strings.TrimSpace(value) == "" {
@@ -967,7 +997,7 @@ func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWrite
sortSegmentsBySequence(session.Segments)
if session.Purpose == PurposeRelay {
var kept []SegmentMeta
kept, removedSegments = trimSegmentsToDuration(session.Segments, relayPreviewWindow)
kept, removedSegments = trimSegmentsToDuration(session.Segments, relayPreviewWindowForSession(session))
session.Segments = kept
}
session.Status = StatusRecording

查看文件

@@ -331,7 +331,7 @@ func TestRelaySegmentUploadKeepsOnlyLatestMinute(t *testing.T) {
}
server := newMediaServer(store)
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Buffer", Purpose: "relay"})
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Buffer", Purpose: "relay", RelayBufferSeconds: 60})
if err != nil {
t.Fatalf("createSession: %v", err)
}
@@ -371,7 +371,7 @@ func TestProcessRelayPreviewPublishesBufferedWebM(t *testing.T) {
t.Fatalf("newSessionStore: %v", err)
}
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Preview", Purpose: "relay"})
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Preview", Purpose: "relay", RelayBufferSeconds: 60})
if err != nil {
t.Fatalf("createSession: %v", err)
}
@@ -416,7 +416,7 @@ func TestPruneExpiredRelaySessionsRemovesOldCache(t *testing.T) {
t.Fatalf("newSessionStore: %v", err)
}
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Old Relay", Purpose: "relay"})
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Old Relay", Purpose: "relay", RelayBufferSeconds: 60})
if err != nil {
t.Fatalf("createSession: %v", err)
}

查看文件

@@ -50,6 +50,7 @@ type MockMediaSession = {
uploadedSegments: number;
uploadedBytes: number;
durationMs: number;
relayBufferSeconds?: number;
previewUpdatedAt?: string;
streamConnected: boolean;
viewerCount?: number;
@@ -288,6 +289,7 @@ function buildMediaSession(
uploadedSegments: purpose === "relay" ? 1 : 0,
uploadedBytes: purpose === "relay" ? 1_280_000 : 0,
durationMs: purpose === "relay" ? 60_000 : 0,
relayBufferSeconds: purpose === "relay" ? 120 : undefined,
previewUpdatedAt: purpose === "relay" ? nowIso() : undefined,
streamConnected: true,
playback: {
@@ -858,6 +860,7 @@ export async function installAppMocks(
title: "其他设备实时分析",
sessionMode: "practice",
qualityPreset: "balanced",
relayBufferSeconds: 120,
facingMode: "environment",
deviceKind: "mobile",
avatarEnabled: true,