比较提交

...

2 次代码提交

作者 SHA1 备注 提交日期
cryptocommuniums-afk
634a4704c7 docs record live viewer relay rollout 2026-03-16 23:02:30 +08:00
cryptocommuniums-afk
bb46d26c0e feat relay live viewer frames through media service 2026-03-16 22:43:08 +08:00
修改 6 个文件,包含 263 行新增、127 行删除

查看文件

@@ -8,6 +8,23 @@ export type ChangeLogEntry = {
}; };
export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [ export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
{
version: "2026.03.16-live-viewer-server-relay",
releaseDate: "2026-03-16",
repoVersion: "bb46d26",
summary: "实时分析同步观看改为由 media 服务中转帧图,不再依赖浏览器之间的 P2P 视频连接。",
features: [
"owner 端现在会把带骨架、关键点和虚拟形象叠层的合成画布压缩成 JPEG 并持续上传到 media 服务",
"viewer 端改为直接拉取 media 服务中的最新同步帧图,不再建立 WebRTC viewer peer 连接,因此跨网络和多端观看更稳定",
"同步观看模式文案改为明确提示“通过 media 服务中转”,等待同步时也会自动轮询最新画面",
"media 服务新增 live-frame 上传与静态分发能力,并记录最近同步帧的更新时间,方便后续扩展成更高频的服务端中转流",
],
tests: [
"cd media && go test ./...",
"pnpm build",
"playwright-skill 线上 smoke: 先用 media 服务创建 relay session、上传 live-frame,并把 H1 的 `live_analysis_runtime` 注入为 active viewer 场景;随后访问 `https://te.hao.work/live-camera`,确认页面进入“同步观看模式”、同步帧来自 `/media/assets/sessions/.../live-frame.jpg`,且 `viewer-signal` 请求数为 0",
],
},
{ {
version: "2026.03.16-camera-startup-fallbacks", version: "2026.03.16-camera-startup-fallbacks",
releaseDate: "2026-03-16", releaseDate: "2026-03-16",

查看文件

@@ -51,6 +51,8 @@ export type MediaSession = {
streamConnected: boolean; streamConnected: boolean;
lastStreamAt?: string; lastStreamAt?: string;
viewerCount?: number; viewerCount?: number;
liveFrameUrl?: string;
liveFrameUpdatedAt?: string;
playback: { playback: {
webmUrl?: string; webmUrl?: string;
mp4Url?: string; mp4Url?: string;
@@ -131,6 +133,14 @@ export async function signalMediaViewerSession(sessionId: string, payload: { sdp
}); });
} }
/**
 * Uploads one relayed live frame for the given media session.
 * Defaults the content type to JPEG when the blob carries none.
 */
export async function uploadMediaLiveFrame(sessionId: string, blob: Blob) {
  const contentType = blob.type || "image/jpeg";
  const endpoint = `/sessions/${sessionId}/live-frame`;
  return request<{ session: MediaSession }>(endpoint, {
    method: "POST",
    headers: { "Content-Type": contentType },
    body: blob,
  });
}
export async function uploadMediaSegment( export async function uploadMediaSegment(
sessionId: string, sessionId: string,
sequence: number, sequence: number,
@@ -173,6 +183,10 @@ export async function getMediaSession(sessionId: string) {
return request<{ session: MediaSession }>(`/sessions/${sessionId}`); return request<{ session: MediaSession }>(`/sessions/${sessionId}`);
} }
/** Builds an absolute media-service URL, tolerating a missing leading slash on `path`. */
export function getMediaAssetUrl(path: string) {
  const normalized = path.startsWith("/") ? path : `/${path}`;
  return MEDIA_BASE + normalized;
}
export function formatRecordingTime(milliseconds: number) { export function formatRecordingTime(milliseconds: number) {
const totalSeconds = Math.max(0, Math.floor(milliseconds / 1000)); const totalSeconds = Math.max(0, Math.floor(milliseconds / 1000));
const minutes = Math.floor(totalSeconds / 60); const minutes = Math.floor(totalSeconds / 60);

查看文件

@@ -2,8 +2,8 @@ import { useAuth } from "@/_core/hooks/useAuth";
import { trpc } from "@/lib/trpc"; import { trpc } from "@/lib/trpc";
import { import {
createMediaSession, createMediaSession,
signalMediaSession, getMediaAssetUrl,
signalMediaViewerSession, uploadMediaLiveFrame,
} from "@/lib/media"; } from "@/lib/media";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert"; import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import { Badge } from "@/components/ui/badge"; import { Badge } from "@/components/ui/badge";
@@ -193,23 +193,6 @@ const CAMERA_QUALITY_PRESETS: Record<CameraQualityPreset, { label: string; subti
}, },
}; };
/**
 * Resolves once ICE candidate gathering on `peer` has finished.
 * Resolves immediately when gathering is already complete; otherwise waits for
 * the `icegatheringstatechange` event and detaches its listener on completion.
 */
function waitForIceGathering(peer: RTCPeerConnection) {
  if (peer.iceGatheringState === "complete") {
    return Promise.resolve();
  }
  return new Promise<void>((resolve) => {
    const onStateChange = () => {
      if (peer.iceGatheringState !== "complete") {
        return;
      }
      peer.removeEventListener("icegatheringstatechange", onStateChange);
      resolve();
    };
    peer.addEventListener("icegatheringstatechange", onStateChange);
  });
}
function clamp(value: number, min: number, max: number) { function clamp(value: number, min: number, max: number) {
return Math.max(min, Math.min(max, value)); return Math.max(min, Math.min(max, value));
} }
@@ -567,12 +550,11 @@ export default function LiveCamera() {
const streamRef = useRef<MediaStream | null>(null); const streamRef = useRef<MediaStream | null>(null);
const poseRef = useRef<any>(null); const poseRef = useRef<any>(null);
const compositeCanvasRef = useRef<HTMLCanvasElement | null>(null); const compositeCanvasRef = useRef<HTMLCanvasElement | null>(null);
const broadcastPeerRef = useRef<RTCPeerConnection | null>(null);
const broadcastStreamRef = useRef<MediaStream | null>(null);
const broadcastSessionIdRef = useRef<string | null>(null); const broadcastSessionIdRef = useRef<string | null>(null);
const viewerPeerRef = useRef<RTCPeerConnection | null>(null);
const viewerSessionIdRef = useRef<string | null>(null); const viewerSessionIdRef = useRef<string | null>(null);
const viewerRetryTimerRef = useRef<number>(0); const viewerRetryTimerRef = useRef<number>(0);
const frameRelayTimerRef = useRef<number>(0);
const frameRelayInFlightRef = useRef(false);
const runtimeIdRef = useRef<number | null>(null); const runtimeIdRef = useRef<number | null>(null);
const heartbeatTimerRef = useRef<number>(0); const heartbeatTimerRef = useRef<number>(0);
const recorderRef = useRef<MediaRecorder | null>(null); const recorderRef = useRef<MediaRecorder | null>(null);
@@ -635,6 +617,7 @@ export default function LiveCamera() {
const [archivedVideoCount, setArchivedVideoCount] = useState(0); const [archivedVideoCount, setArchivedVideoCount] = useState(0);
const [viewerConnected, setViewerConnected] = useState(false); const [viewerConnected, setViewerConnected] = useState(false);
const [viewerError, setViewerError] = useState(""); const [viewerError, setViewerError] = useState("");
const [viewerFrameVersion, setViewerFrameVersion] = useState(0);
const resolvedAvatarKey = useMemo( const resolvedAvatarKey = useMemo(
() => resolveAvatarKeyFromPrompt(avatarPrompt, avatarKey), () => resolveAvatarKeyFromPrompt(avatarPrompt, avatarKey),
@@ -909,17 +892,47 @@ export default function LiveCamera() {
recentSegments: segmentsRef.current.slice(-5), recentSegments: segmentsRef.current.slice(-5),
}), [facing, mobile, qualityPreset, runtimeSession?.title]); }), [facing, mobile, qualityPreset, runtimeSession?.title]);
const uploadLiveFrame = useCallback(async (sessionId: string) => {
  // Capture the current composite canvas (video + skeleton/avatar overlays) as
  // a JPEG and push it to the media service. Skip the frame when a previous
  // upload is still in flight so slow networks never queue up stale frames.
  const compositeCanvas = ensureCompositeCanvas();
  if (!compositeCanvas || frameRelayInFlightRef.current) {
    return;
  }
  renderCompositeFrame();
  frameRelayInFlightRef.current = true;
  try {
    const blob = await new Promise<Blob | null>((resolve) => {
      // Lower JPEG quality on mobile to keep the ~1fps relay cheap on cell networks.
      compositeCanvas.toBlob(resolve, "image/jpeg", mobile ? 0.7 : 0.76);
    });
    if (!blob) {
      return;
    }
    try {
      await uploadMediaLiveFrame(sessionId, blob);
    } catch {
      // Callers fire-and-forget this from an interval (`void uploadLiveFrame(...)`),
      // so a transient upload failure must not surface as an unhandled promise
      // rejection; swallow it and let the next ~900ms tick retry with a fresh frame.
    }
  } finally {
    frameRelayInFlightRef.current = false;
  }
}, [ensureCompositeCanvas, mobile, renderCompositeFrame]);
const startFrameRelayLoop = useCallback((sessionId: string) => {
  // Remember which session owns the relay so teardown can match it later.
  broadcastSessionIdRef.current = sessionId;
  // Restart cleanly if a previous loop is still ticking.
  const existingTimer = frameRelayTimerRef.current;
  if (existingTimer) {
    window.clearInterval(existingTimer);
    frameRelayTimerRef.current = 0;
  }
  // Push one frame right away, then keep relaying at roughly 1fps.
  void uploadLiveFrame(sessionId);
  frameRelayTimerRef.current = window.setInterval(() => {
    void uploadLiveFrame(sessionId);
  }, 900);
}, [uploadLiveFrame]);
const closeBroadcastPeer = useCallback(() => { const closeBroadcastPeer = useCallback(() => {
broadcastSessionIdRef.current = null; broadcastSessionIdRef.current = null;
if (broadcastPeerRef.current) { if (frameRelayTimerRef.current) {
broadcastPeerRef.current.onconnectionstatechange = null; window.clearInterval(frameRelayTimerRef.current);
broadcastPeerRef.current.close(); frameRelayTimerRef.current = 0;
broadcastPeerRef.current = null;
}
if (broadcastStreamRef.current) {
broadcastStreamRef.current.getTracks().forEach((track) => track.stop());
broadcastStreamRef.current = null;
} }
frameRelayInFlightRef.current = false;
}, []); }, []);
const closeViewerPeer = useCallback(() => { const closeViewerPeer = useCallback(() => {
@@ -928,14 +941,11 @@ export default function LiveCamera() {
viewerRetryTimerRef.current = 0; viewerRetryTimerRef.current = 0;
} }
viewerSessionIdRef.current = null; viewerSessionIdRef.current = null;
if (viewerPeerRef.current) { if (videoRef.current && !cameraActive) {
viewerPeerRef.current.ontrack = null; videoRef.current.srcObject = null;
viewerPeerRef.current.onconnectionstatechange = null;
viewerPeerRef.current.close();
viewerPeerRef.current = null;
} }
setViewerConnected(false); setViewerConnected(false);
}, []); }, [cameraActive]);
const releaseRuntime = useCallback(async (phase: RuntimeSnapshot["phase"]) => { const releaseRuntime = useCallback(async (phase: RuntimeSnapshot["phase"]) => {
if (!runtimeIdRef.current) return; if (!runtimeIdRef.current) return;
@@ -989,8 +999,8 @@ export default function LiveCamera() {
} }
const compositeCanvas = ensureCompositeCanvas(); const compositeCanvas = ensureCompositeCanvas();
if (!compositeCanvas || typeof compositeCanvas.captureStream !== "function") { if (!compositeCanvas) {
throw new Error("当前浏览器不支持同步观看推流"); throw new Error("当前浏览器不支持同步观看画面");
} }
renderCompositeFrame(); renderCompositeFrame();
@@ -1009,84 +1019,21 @@ export default function LiveCamera() {
}); });
const sessionId = sessionResponse.session.id; const sessionId = sessionResponse.session.id;
const stream = compositeCanvas.captureStream(mobile ? 24 : 30); startFrameRelayLoop(sessionId);
broadcastStreamRef.current = stream;
const peer = new RTCPeerConnection({
iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
});
broadcastPeerRef.current = peer;
stream.getTracks().forEach((track) => peer.addTrack(track, stream));
const offer = await peer.createOffer();
await peer.setLocalDescription(offer);
await waitForIceGathering(peer);
const answer = await signalMediaSession(sessionId, {
sdp: peer.localDescription?.sdp || "",
type: peer.localDescription?.type || "offer",
});
await peer.setRemoteDescription({
type: answer.type as RTCSdpType,
sdp: answer.sdp,
});
return sessionId; return sessionId;
}, [ensureCompositeCanvas, facing, mobile, qualityPreset, renderCompositeFrame, user?.id]); }, [ensureCompositeCanvas, facing, mobile, qualityPreset, renderCompositeFrame, startFrameRelayLoop, user?.id]);
const startViewerStream = useCallback(async (mediaSessionId: string) => { const startViewerStream = useCallback(async (mediaSessionId: string) => {
if (viewerSessionIdRef.current === mediaSessionId && viewerPeerRef.current) { if (viewerSessionIdRef.current === mediaSessionId && viewerConnected) {
setViewerFrameVersion(Date.now());
return; return;
} }
closeViewerPeer(); closeViewerPeer();
setViewerError(""); setViewerError("");
const peer = new RTCPeerConnection({
iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
});
viewerPeerRef.current = peer;
viewerSessionIdRef.current = mediaSessionId; viewerSessionIdRef.current = mediaSessionId;
peer.addTransceiver("video", { direction: "recvonly" }); setViewerFrameVersion(Date.now());
}, [closeViewerPeer, viewerConnected]);
peer.ontrack = (event) => {
const nextStream = event.streams[0] ?? new MediaStream([event.track]);
if (videoRef.current) {
videoRef.current.srcObject = nextStream;
void videoRef.current.play().catch(() => undefined);
}
setViewerConnected(true);
};
peer.onconnectionstatechange = () => {
if (peer.connectionState === "failed" || peer.connectionState === "closed" || peer.connectionState === "disconnected") {
setViewerConnected(false);
}
};
const offer = await peer.createOffer();
await peer.setLocalDescription(offer);
await waitForIceGathering(peer);
try {
const answer = await signalMediaViewerSession(mediaSessionId, {
sdp: peer.localDescription?.sdp || "",
type: peer.localDescription?.type || "offer",
});
await peer.setRemoteDescription({
type: answer.type as RTCSdpType,
sdp: answer.sdp,
});
} catch (error) {
if (viewerPeerRef.current === peer) {
closeViewerPeer();
}
throw error;
}
}, [closeViewerPeer]);
const stopCamera = useCallback(() => { const stopCamera = useCallback(() => {
if (animationRef.current) { if (animationRef.current) {
@@ -1137,24 +1084,26 @@ export default function LiveCamera() {
} }
void startViewerStream(runtimeSession.mediaSessionId).catch((error: any) => { void startViewerStream(runtimeSession.mediaSessionId).catch((error: any) => {
const message = error?.message || "同步画面连接失败"; setViewerError(error?.message || "同步画面连接失败");
if (/409|viewer stream not ready/i.test(message)) {
setViewerError("持有端正在准备同步画面,正在自动重试...");
if (!viewerRetryTimerRef.current) {
viewerRetryTimerRef.current = window.setTimeout(() => {
viewerRetryTimerRef.current = 0;
void runtimeQuery.refetch();
}, 1200);
}
return;
}
setViewerError(message);
}); });
if (viewerRetryTimerRef.current) {
window.clearInterval(viewerRetryTimerRef.current);
viewerRetryTimerRef.current = 0;
}
viewerRetryTimerRef.current = window.setInterval(() => {
setViewerFrameVersion(Date.now());
}, 900);
return () => {
if (viewerRetryTimerRef.current) {
window.clearInterval(viewerRetryTimerRef.current);
viewerRetryTimerRef.current = 0;
}
};
}, [ }, [
cameraActive, cameraActive,
closeViewerPeer, closeViewerPeer,
runtimeQuery.refetch,
runtimeQuery.dataUpdatedAt,
runtimeRole, runtimeRole,
runtimeSession?.mediaSessionId, runtimeSession?.mediaSessionId,
startViewerStream, startViewerStream,
@@ -1726,6 +1675,9 @@ export default function LiveCamera() {
const displayRuntimeTitle = runtimeRole === "viewer" const displayRuntimeTitle = runtimeRole === "viewer"
? (runtimeSnapshot?.title ?? runtimeSession?.title ?? "其他设备实时分析") ? (runtimeSnapshot?.title ?? runtimeSession?.title ?? "其他设备实时分析")
: (runtimeSession?.title ?? `实时分析 ${ACTION_META[currentAction].label}`); : (runtimeSession?.title ?? `实时分析 ${ACTION_META[currentAction].label}`);
const viewerFrameSrc = runtimeRole === "viewer" && runtimeSession?.mediaSessionId
? getMediaAssetUrl(`/assets/sessions/${runtimeSession.mediaSessionId}/live-frame.jpg?ts=${viewerFrameVersion || runtimeSnapshot?.updatedAt || Date.now()}`)
: "";
const hasVideoFeed = cameraActive || viewerConnected; const hasVideoFeed = cameraActive || viewerConnected;
const heroAction = ACTION_META[displayAction]; const heroAction = ACTION_META[displayAction];
const rawActionMeta = ACTION_META[displayRawAction]; const rawActionMeta = ACTION_META[displayRawAction];
@@ -1736,8 +1688,8 @@ export default function LiveCamera() {
const fullBodyAvatarPresets = AVATAR_PRESETS.filter((preset) => preset.category === "full-body-3d"); const fullBodyAvatarPresets = AVATAR_PRESETS.filter((preset) => preset.category === "full-body-3d");
const previewTitle = runtimeRole === "viewer" const previewTitle = runtimeRole === "viewer"
? viewerConnected ? viewerConnected
? `${runtimeSyncLabel} · 同步观看` ? `${runtimeSyncLabel} · 服务端同步中`
: "正在连接同步画面" : "正在获取服务端同步画面"
: analyzing : analyzing
? displayStabilityMeta.pending && pendingActionMeta ? displayStabilityMeta.pending && pendingActionMeta
? `${pendingActionMeta.label} 切换确认中` ? `${pendingActionMeta.label} 切换确认中`
@@ -2030,7 +1982,7 @@ export default function LiveCamera() {
<Monitor className="h-4 w-4" /> <Monitor className="h-4 w-4" />
<AlertTitle></AlertTitle> <AlertTitle></AlertTitle>
<AlertDescription> <AlertDescription>
{viewerModeLabel} 1 {viewerModeLabel} media 1
</AlertDescription> </AlertDescription>
</Alert> </Alert>
) : null} ) : null}
@@ -2082,7 +2034,7 @@ export default function LiveCamera() {
<h1 className="text-3xl font-semibold tracking-tight">{displayRuntimeTitle}</h1> <h1 className="text-3xl font-semibold tracking-tight">{displayRuntimeTitle}</h1>
<p className="mt-2 max-w-2xl text-sm leading-6 text-white/70"> <p className="mt-2 max-w-2xl text-sm leading-6 text-white/70">
{runtimeRole === "viewer" {runtimeRole === "viewer"
? `当前正在同步 ${displayDeviceKind === "mobile" ? "移动端" : "桌面端"} ${displayFacing === "environment" ? "后置/主摄视角" : "前置视角"} 画面。视频、动作、评分、最近区间、虚拟形象和会话状态会自动跟随持有端刷新,允许少量网络延迟。` ? `当前正在通过服务端中转同步 ${displayDeviceKind === "mobile" ? "移动端" : "桌面端"} ${displayFacing === "environment" ? "后置/主摄视角" : "前置视角"} 画面。同步画面、动作、评分、最近区间、虚拟形象和会话状态会自动跟随持有端刷新,允许少量网络延迟。`
: "摄像头启动后会持续识别正手、反手、发球、截击、高压、切削、挑高球与未知动作。系统会用 24 帧时间窗口统一动作,再把稳定动作写入片段、训练记录与评分;分析过程中会自动录制“视频画面 + 骨架/关键点叠层”的合成回放,并按 60 秒分段归档进视频库。开启虚拟形象后,画面中的人体可切换为 10 个轻量动物替身,或 4 个免费的全身 3D Avatar 示例覆盖显示。"} : "摄像头启动后会持续识别正手、反手、发球、截击、高压、切削、挑高球与未知动作。系统会用 24 帧时间窗口统一动作,再把稳定动作写入片段、训练记录与评分;分析过程中会自动录制“视频画面 + 骨架/关键点叠层”的合成回放,并按 60 秒分段归档进视频库。开启虚拟形象后,画面中的人体可切换为 10 个轻量动物替身,或 4 个免费的全身 3D Avatar 示例覆盖显示。"}
</p> </p>
</div> </div>
@@ -2116,11 +2068,27 @@ export default function LiveCamera() {
<div className="relative aspect-[16/10] overflow-hidden bg-black sm:aspect-video"> <div className="relative aspect-[16/10] overflow-hidden bg-black sm:aspect-video">
<video <video
ref={videoRef} ref={videoRef}
className={`absolute inset-0 h-full w-full object-contain ${immersivePreview ? "opacity-0" : ""}`} className={`absolute inset-0 h-full w-full object-contain ${immersivePreview || runtimeRole === "viewer" ? "opacity-0" : ""}`}
playsInline playsInline
muted muted
autoPlay autoPlay
/> />
{runtimeRole === "viewer" && viewerFrameSrc ? (
<img
key={viewerFrameSrc}
src={viewerFrameSrc}
alt="同步中的实时分析画面"
className="absolute inset-0 h-full w-full object-contain"
onLoad={() => {
setViewerConnected(true);
setViewerError("");
}}
onError={() => {
setViewerConnected(false);
setViewerError("持有端正在上传同步画面,正在自动重试...");
}}
/>
) : null}
<canvas <canvas
ref={canvasRef} ref={canvasRef}
className={`pointer-events-none absolute inset-0 h-full w-full object-contain ${runtimeRole === "viewer" ? "hidden" : analyzing ? "" : "opacity-70"}`} className={`pointer-events-none absolute inset-0 h-full w-full object-contain ${runtimeRole === "viewer" ? "hidden" : analyzing ? "" : "opacity-70"}`}
@@ -2149,7 +2117,7 @@ export default function LiveCamera() {
disabled={!runtimeSession?.mediaSessionId} disabled={!runtimeSession?.mediaSessionId}
> >
<Monitor className="mr-2 h-4 w-4" /> <Monitor className="mr-2 h-4 w-4" />
{viewerConnected ? "新同步" : "同步观看"} {viewerConnected ? "新同步" : "获取同步画面"}
</Button> </Button>
) : ( ) : (
<Button data-testid="live-camera-start-button" onClick={() => setShowSetupGuide(true)} className="rounded-2xl"> <Button data-testid="live-camera-start-button" onClick={() => setShowSetupGuide(true)} className="rounded-2xl">

查看文件

@@ -1,5 +1,31 @@
# Tennis Training Hub - 变更日志 # Tennis Training Hub - 变更日志
## 2026.03.16-live-viewer-server-relay (2026-03-16)
### 功能更新
- `/live-camera` 的同步观看改为由 media 服务中转最新合成帧图,不再依赖浏览器之间的 P2P WebRTC viewer 连接
- owner 端会把“原视频 + 骨架/关键点 + 虚拟形象”的合成画布压缩成 JPEG 并持续上传到 media 服务
- viewer 端改为自动轮询 media 服务中的最新同步帧图,因此即使浏览器之间无法直连,也能继续看到同步画面和状态
- 同步观看模式文案已调整为明确提示“通过 media 服务中转”,等待阶段会继续自动刷新,而不是停留在 P2P 连接失败状态
- media 服务新增 live-frame 上传与静态分发能力,并记录最近同步帧时间,方便后续继续扩展更高频的服务端 relay
### 测试
- `cd media && go test ./...`
- `pnpm build`
- `playwright-skill` 线上 smoke:先用 media 服务创建 relay session、上传 live-frame,并把 `H1` 的 `live_analysis_runtime` 注入为 active viewer 场景;随后访问 `https://te.hao.work/live-camera`,确认页面进入“同步观看模式”、同步帧来自 `/media/assets/sessions/.../live-frame.jpg`,且 `viewer-signal` 请求数为 `0`
### 线上 smoke
- `https://te.hao.work/` 已切换到本次新构建
- 当前公开站点前端资源 revision:`assets/index-BC-IupO8.js` 与 `assets/index-tNGuStgv.css`
- 真实验证已通过:viewer 端进入“同步观看模式”后,画面由 media 服务静态分发的 `live-frame.jpg` 提供,已确认不再触发 `/viewer-signal` P2P 观看请求
### 仓库版本
- `bb46d26`
## 2026.03.16-camera-startup-fallbacks (2026-03-16) ## 2026.03.16-camera-startup-fallbacks (2026-03-16)
### 功能更新 ### 功能更新

查看文件

@@ -105,6 +105,8 @@ type Session struct {
StreamConnected bool `json:"streamConnected"` StreamConnected bool `json:"streamConnected"`
LastStreamAt string `json:"lastStreamAt,omitempty"` LastStreamAt string `json:"lastStreamAt,omitempty"`
ViewerCount int `json:"viewerCount"` ViewerCount int `json:"viewerCount"`
LiveFrameURL string `json:"liveFrameUrl,omitempty"`
LiveFrameUpdated string `json:"liveFrameUpdatedAt,omitempty"`
Playback PlaybackInfo `json:"playback"` Playback PlaybackInfo `json:"playback"`
Segments []SegmentMeta `json:"segments"` Segments []SegmentMeta `json:"segments"`
Markers []Marker `json:"markers"` Markers []Marker `json:"markers"`
@@ -229,6 +231,14 @@ func (s *sessionStore) publicDir(id string) string {
return filepath.Join(s.public, "sessions", id) return filepath.Join(s.public, "sessions", id)
} }
// liveFramePath returns the on-disk location of a session's latest relayed frame.
func (s *sessionStore) liveFramePath(sessionID string) string {
	return filepath.Join(s.publicDir(sessionID), "live-frame.jpg")
}
// liveFrameURL returns the public URL under which a session's latest relayed frame is served.
func (s *sessionStore) liveFrameURL(sessionID string) string {
	return "/media/assets/sessions/" + sessionID + "/live-frame.jpg"
}
func (s *sessionStore) saveSession(session *Session) error { func (s *sessionStore) saveSession(session *Session) error {
session.UpdatedAt = time.Now().UTC().Format(time.RFC3339) session.UpdatedAt = time.Now().UTC().Format(time.RFC3339)
dir := s.sessionDir(session.ID) dir := s.sessionDir(session.ID)
@@ -504,6 +514,12 @@ func (m *mediaServer) handleSession(w http.ResponseWriter, r *http.Request) {
return return
} }
m.handleSegmentUpload(sessionID, w, r) m.handleSegmentUpload(sessionID, w, r)
case "live-frame":
if r.Method != http.MethodPost {
http.NotFound(w, r)
return
}
m.handleLiveFrameUpload(sessionID, w, r)
case "markers": case "markers":
if r.Method != http.MethodPost { if r.Method != http.MethodPost {
http.NotFound(w, r) http.NotFound(w, r)
@@ -726,6 +742,59 @@ func (m *mediaServer) handleViewerSignal(sessionID string, w http.ResponseWriter
}) })
} }
// handleLiveFrameUpload stores the latest relayed composite frame for a session
// and marks the session as actively streaming. The frame is written to a unique
// temp file first and atomically renamed into place so viewers polling
// live-frame.jpg never observe a partially-written image.
func (m *mediaServer) handleLiveFrameUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
	if _, err := m.store.getSession(sessionID); err != nil {
		writeError(w, http.StatusNotFound, err.Error())
		return
	}
	// Cap uploads at 4 MiB; a relayed JPEG frame should be far smaller.
	body := http.MaxBytesReader(w, r.Body, 4<<20)
	defer body.Close()
	frame, err := io.ReadAll(body)
	if err != nil || len(frame) == 0 {
		writeError(w, http.StatusBadRequest, "invalid live frame payload")
		return
	}
	publicDir := m.store.publicDir(sessionID)
	if err := os.MkdirAll(publicDir, 0o755); err != nil {
		writeError(w, http.StatusInternalServerError, "failed to create live frame directory")
		return
	}
	// Unique temp name so concurrent uploads never clobber each other mid-write.
	tmpFile := filepath.Join(publicDir, fmt.Sprintf("live-frame-%s.tmp", randomID()))
	if err := os.WriteFile(tmpFile, frame, 0o644); err != nil {
		writeError(w, http.StatusInternalServerError, "failed to write live frame")
		return
	}
	if err := os.Rename(tmpFile, m.store.liveFramePath(sessionID)); err != nil {
		// Clean up the temp file only on failure: after a successful rename the
		// temp path no longer exists (a deferred Remove would always fail there).
		os.Remove(tmpFile)
		writeError(w, http.StatusInternalServerError, "failed to publish live frame")
		return
	}
	session, err := m.store.updateSession(sessionID, func(session *Session) error {
		// One timestamp for both fields so they can never drift apart.
		now := time.Now().UTC().Format(time.RFC3339)
		session.LiveFrameURL = m.store.liveFrameURL(sessionID)
		session.LiveFrameUpdated = now
		session.StreamConnected = true
		session.LastStreamAt = now
		if session.Status == StatusCreated || session.Status == StatusReconnecting {
			session.Status = StatusStreaming
		}
		session.LastError = ""
		return nil
	})
	if err != nil {
		writeError(w, http.StatusInternalServerError, "failed to update live frame session state")
		return
	}
	writeJSON(w, http.StatusAccepted, map[string]any{"session": session})
}
func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWriter, r *http.Request) { func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
sequence, err := strconv.Atoi(r.URL.Query().Get("sequence")) sequence, err := strconv.Atoi(r.URL.Query().Get("sequence"))
if err != nil || sequence < 0 { if err != nil || sequence < 0 {

查看文件

@@ -278,3 +278,45 @@ func TestViewerSignalReturnsConflictBeforePublisherTrackReady(t *testing.T) {
t.Fatalf("expected viewer-signal 409 before video track is ready, got %d", res.Code) t.Fatalf("expected viewer-signal 409 before video track is ready, got %d", res.Code)
} }
} }
// TestLiveFrameUploadPublishesRelayFrame verifies that POSTing a live frame
// publishes the exact bytes to the session's public asset path and records the
// relay metadata (URL, timestamp, stream-connected flag) on the session.
func TestLiveFrameUploadPublishesRelayFrame(t *testing.T) {
	store, err := newSessionStore(t.TempDir())
	if err != nil {
		t.Fatalf("newSessionStore: %v", err)
	}
	srv := newMediaServer(store)

	created, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Session"})
	if err != nil {
		t.Fatalf("createSession: %v", err)
	}

	const payload = "jpeg-frame"
	req := httptest.NewRequest(http.MethodPost, "/media/sessions/"+created.ID+"/live-frame", strings.NewReader(payload))
	req.Header.Set("Content-Type", "image/jpeg")
	rec := httptest.NewRecorder()
	srv.routes().ServeHTTP(rec, req)
	if rec.Code != http.StatusAccepted {
		t.Fatalf("expected live-frame upload 202, got %d", rec.Code)
	}

	updated, err := store.getSession(created.ID)
	if err != nil {
		t.Fatalf("getSession: %v", err)
	}
	if updated.LiveFrameURL == "" || updated.LiveFrameUpdated == "" {
		t.Fatalf("expected live frame metadata to be recorded, got %#v", updated)
	}
	if !updated.StreamConnected {
		t.Fatalf("expected session stream connected after frame upload")
	}

	got, err := os.ReadFile(store.liveFramePath(created.ID))
	if err != nil {
		t.Fatalf("read live frame: %v", err)
	}
	if string(got) != payload {
		t.Fatalf("unexpected live frame content: %q", string(got))
	}
}