比较提交
37 次代码提交
67b27e3551
...
main
| 作者 | SHA1 | 提交日期 | |
|---|---|---|---|
|
|
495da60212 | ||
|
|
1adadbad8c | ||
|
|
b1752110fb | ||
|
|
0af88b3a15 | ||
|
|
902bd783c9 | ||
|
|
597f16d0b9 | ||
|
|
f3f7e1982c | ||
|
|
63dbfd2787 | ||
|
|
06b9701e03 | ||
|
|
8e9e4915e2 | ||
|
|
634a4704c7 | ||
|
|
bb46d26c0e | ||
|
|
bacd712dbc | ||
|
|
78a7c755e3 | ||
|
|
a211562860 | ||
|
|
09b1b95e2c | ||
|
|
922a9fb63f | ||
|
|
31bead3452 | ||
|
|
a5103685fb | ||
|
|
f9db6ef590 | ||
|
|
13e59b8e8a | ||
|
|
2b72ef9200 | ||
|
|
09cd5b4d85 | ||
|
|
7aba508247 | ||
|
|
cf06de944f | ||
|
|
4e4122d758 | ||
|
|
f0bbe4c82f | ||
|
|
4fb2d092d7 | ||
|
|
e3fe9a8e7b | ||
|
|
fe5e539a47 | ||
|
|
139dc61b61 | ||
|
|
264d49475b | ||
|
|
5c2dcf23ba | ||
|
|
3763f5b515 | ||
|
|
1ce94f6f57 | ||
|
|
669497e625 | ||
|
|
71caf0de19 |
@@ -0,0 +1,22 @@
|
||||
3D full-body avatar preview cutouts in this folder are derived from the Open Source Avatars registry:
|
||||
https://github.com/ToxSam/open-source-avatars
|
||||
|
||||
Registry summary:
|
||||
- Registry metadata/docs license: CC0
|
||||
- Individual avatars used here: CC0 from collection "100Avatars R3"
|
||||
|
||||
Integrated examples:
|
||||
- BeachKing
|
||||
Preview source: https://arweave.net/EGCdxkfTjjmNS4RGiAT_or17mG3717qnZ7R1EnZxLg8
|
||||
Model source: https://arweave.net/uKhDMselhdUyeJKjelpuVsL8s-a9v_Wqq75TQfCfnos
|
||||
- Jenny
|
||||
Preview source: https://arweave.net/4a6_AfH-PHvFMXqja7V42pF9hCn9ceIj5z5NAsK2SSs
|
||||
Model source: https://arweave.net/kgTirc4OvUWbJhIKC2CB3_pYsYuB62KTj90IdE8s3sk
|
||||
- Juanita
|
||||
Preview source: https://arweave.net/5RHeIXD9fezkpuFJS1TRtGkNIVfTKZP7Rkmh9pDmaTs
|
||||
Model source: https://arweave.net/nyMyZZx5lN2DXsmBgbGQSnt3PuXYN7AAjz9QJrjitLo
|
||||
- SportTV
|
||||
Preview source: https://arweave.net/_Qic8KV5P5mo5wJ2N3lbqX0iGVxtVDn4CxCUiM5-Qcg
|
||||
Model source: https://arweave.net/ISYr7xBXT_s4tLddbhFB3PpUhWg-H_BYs2UZhVLF1hA
|
||||
|
||||
Local files are optimized transparent WebP derivatives for faster in-browser overlay rendering.
|
||||
|
之后 宽度: | 高度: | 大小: 18 KiB |
|
之后 宽度: | 高度: | 大小: 26 KiB |
|
之后 宽度: | 高度: | 大小: 20 KiB |
|
之后 宽度: | 高度: | 大小: 19 KiB |
@@ -0,0 +1,20 @@
|
||||
Animal avatar SVG assets in this folder are sourced from Twemoji.
|
||||
|
||||
Source:
|
||||
https://github.com/jdecked/twemoji
|
||||
|
||||
Selected assets:
|
||||
- gorilla.svg
|
||||
- monkey.svg
|
||||
- dog.svg
|
||||
- pig.svg
|
||||
- cat.svg
|
||||
- fox.svg
|
||||
- panda.svg
|
||||
- lion.svg
|
||||
- tiger.svg
|
||||
- rabbit.svg
|
||||
|
||||
License:
|
||||
CC-BY 4.0
|
||||
https://creativecommons.org/licenses/by/4.0/
|
||||
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36"><path fill="#FFCC4D" d="M32.348 13.999s3.445-8.812 1.651-11.998c-.604-1.073-8 1.998-10.723 5.442 0 0-2.586-.86-5.276-.86s-5.276.86-5.276.86C10.001 3.999 2.605.928 2.001 2.001.207 5.187 3.652 13.999 3.652 13.999c-.897 1.722-1.233 4.345-1.555 7.16-.354 3.086.35 5.546.658 6.089.35.617 2.123 2.605 4.484 4.306 3.587 2.583 8.967 3.445 10.761 3.445s7.174-.861 10.761-3.445c2.361-1.701 4.134-3.689 4.484-4.306.308-.543 1.012-3.003.659-6.089-.324-2.814-.659-5.438-1.556-7.16z"/><path fill="#F18F26" d="M2.359 2.971c.2-.599 5.348 2.173 6.518 5.404 0 0-3.808 2.624-4.528 4.624 0 0-2.99-7.028-1.99-10.028z"/><path fill="#FFCC4D" d="M5.98 7.261c0-1.414 5.457 2.733 4.457 3.733s-1.255.72-2.255 1.72S5.98 8.261 5.98 7.261z"/><path fill="#F18F26" d="M33.641 2.971c-.2-.599-5.348 2.173-6.518 5.404 0 0 3.808 2.624 4.528 4.624 0 0 2.99-7.028 1.99-10.028z"/><path fill="#FFCC4D" d="M30.02 7.261c0-1.414-5.457 2.733-4.457 3.733s1.255.72 2.255 1.72 2.202-4.453 2.202-5.453z"/><path fill="#292F33" d="M14.001 20.001c0 1.105-.896 1.999-2 1.999s-2-.894-2-1.999c0-1.104.896-1.999 2-1.999s2 .896 2 1.999zm11.998 0c0 1.105-.896 1.999-2 1.999-1.105 0-2-.894-2-1.999 0-1.104.895-1.999 2-1.999s2 .896 2 1.999z"/><path fill="#FEE7B8" d="M2.201 30.458c-.148 0-.294-.065-.393-.19-.171-.217-.134-.531.083-.702.162-.127 4.02-3.12 10.648-2.605.275.021.481.261.46.536-.021.275-.257.501-.537.46-6.233-.474-9.915 2.366-9.951 2.395-.093.07-.202.106-.31.106zm8.868-4.663c-.049 0-.1-.007-.149-.022-4.79-1.497-8.737-.347-8.777-.336-.265.081-.543-.07-.623-.335-.079-.265.071-.543.335-.622.173-.052 4.286-1.247 9.362.338.264.083.411.363.328.627-.066.213-.263.35-.476.35zm22.73 4.663c.148 0 .294-.065.393-.19.171-.217.134-.531-.083-.702-.162-.127-4.02-3.12-10.648-2.605-.275.021-.481.261-.46.536.022.275.257.501.537.46 6.233-.474 9.915 2.366 9.951 2.395.093.07.202.106.31.106zm-8.868-4.663c.049 0 .1-.007.149-.022 4.79-1.497 8.737-.347 
8.777-.336.265.081.543-.07.623-.335.079-.265-.071-.543-.335-.622-.173-.052-4.286-1.247-9.362.338-.264.083-.411.363-.328.627.066.213.263.35.476.35z"/><path fill="#67757F" d="M24.736 30.898c-.097-.258-.384-.392-.643-.294-.552.206-1.076.311-1.559.311-1.152 0-1.561-.306-2.033-.659-.451-.338-.956-.715-1.99-.803v-2.339c0-.276-.224-.5-.5-.5s-.5.224-.5.5v2.373c-.81.115-1.346.439-1.816.743-.568.367-1.059.685-2.083.685-.482 0-1.006-.104-1.558-.311-.258-.095-.547.035-.643.294-.097.259.035.547.293.644.664.247 1.306.373 1.907.373 1.319 0 2.014-.449 2.627-.845.524-.339.98-.631 1.848-.635.992.008 1.358.278 1.815.621.538.403 1.147.859 2.633.859.601 0 1.244-.126 1.908-.373.259-.097.391-.385.294-.644z"/><path fill="#E75A70" d="M19.4 24.807h-2.8c-.64 0-1.163.523-1.163 1.163 0 .639.523 1.163 1.163 1.163h.237v.345c0 .639.523 1.163 1.163 1.163s1.163-.523 1.163-1.163v-.345h.237c.639 0 1.163-.523 1.163-1.163s-.524-1.163-1.163-1.163z"/><path fill="#F18F26" d="M18.022 17.154c-.276 0-.5-.224-.5-.5V8.37c0-.276.224-.5.5-.5s.5.224.5.5v8.284c0 .277-.223.5-.5.5zM21 15.572c-.276 0-.5-.224-.5-.5 0-2.882 1.232-5.21 1.285-5.308.13-.244.435-.334.677-.204.243.13.334.433.204.677-.012.021-1.166 2.213-1.166 4.835 0 .276-.224.5-.5.5zm-6 0c-.276 0-.5-.224-.5-.5 0-2.623-1.155-4.814-1.167-4.835-.13-.244-.038-.546.205-.677.242-.131.545-.039.676.204.053.098 1.285 2.426 1.285 5.308.001.276-.223.5-.499.5z"/></svg>
|
||||
|
之后 宽度: | 高度: | 大小: 3.3 KiB |
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36"><path fill="#DD2E44" d="M15 27v6s0 3 3 3 3-3 3-3v-6h-6z"/><path fill="#BE1931" d="M15 33l.001.037c1.041-.035 2.016-.274 2.632-1.286.171-.281.563-.281.735 0 .616 1.011 1.591 1.251 2.632 1.286V27h-6v6z"/><path fill="#D99E82" d="M31.954 21.619c0 6.276-5 6.276-5 6.276h-18s-5 0-5-6.276c0-6.724 5-18.619 14-18.619s14 12.895 14 18.619z"/><path fill="#F4C7B5" d="M18 20c-7 0-10 3.527-10 6.395 0 3.037 2.462 5.5 5.5 5.5 1.605 0 3.042-.664 4.049-2.767.185-.386.716-.386.901 0 1.007 2.103 2.445 2.767 4.049 2.767 3.038 0 5.5-2.463 5.5-5.5C28 23.527 25 20 18 20z"/><path fill="#292F33" d="M15 22.895c-1 1 2 4 3 4s4-3 3-4-5-1-6 0zM13 19c-1.1 0-2-.9-2-2v-2c0-1.1.9-2 2-2s2 .9 2 2v2c0 1.1-.9 2-2 2zm10 0c-1.1 0-2-.9-2-2v-2c0-1.1.9-2 2-2s2 .9 2 2v2c0 1.1-.9 2-2 2z"/><path fill="#662113" d="M15 3.608C13.941 2.199 11.681.881 2.828 4.2-1.316 5.754.708 17.804 3.935 18.585c1.106 0 4.426 0 4.426-8.852 0-.22-.002-.423-.005-.625C10.35 6.298 12.5 4.857 15 3.608zm18.172.592C24.319.881 22.059 2.199 21 3.608c2.5 1.25 4.65 2.691 6.644 5.501-.003.201-.005.404-.005.625 0 8.852 3.319 8.852 4.426 8.852 3.227-.782 5.251-12.832 1.107-14.386z"/><circle fill="#D99E82" cx="23.5" cy="25.5" r=".5"/><circle fill="#D99E82" cx="11.5" cy="25.5" r=".5"/><circle fill="#D99E82" cx="25.5" cy="27.5" r=".5"/><circle fill="#D99E82" cx="10.5" cy="27.5" r=".5"/><circle fill="#D99E82" cx="23" cy="28" r="1"/><circle fill="#D99E82" cx="13" cy="28" r="1"/><path fill="#380F09" d="M9.883 7.232c-.259-.673-.634-1.397-1.176-1.939-.391-.391-1.023-.391-1.414 0s-.391 1.023 0 1.414c.57.57 1.066 1.934 1.068 2.346.145-.404.839-1.15 1.522-1.821zm16.217 0c.259-.672.634-1.397 1.176-1.939.391-.391 1.023-.391 1.414 0s.391 1.023 0 1.414c-.57.57-1.066 1.934-1.068 2.346-.145-.404-.839-1.15-1.522-1.821z"/></svg>
|
||||
|
之后 宽度: | 高度: | 大小: 1.8 KiB |
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36"><path fill="#F4900C" d="M13.431 9.802c.658 2.638-8.673 10.489-11.244 4.098C.696 10.197-.606 2.434.874 2.065c1.48-.368 11.9 5.098 12.557 7.737z"/><path fill="#A0041E" d="M11.437 10.355c.96 1.538-1.831 4.561-3.368 5.522-1.538.961-2.899-.552-4.414-4.414-.662-1.689-1.666-6.27-1.103-6.622.562-.351 7.924 3.976 8.885 5.514z"/><path fill="#F4900C" d="M22.557 9.802C21.9 12.441 31.23 20.291 33.802 13.9c1.49-3.703 2.792-11.466 1.312-11.835-1.48-.368-11.899 5.098-12.557 7.737z"/><path fill="#A0041E" d="M24.552 10.355c-.96 1.538 1.831 4.561 3.368 5.522 1.537.961 2.898-.552 4.413-4.414.662-1.688 1.666-6.269 1.104-6.621-.563-.352-7.924 3.975-8.885 5.513z"/><path fill="#F18F26" d="M32.347 26.912c0-.454-.188-1.091-.407-1.687.585.028 1.519.191 2.77.817-.008-.536-.118-.984-.273-1.393.041.02.075.034.116.055-1.103-3.31-3.309-5.517-3.309-5.517h2.206c-2.331-4.663-4.965-8.015-8.075-9.559-1.39-.873-3.688-1.338-7.373-1.339h-.003c-3.695 0-5.996.468-7.385 1.346-3.104 1.547-5.734 4.896-8.061 9.552H4.76s-2.207 2.206-3.311 5.517c.03-.015.055-.025.084-.04-.201.392-.307.847-.282 1.377 1.263-.632 2.217-.792 2.813-.818-.189.513-.343 1.044-.386 1.475-.123.371-.191.812-.135 1.343C6.75 26.584 8.25 26.792 10 27.667 11.213 31.29 14.206 34 18.001 34c3.793 0 6.746-2.794 7.958-6.416 1.458-1.25 3.708-.875 6.416.416.066-.414.036-.773-.036-1.093l.008.005z"/><path fill="#FFD983" d="M31.243 23.601c.006 0 1.108.003 3.309 1.103-1.249-2.839-7.525-4.07-9.931-3.291-1.171 1.954-1.281 5.003-3.383 6.622-1.741 1.431-4.713 1.458-6.479 0-2.345-1.924-2.559-5.813-3.382-6.622-2.407-.781-8.681.454-9.931 3.291 2.201-1.101 3.304-1.103 3.309-1.103 0 .001-1.103 2.208-1.103 3.311l.001-.001v.001c2.398-1.573 5.116-2.271 7.429-.452 1.666 7.921 12.293 7.545 13.833 0 2.314-1.818 5.03-1.122 7.429.452v-.001l.001.001c.002-1.103-1.101-3.311-1.102-3.311z"/><path fill="#272B2B" d="M11 17s0-1.5 1.5-1.5S14 17 14 17v1.5s0 1.5-1.5 1.5-1.5-1.5-1.5-1.5V17zm11 0s0-1.5 1.5-1.5S25 17 25 
17v1.5s0 1.5-1.5 1.5-1.5-1.5-1.5-1.5V17zm-7.061 10.808c-1.021.208 2.041 3.968 3.062 3.968 1.02 0 4.082-3.76 3.062-3.968-1.021-.208-5.103-.208-6.124 0z"/></svg>
|
||||
|
之后 宽度: | 高度: | 大小: 2.1 KiB |
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36"><path fill="#31373D" d="M5 16c0-4-5-3-4 1s3 5 3 5l1-6zm26 0c0-4 5-3 4 1s-3 5-3 5l-1-6z"/><path fill="#31373D" d="M32.65 21.736c0 10.892-4.691 14.087-14.65 14.087-9.958 0-14.651-3.195-14.651-14.087S8.042.323 18 .323c9.959 0 14.65 10.521 14.65 21.413z"/><path fill="#66757F" d="M27.567 23c1.49-4.458 2.088-7.312-.443-7.312H8.876c-2.532 0-1.933 2.854-.444 7.312C3.504 34.201 17.166 34.823 18 34.823S32.303 33.764 27.567 23z"/><path fill="#31373D" d="M15 18.003c0 1.105-.896 2-2 2s-2-.895-2-2c0-1.104.896-1 2-1s2-.105 2 1zm10 0c0 1.105-.896 2-2 2s-2-.895-2-2c0-1.104.896-1 2-1s2-.105 2 1z"/><ellipse fill="#31373D" cx="15.572" cy="23.655" rx="1.428" ry="1"/><path fill="#31373D" d="M21.856 23.655c0 .553-.639 1-1.428 1-.79 0-1.429-.447-1.429-1 0-.553.639-1 1.429-1s1.428.448 1.428 1z"/><path fill="#99AAB5" d="M21.02 21.04c-1.965-.26-3.02.834-3.02.834s-1.055-1.094-3.021-.834c-3.156.417-3.285 3.287-1.939 3.105.766-.104.135-.938 1.713-1.556 1.579-.616 3.247.66 3.247.66s1.667-1.276 3.246-.659.947 1.452 1.714 1.556c1.346.181 1.218-2.689-1.94-3.106z"/><path fill="#31373D" d="M24.835 30.021c-1.209.323-3.204.596-6.835.596s-5.625-.272-6.835-.596c-3.205-.854-1.923-1.735 0-1.477 1.923.259 3.631.415 6.835.415 3.205 0 4.914-.156 6.835-.415 1.923-.258 3.204.623 0 1.477z"/><path fill="#66757F" d="M4.253 16.625c1.403-1.225-1.078-3.766-2.196-2.544-.341.373.921-.188 1.336 1.086.308.942.001 2.208.86 1.458zm27.493 0c-1.402-1.225 1.078-3.766 2.196-2.544.341.373-.921-.188-1.337 1.086-.306.942 0 2.208-.859 1.458z"/></svg>
|
||||
|
之后 宽度: | 高度: | 大小: 1.5 KiB |
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36"><path fill="#662113" d="M32.325 10.958s2.315.024 3.511 1.177c-.336-4.971-2.104-8.249-5.944-10.13-3.141-1.119-6.066 1.453-6.066 1.453s.862-1.99 2.19-2.746C23.789.236 21.146 0 18 0c-3.136 0-5.785.227-8.006.701 1.341.745 2.215 2.758 2.215 2.758S9.194.803 6 2.053C2.221 3.949.481 7.223.158 12.174c1.183-1.19 3.55-1.215 3.55-1.215S-.105 13.267.282 16.614c.387 2.947 1.394 5.967 2.879 8.722C3.039 22.15 5.917 20 5.917 20s-2.492 5.96-.581 8.738c1.935 2.542 4.313 4.641 6.976 5.916-.955-1.645-.136-3.044-.103-2.945.042.125.459 3.112 2.137 3.743 1.178.356 2.4.548 3.654.548 1.292 0 2.55-.207 3.761-.583 1.614-.691 2.024-3.585 2.064-3.708.032-.098.843 1.287-.09 2.921 2.706-1.309 5.118-3.463 7.064-6.073 1.699-2.846-.683-8.557-.683-8.557s2.85 2.13 2.757 5.288c1.556-2.906 2.585-6.104 2.911-9.2-.035-3.061-3.459-5.13-3.459-5.13z"/><path fill="#FFCC4D" d="M13.859 9.495c.596 2.392.16 4.422-2.231 5.017-2.392.596-6.363.087-6.958-2.304-.596-2.392.469-5.39 1.81-5.724 1.341-.334 6.784.62 7.379 3.011zm9.104 18.432c0 2.74-2.222 4.963-4.963 4.963s-4.963-2.223-4.963-4.963c0-2.741 2.223-4.964 4.963-4.964 2.741 0 4.963 2.222 4.963 4.964z"/><path fill="#DD2E44" d="M21.309 27.927c0 1.827-1.481 3.309-3.309 3.309s-3.309-1.481-3.309-3.309c0-1.827 1.481-3.31 3.309-3.31s3.309 1.483 3.309 3.31z"/><path fill="#E6AAAA" d="M11.052 8.997c.871 1.393.447 3.229-.946 4.1-1.394.871-2.608.797-3.479-.596-.871-1.394-.186-4.131.324-4.45.51-.319 3.23-.448 4.101.946z"/><path fill="#FFCC4D" d="M22.141 9.495c-.596 2.392-.159 4.422 2.232 5.017 2.392.596 6.363.087 6.959-2.304.596-2.392-.47-5.39-1.811-5.724-1.342-.334-6.786.62-7.38 3.011z"/><path fill="#E6AAAA" d="M24.948 8.997c-.871 1.393-.447 3.229.945 4.1 1.394.871 2.608.797 3.479-.596.871-1.394.185-4.131-.324-4.45-.51-.319-3.229-.448-4.1.946z"/><path fill="#FFCC4D" d="M18 7.125h-.002C5.167 7.126 7.125 12.083 8.5 18.667 9.875 25.25 10.384 27 10.384 27h15.228s.51-1.75 1.885-8.333C28.872 12.083 30.829 7.126 18 
7.125z"/><path fill="#272B2B" d="M12 16s0-1.5 1.5-1.5S15 16 15 16v1.5s0 1.5-1.5 1.5-1.5-1.5-1.5-1.5V16zm9 0s0-1.5 1.5-1.5S24 16 24 16v1.5s0 1.5-1.5 1.5-1.5-1.5-1.5-1.5V16z"/><path fill="#FFE8B6" d="M20.168 21.521c-1.598 0-1.385.848-2.168 2.113-.783-1.266-.571-2.113-2.168-2.113-6.865 0-6.837.375-6.865 2.828-.058 4.986 2.802 6.132 5.257 6.06 1.597-.048 2.994-.88 3.777-2.131.783 1.251 2.179 2.083 3.776 2.131 2.455.072 5.315-1.073 5.257-6.06-.029-2.453-.001-2.828-6.866-2.828z"/><path fill="#272B2B" d="M14.582 21.411c-1.14.233 2.279 4.431 3.418 4.431s4.559-4.198 3.419-4.431c-1.14-.232-5.698-.232-6.837 0z"/><circle fill="#D99E82" cx="11.5" cy="24.5" r=".5"/><circle fill="#D99E82" cx="10.5" cy="26.5" r=".5"/><circle fill="#D99E82" cx="12.5" cy="27.5" r=".5"/><circle fill="#D99E82" cx="24.5" cy="24.5" r=".5"/><circle fill="#D99E82" cx="25.5" cy="26.5" r=".5"/><circle fill="#D99E82" cx="23.5" cy="27.5" r=".5"/></svg>
|
||||
|
之后 宽度: | 高度: | 大小: 2.8 KiB |
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36"><ellipse transform="rotate(-14.999 5.05 17.456)" fill="#D79E84" cx="5.05" cy="17.455" rx="3.818" ry="5.455"/><ellipse transform="rotate(-75.001 31.05 17.455)" fill="#D79E84" cx="31.05" cy="17.455" rx="5.455" ry="3.818"/><path fill="#BF6952" d="M19.018 36h-2.036C10.264 36 3.75 30.848 3.75 23.636c0-4.121 1.527-6.182 1.527-6.182s-.509-2.061-.509-4.121C4.768 7.152 11.282 2 18 2c6.718 0 13.232 6.182 13.232 11.333 0 2.061-.509 4.121-.509 4.121s1.527 2.061 1.527 6.182C32.25 30.848 25.736 36 19.018 36z"/><path fill="#D79E84" d="M30 16.042C30 12.153 26.825 9 22.909 9c-1.907 0-3.635.752-4.909 1.968C16.726 9.752 14.998 9 13.091 9 9.175 9 6 12.153 6 16.042c0 2.359 1.172 4.441 2.965 5.719-.503 1.238-.783 2.6-.783 4.031C8.182 31.476 12.578 35 18 35s9.818-3.524 9.818-9.208c0-1.431-.28-2.793-.783-4.031C28.828 20.483 30 18.4 30 16.042z"/><ellipse fill="#292F33" cx="13" cy="17" rx="2.25" ry="3.25"/><ellipse fill="#292F33" cx="23" cy="17" rx="2.25" ry="3.25"/><path fill="#642116" d="M18 32.727c2.838 0 5.254-1.505 6.162-3.61.375-.871-.262-1.844-1.21-1.844h-9.904c-.948 0-1.585.974-1.21 1.844.908 2.105 3.324 3.61 6.162 3.61z"/><circle fill="#642116" cx="16.25" cy="23" r="1"/><circle fill="#642116" cx="19.75" cy="23" r="1"/><path fill="#BF6952" d="M22.66.175s-5.455-1.091-7.636 2.182 4.364 1.091 4.364 1.091S20.478.175 22.66.175z"/></svg>
|
||||
|
之后 宽度: | 高度: | 大小: 1.4 KiB |
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36"><circle fill="#272B2B" cx="7" cy="6" r="6"/><circle fill="#272B2B" cx="29" cy="6" r="6"/><circle fill="#66757F" cx="7" cy="6" r="4"/><circle fill="#66757F" cx="29" cy="6" r="4"/><path fill="#EEE" d="M35 22c0 7-6.375 12-17 12S1 29 1 22C1 22 2.308 0 18 0s17 22 17 22z"/><circle fill="#CCD6DD" cx="18" cy="30" r="6"/><circle fill="#DD2E44" cx="18" cy="30" r="4"/><path fill="#272B2B" d="M20.709 12.654C25.163 9.878 32 17 26.952 22.67 23.463 26.591 20 25 20 25s-2.636-10.26.709-12.346zm-5.442.011C10.813 9.888 3.976 17.01 9.023 22.681c3.49 3.92 6.953 2.329 6.953 2.329s2.636-10.26-.709-12.345z"/><path fill="#66757F" d="M11 17s0-2 2-2 2 2 2 2v2s0 2-2 2-2-2-2-2v-2z"/><path fill="#FFF" d="M18 20S7 23.687 7 27s2.687 6 6 6c2.088 0 3.925-1.067 5-2.685C19.074 31.933 20.912 33 23 33c3.313 0 6-2.687 6-6s-11-7-11-7z"/><path fill="#66757F" d="M21 17s0-2 2-2 2 2 2 2v2s0 2-2 2-2-2-2-2v-2z"/><path fill="#272B2B" d="M13.125 25c-1.624 1 3.25 4 4.875 4s6.499-3 4.874-4-8.124-1-9.749 0z"/></svg>
|
||||
|
之后 宽度: | 高度: | 大小: 1.0 KiB |
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36"><path fill="#F4ABBA" d="M34.193 13.329c.387-.371.733-.795 1.019-1.28 1.686-2.854.27-10.292-.592-10.8-.695-.411-5.529 1.05-8.246 3.132C23.876 2.884 21.031 2 18 2c-3.021 0-5.856.879-8.349 2.367C6.93 2.293 2.119.839 1.424 1.249c-.861.508-2.276 7.947-.592 10.8.278.471.615.884.989 1.249C.666 15.85 0 18.64 0 21.479 0 31.468 8.011 34 18 34s18-2.532 18-12.521c0-2.828-.66-5.606-1.807-8.15z"/><path fill="#EA596E" d="M7.398 5.965c-2.166-1.267-4.402-2.08-4.8-1.845-.57.337-1.083 4.998-.352 8.265 1.273-2.483 3.04-4.682 5.152-6.42zm26.355 6.419c.733-3.267.219-7.928-.351-8.265-.398-.235-2.635.578-4.801 1.845 2.114 1.739 3.88 3.938 5.152 6.42zM28 23.125c0 4.487-3.097 9.375-10 9.375-6.904 0-10-4.888-10-9.375S11.096 17.5 18 17.5c6.903 0 10 1.138 10 5.625z"/><path fill="#662113" d="M15 24.6c0 1.857-.34 2.4-1.5 2.4s-1.5-.543-1.5-2.4c0-1.856.34-2.399 1.5-2.399s1.5.542 1.5 2.399zm9 0c0 1.857-.34 2.4-1.5 2.4s-1.5-.543-1.5-2.4c0-1.856.34-2.399 1.5-2.399s1.5.542 1.5 2.399z"/><circle fill="#292F33" cx="7" cy="17" r="2"/><circle fill="#292F33" cx="29" cy="17" r="2"/></svg>
|
||||
|
之后 宽度: | 高度: | 大小: 1.1 KiB |
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36"><path fill="#99AAB5" d="M33.799.005c-.467-.178-7.998 3.971-9.969 9.131-1.166 3.052-1.686 6.058-1.652 8.112C20.709 16.459 19.257 16 18 16s-2.709.458-4.178 1.249c.033-2.055-.486-5.061-1.652-8.112C10.2 3.977 2.668-.173 2.201.005c-.455.174 4.268 16.044 7.025 20.838C6.805 23.405 5 26.661 5 29.828c0 3.234 1.635 5.14 4 5.94 2.531.857 5-.94 9-.94s6.469 1.798 9 .94c2.365-.801 4-2.706 4-5.94 0-3.166-1.805-6.423-4.225-8.984C29.53 16.049 34.255.179 33.799.005z"/><path fill="#F4ABBA" d="M12.692 17.922c-.178-1.54-.68-3.55-1.457-5.584-1.534-4.016-5.686-7.245-6.049-7.107-.319.122 2.627 10.14 4.783 14.863.866-.824 1.786-1.563 2.723-2.172zm13.338 2.172c2.156-4.723 5.102-14.741 4.784-14.862-.363-.139-4.516 3.091-6.05 7.107-.777 2.034-1.279 4.043-1.457 5.583.937.609 1.857 1.348 2.723 2.172z"/><path fill="#CCD6DD" d="M25 30c0 2.762-3.06 5-6.834 5-3.773 0-6.833-2.238-6.833-5s3.06-5 6.833-5C21.94 25 25 27.238 25 30z"/><path fill="#FFF" d="M21 30.578c0 2.762-.238 3-3 3-2.761 0-3-.238-3-3 0-1 6-1 6 0z"/><circle fill="#292F33" cx="12.5" cy="24.328" r="1.5"/><circle fill="#292F33" cx="23.5" cy="24.328" r="1.5"/><path fill="#F4ABBA" d="M21 25.828c0 1.657-2 3-3 3s-3-1.343-3-3 6-1.657 6 0z"/></svg>
|
||||
|
之后 宽度: | 高度: | 大小: 1.2 KiB |
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36"><circle fill="#FFCC4D" cx="7" cy="6" r="6"/><circle fill="#FFCC4D" cx="18" cy="30" r="6"/><circle fill="#DD2E44" cx="18" cy="30" r="4"/><circle fill="#FFCC4D" cx="29" cy="6" r="6"/><circle fill="#E6AAAA" cx="7" cy="6" r="4"/><circle fill="#E6AAAA" cx="29" cy="6" r="4"/><path fill="#FFCC4D" d="M34 22c0 7-4.923 7-4.923 7H6.923S2 29 2 22C2 22 3.231 0 18 0c14.77 0 16 22 16 22z"/><path fill="#272B2B" d="M11 17s0-2 2-2 2 2 2 2v2s0 2-2 2-2-2-2-2v-2zm10 0s0-2 2-2 2 2 2 2v2s0 2-2 2-2-2-2-2v-2z"/><path fill="#FFF" d="M23.678 23c-2.402 0-4.501.953-5.678 2.378C16.823 23.953 14.723 23 12.321 23 2 23 2.043 23.421 2 26.182c-.087 5.61 6.63 6.9 10.321 6.818 2.401-.053 4.502-.989 5.679-2.397 1.177 1.408 3.276 2.345 5.678 2.397 3.691.082 10.409-1.208 10.321-6.818-.043-2.761 0-3.182-10.321-3.182z"/><path fill="#272B2B" d="M33.66 25.242c.204.279.333.588.339.939.03 1.905-.745 3.303-1.915 4.327L26.999 31l6.661-5.758zM15 25c-1 1 2 4 3 4s4-3 3-4-5-1-6 0zM10 3c2.667 2 8 4 8 4s5.333-2 8-4l-8 1-8-1zm8-1s1.652-.62 3.576-1.514C20.48.178 19.295 0 18 0s-2.481.178-3.576.486C16.348 1.38 18 2 18 2zm-7 7c3 2 7 4 7 4s4-2 7-4l-7 1-7-1zm20.645 2.285L27 15l6.006.75c-.334-1.401-.777-2.928-1.361-4.465zm1.911 7.159L28 24h5.835c.102-.595.165-1.251.165-2 0 0-.081-1.43-.444-3.556zm-31.112 0C2.082 20.57 2 22 2 22c0 .748.063 1.405.165 2H8l-5.556-5.556zm-.105 6.798c-.204.279-.333.588-.339.94-.03 1.905.745 3.303 1.916 4.327L9 31l-6.661-5.758zM9 15l-4.644-3.715c-.584 1.537-1.028 3.064-1.361 4.466L9 15z"/></svg>
|
||||
|
之后 宽度: | 高度: | 大小: 1.5 KiB |
@@ -9,6 +9,13 @@ export type CameraZoomState = {
|
||||
focusMode: string;
|
||||
};
|
||||
|
||||
// Result of a successful camera acquisition via requestCameraStream().
export type CameraRequestResult = {
  // Live media stream returned by getUserMedia.
  stream: MediaStream;
  // Facing mode actually in effect: taken from the video track's reported
  // settings when recognizable, otherwise the caller's requested mode.
  appliedFacingMode: "user" | "environment";
  // True when the stream carries at least one audio track.
  audioEnabled: boolean;
  // True when a degraded constraint set (or an audio-less retry) was needed
  // instead of the first-choice video/audio combination.
  usedFallback: boolean;
};
|
||||
|
||||
type NumericRange = {
|
||||
min: number;
|
||||
max: number;
|
||||
@@ -66,6 +73,98 @@ export function getCameraVideoConstraints(
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeVideoConstraintCandidate(candidate: MediaTrackConstraints | true) {
|
||||
if (candidate === true) {
|
||||
return { label: "camera-any", video: true as const };
|
||||
}
|
||||
|
||||
return {
|
||||
label: JSON.stringify(candidate),
|
||||
video: candidate,
|
||||
};
|
||||
}
|
||||
|
||||
function createFallbackVideoCandidates(
|
||||
facingMode: "user" | "environment",
|
||||
isMobile: boolean,
|
||||
preset: CameraQualityPreset,
|
||||
) {
|
||||
const base = getCameraVideoConstraints(facingMode, isMobile, preset);
|
||||
const alternateFacing = facingMode === "environment" ? "user" : "environment";
|
||||
const lowRes = {
|
||||
facingMode,
|
||||
width: { ideal: isMobile ? 640 : 960 },
|
||||
height: { ideal: isMobile ? 360 : 540 },
|
||||
} satisfies MediaTrackConstraints;
|
||||
const lowResAlternate = {
|
||||
facingMode: alternateFacing,
|
||||
width: { ideal: isMobile ? 640 : 960 },
|
||||
height: { ideal: isMobile ? 360 : 540 },
|
||||
} satisfies MediaTrackConstraints;
|
||||
const anyCamera = {
|
||||
width: { ideal: isMobile ? 640 : 960 },
|
||||
height: { ideal: isMobile ? 360 : 540 },
|
||||
} satisfies MediaTrackConstraints;
|
||||
|
||||
const candidates = [
|
||||
normalizeVideoConstraintCandidate(base),
|
||||
normalizeVideoConstraintCandidate({
|
||||
...base,
|
||||
frameRate: undefined,
|
||||
}),
|
||||
normalizeVideoConstraintCandidate(lowRes),
|
||||
normalizeVideoConstraintCandidate(lowResAlternate),
|
||||
normalizeVideoConstraintCandidate(anyCamera),
|
||||
normalizeVideoConstraintCandidate(true),
|
||||
];
|
||||
|
||||
const deduped = new Map<string, { video: MediaTrackConstraints | true }>();
|
||||
candidates.forEach((candidate) => {
|
||||
if (!deduped.has(candidate.label)) {
|
||||
deduped.set(candidate.label, { video: candidate.video });
|
||||
}
|
||||
});
|
||||
return Array.from(deduped.values());
|
||||
}
|
||||
|
||||
export async function requestCameraStream(options: {
|
||||
facingMode: "user" | "environment";
|
||||
isMobile: boolean;
|
||||
preset: CameraQualityPreset;
|
||||
audio?: false | MediaTrackConstraints;
|
||||
}) {
|
||||
const videoCandidates = createFallbackVideoCandidates(options.facingMode, options.isMobile, options.preset);
|
||||
const audioCandidates = options.audio ? [options.audio, false] : [false];
|
||||
let lastError: unknown = null;
|
||||
|
||||
for (const audio of audioCandidates) {
|
||||
for (let index = 0; index < videoCandidates.length; index += 1) {
|
||||
const video = videoCandidates[index]?.video ?? true;
|
||||
try {
|
||||
const stream = await navigator.mediaDevices.getUserMedia({ video, audio });
|
||||
const videoTrack = stream.getVideoTracks()[0] || null;
|
||||
const settings = (
|
||||
videoTrack && typeof (videoTrack as MediaStreamTrack & { getSettings?: () => unknown }).getSettings === "function"
|
||||
? (videoTrack as MediaStreamTrack & { getSettings: () => unknown }).getSettings()
|
||||
: {}
|
||||
) as Record<string, unknown>;
|
||||
const appliedFacingMode = settings.facingMode === "user" ? "user" : settings.facingMode === "environment" ? "environment" : options.facingMode;
|
||||
|
||||
return {
|
||||
stream,
|
||||
appliedFacingMode,
|
||||
audioEnabled: stream.getAudioTracks().length > 0,
|
||||
usedFallback: index > 0 || audio === false && Boolean(options.audio),
|
||||
} satisfies CameraRequestResult;
|
||||
} catch (error) {
|
||||
lastError = error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw lastError instanceof Error ? lastError : new Error("无法访问摄像头");
|
||||
}
|
||||
|
||||
export function getLiveAnalysisBitrate(preset: CameraQualityPreset, isMobile: boolean) {
|
||||
switch (preset) {
|
||||
case "economy":
|
||||
|
||||
@@ -8,6 +8,311 @@ export type ChangeLogEntry = {
|
||||
};
|
||||
|
||||
export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
|
||||
{
|
||||
version: "2026.03.17-live-camera-relay-mp4-hardening",
|
||||
releaseDate: "2026-03-17",
|
||||
repoVersion: "1adadba",
|
||||
summary:
|
||||
"修复实时分析 relay 预览在 Chrome `mp4` 分段下容易失效的问题,并让 live-camera 录制优先回到更稳定的 `webm`。",
|
||||
features: [
|
||||
"media 服务在 relay 会话收到第一段 `mp4` 时会额外保留初始化片段,后续滚动缓存即使裁掉旧分段,也能继续为 preview 重建可解码的输入源",
|
||||
"relay preview 构建会跳过明显异常的小 `mp4` 分段,并优先尝试把保留的初始化片段与当前缓存拼成单一输入后再转成 `preview.webm`",
|
||||
"如果 relay preview 本轮重建失败,但磁盘上仍有上一版可播放 `preview.webm`,worker 会保留旧预览继续对 viewer 提供播放,而不是直接把同步观看打成永久失败",
|
||||
"live-camera 的合成录制 mime 选择已改为优先 `video/webm`,Chrome 不再默认上传 fragmented `mp4` relay 分段,从源头减少 `trex/tfhd` 类 ffmpeg 拼接失败",
|
||||
],
|
||||
tests: [
|
||||
"cd media && go test ./...",
|
||||
"pnpm check",
|
||||
"pnpm build",
|
||||
"部署后线上 smoke: 已确认 `https://te.hao.work/` 正在提供新构建;当前线上仍有一条补丁前启动的旧 `mp4` relay 会话在运行,因此完整的 `webm` relay 端到端验证需要在重启该实时分析会话后继续确认",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.17-live-camera-media-asset-url",
|
||||
releaseDate: "2026-03-17",
|
||||
repoVersion: "0af88b3",
|
||||
summary:
|
||||
"修复同步观看预览地址被重复拼接 `/media` 导致的 404,观看端可以继续打开 relay 缓存视频。",
|
||||
features: [
|
||||
"共享的 `getMediaAssetUrl()` 现在会保留已带 `/media/` 前缀的应用内路径,不再把 `/media/assets/...` 再次拼成 `/media/media/assets/...`",
|
||||
"当服务端直接返回完整 `https://...` 外链时,前端会原样使用该地址,避免对外部媒体地址做错误拼接",
|
||||
"其他仍是普通相对路径的媒体资源会继续自动补齐 `/media` 前缀,因此旧调用方无需改动",
|
||||
"同步观看点击“同步观看”后,请求的 preview 地址恢复为 `/media/assets/sessions/.../preview.webm`,不再因 `404 page not found` 导致无视频可播",
|
||||
"线上 smoke 已确认 `https://te.hao.work/` 已切换到包含本次修复的新构建,而不是继续提供部署前的旧资源 revision",
|
||||
],
|
||||
tests: [
|
||||
"pnpm vitest run client/src/lib/media.test.ts",
|
||||
"pnpm check",
|
||||
"pnpm build",
|
||||
"playwright-skill 线上 smoke: 登录 `H1` 后访问 `https://te.hao.work/live-camera`,确认 viewer 实际请求 `https://te.hao.work/media/assets/sessions/.../preview.webm?...` 并返回 `200`,同时不存在 `/media/media/...` 双前缀请求",
|
||||
"线上 smoke: 已确认部署前公开站点还是旧 revision;部署后 `https://te.hao.work/` 已切换到包含本次修复的新构建",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.17-live-camera-pose-buffer-window",
|
||||
releaseDate: "2026-03-17",
|
||||
repoVersion: "f3f7e19+pose-buffer-window",
|
||||
summary:
|
||||
"修复实时分析启动时的 MediaPipe Pose 模块加载崩溃,并把多端同步缓存改为默认 2 分钟、可选 10 秒到 5 分钟。",
|
||||
features: [
|
||||
"live-camera 开始分析时不再直接解构 `import(\"@mediapipe/pose\")` 的返回值,而是兼容 `Pose`、`default.Pose` 和默认导出三种形态;模块缺失时会抛出明确错误,避免再次出现 `Cannot destructure property 'Pose' ... as it is undefined`",
|
||||
"同步观看的 relay 缓存时长改为按会话配置,范围 10 秒到 5 分钟,默认 2 分钟;viewer 文案、徽标和设置面板都会实时显示当前缓存窗口",
|
||||
"owner 端合成画布录制改为每 10 秒上传一次 relay 分片,同时继续维持每 60 秒一段的自动归档录像,因此观看端切到短缓存时不需要再等满 60 秒才出现平滑视频",
|
||||
"media 服务会按各自 relay 会话的缓存窗口裁剪预览分段,并在从磁盘恢复旧会话时自动归一化缓存秒数,避免旧数据继续按固定 60 秒窗口工作",
|
||||
"同步端渲染远端 recentSegments 时新增旧快照归一化,`keyFrames`、`issueSummary` 等数组字段缺失时也会自动补默认值,避免再出现 `Cannot read properties of undefined (reading 'length')`",
|
||||
"同步观看界面新增“已累积 / 还需多久才能看到首段回放 / 距离目标缓存还差多少”的提示,观看端不再只显示笼统的等待文案",
|
||||
"线上 smoke 已确认 `https://te.hao.work/` 已经提供本次新构建,而不是旧资源版本;首页、主样式和 `pose` 模块都已切到本次发布的最新资源 revision",
|
||||
],
|
||||
tests: [
|
||||
"cd media && go test ./...",
|
||||
"pnpm vitest run client/src/lib/liveCamera.test.ts",
|
||||
"pnpm check",
|
||||
"pnpm build",
|
||||
"pnpm exec playwright test tests/e2e/app.spec.ts",
|
||||
"playwright-skill 线上 smoke: 登录 `H1` 后访问 `https://te.hao.work/live-camera`,完成校准、启用假摄像头并点击“开始分析”,确认页面进入分析中状态、默认显示“缓存 2 分钟”、且无控制台与页面级错误",
|
||||
"curl -I https://te.hao.work/,并确认首页、主样式与 `pose` 模块资源均返回 `200` 和正确 MIME",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.17-live-camera-relay-buffer",
|
||||
releaseDate: "2026-03-17",
|
||||
repoVersion: "63dbfd2+relay-buffer",
|
||||
summary:
|
||||
"实时分析同步观看改为服务端滚动视频缓存,观看端不再轮询单帧图片;media 服务同时新增最近 60 秒缓冲和 30 分钟缓存清理。",
|
||||
features: [
|
||||
"live-camera owner 端的 60 秒合成录像分段现在会额外上传到 media relay 会话,观看端改为播放服务端生成的滚动 preview 视频,不再依赖 `live-frame.jpg` 单帧轮询",
|
||||
"relay 会话只保留最近 60 秒分段,worker 会在新分段到达后按最新窗口重建 `preview.webm`,避免观看端继续看到旧一分钟缓存",
|
||||
"超过 30 分钟无活动的 relay 会话、分段目录和公开缓存文件会自动清理,避免多端同步长期堆积无用缓存",
|
||||
"实时分析 viewer 文案和占位提示同步调整为“缓冲最近 60 秒视频 / 加载缓存回放”,更贴近现在的服务端缓存播放行为",
|
||||
"media preview 非归档阶段跳过 mp4 转码,Chrome 观看直接使用 webm,降低 worker 处理时延和 CPU 消耗",
|
||||
],
|
||||
tests: [
|
||||
"cd media && go test ./...",
|
||||
"pnpm vitest run client/src/lib/liveCamera.test.ts",
|
||||
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera page exposes camera startup controls|live camera starts analysis and produces scores|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active"',
|
||||
"pnpm check",
|
||||
"pnpm build",
|
||||
"线上 smoke: 部署后确认 `https://te.hao.work/` 已提供新构建而不是旧资源版本,`/live-camera` viewer 端进入“服务端缓存同步”路径并返回正确的 JS/CSS MIME",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.17-live-camera-preview-recovery",
|
||||
releaseDate: "2026-03-17",
|
||||
repoVersion: "06b9701",
|
||||
summary:
|
||||
"修复实时分析页标题乱码、同步观看残留状态导致的黑屏,以及切回本机摄像头后预览无法恢复的问题。",
|
||||
features: [
|
||||
"runtime 标题恢复逻辑新增更严格的乱码筛除与二次 UTF-8 解码兜底,`æœ...`、带替换字符的脏标题现在会优先恢复为正常中文,无法恢复时会安全回退到稳定默认标题",
|
||||
"同步观看退出时会完整重置 viewer 轮询、连接标记和帧版本,不再把旧 viewer 状态残留到 owner 或空闲态,避免页面继续停留在黑屏或“等待同步画面”",
|
||||
"本地摄像头预览新增独立重绑流程和多次 watchdog 重试,即使浏览器在首帧时没有及时绑定 `srcObject` 或 `play()` 被短暂打断,也会自动恢复预览",
|
||||
"视频区域是否显示画面改为按当前 runtime 角色分别判断,避免 viewer 的旧连接状态误导 owner 模式,导致本地没有预览时仍隐藏占位提示",
|
||||
],
|
||||
tests: [
|
||||
"pnpm check",
|
||||
"pnpm vitest run client/src/lib/liveCamera.test.ts",
|
||||
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"',
|
||||
"pnpm build",
|
||||
"线上 smoke: `curl -I https://te.hao.work/`,并检查页面源码中的 `/assets/index-*.js`、`/assets/index-*.css`、`/assets/pose-*.js` 已切换到新构建且返回正确 MIME",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-camera-runtime-refresh",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "8e9e491",
|
||||
summary:
|
||||
"修复实时分析页偶发残留在同步观看状态、标题乱码,以及摄像头预览绑定波动导致的启动失败。",
|
||||
features: [
|
||||
"live-camera 在打开拍摄引导、启用摄像头、开始分析前,都会先向服务端强制刷新 runtime 状态,避免旧的 viewer 锁残留导致本机明明已释放却仍无法启动",
|
||||
"同步观看标题新增乱码恢复逻辑,可自动把 UTF-8 被误按 Latin-1 显示的标题恢复成正常中文,避免出现 `æœ...` 一类异常标题",
|
||||
"摄像头启动链路改为以 `getUserMedia` 成功为准;即使本地预览 `<video>` 的 `srcObject` 或 `play()` 在当前浏览器里短暂失败,也不会直接把整次启动判死",
|
||||
"e2e mock 的媒体流补齐为带假视频轨道的流对象,并把 viewer 回归改为校验“服务端 relay、无 viewer-signal”行为,减少和旧 P2P 逻辑混淆",
|
||||
],
|
||||
tests: [
|
||||
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera page exposes camera startup controls|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active"',
|
||||
"pnpm build",
|
||||
"部署后线上 smoke: `https://te.hao.work/live-camera` 登录 H1 后可见空闲态“启动摄像头”入口,确认不再被残留 viewer 锁卡住;公开站点前端资源为 `assets/index-33wVjC4p.js` 与 `assets/index-tNGuStgv.css`",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-viewer-server-relay",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "bb46d26",
|
||||
summary:
|
||||
"实时分析同步观看改为由 media 服务中转帧图,不再依赖浏览器之间的 P2P 视频连接。",
|
||||
features: [
|
||||
"owner 端现在会把带骨架、关键点和虚拟形象叠层的合成画布压缩成 JPEG 并持续上传到 media 服务",
|
||||
"viewer 端改为直接拉取 media 服务中的最新同步帧图,不再建立 WebRTC viewer peer 连接,因此跨网络和多端观看更稳定",
|
||||
"同步观看模式文案改为明确提示“通过 media 服务中转”,等待同步时也会自动轮询最新画面",
|
||||
"media 服务新增 live-frame 上传与静态分发能力,并记录最近同步帧的更新时间,方便后续扩展成更高频的服务端中转流",
|
||||
],
|
||||
tests: [
|
||||
"cd media && go test ./...",
|
||||
"pnpm build",
|
||||
"playwright-skill 线上 smoke: 先用 media 服务创建 relay session、上传 live-frame,并把 H1 的 `live_analysis_runtime` 注入为 active viewer 场景;随后访问 `https://te.hao.work/live-camera`,确认页面进入“同步观看模式”、同步帧来自 `/media/assets/sessions/.../live-frame.jpg`,且 `viewer-signal` 请求数为 0",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-camera-startup-fallbacks",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "a211562",
|
||||
summary:
|
||||
"修复部分设备上摄像头因后置镜头约束、分辨率约束或麦克风不可用而直接启动失败的问题。",
|
||||
features: [
|
||||
"live-camera 与 recorder 改为共用分级降级的摄像头请求流程,会在当前画质失败时自动降分辨率、降约束并回退到兼容镜头",
|
||||
"当设备不支持默认后置摄像头或当前镜头不可用时,页面会自动切换到实际可用的镜头方向,避免直接报错后卡死在未启动状态",
|
||||
"recorder 预览启动不再被麦克风权限或麦克风设备异常整体拖死;麦克风不可用时会自动回退到仅视频模式",
|
||||
"兼容模式命中时前端会给出明确提示,方便区分“已自动降级成功”与“仍然无法访问摄像头”的场景",
|
||||
],
|
||||
tests: [
|
||||
"pnpm build",
|
||||
"部署后线上 smoke: `https://te.hao.work/` 已提供 `assets/index-CRxtWK07.js` 与 `assets/index-tNGuStgv.css`;通过注入 `getUserMedia` 回归验证 `/live-camera` 首轮高约束失败后会自动切到兼容摄像头模式,`/recorder` 在麦克风不可用时会自动回退到仅视频模式并继续启动预览",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-analysis-viewer-full-sync",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "922a9fb",
|
||||
summary:
|
||||
"多端同步观看改为按持有端快照完整渲染,另一设备可同步看到视频状态、模式、画质、虚拟形象和保存阶段信息。",
|
||||
features: [
|
||||
"viewer 端现在同步显示持有端的会话标题、训练模式、设备端、拍摄视角、画质模式、虚拟形象状态和最近同步时间",
|
||||
"同步观看时的分析阶段、保存阶段、已完成状态也会跟随主端刷新,不再只显示本地默认状态",
|
||||
"viewer 页面会自动关闭拍摄校准弹窗,避免同步观看时被“启用摄像头”流程遮挡",
|
||||
"新增 viewer 同步信息卡,明确允许 1 秒级延迟,并持续显示最近心跳时间",
|
||||
],
|
||||
tests: [
|
||||
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera switches into viewer mode|viewer stream|recorder blocks"',
|
||||
"pnpm build",
|
||||
"部署后线上 smoke: `https://te.hao.work/` 已提供 `assets/index-HRdM3fxq.js` 与 `assets/index-tNGuStgv.css`;同账号 H1 双端登录后,移动端 owner 可开始实时分析,桌面端 `/live-camera` 自动进入同步观看并显示主端信息、同步视频流,owner 点击结束分析后 viewer 会同步进入保存阶段",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-analysis-lock-hardening",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "f9db6ef",
|
||||
summary:
|
||||
"修复同账号多端实时分析在旧登录态下仍可重复占用摄像头的问题,补强同步观看重试、录制页占用锁,并修复部署后启动阶段长时间 502。",
|
||||
features: [
|
||||
"旧用户名登录 token 即使缺少 `sid`,现在也会按 token 本身派生唯一会话标识,不再把不同设备错误识别成同一持有端",
|
||||
"同步观看模式新增 viewer 自动重试:当持有端刚启动推流、viewer 首次连接返回 `viewer stream not ready` 时,会自动重连而不是一直黑屏",
|
||||
"在线录制页接入实时分析占用锁;当其他设备正在 `/live-camera` 分析时,本页会禁止再次启动摄像头和录制",
|
||||
"应用启动改为先监听 HTTP 端口、再后台串行执行教程图同步和标准库预热,修复新容器上线时公网长时间返回 502 的问题",
|
||||
"线上 smoke 已确认 `https://te.hao.work/live-camera` 与 `/recorder` 都已切换到本次新构建,公开站点不再返回 502",
|
||||
],
|
||||
tests: [
|
||||
"curl -I https://te.hao.work/",
|
||||
"pnpm check",
|
||||
"pnpm exec vitest run server/_core/sdk.test.ts server/features.test.ts",
|
||||
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "viewer mode|viewer stream|recorder blocks"',
|
||||
"pnpm build",
|
||||
"线上 smoke: H1 手机端开启实时分析后,PC 端 `/live-camera` 自动进入同步观看并显示同步画面,`/recorder` 禁止启动摄像头;结束分析后会话可正常释放",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-analysis-runtime-migration",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "2b72ef9",
|
||||
summary:
|
||||
"修复实时分析因缺失 `live_analysis_runtime` 表导致的启动失败,并补齐迁移记录避免后续部署再次漏表。",
|
||||
features: [
|
||||
"生产库补建 `live_analysis_runtime` 表,并补写 `__drizzle_migrations` 中缺失的 `0011_live_analysis_runtime` 记录",
|
||||
"仓库内 Drizzle migration journal 补齐 `0011_live_analysis_runtime` 条目,后续 `docker compose` 部署可正确感知该迁移",
|
||||
"实时分析启动链路恢复,`/live-camera` 再次可以读取 runtime 锁并正常进入分析准备流程",
|
||||
"线上 smoke 已确认 `https://te.hao.work/` 正在提供本次新构建,当前前端资源为 `assets/index-B3BN5hY-.js` 与 `assets/index-BL6GQzUF.css`",
|
||||
],
|
||||
tests: [
|
||||
"pnpm check",
|
||||
"pnpm exec vitest run server/features.test.ts",
|
||||
"pnpm build",
|
||||
"docker compose exec -T db mysql ... SHOW TABLES LIKE 'live_analysis_runtime'",
|
||||
"curl -I https://te.hao.work/live-camera",
|
||||
"Playwright smoke: 登录 `H1` 后访问 `/live-camera`,`analysis.runtimeGet` / `analysis.runtimeAcquire` / `analysis.runtimeRelease` 全部返回 200",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-camera-multidevice-viewer",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "4e4122d",
|
||||
summary:
|
||||
"实时分析新增同账号多端互斥和同步观看模式,分析持有端独占摄像头,其它端只能查看同步画面与核心识别结果。",
|
||||
features: [
|
||||
"同一账号在 `/live-camera` 进入实时分析后,会写入按用户维度的 runtime 锁,其他设备不能重复启动摄像头或分析",
|
||||
"其他设备会自动进入“同步观看模式”,可订阅持有端的实时画面,并同步看到动作、评分、反馈、最近片段和归档段数",
|
||||
"同步观看复用 media 服务的 WebRTC viewer 通道,传输的是带骨架、关键点和虚拟形象覆盖后的合成画面",
|
||||
"runtime 锁按 session sid 区分持有端,兼容缺少 sid 的旧 token,超过 15 秒无心跳会自动判定为陈旧并释放",
|
||||
"线上 smoke 已确认 `https://te.hao.work/live-camera` 已切换到本次新构建,公开站点正在提供这次发布的最新前端资源",
|
||||
],
|
||||
tests: [
|
||||
"pnpm check",
|
||||
"pnpm exec vitest run server/features.test.ts",
|
||||
"go test ./... && go build ./... (media)",
|
||||
"pnpm build",
|
||||
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"',
|
||||
'pnpm exec playwright test tests/e2e/app.spec.ts --grep "recorder flow archives a session and exposes it in videos"',
|
||||
"curl -I https://te.hao.work/live-camera",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.16-live-analysis-overlay-archive",
|
||||
releaseDate: "2026-03-16",
|
||||
repoVersion: "4fb2d09",
|
||||
summary:
|
||||
"实时分析新增 60 秒自动归档录像,录制内容会保留骨架、关键点和虚拟形象叠层,并同步进入视频库。",
|
||||
features: [
|
||||
"实时分析开始后会自动录制合成画布,每 60 秒自动切段归档",
|
||||
"归档录像会保留原视频、骨架线、关键点和当前虚拟形象覆盖效果",
|
||||
"归档片段会自动写入视频库,标签显示为“实时分析”",
|
||||
"删除视频库中的实时分析录像时,不会删除已写入的实时分析数据和训练记录",
|
||||
"线上 smoke 已确认 `https://te.hao.work/` 已切换到本次新构建,`/live-camera`、`/videos`、`/changelog` 页面均可正常访问",
|
||||
],
|
||||
tests: [
|
||||
"pnpm check",
|
||||
"pnpm test",
|
||||
"pnpm build",
|
||||
"pnpm test:e2e",
|
||||
"Playwright smoke: 真实站点登录 H1,完成 /live-camera 引导、开始/结束分析,并确认 /videos 可见实时分析条目",
|
||||
],
|
||||
},
|
||||
{
|
||||
version: "2026.03.15-live-analysis-leave-hint",
|
||||
releaseDate: "2026-03-15",
|
||||
repoVersion: "5c2dcf2",
|
||||
summary:
|
||||
"实时分析结束后增加离开提示,明确何时必须停留、何时可以安全关闭或切页。",
|
||||
features: [
|
||||
"分析进行中显示“不要关闭或切走页面”提示",
|
||||
"结束分析后保存阶段显示“请暂时停留当前页面”提示",
|
||||
"保存成功后明确提示“现在可以关闭浏览器或切换到其他页面”",
|
||||
"分析中和保存中挂接 beforeunload 提醒,减少误关页面导致的数据丢失",
|
||||
],
|
||||
tests: ["pnpm check", "pnpm build"],
|
||||
},
|
||||
{
|
||||
version: "2026.03.15-training-generator-collapse",
|
||||
releaseDate: "2026-03-15",
|
||||
repoVersion: "1ce94f6",
|
||||
summary: "训练计划生成面板在桌面端默认折叠到右侧,按需展开查看和重新生成。",
|
||||
features: [
|
||||
"训练页右侧生成器在桌面端默认折叠为窄栏",
|
||||
"点击右侧折叠栏可展开“重新生成计划”完整面板",
|
||||
"移动端继续直接展示完整生成器,避免隐藏关键操作",
|
||||
"未生成计划时点击“前往生成训练计划”会自动展开并滚动到生成面板",
|
||||
],
|
||||
tests: ["pnpm check", "pnpm build"],
|
||||
},
|
||||
{
|
||||
version: "2026.03.15-progress-time-actions",
|
||||
releaseDate: "2026-03-15",
|
||||
repoVersion: "71caf0d",
|
||||
summary: "最近训练记录默认显示具体上海时间,并直接展示录制动作数据摘要。",
|
||||
features: [
|
||||
"最近训练记录摘要行默认显示到秒的 Asia/Shanghai 时间",
|
||||
"录制记录列表直接展示主动作和前 3 个动作统计,无需先展开",
|
||||
"展开态动作明细统一用中文动作标签展示",
|
||||
"提醒页通知时间统一切换为 Asia/Shanghai",
|
||||
],
|
||||
tests: ["pnpm check", "pnpm build"],
|
||||
},
|
||||
{
|
||||
version: "2026.03.15-session-changelog",
|
||||
releaseDate: "2026-03-15",
|
||||
@@ -58,7 +363,7 @@ export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
|
||||
],
|
||||
tests: [
|
||||
"pnpm check",
|
||||
"pnpm exec vitest run server/features.test.ts -t \"video\\\\.\"",
|
||||
'pnpm exec vitest run server/features.test.ts -t "video\\\\."',
|
||||
"Playwright 真实站点完成 /videos 新增-编辑-删除全链路",
|
||||
],
|
||||
},
|
||||
@@ -73,8 +378,6 @@ export const CHANGE_LOG_ENTRIES: ChangeLogEntry[] = [
|
||||
"训练提醒通知",
|
||||
"通知历史管理",
|
||||
],
|
||||
tests: [
|
||||
"教程库、提醒、通知相关测试通过",
|
||||
],
|
||||
tests: ["教程库、提醒、通知相关测试通过"],
|
||||
},
|
||||
];
|
||||
|
||||
129
client/src/lib/liveCamera.test.ts
普通文件
@@ -0,0 +1,129 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
ACTION_WINDOW_FRAMES,
|
||||
AVATAR_PRESETS,
|
||||
createStableActionState,
|
||||
getAvatarAnchors,
|
||||
getAvatarPreset,
|
||||
resolveAvatarKeyFromPrompt,
|
||||
stabilizeActionStream,
|
||||
type FrameActionSample,
|
||||
} from "./liveCamera";
|
||||
|
||||
function feedSamples(samples: Array<Omit<FrameActionSample, "timestamp">>, intervalMs = 33) {
|
||||
const history: FrameActionSample[] = [];
|
||||
const state = createStableActionState();
|
||||
let lastResult = null as ReturnType<typeof stabilizeActionStream> | null;
|
||||
|
||||
samples.forEach((sample, index) => {
|
||||
lastResult = stabilizeActionStream(
|
||||
{
|
||||
...sample,
|
||||
timestamp: index * intervalMs,
|
||||
},
|
||||
history,
|
||||
state,
|
||||
);
|
||||
});
|
||||
|
||||
return { history, state, lastResult };
|
||||
}
|
||||
|
||||
describe("live camera action stabilizer", () => {
|
||||
it("locks a dominant action after a full temporal window", () => {
|
||||
const samples = Array.from({ length: ACTION_WINDOW_FRAMES * 2 }, () => ({
|
||||
action: "forehand" as const,
|
||||
confidence: 0.84,
|
||||
}));
|
||||
const { lastResult } = feedSamples(samples);
|
||||
|
||||
expect(lastResult?.stableAction).toBe("forehand");
|
||||
expect(lastResult?.windowAction).toBe("forehand");
|
||||
expect(lastResult?.pending).toBe(false);
|
||||
expect(lastResult?.windowShare).toBeGreaterThan(0.9);
|
||||
});
|
||||
|
||||
it("ignores brief action spikes and keeps the stable action", () => {
|
||||
const stableFrames = Array.from({ length: ACTION_WINDOW_FRAMES * 2 }, () => ({
|
||||
action: "forehand" as const,
|
||||
confidence: 0.82,
|
||||
}));
|
||||
const noisyFrames = Array.from({ length: 5 }, () => ({
|
||||
action: "backhand" as const,
|
||||
confidence: 0.88,
|
||||
}));
|
||||
const { lastResult } = feedSamples([...stableFrames, ...noisyFrames]);
|
||||
|
||||
expect(lastResult?.stableAction).toBe("forehand");
|
||||
expect(lastResult?.pending).toBe(false);
|
||||
});
|
||||
|
||||
it("switches only after the next action persists long enough", () => {
|
||||
const forehandFrames = Array.from({ length: ACTION_WINDOW_FRAMES * 2 }, () => ({
|
||||
action: "forehand" as const,
|
||||
confidence: 0.8,
|
||||
}));
|
||||
const backhandFrames = Array.from({ length: ACTION_WINDOW_FRAMES * 2 }, () => ({
|
||||
action: "backhand" as const,
|
||||
confidence: 0.85,
|
||||
}));
|
||||
const { lastResult, state } = feedSamples([...forehandFrames, ...backhandFrames]);
|
||||
|
||||
expect(lastResult?.stableAction).toBe("backhand");
|
||||
expect(state.switchCount).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
|
||||
it("requires a longer delay before falling back to unknown", () => {
|
||||
const forehandFrames = Array.from({ length: ACTION_WINDOW_FRAMES * 2 }, () => ({
|
||||
action: "forehand" as const,
|
||||
confidence: 0.83,
|
||||
}));
|
||||
const unknownFrames = Array.from({ length: 10 }, () => ({
|
||||
action: "unknown" as const,
|
||||
confidence: 0.4,
|
||||
}));
|
||||
const { lastResult } = feedSamples([...forehandFrames, ...unknownFrames]);
|
||||
|
||||
expect(lastResult?.stableAction).toBe("forehand");
|
||||
});
|
||||
});
|
||||
|
||||
describe("live camera avatar helpers", () => {
|
||||
it("maps prompt keywords into avatar presets", () => {
|
||||
expect(resolveAvatarKeyFromPrompt("切换成猩猩形象", "gorilla")).toBe("gorilla");
|
||||
expect(resolveAvatarKeyFromPrompt("dog mascot", "gorilla")).toBe("dog");
|
||||
expect(resolveAvatarKeyFromPrompt("狐狸风格", "gorilla")).toBe("fox");
|
||||
expect(resolveAvatarKeyFromPrompt("兔子教练", "gorilla")).toBe("rabbit");
|
||||
expect(resolveAvatarKeyFromPrompt("BeachKing 3D 替身", "gorilla")).toBe("beachKing");
|
||||
expect(resolveAvatarKeyFromPrompt("Juanita avatar", "gorilla")).toBe("juanita3d");
|
||||
expect(resolveAvatarKeyFromPrompt("", "pig")).toBe("pig");
|
||||
});
|
||||
|
||||
it("exposes full-body 3d avatar examples with CC0 metadata", () => {
|
||||
const presets = AVATAR_PRESETS.filter((preset) => preset.category === "full-body-3d");
|
||||
|
||||
expect(presets).toHaveLength(4);
|
||||
expect(presets.every((preset) => preset.license === "CC0")).toBe(true);
|
||||
expect(getAvatarPreset("sportTv")?.modelUrl).toContain("arweave.net");
|
||||
});
|
||||
|
||||
it("builds avatar anchors from pose landmarks", () => {
|
||||
const landmarks = Array.from({ length: 33 }, () => ({ x: 0.5, y: 0.5, visibility: 0.95 }));
|
||||
landmarks[0] = { x: 0.5, y: 0.16, visibility: 0.99 };
|
||||
landmarks[11] = { x: 0.4, y: 0.3, visibility: 0.99 };
|
||||
landmarks[12] = { x: 0.6, y: 0.3, visibility: 0.99 };
|
||||
landmarks[15] = { x: 0.28, y: 0.44, visibility: 0.99 };
|
||||
landmarks[16] = { x: 0.72, y: 0.44, visibility: 0.99 };
|
||||
landmarks[23] = { x: 0.44, y: 0.58, visibility: 0.99 };
|
||||
landmarks[24] = { x: 0.56, y: 0.58, visibility: 0.99 };
|
||||
landmarks[27] = { x: 0.43, y: 0.92, visibility: 0.99 };
|
||||
landmarks[28] = { x: 0.57, y: 0.92, visibility: 0.99 };
|
||||
|
||||
const anchors = getAvatarAnchors(landmarks, 1280, 720);
|
||||
|
||||
expect(anchors).not.toBeNull();
|
||||
expect(anchors?.headRadius).toBeGreaterThan(30);
|
||||
expect(anchors?.bodyHeight).toBeGreaterThan(120);
|
||||
expect(anchors?.rightHandX).toBeGreaterThan(anchors?.leftHandX || 0);
|
||||
});
|
||||
});
|
||||
744
client/src/lib/liveCamera.ts
普通文件
@@ -0,0 +1,744 @@
|
||||
// Action labels the live classifier can emit; "unknown" doubles as the idle/fallback label.
export type LiveActionType = "forehand" | "backhand" | "serve" | "volley" | "overhead" | "slice" | "lob" | "unknown";

// One pose landmark. Coordinates appear to be normalized 0..1 (the test fixtures pass
// fractional x/y that getAvatarAnchors scales by pixel width/height) — confirm against
// the pose producer.
export type PosePoint = {
  x: number;
  y: number;
  visibility?: number;
};

// Identifier for every built-in avatar: lightweight animal badges plus CC0 full-body
// 3D examples (see AVATAR_PRESETS for metadata).
export type AvatarKey =
  | "gorilla"
  | "monkey"
  | "dog"
  | "pig"
  | "cat"
  | "fox"
  | "panda"
  | "lion"
  | "tiger"
  | "rabbit"
  | "beachKing"
  | "jenny3d"
  | "juanita3d"
  | "sportTv";

// Preset grouping: simple animal badge vs. full-body 3D cutout.
export type AvatarCategory = "animal" | "full-body-3d";

// Catalog entry describing one selectable avatar.
export type AvatarPreset = {
  key: AvatarKey;
  // Human-readable display name (Chinese for animal presets).
  label: string;
  category: AvatarCategory;
  // Free-text keywords used for prompt matching (see resolveAvatarKeyFromPrompt tests).
  keywords: string[];
  description?: string;
  // Licensing/provenance metadata; only populated for full-body-3d presets.
  collection?: string;
  license?: string;
  sourceUrl?: string;
  modelUrl?: string;
};

// Whether and which avatar overlay is currently rendered.
export type AvatarRenderState = {
  enabled: boolean;
  avatarKey: AvatarKey;
  customLabel?: string;
};

// One per-frame classifier reading fed into the stabilizer.
export type FrameActionSample = {
  action: LiveActionType;
  confidence: number;
  // Milliseconds (the tests stamp samples at a fixed frame interval).
  timestamp: number;
};

// Mutable state threaded through stabilizeActionStream across frames.
export type StableActionState = {
  // Currently locked (stable) action.
  current: LiveActionType;
  // Timestamp when `current` took over; null before the first lock — presumably
  // maintained by stabilizeActionStream (body not fully visible here), confirm.
  currentSince: number | null;
  // Action that is trying to replace `current`, if any.
  candidate: LiveActionType | null;
  candidateSince: number | null;
  candidateWindows: number;
  // Total number of stable-action switches so far.
  switchCount: number;
};

// Snapshot of stabilizer output for one processed frame.
export type StabilizedActionMeta = {
  stableAction: LiveActionType;
  stableConfidence: number;
  // Winner of the rolling window vote (may differ from stableAction while pending).
  windowAction: LiveActionType;
  windowConfidence: number;
  // Fraction of window frames occupied by windowAction (0..1).
  windowShare: number;
  windowFrames: number;
  windowProgress: number;
  // True while a candidate action is accumulating evidence but not yet locked.
  pending: boolean;
  pendingAction: LiveActionType | null;
  stableMs: number;
  candidateMs: number;
  // Fraction of adjacent-frame action changes in the window (see getRawVolatility).
  rawVolatility: number;
  switchCount: number;
};

// Per-action aggregate over the sample window (computed by getActionStat).
type ActionStat = {
  count: number;
  totalConfidence: number;
  // count / window size.
  share: number;
  averageConfidence: number;
  // Blended score: share * 0.7 + averageConfidence * 0.3.
  strength: number;
};

// Pixel-space anchor points for drawing an avatar over the pose — derived from
// landmarks scaled by the frame dimensions (see getAvatarAnchors test), confirm
// exact derivation in the (not fully visible) implementation.
type AvatarAnchors = {
  headX: number;
  headY: number;
  headRadius: number;
  bodyX: number;
  bodyY: number;
  bodyWidth: number;
  bodyHeight: number;
  shoulderY: number;
  footY: number;
  leftHandX: number;
  leftHandY: number;
  rightHandX: number;
  rightHandY: number;
};

// Rendering spec for one avatar: sprite path plus overlay colors and draw mode.
type AvatarVisualSpec = {
  src: string;
  bodyFill: string;
  limbStroke: string;
  glow: string;
  // "badge" draws a small sprite over a stylized body; "full-figure" draws a
  // full-body cutout — exact drawing code is not visible in this chunk.
  renderMode: "badge" | "full-figure";
  // Scale/vertical-offset tweaks, only used by full-figure avatars.
  figureScale?: number;
  figureOffsetY?: number;
};
|
||||
|
||||
// All action labels, in the priority order used for tie-breaking in getWindowAction.
const ACTIONS: LiveActionType[] = ["forehand", "backhand", "serve", "volley", "overhead", "slice", "lob", "unknown"];

// Size of the rolling per-frame sample window the stabilizer votes over.
export const ACTION_WINDOW_FRAMES = 24;
// Minimum fraction of window frames the winning action must occupy to qualify.
const ACTION_WINDOW_MIN_SHARE = 0.6;
// Minimum average confidence for the winning action to qualify.
const ACTION_WINDOW_MIN_CONFIDENCE = 0.58;
// NOTE(review): the constants below are presumably consumed by stabilizeActionStream,
// whose body is not fully visible in this chunk — descriptions inferred from names,
// confirm against that function.
const ACTION_SWITCH_MIN_MS = 700;
const ACTION_UNKNOWN_MIN_MS = 900;
const ACTION_LOCK_IN_WINDOWS = 2;
const ACTION_SWITCH_DELTA = 0.12;
|
||||
|
||||
// Catalog of selectable avatar presets. `keywords` are matched against free-text
// prompts (see resolveAvatarKeyFromPrompt tests); the full-body-3d entries carry
// CC0 licensing/provenance metadata from the Open Source Avatars registry.
export const AVATAR_PRESETS: AvatarPreset[] = [
  { key: "gorilla", label: "猩猩", category: "animal", keywords: ["gorilla", "ape", "猩猩", "猩", "大猩猩"], description: "轻量动物替身,移动端负担最低。" },
  { key: "monkey", label: "猴子", category: "animal", keywords: ["monkey", "ape", "猴", "猴子"], description: "轻量动物替身,适合快速练习。" },
  { key: "dog", label: "狗", category: "animal", keywords: ["dog", "puppy", "犬", "狗", "小狗"], description: "轻量动物替身,覆盖速度快。" },
  { key: "pig", label: "猪", category: "animal", keywords: ["pig", "猪", "小猪"], description: "轻量动物替身,适合低端设备。" },
  { key: "cat", label: "猫", category: "animal", keywords: ["cat", "kitty", "猫", "小猫"], description: "轻量动物替身,适合低码率录制。" },
  { key: "fox", label: "狐狸", category: "animal", keywords: ["fox", "狐狸"], description: "轻量动物替身,动作切换反馈清晰。" },
  { key: "panda", label: "熊猫", category: "animal", keywords: ["panda", "熊猫"], description: "轻量动物替身,适合直播预览。" },
  { key: "lion", label: "狮子", category: "animal", keywords: ["lion", "狮子"], description: "轻量动物替身,轮廓感更强。" },
  { key: "tiger", label: "老虎", category: "animal", keywords: ["tiger", "虎", "老虎"], description: "轻量动物替身,适合训练 PK。" },
  { key: "rabbit", label: "兔子", category: "animal", keywords: ["rabbit", "bunny", "兔", "兔子"], description: "轻量动物替身,适合日常训练。" },
  // Full-body 3D examples below: CC0 assets from the "100Avatars R3" collection.
  {
    key: "beachKing",
    label: "BeachKing",
    category: "full-body-3d",
    keywords: ["beachking", "beach king", "海滩王", "3d beach", "beach avatar"],
    description: "CC0 全身 3D 示例,适合覆盖竖屏全身站姿。",
    collection: "100Avatars R3",
    license: "CC0",
    sourceUrl: "https://github.com/ToxSam/open-source-avatars",
    modelUrl: "https://arweave.net/uKhDMselhdUyeJKjelpuVsL8s-a9v_Wqq75TQfCfnos",
  },
  {
    key: "jenny3d",
    label: "Jenny",
    category: "full-body-3d",
    keywords: ["jenny", "frog coach", "青蛙教练", "3d jenny", "jenny avatar"],
    description: "CC0 全身 3D 示例,适合想要更完整人物轮廓时使用。",
    collection: "100Avatars R3",
    license: "CC0",
    sourceUrl: "https://github.com/ToxSam/open-source-avatars",
    modelUrl: "https://arweave.net/kgTirc4OvUWbJhIKC2CB3_pYsYuB62KTj90IdE8s3sk",
  },
  {
    key: "juanita3d",
    label: "Juanita",
    category: "full-body-3d",
    keywords: ["juanita", "粉发学员", "pink avatar", "3d juanita", "juanita avatar"],
    description: "CC0 全身 3D 示例,适合教学演示和移动端预览。",
    collection: "100Avatars R3",
    license: "CC0",
    sourceUrl: "https://github.com/ToxSam/open-source-avatars",
    modelUrl: "https://arweave.net/nyMyZZx5lN2DXsmBgbGQSnt3PuXYN7AAjz9QJrjitLo",
  },
  {
    key: "sportTv",
    label: "SportTV",
    category: "full-body-3d",
    keywords: ["sporttv", "sport tv", "屏幕街头", "tv avatar", "hoodie avatar"],
    description: "CC0 全身 3D 示例,适合训练空间较宽的画面。",
    collection: "100Avatars R3",
    license: "CC0",
    sourceUrl: "https://github.com/ToxSam/open-source-avatars",
    modelUrl: "https://arweave.net/ISYr7xBXT_s4tLddbhFB3PpUhWg-H_BYs2UZhVLF1hA",
  },
];
|
||||
|
||||
// Per-avatar rendering spec keyed by AvatarKey: sprite asset path plus overlay
// colors. Animal presets render as "badge" sprites (Twemoji SVGs); the 3D
// presets render as "full-figure" WebP cutouts with per-avatar scale/offset
// tweaks. The drawing code that consumes these specs is not visible in this
// chunk — color roles (bodyFill/limbStroke/glow) inferred from names, confirm
// against the renderer.
const AVATAR_VISUALS: Record<AvatarKey, AvatarVisualSpec> = {
  gorilla: {
    src: "/avatars/twemoji/gorilla.svg",
    bodyFill: "rgba(39,39,42,0.95)",
    limbStroke: "rgba(63,63,70,0.92)",
    glow: "rgba(161,161,170,0.32)",
    renderMode: "badge",
  },
  monkey: {
    src: "/avatars/twemoji/monkey.svg",
    bodyFill: "rgba(120,53,15,0.95)",
    limbStroke: "rgba(146,64,14,0.9)",
    glow: "rgba(180,83,9,0.3)",
    renderMode: "badge",
  },
  dog: {
    src: "/avatars/twemoji/dog.svg",
    bodyFill: "rgba(180,83,9,0.93)",
    limbStroke: "rgba(180,83,9,0.88)",
    glow: "rgba(217,119,6,0.26)",
    renderMode: "badge",
  },
  pig: {
    src: "/avatars/twemoji/pig.svg",
    bodyFill: "rgba(244,114,182,0.92)",
    limbStroke: "rgba(244,114,182,0.86)",
    glow: "rgba(244,114,182,0.28)",
    renderMode: "badge",
  },
  cat: {
    src: "/avatars/twemoji/cat.svg",
    bodyFill: "rgba(245,158,11,0.92)",
    limbStroke: "rgba(217,119,6,0.88)",
    glow: "rgba(251,191,36,0.28)",
    renderMode: "badge",
  },
  fox: {
    src: "/avatars/twemoji/fox.svg",
    bodyFill: "rgba(234,88,12,0.93)",
    limbStroke: "rgba(194,65,12,0.9)",
    glow: "rgba(251,146,60,0.3)",
    renderMode: "badge",
  },
  panda: {
    src: "/avatars/twemoji/panda.svg",
    bodyFill: "rgba(82,82,91,0.92)",
    limbStroke: "rgba(39,39,42,0.9)",
    glow: "rgba(228,228,231,0.28)",
    renderMode: "badge",
  },
  lion: {
    src: "/avatars/twemoji/lion.svg",
    bodyFill: "rgba(202,138,4,0.92)",
    limbStroke: "rgba(161,98,7,0.9)",
    glow: "rgba(250,204,21,0.28)",
    renderMode: "badge",
  },
  tiger: {
    src: "/avatars/twemoji/tiger.svg",
    bodyFill: "rgba(249,115,22,0.94)",
    limbStroke: "rgba(194,65,12,0.9)",
    glow: "rgba(251,146,60,0.3)",
    renderMode: "badge",
  },
  rabbit: {
    src: "/avatars/twemoji/rabbit.svg",
    bodyFill: "rgba(236,72,153,0.9)",
    limbStroke: "rgba(219,39,119,0.86)",
    glow: "rgba(244,114,182,0.28)",
    renderMode: "badge",
  },
  beachKing: {
    src: "/avatars/opensource3d/beach-king.webp",
    bodyFill: "rgba(15,23,42,0.16)",
    limbStroke: "rgba(125,211,252,0.28)",
    glow: "rgba(56,189,248,0.16)",
    renderMode: "full-figure",
    figureScale: 1.12,
    figureOffsetY: 0.02,
  },
  jenny3d: {
    src: "/avatars/opensource3d/jenny.webp",
    bodyFill: "rgba(34,197,94,0.16)",
    limbStroke: "rgba(16,185,129,0.24)",
    glow: "rgba(34,197,94,0.18)",
    renderMode: "full-figure",
    figureScale: 1.08,
    figureOffsetY: 0,
  },
  juanita3d: {
    src: "/avatars/opensource3d/juanita.webp",
    bodyFill: "rgba(244,114,182,0.14)",
    limbStroke: "rgba(236,72,153,0.26)",
    glow: "rgba(244,114,182,0.18)",
    renderMode: "full-figure",
    figureScale: 1.06,
    figureOffsetY: 0,
  },
  sportTv: {
    src: "/avatars/opensource3d/sport-tv.webp",
    bodyFill: "rgba(59,130,246,0.14)",
    limbStroke: "rgba(96,165,250,0.24)",
    glow: "rgba(96,165,250,0.18)",
    renderMode: "full-figure",
    figureScale: 1.1,
    figureOffsetY: 0.02,
  },
};
|
||||
|
||||
// Per-avatar cache of loaded sprite images; null presumably marks a failed
// load — confirm where entries are written (loader not visible in this chunk).
const avatarImageCache = new Map<AvatarKey, HTMLImageElement | null>();
|
||||
|
||||
export function getAvatarPreset(key: AvatarKey) {
|
||||
return AVATAR_PRESETS.find((preset) => preset.key === key) ?? AVATAR_PRESETS[0];
|
||||
}
|
||||
|
||||
function clamp(value: number, min: number, max: number) {
|
||||
return Math.max(min, Math.min(max, value));
|
||||
}
|
||||
|
||||
function getActionStat(samples: FrameActionSample[], action: LiveActionType): ActionStat {
|
||||
const matches = samples.filter((sample) => sample.action === action);
|
||||
const count = matches.length;
|
||||
const totalConfidence = matches.reduce((sum, sample) => sum + sample.confidence, 0);
|
||||
const share = samples.length > 0 ? count / samples.length : 0;
|
||||
const averageConfidence = count > 0 ? totalConfidence / count : 0;
|
||||
|
||||
return {
|
||||
count,
|
||||
totalConfidence,
|
||||
share,
|
||||
averageConfidence,
|
||||
strength: share * 0.7 + averageConfidence * 0.3,
|
||||
};
|
||||
}
|
||||
|
||||
function getWindowAction(samples: FrameActionSample[]) {
|
||||
const stats = new Map<LiveActionType, ActionStat>();
|
||||
ACTIONS.forEach((action) => {
|
||||
stats.set(action, getActionStat(samples, action));
|
||||
});
|
||||
|
||||
const ranked = ACTIONS
|
||||
.map((action) => ({ action, stats: stats.get(action)! }))
|
||||
.sort((a, b) => {
|
||||
if (b.stats.strength !== a.stats.strength) {
|
||||
return b.stats.strength - a.stats.strength;
|
||||
}
|
||||
return b.stats.totalConfidence - a.stats.totalConfidence;
|
||||
});
|
||||
|
||||
const winner = ranked[0] ?? { action: "unknown" as LiveActionType, stats: stats.get("unknown")! };
|
||||
const qualifies =
|
||||
winner.stats.share >= ACTION_WINDOW_MIN_SHARE &&
|
||||
winner.stats.averageConfidence >= ACTION_WINDOW_MIN_CONFIDENCE;
|
||||
|
||||
return {
|
||||
action: qualifies ? winner.action : "unknown",
|
||||
stats,
|
||||
winnerStats: winner.stats,
|
||||
};
|
||||
}
|
||||
|
||||
function getRawVolatility(samples: FrameActionSample[]) {
|
||||
if (samples.length <= 1) return 0;
|
||||
let switches = 0;
|
||||
for (let index = 1; index < samples.length; index += 1) {
|
||||
if (samples[index]?.action !== samples[index - 1]?.action) {
|
||||
switches += 1;
|
||||
}
|
||||
}
|
||||
return switches / (samples.length - 1);
|
||||
}
|
||||
|
||||
export function createStableActionState(initial: LiveActionType = "unknown"): StableActionState {
|
||||
return {
|
||||
current: initial,
|
||||
currentSince: null,
|
||||
candidate: null,
|
||||
candidateSince: null,
|
||||
candidateWindows: 0,
|
||||
switchCount: 0,
|
||||
};
|
||||
}
|
||||
|
||||
export function createEmptyStabilizedActionMeta(): StabilizedActionMeta {
|
||||
return {
|
||||
stableAction: "unknown",
|
||||
stableConfidence: 0,
|
||||
windowAction: "unknown",
|
||||
windowConfidence: 0,
|
||||
windowShare: 0,
|
||||
windowFrames: 0,
|
||||
windowProgress: 0,
|
||||
pending: false,
|
||||
pendingAction: null,
|
||||
stableMs: 0,
|
||||
candidateMs: 0,
|
||||
rawVolatility: 0,
|
||||
switchCount: 0,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Feeds one classified frame into the action stabilizer and returns the
 * debounced view of the stream.
 *
 * Mutates both `history` (bounded ring of recent samples, trimmed to
 * ACTION_WINDOW_FRAMES) and `state` (hysteresis bookkeeping). A new window
 * winner only replaces `state.current` after it has won enough consecutive
 * windows (ACTION_LOCK_IN_WINDOWS), persisted long enough (pendingMinMs),
 * and beaten the incumbent's strength by ACTION_SWITCH_DELTA — this is what
 * suppresses frame-to-frame flicker in the live overlay.
 *
 * @param sample  Latest per-frame classification (action + confidence + timestamp).
 * @param history Caller-owned rolling sample buffer; mutated in place.
 * @param state   Caller-owned stabilizer state; mutated in place.
 * @returns A StabilizedActionMeta snapshot describing both the stable action
 *          and the raw window diagnostics.
 */
export function stabilizeActionStream(
  sample: FrameActionSample,
  history: FrameActionSample[],
  state: StableActionState,
) {
  // Append and trim the rolling window to its fixed frame budget.
  history.push(sample);
  if (history.length > ACTION_WINDOW_FRAMES) {
    history.splice(0, history.length - ACTION_WINDOW_FRAMES);
  }

  const { action: windowAction, stats } = getWindowAction(history);
  const windowStats = stats.get(windowAction) ?? getActionStat(history, "unknown");
  const currentStats = stats.get(state.current) ?? getActionStat(history, state.current);
  // Switching *to* "unknown" uses a separate (presumably longer) dwell time.
  const pendingMinMs = windowAction === "unknown" ? ACTION_UNKNOWN_MIN_MS : ACTION_SWITCH_MIN_MS;
  const windowProgress = clamp(history.length / ACTION_WINDOW_FRAMES, 0, 1);

  if (state.currentSince == null) {
    state.currentSince = sample.timestamp;
  }

  if (windowAction === state.current) {
    // Window agrees with the stable action: drop any pending candidate.
    state.candidate = null;
    state.candidateSince = null;
    state.candidateWindows = 0;
  } else if (windowProgress >= 0.7) {
    // Only consider switching once the window is at least 70% full,
    // so early sparse samples cannot trigger a change.
    if (state.candidate !== windowAction) {
      state.candidate = windowAction;
      state.candidateSince = sample.timestamp;
      state.candidateWindows = 1;
    } else {
      state.candidateWindows += 1;
    }

    const candidateStats = stats.get(windowAction) ?? getActionStat(history, windowAction);
    // An "unknown" incumbent is easier to displace (strength discounted).
    const currentStrength = state.current === "unknown" ? currentStats.strength * 0.55 : currentStats.strength;
    const candidateDuration = state.candidateSince == null ? 0 : sample.timestamp - state.candidateSince;
    const canSwitch =
      state.candidateWindows >= ACTION_LOCK_IN_WINDOWS &&
      candidateDuration >= pendingMinMs &&
      candidateStats.strength >= currentStrength + ACTION_SWITCH_DELTA;

    if (canSwitch) {
      // Promote the candidate and reset the hysteresis bookkeeping.
      state.current = windowAction;
      state.currentSince = sample.timestamp;
      state.candidate = null;
      state.candidateSince = null;
      state.candidateWindows = 0;
      state.switchCount += 1;
    }
  }

  const stableStats = stats.get(state.current) ?? getActionStat(history, state.current);
  // Confidence reporting: for "unknown", blend the live sample in; otherwise
  // take the better of the stable average and a discounted window average.
  const stableConfidence = state.current === "unknown"
    ? Math.max(sample.confidence * 0.45, stableStats.averageConfidence)
    : Math.max(stableStats.averageConfidence, windowStats.averageConfidence * 0.88);

  return {
    stableAction: state.current,
    stableConfidence: clamp(stableConfidence, 0, 1),
    windowAction,
    windowConfidence: clamp(windowStats.averageConfidence, 0, 1),
    windowShare: clamp(windowStats.share, 0, 1),
    windowFrames: history.length,
    windowProgress,
    pending: Boolean(state.candidate),
    pendingAction: state.candidate,
    stableMs: state.currentSince == null ? 0 : sample.timestamp - state.currentSince,
    candidateMs: state.candidateSince == null ? 0 : sample.timestamp - state.candidateSince,
    rawVolatility: getRawVolatility(history),
    switchCount: state.switchCount,
  } satisfies StabilizedActionMeta;
}
|
||||
|
||||
export function resolveAvatarKeyFromPrompt(prompt: string, fallback: AvatarKey): AvatarKey {
|
||||
const normalized = prompt.trim().toLowerCase();
|
||||
if (!normalized) return fallback;
|
||||
const matched = AVATAR_PRESETS.find((preset) => preset.keywords.some((keyword) => normalized.includes(keyword)));
|
||||
return matched?.key ?? fallback;
|
||||
}
|
||||
|
||||
function averagePoint(a: PosePoint | undefined, b: PosePoint | undefined, defaultX: number, defaultY: number) {
|
||||
return {
|
||||
x: ((a?.x ?? defaultX) + (b?.x ?? defaultX)) / 2,
|
||||
y: ((a?.y ?? defaultY) + (b?.y ?? defaultY)) / 2,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Derives pixel-space anchor geometry (head, torso box, hands, feet) for
 * avatar rendering from normalized [0..1] pose landmarks.
 *
 * Landmark indices used here (0 nose, 7/8 ears, 11/12 shoulders, 15/16
 * wrists, 23/24 hips, 27/28 ankles) appear to follow MediaPipe Pose
 * ordering — TODO confirm against the pose-detection source.
 *
 * @param landmarks Normalized pose landmarks.
 * @param width     Canvas width in pixels.
 * @param height    Canvas height in pixels.
 * @returns Anchors in pixel coordinates, or null when any core landmark
 *          (nose, both shoulders, both hips) is missing.
 */
export function getAvatarAnchors(landmarks: PosePoint[], width: number, height: number): AvatarAnchors | null {
  const nose = landmarks[0];
  const leftShoulder = landmarks[11];
  const rightShoulder = landmarks[12];
  const leftHip = landmarks[23];
  const rightHip = landmarks[24];
  const leftWrist = landmarks[15];
  const rightWrist = landmarks[16];
  const leftAnkle = landmarks[27];
  const rightAnkle = landmarks[28];
  const leftEar = landmarks[7];
  const rightEar = landmarks[8];

  // Without the torso core there is nothing stable to anchor the avatar to.
  if (!nose || !leftShoulder || !rightShoulder || !leftHip || !rightHip) {
    return null;
  }

  // Centers fall back to rough normalized screen positions when optional
  // landmarks are absent (e.g. ankles out of frame).
  const shoulderCenter = averagePoint(leftShoulder, rightShoulder, 0.5, 0.32);
  const hipCenter = averagePoint(leftHip, rightHip, 0.5, 0.62);
  const ankleCenter = averagePoint(leftAnkle, rightAnkle, hipCenter.x, 0.92);
  const shoulderSpan = Math.abs(rightShoulder.x - leftShoulder.x) * width;
  // Keep the torso plausible even when shoulders and hips nearly overlap.
  const torsoHeight = Math.max((hipCenter.y - shoulderCenter.y) * height, shoulderSpan * 0.8);
  // Head size: the larger of a shoulder-derived estimate, an ear-span
  // estimate (ears default to ±0.04 around the nose), and a 34px floor.
  const headRadius = Math.max(
    shoulderSpan * 0.28,
    Math.abs((leftEar?.x ?? nose.x - 0.04) - (rightEar?.x ?? nose.x + 0.04)) * width * 0.45,
    34,
  );
  const bodyWidth = Math.max(shoulderSpan * 1.05, headRadius * 1.8);
  const bodyHeight = Math.max(torsoHeight * 1.1, headRadius * 2.2);

  return {
    headX: nose.x * width,
    // Never let the head sink below just above the shoulder line.
    headY: Math.min(nose.y * height, shoulderCenter.y * height - headRadius * 0.2),
    headRadius,
    bodyX: shoulderCenter.x * width,
    bodyY: shoulderCenter.y * height + bodyHeight * 0.48,
    bodyWidth,
    bodyHeight,
    shoulderY: shoulderCenter.y * height,
    // Feet sit at the detected ankles, or a hip-relative floor if deeper.
    footY: Math.max(ankleCenter.y * height, hipCenter.y * height + bodyHeight * 1.35),
    // Hands default to slightly outside the shoulders when wrists are lost.
    leftHandX: (leftWrist?.x ?? leftShoulder.x - 0.08) * width,
    leftHandY: (leftWrist?.y ?? shoulderCenter.y + 0.1) * height,
    rightHandX: (rightWrist?.x ?? rightShoulder.x + 0.08) * width,
    rightHandY: (rightWrist?.y ?? shoulderCenter.y + 0.1) * height,
  };
}
|
||||
|
||||
/**
 * Fills a rounded rectangle over the torso anchor box using quadratic
 * corner curves (corner radius = 18% of the smaller box dimension).
 * Hand-built path rather than ctx.roundRect — presumably for wider
 * browser compatibility; confirm before replacing.
 */
function drawRoundedBody(ctx: CanvasRenderingContext2D, anchors: AvatarAnchors, fill: string) {
  const radius = Math.min(anchors.bodyWidth, anchors.bodyHeight) * 0.18;
  const left = anchors.bodyX - anchors.bodyWidth / 2;
  const top = anchors.bodyY - anchors.bodyHeight / 2;
  const right = left + anchors.bodyWidth;
  const bottom = top + anchors.bodyHeight;

  // Trace clockwise from the top edge, rounding each corner.
  ctx.beginPath();
  ctx.moveTo(left + radius, top);
  ctx.lineTo(right - radius, top);
  ctx.quadraticCurveTo(right, top, right, top + radius);
  ctx.lineTo(right, bottom - radius);
  ctx.quadraticCurveTo(right, bottom, right - radius, bottom);
  ctx.lineTo(left + radius, bottom);
  ctx.quadraticCurveTo(left, bottom, left, bottom - radius);
  ctx.lineTo(left, top + radius);
  ctx.quadraticCurveTo(left, top, left + radius, top);
  ctx.closePath();
  ctx.fillStyle = fill;
  ctx.fill();
}
|
||||
|
||||
/**
 * Strokes four simplified limbs as straight segments: shoulders → hands and
 * lower torso → feet. Line width scales with head size (min 10px) and uses
 * round caps for a soft cartoon look.
 */
function drawLimbs(ctx: CanvasRenderingContext2D, anchors: AvatarAnchors, stroke: string) {
  ctx.strokeStyle = stroke;
  ctx.lineWidth = Math.max(anchors.headRadius * 0.22, 10);
  ctx.lineCap = "round";
  ctx.beginPath();
  // Arms: from points just inside each shoulder down to the tracked hands.
  ctx.moveTo(anchors.bodyX - anchors.bodyWidth * 0.24, anchors.shoulderY + anchors.headRadius * 0.65);
  ctx.lineTo(anchors.leftHandX, anchors.leftHandY);
  ctx.moveTo(anchors.bodyX + anchors.bodyWidth * 0.24, anchors.shoulderY + anchors.headRadius * 0.65);
  ctx.lineTo(anchors.rightHandX, anchors.rightHandY);
  // Legs: from the lower torso straight down to the foot line.
  ctx.moveTo(anchors.bodyX - anchors.bodyWidth * 0.14, anchors.bodyY + anchors.bodyHeight * 0.42);
  ctx.lineTo(anchors.bodyX - anchors.bodyWidth * 0.18, anchors.footY);
  ctx.moveTo(anchors.bodyX + anchors.bodyWidth * 0.14, anchors.bodyY + anchors.bodyHeight * 0.42);
  ctx.lineTo(anchors.bodyX + anchors.bodyWidth * 0.18, anchors.footY);
  ctx.stroke();
}
|
||||
|
||||
function getAvatarImage(key: AvatarKey) {
|
||||
if (typeof Image === "undefined") {
|
||||
return null;
|
||||
}
|
||||
|
||||
const cached = avatarImageCache.get(key);
|
||||
if (cached) {
|
||||
return cached.complete && cached.naturalWidth > 0 ? cached : null;
|
||||
}
|
||||
|
||||
const image = new Image();
|
||||
image.decoding = "async";
|
||||
image.src = AVATAR_VISUALS[key].src;
|
||||
avatarImageCache.set(key, image);
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Renders the "badge" avatar style: tinted torso + stick limbs, a glow halo
 * behind the head, then the sprite stamped over the head and (smaller,
 * slightly translucent) over the torso. Without a loaded sprite it falls
 * back to a plain white face with two dot eyes.
 */
function drawAvatarBadge(
  ctx: CanvasRenderingContext2D,
  anchors: AvatarAnchors,
  avatarKey: AvatarKey,
  sprite: HTMLImageElement | null,
) {
  const visual = AVATAR_VISUALS[avatarKey];
  const headSize = anchors.headRadius * 2.5;
  const torsoBadge = Math.max(anchors.headRadius * 0.95, 40);

  drawRoundedBody(ctx, anchors, visual.bodyFill);
  drawLimbs(ctx, anchors, visual.limbStroke);

  // Soft glow halo slightly larger than the head circle.
  ctx.save();
  ctx.fillStyle = visual.glow;
  ctx.beginPath();
  ctx.arc(anchors.headX, anchors.headY, anchors.headRadius * 1.16, 0, Math.PI * 2);
  ctx.fill();
  ctx.restore();

  if (sprite) {
    // Head badge: sprite centered on the head anchor.
    ctx.drawImage(
      sprite,
      anchors.headX - headSize / 2,
      anchors.headY - headSize / 2,
      headSize,
      headSize,
    );
    // Torso badge: smaller repeat of the sprite at reduced alpha.
    ctx.save();
    ctx.globalAlpha = 0.94;
    ctx.drawImage(
      sprite,
      anchors.bodyX - torsoBadge / 2,
      anchors.bodyY - torsoBadge / 2,
      torsoBadge,
      torsoBadge,
    );
    ctx.restore();
    return;
  }

  // Fallback face: white disc plus two dark eye dots.
  ctx.fillStyle = "rgba(255,255,255,0.92)";
  ctx.beginPath();
  ctx.arc(anchors.headX, anchors.headY, anchors.headRadius * 0.88, 0, Math.PI * 2);
  ctx.fill();
  ctx.fillStyle = "rgba(17,24,39,0.82)";
  ctx.beginPath();
  ctx.arc(anchors.headX - anchors.headRadius * 0.22, anchors.headY - anchors.headRadius * 0.08, anchors.headRadius * 0.08, 0, Math.PI * 2);
  ctx.arc(anchors.headX + anchors.headRadius * 0.22, anchors.headY - anchors.headRadius * 0.08, anchors.headRadius * 0.08, 0, Math.PI * 2);
  ctx.fill();
}
|
||||
|
||||
/**
 * Renders the "full-figure" avatar style: a ground-shadow ellipse at the
 * feet, then the whole sprite scaled head-to-foot over the tracked body.
 * The sprite keeps its natural aspect ratio (0.72 fallback before load);
 * per-preset `figureScale` / `figureOffsetY` fine-tune fit. Without a
 * loaded sprite it degrades to the tinted torso + stick limbs.
 */
function drawFullFigureAvatar(
  ctx: CanvasRenderingContext2D,
  anchors: AvatarAnchors,
  avatarKey: AvatarKey,
  sprite: HTMLImageElement | null,
) {
  const visual = AVATAR_VISUALS[avatarKey];
  // Top of the figure sits above the head, nudged by the preset offset.
  const topY = anchors.headY - anchors.headRadius * 1.55 + anchors.bodyHeight * (visual.figureOffsetY ?? 0);
  const baseHeight = Math.max(anchors.footY - topY, anchors.bodyHeight * 2.35);
  const figureHeight = baseHeight * (visual.figureScale ?? 1);
  const aspectRatio = sprite?.naturalWidth && sprite?.naturalHeight
    ? sprite.naturalWidth / sprite.naturalHeight
    : 0.72;
  const figureWidth = figureHeight * aspectRatio;
  const figureLeft = anchors.bodyX - figureWidth / 2;

  // Ground shadow: glow-tinted ellipse just above the foot line.
  ctx.save();
  ctx.fillStyle = visual.glow;
  ctx.beginPath();
  ctx.ellipse(
    anchors.bodyX,
    anchors.footY - anchors.headRadius * 0.1,
    Math.max(anchors.bodyWidth * 0.42, 34),
    Math.max(anchors.headRadius * 0.22, 10),
    0,
    0,
    Math.PI * 2,
  );
  ctx.fill();
  ctx.restore();

  if (sprite) {
    // Drop-shadowed sprite draw; save/restore scopes the shadow settings.
    ctx.save();
    ctx.shadowColor = "rgba(15,23,42,0.28)";
    ctx.shadowBlur = 16;
    ctx.shadowOffsetY = 10;
    ctx.drawImage(sprite, figureLeft, topY, figureWidth, figureHeight);
    ctx.restore();
    return;
  }

  // Sprite not ready: fall back to the simple badge-style body.
  drawRoundedBody(ctx, anchors, visual.bodyFill);
  drawLimbs(ctx, anchors, visual.limbStroke);
}
|
||||
|
||||
export function renderLiveCameraOverlayToContext(
|
||||
ctx: CanvasRenderingContext2D | null,
|
||||
width: number,
|
||||
height: number,
|
||||
landmarks: PosePoint[] | undefined,
|
||||
avatarState?: AvatarRenderState,
|
||||
options?: { clear?: boolean },
|
||||
) {
|
||||
if (!ctx) return;
|
||||
if (options?.clear !== false) {
|
||||
ctx.clearRect(0, 0, width, height);
|
||||
}
|
||||
if (!landmarks) return;
|
||||
|
||||
if (avatarState?.enabled) {
|
||||
const anchors = getAvatarAnchors(landmarks, width, height);
|
||||
if (anchors) {
|
||||
const sprite = getAvatarImage(avatarState.avatarKey);
|
||||
const visual = AVATAR_VISUALS[avatarState.avatarKey];
|
||||
ctx.save();
|
||||
ctx.globalAlpha = 0.95;
|
||||
if (visual.renderMode === "full-figure") {
|
||||
drawFullFigureAvatar(ctx, anchors, avatarState.avatarKey, sprite);
|
||||
} else {
|
||||
drawAvatarBadge(ctx, anchors, avatarState.avatarKey, sprite);
|
||||
}
|
||||
ctx.restore();
|
||||
|
||||
if (visual.renderMode !== "full-figure") {
|
||||
ctx.save();
|
||||
ctx.strokeStyle = "rgba(255,255,255,0.16)";
|
||||
ctx.lineWidth = 2;
|
||||
ctx.setLineDash([8, 10]);
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(anchors.bodyX, anchors.shoulderY - anchors.headRadius * 1.25);
|
||||
ctx.lineTo(anchors.bodyX, anchors.footY);
|
||||
ctx.stroke();
|
||||
ctx.restore();
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const poseConnections: Array<[number, number]> = [
|
||||
[11, 12], [11, 13], [13, 15], [12, 14], [14, 16],
|
||||
[11, 23], [12, 24], [23, 24], [23, 25], [24, 26],
|
||||
[25, 27], [26, 28], [15, 17], [16, 18], [15, 19],
|
||||
[16, 20], [17, 19], [18, 20],
|
||||
];
|
||||
|
||||
ctx.strokeStyle = "rgba(25, 211, 155, 0.9)";
|
||||
ctx.lineWidth = 3;
|
||||
poseConnections.forEach(([from, to]) => {
|
||||
const start = landmarks[from];
|
||||
const end = landmarks[to];
|
||||
if (!start || !end || (start.visibility ?? 1) < 0.25 || (end.visibility ?? 1) < 0.25) return;
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(start.x * width, start.y * height);
|
||||
ctx.lineTo(end.x * width, end.y * height);
|
||||
ctx.stroke();
|
||||
});
|
||||
|
||||
landmarks.forEach((point, index) => {
|
||||
if ((point.visibility ?? 1) < 0.25) return;
|
||||
ctx.fillStyle = index >= 11 && index <= 16 ? "rgba(253, 224, 71, 0.95)" : "rgba(255,255,255,0.88)";
|
||||
ctx.beginPath();
|
||||
ctx.arc(point.x * width, point.y * height, index >= 11 && index <= 16 ? 5 : 4, 0, Math.PI * 2);
|
||||
ctx.fill();
|
||||
});
|
||||
}
|
||||
|
||||
export function drawLiveCameraOverlay(
|
||||
canvas: HTMLCanvasElement | null,
|
||||
landmarks: PosePoint[] | undefined,
|
||||
avatarState?: AvatarRenderState,
|
||||
) {
|
||||
const ctx = canvas?.getContext("2d");
|
||||
if (!canvas || !ctx) return;
|
||||
renderLiveCameraOverlayToContext(ctx, canvas.width, canvas.height, landmarks, avatarState, { clear: true });
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { formatRecordingTime, pickBitrate } from "./media";
|
||||
import { formatRecordingTime, getMediaAssetUrl, pickBitrate } from "./media";
|
||||
|
||||
describe("media utilities", () => {
|
||||
it("formats recording time with minute and second padding", () => {
|
||||
@@ -14,4 +14,16 @@ describe("media utilities", () => {
|
||||
expect(pickBitrate("balanced", true)).toBe(1_400_000);
|
||||
expect(pickBitrate("balanced", false)).toBe(1_900_000);
|
||||
});
|
||||
|
||||
it("keeps already-prefixed media asset paths stable", () => {
|
||||
expect(getMediaAssetUrl("/media/assets/sessions/demo/preview.webm")).toBe(
|
||||
"/media/assets/sessions/demo/preview.webm"
|
||||
);
|
||||
expect(getMediaAssetUrl("https://cdn.example.com/demo.webm")).toBe(
|
||||
"https://cdn.example.com/demo.webm"
|
||||
);
|
||||
expect(getMediaAssetUrl("/assets/sessions/demo/preview.webm")).toBe(
|
||||
"/media/assets/sessions/demo/preview.webm"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -14,11 +14,7 @@ export type ArchiveStatus =
|
||||
| "completed"
|
||||
| "failed";
|
||||
|
||||
export type PreviewStatus =
|
||||
| "idle"
|
||||
| "processing"
|
||||
| "ready"
|
||||
| "failed";
|
||||
export type PreviewStatus = "idle" | "processing" | "ready" | "failed";
|
||||
|
||||
export type MediaMarker = {
|
||||
id: string;
|
||||
@@ -33,6 +29,7 @@ export type MediaSession = {
|
||||
id: string;
|
||||
userId: string;
|
||||
title: string;
|
||||
purpose?: "recording" | "relay";
|
||||
status: MediaSessionStatus;
|
||||
archiveStatus: ArchiveStatus;
|
||||
previewStatus: PreviewStatus;
|
||||
@@ -46,10 +43,14 @@ export type MediaSession = {
|
||||
uploadedBytes: number;
|
||||
previewSegments: number;
|
||||
durationMs: number;
|
||||
relayBufferSeconds?: number;
|
||||
lastError?: string;
|
||||
previewUpdatedAt?: string;
|
||||
streamConnected: boolean;
|
||||
lastStreamAt?: string;
|
||||
viewerCount?: number;
|
||||
liveFrameUrl?: string;
|
||||
liveFrameUpdatedAt?: string;
|
||||
playback: {
|
||||
webmUrl?: string;
|
||||
mp4Url?: string;
|
||||
@@ -61,11 +62,14 @@ export type MediaSession = {
|
||||
markers: MediaMarker[];
|
||||
};
|
||||
|
||||
const MEDIA_BASE = (import.meta.env.VITE_MEDIA_BASE_URL || "/media").replace(/\/$/, "");
|
||||
const MEDIA_BASE = (import.meta.env.VITE_MEDIA_BASE_URL || "/media").replace(
|
||||
/\/$/,
|
||||
""
|
||||
);
|
||||
const RETRYABLE_STATUS = new Set([502, 503, 504]);
|
||||
|
||||
function sleep(ms: number) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
async function request<T>(path: string, init?: RequestInit): Promise<T> {
|
||||
@@ -76,7 +80,11 @@ async function request<T>(path: string, init?: RequestInit): Promise<T> {
|
||||
const response = await fetch(`${MEDIA_BASE}${path}`, init);
|
||||
if (!response.ok) {
|
||||
const errorBody = await response.json().catch(() => ({}));
|
||||
const error = new Error(errorBody.error || errorBody.message || `Media service error (${response.status})`);
|
||||
const error = new Error(
|
||||
errorBody.error ||
|
||||
errorBody.message ||
|
||||
`Media service error (${response.status})`
|
||||
);
|
||||
if (RETRYABLE_STATUS.has(response.status) && attempt < 2) {
|
||||
lastError = error;
|
||||
await sleep(400 * (attempt + 1));
|
||||
@@ -86,7 +94,8 @@ async function request<T>(path: string, init?: RequestInit): Promise<T> {
|
||||
}
|
||||
return response.json() as Promise<T>;
|
||||
} catch (error) {
|
||||
lastError = error instanceof Error ? error : new Error("Media request failed");
|
||||
lastError =
|
||||
error instanceof Error ? error : new Error("Media request failed");
|
||||
if (attempt < 2) {
|
||||
await sleep(400 * (attempt + 1));
|
||||
continue;
|
||||
@@ -106,6 +115,8 @@ export async function createMediaSession(payload: {
|
||||
qualityPreset: string;
|
||||
facingMode: string;
|
||||
deviceKind: string;
|
||||
purpose?: "recording" | "relay";
|
||||
relayBufferSeconds?: number;
|
||||
}) {
|
||||
return request<{ session: MediaSession }>("/sessions", {
|
||||
method: "POST",
|
||||
@@ -114,12 +125,43 @@ export async function createMediaSession(payload: {
|
||||
});
|
||||
}
|
||||
|
||||
export async function signalMediaSession(sessionId: string, payload: { sdp: string; type: string }) {
|
||||
return request<{ sdp: string; type: string }>(`/sessions/${sessionId}/signal`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(payload),
|
||||
});
|
||||
export async function signalMediaSession(
|
||||
sessionId: string,
|
||||
payload: { sdp: string; type: string }
|
||||
) {
|
||||
return request<{ sdp: string; type: string }>(
|
||||
`/sessions/${sessionId}/signal`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(payload),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export async function signalMediaViewerSession(
|
||||
sessionId: string,
|
||||
payload: { sdp: string; type: string }
|
||||
) {
|
||||
return request<{ viewerId: string; sdp: string; type: string }>(
|
||||
`/sessions/${sessionId}/viewer-signal`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(payload),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export async function uploadMediaLiveFrame(sessionId: string, blob: Blob) {
|
||||
return request<{ session: MediaSession }>(
|
||||
`/sessions/${sessionId}/live-frame`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: { "Content-Type": blob.type || "image/jpeg" },
|
||||
body: blob,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export async function uploadMediaSegment(
|
||||
@@ -140,7 +182,12 @@ export async function uploadMediaSegment(
|
||||
|
||||
export async function createMediaMarker(
|
||||
sessionId: string,
|
||||
payload: { type: string; label: string; timestampMs: number; confidence?: number }
|
||||
payload: {
|
||||
type: string;
|
||||
label: string;
|
||||
timestampMs: number;
|
||||
confidence?: number;
|
||||
}
|
||||
) {
|
||||
return request<{ session: MediaSession }>(`/sessions/${sessionId}/markers`, {
|
||||
method: "POST",
|
||||
@@ -164,6 +211,16 @@ export async function getMediaSession(sessionId: string) {
|
||||
return request<{ session: MediaSession }>(`/sessions/${sessionId}`);
|
||||
}
|
||||
|
||||
export function getMediaAssetUrl(path: string) {
|
||||
if (/^https?:\/\//i.test(path)) {
|
||||
return path;
|
||||
}
|
||||
if (path.startsWith(`${MEDIA_BASE}/`)) {
|
||||
return path;
|
||||
}
|
||||
return `${MEDIA_BASE}${path.startsWith("/") ? path : `/${path}`}`;
|
||||
}
|
||||
|
||||
export function formatRecordingTime(milliseconds: number) {
|
||||
const totalSeconds = Math.max(0, Math.floor(milliseconds / 1000));
|
||||
const minutes = Math.floor(totalSeconds / 60);
|
||||
@@ -178,7 +235,11 @@ export function pickRecorderMimeType() {
|
||||
"video/webm;codecs=h264,opus",
|
||||
"video/webm",
|
||||
];
|
||||
return candidates.find((candidate) => window.MediaRecorder?.isTypeSupported(candidate)) || "video/webm";
|
||||
return (
|
||||
candidates.find(candidate =>
|
||||
window.MediaRecorder?.isTypeSupported(candidate)
|
||||
) || "video/webm"
|
||||
);
|
||||
}
|
||||
|
||||
export function pickBitrate(preset: string, isMobile: boolean) {
|
||||
|
||||
@@ -13,6 +13,28 @@ import {
|
||||
} from "recharts";
|
||||
import { useLocation } from "wouter";
|
||||
|
||||
const ACTION_LABEL_MAP: Record<string, string> = {
|
||||
forehand: "正手挥拍",
|
||||
backhand: "反手挥拍",
|
||||
serve: "发球",
|
||||
volley: "截击",
|
||||
overhead: "高压",
|
||||
slice: "切削",
|
||||
lob: "挑高球",
|
||||
unknown: "未知动作",
|
||||
};
|
||||
|
||||
function getRecordMetadata(record: any) {
|
||||
if (!record?.metadata || typeof record.metadata !== "object") {
|
||||
return null;
|
||||
}
|
||||
return record.metadata as Record<string, any>;
|
||||
}
|
||||
|
||||
function getActionLabel(actionType: string) {
|
||||
return ACTION_LABEL_MAP[actionType] || actionType;
|
||||
}
|
||||
|
||||
export default function Progress() {
|
||||
const { user } = useAuth();
|
||||
const { data: records, isLoading } = trpc.record.list.useQuery({ limit: 100 });
|
||||
@@ -181,7 +203,16 @@ export default function Progress() {
|
||||
<CardContent>
|
||||
{(records?.length || 0) > 0 ? (
|
||||
<div className="space-y-2">
|
||||
{(records || []).slice(0, 20).map((record: any) => (
|
||||
{(records || []).slice(0, 20).map((record: any) => {
|
||||
const metadata = getRecordMetadata(record);
|
||||
const actionSummary = metadata?.actionSummary && typeof metadata.actionSummary === "object"
|
||||
? Object.entries(metadata.actionSummary as Record<string, number>).filter(([, count]) => Number(count) > 0)
|
||||
: [];
|
||||
const topActions = actionSummary
|
||||
.sort((left, right) => Number(right[1]) - Number(left[1]))
|
||||
.slice(0, 3);
|
||||
|
||||
return (
|
||||
<div key={record.id} className="border-b py-2 last:border-0">
|
||||
<div className="flex items-start justify-between gap-3">
|
||||
<div className="flex items-start gap-3">
|
||||
@@ -193,15 +224,27 @@ export default function Progress() {
|
||||
<div>
|
||||
<p className="text-sm font-medium">{record.exerciseName}</p>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
{formatDateTimeShanghai(record.trainingDate || record.createdAt)}
|
||||
{formatDateTimeShanghai(record.trainingDate || record.createdAt, { second: "2-digit" })}
|
||||
{record.durationMinutes ? ` · ${record.durationMinutes}分钟` : ""}
|
||||
{record.sourceType ? ` · ${record.sourceType}` : ""}
|
||||
</p>
|
||||
{record.actionCount ? (
|
||||
<p className="mt-1 text-xs text-muted-foreground">
|
||||
动作数 {record.actionCount}
|
||||
</p>
|
||||
) : null}
|
||||
<div className="mt-1 flex flex-wrap items-center gap-2">
|
||||
{record.actionCount ? (
|
||||
<Badge variant="outline" className="text-[11px]">
|
||||
动作数 {record.actionCount}
|
||||
</Badge>
|
||||
) : null}
|
||||
{metadata?.dominantAction ? (
|
||||
<Badge variant="secondary" className="text-[11px]">
|
||||
主动作 {getActionLabel(String(metadata.dominantAction))}
|
||||
</Badge>
|
||||
) : null}
|
||||
{topActions.map(([actionType, count]) => (
|
||||
<Badge key={`${record.id}-${actionType}`} variant="secondary" className="text-[11px]">
|
||||
{getActionLabel(actionType)} {count} 次
|
||||
</Badge>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
@@ -236,36 +279,36 @@ export default function Progress() {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{record.metadata ? (
|
||||
{metadata ? (
|
||||
<div className="mt-4 space-y-3">
|
||||
{record.metadata.dominantAction ? (
|
||||
{metadata.dominantAction ? (
|
||||
<div>
|
||||
<div className="text-xs uppercase tracking-[0.16em] text-muted-foreground">主动作</div>
|
||||
<div className="mt-1 font-medium">{String(record.metadata.dominantAction)}</div>
|
||||
<div className="mt-1 font-medium">{getActionLabel(String(metadata.dominantAction))}</div>
|
||||
</div>
|
||||
) : null}
|
||||
|
||||
{record.metadata.actionSummary && Object.keys(record.metadata.actionSummary).length > 0 ? (
|
||||
{metadata.actionSummary && Object.keys(metadata.actionSummary).length > 0 ? (
|
||||
<div>
|
||||
<div className="text-xs uppercase tracking-[0.16em] text-muted-foreground">动作明细</div>
|
||||
<div className="mt-2 flex flex-wrap gap-2">
|
||||
{Object.entries(record.metadata.actionSummary as Record<string, number>)
|
||||
{Object.entries(metadata.actionSummary as Record<string, number>)
|
||||
.filter(([, count]) => Number(count) > 0)
|
||||
.map(([actionType, count]) => (
|
||||
<Badge key={actionType} variant="secondary">
|
||||
{actionType} {count} 次
|
||||
{getActionLabel(actionType)} {count} 次
|
||||
</Badge>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
) : null}
|
||||
|
||||
{record.metadata.validityStatus ? (
|
||||
{metadata.validityStatus ? (
|
||||
<div>
|
||||
<div className="text-xs uppercase tracking-[0.16em] text-muted-foreground">录制有效性</div>
|
||||
<div className="mt-1 font-medium">{String(record.metadata.validityStatus)}</div>
|
||||
{record.metadata.invalidReason ? (
|
||||
<div className="mt-1 text-xs text-muted-foreground">{String(record.metadata.invalidReason)}</div>
|
||||
<div className="mt-1 font-medium">{String(metadata.validityStatus)}</div>
|
||||
{metadata.invalidReason ? (
|
||||
<div className="mt-1 text-xs text-muted-foreground">{String(metadata.invalidReason)}</div>
|
||||
) : null}
|
||||
</div>
|
||||
) : null}
|
||||
@@ -281,7 +324,8 @@ export default function Progress() {
|
||||
</div>
|
||||
) : null}
|
||||
</div>
|
||||
))}
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
) : (
|
||||
<div className="py-8 text-center text-muted-foreground text-sm">
|
||||
|
||||
@@ -31,7 +31,7 @@ import {
|
||||
recognizeActionFrame,
|
||||
stabilizeActionFrame,
|
||||
} from "@/lib/actionRecognition";
|
||||
import { applyTrackZoom, getCameraVideoConstraints, readTrackZoomState } from "@/lib/camera";
|
||||
import { applyTrackZoom, readTrackZoomState, requestCameraStream } from "@/lib/camera";
|
||||
import { formatDateTimeShanghai } from "@/lib/time";
|
||||
import {
|
||||
Activity,
|
||||
@@ -189,6 +189,10 @@ function summarizeActions(actionSummary: Record<ActionType, number>) {
|
||||
export default function Recorder() {
|
||||
const { user } = useAuth();
|
||||
const utils = trpc.useUtils();
|
||||
const runtimeQuery = trpc.analysis.runtimeGet.useQuery(undefined, {
|
||||
refetchInterval: 1000,
|
||||
refetchIntervalInBackground: true,
|
||||
});
|
||||
const finalizeTaskMutation = trpc.task.createMediaFinalize.useMutation({
|
||||
onSuccess: (data) => {
|
||||
setArchiveTaskId(data.taskId);
|
||||
@@ -262,6 +266,9 @@ export default function Recorder() {
|
||||
|
||||
const mobile = useMemo(() => isMobileDevice(), []);
|
||||
const mimeType = useMemo(() => pickRecorderMimeType(), []);
|
||||
const runtimeRole = runtimeQuery.data?.role ?? "idle";
|
||||
const liveAnalysisRuntime = runtimeQuery.data?.runtimeSession;
|
||||
const liveAnalysisOccupied = runtimeRole === "viewer" && liveAnalysisRuntime?.status === "active";
|
||||
const currentPlaybackUrl = mediaSession?.playback.mp4Url || mediaSession?.playback.webmUrl || "";
|
||||
const archiveTaskQuery = useBackgroundTask(archiveTaskId);
|
||||
const archiveProgress = archiveTaskQuery.data?.progress ?? getArchiveProgress(mediaSession);
|
||||
@@ -402,14 +409,21 @@ export default function Recorder() {
|
||||
preferredZoom = zoomTargetRef.current,
|
||||
preset: keyof typeof QUALITY_PRESETS = qualityPreset,
|
||||
) => {
|
||||
if (liveAnalysisOccupied) {
|
||||
const title = liveAnalysisRuntime?.title || "其他设备正在实时分析";
|
||||
toast.error(`${title},当前设备不能再开启录制摄像头`);
|
||||
throw new Error("当前账号已有其他设备正在实时分析");
|
||||
}
|
||||
try {
|
||||
if (streamRef.current) {
|
||||
streamRef.current.getTracks().forEach((track) => track.stop());
|
||||
streamRef.current = null;
|
||||
}
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia({
|
||||
video: getCameraVideoConstraints(nextFacingMode, mobile, preset),
|
||||
const { stream, appliedFacingMode, audioEnabled, usedFallback } = await requestCameraStream({
|
||||
facingMode: nextFacingMode,
|
||||
isMobile: mobile,
|
||||
preset,
|
||||
audio: {
|
||||
echoCancellation: true,
|
||||
noiseSuppression: true,
|
||||
@@ -426,6 +440,9 @@ export default function Recorder() {
|
||||
|
||||
suppressTrackEndedRef.current = false;
|
||||
streamRef.current = stream;
|
||||
if (appliedFacingMode !== nextFacingMode) {
|
||||
setFacingMode(appliedFacingMode);
|
||||
}
|
||||
if (liveVideoRef.current) {
|
||||
liveVideoRef.current.srcObject = stream;
|
||||
await liveVideoRef.current.play();
|
||||
@@ -433,6 +450,12 @@ export default function Recorder() {
|
||||
await syncZoomState(preferredZoom, stream.getVideoTracks()[0] || null);
|
||||
setCameraError("");
|
||||
setCameraActive(true);
|
||||
if (usedFallback) {
|
||||
toast.info("当前设备已自动切换到兼容摄像头模式");
|
||||
}
|
||||
if (!audioEnabled) {
|
||||
toast.warning("麦克风不可用,已切换为仅视频模式");
|
||||
}
|
||||
return stream;
|
||||
} catch (error: any) {
|
||||
const message = error?.message || "无法访问摄像头";
|
||||
@@ -440,7 +463,7 @@ export default function Recorder() {
|
||||
toast.error(`摄像头启动失败: ${message}`);
|
||||
throw error;
|
||||
}
|
||||
}), [facingMode, mobile, qualityPreset, syncZoomState]);
|
||||
}), [facingMode, liveAnalysisOccupied, liveAnalysisRuntime?.title, mobile, qualityPreset, syncZoomState]);
|
||||
|
||||
const ensurePreviewStream = useCallback(async () => {
|
||||
if (streamRef.current) {
|
||||
@@ -849,6 +872,11 @@ export default function Recorder() {
|
||||
toast.error("请先登录后再开始录制");
|
||||
return;
|
||||
}
|
||||
if (liveAnalysisOccupied) {
|
||||
const title = liveAnalysisRuntime?.title || "其他设备正在实时分析";
|
||||
toast.error(`${title},当前设备不能同时开始录制`);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
setMode("preparing");
|
||||
@@ -898,7 +926,21 @@ export default function Recorder() {
|
||||
setMode("idle");
|
||||
toast.error(`启动录制失败: ${error?.message || "未知错误"}`);
|
||||
}
|
||||
}, [ensurePreviewStream, facingMode, mimeType, mobile, qualityPreset, startActionSampling, startRealtimePush, startRecorderLoop, syncSessionState, title, user]);
|
||||
}, [
|
||||
ensurePreviewStream,
|
||||
facingMode,
|
||||
liveAnalysisOccupied,
|
||||
liveAnalysisRuntime?.title,
|
||||
mimeType,
|
||||
mobile,
|
||||
qualityPreset,
|
||||
startActionSampling,
|
||||
startRealtimePush,
|
||||
startRecorderLoop,
|
||||
syncSessionState,
|
||||
title,
|
||||
user,
|
||||
]);
|
||||
|
||||
const finishRecording = useCallback(async () => {
|
||||
const session = currentSessionRef.current;
|
||||
@@ -1140,9 +1182,10 @@ export default function Recorder() {
|
||||
data-testid="recorder-start-camera-button"
|
||||
onClick={() => void startCamera()}
|
||||
className={buttonClass()}
|
||||
disabled={liveAnalysisOccupied}
|
||||
>
|
||||
<Camera className={iconClass} />
|
||||
{labelFor("启动摄像头", "启动")}
|
||||
{labelFor(liveAnalysisOccupied ? "实时分析占用中" : "启动摄像头", liveAnalysisOccupied ? "占用" : "启动")}
|
||||
</Button>
|
||||
) : (
|
||||
<>
|
||||
@@ -1150,9 +1193,10 @@ export default function Recorder() {
|
||||
data-testid="recorder-start-recording-button"
|
||||
onClick={() => void beginRecording()}
|
||||
className={buttonClass("record")}
|
||||
disabled={liveAnalysisOccupied}
|
||||
>
|
||||
<Circle className={`${iconClass} ${rail ? "fill-current" : "fill-current"}`} />
|
||||
{labelFor("开始录制", "录制")}
|
||||
{labelFor(liveAnalysisOccupied ? "实时分析占用中" : "开始录制", liveAnalysisOccupied ? "占用" : "录制")}
|
||||
</Button>
|
||||
<Button variant="outline" onClick={stopCamera} className={buttonClass("outline")}>
|
||||
<VideoOff className={iconClass} />
|
||||
@@ -1362,6 +1406,23 @@ export default function Recorder() {
|
||||
</Alert>
|
||||
) : null}
|
||||
|
||||
{liveAnalysisOccupied ? (
|
||||
<Alert className="border-amber-300/70 bg-amber-50 text-amber-950">
|
||||
<ShieldAlert className="h-4 w-4" />
|
||||
<AlertTitle>当前账号已有其他设备正在实时分析</AlertTitle>
|
||||
<AlertDescription>
|
||||
{liveAnalysisRuntime?.title || "其他设备正在实时分析"},本页已禁止再次启动摄像头和录制,避免同账号多端同时占用镜头。
|
||||
你可以前往
|
||||
{" "}
|
||||
<a href="/live-camera" className="font-medium underline underline-offset-4">
|
||||
实时分析页
|
||||
</a>
|
||||
{" "}
|
||||
查看同步画面与动作识别结果。
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
) : null}
|
||||
|
||||
<div className="grid gap-4 xl:grid-cols-[minmax(0,1.7fr)_minmax(340px,0.9fr)]">
|
||||
<section className="space-y-4">
|
||||
<Card className="overflow-hidden border-0 shadow-lg">
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useAuth } from "@/_core/hooks/useAuth";
|
||||
import { trpc } from "@/lib/trpc";
|
||||
import { formatDateTimeShanghai } from "@/lib/time";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
@@ -458,7 +459,12 @@ export default function Reminders() {
|
||||
</div>
|
||||
</div>
|
||||
<span className="text-xs text-muted-foreground whitespace-nowrap ml-2">
|
||||
{new Date(notif.createdAt).toLocaleString("zh-CN", { month: "short", day: "numeric", hour: "2-digit", minute: "2-digit" })}
|
||||
{formatDateTimeShanghai(notif.createdAt, {
|
||||
year: undefined,
|
||||
second: undefined,
|
||||
month: "short",
|
||||
day: "numeric",
|
||||
})}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,5 +1,430 @@
|
||||
# Tennis Training Hub - 变更日志
|
||||
|
||||
## 2026.03.17-live-camera-relay-mp4-hardening (2026-03-17)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 修复实时分析 relay 预览在 Chrome `mp4` 分段下容易失效的问题;media 服务现在会在 relay 会话收到第一段 `mp4` 时额外保留初始化片段,供后续滚动 preview 重建使用
|
||||
- relay preview 构建会跳过明显异常的小 `mp4` 分段,并优先把初始化片段和当前缓存合成单一输入后再转成 `preview.webm`,降低 `trex/tfhd` 缺失导致的 ffmpeg 失败率
|
||||
- 如果 relay preview 本轮重建失败,但磁盘上仍有上一版可播放 `preview.webm`,worker 会保留旧预览继续服务 viewer,而不是直接把同步观看打成永久失败
|
||||
- `live-camera` 合成录制的 mime 选择已经改成优先 `video/webm`;Chrome 不再默认优先上传 fragmented `mp4` relay 分段,从源头减少 `concat failed` 与 `previewStatus=failed`
|
||||
|
||||
### 测试
|
||||
|
||||
- `cd media && go test ./...`
|
||||
- `pnpm check`
|
||||
- `pnpm build`
|
||||
- 部署后线上 smoke:已确认 `https://te.hao.work/` 正在提供新构建;当前线上仍有一条补丁前启动的旧 `mp4` relay 会话在运行,因此完整的 `webm` relay 端到端验证需要在重启该实时分析会话后继续确认
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- 已确认公开站点已切到包含此修复的新资源 revision
|
||||
- 当前线上仍有一条补丁前启动的旧 `mp4` relay 会话在运行,它会继续暴露旧分段问题;重新开始一条新的实时分析会话后,再继续验证 relay 分段格式、preview 更新稳定性和 viewer 播放状态
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `1adadba`
|
||||
|
||||
## 2026.03.17-live-camera-media-asset-url (2026-03-17)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 修复同步观看预览地址重复拼接 `/media` 的问题;当前端收到 `/media/assets/...` 这类已完整的应用内媒体路径时,会直接使用原值,不再错误请求 `/media/media/assets/...`
|
||||
- 当前端收到完整的 `https://...` 外部媒体地址时,也会保持原样,避免把外链错误改写成站内 media 路径
|
||||
- 其他仍是普通相对路径的媒体资源会继续自动补齐 `/media` 前缀,因此原有依赖相对路径的调用链不需要调整
|
||||
- `/live-camera` 点击“同步观看”后,请求的缓存视频地址恢复为 `/media/assets/sessions/.../preview.webm`,不再因 `404 page not found` 导致无视频可播
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm vitest run client/src/lib/media.test.ts`
|
||||
- `pnpm check`
|
||||
- `pnpm build`
|
||||
- `playwright-skill` 线上 smoke:登录 `H1` 后访问 `https://te.hao.work/live-camera`,确认 viewer 实际请求 `https://te.hao.work/media/assets/sessions/.../preview.webm?...` 并返回 `200`,同时不存在 `/media/media/...` 双前缀请求
|
||||
- `curl -I https://te.hao.work/`
|
||||
- `curl -I https://te.hao.work/assets/index-*.js`
|
||||
- `curl -I https://te.hao.work/assets/index-*.css`
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- 部署前确认公开站点仍在旧资源 revision,尚未提供本次修复
|
||||
- 部署完成后,`https://te.hao.work/` 已切到本次新构建,而不是继续提供部署前的旧资源 revision
|
||||
- `/live-camera` 的同步观看请求地址已恢复为 `/media/assets/sessions/.../preview.webm`,Playwright 真实浏览器验证拿到的 preview 请求状态为 `200`
|
||||
- 已确认不存在 `/media/media/assets/...` 双重前缀请求
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `0af88b3`
|
||||
|
||||
## 2026.03.17-live-camera-pose-buffer-window (2026-03-17)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 修复 `/live-camera` 开始分析时报错 `Cannot destructure property 'Pose' ... as it is undefined` 的问题;MediaPipe Pose 动态加载现在兼容 `Pose`、`default.Pose` 和默认导出三种模块形态
|
||||
- 多端同步观看的 relay 缓存窗口改为按会话配置,默认 `2` 分钟,可选最短 `10` 秒、最长 `5` 分钟;viewer 页面、徽标和设置卡都会同步显示当前缓存时长
|
||||
- owner 端分析录制在继续保持“每 `60` 秒自动归档”之外,会额外每 `10` 秒上传一次 relay 分片,因此短缓存模式下其他端不需要等待整整 `60` 秒才看到平滑同步视频
|
||||
- media 服务会按各自 relay 会话的缓存秒数裁剪 preview 分段;从磁盘恢复旧 relay 会话时也会自动归一化到合法范围,避免旧会话继续沿用固定 `60` 秒窗口
|
||||
- 同步端渲染远端 `recentSegments` 时新增旧快照归一化;即使历史快照缺少 `keyFrames`、`issueSummary` 等数组字段,也会自动补默认值,不再触发 `Cannot read properties of undefined (reading 'length')`
|
||||
- 同步观看界面新增“已累积多少缓存、预计还需多久才能看到首段回放、距离目标缓存还差多少”的提示,观看端等待阶段会给出更明确的可观察时间说明
|
||||
- 线上 smoke 已确认 `https://te.hao.work/` 正在提供本次新构建,而不是旧资源版本;当前公开站点资源 revision 为 `assets/index-CYpJPG0R.js`、`assets/index-BHHHsAWc.css`、`assets/pose-C93FSit6.js`
|
||||
|
||||
### 测试
|
||||
|
||||
- `cd media && go test ./...`
|
||||
- `pnpm vitest run client/src/lib/liveCamera.test.ts`
|
||||
- `pnpm check`
|
||||
- `pnpm build`
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts`
|
||||
- `playwright-skill` 线上 smoke:登录 `H1` 后访问 `https://te.hao.work/live-camera`,完成校准、启用假摄像头并点击“开始分析”,确认页面进入分析中状态、默认显示“缓存 2 分钟”,且无控制台与页面级错误
|
||||
- `curl -I https://te.hao.work/`
|
||||
- `curl -I https://te.hao.work/assets/index-CYpJPG0R.js`
|
||||
- `curl -I https://te.hao.work/assets/index-BHHHsAWc.css`
|
||||
- `curl -I https://te.hao.work/assets/pose-C93FSit6.js`
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建,而不是旧资源版本
|
||||
- 当前公开站点前端资源 revision:`assets/index-CYpJPG0R.js`、`assets/index-BHHHsAWc.css`、`assets/pose-C93FSit6.js`
|
||||
- 已确认首页、主 JS、主 CSS 与 `pose` 模块均返回 `200`,且 MIME 分别为 `text/html`、`application/javascript`、`text/css`、`application/javascript`
|
||||
- 真实浏览器验证已通过:登录 `H1` 后进入 `/live-camera`,能够完成校准、启用摄像头并点击“开始分析”;页面会进入“分析进行中”状态,默认显示“缓存 2 分钟”,且未再出现 `Pose` 模块解构异常
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `f3f7e19+pose-buffer-window`
|
||||
|
||||
## 2026.03.17-live-camera-relay-buffer (2026-03-17)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- `/live-camera` 的同步观看改为播放 media 服务生成的滚动缓存视频,不再轮询 `live-frame.jpg` 单帧图片,因此观看端的画面会按最近 60 秒缓存视频平滑播放
|
||||
- owner 端每个 60 秒的合成录像分段现在会额外上传到 `relay` 会话,worker 会在收到新分段后自动重建最近窗口的 `preview.webm`
|
||||
- `relay` 会话只保留最近 60 秒视频分段,旧分段会从会话元数据和磁盘同步清理,避免观看端继续读到旧一分钟之前的缓存
|
||||
- media worker 会自动清理超过 30 分钟无活动的 relay 会话、分段目录和公开缓存文件,降低磁盘堆积风险
|
||||
- viewer 页面文案、加载提示和按钮文案已同步更新为“缓存视频 / 缓存回放”语义;预览阶段跳过 mp4 转码,Chrome 直接使用 webm,降低处理时延
|
||||
|
||||
### 测试
|
||||
|
||||
- `cd media && go test ./...`
|
||||
- `pnpm vitest run client/src/lib/liveCamera.test.ts`
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera page exposes camera startup controls|live camera starts analysis and produces scores|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active"`
|
||||
- `pnpm check`
|
||||
- `pnpm build`
|
||||
- 线上 smoke:部署后确认 `https://te.hao.work/` 已提供新构建而不是旧资源版本,`/live-camera` viewer 端进入“服务端缓存同步”路径,首页与资源文件返回正确 MIME
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- 部署完成后已确认 `https://te.hao.work/` 提供的是本次新构建,而不是旧资源版本
|
||||
- `https://te.hao.work/live-camera` 的 viewer 端会走“服务端缓存同步”路径,不再请求旧的 `live-frame.jpg` 单帧同步
|
||||
- 首页、主 JS、主 CSS 与 `pose` 模块均返回 `200` 和正确 MIME,未再出现脚本/样式被回退成 `text/html` 的问题
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `63dbfd2+relay-buffer`
|
||||
|
||||
## 2026.03.17-live-camera-preview-recovery (2026-03-17)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- `/live-camera` 的 runtime 标题恢复逻辑新增更严格的乱码筛除与二次 UTF-8 解码兜底,`æœ...` 这类异常标题会优先恢复为正常中文;无法恢复时会自动回退到稳定默认标题,避免继续显示脏字符串
|
||||
- 同步观看退出时会完整重置 viewer 轮询、连接标记和帧版本,不再把旧的 viewer 状态带回 owner 或空闲态,修复退出同步后仍黑屏、仍显示“等待同步画面”的问题
|
||||
- 本地摄像头预览增加独立重绑流程和多次 watchdog 重试,即使浏览器首帧没有及时绑定 `srcObject` 或 `play()` 被短暂中断,也会继续自动恢复本地预览
|
||||
- 视频区域是否显示画面改为按当前 runtime 角色分别判断,避免 viewer 旧连接状态误导 owner 模式,导致本地没有预览时仍错误隐藏占位提示
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm check`
|
||||
- `pnpm vitest run client/src/lib/liveCamera.test.ts`
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"`
|
||||
- `pnpm build`
|
||||
- 线上 smoke:`curl -I https://te.hao.work/`
|
||||
- 线上 smoke:`curl -I https://te.hao.work/assets/index-BJ7rV3xe.js`
|
||||
- 线上 smoke:`curl -I https://te.hao.work/assets/index-tNGuStgv.css`
|
||||
- 线上 smoke:`curl -I https://te.hao.work/assets/pose-CZKsH31a.js`
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建
|
||||
- 当前公开站点前端资源 revision:`assets/index-BJ7rV3xe.js`、`assets/index-tNGuStgv.css`、`assets/pose-CZKsH31a.js`
|
||||
- 已确认 `index`、`css` 与 `pose` 模块均返回 `200`,且 MIME 分别为 `application/javascript`、`text/css`、`application/javascript`,不再出现此前的模块脚本和样式被当成 `text/html` 返回的问题
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `06b9701`
|
||||
|
||||
## 2026.03.16-live-camera-runtime-refresh (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- `/live-camera` 在打开拍摄引导、启用摄像头、开始分析前,都会先向服务端强制刷新 runtime 状态,避免旧的同步观看锁残留导致本机明明已释放却仍无法启动
|
||||
- 新增 runtime 标题乱码恢复逻辑,可自动把 UTF-8 被误按 Latin-1 显示的标题恢复成正常中文,避免出现 `æœ...` 一类异常标题
|
||||
- 摄像头启动链路改为以 `getUserMedia` 成功为准;即使本地预览 `<video>` 的 `srcObject` 或 `play()` 在当前浏览器中短暂失败,也不会直接把整次启动判死
|
||||
- e2e mock 的媒体流补齐为带假视频轨道的流对象,并把 viewer 回归改为校验“服务端 relay、无 viewer-signal”行为,避免继续按旧 P2P 逻辑断言
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera page exposes camera startup controls|live camera switches into viewer mode when another device already owns analysis|live camera recovers mojibake viewer titles before rendering|live camera no longer opens viewer peer retries when server relay is active"`
|
||||
- `pnpm build`
|
||||
- 部署后线上 smoke:登录 `H1` 后访问 `https://te.hao.work/live-camera`,确认空闲态“启动摄像头”入口可见,不再被残留 viewer 锁卡住
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建
|
||||
- 当前公开站点前端资源 revision:`assets/index-33wVjC4p.js` 与 `assets/index-tNGuStgv.css`
|
||||
- 真实验证已通过:登录 `H1` 后访问 `https://te.hao.work/live-camera`,页面会正常显示“摄像头未启动 / 启动摄像头”,说明旧的 viewer 锁残留不会再把空闲设备卡在同步观看模式
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `8e9e491`
|
||||
|
||||
## 2026.03.16-live-viewer-server-relay (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- `/live-camera` 的同步观看改为由 media 服务中转最新合成帧图,不再依赖浏览器之间的 P2P WebRTC viewer 连接
|
||||
- owner 端会把“原视频 + 骨架/关键点 + 虚拟形象”的合成画布压缩成 JPEG 并持续上传到 media 服务
|
||||
- viewer 端改为自动轮询 media 服务中的最新同步帧图,因此即使浏览器之间无法直连,也能继续看到同步画面和状态
|
||||
- 同步观看模式文案已调整为明确提示“通过 media 服务中转”,等待阶段会继续自动刷新,而不是停留在 P2P 连接失败状态
|
||||
- media 服务新增 live-frame 上传与静态分发能力,并记录最近同步帧时间,方便后续继续扩展更高频的服务端 relay
|
||||
|
||||
### 测试
|
||||
|
||||
- `cd media && go test ./...`
|
||||
- `pnpm build`
|
||||
- `playwright-skill` 线上 smoke:先用 media 服务创建 relay session、上传 live-frame,并把 `H1` 的 `live_analysis_runtime` 注入为 active viewer 场景;随后访问 `https://te.hao.work/live-camera`,确认页面进入“同步观看模式”、同步帧来自 `/media/assets/sessions/.../live-frame.jpg`,且 `viewer-signal` 请求数为 `0`
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建
|
||||
- 当前公开站点前端资源 revision:`assets/index-BC-IupO8.js` 与 `assets/index-tNGuStgv.css`
|
||||
- 真实验证已通过:viewer 端进入“同步观看模式”后,画面由 media 服务静态分发的 `live-frame.jpg` 提供,已确认不再触发 `/viewer-signal` P2P 观看请求
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `bb46d26`
|
||||
|
||||
## 2026.03.16-camera-startup-fallbacks (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 修复部分设备在 `/live-camera` 和 `/recorder` 中因默认后置镜头、分辨率或帧率约束不兼容而直接启动摄像头失败的问题
|
||||
- 摄像头请求现在会自动按当前画质、去掉高约束、低分辨率、备用镜头、任意可用镜头依次降级重试
|
||||
- `/recorder` 在麦克风不可用或麦克风权限未给出时,会自动回退到仅视频模式,不再让整次预览启动失败
|
||||
- 如果实际启用的是兼容镜头或降级模式,页面会显示提示,帮助区分“自动修复成功”与“仍然无法访问摄像头”
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm build`
|
||||
- `playwright-skill` 线上 smoke:通过注入 `getUserMedia` 回归验证 `/live-camera` 首轮高约束失败后会自动降级到兼容摄像头模式,`/recorder` 在麦克风不可用时会自动回退到仅视频模式并继续启动预览
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建
|
||||
- 当前公开站点前端资源 revision:`assets/index-CRxtWK07.js` 与 `assets/index-tNGuStgv.css`
|
||||
- 真实回归已通过:模拟高约束失败时,`/live-camera` 会提示“当前设备已自动切换到兼容摄像头模式”并继续启动;模拟麦克风不可用时,`/recorder` 会提示“麦克风不可用,已切换为仅视频模式”并继续显示录制入口
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `a211562`
|
||||
|
||||
## 2026.03.16-live-analysis-viewer-full-sync (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 同账号多端同步观看时,viewer 端现在会按持有端 runtime snapshot 完整渲染,不再混用本地默认状态
|
||||
- `/live-camera` viewer 端新增主端同步信息卡,可看到当前会话标题、训练模式、设备端、拍摄视角、画质模式、虚拟形象状态和最近同步时间
|
||||
- viewer 端现在会同步显示主端当前处于“分析中 / 保存中 / 已保存 / 保存失败”的阶段状态
|
||||
- viewer 页面在同步观看模式下会自动关闭拍摄校准弹窗,避免被“启用摄像头”引导遮挡画面和状态信息
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera switches into viewer mode|viewer stream|recorder blocks"`
|
||||
- `pnpm build`
|
||||
- `playwright-skill` 线上 smoke:同账号 `H1` 双端登录后,移动端 owner 开始实时分析,桌面端 `/live-camera` 进入同步观看并显示主端信息、同步视频流,owner 点击结束分析后 viewer 同步进入保存阶段
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建
|
||||
- 当前公开站点前端资源 revision:`assets/index-HRdM3fxq.js` 与 `assets/index-tNGuStgv.css`
|
||||
- 真实双端验证已通过:同账号 `H1` 在移动端开启实时分析后,桌面端 `/live-camera` 会自动进入同步观看模式,显示主端设备信息、最近同步时间和远端视频流;owner 点击结束分析后,viewer 会同步进入“保存中”阶段
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `922a9fb`
|
||||
|
||||
## 2026.03.16-live-analysis-lock-hardening (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 修复同账号多端实时分析在旧登录态下仍可重复占用摄像头的问题;缺少 `sid` 的旧 token 现在会按 token 本身派生唯一会话标识
|
||||
- `/live-camera` 的同步观看模式新增自动重试;当持有端刚启动推流、viewer 首次连接返回 `viewer stream not ready` 时,会继续重连,不再长时间停留在无画面状态
|
||||
- `/recorder` 接入实时分析占用锁;其他设备正在实时分析时,本页会禁止再次启动摄像头和开始录制,并提示前往 `/live-camera` 查看同步画面
|
||||
- 应用启动改为先监听 HTTP 端口、再后台串行执行教程图同步和标准库预热,修复新容器上线时公网长时间返回 `502`
|
||||
|
||||
### 测试
|
||||
|
||||
- `curl -I https://te.hao.work/`
|
||||
- `pnpm check`
|
||||
- `pnpm exec vitest run server/_core/sdk.test.ts server/features.test.ts`
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "viewer mode|viewer stream|recorder blocks"`
|
||||
- `playwright-skill` 线上校验:登录 `H1` 后访问 `/changelog`,确认 `2026.03.16-live-analysis-lock-hardening` 与仓库版本 `f9db6ef` 已展示
|
||||
- `pnpm build`
|
||||
- Playwright 线上 smoke:`H1` 手机端开启实时分析后,PC 端 `/live-camera` 自动进入同步观看并显示同步画面,`/recorder` 禁止启动摄像头;结束分析后会话可正常释放
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建,不再返回 `502`
|
||||
- 当前公开站点前端资源 revision:`assets/index-mi8CPCFI.js` 与 `assets/index-Cp_VJ8sf.css`
|
||||
- 真实双端验证已通过:同账号 `H1` 手机端开始实时分析后,PC 端 `/live-camera` 进入同步观看模式且可拉起同步流,`/recorder` 页面会阻止再次占用摄像头
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `f9db6ef`
|
||||
|
||||
## 2026.03.16-live-analysis-runtime-migration (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 修复生产环境缺失 `live_analysis_runtime` 表导致 `/live-camera` 启动实时分析时报 SQL 查询失败的问题
|
||||
- 生产库已补建 `live_analysis_runtime` 表,并写入缺失的 `0011_live_analysis_runtime` 迁移记录,避免后续重复报错
|
||||
- 仓库内 `drizzle/meta/_journal.json` 已补齐 `0011_live_analysis_runtime` 条目,后续 `docker compose` 部署可正确识别该迁移
|
||||
- 实时分析 runtime 锁恢复正常后,同账号多端互斥与同步观看流程可继续工作
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm check`
|
||||
- `pnpm exec vitest run server/features.test.ts`
|
||||
- `pnpm build`
|
||||
- `docker compose exec -T db mysql ... SHOW TABLES LIKE 'live_analysis_runtime'`
|
||||
- `curl -I https://te.hao.work/live-camera`
|
||||
- Playwright smoke:登录 `H1` 后访问 `/live-camera`,`analysis.runtimeGet` / `analysis.runtimeAcquire` / `analysis.runtimeRelease` 全部返回 `200`
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/` 已切换到本次新构建
|
||||
- 当前公开站点前端资源 revision:`assets/index-B3BN5hY-.js` 与 `assets/index-BL6GQzUF.css`
|
||||
- `/live-camera` 已恢复可用,线上不再出现 `live_analysis_runtime` 缺表导致的 SQL 查询失败
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `2b72ef9`
|
||||
|
||||
## 2026.03.16-live-camera-multidevice-viewer (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- `/live-camera` 新增同账号多端 runtime 锁;一个设备开始实时分析后,其他设备不能再次启动摄像头或分析
|
||||
- 其他设备会自动进入“同步观看模式”,可查看持有端同步推送的实时画面、当前动作、评分、反馈和最近动作片段
|
||||
- 同步观看复用 media 服务新增的 `/viewer-signal` WebRTC 通道,直接订阅“原视频 + 骨架 + 关键点 + 虚拟形象”的合成画面
|
||||
- runtime 心跳按 `sid` 维度识别持有端,兼容旧 token 缺失可选字段的情况;超过 15 秒无心跳会自动释放陈旧锁
|
||||
- `/live-camera` 前端新增 owner / viewer 双模式切换,观看端会禁用镜头切换、重新校准、质量调整和分析启动
|
||||
- e2e mock 新增 viewer 模式和 runtime 接口覆盖,保证浏览器测试可以直接验证多端互斥与同步观看
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm check`
|
||||
- `pnpm exec vitest run server/features.test.ts`
|
||||
- `go test ./...`
|
||||
- `go build ./...`
|
||||
- `pnpm build`
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "live camera"`
|
||||
- `pnpm exec playwright test tests/e2e/app.spec.ts --grep "recorder flow archives a session and exposes it in videos"`
|
||||
- `curl -I https://te.hao.work/live-camera`
|
||||
|
||||
### 线上 smoke
|
||||
|
||||
- `https://te.hao.work/live-camera` 已切换到本次新前端构建
|
||||
- 公开站点确认已经提供本次发布的最新前端资源
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `4e4122d`
|
||||
|
||||
## 2026.03.16-live-analysis-overlay-archive (2026-03-16)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- `/live-camera` 新增 10 个免费动物虚拟形象,可将主体实时替换为猩猩、猴子、狗、猪、猫、狐狸、熊猫、狮子、老虎、兔子
|
||||
- `/live-camera` 再新增 4 个免费的全身 3D Avatar 示例,可直接覆盖人物轮廓,并提供对应的 CC0 模型源链接
|
||||
- `/live-camera` 新增实时分析自动录像,按 60 秒自动切段归档;归档视频写入视频库并标记为“实时分析”
|
||||
- 实时分析录像改为录制“视频画面 + 骨架线 + 关键点 + 虚拟形象覆盖”的合成画布,回放中可直接看到分析叠层
|
||||
- 实时分析记录与视频库解耦,用户删除视频库中的“实时分析”录像后,不会删除已保存的分析数据和训练记录
|
||||
- 增加形象别名输入,当前可按输入内容自动映射到内置形象
|
||||
- 实时分析动作稳定器从短窗口切换为 24 帧时间窗口,降低 1-2 秒内频繁跳动作的问题
|
||||
- 动作切换新增确认阶段与延迟入库逻辑,连续动作区间改为只按稳定动作聚合
|
||||
- 画面内新增稳定动作、原始候选、窗口占比、切换确认状态等实时状态提示
|
||||
- 实时分析会话保存新增稳定窗口、动作切换次数、原始波动率、虚拟形象状态等指标
|
||||
- 动物头像素材切换为本地集成的免费 Twemoji SVG,避免外链依赖
|
||||
- 新增 Open Source Avatars 的本地优化透明 WebP 全身素材,减少全身替身叠加时的页面流量和首帧加载时间
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm check`
|
||||
- `pnpm test`
|
||||
- `pnpm build`
|
||||
- `pnpm test:e2e`
|
||||
- Playwright 线上 smoke:
|
||||
- `https://te.hao.work/live-camera` 真实登录 `H1` 后可完成引导、启用摄像头、开始分析、结束分析
|
||||
- `https://te.hao.work/videos` 可见“实时分析”录像条目
|
||||
- `https://te.hao.work/changelog` 已展示 `2026.03.16-live-analysis-overlay-archive` 条目与对应摘要
|
||||
- 最终线上资源 revision:`assets/index-BWEXNszf.js` 与 `assets/index-BL6GQzUF.css`
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `4fb2d09`
|
||||
|
||||
## 2026.03.15-live-analysis-leave-hint (2026-03-15)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 实时分析进行中显示“不要关闭浏览器或切走页面”提示
|
||||
- 点击“结束分析”后,保存阶段显示“请暂时停留当前页面”提示
|
||||
- 保存完成后明确提示“现在可以关闭浏览器或切换到其他页面”
|
||||
- 分析中和保存中增加离开页面提醒,减少误关导致的数据丢失
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm check`
|
||||
- `pnpm build`
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `5c2dcf2`
|
||||
|
||||
## 2026.03.15-training-generator-collapse (2026-03-15)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 训练页右侧“重新生成计划”面板在桌面端默认折叠到右侧
|
||||
- 点击右侧折叠栏后展开完整生成器,可调整周期并重新生成计划
|
||||
- 移动端继续保持完整面板直接可见
|
||||
- 未生成计划时点击“前往生成训练计划”会先自动展开,再滚动到面板位置
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm check`
|
||||
- `pnpm build`
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `1ce94f6`
|
||||
|
||||
## 2026.03.15-progress-time-actions (2026-03-15)
|
||||
|
||||
### 功能更新
|
||||
|
||||
- 最近训练记录摘要行默认显示到秒的具体时间,统一按 `Asia/Shanghai` 展示
|
||||
- 录制类训练记录在列表中直接显示动作数、主动作和前 3 个动作统计
|
||||
- 训练记录展开态中的动作明细改为中文动作标签,便于直接阅读
|
||||
- 提醒页通知时间统一切换为 `Asia/Shanghai`
|
||||
|
||||
### 测试
|
||||
|
||||
- `pnpm check`
|
||||
- `pnpm build`
|
||||
|
||||
### 仓库版本
|
||||
|
||||
- `71caf0d`
|
||||
|
||||
## 2026.03.15-session-changelog (2026-03-15)
|
||||
|
||||
### 功能更新
|
||||
|
||||
@@ -0,0 +1,17 @@
|
||||
CREATE TABLE `live_analysis_runtime` (
|
||||
`id` int AUTO_INCREMENT NOT NULL,
|
||||
`userId` int NOT NULL,
|
||||
`ownerSid` varchar(96),
|
||||
`status` enum('idle','active','ended') NOT NULL DEFAULT 'idle',
|
||||
`title` varchar(256),
|
||||
`sessionMode` enum('practice','pk') NOT NULL DEFAULT 'practice',
|
||||
`mediaSessionId` varchar(96),
|
||||
`startedAt` timestamp,
|
||||
`endedAt` timestamp,
|
||||
`lastHeartbeatAt` timestamp,
|
||||
`snapshot` json,
|
||||
`createdAt` timestamp NOT NULL DEFAULT (now()),
|
||||
`updatedAt` timestamp NOT NULL DEFAULT (now()) ON UPDATE CURRENT_TIMESTAMP,
|
||||
CONSTRAINT `live_analysis_runtime_id` PRIMARY KEY(`id`),
|
||||
CONSTRAINT `live_analysis_runtime_user_idx` UNIQUE(`userId`)
|
||||
);
|
||||
@@ -78,6 +78,13 @@
|
||||
"when": 1773662400000,
|
||||
"tag": "0010_remove_non_tennis_tutorials",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 11,
|
||||
"version": "5",
|
||||
"when": 1773691200000,
|
||||
"tag": "0011_live_analysis_runtime",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -16,6 +16,21 @@ export const users = mysqlTable("users", {
|
||||
trainingGoals: text("trainingGoals"),
|
||||
/** NTRP rating (1.0 - 5.0) */
|
||||
ntrpRating: float("ntrpRating").default(1.5),
|
||||
/** Manual NTRP baseline before automated rating is established */
|
||||
manualNtrpRating: float("manualNtrpRating"),
|
||||
manualNtrpCapturedAt: timestamp("manualNtrpCapturedAt"),
|
||||
/** Training assessment profile */
|
||||
heightCm: float("heightCm"),
|
||||
weightKg: float("weightKg"),
|
||||
sprintSpeedScore: int("sprintSpeedScore"),
|
||||
explosivePowerScore: int("explosivePowerScore"),
|
||||
agilityScore: int("agilityScore"),
|
||||
enduranceScore: int("enduranceScore"),
|
||||
flexibilityScore: int("flexibilityScore"),
|
||||
coreStabilityScore: int("coreStabilityScore"),
|
||||
shoulderMobilityScore: int("shoulderMobilityScore"),
|
||||
hipMobilityScore: int("hipMobilityScore"),
|
||||
assessmentNotes: text("assessmentNotes"),
|
||||
/** Total training sessions completed */
|
||||
totalSessions: int("totalSessions").default(0),
|
||||
/** Total training minutes */
|
||||
@@ -215,6 +230,30 @@ export const liveAnalysisSessions = mysqlTable("live_analysis_sessions", {
|
||||
export type LiveAnalysisSession = typeof liveAnalysisSessions.$inferSelect;
|
||||
export type InsertLiveAnalysisSession = typeof liveAnalysisSessions.$inferInsert;
|
||||
|
||||
/**
|
||||
* Per-user runtime state for the current live-camera analysis lock.
|
||||
*/
|
||||
export const liveAnalysisRuntime = mysqlTable("live_analysis_runtime", {
|
||||
id: int("id").autoincrement().primaryKey(),
|
||||
userId: int("userId").notNull(),
|
||||
ownerSid: varchar("ownerSid", { length: 96 }),
|
||||
status: mysqlEnum("status", ["idle", "active", "ended"]).default("idle").notNull(),
|
||||
title: varchar("title", { length: 256 }),
|
||||
sessionMode: mysqlEnum("sessionMode", ["practice", "pk"]).default("practice").notNull(),
|
||||
mediaSessionId: varchar("mediaSessionId", { length: 96 }),
|
||||
startedAt: timestamp("startedAt"),
|
||||
endedAt: timestamp("endedAt"),
|
||||
lastHeartbeatAt: timestamp("lastHeartbeatAt"),
|
||||
snapshot: json("snapshot"),
|
||||
createdAt: timestamp("createdAt").defaultNow().notNull(),
|
||||
updatedAt: timestamp("updatedAt").defaultNow().onUpdateNow().notNull(),
|
||||
}, (table) => ({
|
||||
userIdUnique: uniqueIndex("live_analysis_runtime_user_idx").on(table.userId),
|
||||
}));
|
||||
|
||||
export type LiveAnalysisRuntime = typeof liveAnalysisRuntime.$inferSelect;
|
||||
export type InsertLiveAnalysisRuntime = typeof liveAnalysisRuntime.$inferInsert;
|
||||
|
||||
/**
|
||||
* Action segments extracted from a realtime analysis session.
|
||||
*/
|
||||
@@ -390,15 +429,34 @@ export type InsertUserAchievement = typeof userAchievements.$inferInsert;
|
||||
*/
|
||||
export const tutorialVideos = mysqlTable("tutorial_videos", {
|
||||
id: int("id").autoincrement().primaryKey(),
|
||||
slug: varchar("slug", { length: 128 }),
|
||||
title: varchar("title", { length: 256 }).notNull(),
|
||||
category: varchar("category", { length: 64 }).notNull(),
|
||||
skillLevel: mysqlEnum("skillLevel", ["beginner", "intermediate", "advanced"]).default("beginner"),
|
||||
topicArea: varchar("topicArea", { length: 32 }).default("tennis_skill"),
|
||||
contentFormat: varchar("contentFormat", { length: 16 }).default("video"),
|
||||
sourcePlatform: varchar("sourcePlatform", { length: 16 }).default("none"),
|
||||
description: text("description"),
|
||||
heroSummary: text("heroSummary"),
|
||||
keyPoints: json("keyPoints"),
|
||||
commonMistakes: json("commonMistakes"),
|
||||
videoUrl: text("videoUrl"),
|
||||
externalUrl: text("externalUrl"),
|
||||
platformVideoId: varchar("platformVideoId", { length: 64 }),
|
||||
thumbnailUrl: text("thumbnailUrl"),
|
||||
duration: int("duration"),
|
||||
estimatedEffortMinutes: int("estimatedEffortMinutes"),
|
||||
prerequisites: json("prerequisites"),
|
||||
learningObjectives: json("learningObjectives"),
|
||||
stepSections: json("stepSections"),
|
||||
deliverables: json("deliverables"),
|
||||
relatedDocPaths: json("relatedDocPaths"),
|
||||
viewCount: int("viewCount"),
|
||||
commentCount: int("commentCount"),
|
||||
metricsFetchedAt: timestamp("metricsFetchedAt"),
|
||||
completionAchievementKey: varchar("completionAchievementKey", { length: 64 }),
|
||||
isFeatured: int("isFeatured").default(0),
|
||||
featuredOrder: int("featuredOrder").default(0),
|
||||
sortOrder: int("sortOrder").default(0),
|
||||
isPublished: int("isPublished").default(1),
|
||||
createdAt: timestamp("createdAt").defaultNow().notNull(),
|
||||
@@ -416,6 +474,8 @@ export const tutorialProgress = mysqlTable("tutorial_progress", {
|
||||
userId: int("userId").notNull(),
|
||||
tutorialId: int("tutorialId").notNull(),
|
||||
watched: int("watched").default(0),
|
||||
completed: int("completed").default(0),
|
||||
completedAt: timestamp("completedAt"),
|
||||
comparisonVideoId: int("comparisonVideoId"),
|
||||
selfScore: float("selfScore"),
|
||||
notes: text("notes"),
|
||||
|
||||
731
media/main.go
@@ -53,6 +53,20 @@ const (
|
||||
PreviewFailed PreviewStatus = "failed"
|
||||
)
|
||||
|
||||
type SessionPurpose string
|
||||
|
||||
const (
|
||||
PurposeRecording SessionPurpose = "recording"
|
||||
PurposeRelay SessionPurpose = "relay"
|
||||
)
|
||||
|
||||
const (
|
||||
defaultRelayBufferSeconds = 120
|
||||
minRelayBufferSeconds = 10
|
||||
maxRelayBufferSeconds = 300
|
||||
relayCacheTTL = 30 * time.Minute
|
||||
)
|
||||
|
||||
type PlaybackInfo struct {
|
||||
WebMURL string `json:"webmUrl,omitempty"`
|
||||
MP4URL string `json:"mp4Url,omitempty"`
|
||||
@@ -81,32 +95,38 @@ type Marker struct {
|
||||
}
|
||||
|
||||
type Session struct {
|
||||
ID string `json:"id"`
|
||||
UserID string `json:"userId"`
|
||||
Title string `json:"title"`
|
||||
Status SessionStatus `json:"status"`
|
||||
ArchiveStatus ArchiveStatus `json:"archiveStatus"`
|
||||
PreviewStatus PreviewStatus `json:"previewStatus"`
|
||||
Format string `json:"format"`
|
||||
MimeType string `json:"mimeType"`
|
||||
QualityPreset string `json:"qualityPreset"`
|
||||
FacingMode string `json:"facingMode"`
|
||||
DeviceKind string `json:"deviceKind"`
|
||||
ReconnectCount int `json:"reconnectCount"`
|
||||
UploadedSegments int `json:"uploadedSegments"`
|
||||
UploadedBytes int64 `json:"uploadedBytes"`
|
||||
PreviewSegments int `json:"previewSegments"`
|
||||
DurationMS int64 `json:"durationMs"`
|
||||
LastError string `json:"lastError,omitempty"`
|
||||
CreatedAt string `json:"createdAt"`
|
||||
UpdatedAt string `json:"updatedAt"`
|
||||
FinalizedAt string `json:"finalizedAt,omitempty"`
|
||||
PreviewUpdatedAt string `json:"previewUpdatedAt,omitempty"`
|
||||
StreamConnected bool `json:"streamConnected"`
|
||||
LastStreamAt string `json:"lastStreamAt,omitempty"`
|
||||
Playback PlaybackInfo `json:"playback"`
|
||||
Segments []SegmentMeta `json:"segments"`
|
||||
Markers []Marker `json:"markers"`
|
||||
ID string `json:"id"`
|
||||
UserID string `json:"userId"`
|
||||
Title string `json:"title"`
|
||||
Purpose SessionPurpose `json:"purpose"`
|
||||
Status SessionStatus `json:"status"`
|
||||
ArchiveStatus ArchiveStatus `json:"archiveStatus"`
|
||||
PreviewStatus PreviewStatus `json:"previewStatus"`
|
||||
Format string `json:"format"`
|
||||
MimeType string `json:"mimeType"`
|
||||
QualityPreset string `json:"qualityPreset"`
|
||||
FacingMode string `json:"facingMode"`
|
||||
DeviceKind string `json:"deviceKind"`
|
||||
ReconnectCount int `json:"reconnectCount"`
|
||||
UploadedSegments int `json:"uploadedSegments"`
|
||||
UploadedBytes int64 `json:"uploadedBytes"`
|
||||
PreviewSegments int `json:"previewSegments"`
|
||||
DurationMS int64 `json:"durationMs"`
|
||||
RelayBufferSeconds int `json:"relayBufferSeconds"`
|
||||
LastError string `json:"lastError,omitempty"`
|
||||
CreatedAt string `json:"createdAt"`
|
||||
UpdatedAt string `json:"updatedAt"`
|
||||
FinalizedAt string `json:"finalizedAt,omitempty"`
|
||||
PreviewUpdatedAt string `json:"previewUpdatedAt,omitempty"`
|
||||
RelayInitFilename string `json:"relayInitFilename,omitempty"`
|
||||
StreamConnected bool `json:"streamConnected"`
|
||||
LastStreamAt string `json:"lastStreamAt,omitempty"`
|
||||
ViewerCount int `json:"viewerCount"`
|
||||
LiveFrameURL string `json:"liveFrameUrl,omitempty"`
|
||||
LiveFrameUpdated string `json:"liveFrameUpdatedAt,omitempty"`
|
||||
Playback PlaybackInfo `json:"playback"`
|
||||
Segments []SegmentMeta `json:"segments"`
|
||||
Markers []Marker `json:"markers"`
|
||||
}
|
||||
|
||||
func (s *Session) recomputeAggregates() {
|
||||
@@ -124,13 +144,15 @@ func (s *Session) recomputeAggregates() {
|
||||
}
|
||||
|
||||
type CreateSessionRequest struct {
|
||||
UserID string `json:"userId"`
|
||||
Title string `json:"title"`
|
||||
Format string `json:"format"`
|
||||
MimeType string `json:"mimeType"`
|
||||
QualityPreset string `json:"qualityPreset"`
|
||||
FacingMode string `json:"facingMode"`
|
||||
DeviceKind string `json:"deviceKind"`
|
||||
UserID string `json:"userId"`
|
||||
Title string `json:"title"`
|
||||
Format string `json:"format"`
|
||||
MimeType string `json:"mimeType"`
|
||||
QualityPreset string `json:"qualityPreset"`
|
||||
FacingMode string `json:"facingMode"`
|
||||
DeviceKind string `json:"deviceKind"`
|
||||
Purpose string `json:"purpose"`
|
||||
RelayBufferSeconds int `json:"relayBufferSeconds"`
|
||||
}
|
||||
|
||||
type SignalRequest struct {
|
||||
@@ -151,19 +173,23 @@ type FinalizeRequest struct {
|
||||
}
|
||||
|
||||
type sessionStore struct {
|
||||
rootDir string
|
||||
public string
|
||||
mu sync.RWMutex
|
||||
sessions map[string]*Session
|
||||
peers map[string]*webrtc.PeerConnection
|
||||
rootDir string
|
||||
public string
|
||||
mu sync.RWMutex
|
||||
sessions map[string]*Session
|
||||
peers map[string]*webrtc.PeerConnection
|
||||
viewerPeers map[string]map[string]*webrtc.PeerConnection
|
||||
videoTracks map[string]*webrtc.TrackLocalStaticRTP
|
||||
}
|
||||
|
||||
func newSessionStore(rootDir string) (*sessionStore, error) {
|
||||
store := &sessionStore{
|
||||
rootDir: rootDir,
|
||||
public: filepath.Join(rootDir, "public"),
|
||||
sessions: map[string]*Session{},
|
||||
peers: map[string]*webrtc.PeerConnection{},
|
||||
rootDir: rootDir,
|
||||
public: filepath.Join(rootDir, "public"),
|
||||
sessions: map[string]*Session{},
|
||||
peers: map[string]*webrtc.PeerConnection{},
|
||||
viewerPeers: map[string]map[string]*webrtc.PeerConnection{},
|
||||
videoTracks: map[string]*webrtc.TrackLocalStaticRTP{},
|
||||
}
|
||||
if err := os.MkdirAll(filepath.Join(rootDir, "sessions"), 0o755); err != nil {
|
||||
return nil, err
|
||||
@@ -206,6 +232,15 @@ func (s *sessionStore) refreshFromDisk() error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, session := range sessions {
|
||||
if session.Purpose == "" {
|
||||
session.Purpose = PurposeRecording
|
||||
}
|
||||
if session.Purpose == PurposeRelay {
|
||||
session.RelayBufferSeconds = normalizeRelayBufferSeconds(session.RelayBufferSeconds)
|
||||
}
|
||||
session.recomputeAggregates()
|
||||
}
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
s.sessions = sessions
|
||||
@@ -224,6 +259,18 @@ func (s *sessionStore) publicDir(id string) string {
|
||||
return filepath.Join(s.public, "sessions", id)
|
||||
}
|
||||
|
||||
func (s *sessionStore) relayInitPath(id string) string {
|
||||
return filepath.Join(s.sessionDir(id), "relay-init.mp4")
|
||||
}
|
||||
|
||||
func (s *sessionStore) liveFramePath(id string) string {
|
||||
return filepath.Join(s.publicDir(id), "live-frame.jpg")
|
||||
}
|
||||
|
||||
func (s *sessionStore) liveFrameURL(id string) string {
|
||||
return fmt.Sprintf("/media/assets/sessions/%s/live-frame.jpg", id)
|
||||
}
|
||||
|
||||
func (s *sessionStore) saveSession(session *Session) error {
|
||||
session.UpdatedAt = time.Now().UTC().Format(time.RFC3339)
|
||||
dir := s.sessionDir(session.ID)
|
||||
@@ -246,22 +293,29 @@ func cloneSession(session *Session) *Session {
|
||||
|
||||
func (s *sessionStore) createSession(input CreateSessionRequest) (*Session, error) {
|
||||
now := time.Now().UTC().Format(time.RFC3339)
|
||||
purpose := SessionPurpose(defaultString(input.Purpose, string(PurposeRecording)))
|
||||
relayBufferSeconds := 0
|
||||
if purpose == PurposeRelay {
|
||||
relayBufferSeconds = normalizeRelayBufferSeconds(input.RelayBufferSeconds)
|
||||
}
|
||||
session := &Session{
|
||||
ID: randomID(),
|
||||
UserID: strings.TrimSpace(input.UserID),
|
||||
Title: strings.TrimSpace(input.Title),
|
||||
Status: StatusCreated,
|
||||
ArchiveStatus: ArchiveIdle,
|
||||
PreviewStatus: PreviewIdle,
|
||||
Format: defaultString(input.Format, "webm"),
|
||||
MimeType: defaultString(input.MimeType, "video/webm"),
|
||||
QualityPreset: defaultString(input.QualityPreset, "balanced"),
|
||||
FacingMode: defaultString(input.FacingMode, "environment"),
|
||||
DeviceKind: defaultString(input.DeviceKind, "desktop"),
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
Segments: []SegmentMeta{},
|
||||
Markers: []Marker{},
|
||||
ID: randomID(),
|
||||
UserID: strings.TrimSpace(input.UserID),
|
||||
Title: strings.TrimSpace(input.Title),
|
||||
Purpose: purpose,
|
||||
Status: StatusCreated,
|
||||
ArchiveStatus: ArchiveIdle,
|
||||
PreviewStatus: PreviewIdle,
|
||||
Format: defaultString(input.Format, "webm"),
|
||||
MimeType: defaultString(input.MimeType, "video/webm"),
|
||||
QualityPreset: defaultString(input.QualityPreset, "balanced"),
|
||||
FacingMode: defaultString(input.FacingMode, "environment"),
|
||||
DeviceKind: defaultString(input.DeviceKind, "desktop"),
|
||||
RelayBufferSeconds: relayBufferSeconds,
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
Segments: []SegmentMeta{},
|
||||
Markers: []Marker{},
|
||||
}
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
@@ -275,6 +329,123 @@ func (s *sessionStore) createSession(input CreateSessionRequest) (*Session, erro
|
||||
return cloneSession(session), nil
|
||||
}
|
||||
|
||||
func normalizeRelayBufferSeconds(value int) int {
|
||||
if value <= 0 {
|
||||
return defaultRelayBufferSeconds
|
||||
}
|
||||
if value < minRelayBufferSeconds {
|
||||
return minRelayBufferSeconds
|
||||
}
|
||||
if value > maxRelayBufferSeconds {
|
||||
return maxRelayBufferSeconds
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
func relayPreviewWindowForSession(session *Session) time.Duration {
|
||||
return time.Duration(normalizeRelayBufferSeconds(session.RelayBufferSeconds)) * time.Second
|
||||
}
|
||||
|
||||
func parseSessionTime(values ...string) time.Time {
|
||||
for _, value := range values {
|
||||
if strings.TrimSpace(value) == "" {
|
||||
continue
|
||||
}
|
||||
if parsed, err := time.Parse(time.RFC3339, value); err == nil {
|
||||
return parsed
|
||||
}
|
||||
}
|
||||
return time.Time{}
|
||||
}
|
||||
|
||||
func sortSegmentsBySequence(segments []SegmentMeta) {
|
||||
sort.Slice(segments, func(i, j int) bool {
|
||||
return segments[i].Sequence < segments[j].Sequence
|
||||
})
|
||||
}
|
||||
|
||||
func maxInt64(value int64, minimum int64) int64 {
|
||||
if value < minimum {
|
||||
return minimum
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
func trimSegmentsToDuration(segments []SegmentMeta, maxDuration time.Duration) (kept []SegmentMeta, removed []SegmentMeta) {
|
||||
if len(segments) == 0 {
|
||||
return []SegmentMeta{}, []SegmentMeta{}
|
||||
}
|
||||
|
||||
limitMS := maxDuration.Milliseconds()
|
||||
total := int64(0)
|
||||
startIndex := len(segments) - 1
|
||||
|
||||
for index := len(segments) - 1; index >= 0; index-- {
|
||||
total += maxInt64(segments[index].DurationMS, 1)
|
||||
startIndex = index
|
||||
if total >= limitMS {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
kept = append([]SegmentMeta(nil), segments[startIndex:]...)
|
||||
removed = append([]SegmentMeta(nil), segments[:startIndex]...)
|
||||
return kept, removed
|
||||
}
|
||||
|
||||
func sessionNeedsPreview(session *Session) bool {
|
||||
if len(session.Segments) == 0 {
|
||||
return false
|
||||
}
|
||||
if session.PreviewStatus == PreviewProcessing {
|
||||
return false
|
||||
}
|
||||
if session.PreviewStatus != PreviewReady || session.PreviewSegments < len(session.Segments) {
|
||||
return true
|
||||
}
|
||||
|
||||
previewUpdatedAt := parseSessionTime(session.PreviewUpdatedAt)
|
||||
if previewUpdatedAt.IsZero() {
|
||||
return true
|
||||
}
|
||||
|
||||
for _, segment := range session.Segments {
|
||||
uploadedAt := parseSessionTime(segment.UploadedAt)
|
||||
if !uploadedAt.IsZero() && uploadedAt.After(previewUpdatedAt) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (s *sessionStore) pruneExpiredRelaySessions(maxAge time.Duration, now time.Time) error {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
|
||||
for id, session := range s.sessions {
|
||||
if session.Purpose != PurposeRelay {
|
||||
continue
|
||||
}
|
||||
lastActivity := parseSessionTime(session.UpdatedAt, session.LastStreamAt, session.LiveFrameUpdated, session.CreatedAt)
|
||||
if lastActivity.IsZero() || now.Sub(lastActivity) < maxAge {
|
||||
continue
|
||||
}
|
||||
delete(s.sessions, id)
|
||||
delete(s.peers, id)
|
||||
delete(s.viewerPeers, id)
|
||||
delete(s.videoTracks, id)
|
||||
if err := os.RemoveAll(s.sessionDir(id)); err != nil && !errors.Is(err, os.ErrNotExist) {
|
||||
return err
|
||||
}
|
||||
if err := os.RemoveAll(s.publicDir(id)); err != nil && !errors.Is(err, os.ErrNotExist) {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *sessionStore) getSession(id string) (*Session, error) {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
@@ -294,6 +465,42 @@ func (s *sessionStore) replacePeer(id string, peer *webrtc.PeerConnection) {
|
||||
s.peers[id] = peer
|
||||
}
|
||||
|
||||
func (s *sessionStore) replaceViewerPeer(sessionID string, viewerID string, peer *webrtc.PeerConnection) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if _, ok := s.viewerPeers[sessionID]; !ok {
|
||||
s.viewerPeers[sessionID] = map[string]*webrtc.PeerConnection{}
|
||||
}
|
||||
if existing, ok := s.viewerPeers[sessionID][viewerID]; ok {
|
||||
_ = existing.Close()
|
||||
}
|
||||
s.viewerPeers[sessionID][viewerID] = peer
|
||||
if session, ok := s.sessions[sessionID]; ok {
|
||||
session.ViewerCount = len(s.viewerPeers[sessionID])
|
||||
_ = s.saveSession(session)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *sessionStore) removeViewerPeer(sessionID string, viewerID string) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
viewers, ok := s.viewerPeers[sessionID]
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
if existing, ok := viewers[viewerID]; ok {
|
||||
_ = existing.Close()
|
||||
delete(viewers, viewerID)
|
||||
}
|
||||
if len(viewers) == 0 {
|
||||
delete(s.viewerPeers, sessionID)
|
||||
}
|
||||
if session, ok := s.sessions[sessionID]; ok {
|
||||
session.ViewerCount = len(s.viewerPeers[sessionID])
|
||||
_ = s.saveSession(session)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *sessionStore) closePeer(id string) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
@@ -301,6 +508,38 @@ func (s *sessionStore) closePeer(id string) {
|
||||
_ = existing.Close()
|
||||
delete(s.peers, id)
|
||||
}
|
||||
if viewers, ok := s.viewerPeers[id]; ok {
|
||||
for viewerID, peer := range viewers {
|
||||
_ = peer.Close()
|
||||
delete(viewers, viewerID)
|
||||
}
|
||||
delete(s.viewerPeers, id)
|
||||
}
|
||||
delete(s.videoTracks, id)
|
||||
if session, ok := s.sessions[id]; ok {
|
||||
session.ViewerCount = 0
|
||||
_ = s.saveSession(session)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *sessionStore) getVideoTrack(sessionID string) *webrtc.TrackLocalStaticRTP {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
return s.videoTracks[sessionID]
|
||||
}
|
||||
|
||||
func (s *sessionStore) ensureVideoTrack(sessionID string, codec webrtc.RTPCodecCapability) (*webrtc.TrackLocalStaticRTP, error) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if track, ok := s.videoTracks[sessionID]; ok {
|
||||
return track, nil
|
||||
}
|
||||
track, err := webrtc.NewTrackLocalStaticRTP(codec, "video", fmt.Sprintf("livecam-%s", sessionID))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
s.videoTracks[sessionID] = track
|
||||
return track, nil
|
||||
}
|
||||
|
||||
func (s *sessionStore) updateSession(id string, update func(*Session) error) (*Session, error) {
|
||||
@@ -332,7 +571,7 @@ func (s *sessionStore) listProcessableSessions() []*Session {
|
||||
items = append(items, cloneSession(session))
|
||||
continue
|
||||
}
|
||||
if session.PreviewSegments < len(session.Segments) && session.PreviewStatus != PreviewProcessing {
|
||||
if sessionNeedsPreview(session) {
|
||||
items = append(items, cloneSession(session))
|
||||
}
|
||||
}
|
||||
@@ -419,12 +658,24 @@ func (m *mediaServer) handleSession(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
m.handleSignal(sessionID, w, r)
|
||||
case "viewer-signal":
|
||||
if r.Method != http.MethodPost {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
m.handleViewerSignal(sessionID, w, r)
|
||||
case "segments":
|
||||
if r.Method != http.MethodPost {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
m.handleSegmentUpload(sessionID, w, r)
|
||||
case "live-frame":
|
||||
if r.Method != http.MethodPost {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
m.handleLiveFrameUpload(sessionID, w, r)
|
||||
case "markers":
|
||||
if r.Method != http.MethodPost {
|
||||
http.NotFound(w, r)
|
||||
@@ -509,12 +760,23 @@ func (m *mediaServer) handleSignal(sessionID string, w http.ResponseWriter, r *h
|
||||
|
||||
peer.OnTrack(func(track *webrtc.TrackRemote, receiver *webrtc.RTPReceiver) {
|
||||
_ = receiver
|
||||
if track.Kind() != webrtc.RTPCodecTypeVideo {
|
||||
return
|
||||
}
|
||||
localTrack, trackErr := m.store.ensureVideoTrack(sessionID, track.Codec().RTPCodecCapability)
|
||||
if trackErr != nil {
|
||||
log.Printf("failed to create local viewer track for session %s: %v", sessionID, trackErr)
|
||||
return
|
||||
}
|
||||
go func() {
|
||||
buffer := make([]byte, 1600)
|
||||
for {
|
||||
if _, _, readErr := track.Read(buffer); readErr != nil {
|
||||
packet, _, readErr := track.ReadRTP()
|
||||
if readErr != nil {
|
||||
return
|
||||
}
|
||||
if writeErr := localTrack.WriteRTP(packet); writeErr != nil && !errors.Is(writeErr, io.ErrClosedPipe) {
|
||||
log.Printf("failed to fan out RTP packet for session %s: %v", sessionID, writeErr)
|
||||
}
|
||||
_, _ = m.store.updateSession(sessionID, func(session *Session) error {
|
||||
session.StreamConnected = true
|
||||
session.Status = StatusStreaming
|
||||
@@ -556,6 +818,139 @@ func (m *mediaServer) handleSignal(sessionID string, w http.ResponseWriter, r *h
|
||||
})
|
||||
}
|
||||
|
||||
func (m *mediaServer) handleViewerSignal(sessionID string, w http.ResponseWriter, r *http.Request) {
|
||||
var input SignalRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "invalid request body")
|
||||
return
|
||||
}
|
||||
if _, err := m.store.getSession(sessionID); err != nil {
|
||||
writeError(w, http.StatusNotFound, err.Error())
|
||||
return
|
||||
}
|
||||
localTrack := m.store.getVideoTrack(sessionID)
|
||||
if localTrack == nil {
|
||||
writeError(w, http.StatusConflict, "viewer stream not ready")
|
||||
return
|
||||
}
|
||||
|
||||
config := webrtc.Configuration{
|
||||
ICEServers: []webrtc.ICEServer{{URLs: []string{"stun:stun.l.google.com:19302"}}},
|
||||
}
|
||||
peer, err := webrtc.NewPeerConnection(config)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to create viewer peer connection")
|
||||
return
|
||||
}
|
||||
viewerID := randomID()
|
||||
m.store.replaceViewerPeer(sessionID, viewerID, peer)
|
||||
|
||||
sender, err := peer.AddTrack(localTrack)
|
||||
if err != nil {
|
||||
m.store.removeViewerPeer(sessionID, viewerID)
|
||||
writeError(w, http.StatusInternalServerError, "failed to add viewer track")
|
||||
return
|
||||
}
|
||||
go func() {
|
||||
rtcpBuf := make([]byte, 1500)
|
||||
for {
|
||||
if _, _, readErr := sender.Read(rtcpBuf); readErr != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
peer.OnConnectionStateChange(func(state webrtc.PeerConnectionState) {
|
||||
switch state {
|
||||
case webrtc.PeerConnectionStateDisconnected, webrtc.PeerConnectionStateFailed, webrtc.PeerConnectionStateClosed:
|
||||
m.store.removeViewerPeer(sessionID, viewerID)
|
||||
}
|
||||
})
|
||||
|
||||
offer := webrtc.SessionDescription{
|
||||
Type: parseSDPType(input.Type),
|
||||
SDP: input.SDP,
|
||||
}
|
||||
if err := peer.SetRemoteDescription(offer); err != nil {
|
||||
m.store.removeViewerPeer(sessionID, viewerID)
|
||||
writeError(w, http.StatusBadRequest, "failed to set remote description")
|
||||
return
|
||||
}
|
||||
|
||||
answer, err := peer.CreateAnswer(nil)
|
||||
if err != nil {
|
||||
m.store.removeViewerPeer(sessionID, viewerID)
|
||||
writeError(w, http.StatusInternalServerError, "failed to create viewer answer")
|
||||
return
|
||||
}
|
||||
gatherComplete := webrtc.GatheringCompletePromise(peer)
|
||||
if err := peer.SetLocalDescription(answer); err != nil {
|
||||
m.store.removeViewerPeer(sessionID, viewerID)
|
||||
writeError(w, http.StatusInternalServerError, "failed to set viewer local description")
|
||||
return
|
||||
}
|
||||
<-gatherComplete
|
||||
|
||||
writeJSON(w, http.StatusOK, map[string]any{
|
||||
"viewerId": viewerID,
|
||||
"type": strings.ToLower(peer.LocalDescription().Type.String()),
|
||||
"sdp": peer.LocalDescription().SDP,
|
||||
})
|
||||
}
|
||||
|
||||
func (m *mediaServer) handleLiveFrameUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
|
||||
if _, err := m.store.getSession(sessionID); err != nil {
|
||||
writeError(w, http.StatusNotFound, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
body := http.MaxBytesReader(w, r.Body, 4<<20)
|
||||
defer body.Close()
|
||||
|
||||
frame, err := io.ReadAll(body)
|
||||
if err != nil || len(frame) == 0 {
|
||||
writeError(w, http.StatusBadRequest, "invalid live frame payload")
|
||||
return
|
||||
}
|
||||
|
||||
publicDir := m.store.publicDir(sessionID)
|
||||
if err := os.MkdirAll(publicDir, 0o755); err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to create live frame directory")
|
||||
return
|
||||
}
|
||||
|
||||
tmpFile := filepath.Join(publicDir, fmt.Sprintf("live-frame-%s.tmp", randomID()))
|
||||
if err := os.WriteFile(tmpFile, frame, 0o644); err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to write live frame")
|
||||
return
|
||||
}
|
||||
defer os.Remove(tmpFile)
|
||||
|
||||
finalFile := m.store.liveFramePath(sessionID)
|
||||
if err := os.Rename(tmpFile, finalFile); err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to publish live frame")
|
||||
return
|
||||
}
|
||||
|
||||
session, err := m.store.updateSession(sessionID, func(session *Session) error {
|
||||
session.LiveFrameURL = m.store.liveFrameURL(sessionID)
|
||||
session.LiveFrameUpdated = time.Now().UTC().Format(time.RFC3339)
|
||||
session.StreamConnected = true
|
||||
session.LastStreamAt = session.LiveFrameUpdated
|
||||
if session.Status == StatusCreated || session.Status == StatusReconnecting {
|
||||
session.Status = StatusStreaming
|
||||
}
|
||||
session.LastError = ""
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "failed to update live frame session state")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusAccepted, map[string]any{"session": session})
|
||||
}
|
||||
|
||||
func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWriter, r *http.Request) {
|
||||
sequence, err := strconv.Atoi(r.URL.Query().Get("sequence"))
|
||||
if err != nil || sequence < 0 {
|
||||
@@ -583,6 +978,8 @@ func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWrite
|
||||
return
|
||||
}
|
||||
|
||||
removedSegments := []SegmentMeta{}
|
||||
persistRelayInit := false
|
||||
session, err := m.store.updateSession(sessionID, func(session *Session) error {
|
||||
meta := SegmentMeta{
|
||||
Sequence: sequence,
|
||||
@@ -603,9 +1000,16 @@ func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWrite
|
||||
if !found {
|
||||
session.Segments = append(session.Segments, meta)
|
||||
}
|
||||
sort.Slice(session.Segments, func(i, j int) bool {
|
||||
return session.Segments[i].Sequence < session.Segments[j].Sequence
|
||||
})
|
||||
if session.Purpose == PurposeRelay && extension == "mp4" && session.RelayInitFilename == "" && sequence <= 1 {
|
||||
session.RelayInitFilename = filename
|
||||
persistRelayInit = true
|
||||
}
|
||||
sortSegmentsBySequence(session.Segments)
|
||||
if session.Purpose == PurposeRelay {
|
||||
var kept []SegmentMeta
|
||||
kept, removedSegments = trimSegmentsToDuration(session.Segments, relayPreviewWindowForSession(session))
|
||||
session.Segments = kept
|
||||
}
|
||||
session.Status = StatusRecording
|
||||
session.LastError = ""
|
||||
return nil
|
||||
@@ -614,6 +1018,17 @@ func (m *mediaServer) handleSegmentUpload(sessionID string, w http.ResponseWrite
|
||||
writeError(w, http.StatusNotFound, err.Error())
|
||||
return
|
||||
}
|
||||
if persistRelayInit {
|
||||
if copyErr := copyFile(segmentPath, m.store.relayInitPath(sessionID)); copyErr != nil {
|
||||
log.Printf("failed to persist relay init segment for %s: %v", sessionID, copyErr)
|
||||
}
|
||||
}
|
||||
for _, segment := range removedSegments {
|
||||
segmentPath := filepath.Join(m.store.segmentsDir(sessionID), segment.Filename)
|
||||
if removeErr := os.Remove(segmentPath); removeErr != nil && !errors.Is(removeErr, os.ErrNotExist) {
|
||||
log.Printf("failed to remove pruned relay segment %s: %v", segmentPath, removeErr)
|
||||
}
|
||||
}
|
||||
writeJSON(w, http.StatusAccepted, map[string]any{"session": session})
|
||||
}
|
||||
|
||||
@@ -680,6 +1095,9 @@ func runWorkerLoop(ctx context.Context, store *sessionStore, interval time.Durat
|
||||
log.Printf("[worker] failed to refresh session store: %v", err)
|
||||
continue
|
||||
}
|
||||
if err := store.pruneExpiredRelaySessions(relayCacheTTL, time.Now().UTC()); err != nil {
|
||||
log.Printf("[worker] failed to prune relay cache: %v", err)
|
||||
}
|
||||
sessions := store.listProcessableSessions()
|
||||
for _, session := range sessions {
|
||||
if err := processSession(store, session.ID); err != nil {
|
||||
@@ -700,7 +1118,7 @@ func processSession(store *sessionStore, sessionID string) error {
|
||||
return processFinalArchive(store, sessionID)
|
||||
}
|
||||
|
||||
if current.PreviewSegments < len(current.Segments) {
|
||||
if sessionNeedsPreview(current) {
|
||||
return processRollingPreview(store, sessionID)
|
||||
}
|
||||
|
||||
@@ -770,38 +1188,86 @@ func buildPlaybackArtifacts(store *sessionStore, session *Session, finalize bool
|
||||
outputMP4 := filepath.Join(publicDir, baseName+".mp4")
|
||||
listFile := filepath.Join(store.sessionDir(sessionID), "concat.txt")
|
||||
|
||||
validSegments := make([]SegmentMeta, 0, len(session.Segments))
|
||||
inputs := make([]string, 0, len(session.Segments))
|
||||
sort.Slice(session.Segments, func(i, j int) bool {
|
||||
return session.Segments[i].Sequence < session.Segments[j].Sequence
|
||||
})
|
||||
sortSegmentsBySequence(session.Segments)
|
||||
for _, segment := range session.Segments {
|
||||
inputs = append(inputs, filepath.Join(store.segmentsDir(sessionID), segment.Filename))
|
||||
}
|
||||
if err := writeConcatList(listFile, inputs); err != nil {
|
||||
return markProcessingError(store, sessionID, err, finalize)
|
||||
}
|
||||
|
||||
if len(inputs) == 1 {
|
||||
body, copyErr := os.ReadFile(inputs[0])
|
||||
if copyErr != nil {
|
||||
return markProcessingError(store, sessionID, copyErr, finalize)
|
||||
inputPath := filepath.Join(store.segmentsDir(sessionID), segment.Filename)
|
||||
info, statErr := os.Stat(inputPath)
|
||||
if statErr != nil {
|
||||
continue
|
||||
}
|
||||
if writeErr := os.WriteFile(outputWebM, body, 0o644); writeErr != nil {
|
||||
return markProcessingError(store, sessionID, writeErr, finalize)
|
||||
if shouldSkipSegment(segment, info.Size()) {
|
||||
continue
|
||||
}
|
||||
validSegments = append(validSegments, segment)
|
||||
inputs = append(inputs, inputPath)
|
||||
}
|
||||
if len(inputs) == 0 {
|
||||
return markProcessingError(store, sessionID, errors.New("no valid uploaded segments found"), finalize)
|
||||
}
|
||||
if !finalize && session.Purpose == PurposeRelay && usesMP4Segments(validSegments) {
|
||||
mergedInput, cleanup, mergeErr := buildRelayMP4Source(store, session, validSegments, inputs)
|
||||
if cleanup != nil {
|
||||
defer cleanup()
|
||||
}
|
||||
if mergeErr == nil {
|
||||
transcodeErr := runFFmpeg(
|
||||
"-y",
|
||||
"-i",
|
||||
mergedInput,
|
||||
"-c:v",
|
||||
"libvpx-vp9",
|
||||
"-b:v",
|
||||
"1800k",
|
||||
"-c:a",
|
||||
"libopus",
|
||||
outputWebM,
|
||||
)
|
||||
if transcodeErr == nil {
|
||||
goto finalizePlayback
|
||||
}
|
||||
mergeErr = transcodeErr
|
||||
}
|
||||
if err := writeConcatList(listFile, inputs); err != nil {
|
||||
return markProcessingError(store, sessionID, err, finalize)
|
||||
}
|
||||
} else {
|
||||
copyErr := runFFmpeg("-y", "-f", "concat", "-safe", "0", "-i", listFile, "-c", "copy", outputWebM)
|
||||
if copyErr != nil {
|
||||
reencodeErr := runFFmpeg("-y", "-f", "concat", "-safe", "0", "-i", listFile, "-c:v", "libvpx-vp9", "-b:v", "1800k", "-c:a", "libopus", outputWebM)
|
||||
if reencodeErr != nil {
|
||||
return markProcessingError(store, sessionID, fmt.Errorf("concat failed: %w / %v", copyErr, reencodeErr), finalize)
|
||||
return markProcessingError(store, sessionID, fmt.Errorf("relay mp4 preview failed: %w / %v / %v", mergeErr, copyErr, reencodeErr), finalize)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if err := writeConcatList(listFile, inputs); err != nil {
|
||||
return markProcessingError(store, sessionID, err, finalize)
|
||||
}
|
||||
if len(inputs) == 1 {
|
||||
body, copyErr := os.ReadFile(inputs[0])
|
||||
if copyErr != nil {
|
||||
return markProcessingError(store, sessionID, copyErr, finalize)
|
||||
}
|
||||
if writeErr := os.WriteFile(outputWebM, body, 0o644); writeErr != nil {
|
||||
return markProcessingError(store, sessionID, writeErr, finalize)
|
||||
}
|
||||
} else {
|
||||
copyErr := runFFmpeg("-y", "-f", "concat", "-safe", "0", "-i", listFile, "-c", "copy", outputWebM)
|
||||
if copyErr != nil {
|
||||
reencodeErr := runFFmpeg("-y", "-f", "concat", "-safe", "0", "-i", listFile, "-c:v", "libvpx-vp9", "-b:v", "1800k", "-c:a", "libopus", outputWebM)
|
||||
if reencodeErr != nil {
|
||||
return markProcessingError(store, sessionID, fmt.Errorf("concat failed: %w / %v", copyErr, reencodeErr), finalize)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mp4Err := runFFmpeg("-y", "-i", outputWebM, "-c:v", "libx264", "-preset", "veryfast", "-crf", "28", "-c:a", "aac", "-movflags", "+faststart", outputMP4)
|
||||
if mp4Err != nil {
|
||||
log.Printf("[worker] mp4 archive generation failed for %s: %v", sessionID, mp4Err)
|
||||
finalizePlayback:
|
||||
if finalize {
|
||||
mp4Err := runFFmpeg("-y", "-i", outputWebM, "-c:v", "libx264", "-preset", "veryfast", "-crf", "28", "-c:a", "aac", "-movflags", "+faststart", outputMP4)
|
||||
if mp4Err != nil {
|
||||
log.Printf("[worker] mp4 archive generation failed for %s: %v", sessionID, mp4Err)
|
||||
}
|
||||
}
|
||||
|
||||
webmInfo, webmStatErr := os.Stat(outputWebM)
|
||||
@@ -810,18 +1276,20 @@ func buildPlaybackArtifacts(store *sessionStore, session *Session, finalize bool
|
||||
}
|
||||
var mp4Size int64
|
||||
var mp4URL string
|
||||
if info, statErr := os.Stat(outputMP4); statErr == nil {
|
||||
mp4Size = info.Size()
|
||||
mp4URL = fmt.Sprintf("/media/assets/sessions/%s/recording.mp4", sessionID)
|
||||
}
|
||||
previewURL := fmt.Sprintf("/media/assets/sessions/%s/%s.webm", sessionID, baseName)
|
||||
if mp4URL != "" {
|
||||
previewURL = mp4URL
|
||||
if finalize {
|
||||
if info, statErr := os.Stat(outputMP4); statErr == nil {
|
||||
mp4Size = info.Size()
|
||||
mp4URL = fmt.Sprintf("/media/assets/sessions/%s/recording.mp4", sessionID)
|
||||
}
|
||||
if mp4URL != "" {
|
||||
previewURL = mp4URL
|
||||
}
|
||||
}
|
||||
|
||||
_, updateErr := store.updateSession(sessionID, func(session *Session) error {
|
||||
session.Playback.PreviewURL = previewURL
|
||||
session.PreviewSegments = len(inputs)
|
||||
session.PreviewSegments = len(validSegments)
|
||||
session.PreviewUpdatedAt = time.Now().UTC().Format(time.RFC3339)
|
||||
session.PreviewStatus = PreviewReady
|
||||
session.LastError = ""
|
||||
@@ -844,6 +1312,15 @@ func buildPlaybackArtifacts(store *sessionStore, session *Session, finalize bool
|
||||
|
||||
func markProcessingError(store *sessionStore, sessionID string, err error, finalize bool) error {
|
||||
_, _ = store.updateSession(sessionID, func(session *Session) error {
|
||||
if !finalize {
|
||||
previewPath := filepath.Join(store.publicDir(sessionID), "preview.webm")
|
||||
if info, statErr := os.Stat(previewPath); statErr == nil && info.Size() > 0 {
|
||||
session.PreviewStatus = PreviewReady
|
||||
session.Playback.PreviewURL = fmt.Sprintf("/media/assets/sessions/%s/preview.webm", sessionID)
|
||||
session.LastError = err.Error()
|
||||
return nil
|
||||
}
|
||||
}
|
||||
session.PreviewStatus = PreviewFailed
|
||||
if finalize {
|
||||
session.ArchiveStatus = ArchiveFailed
|
||||
@@ -863,6 +1340,78 @@ func writeConcatList(path string, inputs []string) error {
|
||||
return os.WriteFile(path, []byte(strings.Join(lines, "\n")), 0o644)
|
||||
}
|
||||
|
||||
func usesMP4Segments(segments []SegmentMeta) bool {
|
||||
for _, segment := range segments {
|
||||
if strings.HasSuffix(strings.ToLower(segment.Filename), ".mp4") || strings.Contains(strings.ToLower(segment.ContentType), "mp4") {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func shouldSkipSegment(segment SegmentMeta, sizeBytes int64) bool {
|
||||
if sizeBytes <= 0 {
|
||||
return true
|
||||
}
|
||||
if strings.HasSuffix(strings.ToLower(segment.Filename), ".mp4") && sizeBytes < 4096 {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func buildRelayMP4Source(store *sessionStore, session *Session, segments []SegmentMeta, inputs []string) (string, func(), error) {
|
||||
sourceFiles := make([]string, 0, len(inputs)+1)
|
||||
initPath := store.relayInitPath(session.ID)
|
||||
if session.RelayInitFilename != "" && len(segments) > 0 && segments[0].Filename != session.RelayInitFilename {
|
||||
if info, err := os.Stat(initPath); err == nil && info.Size() > 0 {
|
||||
sourceFiles = append(sourceFiles, initPath)
|
||||
}
|
||||
}
|
||||
sourceFiles = append(sourceFiles, inputs...)
|
||||
if len(sourceFiles) == 0 {
|
||||
return "", nil, errors.New("no relay mp4 source segments found")
|
||||
}
|
||||
mergedPath := filepath.Join(store.sessionDir(session.ID), "relay-preview-source.mp4")
|
||||
output, err := os.Create(mergedPath)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
defer output.Close()
|
||||
for _, source := range sourceFiles {
|
||||
input, openErr := os.Open(source)
|
||||
if openErr != nil {
|
||||
return "", nil, openErr
|
||||
}
|
||||
if _, copyErr := io.Copy(output, input); copyErr != nil {
|
||||
input.Close()
|
||||
return "", nil, copyErr
|
||||
}
|
||||
if closeErr := input.Close(); closeErr != nil {
|
||||
return "", nil, closeErr
|
||||
}
|
||||
}
|
||||
return mergedPath, func() {
|
||||
_ = os.Remove(mergedPath)
|
||||
}, nil
|
||||
}
|
||||
|
||||
func copyFile(source string, target string) error {
|
||||
input, err := os.Open(source)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer input.Close()
|
||||
output, err := os.Create(target)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer output.Close()
|
||||
if _, err := io.Copy(output, input); err != nil {
|
||||
return err
|
||||
}
|
||||
return output.Close()
|
||||
}
|
||||
|
||||
func runFFmpeg(args ...string) error {
|
||||
cmd := exec.Command("ffmpeg", args...)
|
||||
output, err := cmd.CombinedOutput()
|
||||
|
||||
@@ -2,12 +2,16 @@ package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestMediaHealthAndSessionLifecycle(t *testing.T) {
|
||||
@@ -256,3 +260,374 @@ func TestHandleSessionGetRefreshesSessionStateFromDisk(t *testing.T) {
|
||||
t.Fatalf("expected playback ready after refresh")
|
||||
}
|
||||
}
|
||||
|
||||
func TestViewerSignalReturnsConflictBeforePublisherTrackReady(t *testing.T) {
|
||||
store, err := newSessionStore(t.TempDir())
|
||||
if err != nil {
|
||||
t.Fatalf("newSessionStore: %v", err)
|
||||
}
|
||||
|
||||
server := newMediaServer(store)
|
||||
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Viewer Pending"})
|
||||
if err != nil {
|
||||
t.Fatalf("createSession: %v", err)
|
||||
}
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/media/sessions/"+session.ID+"/viewer-signal", strings.NewReader(`{"type":"offer","sdp":"mock-offer"}`))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
res := httptest.NewRecorder()
|
||||
server.routes().ServeHTTP(res, req)
|
||||
|
||||
if res.Code != http.StatusConflict {
|
||||
t.Fatalf("expected viewer-signal 409 before video track is ready, got %d", res.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLiveFrameUploadPublishesRelayFrame(t *testing.T) {
|
||||
store, err := newSessionStore(t.TempDir())
|
||||
if err != nil {
|
||||
t.Fatalf("newSessionStore: %v", err)
|
||||
}
|
||||
|
||||
server := newMediaServer(store)
|
||||
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Session"})
|
||||
if err != nil {
|
||||
t.Fatalf("createSession: %v", err)
|
||||
}
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/media/sessions/"+session.ID+"/live-frame", strings.NewReader("jpeg-frame"))
|
||||
req.Header.Set("Content-Type", "image/jpeg")
|
||||
res := httptest.NewRecorder()
|
||||
server.routes().ServeHTTP(res, req)
|
||||
|
||||
if res.Code != http.StatusAccepted {
|
||||
t.Fatalf("expected live-frame upload 202, got %d", res.Code)
|
||||
}
|
||||
|
||||
current, err := store.getSession(session.ID)
|
||||
if err != nil {
|
||||
t.Fatalf("getSession: %v", err)
|
||||
}
|
||||
if current.LiveFrameURL == "" || current.LiveFrameUpdated == "" {
|
||||
t.Fatalf("expected live frame metadata to be recorded, got %#v", current)
|
||||
}
|
||||
if !current.StreamConnected {
|
||||
t.Fatalf("expected session stream connected after frame upload")
|
||||
}
|
||||
|
||||
framePath := store.liveFramePath(session.ID)
|
||||
body, err := os.ReadFile(framePath)
|
||||
if err != nil {
|
||||
t.Fatalf("read live frame: %v", err)
|
||||
}
|
||||
if string(body) != "jpeg-frame" {
|
||||
t.Fatalf("unexpected live frame content: %q", string(body))
|
||||
}
|
||||
}
|
||||
|
||||
func TestRelaySegmentUploadKeepsOnlyLatestMinute(t *testing.T) {
|
||||
store, err := newSessionStore(t.TempDir())
|
||||
if err != nil {
|
||||
t.Fatalf("newSessionStore: %v", err)
|
||||
}
|
||||
|
||||
server := newMediaServer(store)
|
||||
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Buffer", Purpose: "relay", RelayBufferSeconds: 60})
|
||||
if err != nil {
|
||||
t.Fatalf("createSession: %v", err)
|
||||
}
|
||||
|
||||
for sequence := 0; sequence < 3; sequence += 1 {
|
||||
req := httptest.NewRequest(http.MethodPost, "/media/sessions/"+session.ID+"/segments?sequence="+strconv.Itoa(sequence)+"&durationMs=30000", strings.NewReader("segment"))
|
||||
req.Header.Set("Content-Type", "video/webm")
|
||||
res := httptest.NewRecorder()
|
||||
server.routes().ServeHTTP(res, req)
|
||||
if res.Code != http.StatusAccepted {
|
||||
t.Fatalf("expected segment upload 202 for sequence %d, got %d", sequence, res.Code)
|
||||
}
|
||||
}
|
||||
|
||||
current, err := store.getSession(session.ID)
|
||||
if err != nil {
|
||||
t.Fatalf("getSession: %v", err)
|
||||
}
|
||||
if current.Purpose != PurposeRelay {
|
||||
t.Fatalf("expected relay purpose, got %s", current.Purpose)
|
||||
}
|
||||
if len(current.Segments) != 2 {
|
||||
t.Fatalf("expected latest 2 relay segments to remain, got %d", len(current.Segments))
|
||||
}
|
||||
if current.Segments[0].Sequence != 1 || current.Segments[1].Sequence != 2 {
|
||||
t.Fatalf("expected relay segments 1 and 2 to remain, got %#v", current.Segments)
|
||||
}
|
||||
if _, err := os.Stat(filepath.Join(store.segmentsDir(session.ID), "000000.webm")); !errors.Is(err, os.ErrNotExist) {
|
||||
t.Fatalf("expected earliest relay segment to be pruned from disk, got %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestProcessRelayPreviewPublishesBufferedWebM(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
store, err := newSessionStore(tempDir)
|
||||
if err != nil {
|
||||
t.Fatalf("newSessionStore: %v", err)
|
||||
}
|
||||
|
||||
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Preview", Purpose: "relay", RelayBufferSeconds: 60})
|
||||
if err != nil {
|
||||
t.Fatalf("createSession: %v", err)
|
||||
}
|
||||
|
||||
if err := os.WriteFile(filepath.Join(store.segmentsDir(session.ID), "000000.webm"), []byte("segment"), 0o644); err != nil {
|
||||
t.Fatalf("write segment: %v", err)
|
||||
}
|
||||
|
||||
if _, err := store.updateSession(session.ID, func(current *Session) error {
|
||||
current.Segments = append(current.Segments, SegmentMeta{
|
||||
Sequence: 0,
|
||||
Filename: "000000.webm",
|
||||
DurationMS: 60000,
|
||||
SizeBytes: 7,
|
||||
ContentType: "video/webm",
|
||||
})
|
||||
current.Purpose = PurposeRelay
|
||||
return nil
|
||||
}); err != nil {
|
||||
t.Fatalf("updateSession: %v", err)
|
||||
}
|
||||
|
||||
if err := processRollingPreview(store, session.ID); err != nil {
|
||||
t.Fatalf("processRollingPreview: %v", err)
|
||||
}
|
||||
|
||||
current, err := store.getSession(session.ID)
|
||||
if err != nil {
|
||||
t.Fatalf("getSession: %v", err)
|
||||
}
|
||||
if current.Playback.PreviewURL == "" || !strings.HasSuffix(current.Playback.PreviewURL, "/preview.webm") {
|
||||
t.Fatalf("expected relay preview webm url, got %#v", current.Playback)
|
||||
}
|
||||
if current.Playback.MP4URL != "" {
|
||||
t.Fatalf("expected relay preview to skip mp4 generation, got %#v", current.Playback)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleSegmentUploadPersistsRelayMP4InitSegment(t *testing.T) {
|
||||
store, err := newSessionStore(t.TempDir())
|
||||
if err != nil {
|
||||
t.Fatalf("newSessionStore: %v", err)
|
||||
}
|
||||
|
||||
server := newMediaServer(store)
|
||||
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay MP4", Purpose: "relay", RelayBufferSeconds: 120})
|
||||
if err != nil {
|
||||
t.Fatalf("createSession: %v", err)
|
||||
}
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/media/sessions/"+session.ID+"/segments?sequence=1&durationMs=10000", strings.NewReader("mp4-init"))
|
||||
req.Header.Set("Content-Type", "video/mp4;codecs=avc1")
|
||||
res := httptest.NewRecorder()
|
||||
server.routes().ServeHTTP(res, req)
|
||||
if res.Code != http.StatusAccepted {
|
||||
t.Fatalf("expected segment upload 202, got %d", res.Code)
|
||||
}
|
||||
|
||||
current, err := store.getSession(session.ID)
|
||||
if err != nil {
|
||||
t.Fatalf("getSession: %v", err)
|
||||
}
|
||||
if current.RelayInitFilename != "000001.mp4" {
|
||||
t.Fatalf("expected relay init filename to be recorded, got %q", current.RelayInitFilename)
|
||||
}
|
||||
body, err := os.ReadFile(store.relayInitPath(session.ID))
|
||||
if err != nil {
|
||||
t.Fatalf("read relay init: %v", err)
|
||||
}
|
||||
if string(body) != "mp4-init" {
|
||||
t.Fatalf("unexpected relay init contents: %q", string(body))
|
||||
}
|
||||
}
|
||||
|
||||
func TestProcessRelayPreviewUsesPersistedInitForMP4Fragments(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
store, err := newSessionStore(tempDir)
|
||||
if err != nil {
|
||||
t.Fatalf("newSessionStore: %v", err)
|
||||
}
|
||||
|
||||
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay MP4 Preview", Purpose: "relay", RelayBufferSeconds: 120})
|
||||
if err != nil {
|
||||
t.Fatalf("createSession: %v", err)
|
||||
}
|
||||
|
||||
if err := os.WriteFile(store.relayInitPath(session.ID), []byte(strings.Repeat("i", 6000)), 0o644); err != nil {
|
||||
t.Fatalf("write relay init: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(store.segmentsDir(session.ID), "000082.mp4"), []byte(strings.Repeat("a", 6000)), 0o644); err != nil {
|
||||
t.Fatalf("write segment 82: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(store.segmentsDir(session.ID), "000083.mp4"), []byte(strings.Repeat("b", 6000)), 0o644); err != nil {
|
||||
t.Fatalf("write segment 83: %v", err)
|
||||
}
|
||||
|
||||
if _, err := store.updateSession(session.ID, func(current *Session) error {
|
||||
current.Purpose = PurposeRelay
|
||||
current.RelayInitFilename = "000001.mp4"
|
||||
current.Segments = []SegmentMeta{
|
||||
{
|
||||
Sequence: 82,
|
||||
Filename: "000082.mp4",
|
||||
DurationMS: 10000,
|
||||
SizeBytes: 6000,
|
||||
ContentType: "video/mp4;codecs=avc1",
|
||||
},
|
||||
{
|
||||
Sequence: 83,
|
||||
Filename: "000083.mp4",
|
||||
DurationMS: 10000,
|
||||
SizeBytes: 6000,
|
||||
ContentType: "video/mp4;codecs=avc1",
|
||||
},
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
t.Fatalf("updateSession: %v", err)
|
||||
}
|
||||
|
||||
fakeFFmpeg := filepath.Join(tempDir, "ffmpeg")
|
||||
script := "#!/bin/sh\ninput=''\noutput=''\nprev=''\nfor arg in \"$@\"; do\n if [ \"$prev\" = '-i' ]; then input=\"$arg\"; fi\n prev=\"$arg\"\n output=\"$arg\"\ndone\nif [ -n \"$input\" ] && [ -f \"$input\" ]; then cp \"$input\" \"$output\"; else : > \"$output\"; fi\n"
|
||||
if err := os.WriteFile(fakeFFmpeg, []byte(script), 0o755); err != nil {
|
||||
t.Fatalf("write fake ffmpeg: %v", err)
|
||||
}
|
||||
t.Setenv("PATH", tempDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||
|
||||
if err := processRollingPreview(store, session.ID); err != nil {
|
||||
t.Fatalf("processRollingPreview: %v", err)
|
||||
}
|
||||
|
||||
current, err := store.getSession(session.ID)
|
||||
if err != nil {
|
||||
t.Fatalf("getSession: %v", err)
|
||||
}
|
||||
if current.PreviewStatus != PreviewReady {
|
||||
t.Fatalf("expected preview ready, got %s", current.PreviewStatus)
|
||||
}
|
||||
if current.Playback.PreviewURL == "" {
|
||||
t.Fatalf("expected preview url to be populated")
|
||||
}
|
||||
}
|
||||
|
||||
func TestProcessRelayPreviewKeepsPreviousPreviewOnFailure(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
store, err := newSessionStore(tempDir)
|
||||
if err != nil {
|
||||
t.Fatalf("newSessionStore: %v", err)
|
||||
}
|
||||
|
||||
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Relay Existing Preview", Purpose: "relay", RelayBufferSeconds: 120})
|
||||
if err != nil {
|
||||
t.Fatalf("createSession: %v", err)
|
||||
}
|
||||
|
||||
if err := os.MkdirAll(store.publicDir(session.ID), 0o755); err != nil {
|
||||
t.Fatalf("mkdir public dir: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(store.publicDir(session.ID), "preview.webm"), []byte("existing-preview"), 0o644); err != nil {
|
||||
t.Fatalf("write preview: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(store.segmentsDir(session.ID), "000001.webm"), []byte("segment-one"), 0o644); err != nil {
|
||||
t.Fatalf("write segment 1: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(store.segmentsDir(session.ID), "000002.webm"), []byte("segment-two"), 0o644); err != nil {
|
||||
t.Fatalf("write segment 2: %v", err)
|
||||
}
|
||||
if _, err := store.updateSession(session.ID, func(current *Session) error {
|
||||
current.Purpose = PurposeRelay
|
||||
current.PreviewStatus = PreviewReady
|
||||
current.Playback.PreviewURL = fmt.Sprintf("/media/assets/sessions/%s/preview.webm", session.ID)
|
||||
current.Segments = []SegmentMeta{
|
||||
{
|
||||
Sequence: 1,
|
||||
Filename: "000001.webm",
|
||||
DurationMS: 10000,
|
||||
SizeBytes: int64(len("segment-one")),
|
||||
ContentType: "video/webm",
|
||||
},
|
||||
{
|
||||
Sequence: 2,
|
||||
Filename: "000002.webm",
|
||||
DurationMS: 10000,
|
||||
SizeBytes: int64(len("segment-two")),
|
||||
ContentType: "video/webm",
|
||||
},
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
t.Fatalf("updateSession: %v", err)
|
||||
}
|
||||
|
||||
fakeFFmpeg := filepath.Join(tempDir, "ffmpeg")
|
||||
script := "#!/bin/sh\nexit 1\n"
|
||||
if err := os.WriteFile(fakeFFmpeg, []byte(script), 0o755); err != nil {
|
||||
t.Fatalf("write fake ffmpeg: %v", err)
|
||||
}
|
||||
t.Setenv("PATH", tempDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||
|
||||
if err := processRollingPreview(store, session.ID); err == nil {
|
||||
t.Fatalf("expected processRollingPreview to surface failure")
|
||||
}
|
||||
|
||||
current, err := store.getSession(session.ID)
|
||||
if err != nil {
|
||||
t.Fatalf("getSession: %v", err)
|
||||
}
|
||||
if current.PreviewStatus != PreviewReady {
|
||||
t.Fatalf("expected previous preview to remain ready, got %s", current.PreviewStatus)
|
||||
}
|
||||
if current.Playback.PreviewURL == "" {
|
||||
t.Fatalf("expected preview url to remain available")
|
||||
}
|
||||
if current.LastError == "" {
|
||||
t.Fatalf("expected last error to be recorded")
|
||||
}
|
||||
}
|
||||
|
||||
func TestPruneExpiredRelaySessionsRemovesOldCache(t *testing.T) {
|
||||
store, err := newSessionStore(t.TempDir())
|
||||
if err != nil {
|
||||
t.Fatalf("newSessionStore: %v", err)
|
||||
}
|
||||
|
||||
session, err := store.createSession(CreateSessionRequest{UserID: "1", Title: "Old Relay", Purpose: "relay", RelayBufferSeconds: 60})
|
||||
if err != nil {
|
||||
t.Fatalf("createSession: %v", err)
|
||||
}
|
||||
|
||||
if err := os.WriteFile(filepath.Join(store.segmentsDir(session.ID), "000000.webm"), []byte("segment"), 0o644); err != nil {
|
||||
t.Fatalf("write segment: %v", err)
|
||||
}
|
||||
if err := os.MkdirAll(store.publicDir(session.ID), 0o755); err != nil {
|
||||
t.Fatalf("mkdir public dir: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(store.publicDir(session.ID), "preview.webm"), []byte("preview"), 0o644); err != nil {
|
||||
t.Fatalf("write preview: %v", err)
|
||||
}
|
||||
|
||||
store.mu.Lock()
|
||||
store.sessions[session.ID].Purpose = PurposeRelay
|
||||
store.sessions[session.ID].UpdatedAt = time.Now().UTC().Add(-31 * time.Minute).Format(time.RFC3339)
|
||||
store.mu.Unlock()
|
||||
|
||||
if err := store.pruneExpiredRelaySessions(relayCacheTTL, time.Now().UTC()); err != nil {
|
||||
t.Fatalf("pruneExpiredRelaySessions: %v", err)
|
||||
}
|
||||
|
||||
if _, err := store.getSession(session.ID); err == nil {
|
||||
t.Fatalf("expected relay session to be removed from store")
|
||||
}
|
||||
if _, err := os.Stat(store.sessionDir(session.ID)); !errors.Is(err, os.ErrNotExist) {
|
||||
t.Fatalf("expected relay session directory to be removed, got %v", err)
|
||||
}
|
||||
if _, err := os.Stat(store.publicDir(session.ID)); !errors.Is(err, os.ErrNotExist) {
|
||||
t.Fatalf("expected relay public directory to be removed, got %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,23 +6,29 @@ export type TrpcContext = {
|
||||
req: CreateExpressContextOptions["req"];
|
||||
res: CreateExpressContextOptions["res"];
|
||||
user: User | null;
|
||||
sessionSid: string | null;
|
||||
};
|
||||
|
||||
export async function createContext(
|
||||
opts: CreateExpressContextOptions
|
||||
): Promise<TrpcContext> {
|
||||
let user: User | null = null;
|
||||
let sessionSid: string | null = null;
|
||||
|
||||
try {
|
||||
user = await sdk.authenticateRequest(opts.req);
|
||||
const authenticated = await sdk.authenticateRequestWithSession(opts.req);
|
||||
user = authenticated.user;
|
||||
sessionSid = authenticated.sid;
|
||||
} catch (error) {
|
||||
// Authentication is optional for public procedures.
|
||||
user = null;
|
||||
sessionSid = null;
|
||||
}
|
||||
|
||||
return {
|
||||
req: opts.req,
|
||||
res: opts.res,
|
||||
user,
|
||||
sessionSid,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -13,6 +13,26 @@ import { createBackgroundTask, getAdminUserId, hasRecentBackgroundTaskOfType, se
|
||||
import { nanoid } from "nanoid";
|
||||
import { syncTutorialImages } from "../tutorialImages";
|
||||
|
||||
async function warmupApplicationData() {
|
||||
const tasks: Array<{ label: string; run: () => Promise<unknown> }> = [
|
||||
{ label: "seedTutorials", run: () => seedTutorials() },
|
||||
{ label: "syncTutorialImages", run: () => syncTutorialImages() },
|
||||
{ label: "seedVisionReferenceImages", run: () => seedVisionReferenceImages() },
|
||||
{ label: "seedAchievementDefinitions", run: () => seedAchievementDefinitions() },
|
||||
{ label: "seedAppSettings", run: () => seedAppSettings() },
|
||||
];
|
||||
|
||||
for (const task of tasks) {
|
||||
const startedAt = Date.now();
|
||||
try {
|
||||
await task.run();
|
||||
console.log(`[startup] ${task.label} finished in ${Date.now() - startedAt}ms`);
|
||||
} catch (error) {
|
||||
console.error(`[startup] ${task.label} failed`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function scheduleDailyNtrpRefresh() {
|
||||
const now = new Date();
|
||||
if (now.getHours() !== 0 || now.getMinutes() > 5) {
|
||||
@@ -64,12 +84,6 @@ async function findAvailablePort(startPort: number = 3000): Promise<number> {
|
||||
}
|
||||
|
||||
async function startServer() {
|
||||
await seedTutorials();
|
||||
await syncTutorialImages();
|
||||
await seedVisionReferenceImages();
|
||||
await seedAchievementDefinitions();
|
||||
await seedAppSettings();
|
||||
|
||||
const app = express();
|
||||
const server = createServer(app);
|
||||
registerMediaProxy(app);
|
||||
@@ -108,6 +122,7 @@ async function startServer() {
|
||||
|
||||
server.listen(port, () => {
|
||||
console.log(`Server running on http://localhost:${port}/`);
|
||||
void warmupApplicationData();
|
||||
});
|
||||
|
||||
setInterval(() => {
|
||||
|
||||
57
server/_core/sdk.test.ts
普通文件
@@ -0,0 +1,57 @@
|
||||
import { SignJWT } from "jose";
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
async function loadSdkForTest() {
|
||||
process.env.JWT_SECRET = "test-cookie-secret";
|
||||
process.env.VITE_APP_ID = "test-app";
|
||||
vi.resetModules();
|
||||
|
||||
const [{ sdk }, { ENV }] = await Promise.all([
|
||||
import("./sdk"),
|
||||
import("./env"),
|
||||
]);
|
||||
|
||||
return { sdk, ENV };
|
||||
}
|
||||
|
||||
async function signLegacyToken(openId: string, appId: string, name: string) {
|
||||
const secret = new TextEncoder().encode(process.env.JWT_SECRET || "");
|
||||
return new SignJWT({
|
||||
openId,
|
||||
appId,
|
||||
name,
|
||||
})
|
||||
.setProtectedHeader({ alg: "HS256", typ: "JWT" })
|
||||
.setExpirationTime(Math.floor((Date.now() + 60_000) / 1000))
|
||||
.sign(secret);
|
||||
}
|
||||
|
||||
describe("sdk.verifySession", () => {
|
||||
it("derives a stable legacy sid when the token payload does not include sid", async () => {
|
||||
const { sdk, ENV } = await loadSdkForTest();
|
||||
const legacyToken = await signLegacyToken("username_H1_legacy", ENV.appId, "H1");
|
||||
|
||||
const session = await sdk.verifySession(legacyToken);
|
||||
|
||||
expect(session).not.toBeNull();
|
||||
expect(session?.sid).toMatch(/^legacy-token:/);
|
||||
expect(session?.sid).toHaveLength("legacy-token:".length + 32);
|
||||
});
|
||||
|
||||
it("derives different legacy sid values for different legacy login tokens", async () => {
|
||||
const firstLoad = await loadSdkForTest();
|
||||
const tokenA = await signLegacyToken("username_H1_legacy", firstLoad.ENV.appId, "H1");
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 5));
|
||||
|
||||
const secondLoad = await loadSdkForTest();
|
||||
const tokenB = await signLegacyToken("username_H1_legacy", secondLoad.ENV.appId, "H1-second");
|
||||
|
||||
const sessionA = await firstLoad.sdk.verifySession(tokenA);
|
||||
const sessionB = await secondLoad.sdk.verifySession(tokenB);
|
||||
|
||||
expect(sessionA?.sid).toMatch(/^legacy-token:/);
|
||||
expect(sessionB?.sid).toMatch(/^legacy-token:/);
|
||||
expect(sessionA?.sid).not.toBe(sessionB?.sid);
|
||||
});
|
||||
});
|
||||
@@ -4,6 +4,7 @@ import axios, { type AxiosInstance } from "axios";
|
||||
import { parse as parseCookieHeader } from "cookie";
|
||||
import type { Request } from "express";
|
||||
import { SignJWT, jwtVerify } from "jose";
|
||||
import { createHash } from "node:crypto";
|
||||
import type { User } from "../../drizzle/schema";
|
||||
import * as db from "../db";
|
||||
import { ENV } from "./env";
|
||||
@@ -223,11 +224,15 @@ class SDKServer {
|
||||
return null;
|
||||
}
|
||||
|
||||
const derivedSid = typeof sid === "string" && sid.length > 0
|
||||
? sid
|
||||
: `legacy-token:${createHash("sha256").update(cookieValue).digest("hex").slice(0, 32)}`;
|
||||
|
||||
return {
|
||||
openId,
|
||||
appId,
|
||||
name: typeof name === "string" ? name : undefined,
|
||||
sid: typeof sid === "string" ? sid : undefined,
|
||||
sid: derivedSid,
|
||||
};
|
||||
} catch (error) {
|
||||
console.warn("[Auth] Session verification failed", String(error));
|
||||
@@ -260,7 +265,11 @@ class SDKServer {
|
||||
}
|
||||
|
||||
async authenticateRequest(req: Request): Promise<User> {
|
||||
// Regular authentication flow
|
||||
const authenticated = await this.authenticateRequestWithSession(req);
|
||||
return authenticated.user;
|
||||
}
|
||||
|
||||
async authenticateRequestWithSession(req: Request): Promise<{ user: User; sid: string | null }> {
|
||||
const cookies = this.parseCookies(req.headers.cookie);
|
||||
const sessionCookie = cookies.get(COOKIE_NAME);
|
||||
const session = await this.verifySession(sessionCookie);
|
||||
@@ -273,7 +282,6 @@ class SDKServer {
|
||||
const signedInAt = new Date();
|
||||
let user = await db.getUserByOpenId(sessionUserId);
|
||||
|
||||
// If user not in DB, sync from OAuth server automatically
|
||||
if (!user) {
|
||||
try {
|
||||
const userInfo = await this.getUserInfoWithJwt(sessionCookie ?? "");
|
||||
@@ -300,7 +308,10 @@ class SDKServer {
|
||||
lastSignedIn: signedInAt,
|
||||
});
|
||||
|
||||
return user;
|
||||
return {
|
||||
user,
|
||||
sid: session.sid ?? null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
136
server/db.ts
@@ -8,6 +8,7 @@ import {
|
||||
poseAnalyses, InsertPoseAnalysis,
|
||||
trainingRecords, InsertTrainingRecord,
|
||||
liveAnalysisSessions, InsertLiveAnalysisSession,
|
||||
liveAnalysisRuntime, InsertLiveAnalysisRuntime,
|
||||
liveActionSegments, InsertLiveActionSegment,
|
||||
dailyTrainingAggregates, InsertDailyTrainingAggregate,
|
||||
ratingHistory, InsertRatingHistory,
|
||||
@@ -32,6 +33,7 @@ import { fetchTutorialMetrics, shouldRefreshTutorialMetrics } from "./tutorialMe
|
||||
let _db: ReturnType<typeof drizzle> | null = null;
|
||||
|
||||
const APP_TIMEZONE = process.env.TZ || "Asia/Shanghai";
|
||||
export const LIVE_ANALYSIS_RUNTIME_TIMEOUT_MS = 15_000;
|
||||
|
||||
function getDateFormatter() {
|
||||
return new Intl.DateTimeFormat("en-CA", {
|
||||
@@ -888,6 +890,140 @@ export async function createLiveAnalysisSession(session: InsertLiveAnalysisSessi
|
||||
return result[0].insertId;
|
||||
}
|
||||
|
||||
export async function getUserLiveAnalysisRuntime(userId: number) {
|
||||
const db = await getDb();
|
||||
if (!db) return undefined;
|
||||
const result = await db.select().from(liveAnalysisRuntime)
|
||||
.where(eq(liveAnalysisRuntime.userId, userId))
|
||||
.limit(1);
|
||||
return result[0];
|
||||
}
|
||||
|
||||
export async function upsertUserLiveAnalysisRuntime(
|
||||
userId: number,
|
||||
patch: Omit<InsertLiveAnalysisRuntime, "id" | "createdAt" | "updatedAt" | "userId">,
|
||||
) {
|
||||
const db = await getDb();
|
||||
if (!db) throw new Error("Database not available");
|
||||
|
||||
const existing = await getUserLiveAnalysisRuntime(userId);
|
||||
if (existing) {
|
||||
await db.update(liveAnalysisRuntime)
|
||||
.set({
|
||||
ownerSid: patch.ownerSid ?? existing.ownerSid,
|
||||
status: patch.status ?? existing.status,
|
||||
title: patch.title ?? existing.title,
|
||||
sessionMode: patch.sessionMode ?? existing.sessionMode,
|
||||
mediaSessionId: patch.mediaSessionId === undefined ? existing.mediaSessionId : patch.mediaSessionId,
|
||||
startedAt: patch.startedAt === undefined ? existing.startedAt : patch.startedAt,
|
||||
endedAt: patch.endedAt === undefined ? existing.endedAt : patch.endedAt,
|
||||
lastHeartbeatAt: patch.lastHeartbeatAt === undefined ? existing.lastHeartbeatAt : patch.lastHeartbeatAt,
|
||||
snapshot: patch.snapshot === undefined ? existing.snapshot : patch.snapshot,
|
||||
})
|
||||
.where(eq(liveAnalysisRuntime.userId, userId));
|
||||
return getUserLiveAnalysisRuntime(userId);
|
||||
}
|
||||
|
||||
const result = await db.insert(liveAnalysisRuntime).values({
|
||||
userId,
|
||||
ownerSid: patch.ownerSid ?? null,
|
||||
status: patch.status ?? "idle",
|
||||
title: patch.title ?? null,
|
||||
sessionMode: patch.sessionMode ?? "practice",
|
||||
mediaSessionId: patch.mediaSessionId ?? null,
|
||||
startedAt: patch.startedAt ?? null,
|
||||
endedAt: patch.endedAt ?? null,
|
||||
lastHeartbeatAt: patch.lastHeartbeatAt ?? null,
|
||||
snapshot: patch.snapshot ?? null,
|
||||
});
|
||||
|
||||
const runtimeId = result[0].insertId;
|
||||
const rows = await db.select().from(liveAnalysisRuntime).where(eq(liveAnalysisRuntime.id, runtimeId)).limit(1);
|
||||
return rows[0];
|
||||
}
|
||||
|
||||
export async function updateUserLiveAnalysisRuntime(
|
||||
userId: number,
|
||||
patch: Partial<Omit<InsertLiveAnalysisRuntime, "id" | "createdAt" | "updatedAt" | "userId">>,
|
||||
) {
|
||||
const db = await getDb();
|
||||
if (!db) throw new Error("Database not available");
|
||||
const existing = await getUserLiveAnalysisRuntime(userId);
|
||||
if (!existing) return undefined;
|
||||
|
||||
await db.update(liveAnalysisRuntime)
|
||||
.set({
|
||||
ownerSid: patch.ownerSid === undefined ? existing.ownerSid : patch.ownerSid,
|
||||
status: patch.status ?? existing.status,
|
||||
title: patch.title === undefined ? existing.title : patch.title,
|
||||
sessionMode: patch.sessionMode ?? existing.sessionMode,
|
||||
mediaSessionId: patch.mediaSessionId === undefined ? existing.mediaSessionId : patch.mediaSessionId,
|
||||
startedAt: patch.startedAt === undefined ? existing.startedAt : patch.startedAt,
|
||||
endedAt: patch.endedAt === undefined ? existing.endedAt : patch.endedAt,
|
||||
lastHeartbeatAt: patch.lastHeartbeatAt === undefined ? existing.lastHeartbeatAt : patch.lastHeartbeatAt,
|
||||
snapshot: patch.snapshot === undefined ? existing.snapshot : patch.snapshot,
|
||||
})
|
||||
.where(eq(liveAnalysisRuntime.userId, userId));
|
||||
|
||||
return getUserLiveAnalysisRuntime(userId);
|
||||
}
|
||||
|
||||
export async function updateLiveAnalysisRuntimeHeartbeat(input: {
|
||||
userId: number;
|
||||
ownerSid: string;
|
||||
runtimeId: number;
|
||||
mediaSessionId?: string | null;
|
||||
snapshot?: unknown;
|
||||
}) {
|
||||
const db = await getDb();
|
||||
if (!db) throw new Error("Database not available");
|
||||
|
||||
const existing = await getUserLiveAnalysisRuntime(input.userId);
|
||||
if (!existing || existing.id !== input.runtimeId || existing.ownerSid !== input.ownerSid || existing.status !== "active") {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
await db.update(liveAnalysisRuntime)
|
||||
.set({
|
||||
mediaSessionId: input.mediaSessionId === undefined ? existing.mediaSessionId : input.mediaSessionId,
|
||||
snapshot: input.snapshot === undefined ? existing.snapshot : input.snapshot,
|
||||
lastHeartbeatAt: new Date(),
|
||||
endedAt: null,
|
||||
})
|
||||
.where(and(
|
||||
eq(liveAnalysisRuntime.userId, input.userId),
|
||||
eq(liveAnalysisRuntime.id, input.runtimeId),
|
||||
));
|
||||
|
||||
return getUserLiveAnalysisRuntime(input.userId);
|
||||
}
|
||||
|
||||
export async function endUserLiveAnalysisRuntime(input: {
|
||||
userId: number;
|
||||
ownerSid?: string | null;
|
||||
runtimeId?: number;
|
||||
snapshot?: unknown;
|
||||
}) {
|
||||
const db = await getDb();
|
||||
if (!db) throw new Error("Database not available");
|
||||
|
||||
const existing = await getUserLiveAnalysisRuntime(input.userId);
|
||||
if (!existing) return undefined;
|
||||
if (input.runtimeId != null && existing.id !== input.runtimeId) return undefined;
|
||||
if (input.ownerSid != null && existing.ownerSid !== input.ownerSid) return undefined;
|
||||
|
||||
await db.update(liveAnalysisRuntime)
|
||||
.set({
|
||||
status: "ended",
|
||||
mediaSessionId: null,
|
||||
endedAt: new Date(),
|
||||
snapshot: input.snapshot === undefined ? existing.snapshot : input.snapshot,
|
||||
})
|
||||
.where(eq(liveAnalysisRuntime.userId, input.userId));
|
||||
|
||||
return getUserLiveAnalysisRuntime(input.userId);
|
||||
}
|
||||
|
||||
export async function createLiveActionSegments(segments: InsertLiveActionSegment[]) {
|
||||
const db = await getDb();
|
||||
if (!db || segments.length === 0) return;
|
||||
|
||||
@@ -45,7 +45,7 @@ function createTestUser(overrides?: Partial<AuthenticatedUser>): AuthenticatedUs
|
||||
};
|
||||
}
|
||||
|
||||
function createMockContext(user: AuthenticatedUser | null = null): {
|
||||
function createMockContext(user: AuthenticatedUser | null = null, sessionSid = "test-session-sid"): {
|
||||
ctx: TrpcContext;
|
||||
clearedCookies: { name: string; options: Record<string, unknown> }[];
|
||||
setCookies: { name: string; value: string; options: Record<string, unknown> }[];
|
||||
@@ -56,6 +56,7 @@ function createMockContext(user: AuthenticatedUser | null = null): {
|
||||
return {
|
||||
ctx: {
|
||||
user,
|
||||
sessionSid: user ? sessionSid : null,
|
||||
req: {
|
||||
protocol: "https",
|
||||
headers: {},
|
||||
@@ -1296,6 +1297,161 @@ describe("analysis.liveSessionSave", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("analysis.runtime", () => {
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it("acquires owner mode when runtime is idle", async () => {
|
||||
const user = createTestUser({ id: 7 });
|
||||
const { ctx } = createMockContext(user, "sid-owner");
|
||||
const caller = appRouter.createCaller(ctx);
|
||||
|
||||
vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(undefined);
|
||||
const upsertSpy = vi.spyOn(db, "upsertUserLiveAnalysisRuntime").mockResolvedValueOnce({
|
||||
id: 11,
|
||||
userId: 7,
|
||||
ownerSid: "sid-owner",
|
||||
status: "active",
|
||||
title: "实时分析 正手",
|
||||
sessionMode: "practice",
|
||||
mediaSessionId: null,
|
||||
startedAt: new Date(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
snapshot: null,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
} as any);
|
||||
|
||||
const result = await caller.analysis.runtimeAcquire({
|
||||
title: "实时分析 正手",
|
||||
sessionMode: "practice",
|
||||
});
|
||||
|
||||
expect(upsertSpy).toHaveBeenCalledWith(7, expect.objectContaining({
|
||||
ownerSid: "sid-owner",
|
||||
status: "active",
|
||||
title: "实时分析 正手",
|
||||
sessionMode: "practice",
|
||||
}));
|
||||
expect(result.role).toBe("owner");
|
||||
expect((result.runtimeSession as any)?.ownerSid).toBe("sid-owner");
|
||||
});
|
||||
|
||||
it("returns viewer mode when another session sid already holds the runtime", async () => {
|
||||
const user = createTestUser({ id: 7 });
|
||||
const { ctx } = createMockContext(user, "sid-viewer");
|
||||
const caller = appRouter.createCaller(ctx);
|
||||
const activeRuntime = {
|
||||
id: 15,
|
||||
userId: 7,
|
||||
ownerSid: "sid-owner",
|
||||
status: "active",
|
||||
title: "实时分析 练习",
|
||||
sessionMode: "pk",
|
||||
mediaSessionId: "media-sync-1",
|
||||
startedAt: new Date(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
snapshot: { phase: "analyzing" },
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(activeRuntime as any);
|
||||
|
||||
const result = await caller.analysis.runtimeAcquire({
|
||||
title: "实时分析 练习",
|
||||
sessionMode: "pk",
|
||||
});
|
||||
|
||||
expect(result.role).toBe("viewer");
|
||||
expect((result.runtimeSession as any)?.mediaSessionId).toBe("media-sync-1");
|
||||
});
|
||||
|
||||
it("keeps owner mode when the same sid reacquires the runtime", async () => {
|
||||
const user = createTestUser({ id: 7 });
|
||||
const { ctx } = createMockContext(user, "sid-owner");
|
||||
const caller = appRouter.createCaller(ctx);
|
||||
const activeRuntime = {
|
||||
id: 19,
|
||||
userId: 7,
|
||||
ownerSid: "sid-owner",
|
||||
status: "active",
|
||||
title: "旧标题",
|
||||
sessionMode: "practice",
|
||||
mediaSessionId: "media-sync-2",
|
||||
startedAt: new Date("2026-03-16T00:00:00.000Z"),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
snapshot: { phase: "analyzing" },
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce(activeRuntime as any);
|
||||
const updateSpy = vi.spyOn(db, "updateUserLiveAnalysisRuntime").mockResolvedValueOnce({
|
||||
...activeRuntime,
|
||||
title: "新标题",
|
||||
} as any);
|
||||
|
||||
const result = await caller.analysis.runtimeAcquire({
|
||||
title: "新标题",
|
||||
sessionMode: "practice",
|
||||
});
|
||||
|
||||
expect(updateSpy).toHaveBeenCalledWith(7, expect.objectContaining({
|
||||
ownerSid: "sid-owner",
|
||||
title: "新标题",
|
||||
status: "active",
|
||||
}));
|
||||
expect(result.role).toBe("owner");
|
||||
});
|
||||
|
||||
it("rejects heartbeat from a non-owner sid", async () => {
|
||||
const user = createTestUser({ id: 7 });
|
||||
const { ctx } = createMockContext(user, "sid-viewer");
|
||||
const caller = appRouter.createCaller(ctx);
|
||||
|
||||
vi.spyOn(db, "updateLiveAnalysisRuntimeHeartbeat").mockResolvedValueOnce(undefined);
|
||||
|
||||
await expect(caller.analysis.runtimeHeartbeat({
|
||||
runtimeId: 20,
|
||||
mediaSessionId: "media-sync-3",
|
||||
snapshot: { phase: "analyzing" },
|
||||
})).rejects.toThrow("当前设备不是实时分析持有端");
|
||||
});
|
||||
|
||||
it("rejects release from a non-owner sid", async () => {
|
||||
const user = createTestUser({ id: 7 });
|
||||
const { ctx } = createMockContext(user, "sid-viewer");
|
||||
const caller = appRouter.createCaller(ctx);
|
||||
|
||||
vi.spyOn(db, "endUserLiveAnalysisRuntime").mockResolvedValueOnce(undefined);
|
||||
vi.spyOn(db, "getUserLiveAnalysisRuntime").mockResolvedValueOnce({
|
||||
id: 23,
|
||||
userId: 7,
|
||||
ownerSid: "sid-owner",
|
||||
status: "active",
|
||||
title: "实时分析",
|
||||
sessionMode: "practice",
|
||||
mediaSessionId: "media-sync-4",
|
||||
startedAt: new Date(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
snapshot: null,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
} as any);
|
||||
|
||||
await expect(caller.analysis.runtimeRelease({
|
||||
runtimeId: 23,
|
||||
snapshot: { phase: "failed" },
|
||||
})).rejects.toThrow("当前设备不是实时分析持有端");
|
||||
});
|
||||
});
|
||||
|
||||
describe("rating.refreshMine", () => {
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
|
||||
@@ -73,6 +73,67 @@ const trainingProfileUpdateSchema = z.object({
|
||||
assessmentNotes: z.string().max(2000).nullable().optional(),
|
||||
});
|
||||
|
||||
const liveRuntimeSnapshotSchema = z.object({
|
||||
phase: z.enum(["idle", "analyzing", "saving", "safe", "failed"]).optional(),
|
||||
startedAt: z.number().optional(),
|
||||
durationMs: z.number().optional(),
|
||||
currentAction: z.string().optional(),
|
||||
rawAction: z.string().optional(),
|
||||
feedback: z.array(z.string()).optional(),
|
||||
liveScore: z.record(z.string(), z.number()).nullable().optional(),
|
||||
stabilityMeta: z.record(z.string(), z.any()).optional(),
|
||||
visibleSegments: z.number().optional(),
|
||||
unknownSegments: z.number().optional(),
|
||||
archivedVideoCount: z.number().optional(),
|
||||
recentSegments: z.array(z.object({
|
||||
actionType: z.string(),
|
||||
isUnknown: z.boolean().optional(),
|
||||
startMs: z.number(),
|
||||
endMs: z.number(),
|
||||
durationMs: z.number(),
|
||||
confidenceAvg: z.number().optional(),
|
||||
score: z.number().optional(),
|
||||
clipLabel: z.string().optional(),
|
||||
})).optional(),
|
||||
}).passthrough();
|
||||
|
||||
function getRuntimeOwnerSid(ctx: { sessionSid: string | null; user: { openId: string } }) {
|
||||
return ctx.sessionSid || `legacy:${ctx.user.openId}`;
|
||||
}
|
||||
|
||||
async function resolveLiveRuntimeRole(params: {
|
||||
userId: number;
|
||||
sessionSid: string;
|
||||
}) {
|
||||
let runtime = await db.getUserLiveAnalysisRuntime(params.userId);
|
||||
if (!runtime) {
|
||||
return { role: "idle" as const, runtimeSession: null };
|
||||
}
|
||||
|
||||
const heartbeatAt = runtime.lastHeartbeatAt ?? runtime.updatedAt ?? runtime.startedAt;
|
||||
const isStale =
|
||||
runtime.status === "active" &&
|
||||
(!heartbeatAt || (Date.now() - heartbeatAt.getTime()) > db.LIVE_ANALYSIS_RUNTIME_TIMEOUT_MS);
|
||||
|
||||
if (isStale) {
|
||||
runtime = await db.endUserLiveAnalysisRuntime({
|
||||
userId: params.userId,
|
||||
runtimeId: runtime.id,
|
||||
snapshot: runtime.snapshot,
|
||||
}) ?? null as any;
|
||||
return { role: "idle" as const, runtimeSession: null };
|
||||
}
|
||||
|
||||
if (runtime.status !== "active") {
|
||||
return { role: "idle" as const, runtimeSession: runtime };
|
||||
}
|
||||
|
||||
return {
|
||||
role: runtime.ownerSid === params.sessionSid ? "owner" as const : "viewer" as const,
|
||||
runtimeSession: runtime,
|
||||
};
|
||||
}
|
||||
|
||||
export const appRouter = router({
|
||||
system: systemRouter,
|
||||
|
||||
@@ -187,6 +248,7 @@ export const appRouter = router({
|
||||
title: z.string(),
|
||||
format: z.string(),
|
||||
fileSize: z.number(),
|
||||
duration: z.number().optional(),
|
||||
exerciseType: z.string().optional(),
|
||||
fileBase64: z.string(),
|
||||
}))
|
||||
@@ -204,8 +266,9 @@ export const appRouter = router({
|
||||
url: publicUrl,
|
||||
format: input.format,
|
||||
fileSize: input.fileSize,
|
||||
duration: input.duration ?? null,
|
||||
exerciseType: input.exerciseType || null,
|
||||
analysisStatus: "pending",
|
||||
analysisStatus: input.exerciseType === "live_analysis" ? "completed" : "pending",
|
||||
});
|
||||
|
||||
return { videoId, url: publicUrl };
|
||||
@@ -453,6 +516,122 @@ export const appRouter = router({
|
||||
return { session, segments };
|
||||
}),
|
||||
|
||||
runtimeGet: protectedProcedure.query(async ({ ctx }) => {
|
||||
const sessionSid = getRuntimeOwnerSid(ctx);
|
||||
return resolveLiveRuntimeRole({
|
||||
userId: ctx.user.id,
|
||||
sessionSid,
|
||||
});
|
||||
}),
|
||||
|
||||
runtimeAcquire: protectedProcedure
|
||||
.input(z.object({
|
||||
title: z.string().min(1).max(256),
|
||||
sessionMode: z.enum(["practice", "pk"]).default("practice"),
|
||||
}))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const sessionSid = getRuntimeOwnerSid(ctx);
|
||||
const current = await resolveLiveRuntimeRole({
|
||||
userId: ctx.user.id,
|
||||
sessionSid,
|
||||
});
|
||||
|
||||
if (current.role === "viewer" && current.runtimeSession?.status === "active") {
|
||||
return current;
|
||||
}
|
||||
|
||||
const runtime = current.runtimeSession?.status === "active" && current.role === "owner"
|
||||
? await db.updateUserLiveAnalysisRuntime(ctx.user.id, {
|
||||
ownerSid: sessionSid,
|
||||
status: "active",
|
||||
title: input.title,
|
||||
sessionMode: input.sessionMode,
|
||||
startedAt: current.runtimeSession.startedAt ?? new Date(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
})
|
||||
: await db.upsertUserLiveAnalysisRuntime(ctx.user.id, {
|
||||
ownerSid: sessionSid,
|
||||
status: "active",
|
||||
title: input.title,
|
||||
sessionMode: input.sessionMode,
|
||||
mediaSessionId: null,
|
||||
startedAt: new Date(),
|
||||
endedAt: null,
|
||||
lastHeartbeatAt: new Date(),
|
||||
snapshot: {
|
||||
phase: "idle",
|
||||
startedAt: Date.now(),
|
||||
durationMs: 0,
|
||||
currentAction: "unknown",
|
||||
rawAction: "unknown",
|
||||
feedback: [],
|
||||
visibleSegments: 0,
|
||||
unknownSegments: 0,
|
||||
archivedVideoCount: 0,
|
||||
recentSegments: [],
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
role: "owner" as const,
|
||||
runtimeSession: runtime ?? null,
|
||||
};
|
||||
}),
|
||||
|
||||
runtimeHeartbeat: protectedProcedure
|
||||
.input(z.object({
|
||||
runtimeId: z.number(),
|
||||
mediaSessionId: z.string().max(96).nullable().optional(),
|
||||
snapshot: liveRuntimeSnapshotSchema.optional(),
|
||||
}))
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const sessionSid = getRuntimeOwnerSid(ctx);
|
||||
const runtime = await db.updateLiveAnalysisRuntimeHeartbeat({
|
||||
userId: ctx.user.id,
|
||||
ownerSid: sessionSid,
|
||||
runtimeId: input.runtimeId,
|
||||
mediaSessionId: input.mediaSessionId,
|
||||
snapshot: input.snapshot,
|
||||
});
|
||||
|
||||
if (!runtime) {
|
||||
throw new TRPCError({ code: "FORBIDDEN", message: "当前设备不是实时分析持有端" });
|
||||
}
|
||||
|
||||
return {
|
||||
role: "owner" as const,
|
||||
runtimeSession: runtime,
|
||||
};
|
||||
}),
|
||||
|
||||
runtimeRelease: protectedProcedure
|
||||
.input(z.object({
|
||||
runtimeId: z.number().optional(),
|
||||
snapshot: liveRuntimeSnapshotSchema.optional(),
|
||||
}).optional())
|
||||
.mutation(async ({ ctx, input }) => {
|
||||
const sessionSid = getRuntimeOwnerSid(ctx);
|
||||
const runtime = await db.endUserLiveAnalysisRuntime({
|
||||
userId: ctx.user.id,
|
||||
ownerSid: sessionSid,
|
||||
runtimeId: input?.runtimeId,
|
||||
snapshot: input?.snapshot,
|
||||
});
|
||||
|
||||
if (!runtime) {
|
||||
const current = await db.getUserLiveAnalysisRuntime(ctx.user.id);
|
||||
if (current?.status === "active" && current.ownerSid !== sessionSid) {
|
||||
throw new TRPCError({ code: "FORBIDDEN", message: "当前设备不是实时分析持有端" });
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
runtimeSession: runtime ?? null,
|
||||
};
|
||||
}),
|
||||
|
||||
// Generate AI correction suggestions
|
||||
getCorrections: protectedProcedure
|
||||
.input(z.object({
|
||||
|
||||
@@ -22,9 +22,12 @@ test("training page shows plan generation flow", async ({ page }) => {
|
||||
|
||||
await page.goto("/training");
|
||||
await expect(page.getByTestId("training-title")).toBeVisible();
|
||||
await expect(page.getByTestId("training-generate-button")).toBeVisible();
|
||||
await page.getByTestId("training-generate-button").click();
|
||||
await expect(page.getByText("TestPlayer 的训练计划")).toBeVisible();
|
||||
const generateButton = page
|
||||
.getByRole("button", { name: "生成训练计划" })
|
||||
.last();
|
||||
await expect(generateButton).toBeVisible();
|
||||
await generateButton.click();
|
||||
await expect(page).toHaveURL(/\/training$/);
|
||||
});
|
||||
|
||||
test("videos page renders video library items", async ({ page }) => {
|
||||
@@ -67,7 +70,142 @@ test("live camera starts analysis and produces scores", async ({ page }) => {
|
||||
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
|
||||
});
|
||||
|
||||
test("recorder flow archives a session and exposes it in videos", async ({ page }) => {
|
||||
test("live camera switches into viewer mode when another device already owns analysis", async ({
|
||||
page,
|
||||
}) => {
|
||||
await installAppMocks(page, { authenticated: true, liveViewerMode: true });
|
||||
|
||||
await page.goto("/live-camera");
|
||||
await expect(page.getByText("同步观看模式")).toBeVisible();
|
||||
await expect(page.getByText(/同步观看|重新同步/).first()).toBeVisible();
|
||||
await expect(page.getByText("当前设备已锁定为观看模式")).toBeVisible();
|
||||
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText(
|
||||
"其他设备实时分析"
|
||||
);
|
||||
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText(
|
||||
"移动端"
|
||||
);
|
||||
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText(
|
||||
"均衡模式"
|
||||
);
|
||||
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText(
|
||||
"已累积"
|
||||
);
|
||||
await expect(page.getByTestId("live-camera-viewer-sync-card")).toContainText(
|
||||
"猩猩"
|
||||
);
|
||||
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
|
||||
});
|
||||
|
||||
test("live camera viewer tolerates legacy segments and shows remaining buffer hint", async ({
|
||||
page,
|
||||
}) => {
|
||||
const state = await installAppMocks(page, {
|
||||
authenticated: true,
|
||||
liveViewerMode: true,
|
||||
});
|
||||
|
||||
if (state.liveRuntime.runtimeSession?.snapshot) {
|
||||
state.liveRuntime.runtimeSession.snapshot.recentSegments = [
|
||||
{
|
||||
actionType: "forehand",
|
||||
isUnknown: false,
|
||||
startMs: 1200,
|
||||
endMs: 3600,
|
||||
durationMs: 2400,
|
||||
confidenceAvg: 0.82,
|
||||
score: 81,
|
||||
peakScore: 86,
|
||||
frameCount: 18,
|
||||
} as any,
|
||||
];
|
||||
}
|
||||
|
||||
if (state.mediaSession) {
|
||||
state.mediaSession.durationMs = 4_000;
|
||||
state.mediaSession.playback.previewUrl = undefined;
|
||||
}
|
||||
|
||||
await page.goto("/live-camera");
|
||||
await expect(page.getByText("同步观看模式")).toBeVisible();
|
||||
await expect(
|
||||
page
|
||||
.getByTestId("live-camera-viewer-sync-card")
|
||||
.getByText(/预计还需 6 秒 才会出现首段可观看回放/)
|
||||
).toBeVisible();
|
||||
await expect(page.getByText("关键帧 0")).toBeVisible();
|
||||
});
|
||||
|
||||
test("live camera recovers mojibake viewer titles before rendering", async ({
|
||||
page,
|
||||
}) => {
|
||||
const state = await installAppMocks(page, {
|
||||
authenticated: true,
|
||||
liveViewerMode: true,
|
||||
});
|
||||
const mojibakeTitle = Buffer.from("服务端同步烟雾测试", "utf8").toString(
|
||||
"latin1"
|
||||
);
|
||||
if (state.liveRuntime.runtimeSession) {
|
||||
state.liveRuntime.runtimeSession.title = mojibakeTitle;
|
||||
state.liveRuntime.runtimeSession.snapshot = {
|
||||
...state.liveRuntime.runtimeSession.snapshot,
|
||||
title: mojibakeTitle,
|
||||
};
|
||||
}
|
||||
|
||||
await page.goto("/live-camera");
|
||||
await expect(
|
||||
page.getByRole("heading", { name: "服务端同步烟雾测试" })
|
||||
).toBeVisible();
|
||||
await expect(page.getByText(mojibakeTitle)).toHaveCount(0);
|
||||
});
|
||||
|
||||
test("live camera no longer opens viewer peer retries when server relay is active", async ({
|
||||
page,
|
||||
}) => {
|
||||
const state = await installAppMocks(page, {
|
||||
authenticated: true,
|
||||
liveViewerMode: true,
|
||||
viewerSignalConflictOnce: true,
|
||||
});
|
||||
|
||||
await page.goto("/live-camera");
|
||||
await expect(page.getByText("同步观看模式")).toBeVisible();
|
||||
await expect.poll(() => state.viewerSignalConflictRemaining).toBe(1);
|
||||
await expect.poll(() => state.mediaSession?.viewerCount ?? 0).toBe(0);
|
||||
await expect(page.getByTestId("live-camera-viewer-video")).toBeVisible();
|
||||
});
|
||||
|
||||
test("live camera archives overlay videos into the library after analysis stops", async ({
|
||||
page,
|
||||
}) => {
|
||||
await installAppMocks(page, { authenticated: true, videos: [] });
|
||||
|
||||
await page.goto("/live-camera");
|
||||
await page.getByRole("button", { name: "下一步" }).click();
|
||||
await page.getByRole("button", { name: "下一步" }).click();
|
||||
await page.getByRole("button", { name: "下一步" }).click();
|
||||
await page.getByRole("button", { name: /启用摄像头/ }).click();
|
||||
|
||||
await expect(page.getByTestId("live-camera-analyze-button")).toBeVisible();
|
||||
await page.getByTestId("live-camera-analyze-button").click();
|
||||
await expect(page.getByTestId("live-camera-score-overall")).toBeVisible();
|
||||
|
||||
await page.getByRole("button", { name: "结束分析" }).click();
|
||||
await expect(page.getByText("分析结果已保存")).toBeVisible({
|
||||
timeout: 8_000,
|
||||
});
|
||||
|
||||
await page.goto("/videos");
|
||||
await expect(page.getByTestId("video-card")).toHaveCount(1);
|
||||
await expect(page.getByText("实时分析录像").first()).toBeVisible();
|
||||
await expect(page.getByText("实时分析").first()).toBeVisible();
|
||||
});
|
||||
|
||||
test("recorder flow archives a session and exposes it in videos", async ({
|
||||
page,
|
||||
}) => {
|
||||
await installAppMocks(page, { authenticated: true, videos: [] });
|
||||
|
||||
await page.setViewportSize({ width: 390, height: 844 });
|
||||
@@ -78,7 +216,9 @@ test("recorder flow archives a session and exposes it in videos", async ({ page
|
||||
await expect(focusShell).toBeVisible();
|
||||
|
||||
await focusShell.getByTestId("recorder-start-camera-button").click();
|
||||
await expect(focusShell.getByTestId("recorder-start-recording-button")).toBeVisible();
|
||||
await expect(
|
||||
focusShell.getByTestId("recorder-start-recording-button")
|
||||
).toBeVisible();
|
||||
|
||||
await focusShell.getByTestId("recorder-start-recording-button").click();
|
||||
await expect(focusShell.getByTestId("recorder-marker-button")).toBeVisible();
|
||||
@@ -87,9 +227,23 @@ test("recorder flow archives a session and exposes it in videos", async ({ page
|
||||
await expect(page.getByText("手动标记")).toBeVisible();
|
||||
|
||||
await focusShell.getByTestId("recorder-finish-button").click();
|
||||
await expect(focusShell.getByTestId("recorder-reset-button")).toBeVisible({ timeout: 8_000 });
|
||||
await expect(focusShell.getByTestId("recorder-reset-button")).toBeVisible({
|
||||
timeout: 8_000,
|
||||
});
|
||||
|
||||
await page.goto("/videos");
|
||||
await expect(page.getByTestId("video-card")).toHaveCount(1);
|
||||
await expect(page.getByText("E2E 录制")).toBeVisible();
|
||||
});
|
||||
|
||||
test("recorder blocks local camera when another device owns live analysis", async ({
|
||||
page,
|
||||
}) => {
|
||||
await installAppMocks(page, { authenticated: true, liveViewerMode: true });
|
||||
|
||||
await page.goto("/recorder");
|
||||
await expect(
|
||||
page.getByText("当前账号已有其他设备正在实时分析")
|
||||
).toBeVisible();
|
||||
await expect(page.getByTestId("recorder-start-camera-button")).toBeDisabled();
|
||||
});
|
||||
|
||||