feat: auto LLM feedback runner + problem link + 5xx retry
- Add SubmissionFeedbackRunner: async background queue for auto LLM feedback
- Enqueue feedback generation after each submission in submitProblem()
- Register runner in main.cc with CSP_FEEDBACK_AUTO_RUN env var
- Add problem_title to GET /api/v1/submissions/{id} response
- Frontend: clickable problem link on submission detail page
- Enhance LLM prompt with richer analysis dimensions
- Add 5xx/connection error retry (max 5 attempts) in Python LLM script
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
这个提交包含在:
@@ -8,6 +8,7 @@ import json
import os
import re
import sys
import time
from dataclasses import dataclass
from typing import Any, Dict, List, Optional

@@ -88,9 +89,17 @@ def build_fallback_feedback(payload: Dict[str, Any], llm_error: str = "") -> Llm
)

lines: List[str] = []
lines.append("### 评测结论")
lines.append("### 总体评语")
lines.append(f"- 本次状态:**{status}**,分数:**{score}**。")
lines.append(f"- 思路评价:{thought}")
lines.append(f"- {thought}")
lines.append("")
lines.append("### 代码逐段讲解")
lines.append("- 由于 LLM 分析不可用,暂时无法提供代码逐段讲解。请仔细检查代码逻辑,确保输入输出格式正确。")
lines.append("")
lines.append("### 知识点提示")
lines.append("- 强项:基础实现与调试流程。")
lines.append("- 待加强:边界构造、类型一致性、赛场环境兼容性。")
lines.append("- 请对照 CSP-J/S 大纲,确认所涉及的算法知识点是否已掌握。")
lines.append("")
lines.append("### 福建 CSP-J/S 规范检查(C++14)")
for tip in risk_tips:
@@ -100,16 +109,12 @@ def build_fallback_feedback(payload: Dict[str, Any], llm_error: str = "") -> Llm
if runtime_log.strip():
lines.append("- 运行日志有输出,建议重点检查边界输入与数组越界风险。")
lines.append("")
lines.append("### 修改建议(可执行)")
lines.append("### 改进建议")
lines.append("- 按“先编译通过→再保证正确→最后做优化”的顺序迭代。")
lines.append("- `long long` 读写统一 `%lld`;不要使用 `%I64d`。")
lines.append("- 清理 signed/unsigned 警告,降低不同编译器行为差异。")
lines.append("- 确保 `int main()` 且 `return 0;`。")
lines.append("")
lines.append("### 知识点评测")
lines.append("- 强项:基础实现与调试流程。")
lines.append("- 待加强:边界构造、类型一致性、赛场环境兼容性。")
lines.append("")
lines.append("### 推荐外链资料")
for item in DEFAULT_LINKS:
lines.append(f"- [{item['title']}]({item['url']})")
@@ -162,17 +167,25 @@ def call_llm(payload: Dict[str, Any]) -> LlmResult:
raise RuntimeError("missing OI_LLM_API_URL")

system_prompt = (
"你是福建省 CSP-J/S 代码规范与评测老师。"
"你是福建省 CSP-J/S 代码规范与评测老师,也是一位经验丰富的算法竞赛教练。"
"请严格按 C++14 旧 GCC 环境给建议,重点指出会导致 CE/RE/爆零的风险。"
"你的评测需要覆盖以下维度:\n"
"1. 总体评语:对代码质量和解题思路的综合评价(2-3句话)\n"
"2. 代码逐段讲解:解释代码的关键逻辑和实现思路\n"
"3. 知识点提示:涉及的算法和数据结构知识点,与 CSP-J/S 大纲的对应关系\n"
"4. 福建 CSP-J/S 规范检查(C++14):指出不符合规范的地方\n"
"5. 改进建议:具体可操作的优化方向\n"
"6. 推荐外链资料:相关学习资源\n"
"输出 JSON,不要输出其他文字。"
)
user_prompt = {
"task": "分析这份提交并给出改进建议",
"task": "分析这份提交并给出详细点评",
"required_sections": [
"评测结论",
"总体评语",
"代码逐段讲解",
"知识点提示",
"福建 CSP-J/S 规范检查(C++14)",
"修改建议",
"知识点评测",
"改进建议",
"推荐外链资料",
],
"submission": payload,
@@ -197,7 +210,37 @@ def call_llm(payload: Dict[str, Any]) -> LlmResult:
],
}

resp = requests.post(api_url, headers=headers, json=body, timeout=50)
max_retries = 5
last_exc: Optional[Exception] = None
resp = None
for attempt in range(1, max_retries + 1):
try:
resp = requests.post(api_url, headers=headers, json=body, timeout=50)
if resp.status_code < 500:
resp.raise_for_status()
break
# 5xx — retry
last_exc = requests.exceptions.HTTPError(
f"HTTP {resp.status_code}", response=resp
)
print(
f"[feedback] LLM returned {resp.status_code}, "
f"retry {attempt}/{max_retries}",
file=sys.stderr,
)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout) as exc:
last_exc = exc
print(
f"[feedback] LLM request failed ({exc}), "
f"retry {attempt}/{max_retries}",
file=sys.stderr,
)
if attempt < max_retries:
time.sleep(min(2 ** attempt, 16))
else:
raise last_exc or RuntimeError("LLM request failed after retries")

resp.raise_for_status()
data = resp.json()

在新工单中引用
屏蔽一个用户