feat: add env-driven LLM configuration and smoke test
这个提交包含在:
@@ -13,6 +13,14 @@ VITE_OAUTH_PORTAL_URL=
 VITE_FRONTEND_FORGE_API_URL=
 VITE_FRONTEND_FORGE_API_KEY=
+
+# LLM chat completion endpoint
+LLM_API_URL=https://one.hao.work/v1/chat/completions
+LLM_API_KEY=replace-with-llm-api-key
+LLM_MODEL=qwen3.5-plus
+LLM_MAX_TOKENS=32768
+LLM_ENABLE_THINKING=0
+LLM_THINKING_BUDGET=128
+
 # Optional direct media URL override for browser builds
 VITE_MEDIA_BASE_URL=/media
 
在新工单中引用
屏蔽一个用户