Feat: add env-driven LLM configuration and smoke test

这个提交包含在:
cryptocommuniums-afk
2026-03-14 21:54:51 +08:00
父节点 ba35e50528
当前提交 f5ad0449a8
修改 8 个文件,包含 203 行新增、9 行删除

28
scripts/llm-smoke.ts 普通文件
查看文件

@@ -0,0 +1,28 @@
import "dotenv/config";
import { invokeLLM } from "../server/_core/llm";
/**
 * Smoke-test the configured LLM backend.
 *
 * Builds a prompt from the CLI arguments (everything after the script
 * name, joined with spaces), falling back to a default Chinese prompt
 * when none is supplied. Logs the responding model, the finish reason,
 * and the reply content to stdout.
 */
async function main() {
  // `||` (not `??`) is deliberate: whitespace-only argv trims to ""
  // and should still use the default prompt.
  const prompt = process.argv.slice(2).join(" ").trim() || "你好,做个自我介绍";
  const { model, choices } = await invokeLLM({
    messages: [{ role: "user", content: prompt }],
  });
  const [choice] = choices;
  console.log(`model=${model}`);
  console.log(`finish_reason=${choice?.finish_reason ?? "unknown"}`);
  const reply = choice?.message?.content;
  if (typeof reply === "string") {
    console.log(reply);
    return;
  }
  // Non-string content (e.g. structured message parts): dump as JSON.
  console.log(JSON.stringify(reply, null, 2));
}
// Entry point: run the smoke test; exit non-zero on any failure so
// shell/CI callers can detect a broken LLM configuration.
main().catch((err) => {
  console.error("[LLM smoke test] failed");
  console.error(err);
  process.exit(1);
});