feat: async task pipeline for media and llm workflows

这个提交包含在:
cryptocommuniums-afk
2026-03-15 00:12:26 +08:00
父节点 1cc863e60e
当前提交 20e183d2da
修改 36 个文件,包含 1961 行新增、339 行删除

查看文件

@@ -57,6 +57,9 @@ export type ToolChoice =
export type InvokeParams = {
messages: Message[];
model?: string;
apiUrl?: string;
apiKey?: string;
tools?: Tool[];
toolChoice?: ToolChoice;
tool_choice?: ToolChoice;
@@ -209,13 +212,15 @@ const normalizeToolChoice = (
return toolChoice;
};
const resolveApiUrl = () =>
ENV.llmApiUrl && ENV.llmApiUrl.trim().length > 0
/**
 * Resolve the LLM chat-completions endpoint.
 * Precedence: explicit per-call `apiUrl` (non-blank) → configured
 * `ENV.llmApiUrl` (non-blank) → built-in default endpoint.
 */
const resolveApiUrl = (apiUrl?: string): string => {
  // A caller-supplied URL wins, but only when it is not blank/whitespace.
  if (apiUrl && apiUrl.trim().length > 0) {
    return apiUrl;
  }
  // Fall back to the environment-configured URL under the same blank check.
  if (ENV.llmApiUrl && ENV.llmApiUrl.trim().length > 0) {
    return ENV.llmApiUrl;
  }
  return "https://forge.manus.im/v1/chat/completions";
};
const assertApiKey = () => {
if (!ENV.llmApiKey) {
/**
 * Ensure an LLM API key is available before making a request.
 * Accepts either a per-call `apiKey` or the configured `ENV.llmApiKey`;
 * throws when neither is set (truthy).
 *
 * @throws Error when no API key is configured.
 */
const assertApiKey = (apiKey?: string): void => {
  const effectiveKey = apiKey || ENV.llmApiKey;
  if (!effectiveKey) {
    throw new Error("LLM_API_KEY is not configured");
  }
};
@@ -266,10 +271,13 @@ const normalizeResponseFormat = ({
};
export async function invokeLLM(params: InvokeParams): Promise<InvokeResult> {
assertApiKey();
assertApiKey(params.apiKey);
const {
messages,
model,
apiUrl,
apiKey,
tools,
toolChoice,
tool_choice,
@@ -280,7 +288,7 @@ export async function invokeLLM(params: InvokeParams): Promise<InvokeResult> {
} = params;
const payload: Record<string, unknown> = {
model: ENV.llmModel,
model: model || ENV.llmModel,
messages: messages.map(normalizeMessage),
};
@@ -315,11 +323,11 @@ export async function invokeLLM(params: InvokeParams): Promise<InvokeResult> {
payload.response_format = normalizedResponseFormat;
}
const response = await fetch(resolveApiUrl(), {
const response = await fetch(resolveApiUrl(apiUrl), {
method: "POST",
headers: {
"content-type": "application/json",
authorization: `Bearer ${ENV.llmApiKey}`,
authorization: `Bearer ${apiKey || ENV.llmApiKey}`,
},
body: JSON.stringify(payload),
});