From 8b554241509433f75051031c8d99922185e3ecbd Mon Sep 17 00:00:00 2001
From: maojindao55
Date: Mon, 17 Feb 2025 14:09:42 +0800
Subject: [PATCH] fix: select LLM provider config per request; stop tracking .dev.vars

NOTE(review): .dev.vars containing a live DashScope API key was previously
committed. Deleting the file and gitignoring it does not remove the key from
git history — the key (sk-8d07...0792) must be revoked/rotated, and history
scrubbing (e.g. git filter-repo) should be considered.

---
 .dev.vars             | 1 -
 .gitignore            | 1 +
 functions/api/chat.ts | 34 +++++++++++++++++++++++++++-------
 3 files changed, 28 insertions(+), 8 deletions(-)
 delete mode 100644 .dev.vars

diff --git a/.dev.vars b/.dev.vars
deleted file mode 100644
index 4de4a74..0000000
--- a/.dev.vars
+++ /dev/null
@@ -1 +0,0 @@
-DASHSCOPE_API_KEY=sk-8d0715456b2844c7b28d1c4226fd0792
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index c6bba59..5518781 100644
--- a/.gitignore
+++ b/.gitignore
@@ -78,6 +78,7 @@ web_modules/
 .env.test.local
 .env.production.local
 .env.local
+.dev.vars
 
 # parcel-bundler cache (https://parceljs.org/)
 .cache
diff --git a/functions/api/chat.ts b/functions/api/chat.ts
index 02a463e..aec643e 100644
--- a/functions/api/chat.ts
+++ b/functions/api/chat.ts
@@ -2,16 +2,36 @@ import OpenAI from 'openai';
 
 export async function onRequestPost({ env, request }) {
   try {
-    const { message, personality, history, aiName, index } = await request.json();
-    const apiKey = env.DASHSCOPE_API_KEY;
+    const { message, personality, history, aiName, index, model = "qwen-plus" } = await request.json();
+
+    // 配置不同模型的设置
+    const modelConfigs = {
+      "qwen-plus": {
+        apiKey: env.DASHSCOPE_API_KEY,
+        baseURL: "https://dashscope.aliyuncs.com/compatible-mode/v1"
+      },
+      "hunyuan-turbo": {
+        apiKey: env.HUNYUAN_API_KEY,
+        baseURL: "https://api.hunyuan.cloud.tencent.com/v1"
+      },
+      "gpt-4": {
+        apiKey: env.OPENAI_API_KEY,
+        baseURL: "https://api.openai.com/v1"
+      }
+    };
 
-    if (!apiKey) {
-      throw new Error('API密钥未配置');
+    const modelConfig = modelConfigs[model];
+    if (!modelConfig) {
+      throw new Error('不支持的模型类型');
+    }
+
+    if (!modelConfig.apiKey) {
+      throw new Error(`${model} 的API密钥未配置`);
     }
 
     const openai = new OpenAI({
-      apiKey: apiKey,
-      baseURL: "https://dashscope.aliyuncs.com/compatible-mode/v1"
+      apiKey: modelConfig.apiKey,
+      baseURL: modelConfig.baseURL
     });
 
     // 根据性格设置不同的系统提示语
@@ -68,7 +88,7 @@ export async function onRequestPost({ env, request }) {
 
     // 使用流式响应
     const stream = await openai.chat.completions.create({
-      model: "qwen-plus",
+      model: model,
       messages: messages,
       stream: true,
     });