Feat: allow setting up a proxy (#238)

* add https proxy

* remove proxy in renderer

* proxy work for openai request

* use proxyAgent to enable system proxy

* add proxy setting

* tweak proxy setting
This commit is contained in:
an-lee
2024-02-01 15:33:37 +08:00
committed by GitHub
parent 93dea4ad54
commit 51a810fdfd
17 changed files with 470 additions and 32 deletions

View File

@@ -31,6 +31,7 @@ import Ffmpeg from "@main/ffmpeg";
import whisper from "@main/whisper";
import { hashFile } from "@/utils";
import { WEB_API_URL } from "@/constants";
import proxyAgent from "@main/proxy-agent";
const logger = log.scope("db/models/conversation");
@Table({
@@ -137,36 +138,51 @@ export class Conversation extends Model<Conversation> {
// choose llm based on engine
llm() {
const { httpAgent, fetch } = proxyAgent();
if (this.engine === "enjoyai") {
return new ChatOpenAI({
openAIApiKey: settings.getSync("user.accessToken") as string,
modelName: this.model,
configuration: {
baseURL: `${process.env.WEB_API_URL || WEB_API_URL}/api/ai`,
return new ChatOpenAI(
{
openAIApiKey: settings.getSync("user.accessToken") as string,
modelName: this.model,
configuration: {
baseURL: `${process.env.WEB_API_URL || WEB_API_URL}/api/ai`,
},
temperature: this.configuration.temperature,
n: this.configuration.numberOfChoices,
maxTokens: this.configuration.maxTokens,
frequencyPenalty: this.configuration.frequencyPenalty,
presencePenalty: this.configuration.presencePenalty,
},
temperature: this.configuration.temperature,
n: this.configuration.numberOfChoices,
maxTokens: this.configuration.maxTokens,
frequencyPenalty: this.configuration.frequencyPenalty,
presencePenalty: this.configuration.presencePenalty,
});
{
httpAgent,
// @ts-ignore
fetch,
}
);
} else if (this.engine === "openai") {
const key = settings.getSync("openai.key") as string;
if (!key) {
throw new Error(t("openaiKeyRequired"));
}
return new ChatOpenAI({
openAIApiKey: key,
modelName: this.model,
configuration: {
baseURL: this.configuration.baseUrl,
return new ChatOpenAI(
{
openAIApiKey: key,
modelName: this.model,
configuration: {
baseURL: this.configuration.baseUrl,
},
temperature: this.configuration.temperature,
n: this.configuration.numberOfChoices,
maxTokens: this.configuration.maxTokens,
frequencyPenalty: this.configuration.frequencyPenalty,
presencePenalty: this.configuration.presencePenalty,
},
temperature: this.configuration.temperature,
n: this.configuration.numberOfChoices,
maxTokens: this.configuration.maxTokens,
frequencyPenalty: this.configuration.frequencyPenalty,
presencePenalty: this.configuration.presencePenalty,
});
{
httpAgent,
// @ts-ignore
fetch,
}
);
} else if (this.engine === "googleGenerativeAi") {
const key = settings.getSync("googleGenerativeAi.key") as string;
if (!key) {

View File

@@ -18,12 +18,13 @@ import mainWindow from "@main/window";
import fs from "fs-extra";
import path from "path";
import settings from "@main/settings";
import OpenAI from "openai";
import OpenAI, { type ClientOptions } from "openai";
import { t } from "i18next";
import { hashFile } from "@/utils";
import { Audio, Message } from "@main/db/models";
import log from "electron-log/main";
import { WEB_API_URL } from "@/constants";
import proxyAgent from "@main/proxy-agent";
const logger = log.scope("db/models/speech");
@Table({
@@ -171,10 +172,10 @@ export class Speech extends Model<Speech> {
const filename = `${Date.now()}${extname}`;
const filePath = path.join(settings.userDataPath(), "speeches", filename);
let openaiConfig = {};
let openaiConfig: ClientOptions = {};
if (engine === "enjoyai") {
openaiConfig = {
apiKey: settings.getSync("user.accessToken"),
apiKey: settings.getSync("user.accessToken") as string,
baseURL: `${process.env.WEB_API_URL || WEB_API_URL}/api/ai`,
};
} else if (engine === "openai") {
@@ -187,7 +188,14 @@ export class Speech extends Model<Speech> {
baseURL: baseUrl || defaultConfig.baseUrl,
};
}
const openai = new OpenAI(openaiConfig);
const { httpAgent, fetch } = proxyAgent();
const openai = new OpenAI({
...openaiConfig,
httpAgent,
// @ts-ignore
fetch,
});
const file = await openai.audio.speech.create({
input: text,

View File

@@ -0,0 +1,21 @@
import settings from "@main/settings";
import { HttpsProxyAgent } from "https-proxy-agent";
import { ProxyAgent } from "proxy-agent";
import fetch from "node-fetch";
/**
 * Build the HTTP agent and fetch implementation that honour the user's
 * proxy settings.
 *
 * Reads the persisted "proxy" config. When proxying is enabled and a URL is
 * configured, both http and https traffic are routed through that proxy;
 * otherwise a default ProxyAgent is used (which falls back to the system
 * proxy, if any).
 *
 * @returns `{ httpAgent, fetch }` suitable for spreading into OpenAI /
 *          LangChain client options.
 */
export default function () {
  // BUGFIX: getSync may return undefined on first run, before the renderer
  // has seeded a default via the "system-proxy-get" IPC handler — the
  // original `proxyConfig.enabled` would throw a TypeError in that case.
  const proxyConfig = settings.getSync("proxy") as ProxyConfigType | undefined;

  let proxyAgent = new ProxyAgent();
  if (proxyConfig?.enabled && proxyConfig.url) {
    proxyAgent = new ProxyAgent({
      httpAgent: new HttpsProxyAgent(proxyConfig.url),
      httpsAgent: new HttpsProxyAgent(proxyConfig.url),
    });
  }

  return {
    httpAgent: proxyAgent,
    fetch,
  };
}

View File

@@ -7,6 +7,7 @@ import os from "os";
import commandExists from "command-exists";
import log from "electron-log";
import * as i18n from "i18next";
import mainWin from "@main/window";
const logger = log.scope("settings");

View File

@@ -64,6 +64,49 @@ main.init = () => {
// TedProvider
tedProvider.registerIpcHandlers();
// proxy
// Return the persisted proxy configuration, seeding a disabled default on
// first access so callers always receive a well-formed config object.
ipcMain.handle("system-proxy-get", (_event) => {
  const stored = settings.getSync("proxy");
  if (stored) return stored;

  const defaults = {
    enabled: false,
    url: "",
  };
  settings.setSync("proxy", defaults);
  return defaults;
});
// Persist a proxy configuration and apply it to the main window's session.
// Throws when no config object is supplied; a config without a URL is
// treated as disabled. Returns the result of persisting the config.
ipcMain.handle("system-proxy-set", (_event, config) => {
  if (!config) {
    throw new Error("Invalid proxy config");
  }
  // A proxy cannot be enabled without a URL.
  // (The original also had a dead `if (config)` guard here — `config` is
  // always truthy after the throw above.)
  if (!config.url) {
    config.enabled = false;
  }

  const session = mainWindow.webContents.session;
  if (config.enabled && config.url) {
    const uri = new URL(config.url);
    // Route both http and https traffic through the configured proxy host.
    const proxyRules = `http=${uri.host};https=${uri.host}`;
    session.setProxy({
      proxyRules,
    });
  } else {
    // Fall back to the OS-level proxy settings.
    session.setProxy({
      mode: "system",
    });
  }
  // Drop any connections established under the previous proxy rules so new
  // requests pick up the updated configuration immediately.
  session.closeAllConnections();

  return settings.setSync("proxy", config);
});
// BrowserView
ipcMain.handle(
"view-load",