everyone-can-use-english/enjoy/src/renderer/context/ai-settings-provider.tsx
Last commit: decea62b90 by an-lee, 2024-07-24 ("Feat update openai setting (#876)": remove google ai, update openAI models, use custom models for openai)

import { createContext, useEffect, useState, useContext } from "react";
import { AppSettingsProviderContext } from "@renderer/context";
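
// Shape of the AI settings shared through this context: Whisper (speech-to-text)
// configuration plus the OpenAI provider config and the currently selected GPT engine.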
type AISettingsProviderState = {
  setWhisperModel?: (name: string) => Promise<void>;
  setWhisperService?: (name: string) => Promise<void>;
  whisperConfig?: WhisperConfigType;
  refreshWhisperConfig?: () => void;
  openai?: LlmProviderType;
  setOpenai?: (config: LlmProviderType) => void;
  setGptEngine?: (engine: GptEngineSettingType) => void;
  currentEngine?: GptEngineSettingType;
};

const initialState: AISettingsProviderState = {};

export const AISettingsProviderContext =
  createContext<AISettingsProviderState>(initialState);

export const AISettingsProvider = ({
  children,
}: {
  children: React.ReactNode;
}) => {
  const [gptEngine, setGptEngine] = useState<GptEngineSettingType>({
    name: "enjoyai",
    models: {
      default: "gpt-4o",
    },
  });
  const [openai, setOpenai] = useState<LlmProviderType>(null);
  const [whisperConfig, setWhisperConfig] = useState<WhisperConfigType>(null);
  const { EnjoyApp, libraryPath, user, apiUrl } = useContext(
    AppSettingsProviderContext
  );

  useEffect(() => {
    fetchSettings();
  }, []);

  useEffect(() => {
    if (!libraryPath) return;
    refreshWhisperConfig();
  }, [libraryPath]);
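
  // Read the current Whisper configuration from the main process and cache it in state.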
  const refreshWhisperConfig = async () => {
    const config = await EnjoyApp.whisper.config();
    setWhisperConfig(config);
  };

  const setWhisperModel = async (name: string) => {
    return EnjoyApp.whisper.setModel(name).then((config) => {
      if (!config) return;
      setWhisperConfig(config);
    });
  };

  const setWhisperService = async (name: WhisperConfigType["service"]) => {
    return EnjoyApp.whisper.setService(name).then((config) => {
      if (!config) return;
      setWhisperConfig(config);
    });
  };
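
  // Load the stored OpenAI provider config and resolve which GPT engine to use,
  // migrating older "default engine" settings when no GPT engine has been saved yet.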
  const fetchSettings = async () => {
    const _openai = await EnjoyApp.settings.getLlm("openai");
    if (_openai) {
      setOpenai(Object.assign({ name: "openai" }, _openai));
    }

    const _defaultEngine = await EnjoyApp.settings.getDefaultEngine();
    const _gptEngine = await EnjoyApp.settings.getGptEngine();
    if (_gptEngine) {
      setGptEngine(_gptEngine);
    } else if (_defaultEngine) {
      // Migrate default engine to gpt engine
      const engine = {
        name: _defaultEngine,
        models: {
          default: "gpt-4o",
        },
      };
      EnjoyApp.settings.setGptEngine(engine).then(() => {
        setGptEngine(engine);
      });
    } else if (_openai?.key) {
      const engine = {
        name: "openai",
        models: {
          default: "gpt-4o",
        },
      };
      EnjoyApp.settings.setGptEngine(engine).then(() => {
        setGptEngine(engine);
      });
    } else {
      const engine = {
        name: "enjoyai",
        models: {
          default: "gpt-4o",
        },
      };
      EnjoyApp.settings.setGptEngine(engine).then(() => {
        setGptEngine(engine);
      });
    }
  };
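
  // Persist an LLM provider config via the main process, then re-read it into local state.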
  const handleSetLlm = async (
    name: SupportedLlmProviderType,
    config: LlmProviderType
  ) => {
    await EnjoyApp.settings.setLlm(name, config);
    const _config = await EnjoyApp.settings.getLlm(name);
    switch (name) {
      case "openai":
        setOpenai(Object.assign({ name: "openai" }, _config));
        break;
      default:
        throw new Error("Unsupported LLM provider");
    }
  };
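
  // Expose the settings along with setters that persist changes before updating local state.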
  return (
    <AISettingsProviderContext.Provider
      value={{
        setGptEngine: (engine: GptEngineSettingType) => {
          EnjoyApp.settings.setGptEngine(engine).then(() => {
            setGptEngine(engine);
          });
        },
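        // Resolve the engine consumers should use: the user's own OpenAI credentials
        // when "openai" is selected, otherwise the EnjoyAI endpoint with the account's access token.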
        currentEngine:
          gptEngine.name === "openai"
            ? Object.assign(gptEngine, {
                key: openai?.key,
                baseUrl: openai?.baseUrl,
                models: openai?.models,
              })
            : Object.assign(gptEngine, {
                key: user?.accessToken,
                baseUrl: `${apiUrl}/api/ai`,
              }),
        openai,
        setOpenai: (config: LlmProviderType) => handleSetLlm("openai", config),
        whisperConfig,
        refreshWhisperConfig,
        setWhisperModel,
        setWhisperService,
      }}
    >
      {children}
    </AISettingsProviderContext.Provider>
  );
};
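
For illustration, a minimal sketch of how a component might consume this context; the EngineSummary component name and its markup are hypothetical, and it assumes it renders somewhere inside AISettingsProvider. Only the context fields come from the file above.

import { useContext } from "react";
import { AISettingsProviderContext } from "@renderer/context";

// Hypothetical consumer: displays the active GPT engine and Whisper service,
// and re-reads the Whisper config on demand.
export const EngineSummary = () => {
  const { currentEngine, whisperConfig, refreshWhisperConfig } = useContext(
    AISettingsProviderContext
  );

  return (
    <div>
      <span>Engine: {currentEngine?.name}</span>
      <span>Whisper service: {whisperConfig?.service}</span>
      <button onClick={() => refreshWhisperConfig?.()}>
        Refresh Whisper config
      </button>
    </div>
  );
};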