Refactor components codes (#538)

* remove deprecated code

* refactor code

* refactor components codes

* fix renderer tests
an-lee
2024-04-19 10:46:04 +08:00
committed by GitHub
parent 5f3ee54bb5
commit e4f5bdcfb9
30 changed files with 509 additions and 1147 deletions


@@ -28,15 +28,15 @@ import {
SelectContent,
SelectItem,
Textarea,
toast,
} from "@renderer/components/ui";
import { useState, useEffect, useContext } from "react";
import {
AppSettingsProviderContext,
AISettingsProviderContext,
} from "@renderer/context";
import { LoaderIcon, Share2Icon } from "lucide-react";
import { LoaderIcon } from "lucide-react";
import { useNavigate } from "react-router-dom";
import { GPT_PROVIDERS, TTS_PROVIDERS, GPTShareButton } from "@renderer/components";
const conversationFormSchema = z.object({
name: z.string().optional(),
@@ -73,7 +73,7 @@ export const ConversationForm = (props: {
}) => {
const { conversation, onFinish } = props;
const [submitting, setSubmitting] = useState<boolean>(false);
const [providers, setProviders] = useState<any>(LLM_PROVIDERS);
const [providers, setProviders] = useState<any>(GPT_PROVIDERS);
const { EnjoyApp } = useContext(AppSettingsProviderContext);
const { openai } = useContext(AISettingsProviderContext);
const navigate = useNavigate();
@@ -126,20 +126,20 @@ export const ConversationForm = (props: {
// @ts-ignore
values: conversation?.id
? {
name: conversation.name,
engine: conversation.engine,
configuration: {
type: conversation.configuration.type || "gpt",
...conversation.configuration,
},
}
name: conversation.name,
engine: conversation.engine,
configuration: {
type: conversation.configuration.type || "gpt",
...conversation.configuration,
},
}
: {
name: defaultConfig.name,
engine: defaultConfig.engine,
configuration: {
...defaultConfig.configuration,
name: defaultConfig.name,
engine: defaultConfig.engine,
configuration: {
...defaultConfig.configuration,
},
},
},
});
const onSubmit = async (data: z.infer<typeof conversationFormSchema>) => {
@@ -149,7 +149,7 @@ export const ConversationForm = (props: {
Object.keys(configuration).forEach((key) => {
if (key === "type") return;
if (!LLM_PROVIDERS[engine]?.configurable.includes(key)) {
if (!GPT_PROVIDERS[engine]?.configurable.includes(key)) {
// @ts-ignore
delete configuration[key];
}
@@ -161,12 +161,12 @@ export const ConversationForm = (props: {
// use default base url if not set
if (!configuration.baseUrl) {
configuration.baseUrl = LLM_PROVIDERS[engine]?.baseUrl;
configuration.baseUrl = GPT_PROVIDERS[engine]?.baseUrl;
}
// use default base url if not set
if (!configuration.tts.baseUrl) {
configuration.tts.baseUrl = LLM_PROVIDERS[engine]?.baseUrl;
configuration.tts.baseUrl = GPT_PROVIDERS[engine]?.baseUrl;
}
if (conversation?.id) {
@@ -275,7 +275,7 @@ export const ConversationForm = (props: {
<SelectContent>
{Object.keys(providers)
.filter((key) =>
LLM_PROVIDERS[key].types.includes(
GPT_PROVIDERS[key].types.includes(
form.watch("configuration.type")
)
)
@@ -343,163 +343,163 @@ export const ConversationForm = (props: {
)}
/>
{LLM_PROVIDERS[form.watch("engine")]?.configurable.includes(
{GPT_PROVIDERS[form.watch("engine")]?.configurable.includes(
"temperature"
) && (
<FormField
control={form.control}
name="configuration.temperature"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.temperature")}
</FormLabel>
<Input
type="number"
min="0"
max="1.0"
step="0.1"
value={field.value}
onChange={(event) => {
field.onChange(
event.target.value
? parseFloat(event.target.value)
: 0.0
);
}}
/>
<FormDescription>
{t("models.conversation.temperatureDescription")}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="configuration.temperature"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.temperature")}
</FormLabel>
<Input
type="number"
min="0"
max="1.0"
step="0.1"
value={field.value}
onChange={(event) => {
field.onChange(
event.target.value
? parseFloat(event.target.value)
: 0.0
);
}}
/>
<FormDescription>
{t("models.conversation.temperatureDescription")}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
)}
{LLM_PROVIDERS[form.watch("engine")]?.configurable.includes(
{GPT_PROVIDERS[form.watch("engine")]?.configurable.includes(
"maxTokens"
) && (
<FormField
control={form.control}
name="configuration.maxTokens"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.maxTokens")}
</FormLabel>
<Input
type="number"
min="0"
value={field.value}
onChange={(event) => {
if (!event.target.value) return;
field.onChange(parseInt(event.target.value));
}}
/>
<FormDescription>
{t("models.conversation.maxTokensDescription")}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="configuration.maxTokens"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.maxTokens")}
</FormLabel>
<Input
type="number"
min="0"
value={field.value}
onChange={(event) => {
if (!event.target.value) return;
field.onChange(parseInt(event.target.value));
}}
/>
<FormDescription>
{t("models.conversation.maxTokensDescription")}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
)}
{LLM_PROVIDERS[form.watch("engine")]?.configurable.includes(
{GPT_PROVIDERS[form.watch("engine")]?.configurable.includes(
"presencePenalty"
) && (
<FormField
control={form.control}
name="configuration.presencePenalty"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.presencePenalty")}
</FormLabel>
<Input
type="number"
min="-2"
step="0.1"
max="2"
value={field.value}
onChange={(event) => {
if (!event.target.value) return;
field.onChange(parseInt(event.target.value));
}}
/>
<FormDescription>
{t("models.conversation.presencePenaltyDescription")}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="configuration.presencePenalty"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.presencePenalty")}
</FormLabel>
<Input
type="number"
min="-2"
step="0.1"
max="2"
value={field.value}
onChange={(event) => {
if (!event.target.value) return;
field.onChange(parseInt(event.target.value));
}}
/>
<FormDescription>
{t("models.conversation.presencePenaltyDescription")}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
)}
{LLM_PROVIDERS[form.watch("engine")]?.configurable.includes(
{GPT_PROVIDERS[form.watch("engine")]?.configurable.includes(
"frequencyPenalty"
) && (
<FormField
control={form.control}
name="configuration.frequencyPenalty"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.frequencyPenalty")}
</FormLabel>
<Input
type="number"
min="-2"
step="0.1"
max="2"
value={field.value}
onChange={(event) => {
if (!event.target.value) return;
field.onChange(parseInt(event.target.value));
}}
/>
<FormDescription>
{t("models.conversation.frequencyPenaltyDescription")}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="configuration.frequencyPenalty"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.frequencyPenalty")}
</FormLabel>
<Input
type="number"
min="-2"
step="0.1"
max="2"
value={field.value}
onChange={(event) => {
if (!event.target.value) return;
field.onChange(parseInt(event.target.value));
}}
/>
<FormDescription>
{t("models.conversation.frequencyPenaltyDescription")}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
)}
{LLM_PROVIDERS[form.watch("engine")]?.configurable.includes(
{GPT_PROVIDERS[form.watch("engine")]?.configurable.includes(
"numberOfChoices"
) && (
<FormField
control={form.control}
name="configuration.numberOfChoices"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.numberOfChoices")}
</FormLabel>
<Input
type="number"
min="1"
step="1.0"
value={field.value}
onChange={(event) => {
field.onChange(
event.target.value
? parseInt(event.target.value)
: 1.0
);
}}
/>
<FormDescription>
{t("models.conversation.numberOfChoicesDescription")}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="configuration.numberOfChoices"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.numberOfChoices")}
</FormLabel>
<Input
type="number"
min="1"
step="1.0"
value={field.value}
onChange={(event) => {
field.onChange(
event.target.value
? parseInt(event.target.value)
: 1.0
);
}}
/>
<FormDescription>
{t("models.conversation.numberOfChoicesDescription")}
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
@@ -531,28 +531,28 @@ export const ConversationForm = (props: {
)}
/>
{LLM_PROVIDERS[form.watch("engine")]?.configurable.includes(
{GPT_PROVIDERS[form.watch("engine")]?.configurable.includes(
"baseUrl"
) && (
<FormField
control={form.control}
name="configuration.baseUrl"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.baseUrl")}
</FormLabel>
<Input
{...field}
placeholder={t(
"models.conversation.baseUrlDescription"
)}
/>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="configuration.baseUrl"
render={({ field }) => (
<FormItem>
<FormLabel>
{t("models.conversation.baseUrl")}
</FormLabel>
<Input
{...field}
placeholder={t(
"models.conversation.baseUrlDescription"
)}
/>
<FormMessage />
</FormItem>
)}
/>
)}
</>
)}
@@ -588,95 +588,95 @@ export const ConversationForm = (props: {
{TTS_PROVIDERS[
form.watch("configuration.tts.engine")
]?.configurable.includes("model") && (
<FormField
control={form.control}
name="configuration.tts.model"
render={({ field }) => (
<FormItem>
<FormLabel>{t("models.conversation.ttsModel")}</FormLabel>
<Select
onValueChange={field.onChange}
defaultValue={field.value}
value={field.value}
>
<FormControl>
<SelectTrigger>
<SelectValue placeholder={t("selectTtsModel")} />
</SelectTrigger>
</FormControl>
<SelectContent>
{(
TTS_PROVIDERS[form.watch("configuration.tts.engine")]
?.models || []
).map((model: string) => (
<SelectItem key={model} value={model}>
{model}
</SelectItem>
))}
</SelectContent>
</Select>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="configuration.tts.model"
render={({ field }) => (
<FormItem>
<FormLabel>{t("models.conversation.ttsModel")}</FormLabel>
<Select
onValueChange={field.onChange}
defaultValue={field.value}
value={field.value}
>
<FormControl>
<SelectTrigger>
<SelectValue placeholder={t("selectTtsModel")} />
</SelectTrigger>
</FormControl>
<SelectContent>
{(
TTS_PROVIDERS[form.watch("configuration.tts.engine")]
?.models || []
).map((model: string) => (
<SelectItem key={model} value={model}>
{model}
</SelectItem>
))}
</SelectContent>
</Select>
<FormMessage />
</FormItem>
)}
/>
)}
{TTS_PROVIDERS[
form.watch("configuration.tts.engine")
]?.configurable.includes("voice") && (
<FormField
control={form.control}
name="configuration.tts.voice"
render={({ field }) => (
<FormItem>
<FormLabel>{t("models.conversation.ttsVoice")}</FormLabel>
<Select
onValueChange={field.onChange}
defaultValue={field.value}
value={field.value}
>
<FormControl>
<SelectTrigger>
<SelectValue placeholder={t("selectTtsVoice")} />
</SelectTrigger>
</FormControl>
<SelectContent>
{(
TTS_PROVIDERS[form.watch("configuration.tts.engine")]
?.voices || []
).map((voice: string) => (
<SelectItem key={voice} value={voice}>
<span className="capitalize">{voice}</span>
</SelectItem>
))}
</SelectContent>
</Select>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="configuration.tts.voice"
render={({ field }) => (
<FormItem>
<FormLabel>{t("models.conversation.ttsVoice")}</FormLabel>
<Select
onValueChange={field.onChange}
defaultValue={field.value}
value={field.value}
>
<FormControl>
<SelectTrigger>
<SelectValue placeholder={t("selectTtsVoice")} />
</SelectTrigger>
</FormControl>
<SelectContent>
{(
TTS_PROVIDERS[form.watch("configuration.tts.engine")]
?.voices || []
).map((voice: string) => (
<SelectItem key={voice} value={voice}>
<span className="capitalize">{voice}</span>
</SelectItem>
))}
</SelectContent>
</Select>
<FormMessage />
</FormItem>
)}
/>
)}
{TTS_PROVIDERS[
form.watch("configuration.tts.engine")
]?.configurable.includes("baseUrl") && (
<FormField
control={form.control}
name="configuration.tts.baseUrl"
render={({ field }) => (
<FormItem>
<FormLabel>{t("models.conversation.ttsBaseUrl")}</FormLabel>
<Input
{...field}
placeholder={t(
"models.conversation.ttsBaseUrlDescription"
)}
/>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="configuration.tts.baseUrl"
render={({ field }) => (
<FormItem>
<FormLabel>{t("models.conversation.ttsBaseUrl")}</FormLabel>
<Input
{...field}
placeholder={t(
"models.conversation.ttsBaseUrlDescription"
)}
/>
<FormMessage />
</FormItem>
)}
/>
)}
</div>
</ScrollArea>
@@ -730,196 +730,3 @@ export const ConversationForm = (props: {
</Form>
);
};
export const LLM_PROVIDERS: { [key: string]: any } = {
enjoyai: {
name: "EnjoyAI",
models: [
"gpt-3.5-turbo-0125",
"gpt-3.5-turbo",
"gpt-3.5-turbo-1106",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-instruct",
"gpt-4-turbo",
"gpt-4-turbo-2024-04-09",
"gpt-4-0125-preview",
"gpt-4-turbo-preview",
"gpt-4-1106-preview",
"gpt-4-vision-preview",
"gpt-4",
"gpt-4-32k",
"gpt-4-0613",
"gpt-4-32k-0613",
],
configurable: [
"model",
"roleDefinition",
"temperature",
"numberOfChoices",
"maxTokens",
"frequencyPenalty",
"presencePenalty",
"historyBufferSize",
"tts",
],
types: ["gpt", "tts"],
},
openai: {
name: "OpenAI",
description: t("youNeedToSetupApiKeyBeforeUsingOpenAI"),
models: [
"gpt-3.5-turbo-0125",
"gpt-3.5-turbo",
"gpt-3.5-turbo-1106",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-instruct",
"gpt-4-turbo",
"gpt-4-turbo-2024-04-09",
"gpt-4-0125-preview",
"gpt-4-turbo-preview",
"gpt-4-1106-preview",
"gpt-4-vision-preview",
"gpt-4",
"gpt-4-32k",
"gpt-4-0613",
"gpt-4-32k-0613",
],
configurable: [
"model",
"baseUrl",
"roleDefinition",
"temperature",
"numberOfChoices",
"maxTokens",
"frequencyPenalty",
"presencePenalty",
"historyBufferSize",
"tts",
],
types: ["gpt", "tts"],
},
googleGenerativeAi: {
name: "Google Generative AI",
models: ["gemini-pro"],
configurable: [
"model",
"roleDefinition",
"temperature",
"maxTokens",
"historyBufferSize",
"tts",
],
types: ["gpt"],
},
ollama: {
name: "Ollama",
description: t("ensureYouHaveOllamaRunningLocallyAndHasAtLeastOneModel"),
baseUrl: "http://localhost:11434",
models: [],
configurable: [
"model",
"baseUrl",
"roleDefinition",
"temperature",
"maxTokens",
"historyBufferSize",
"frequencyPenalty",
"presencePenalty",
"tts",
],
types: ["gpt"],
},
};
export const TTS_PROVIDERS: { [key: string]: any } = {
enjoyai: {
name: "EnjoyAI",
models: ["tts-1", "tts-1-hd"],
voices: ["alloy", "echo", "fable", "onyx", "nova", "shimmer"],
configurable: ["voice"],
},
openai: {
name: "OpenAI",
description: t("youNeedToSetupApiKeyBeforeUsingOpenAI"),
models: ["tts-1", "tts-1-hd"],
voices: ["alloy", "echo", "fable", "onyx", "nova", "shimmer"],
configurable: ["model", "voice", "baseUrl"],
},
};
const GPTShareButton = (props: {
conversation: Partial<ConversationType>;
}) => {
const { conversation } = props;
const { webApi } = useContext(AppSettingsProviderContext);
const navigate = useNavigate();
const handleShare = () => {
const { configuration } = conversation;
delete configuration.baseUrl
delete configuration?.tts?.baseUrl
if (!configuration.roleDefinition) {
toast.error('shareFailed');
return;
}
webApi
.createPost({
metadata: {
type: "gpt",
content: {
name: conversation.name,
engine: conversation.engine,
configuration,
},
},
})
.then(() => {
toast.success(t("sharedSuccessfully"), {
description: t("sharedGpt"),
action: {
label: t("view"),
onClick: () => {
navigate("/community");
},
},
actionButtonStyle: {
backgroundColor: "var(--primary)",
},
});
})
.catch((err) => {
toast.error(t("shareFailed"), { description: err.message });
});
}
if (!conversation.id) return null;
if (conversation.type !== "gpt") return null;
return (
<AlertDialog>
<AlertDialogTrigger asChild>
<Button variant="link" size="icon" className="rounded-full p-0 w-6 h-6">
<Share2Icon className="w-4 h-4 text-muted-foreground" />
</Button>
</AlertDialogTrigger>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>{t("shareGpt")}</AlertDialogTitle>
<AlertDialogDescription>
{t("areYouSureToShareThisGptToCommunity")}
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>{t("cancel")}</AlertDialogCancel>
<AlertDialogAction asChild>
<Button variant="default" onClick={handleShare}>
{t("share")}
</Button>
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
);
}


@@ -0,0 +1,101 @@
import { t } from "i18next";
export const GPT_PROVIDERS: { [key: string]: any } = {
enjoyai: {
name: "EnjoyAI",
models: [
"gpt-3.5-turbo-0125",
"gpt-3.5-turbo",
"gpt-3.5-turbo-1106",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-instruct",
"gpt-4-turbo",
"gpt-4-turbo-2024-04-09",
"gpt-4-0125-preview",
"gpt-4-turbo-preview",
"gpt-4-1106-preview",
"gpt-4-vision-preview",
"gpt-4",
"gpt-4-32k",
"gpt-4-0613",
"gpt-4-32k-0613",
],
configurable: [
"model",
"roleDefinition",
"temperature",
"numberOfChoices",
"maxTokens",
"frequencyPenalty",
"presencePenalty",
"historyBufferSize",
"tts",
],
types: ["gpt", "tts"],
},
openai: {
name: "OpenAI",
description: t("youNeedToSetupApiKeyBeforeUsingOpenAI"),
models: [
"gpt-3.5-turbo-0125",
"gpt-3.5-turbo",
"gpt-3.5-turbo-1106",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-instruct",
"gpt-4-turbo",
"gpt-4-turbo-2024-04-09",
"gpt-4-0125-preview",
"gpt-4-turbo-preview",
"gpt-4-1106-preview",
"gpt-4-vision-preview",
"gpt-4",
"gpt-4-32k",
"gpt-4-0613",
"gpt-4-32k-0613",
],
configurable: [
"model",
"baseUrl",
"roleDefinition",
"temperature",
"numberOfChoices",
"maxTokens",
"frequencyPenalty",
"presencePenalty",
"historyBufferSize",
"tts",
],
types: ["gpt", "tts"],
},
googleGenerativeAi: {
name: "Google Generative AI",
models: ["gemini-pro"],
configurable: [
"model",
"roleDefinition",
"temperature",
"maxTokens",
"historyBufferSize",
"tts",
],
types: ["gpt"],
},
ollama: {
name: "Ollama",
description: t("ensureYouHaveOllamaRunningLocallyAndHasAtLeastOneModel"),
baseUrl: "http://localhost:11434",
models: [],
configurable: [
"model",
"baseUrl",
"roleDefinition",
"temperature",
"maxTokens",
"historyBufferSize",
"frequencyPenalty",
"presencePenalty",
"tts",
],
types: ["gpt"],
},
};
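
A minimal sketch of how this registry is read by ConversationForm above (the helper names are illustrative, not part of the commit):

import { GPT_PROVIDERS } from "@renderer/components";

// Provider keys that support a given conversation type ("gpt" or "tts"),
// mirroring the filter applied when rendering the engine select.
const providersFor = (type: string): string[] =>
  Object.keys(GPT_PROVIDERS).filter((key) =>
    GPT_PROVIDERS[key].types.includes(type)
  );

// Whether a field such as "temperature" or "baseUrl" should be shown for the
// selected engine, as the form does with the `configurable` list.
const isConfigurable = (engine: string, field: string): boolean =>
  GPT_PROVIDERS[engine]?.configurable.includes(field) ?? false;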


@@ -0,0 +1,95 @@
import { AppSettingsProviderContext } from "@renderer/context";
import { t } from "i18next";
import { useContext } from "react";
import { useNavigate } from "react-router-dom";
import {
AlertDialog,
AlertDialogAction,
AlertDialogCancel,
AlertDialogContent,
AlertDialogDescription,
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
AlertDialogTrigger,
Button,
toast
} from "@renderer/components/ui";
import { Share2Icon } from "lucide-react";
export const GPTShareButton = (props: {
conversation: Partial<ConversationType>;
}) => {
const { conversation } = props;
const { webApi } = useContext(AppSettingsProviderContext);
const navigate = useNavigate();
const handleShare = () => {
const { configuration } = conversation;
delete configuration.baseUrl;
delete configuration?.tts?.baseUrl;
if (!configuration.roleDefinition) {
toast.error("shareFailed");
return;
}
webApi
.createPost({
metadata: {
type: "gpt",
content: {
name: conversation.name,
engine: conversation.engine,
configuration,
},
},
})
.then(() => {
toast.success(t("sharedSuccessfully"), {
description: t("sharedGpt"),
action: {
label: t("view"),
onClick: () => {
navigate("/community");
},
},
actionButtonStyle: {
backgroundColor: "var(--primary)",
},
});
})
.catch((err) => {
toast.error(t("shareFailed"), { description: err.message });
});
};
if (!conversation.id) return null;
if (conversation.type !== "gpt") return null;
return (
<AlertDialog>
<AlertDialogTrigger asChild>
<Button variant="link" size="icon" className="rounded-full p-0 w-6 h-6">
<Share2Icon className="w-4 h-4 text-muted-foreground" />
</Button>
</AlertDialogTrigger>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>{t("shareGpt")}</AlertDialogTitle>
<AlertDialogDescription>
{t("areYouSureToShareThisGptToCommunity")}
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>{t("cancel")}</AlertDialogCancel>
<AlertDialogAction asChild>
<Button variant="default" onClick={handleShare}>
{t("share")}
</Button>
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
);
};
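
A minimal usage sketch (the wrapper component and its markup are illustrative): the button can be dropped in unconditionally, because it renders nothing unless the conversation is persisted and of type "gpt".

import { GPTShareButton } from "@renderer/components";

export const ConversationHeader = (props: { conversation: ConversationType }) => (
  <div className="flex items-center justify-between">
    <span>{props.conversation.name}</span>
    {/* Opens a confirmation dialog and posts the conversation's GPT configuration to the community feed. */}
    <GPTShareButton conversation={props.conversation} />
  </div>
);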


@@ -1,5 +1,8 @@
export * from "./conversation-form";
export * from "./conversation-shortcuts";
export * from "./speech-form";
export * from "./speech-player";
export * from "./gpt-providers";
export * from "./gpt-share-button";
export * from "./tts-providers";


@@ -1,97 +0,0 @@
import { useState, useContext } from "react";
import { RecordButton, SpeechPlayer } from "@renderer/components";
import {
Button,
Textarea,
Dialog,
DialogContent,
} from "@renderer/components/ui";
import { AppSettingsProviderContext } from "@renderer/context";
import { LoaderIcon } from "lucide-react";
import { t } from "i18next";
export const SpeechForm = (props: {
lastMessage?: MessageType;
onSubmit: (content: string, file: string) => void;
}) => {
const { lastMessage, onSubmit } = props;
const { EnjoyApp } = useContext(AppSettingsProviderContext);
const [transcribing, setTranscribing] = useState(false);
const [editting, setEditting] = useState(false);
const [content, setContent] = useState("");
const [file, setFile] = useState("");
const handleCancel = () => {
setEditting(false);
setContent("");
setFile("");
};
const handleSubmit = () => {
if (!content) return;
onSubmit(content, file);
handleCancel();
};
return (
<>
<RecordButton
disabled={false}
onRecordEnd={async (blob, _duration) => {
setTranscribing(true);
setEditting(true);
EnjoyApp.whisper
.transcribe(
{
type: blob.type.split(";")[0],
arrayBuffer: await blob.arrayBuffer(),
},
lastMessage?.content
)
.then(({ content, file }) => {
setContent(content);
setFile(file);
})
.finally(() => {
setTranscribing(false);
});
}}
/>
<Dialog
open={editting}
onOpenChange={(value) => {
setEditting(value);
}}
>
<DialogContent>
{transcribing ? (
<div className="flex items-center justify-center p-6">
<LoaderIcon className="w-6 h-6 animate-spin" />
</div>
) : (
<div className="">
<div className="my-4">
<Textarea
className="w-full h-36"
value={content}
onChange={(e) => setContent(e.target.value)}
/>
</div>
{file && (
<div className="mb-4">
<SpeechPlayer speech={{ playSource: "enjoy://" + file }} />
</div>
)}
<div className="flex items-center justify-end space-x-2">
<Button variant="secondary" onClick={handleCancel}>
{t("cancel")}
</Button>
<Button onClick={handleSubmit}>{t("send")}</Button>
</div>
</div>
)}
</DialogContent>
</Dialog>
</>
);
};


@@ -0,0 +1,17 @@
import { t } from "i18next";
export const TTS_PROVIDERS: { [key: string]: any } = {
enjoyai: {
name: "EnjoyAI",
models: ["tts-1", "tts-1-hd"],
voices: ["alloy", "echo", "fable", "onyx", "nova", "shimmer"],
configurable: ["voice"],
},
openai: {
name: "OpenAI",
description: t("youNeedToSetupApiKeyBeforeUsingOpenAI"),
models: ["tts-1", "tts-1-hd"],
voices: ["alloy", "echo", "fable", "onyx", "nova", "shimmer"],
configurable: ["model", "voice", "baseUrl"],
},
};
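
A minimal sketch of how the TTS registry feeds the model and voice selects in ConversationForm (the helper names are illustrative):

import { TTS_PROVIDERS } from "@renderer/components";

// Options for the TTS model/voice selects, falling back to an empty list when
// the engine is unknown, as the form does with `?.models || []` / `?.voices || []`.
const ttsModels = (engine: string): string[] =>
  TTS_PROVIDERS[engine]?.models || [];

const ttsVoices = (engine: string): string[] =>
  TTS_PROVIDERS[engine]?.voices || [];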