Fix voice chat (#1132)

* fix use-transcribe

* fix transcribe response

* fix voice chat
This commit is contained in:
an-lee
2024-10-17 16:05:33 +08:00
committed by GitHub
parent 2d1c0a6faa
commit 29932eeb44
7 changed files with 70 additions and 58 deletions

View File

@@ -103,9 +103,6 @@ class ChatMessagesHandler {
const transaction = await ChatMessage.sequelize.transaction();
try {
// update content
await message.update({ ...data }, { transaction });
if (recordingUrl) {
// destroy existing recording
await message.recording?.destroy({ transaction });
@@ -113,27 +110,30 @@ class ChatMessagesHandler {
// create new recording
const filePath = enjoyUrlToPath(recordingUrl);
const blob = fs.readFileSync(filePath);
const recording = await Recording.createFromBlob(
await Recording.createFromBlob(
{
type: "audio/wav",
arrayBuffer: blob,
},
{
targetType: "ChatMessage",
targetId: message.id,
referenceText: message.content,
targetId: id,
referenceText: data.content,
},
transaction
);
message.recording = recording;
} else if (message.recording) {
await message.recording.update(
{
referenceText: message.content,
referenceText: data.content,
},
{ transaction }
);
}
// update content
await message.update({ ...data }, { transaction });
await transaction.commit();
return (await message.reload()).toJSON();

View File

@@ -134,8 +134,8 @@ export class ChatMember extends Model<ChatMember> {
) {
if (!mainWindow.win) return;
if (action !== "destroy" && !member.agent) {
await member.reload();
if (action !== "destroy") {
member = await ChatMember.findByPk(member.id);
}
mainWindow.win.webContents.send("db-on-transaction", {
model: "ChatMember",

View File

@@ -200,8 +200,8 @@ export class ChatMessage extends Model<ChatMessage> {
) {
if (!mainWindow.win) return;
if (action !== "destroy" && !chatMessage.agent) {
await chatMessage.reload();
if (action !== "destroy") {
chatMessage = await ChatMessage.findByPk(chatMessage.id);
}
mainWindow.win.webContents.send("db-on-transaction", {

View File

@@ -108,19 +108,10 @@ export class Chat extends Model<Chat> {
static async notify(chat: Chat, action: "create" | "update" | "destroy") {
if (!mainWindow.win) return;
if (
action !== "destroy" &&
(!chat.members || !chat.members.some((m) => m.agent))
) {
chat.members = await ChatMember.findAll({
where: { chatId: chat.id },
include: [
{
association: "agent",
},
],
});
if (action !== "destroy") {
chat = await Chat.findByPk(chat.id);
}
mainWindow.win.webContents.send("db-on-transaction", {
model: "Chat",
id: chat.id,

View File

@@ -173,7 +173,7 @@ export class Recording extends Model<Recording> {
.then((result) => {
logger.debug("upload result:", result.data);
if (result.data.success) {
this.update({ uploadedAt: new Date() });
this.update({ uploadedAt: new Date() }, { hooks: false });
} else {
throw new Error(result.data);
}
@@ -194,7 +194,7 @@ export class Recording extends Model<Recording> {
});
return webApi.syncRecording(this.toJSON()).then(() => {
this.update({ syncedAt: new Date() });
this.update({ syncedAt: new Date() }, { hooks: false });
});
}

View File

@@ -67,13 +67,11 @@ export const ChatUserMessage = (props: {
useEffect(() => {
if (!isLastMessage) return;
// If the message is from recording, wait for user to confirm before asking agent
if (
chatMessage.recording &&
chatMessage.state !== ChatMessageStateEnum.COMPLETED
)
if (chatMessage.state !== ChatMessageStateEnum.COMPLETED) {
return;
askAgent();
} else {
askAgent();
}
}, [chatMessage]);
return (
@@ -87,32 +85,11 @@ export const ChatUserMessage = (props: {
: "bg-muted"
}`}
>
{recording &&
(displayPlayer ? (
<>
<WavesurferPlayer
id={recording.id}
src={recording.src}
autoplay={true}
/>
{recording?.pronunciationAssessment && (
<div className="flex justify-end">
<PronunciationAssessmentScoreDetail
assessment={recording.pronunciationAssessment}
/>
</div>
)}
</>
) : (
<Button
onClick={() => setDisplayPlayer(true)}
className="w-8 h-8"
variant="ghost"
size="icon"
>
<Volume2Icon className="w-5 h-5" />
</Button>
))}
<ChatUserMessageRecording
chatMessage={chatMessage}
displayPlayer={displayPlayer}
setDisplayPlayer={setDisplayPlayer}
/>
{editing ? (
<div className="">
<Textarea
@@ -181,6 +158,47 @@ export const ChatUserMessage = (props: {
);
};
// Renders the audio recording attached to a user chat message.
// Starts collapsed as a speaker-icon button; clicking it swaps in the
// waveform player (autoplaying) plus the pronunciation assessment
// score detail when one exists on the recording.
const ChatUserMessageRecording = (props: {
  chatMessage: ChatMessageType;
  displayPlayer: boolean;
  setDisplayPlayer: (value: boolean) => void;
}) => {
  const { chatMessage, displayPlayer, setDisplayPlayer } = props;
  const recording = chatMessage.recording;

  // Nothing to render when the message carries no playable audio.
  if (!recording?.src) return null;

  if (!displayPlayer) {
    // Collapsed state: a ghost icon button that reveals the player.
    return (
      <Button
        onClick={() => setDisplayPlayer(true)}
        className="w-8 h-8"
        variant="ghost"
        size="icon"
      >
        <Volume2Icon className="w-5 h-5" />
      </Button>
    );
  }

  // Expanded state: waveform player, with the assessment score shown
  // underneath when the recording has been assessed. The `recording`
  // null-check above guarantees it is non-null here.
  return (
    <>
      <WavesurferPlayer
        id={recording.id}
        src={recording.src}
        autoplay={true}
      />
      {recording.pronunciationAssessment && (
        <div className="flex justify-end">
          <PronunciationAssessmentScoreDetail
            assessment={recording.pronunciationAssessment}
          />
        </div>
      )}
    </>
  );
};
const ChatUserMessageActions = (props: {
chatMessage: ChatMessageType;
setContent: (content: string) => void;

View File

@@ -106,6 +106,7 @@ export const useTranscribe = () => {
return {
...result,
timeline: [],
url,
};
}
@@ -128,6 +129,7 @@ export const useTranscribe = () => {
return {
...result,
timeline,
url,
};
} else if (transcript) {
setOutput("Aligning the transcript...");
@@ -154,6 +156,7 @@ export const useTranscribe = () => {
return {
...result,
timeline,
url,
};
} else {
throw new Error(t("transcribeFailed"));