display transcription for shared audio

an-lee
2024-01-13 16:32:12 +08:00
parent 80fe9caa90
commit f67a59e756
3 changed files with 49 additions and 2 deletions


@@ -110,6 +110,22 @@ export class Client {
     return this.api.delete(`/api/posts/${id}`);
   }
 
+  transcriptions(params?: {
+    page?: number;
+    items?: number;
+    targetId?: string;
+    targetType?: string;
+    targetMd5?: string;
+  }): Promise<
+    {
+      transcriptions: TranscriptionType[];
+    } & PagyResponseType
+  > {
+    return this.api.get("/api/transcriptions", {
+      params: decamelizeKeys(params),
+    });
+  }
+
   syncAudio(audio: Partial<AudioType>) {
     return this.api.post("/api/mine/audios", decamelizeKeys(audio));
   }
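
The new `transcriptions` client method wraps a paginated GET endpoint. A minimal usage sketch (the `client` instance and `md5` value here are assumptions for illustration, not part of this commit):

// Fetch the first page of transcriptions matching an audio file's md5.
// `client` is an instance of the Client class above; `md5` is assumed.
const { transcriptions } = await client.transcriptions({
  page: 1,
  items: 10,
  targetMd5: md5,
});
console.log(transcriptions[0]?.result);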


@@ -1,4 +1,5 @@
-import { useEffect, useState, useRef, useCallback } from "react";
+import { useEffect, useState, useRef, useCallback, useContext } from "react";
+import { AppSettingsProviderContext } from "@renderer/context";
 import { PitchContour } from "@renderer/components";
 import WaveSurfer from "wavesurfer.js";
 import { Button, Skeleton } from "@renderer/components/ui";
@@ -19,6 +20,15 @@ export const PostAudio = (props: {
     threshold: 1,
   });
   const [duration, setDuration] = useState<number>(0);
+  const { webApi } = useContext(AppSettingsProviderContext);
+  const [currentTime, setCurrentTime] = useState<number>(0);
+  const [transcription, setTranscription] = useState<TranscriptionType>();
+
+  const currentTranscription = (transcription?.result || []).find(
+    (s) =>
+      currentTime >= s.offsets.from / 1000.0 &&
+      currentTime <= s.offsets.to / 1000.0
+  );
 
   const onPlayClick = useCallback(() => {
     wavesurfer.isPlaying() ? wavesurfer.pause() : wavesurfer.play();
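
The `currentTranscription` lookup maps the player's position (in seconds) onto transcription segments whose `offsets` are in milliseconds. A self-contained sketch of that logic with assumed sample data:

// Segments carry millisecond offsets; playback time arrives in seconds.
type Segment = { text: string; offsets: { from: number; to: number } };

const segments: Segment[] = [
  { text: "Hello there.", offsets: { from: 0, to: 1200 } },
  { text: "How are you?", offsets: { from: 1200, to: 2500 } },
];

const currentTime = 1.5; // seconds, as reported by the player
const active = segments.find(
  (s) =>
    currentTime >= s.offsets.from / 1000.0 &&
    currentTime <= s.offsets.to / 1000.0
);
// active?.text === "How are you?"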
@@ -59,6 +69,9 @@ export const PostAudio = (props: {
wavesurfer.on("pause", () => {
setIsPlaying(false);
}),
wavesurfer.on("timeupdate", (time: number) => {
setCurrentTime(time);
}),
wavesurfer.on("decode", () => {
setDuration(wavesurfer.getDuration());
const peaks = wavesurfer.getDecodedData().getChannelData(0);
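
The `timeupdate` handler is registered alongside the existing listeners; in wavesurfer.js v7, `on()` returns an unsubscribe function. A sketch of how the surrounding effect likely wires this up (the `subscriptions` array name is an assumption based on the visible cleanup code):

// Collect the unsubscribe functions returned by `on()` and call them on
// cleanup, so listeners do not leak across re-renders.
useEffect(() => {
  if (!wavesurfer) return;

  const subscriptions = [
    wavesurfer.on("timeupdate", (time: number) => {
      setCurrentTime(time);
    }),
  ];

  return () => {
    subscriptions.forEach((unsubscribe) => unsubscribe());
  };
}, [wavesurfer]);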
@@ -80,6 +93,16 @@ export const PostAudio = (props: {
     };
   }, [wavesurfer]);
 
+  useEffect(() => {
+    webApi
+      .transcriptions({
+        targetMd5: audio.md5,
+      })
+      .then((response) => {
+        setTranscription(response?.transcriptions?.[0]);
+      });
+  }, [audio.md5]);
+
   return (
     <div className="w-full">
       <div className="flex justify-end">
@@ -119,6 +142,14 @@ export const PostAudio = (props: {
         ></div>
       </div>
+      {currentTranscription && (
+        <div className="mt-2 bg-muted px-4 py-2 rounded">
+          <div className="text-muted-foreground text-center font-serif">
+            {currentTranscription.text}
+          </div>
+        </div>
+      )}
       {audio.coverUrl && (
         <div className="">
           <img src={audio.coverUrl} className="w-full rounded" />
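
Taken together, PostAudio now assumes a transcription payload shaped roughly like this (inferred from the fields the component reads; the real `TranscriptionType` may carry more):

// Minimal shape implied by usage: a `result` array of segments with
// millisecond offsets and the text to display under the waveform.
type TranscriptionType = {
  targetMd5?: string;
  result: {
    text: string;
    offsets: { from: number; to: number };
  }[];
};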


@@ -122,7 +122,7 @@ export const PostRecording = (props: {
       {recording.referenceText && (
         <div className="mt-2 bg-muted px-4 py-2 rounded">
-          <div className="text-muted-foreground">
+          <div className="text-muted-foreground text-center font-serif">
             {recording.referenceText}
           </div>
         </div>
       )}