Refactor recorder (#968)
* refactor recorder
* refactor recording for full transcription
* remove deprecated code
* recording time limit
* refactor hotkey invoke
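The refactor replaces the bespoke wavesurfer `RecordPlugin` recorder (the deleted `media-recorder` component further down) with the `useAudioRecorder` hook from `react-audio-voice-recorder`, instantiated once in `MediaPlayerProvider` and shared through context. A minimal, illustrative sketch of that surface, using only the fields the diff actually destructures (this component is not part of the commit):

```tsx
import { useEffect } from "react";
import { useAudioRecorder } from "react-audio-voice-recorder";

// Illustrative only: shows the hook surface this commit standardizes on.
const RecorderSketch = () => {
  const {
    startRecording,
    stopRecording,
    isRecording,
    recordingTime, // elapsed seconds; the new time-limit effects watch this
    recordingBlob, // populated once recording stops
  } = useAudioRecorder();

  // Consumers save the blob in an effect instead of a "record-end" callback.
  useEffect(() => {
    if (!recordingBlob) return;
    // e.g. EnjoyApp.recordings.create({ ... }) as in the hunks below
  }, [recordingBlob]);

  return (
    <button onClick={isRecording ? stopRecording : startRecording}>
      {isRecording ? `Stop (${recordingTime}s)` : "Record"}
    </button>
  );
};
```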
@@ -3,7 +3,6 @@ export * from "./media-caption";
 export * from "./media-info-panel";
 export * from "./media-recordings";
 export * from "./media-current-recording";
-export * from "./media-recorder";
 export * from "./media-transcription";
 export * from "./media-transcription-read-button";
 export * from "./media-transcription-generate-button";

@@ -4,7 +4,7 @@ import {
   HotKeysSettingsProviderContext,
   MediaPlayerProviderContext,
 } from "@renderer/context";
-import { MediaRecorder, RecordingDetail } from "@renderer/components";
+import { RecordingDetail } from "@renderer/components";
 import { renderPitchContour } from "@renderer/lib/utils";
 import { extractFrequencies } from "@/utils";
 import WaveSurfer from "wavesurfer.js";

@@ -46,12 +46,15 @@ import {
 import { t } from "i18next";
 import { formatDuration } from "@renderer/lib/utils";
 import { useHotkeys } from "react-hotkeys-hook";
+import { LiveAudioVisualizer } from "react-audio-visualize";

 export const MediaCurrentRecording = () => {
   const {
     layout,
     isRecording,
-    setIsRecording,
+    isPaused,
+    recordingTime,
+    mediaRecorder,
     currentRecording,
     renderPitchContour: renderMediaPitchContour,
     regions: mediaRegions,

@@ -421,13 +424,53 @@ export const MediaCurrentRecording = () => {
   }, [currentRecording, isRecording, layout?.width]);

   useHotkeys(currentHotkeys.PlayOrPauseRecording, () => {
-    document.getElementById("recording-play-or-pause-button")?.click();
+    const button = document.getElementById("recording-play-or-pause-button");
+    if (!button) return;
+
+    const rect = button.getBoundingClientRect();
+    const elementAtPoint = document.elementFromPoint(
+      rect.left + rect.width / 2,
+      rect.top + rect.height / 2
+    );
+    if (elementAtPoint !== button && !button.contains(elementAtPoint)) return;
+
+    button.click();
   });

   useHotkeys(currentHotkeys.PronunciationAssessment, () => {
+    if (isRecording) return;
     setDetailIsOpen(!detailIsOpen);
   });
-  if (isRecording) return <MediaRecorder />;
+
+  if (isRecording || isPaused) {
+    return (
+      <div className="h-full w-full flex items-center space-x-4">
+        <div className="flex-1 h-full border rounded-xl shadow-lg relative">
+          <div className="w-full h-full flex justify-center items-center gap-4">
+            <LiveAudioVisualizer
+              mediaRecorder={mediaRecorder}
+              barWidth={2}
+              gap={2}
+              width={480}
+              height="100%"
+              fftSize={512}
+              maxDecibels={-10}
+              minDecibels={-80}
+              smoothingTimeConstant={0.4}
+            />
+            <span className="serif text-muted-foreground text-sm">
+              {Math.floor(recordingTime / 60)}:
+              {String(recordingTime % 60).padStart(2, "0")}
+            </span>
+          </div>
+        </div>
+        <div className="h-full flex flex-col justify-start space-y-1.5">
+          <MediaRecordButton />
+        </div>
+      </div>
+    );
+  }
+
   if (!currentRecording?.src)
     return (
       <div className="h-full w-full flex items-center space-x-4">

@@ -443,10 +486,7 @@ export const MediaCurrentRecording = () => {
       </div>

       <div className="h-full flex flex-col justify-start space-y-1.5">
-        <MediaRecordButton
-          isRecording={isRecording}
-          setIsRecording={setIsRecording}
-        />
+        <MediaRecordButton />
       </div>
     </div>
   );

@@ -494,10 +534,7 @@ export const MediaCurrentRecording = () => {
         )}
       </Button>

-      <MediaRecordButton
-        isRecording={isRecording}
-        setIsRecording={setIsRecording}
-      />
+      <MediaRecordButton />

       <Button
         variant={detailIsOpen ? "secondary" : "outline"}

@@ -655,16 +692,69 @@ export const MediaCurrentRecording = () => {
   );
 };

-export const MediaRecordButton = (props: {
-  isRecording: boolean;
-  setIsRecording: (value: boolean) => void;
-}) => {
-  const { isRecording, setIsRecording } = props;
+export const MediaRecordButton = () => {
+  const {
+    media,
+    recordingBlob,
+    isRecording,
+    startRecording,
+    stopRecording,
+    recordingTime,
+    transcription,
+    currentSegmentIndex,
+  } = useContext(MediaPlayerProviderContext);
+  const { EnjoyApp } = useContext(AppSettingsProviderContext);
+
+  /*
+   * Save recording
+   */
+  useEffect(() => {
+    if (!media) return;
+    if (!transcription) return;
+    if (!recordingBlob) return;
+
+    toast.promise(
+      async () => {
+        const currentSegment =
+          transcription?.result?.timeline?.[currentSegmentIndex];
+        if (!currentSegment) return;
+
+        await EnjoyApp.recordings.create({
+          targetId: media.id,
+          targetType: media.mediaType,
+          blob: {
+            type: recordingBlob.type.split(";")[0],
+            arrayBuffer: await recordingBlob.arrayBuffer(),
+          },
+          referenceId: currentSegmentIndex,
+          referenceText: currentSegment.text,
+        });
+      },
+      {
+        loading: t("savingRecording"),
+        success: t("recordingSaved"),
+        error: (e) => t("failedToSaveRecording" + " : " + e.message),
+        position: "bottom-right",
+      }
+    );
+  }, [recordingBlob, media, transcription]);
+
+  useEffect(() => {
+    if (recordingTime >= 60) {
+      stopRecording();
+    }
+  }, [recordingTime]);

   return (
     <Button
       variant="ghost"
-      onClick={() => setIsRecording(!isRecording)}
+      onClick={() => {
+        if (isRecording) {
+          stopRecording();
+        } else {
+          startRecording();
+        }
+      }}
       id="media-record-button"
       data-tooltip-id="media-player-tooltip"
       data-tooltip-content={

@@ -218,6 +218,20 @@ export const MediaPlayerControls = () => {
     setActiveRegion(groupRegions[0]);
   };

+  const findAndClickElement = (id: string) => {
+    const button = document.getElementById(id);
+    if (!button) return;
+
+    const rect = button.getBoundingClientRect();
+    const elementAtPoint = document.elementFromPoint(
+      rect.left + rect.width / 2,
+      rect.top + rect.height / 2
+    );
+    if (elementAtPoint !== button && !button.contains(elementAtPoint)) return;
+
+    button.click();
+  };
+
   /*
    * Update segmentRegion when currentSegmentIndex is updated
    */

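The hotkey handlers in the hunks that follow delegate to this helper instead of clicking blindly. The point of the check: `document.elementFromPoint` returns the topmost element at the given viewport coordinates, so the synthetic click is skipped when the target button is unmounted, hidden, or covered by an overlay such as an open dialog. A commented restatement (the comments are editorial; the logic matches the hunk above):

```ts
const findAndClickElement = (id: string) => {
  const button = document.getElementById(id);
  if (!button) return; // target not rendered in the current view

  // Probe the topmost element at the button's center point.
  const rect = button.getBoundingClientRect();
  const elementAtPoint = document.elementFromPoint(
    rect.left + rect.width / 2,
    rect.top + rect.height / 2
  );
  // Only click if the button (or one of its children, e.g. its icon)
  // is what the user would actually hit at that point.
  if (elementAtPoint !== button && !button.contains(elementAtPoint)) return;

  button.click();
};
```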
@@ -376,7 +390,7 @@ export const MediaPlayerControls = () => {
   useHotkeys(
     currentHotkeys.PlayOrPause,
     () => {
-      document.getElementById("media-play-or-pause-button").click();
+      findAndClickElement("media-play-or-pause-button");
     },
     {
       preventDefault: true,

@@ -385,7 +399,7 @@ export const MediaPlayerControls = () => {
   useHotkeys(
     currentHotkeys.PlayPreviousSegment,
     () => {
-      document.getElementById("media-play-previous-button").click();
+      findAndClickElement("media-play-previous-button");
     },
     {
       preventDefault: true,

@@ -394,7 +408,7 @@ export const MediaPlayerControls = () => {
   useHotkeys(
     currentHotkeys.PlayNextSegment,
     () => {
-      document.getElementById("media-play-next-button").click();
+      findAndClickElement("media-play-next-button");
     },
     {
       preventDefault: true,

@@ -403,7 +417,7 @@ export const MediaPlayerControls = () => {
   useHotkeys(
     currentHotkeys.StartOrStopRecording,
     () => {
-      document.getElementById("media-record-button").click();
+      findAndClickElement("media-record-button");
     },
     {
       preventDefault: true,

@@ -412,7 +426,7 @@ export const MediaPlayerControls = () => {
   useHotkeys(
     currentHotkeys.Compare,
     () => {
-      document.getElementById("media-compare-button").click();
+      findAndClickElement("media-compare-button");
     },
     {
       preventDefault: true,

@@ -1,154 +0,0 @@
-import { useEffect, useState, useContext, useRef } from "react";
-import {
-  MediaPlayerProviderContext,
-  AppSettingsProviderContext,
-} from "@renderer/context";
-import RecordPlugin from "wavesurfer.js/dist/plugins/record";
-import WaveSurfer from "wavesurfer.js";
-import { t } from "i18next";
-import { toast } from "@renderer/components/ui";
-import { MediaRecordButton } from "@renderer/components";
-
-const ONE_MINUTE = 60;
-
-export const MediaRecorder = () => {
-  const {
-    layout,
-    media,
-    isRecording,
-    setIsRecording,
-    transcription,
-    currentSegmentIndex,
-    currentSegment,
-    createSegment,
-  } = useContext(MediaPlayerProviderContext);
-  const [player, setPlayer] = useState<WaveSurfer>();
-  const [access, setAccess] = useState<boolean>(false);
-  const [duration, setDuration] = useState<number>(0);
-  const { EnjoyApp } = useContext(AppSettingsProviderContext);
-
-  const ref = useRef(null);
-
-  const askForMediaAccess = () => {
-    EnjoyApp.system.preferences.mediaAccess("microphone").then((access) => {
-      if (access) {
-        setAccess(true);
-      } else {
-        setAccess(false);
-        toast.warning(t("noMicrophoneAccess"));
-      }
-    });
-  };
-
-  const createRecording = async (params: { blob: Blob; duration: number }) => {
-    if (!media) return;
-
-    const { blob, duration } = params;
-
-    toast.promise(
-      async () => {
-        const currentSegment =
-          transcription?.result?.timeline?.[currentSegmentIndex];
-        if (!currentSegment) return;
-
-        await EnjoyApp.recordings.create({
-          targetId: media.id,
-          targetType: media.mediaType,
-          blob: {
-            type: blob.type.split(";")[0],
-            arrayBuffer: await blob.arrayBuffer(),
-          },
-          referenceId: currentSegmentIndex,
-          referenceText: currentSegment.text,
-          duration,
-        });
-      },
-      {
-        loading: t("savingRecording"),
-        success: t("recordingSaved"),
-        error: (e) => t("failedToSaveRecording" + " : " + e.message),
-        position: "bottom-right",
-      }
-    );
-  };
-
-  useEffect(() => {
-    if (!access) return;
-    if (!isRecording) return;
-    if (!ref.current) return;
-    if (!layout?.playerHeight) return;
-
-    const ws = WaveSurfer.create({
-      container: ref.current,
-      fillParent: true,
-      height: layout.playerHeight,
-      autoCenter: false,
-      normalize: false,
-    });
-    setPlayer(ws);
-
-    const record = ws.registerPlugin(RecordPlugin.create());
-    let startAt = 0;
-
-    record.on("record-start", () => {
-      startAt = Date.now();
-    });
-
-    record.on("record-end", async (blob: Blob) => {
-      createRecording({ blob, duration: Date.now() - startAt });
-      setIsRecording(false);
-    });
-    let interval: NodeJS.Timeout;
-
-    RecordPlugin.getAvailableAudioDevices()
-      .then((devices) => devices.find((d) => d.kind === "audioinput"))
-      .then((device) => {
-        if (device) {
-          record.startRecording({ deviceId: device.deviceId });
-          setDuration(0);
-          interval = setInterval(() => {
-            setDuration((duration) => {
-              if (duration >= ONE_MINUTE) {
-                record.stopRecording();
-              }
-              return duration + 0.1;
-            });
-          }, 100);
-        } else {
-          toast.error(t("cannotFindMicrophone"));
-        }
-      });
-
-    return () => {
-      if (interval) clearInterval(interval);
-      record?.stopRecording();
-      player?.destroy();
-    };
-  }, [ref, isRecording, access, layout?.playerHeight]);
-
-  useEffect(() => {
-    if (!currentSegment) {
-      createSegment();
-    }
-    askForMediaAccess();
-  }, []);
-
-  return (
-    <div className="h-full w-full flex items-center space-x-4">
-      <div className="flex-1 h-full border rounded-xl shadow-lg relative">
-        <span className="absolute bottom-2 right-2 serif">
-          {duration.toFixed(1)}
-          <span className="text-xs"> / {ONE_MINUTE}</span>
-        </span>
-        <div className="h-full" ref={ref}></div>
-      </div>
-
-      <div className="h-full flex flex-col justify-start space-y-1.5">
-        <MediaRecordButton
-          isRecording={isRecording}
-          setIsRecording={setIsRecording}
-        />
-      </div>
-    </div>
-  );
-};

@@ -30,18 +30,18 @@ import {
 } from "@renderer/components/ui";
 import { TimelineEntry } from "echogarden/dist/utilities/Timeline.d.js";
 import { t } from "i18next";
-import WaveSurfer from "wavesurfer.js";
 import {
+  CheckIcon,
   ChevronDownIcon,
   DownloadIcon,
   GaugeCircleIcon,
   LoaderIcon,
   MicIcon,
   MoreHorizontalIcon,
-  SquareIcon,
+  PauseIcon,
+  PlayIcon,
   Trash2Icon,
 } from "lucide-react";
-import RecordPlugin from "wavesurfer.js/dist/plugins/record";
 import { useRecordings } from "@renderer/hooks";
 import { formatDateTime } from "@renderer/lib/utils";
 import { MediaPlayer, MediaProvider } from "@vidstack/react";

@@ -50,10 +50,9 @@ import {
   defaultLayoutIcons,
 } from "@vidstack/react/player/layouts/default";
 import { Caption, RecordingDetail } from "@renderer/components";
+import { LiveAudioVisualizer } from "react-audio-visualize";

 const TEN_MINUTES = 60 * 10;
-let interval: NodeJS.Timeout;
-
 export const MediaTranscriptionReadButton = (props: {
   children: React.ReactNode;
 }) => {

@@ -278,13 +277,21 @@ export const MediaTranscriptionReadButton = (props: {

 const RecorderButton = (props: { onRecorded: () => void }) => {
   const { onRecorded } = props;
-  const { media, transcription } = useContext(MediaPlayerProviderContext);
+  const {
+    media,
+    recordingBlob,
+    isRecording,
+    isPaused,
+    togglePauseResume,
+    startRecording,
+    stopRecording,
+    transcription,
+    currentSegmentIndex,
+    mediaRecorder,
+    recordingTime,
+  } = useContext(MediaPlayerProviderContext);
   const { EnjoyApp } = useContext(AppSettingsProviderContext);
-  const [isRecording, setIsRecording] = useState(false);
-  const [recorder, setRecorder] = useState<RecordPlugin>();
   const [access, setAccess] = useState<boolean>(false);
-  const [duration, setDuration] = useState<number>(0);
-  const ref = useRef(null);

   const askForMediaAccess = () => {
     EnjoyApp.system.preferences.mediaAccess("microphone").then((access) => {

@@ -297,53 +304,34 @@ const RecorderButton = (props: { onRecorded: () => void }) => {
     });
   };

-  const startRecord = () => {
-    if (isRecording) return;
-    if (!recorder) {
-      toast.warning(t("noMicrophoneAccess"));
-      return;
-    }
-
-    RecordPlugin.getAvailableAudioDevices()
-      .then((devices) => devices.find((d) => d.kind === "audioinput"))
-      .then((device) => {
-        if (device) {
-          recorder.startRecording({ deviceId: device.deviceId });
-          setIsRecording(true);
-          setDuration(0);
-          interval = setInterval(() => {
-            setDuration((duration) => {
-              if (duration >= TEN_MINUTES) {
-                recorder.stopRecording();
-              }
-              return duration + 0.1;
-            });
-          }, 100);
-        } else {
-          toast.error(t("cannotFindMicrophone"));
-        }
-      });
-  };
+  useEffect(() => {
+    askForMediaAccess();
+  }, []);

-  const createRecording = async (blob: Blob) => {
+  useEffect(() => {
     if (!media) return;
+    if (!transcription) return;
+    if (!recordingBlob) return;

     toast.promise(
-      EnjoyApp.recordings
-        .create({
+      async () => {
+        const currentSegment =
+          transcription?.result?.timeline?.[currentSegmentIndex];
+        if (!currentSegment) return;
+
+        await EnjoyApp.recordings.create({
           targetId: media.id,
           targetType: media.mediaType,
           blob: {
-            type: blob.type.split(";")[0],
-            arrayBuffer: await blob.arrayBuffer(),
+            type: recordingBlob.type.split(";")[0],
+            arrayBuffer: await recordingBlob.arrayBuffer(),
           },
           referenceId: -1,
           referenceText: transcription.result.timeline
             .map((s: TimelineEntry) => s.text)
             .join("\n"),
-          duration,
-        })
-        .then(() => onRecorded()),
+        });
+      },
       {
         loading: t("savingRecording"),
         success: t("recordingSaved"),

@@ -351,66 +339,76 @@ const RecorderButton = (props: { onRecorded: () => void }) => {
         position: "bottom-right",
       }
     );
-  };
+  }, [recordingBlob, media, transcription]);

   useEffect(() => {
-    if (!access) return;
-    if (!ref?.current) return;
+    if (recordingTime >= TEN_MINUTES) {
+      onRecorded();
+    }
+  }, [recordingTime]);

-    const ws = WaveSurfer.create({
-      container: ref.current,
-      fillParent: true,
-      height: 40,
-      autoCenter: false,
-      normalize: false,
-    });
+  if (isRecording) {
+    return (
+      <div className="h-16 flex items-center justify-center px-6">
+        <div className="flex items-center space-x-2">
+          <LiveAudioVisualizer
+            mediaRecorder={mediaRecorder}
+            barWidth={2}
+            gap={2}
+            width={250}
+            height={30}
+            fftSize={512}
+            maxDecibels={-10}
+            minDecibels={-80}
+            smoothingTimeConstant={0.4}
+          />
+          <span className="text-sm text-muted-foreground">
+            {Math.floor(recordingTime / 60)}:
+            {String(recordingTime % 60).padStart(2, "0")}
+          </span>
+          <Button
+            onClick={togglePauseResume}
+            className="rounded-full shadow w-8 h-8"
+            size="icon"
+          >
+            {isPaused ? (
+              <PlayIcon
+                data-tooltip-id="chat-input-tooltip"
+                data-tooltip-content={t("continue")}
+                fill="white"
+                className="w-4 h-4"
+              />
+            ) : (
+              <PauseIcon
+                data-tooltip-id="chat-input-tooltip"
+                data-tooltip-content={t("pause")}
+                fill="white"
+                className="w-4 h-4"
+              />
+            )}
+          </Button>
+          <Button
+            data-tooltip-id="chat-input-tooltip"
+            data-tooltip-content={t("finish")}
+            onClick={stopRecording}
+            className="rounded-full bg-green-500 hover:bg-green-600 shadow w-8 h-8"
+            size="icon"
+          >
+            <CheckIcon className="w-4 h-4 text-white" />
+          </Button>
+        </div>
+      </div>
+    );
+  }

-    const record = ws.registerPlugin(RecordPlugin.create());
-    setRecorder(record);
-
-    record.on("record-end", async (blob: Blob) => {
-      if (interval) clearInterval(interval);
-      createRecording(blob);
-      setIsRecording(false);
-    });
-
-    return () => {
-      if (interval) clearInterval(interval);
-      recorder?.stopRecording();
-      ws?.destroy();
-    };
-  }, [access, ref]);
-
-  useEffect(() => {
-    askForMediaAccess();
-  }, []);
   return (
     <div className="h-16 flex items-center justify-center px-6">
-      <div
-        ref={ref}
-        className={isRecording ? "w-full mr-4" : "w-0 overflow-hidden"}
-      ></div>
-      {isRecording && (
-        <div className="text-muted-foreground text-sm w-24 mr-4">
-          {duration.toFixed(1)} / {TEN_MINUTES}
-        </div>
-      )}
       <Button
         variant="ghost"
         className="aspect-square p-0 h-12 rounded-full bg-red-500 hover:bg-red-500/90"
-        onClick={() => {
-          if (isRecording) {
-            recorder?.stopRecording();
-          } else {
-            startRecord();
-          }
-        }}
+        onClick={() => startRecording()}
       >
-        {isRecording ? (
-          <SquareIcon fill="white" className="w-6 h-6 text-white" />
-        ) : (
-          <MicIcon className="w-6 h-6 text-white" />
-        )}
+        <MicIcon className="w-6 h-6 text-white" />
       </Button>
     </div>
   );

@@ -16,6 +16,7 @@ import { TimelineEntry } from "echogarden/dist/utilities/Timeline.d.js";
 import { toast } from "@renderer/components/ui";
 import { Tooltip } from "react-tooltip";
 import { debounce } from "lodash";
+import { useAudioRecorder } from "react-audio-voice-recorder";

 type MediaPlayerContextType = {
   layout: {

@@ -77,8 +78,14 @@ type MediaPlayerContextType = {
   transcriptionDraft: TranscriptionType["result"];
   setTranscriptionDraft: (result: TranscriptionType["result"]) => void;
   // Recordings
+  startRecording: () => void;
+  stopRecording: () => void;
+  togglePauseResume: () => void;
+  recordingBlob: Blob;
   isRecording: boolean;
-  setIsRecording: (isRecording: boolean) => void;
+  isPaused: boolean;
+  recordingTime: number;
+  mediaRecorder: MediaRecorder;
   currentRecording: RecordingType;
   setCurrentRecording: (recording: RecordingType) => void;
   recordings: RecordingType[];

@@ -163,7 +170,6 @@ export const MediaPlayerProvider = ({
   const [fitZoomRatio, setFitZoomRatio] = useState<number>(1.0);
   const [zoomRatio, setZoomRatio] = useState<number>(1.0);

-  const [isRecording, setIsRecording] = useState<boolean>(false);
   const [currentRecording, setCurrentRecording] = useState<RecordingType>(null);

   const [transcriptionDraft, setTranscriptionDraft] =

@@ -185,6 +191,17 @@ export const MediaPlayerProvider = ({
     hasMore: hasMoreRecordings,
   } = useRecordings(media, currentSegmentIndex);

+  const {
+    startRecording,
+    stopRecording,
+    togglePauseResume,
+    recordingBlob,
+    isRecording,
+    isPaused,
+    recordingTime,
+    mediaRecorder,
+  } = useAudioRecorder();
+
   const { segment, createSegment } = useSegments({
     targetId: media?.id,
     targetType: media?.mediaType,

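With the hook living in the provider, consumers no longer manage recorder state themselves; they read it from `MediaPlayerProviderContext`, as the reworked `MediaRecordButton` above does. A hypothetical consumer, for illustration only (not part of the commit):

```tsx
import { useContext } from "react";
import { MediaPlayerProviderContext } from "@renderer/context";

// Illustrative consumer: one shared recorder instance means every component
// sees the same isRecording/recordingTime without prop drilling.
const RecordToggle = () => {
  const { isRecording, startRecording, stopRecording } = useContext(
    MediaPlayerProviderContext
  );

  return (
    <button onClick={isRecording ? stopRecording : startRecording}>
      {isRecording ? "Stop" : "Record"}
    </button>
  );
};
```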
@@ -625,8 +642,14 @@ export const MediaPlayerProvider = ({
         transcribingOutput,
         transcriptionDraft,
         setTranscriptionDraft,
+        startRecording,
+        stopRecording,
+        togglePauseResume,
+        recordingBlob,
         isRecording,
-        setIsRecording,
+        isPaused,
+        recordingTime,
+        mediaRecorder,
         currentRecording,
         setCurrentRecording,
         recordings,