Feat: allow downloading segment audio (#601)

* allow downloading segment audio

* display the original audio alongside the recording in posts
an-lee
2024-05-13 10:25:28 +08:00
committed by GitHub
parent c01548d3a0
commit 156222f28c
16 changed files with 258 additions and 5 deletions

View File

@@ -440,4 +440,19 @@ export class Client {
payment(id: string): Promise<PaymentType> {
return this.api.get(`/api/payments/${id}`);
}
mineSegments(params?: {
page?: number;
segmentIndex?: number;
targetId?: string;
targetType?: string;
}): Promise<
{
segments: SegmentType[];
} & PagyResponseType
> {
return this.api.get("/api/mine/segments", {
params: decamelizeKeys(params),
});
}
}
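
For reference, the new mineSegments client method is consumed by the PostRecording component later in this commit; condensed, the lookup of the segment a recording was made against looks like this (recording and setSegment come from that component):

// Condensed from PostRecording further down in this commit:
// fetch the segment this recording refers to, if it has been synced.
const res = await webApi.mineSegments({
  targetId: recording.targetId,
  targetType: recording.targetType,
  segmentIndex: recording.referenceId,
});
if (res.segments.length > 0) {
  setSegment(res.segments[0]);
}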

View File

@@ -168,6 +168,7 @@
"translate": "translate",
"displayIpa": "display IPA",
"displayNotes": "display Notes",
"downloadSegment": "download segment",
"detail": "detail",
"remove": "remove",
"share": "share",

View File

@@ -168,6 +168,7 @@
"translate": "翻译",
"displayIpa": "标注音标",
"displayNotes": "显示笔记",
"downloadSegment": "下载选段",
"detail": "详情",
"remove": "删除",
"share": "分享",

View File

@@ -5,6 +5,7 @@ import downloader from "@main/downloader";
import log from "@main/logger";
import { t } from "i18next";
import youtubedr from "@main/youtubedr";
import { pathToEnjoyUrl } from "@/main/utils";
const logger = log.scope("db/handlers/audios-handler");
@@ -187,6 +188,24 @@ class AudiosHandler {
});
}
private async crop(
_event: IpcMainEvent,
id: string,
params: { startTime: number; endTime: number }
) {
const audio = await Audio.findOne({
where: { id },
});
if (!audio) {
throw new Error(t("models.audio.notFound"));
}
const { startTime, endTime } = params;
const output = await audio.crop({ startTime, endTime });
return pathToEnjoyUrl(output);
}
register() {
ipcMain.handle("audios-find-all", this.findAll);
ipcMain.handle("audios-find-one", this.findOne);
@@ -194,6 +213,7 @@ class AudiosHandler {
ipcMain.handle("audios-update", this.update);
ipcMain.handle("audios-destroy", this.destroy);
ipcMain.handle("audios-upload", this.upload);
ipcMain.handle("audios-crop", this.crop);
}
}
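
pathToEnjoyUrl itself is not part of this diff; presumably it maps the cropped file's absolute path onto the app's custom protocol so the renderer can load it directly. A minimal sketch under that assumption (the scheme and path layout below are placeholders, not the real @main/utils implementation):

// Hypothetical sketch only -- the real helper lives in @main/utils.
// Assumption: local files are served to the renderer via a custom protocol.
export function pathToEnjoyUrl(filePath: string): string {
  return `enjoy://library/${encodeURIComponent(filePath)}`;
}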

View File

@@ -5,6 +5,7 @@ import downloader from "@main/downloader";
import log from "@main/logger";
import { t } from "i18next";
import youtubedr from "@main/youtubedr";
import { pathToEnjoyUrl } from "@main/utils";
const logger = log.scope("db/handlers/videos-handler");
@@ -173,6 +174,24 @@ class VideosHandler {
});
}
private async crop(
_event: IpcMainEvent,
id: string,
params: { startTime: number; endTime: number }
) {
const video = await Video.findOne({
where: { id },
});
if (!video) {
throw new Error(t("models.video.notFound"));
}
const { startTime, endTime } = params;
const output = await video.crop({ startTime, endTime });
return pathToEnjoyUrl(output);
}
register() {
ipcMain.handle("videos-find-all", this.findAll);
ipcMain.handle("videos-find-one", this.findOne);
@@ -180,6 +199,7 @@ class VideosHandler {
ipcMain.handle("videos-update", this.update);
ipcMain.handle("videos-destroy", this.destroy);
ipcMain.handle("videos-upload", this.upload);
ipcMain.handle("videos-crop", this.crop);
}
}

View File

@@ -28,6 +28,7 @@ import Ffmpeg from "@main/ffmpeg";
import { Client } from "@/api";
import startCase from "lodash/startCase";
import { v5 as uuidv5 } from "uuid";
import FfmpegWrapper from "@main/ffmpeg";
const SIZE_LIMIT = 1024 * 1024 * 50; // 50MB
@@ -189,6 +190,23 @@ export class Audio extends Model<Audio> {
});
}
async crop(params: { startTime: number; endTime: number }) {
const { startTime, endTime } = params;
const ffmpeg = new FfmpegWrapper();
const output = path.join(
settings.cachePath(),
`${this.name}(${startTime.toFixed(2)}s-${endTime.toFixed(2)}).mp3`
);
await ffmpeg.crop(this.filePath, {
startTime,
endTime,
output,
});
return output;
}
@BeforeCreate
static async setupDefaultAttributes(audio: Audio) {
try {
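
FfmpegWrapper (imported above from @main/ffmpeg) is not shown in this commit. Assuming it wraps fluent-ffmpeg, a crop helper with the signature used by Audio.crop and Video.crop might look roughly like this sketch:

import ffmpeg from "fluent-ffmpeg";

// Hypothetical sketch of the crop method used above;
// not the actual @main/ffmpeg implementation.
export default class FfmpegWrapper {
  crop(
    input: string,
    options: { startTime: number; endTime: number; output: string }
  ): Promise<string> {
    const { startTime, endTime, output } = options;
    return new Promise((resolve, reject) => {
      ffmpeg(input)
        .setStartTime(startTime) // seek to the segment start, in seconds
        .setDuration(endTime - startTime) // keep only the segment length
        .output(output)
        .on("end", () => resolve(output))
        .on("error", (err) => reject(err))
        .run();
    });
  }
}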

View File

@@ -28,6 +28,7 @@ import Ffmpeg from "@main/ffmpeg";
import { Client } from "@/api";
import startCase from "lodash/startCase";
import { v5 as uuidv5 } from "uuid";
import FfmpegWrapper from "@main/ffmpeg";
const SIZE_LIMIT = 1024 * 1024 * 100; // 100MB
@@ -210,6 +211,23 @@ export class Video extends Model<Video> {
});
}
async crop(params: { startTime: number; endTime: number }) {
const { startTime, endTime } = params;
const ffmpeg = new FfmpegWrapper();
const output = path.join(
settings.cachePath(),
`${this.name}(${startTime.toFixed(2)}s-${endTime.toFixed(2)}).mp3`
);
await ffmpeg.crop(this.filePath, {
startTime,
endTime,
output,
});
return output;
}
@BeforeCreate
static async setupDefaultAttributes(video: Video) {
try {

View File

@@ -244,6 +244,9 @@ contextBridge.exposeInMainWorld("__ENJOY_APP__", {
upload: (id: string) => {
return ipcRenderer.invoke("audios-upload", id);
},
crop: (id: string, params: { startTime: number; endTime: number }) => {
return ipcRenderer.invoke("audios-crop", id, params);
},
},
videos: {
findAll: (params: {
@@ -267,6 +270,9 @@ contextBridge.exposeInMainWorld("__ENJOY_APP__", {
upload: (id: string) => {
return ipcRenderer.invoke("videos-upload", id);
},
crop: (id: string, params: { startTime: number; endTime: number }) => {
return ipcRenderer.invoke("videos-crop", id, params);
},
},
recordings: {
findAll: (params?: {
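
Combined with the dialog and download helpers already exposed on the bridge, the renderer-side crop-and-save flow (condensed from handleDownloadActiveRegion in the MediaCaption component below; media, activeRegion, t, and EnjoyApp come from that component) is:

// Crop the active region in the main process, then save the resulting clip.
const src = await EnjoyApp.audios.crop(media.id, {
  startTime: activeRegion.start,
  endTime: activeRegion.end,
});
const savePath = await EnjoyApp.dialog.showSaveDialog({
  title: t("download"),
  defaultPath: `${media.name}(${activeRegion.start.toFixed(2)}s-${activeRegion.end.toFixed(2)}s).mp3`,
});
if (savePath) {
  await EnjoyApp.download.start(src, savePath as string);
}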

View File

@@ -1,5 +1,8 @@
import { useEffect, useState, useContext } from "react";
import { MediaPlayerProviderContext } from "@renderer/context";
import {
AppSettingsProviderContext,
MediaPlayerProviderContext,
} from "@renderer/context";
import cloneDeep from "lodash/cloneDeep";
import { Button, toast } from "@renderer/components/ui";
import { ConversationShortcuts } from "@renderer/components";
@@ -10,6 +13,7 @@ import {
CheckIcon,
SpeechIcon,
NotebookPenIcon,
DownloadIcon,
} from "lucide-react";
import {
Timeline,
@@ -21,7 +25,10 @@ import { MediaCaptionTabs } from "./media-captions";
export const MediaCaption = () => {
const {
media,
currentSegmentIndex,
currentSegment,
createSegment,
currentTime,
transcription,
regions,
@@ -31,6 +38,7 @@ export const MediaCaption = () => {
setEditingRegion,
setTranscriptionDraft,
} = useContext(MediaPlayerProviderContext);
const { EnjoyApp } = useContext(AppSettingsProviderContext);
const [activeIndex, setActiveIndex] = useState<number>(0);
const [selectedIndices, setSelectedIndices] = useState<number[]>([]);
const [multiSelecting, setMultiSelecting] = useState<boolean>(false);
@@ -132,6 +140,89 @@ export const MediaCaption = () => {
}
};
const handleDownload = async () => {
if (activeRegion && !activeRegion.id.startsWith("segment-region")) {
handleDownloadActiveRegion();
} else {
handleDownloadSegment();
}
};
const handleDownloadSegment = async () => {
const segment = currentSegment || (await createSegment());
if (!segment) return;
EnjoyApp.dialog
.showSaveDialog({
title: t("download"),
defaultPath: `${media.name}(${segment.startTime.toFixed(
2
)}s-${segment.endTime.toFixed(2)}s).mp3`,
})
.then((savePath) => {
if (!savePath) return;
toast.promise(
EnjoyApp.download.start(segment.src, savePath as string),
{
loading: t("downloading", { file: media.filename }),
success: () => t("downloadedSuccessfully"),
error: t("downloadFailed"),
position: "bottom-right",
}
);
})
.catch((err) => {
toast.error(err.message);
});
};
const handleDownloadActiveRegion = async () => {
if (!activeRegion) return;
let src: string;
try {
if (media.mediaType === "Audio") {
src = await EnjoyApp.audios.crop(media.id, {
startTime: activeRegion.start,
endTime: activeRegion.end,
});
} else if (media.mediaType === "Video") {
src = await EnjoyApp.videos.crop(media.id, {
startTime: activeRegion.start,
endTime: activeRegion.end,
});
}
} catch (err) {
toast.error(err.message);
}
if (!src) {
toast.error(t("downloadFailed"));
return;
}
EnjoyApp.dialog
.showSaveDialog({
title: t("download"),
defaultPath: `${media.name}(${activeRegion.start.toFixed(
2
)}s-${activeRegion.end.toFixed(2)}s).mp3`,
})
.then((savePath) => {
if (!savePath) return;
toast.promise(EnjoyApp.download.start(src, savePath as string), {
loading: t("downloading", { file: media.filename }),
success: () => t("downloadedSuccessfully"),
error: t("downloadFailed"),
position: "bottom-right",
});
})
.catch((err) => {
toast.error(err.message);
});
};
useEffect(() => {
if (!caption) return;
@@ -349,6 +440,17 @@ export const MediaCaption = () => {
/>
)}
</Button>
<Button
variant="outline"
size="icon"
className="rounded-full w-8 h-8 p-0"
data-tooltip-id="media-player-tooltip"
data-tooltip-content={t("downloadSegment")}
onClick={handleDownload}
>
<DownloadIcon className="w-4 h-4" />
</Button>
</div>
</div>
);

View File

@@ -58,6 +58,8 @@ export const MediaCurrentRecording = () => {
wavesurfer,
zoomRatio,
editingRegion,
currentSegment,
createSegment,
currentTime: mediaCurrentTime,
} = useContext(MediaPlayerProviderContext);
const { webApi, EnjoyApp } = useContext(AppSettingsProviderContext);
@@ -195,6 +197,16 @@ export const MediaCurrentRecording = () => {
}
}
try {
const segment = currentSegment || (await createSegment());
if (!segment) throw new Error("Failed to create segment");
await EnjoyApp.segments.sync(segment.id);
} catch (error) {
toast.error(t("shareFailed"), { description: error.message });
return;
}
webApi
.createPost({
targetId: currentRecording.id,

View File

@@ -19,6 +19,8 @@ export const MediaRecorder = () => {
setIsRecording,
transcription,
currentSegmentIndex,
currentSegment,
createSegment,
} = useContext(MediaPlayerProviderContext);
const [player, setPlayer] = useState<WaveSurfer>();
const [access, setAccess] = useState<boolean>(false);
@@ -125,6 +127,9 @@ export const MediaRecorder = () => {
}, [ref, isRecording, access, layout?.playerHeight]);
useEffect(() => {
if (!currentSegment) {
createSegment();
}
askForMediaAccess();
}, []);

View File

@@ -1,4 +1,4 @@
import { useEffect, useState, useRef, useCallback } from "react";
import { useEffect, useState, useRef, useCallback, useContext } from "react";
import { renderPitchContour } from "@renderer/lib/utils";
import { extractFrequencies } from "@/utils";
import WaveSurfer from "wavesurfer.js";
@@ -8,11 +8,14 @@ import { useIntersectionObserver } from "@uidotdev/usehooks";
import { secondsToTimestamp } from "@renderer/lib/utils";
import { t } from "i18next";
import { XCircleIcon } from "lucide-react";
import { AppSettingsProviderContext } from "@renderer/context";
import { WavesurferPlayer } from "@renderer/components";
export const PostRecording = (props: {
recording: RecordingType;
height?: number;
}) => {
const { webApi } = useContext(AppSettingsProviderContext);
const { recording, height = 80 } = props;
const [initialized, setInitialized] = useState(false);
const [isPlaying, setIsPlaying] = useState(false);
@@ -23,11 +26,28 @@ export const PostRecording = (props: {
});
const [duration, setDuration] = useState<number>(0);
const [error, setError] = useState<string>(null);
const [segment, setSegment] = useState<SegmentType>(null);
const onPlayClick = useCallback(() => {
wavesurfer.isPlaying() ? wavesurfer.pause() : wavesurfer.play();
}, [wavesurfer]);
const fetchSegment = async () => {
if (segment) return;
webApi
.mineSegments({
targetId: recording.targetId,
targetType: recording.targetType,
segmentIndex: recording.referenceId,
})
.then((res) => {
if (res.segments.length === 0) return;
setSegment(res.segments[0]);
});
};
useEffect(() => {
// use the intersection observer to only create the wavesurfer instance
// when the player is visible
@@ -53,6 +73,8 @@ export const PostRecording = (props: {
setWavesurfer(ws);
fetchSegment();
return () => {
setWavesurfer(null);
};
@@ -159,12 +181,14 @@ export const PostRecording = (props: {
</div>
{recording.referenceText && (
<div className="mt-2 bg-muted px-4 py-2 rounded">
<div className="my-2 bg-muted px-4 py-2 rounded">
<div className="text-muted-foreground text-center font-serif select-text">
{recording.referenceText}
</div>
</div>
)}
{segment?.src && <WavesurferPlayer id={segment.id} src={segment.src} />}
</div>
);
};

View File

@@ -85,7 +85,7 @@ type MediaPlayerContextType = {
createNote: (params: any) => void;
// Segments
currentSegment: SegmentType;
createSegment: () => void;
createSegment: () => Promise<SegmentType | void>;
};
export const MediaPlayerProviderContext =

View File

@@ -29,7 +29,7 @@ export const useSegments = (props: {
};
const createSegment = () => {
EnjoyApp.segments
return EnjoyApp.segments
.create({
targetId,
targetType,
@@ -37,6 +37,7 @@ export const useSegments = (props: {
})
.then((segment) => {
setSegment(segment);
return segment;
})
.catch((err) => {
toast.error(err.message);

View File

@@ -145,6 +145,10 @@ type EnjoyAppType = {
update: (id: string, params: any) => Promise<AudioType | undefined>;
destroy: (id: string) => Promise<undefined>;
upload: (id: string) => Promise<void>;
crop: (
id: string,
params: { startTime: number; endTime: number }
) => Promise<string>;
};
videos: {
findAll: (params: any) => Promise<VideoType[]>;
@@ -153,6 +157,10 @@ type EnjoyAppType = {
update: (id: string, params: any) => Promise<VideoType | undefined>;
destroy: (id: string) => Promise<undefined>;
upload: (id: string) => Promise<void>;
crop: (
id: string,
params: { startTime: number; endTime: number }
) => Promise<string>;
};
recordings: {
findAll: (where: any) => Promise<RecordingType[]>;

View File

@@ -11,6 +11,8 @@ type SegmentType = {
startTime: number;
endTime: number;
src: string;
isSynced?: boolean;
isUploaded?: boolean;
syncedAt?: Date;
uploadedAt?: Date;
updatedAt: Date;