Allow sharing recordings to the community

This commit is contained in:
an-lee
2024-01-13 15:04:28 +08:00
parent d655da9aea
commit 0ecaf4bdff
10 changed files with 326 additions and 106 deletions

View File

@@ -342,6 +342,12 @@
"sharePrompt": "Share prompt",
"sharedPrompt": "Shared a prompt",
"areYouSureToShareThisPromptToCommunity": "Are you sure you want to share this prompt with the community?",
"shareRecording": "Share recording",
"sharedRecording": "Shared a recording",
"areYouSureToShareThisRecordingToCommunity": "Are you sure you want to share this recording with the community?",
"shareStory": "Share story",
"sharedStory": "Shared a story",
"areYouSureToShareThisStoryToCommunity": "Are you sure you want to share this story with the community?",
"addToLibary": "Add to library",
"areYouSureToAddThisVideoToYourLibrary": "Are you sure you want to add this video to your library?",
"areYouSureToAddThisAudioToYourLibrary": "Are you sure you want to add this audio to your library?",

View File

@@ -342,6 +342,12 @@
"sharePrompt": "分享提示语",
"sharedPrompt": "分享了一条提示语",
"areYouSureToShareThisPromptToCommunity": "您确定要分享此提示语到社区吗?",
"shareRecording": "分享录音",
"sharedRecording": "分享了一条录音",
"areYouSureToShareThisRecordingToCommunity": "您确定要分享此录音到社区吗?",
"shareStory": "分享文章",
"sharedStory": "分享了一篇文章",
"areYouSureToShareThisStoryToCommunity": "您确定要分享此文章到社区吗?",
"addToLibary": "添加到资源库",
"areYouSureToAddThisVideoToYourLibrary": "您确定要添加此视频到资源库吗?",
"areYouSureToAddThisAudioToYourLibrary": "您确定要添加此音频到资源库吗?",

View File

@@ -1,4 +1,6 @@
export * from "./posts";
export * from "./post-audio-player";
export * from "./post-audio";
export * from "./post-card";
export * from "./post-actions";
export * from "./post-medium";
export * from "./post-recording";

View File

@@ -6,8 +6,11 @@ import { PlayIcon, PauseIcon } from "lucide-react";
import { useIntersectionObserver } from "@uidotdev/usehooks";
import { secondsToTimestamp } from "@renderer/lib/utils";
export const PostAudioPlayer = (props: { src: string; height?: number }) => {
const { src, height = 80 } = props;
export const PostAudio = (props: {
audio: Partial<MediumType>;
height?: number;
}) => {
const { audio, height = 80 } = props;
const [initialized, setInitialized] = useState(false);
const [isPlaying, setIsPlaying] = useState(false);
const [wavesurfer, setWavesurfer] = useState(null);
@@ -25,12 +28,12 @@ export const PostAudioPlayer = (props: { src: string; height?: number }) => {
// use the intersection observer to only create the wavesurfer instance
// when the player is visible
if (!entry?.isIntersecting) return;
if (!src) return;
if (!audio.sourceUrl) return;
if (wavesurfer) return;
const ws = WaveSurfer.create({
container: containerRef.current,
url: src,
url: audio.sourceUrl,
height,
barWidth: 1,
cursorWidth: 0,
@@ -41,11 +44,10 @@ export const PostAudioPlayer = (props: { src: string; height?: number }) => {
minPxPerSec: 100,
waveColor: "#ddd",
progressColor: "rgba(0, 0, 0, 0.25)",
normalize: true,
});
setWavesurfer(ws);
}, [src, entry]);
}, [audio.sourceUrl, entry]);
useEffect(() => {
if (!wavesurfer) return;
@@ -81,7 +83,7 @@ export const PostAudioPlayer = (props: { src: string; height?: number }) => {
return (
<div className="w-full">
<div className="flex justify-end">
<span className="text-xs text-muted-foreground mb-1">
<span className="text-xs text-muted-foreground">
{secondsToTimestamp(duration)}
</span>
</div>
@@ -116,6 +118,12 @@ export const PostAudioPlayer = (props: { src: string; height?: number }) => {
ref={containerRef}
></div>
</div>
{audio.coverUrl && (
<div className="">
<img src={audio.coverUrl} className="w-full rounded" />
</div>
)}
</div>
);
};

View File

@@ -1,6 +1,4 @@
import { useContext, useEffect, useState } from "react";
import { AppSettingsProviderContext } from "@renderer/context";
import { PostAudioPlayer, PostActions } from "@renderer/components";
import { PostRecording, PostActions, PostMedium } from "@renderer/components";
import {
Avatar,
AvatarImage,
@@ -9,11 +7,6 @@ import {
} from "@renderer/components/ui";
import { formatDateTime } from "@renderer/lib/utils";
import { t } from "i18next";
import { MediaPlayer, MediaProvider } from "@vidstack/react";
import {
DefaultVideoLayout,
defaultLayoutIcons,
} from "@vidstack/react/player/layouts/default";
import Markdown from "react-markdown";
export const PostCard = (props: { post: PostType }) => {
@@ -53,53 +46,18 @@ export const PostCard = (props: { post: PostType }) => {
<PostMedium medium={post.target as MediumType} />
)}
{post.targetType == "Recording" && (
<>
<div className="text-xs text-muted-foreground">
{t("sharedRecording")}
</div>
<PostRecording recording={post.target as RecordingType} />
</>
)}
<PostActions post={post} />
</div>
);
};
const PostMedium = (props: { medium: MediumType }) => {
const { medium } = props;
if (!medium.sourceUrl) return null;
return (
<div className="space-y-2">
{medium.mediumType == "Video" && (
<>
<div className="text-xs text-muted-foreground">
{t("sharedAudio")}
</div>
<MediaPlayer
poster={medium.coverUrl}
src={{
type: `${medium.mediumType.toLowerCase()}/${
medium.extname.replace(".", "") || "mp4"
}`,
src: medium.sourceUrl,
}}
>
<MediaProvider />
<DefaultVideoLayout icons={defaultLayoutIcons} />
</MediaPlayer>
</>
)}
{medium.mediumType == "Audio" && (
<>
<div className="text-xs text-muted-foreground">
{t("sharedAudio")}
</div>
<PostAudioPlayer src={medium.sourceUrl} />
</>
)}
{medium.coverUrl && medium.mediumType == "Audio" && (
<div className="">
<img src={medium.coverUrl} className="w-full rounded" />
</div>
)}
</div>
);
};
const PostOptions = (props: { post: PostType }) => {};

View File

@@ -0,0 +1,45 @@
import { PostAudio } from "@renderer/components";
import { t } from "i18next";
import { MediaPlayer, MediaProvider } from "@vidstack/react";
import {
  DefaultVideoLayout,
  defaultLayoutIcons,
} from "@vidstack/react/player/layouts/default";

/**
 * Renders the medium (video or audio) attached to a shared post.
 *
 * Videos are played with the vidstack MediaPlayer; audios are rendered
 * with the in-app PostAudio waveform player. Renders nothing when the
 * medium has no playable source URL.
 *
 * @param medium - the shared medium; requires `sourceUrl` to render
 */
export const PostMedium = (props: { medium: MediumType }) => {
  const { medium } = props;

  // Nothing to render without a playable source.
  if (!medium.sourceUrl) return null;

  return (
    <div className="space-y-2">
      {medium.mediumType == "Video" && (
        <>
          <div className="text-xs text-muted-foreground">
            {/* Fix: this branch previously showed the "sharedAudio" label
                for videos. NOTE(review): confirm the "sharedVideo" key
                exists in every locale file. */}
            {t("sharedVideo")}
          </div>
          <MediaPlayer
            poster={medium.coverUrl}
            src={{
              // Derive the MIME type from the file extension; guard
              // against a missing extname and fall back to mp4.
              type: `${medium.mediumType.toLowerCase()}/${
                medium.extname?.replace(".", "") || "mp4"
              }`,
              src: medium.sourceUrl,
            }}
          >
            <MediaProvider />
            <DefaultVideoLayout icons={defaultLayoutIcons} />
          </MediaPlayer>
        </>
      )}
      {medium.mediumType == "Audio" && (
        <>
          <div className="text-xs text-muted-foreground">
            {t("sharedAudio")}
          </div>
          {/* PostAudio accepts Partial<MediumType>; `medium` already is
              one, so the previous `as Partial<AudioType>` cast was both
              unnecessary and wrong. */}
          <PostAudio audio={medium} />
        </>
      )}
    </div>
  );
};

View File

@@ -0,0 +1,133 @@
import { useEffect, useState, useRef, useCallback } from "react";
import { PitchContour } from "@renderer/components";
import WaveSurfer from "wavesurfer.js";
import { Button, Skeleton } from "@renderer/components/ui";
import { PlayIcon, PauseIcon } from "lucide-react";
import { useIntersectionObserver } from "@uidotdev/usehooks";
import { secondsToTimestamp } from "@renderer/lib/utils";

/**
 * Waveform player for a recording shared in a post.
 *
 * Lazily creates a WaveSurfer instance only once the component scrolls
 * into view, overlays a pitch contour on the decoded waveform, shows the
 * total duration, and renders the recording's reference text (when
 * present) below the player.
 *
 * @param recording - the recording to play; `recording.src` is the audio URL
 * @param height - waveform height in pixels (default 80)
 */
export const PostRecording = (props: {
  recording: RecordingType;
  height?: number;
}) => {
  const { recording, height = 80 } = props;
  // True once the audio has been decoded and the waveform rendered;
  // until then a skeleton placeholder is shown instead of the player.
  const [initialized, setInitialized] = useState(false);
  const [isPlaying, setIsPlaying] = useState(false);
  const [wavesurfer, setWavesurfer] = useState(null);
  // Container element WaveSurfer renders the waveform into.
  const containerRef = useRef();
  // Visibility observer so the (expensive) player is only created when
  // the card is fully on screen.
  const [ref, entry] = useIntersectionObserver({
    threshold: 1,
  });
  const [duration, setDuration] = useState<number>(0);

  // Toggle playback. Only reachable after `initialized` is true (the
  // button is hidden until then), so `wavesurfer` is non-null here.
  const onPlayClick = useCallback(() => {
    wavesurfer.isPlaying() ? wavesurfer.pause() : wavesurfer.play();
  }, [wavesurfer]);

  useEffect(() => {
    // use the intersection observer to only create the wavesurfer instance
    // when the player is visible
    if (!entry?.isIntersecting) return;
    if (!recording.src) return;
    // Already created — don't build a second instance on re-entry.
    if (wavesurfer) return;

    const ws = WaveSurfer.create({
      container: containerRef.current,
      url: recording.src,
      height,
      barWidth: 1,
      cursorWidth: 0,
      autoCenter: true,
      autoScroll: true,
      dragToSeek: true,
      hideScrollbar: true,
      minPxPerSec: 100,
      waveColor: "rgba(0, 0, 0, 0.25)",
      progressColor: "rgba(0, 0, 0, 0.5)",
    });

    setWavesurfer(ws);
  }, [recording.src, entry]);

  // Wire up player events once the instance exists; each `on` call
  // returns an unsubscribe function collected for cleanup.
  useEffect(() => {
    if (!wavesurfer) return;

    const subscriptions = [
      wavesurfer.on("play", () => {
        setIsPlaying(true);
      }),
      wavesurfer.on("pause", () => {
        setIsPlaying(false);
      }),
      wavesurfer.on("decode", () => {
        setDuration(wavesurfer.getDuration());
        // Compute a pitch contour from the first channel's samples and
        // overlay it on the waveform's wrapper element.
        const peaks = wavesurfer.getDecodedData().getChannelData(0);
        const sampleRate = wavesurfer.options.sampleRate;
        wavesurfer.renderer.getWrapper().appendChild(
          PitchContour({
            peaks,
            sampleRate,
            height,
          })
        );
        setInitialized(true);
      }),
    ];

    // Unsubscribe and tear down the player on re-run/unmount.
    return () => {
      subscriptions.forEach((unsub) => unsub());
      wavesurfer?.destroy();
    };
  }, [wavesurfer]);

  return (
    <div className="w-full">
      <div className="flex justify-end">
        <span className="text-xs text-muted-foreground">
          {secondsToTimestamp(duration)}
        </span>
      </div>
      {/* `ref` (intersection observer) is attached here so the waveform
          is created only when this row becomes visible. */}
      <div
        ref={ref}
        className="bg-sky-500/30 rounded-lg grid grid-cols-9 items-center relative h-[80px]"
      >
        {/* Skeleton placeholder until the waveform is decoded. */}
        {!initialized && (
          <div className="col-span-9 flex flex-col justify-around h-[80px]">
            <Skeleton className="h-2 w-full rounded-full" />
            <Skeleton className="h-2 w-full rounded-full" />
            <Skeleton className="h-2 w-full rounded-full" />
          </div>
        )}
        <div className={`flex justify-center ${initialized ? "" : "hidden"}`}>
          <Button
            onClick={onPlayClick}
            className="aspect-square rounded-full p-2 w-12 h-12 bg-blue-600 hover:bg-blue-500"
          >
            {isPlaying ? (
              <PauseIcon className="w-6 h-6 text-white" />
            ) : (
              <PlayIcon className="w-6 h-6 text-white" />
            )}
          </Button>
        </div>
        <div
          className={`col-span-8 ${initialized ? "" : "hidden"}`}
          ref={containerRef}
        ></div>
      </div>
      {/* Reference text (the sentence the user was recording), if any. */}
      {
        recording.referenceText && (
          <div className="mt-2 bg-muted px-4 py-2 rounded">
            <div className="text-muted-foreground">
              {recording.referenceText}
            </div>
          </div>
        )
      }
    </div>
  );
};

View File

@@ -4,18 +4,26 @@ import { RecordingPlayer } from "@renderer/components";
import {
AlertDialog,
AlertDialogHeader,
AlertDialogTrigger,
AlertDialogDescription,
AlertDialogTitle,
AlertDialogContent,
AlertDialogFooter,
AlertDialogCancel,
AlertDialogAction,
Button,
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
useToast,
} from "@renderer/components/ui";
import { ChevronDownIcon, Trash2Icon, InfoIcon, Share2Icon } from "lucide-react";
import {
ChevronDownIcon,
Trash2Icon,
InfoIcon,
Share2Icon,
} from "lucide-react";
import { formatDateTime, secondsToTimestamp } from "@renderer/lib/utils";
import { t } from "i18next";
@@ -26,39 +34,69 @@ export const RecordingCard = (props: {
}) => {
const { recording, id, onSelect } = props;
const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false);
const { EnjoyApp } = useContext(AppSettingsProviderContext);
const { EnjoyApp, webApi } = useContext(AppSettingsProviderContext);
const [isPlaying, setIsPlaying] = useState(false);
const { toast } = useToast();
const handleDelete = () => {
EnjoyApp.recordings.destroy(recording.id);
};
const handleShare = async () => {
if (!recording.updatedAt) {
try {
await EnjoyApp.recordings.upload(recording.id);
} catch (error) {
toast({
description: error.message,
variant: "destructive",
});
return;
}
}
webApi
.createPost({
targetId: recording.id,
targetType: "Recording",
})
.then(() => {
toast({
description: t("recordingShared"),
});
})
.catch((error) => {
toast({
description: error.message,
variant: "destructive",
});
});
};
return (
<div id={id} className="flex items-center justify-end px-4 transition-all">
<DropdownMenu>
<div className="w-full">
<div className="bg-white rounded-lg py-2 px-4 relative mb-1">
<div className="flex items-center justify-end space-x-2">
<span className="text-xs text-muted-foreground">
{secondsToTimestamp(recording.duration / 1000)}
</span>
</div>
<div className="w-full">
<div className="bg-white rounded-lg py-2 px-4 relative mb-1">
<div className="flex items-center justify-end space-x-2">
<span className="text-xs text-muted-foreground">
{secondsToTimestamp(recording.duration / 1000)}
</span>
</div>
<RecordingPlayer
recording={recording}
isPlaying={isPlaying}
setIsPlaying={setIsPlaying}
/>
<RecordingPlayer
recording={recording}
isPlaying={isPlaying}
setIsPlaying={setIsPlaying}
/>
<div className="flex items-center justify-end space-x-2">
<Button
onClick={onSelect}
variant="ghost"
size="sm"
className="p-1 h-6"
>
<InfoIcon
className={`w-4 h-4
<div className="flex items-center justify-end space-x-2">
<Button
onClick={onSelect}
variant="ghost"
size="sm"
className="p-1 h-6"
>
<InfoIcon
className={`w-4 h-4
${
recording.pronunciationAssessment
? recording.pronunciationAssessment
@@ -71,29 +109,53 @@ export const RecordingCard = (props: {
: "text-muted-foreground"
}
`}
/>
</Button>
/>
</Button>
<AlertDialog>
<AlertDialogTrigger asChild>
<Button variant="ghost" size="sm" className="p-1 h-6">
<Share2Icon className="w-4 h-4 text-muted-foreground" />
</Button>
</AlertDialogTrigger>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>{t("shareRecording")}</AlertDialogTitle>
<AlertDialogDescription>
{t("areYouSureToShareThisRecordingToCommunity")}
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>{t("cancel")}</AlertDialogCancel>
<AlertDialogAction>
<Button onClick={handleShare}>{t("share")}</Button>
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
<DropdownMenu>
<DropdownMenuTrigger>
<ChevronDownIcon className="w-4 h-4 text-muted-foreground" />
</DropdownMenuTrigger>
</div>
</div>
<div className="flex justify-end">
<span className="text-xs text-muted-foreground">
{formatDateTime(recording.createdAt)}
</span>
<DropdownMenuContent>
<DropdownMenuItem onClick={() => setIsDeleteDialogOpen(true)}>
<span className="mr-auto text-destructive capitalize">
{t("delete")}
</span>
<Trash2Icon className="w-4 h-4 text-destructive" />
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
</div>
</div>
<DropdownMenuContent>
<DropdownMenuItem onClick={() => setIsDeleteDialogOpen(true)}>
<span className="mr-auto text-destructive capitalize">
{t("delete")}
</span>
<Trash2Icon className="w-4 h-4 text-destructive" />
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
<div className="flex justify-end">
<span className="text-xs text-muted-foreground">
{formatDateTime(recording.createdAt)}
</span>
</div>
</div>
<AlertDialog
open={isDeleteDialogOpen}

View File

@@ -11,7 +11,7 @@ type PostType = {
user: UserType;
targetType?: string;
targetId?: string;
target?: MediumType | StoryType;
target?: MediumType | StoryType | RecordingType;
createdAt: Date;
updatedAt: Date;
};

View File

@@ -1,12 +1,12 @@
type RecordingType = {
id: string;
filename: string;
filename?: string;
target?: AudioType | (MessageType & any);
targetId: string;
targetType: string;
pronunciationAssessment?: PronunciationAssessmentType & any;
segmentIndex: number;
segmentText?: string;
referenceId: number;
referenceText?: string;
duration?: number;
src?: string;
md5: string;