From 9dc168f22e5a1bd4eef7e43c8386acf3d64d4e23 Mon Sep 17 00:00:00 2001
From: 北枳 <7854742+wang_rumeng@user.noreply.gitee.com>
Date: Fri, 4 Jul 2025 14:32:28 +0800
Subject: [PATCH] Optimize details
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 components/pages/home-page2.tsx               |   2 +-
 components/pages/style/home-page2.css         |   4 +-
 components/pages/work-flow/media-viewer.tsx   |  91 ++++++---
 components/pages/work-flow/task-info.tsx      | 193 ++++++++++++------
 .../pages/work-flow/use-workflow-data.tsx     |  65 ++++--
 components/video-screen-layout.tsx            |   2 +-
 components/work-flow/constants.ts             |  12 +-
 7 files changed, 259 insertions(+), 110 deletions(-)

diff --git a/components/pages/home-page2.tsx b/components/pages/home-page2.tsx
index 4ec0a2e..b9cc1ae 100644
--- a/components/pages/home-page2.tsx
+++ b/components/pages/home-page2.tsx
@@ -151,7 +151,7 @@ export function HomePage2() {
       {/* Create Project Button */}
-      <div
+      <div
diff --git a/components/pages/work-flow/media-viewer.tsx b/components/pages/work-flow/media-viewer.tsx
--- a/components/pages/work-flow/media-viewer.tsx
+++ b/components/pages/work-flow/media-viewer.tsx
 {
+    setUserHasInteracted(true);
     setIsMuted(!isMuted);
     if (mainVideoRef.current) {
       mainVideoRef.current.muted = !isMuted;
@@ -62,6 +65,7 @@
   };
 
   const handleVolumeChange = (newVolume: number) => {
+    setUserHasInteracted(true);
     setVolume(newVolume);
     if (mainVideoRef.current) {
       mainVideoRef.current.volume = newVolume;
@@ -79,20 +83,63 @@
     }
   };
 
+  useEffect(() => {
+    if (finalVideoRef.current && finalVideoReady) {
+      if (isFinalVideoPlaying) {
+        finalVideoRef.current.play().catch(error => {
+          console.log('Final video autoplay was blocked:', error);
+          // If autoplay is blocked, set the state to paused
+          setIsFinalVideoPlaying(false);
+        });
+      } else {
+        finalVideoRef.current.pause();
+      }
+    }
+  }, [isFinalVideoPlaying, finalVideoReady]);
+
   // Final video playback control
   const toggleFinalVideoPlay = () => {
+    setUserHasInteracted(true);
+    setIsFinalVideoPlaying(!isFinalVideoPlaying);
+  };
+
+  // Handle the final video having finished loading
+  const handleFinalVideoLoaded = () => {
     if (finalVideoRef.current) {
+      setFinalVideoReady(true);
+      applyVolumeSettings(finalVideoRef.current);
+
+      // If the current state says the video should be playing, try to play it
       if (isFinalVideoPlaying) {
-        finalVideoRef.current.pause();
-      } else {
-        finalVideoRef.current.play();
+        finalVideoRef.current.play().catch(error => {
+          console.log('Final video autoplay was blocked:', error);
+          setIsFinalVideoPlaying(false);
+        });
       }
-      setIsFinalVideoPlaying(!isFinalVideoPlaying);
     }
   };
 
+  // Handle video clicks - try to play on the first user interaction
+  const handleVideoClick = () => {
+    if (!userHasInteracted && finalVideoRef.current && finalVideoReady) {
+      setUserHasInteracted(true);
+      if (isFinalVideoPlaying) {
+        finalVideoRef.current.play().catch(error => {
+          console.log('Video playback failed:', error);
+        });
+      }
+    }
+  };
+
+  // Wrap the edit button click handler
+  const handleEditClick = (tab: string) => {
+    setUserHasInteracted(true);
+    onEditModalOpen(tab);
+  };
+
   // Fullscreen control
   const toggleFullscreen = () => {
+    setUserHasInteracted(true);
     if (!document.fullscreenElement) {
       // Enter fullscreen
       if (finalVideoRef.current) {
@@ -164,6 +211,17 @@
     };
   }, []);
 
+  // Clean up video state when the component unmounts
+  useEffect(() => {
+    return () => {
+      // Reset the final video state
+      setFinalVideoReady(false);
+      if (finalVideoRef.current) {
+        finalVideoRef.current.pause();
+      }
+    };
+  }, []);
+
   // Render the volume controls component
   const renderVolumeControls = () => (
     <div
@@ -228,7 +286,6 @@ export function MediaViewer({
         <div
     );
   };
diff --git a/components/pages/work-flow/task-info.tsx b/components/pages/work-flow/task-info.tsx
index b438efc..7ea8189 100644
--- a/components/pages/work-flow/task-info.tsx
+++ b/components/pages/work-flow/task-info.tsx
@@ -3,6 +3,16 @@
 import React from 'react';
 import { motion } from 'framer-motion';
 import { Skeleton } from '@/components/ui/skeleton';
+import {
+  Image,
+  Video,
+  CheckCircle,
+  Music,
+  Loader2,
+  User,
+  Scissors,
+  Tv
+} from 'lucide-react';
 
 interface TaskInfoProps {
   isLoading: boolean;
@@ -10,7 +20,32 @@ interface TaskInfoProps {
   currentLoadingText: string;
 }
 
+// Return the stage icon that matches the loading text
+const getStageIcon = (loadingText: string) => {
+  const text = loadingText.toLowerCase();
+
+  if (text.includes('sketch')) {
+    return Image;
+  } else if (text.includes('video')) {
+    return Video;
+  } else if (text.includes('character')) {
+    return User;
+  } else if (text.includes('audio')) {
+    return Music;
+  } else if (text.includes('post')) {
+    return Scissors;
+  } else if (text.includes('final')) {
+    return Tv;
+  } else if (text.includes('complete')) {
+    return CheckCircle;
+  } else {
+    return Loader2;
+  }
+};
+
 export function TaskInfo({ isLoading, taskObject, currentLoadingText }: TaskInfoProps) {
+  const StageIcon = getStageIcon(currentLoadingText);
+
   if (isLoading) {
     return (
       <>
@@ -46,7 +81,7 @@ export function TaskInfo({ isLoading, taskObject, currentLoadingText }: TaskInfo
         }}
       />
+        {/* Stage icon */}
-        {/* Background glow effect */}
-          {currentLoadingText}
-        {/* Main text - color fill animation */}
-          {currentLoadingText}
+          {currentLoadingText}
+        {/* Main text - color fill animation */}
+          {currentLoadingText}
+        {/* Dynamic light-dot effect */}
+        {/* Decorative line under the text */}
-        {/* Dynamic light-dot effect */}
-        {/* Decorative line under the text */}
diff --git a/components/pages/work-flow/use-workflow-data.tsx b/components/pages/work-flow/use-workflow-data.tsx
--- a/components/pages/work-flow/use-workflow-data.tsx
+++ b/components/pages/work-flow/use-workflow-data.tsx
 (null);
   const [taskSketch, setTaskSketch] = useState([]);
+  const [taskRoles, setTaskRoles] = useState([]);
   const [taskVideos, setTaskVideos] = useState([]);
   const [sketchCount, setSketchCount] = useState(0);
   const [isLoading, setIsLoading] = useState(true);
@@ -49,7 +50,7 @@
 
     // Simulate fetching the storyboard sketches in batches
     for (let i = 0; i < totalSketches; i++) {
-      await new Promise(resolve => setTimeout(resolve, 5000)); // 10s
+      await new Promise(resolve => setTimeout(resolve, MOCK_DELAY_TIME.sketch)); // 5s
 
       const newSketch = {
         id: `sketch-${i}`,
@@ -76,17 +77,37 @@
 
   // Simulate an API request: poll to fetch one character at a time
   const getTaskRole = async (taskId: string) => {
-    await new Promise(resolve => setTimeout(resolve, 2000 * selectedMockData.roles.length)); // extended to 30s
+    setTaskRoles([]);
+    const roleData = selectedMockData.roles;
+    const totalRoles = roleData.length;
+
+    for (let i = 0; i < totalRoles; i++) {
+      // First update the loading text to show the character currently being generated
+      setCurrentLoadingText(STEP_MESSAGES.newCharacter(i, totalRoles));
+
+      await new Promise(resolve => setTimeout(resolve, MOCK_DELAY_TIME.character)); // 2s per character
+
+      // Add the character to the list
+      setTaskRoles(prev => [...prev, roleData[i]]);
+
+      // Update the loading text with the number of completed characters
+      setCurrentLoadingText(STEP_MESSAGES.newCharacter(i + 1, totalRoles));
+
+      // If this is not the last character, delay briefly so the user can see the update
+      if (i < totalRoles - 1) {
+        await new Promise(resolve => setTimeout(resolve, 500));
+      }
+    }
   };
 
   // Simulate an API request: fetch the background audio
   const getTaskBackgroundAudio = async (taskId: string) => {
-    await new Promise(resolve => setTimeout(resolve, 2000)); // 10s
+    await new Promise(resolve => setTimeout(resolve, MOCK_DELAY_TIME.audio)); // 2s
   };
 
   // Simulate an API request: fetch the final product
   const getTaskFinalProduct = async (taskId: string) => {
-    await new Promise(resolve => setTimeout(resolve, 10000)); // 50s
+    await new Promise(resolve => setTimeout(resolve, MOCK_DELAY_TIME.final)); // 10s
   };
 
   // Simulate an API request: poll to fetch one storyboard video at a time
@@ -99,7 +120,7 @@
 
     // Simulate fetching the storyboard videos in batches
     for (let i = 0; i < totalVideos; i++) {
-      await new Promise(resolve => setTimeout(resolve, 6000)); // 60s
+      await new Promise(resolve => setTimeout(resolve, MOCK_DELAY_TIME.video)); // 6s
 
       const newVideo = {
         id: `video-${i}`,
@@ -130,6 +151,8 @@
     }
 
     const totalSketches = selectedMockData.sketch.length;
+    const totalVideos = selectedMockData.video.length;
+    const totalCharacters = selectedMockData.roles.length;
 
     if (currentStep === '1') {
       if (isGeneratingSketch) {
@@ -138,10 +161,14 @@
         setCurrentLoadingText(STEP_MESSAGES.sketchComplete);
       }
     } else if (currentStep === '2') {
-      setCurrentLoadingText(STEP_MESSAGES.character);
+      // During the character-generation stage the loading text is managed directly in getTaskRole,
+      // so nothing needs to be set here; setting it would overwrite that text
+      if (taskRoles.length === totalCharacters) {
+        setCurrentLoadingText(STEP_MESSAGES.newCharacter(totalCharacters, totalCharacters));
+      }
     } else if (currentStep === '3') {
       if (isGeneratingVideo) {
-        setCurrentLoadingText(STEP_MESSAGES.video(taskVideos.length, totalSketches));
+        setCurrentLoadingText(STEP_MESSAGES.video(taskVideos.length, totalVideos));
       } else {
         setCurrentLoadingText(STEP_MESSAGES.videoComplete);
       }
@@ -152,7 +179,7 @@
     } else {
       setCurrentLoadingText(STEP_MESSAGES.complete);
     }
-  }, [isLoading, currentStep, isGeneratingSketch, sketchCount, isGeneratingVideo, taskVideos.length, taskSketch.length]);
+  }, [isLoading, currentStep, isGeneratingSketch, sketchCount, isGeneratingVideo, taskVideos.length, taskSketch.length, taskRoles.length]);
 
   // Initialize data
   useEffect(() => {
@@ -194,7 +221,15 @@
     // After fetching the storyboard videos, start fetching the background audio
     await getTaskBackgroundAudio(taskId);
 
-    await new Promise(resolve => setTimeout(resolve, 2000));
+    // Post-production: frame selection, lip syncing, dubbing, consistency processing
+    setCurrentLoadingText(STEP_MESSAGES.postProduction('Selecting optimal frames'));
+    await new Promise(resolve => setTimeout(resolve, MOCK_DELAY_TIME.postProduction));
+    setCurrentLoadingText(STEP_MESSAGES.postProduction('Aligning lip sync'));
+    await new Promise(resolve => setTimeout(resolve, MOCK_DELAY_TIME.postProduction));
+    setCurrentLoadingText(STEP_MESSAGES.postProduction('Adding background audio'));
+    await new Promise(resolve => setTimeout(resolve, MOCK_DELAY_TIME.postProduction));
+    setCurrentLoadingText(STEP_MESSAGES.postProduction('Consistency processing'));
+    await new Promise(resolve => setTimeout(resolve, MOCK_DELAY_TIME.postProduction));
 
     // Set taskStatus on taskObject to '5'
     setTaskObject((prev: any) => ({
       taskStatus: '5'
     }));
     setCurrentStep('5');
-    // Post-production: frame selection, lip syncing, dubbing, consistency processing
-    setCurrentLoadingText(STEP_MESSAGES.postProduction('Selecting optimal frames'));
-    await new Promise(resolve => setTimeout(resolve, 10000));
-    setCurrentLoadingText(STEP_MESSAGES.postProduction('Aligning lip sync'));
-    await new Promise(resolve => setTimeout(resolve, 10000));
-    setCurrentLoadingText(STEP_MESSAGES.postProduction('Adding background audio'));
-    await new Promise(resolve => setTimeout(resolve, 10000));
-    setCurrentLoadingText(STEP_MESSAGES.postProduction('Consistency processing'));
-    await new Promise(resolve => setTimeout(resolve, 10000));
+
     // After fetching the background audio, start fetching the final product
     await getTaskFinalProduct(taskId);
     await new Promise(resolve => setTimeout(resolve, 2000));
diff --git a/components/video-screen-layout.tsx b/components/video-screen-layout.tsx
index 5c08109..b7a5b0c 100644
--- a/components/video-screen-layout.tsx
+++ b/components/video-screen-layout.tsx
@@ -139,7 +139,7 @@ function VideoScreenLayoutComponent({ videos }: VideoScreenLayoutProps) {
   };
 
   return (
-    <div
+    <div
       {/* Video panel container */}
       <div
diff --git a/components/work-flow/constants.ts b/components/work-flow/constants.ts
 export const STEP_MESSAGES = {
   sketch: (count: number, total: number) => `Generating sketch ${count + 1 > total ? total : count + 1}/${total}...`,
   sketchComplete: 'Sketch generation complete',
   character: 'Drawing characters...',
+  newCharacter: (count: number, total: number) => `Drawing character ${count + 1 > total ? total : count + 1}/${total}...`,
   video: (count: number, total: number) => `Generating video ${count + 1 > total ? total : count + 1}/${total}...`,
   videoComplete: 'Video generation complete',
   audio: 'Generating background audio...',
   postProduction: (step: string) => `Post-production: ${step}...`,
   final: 'Generating final product...',
   complete: 'Task completed'
-};
\ No newline at end of file
+};
+
+export const MOCK_DELAY_TIME = {
+  sketch: 5000, // 5s per sketch
+  character: 2000, // 2s per character
+  video: 6000, // 6s per storyboard video
+  audio: 2000, // 2s for the background audio
+  postProduction: 2000, // 2s per post-production step
+  final: 10000, // 10s for the final product
+}
\ No newline at end of file