diff --git a/vite/public/assets/1014/Q4-1-1.mp3 b/vite/public/assets/1014/Q4-1-1.mp3
deleted file mode 100644
index ab655dd..0000000
Binary files a/vite/public/assets/1014/Q4-1-1.mp3 and /dev/null differ
diff --git a/vite/public/assets/1016/Q4-1-1.mp3 b/vite/public/assets/1016/Q4-1-1.mp3
new file mode 100644
index 0000000..15fa742
Binary files /dev/null and b/vite/public/assets/1016/Q4-1-1.mp3 differ
diff --git a/vite/public/cuelist_1009.json b/vite/public/cuelist_1009.json
index 00d0d91..d2e00b8 100644
--- a/vite/public/cuelist_1009.json
+++ b/vite/public/cuelist_1009.json
@@ -49,7 +49,7 @@
         "nextcue": 4.1,
         "callback":"numpad",
         "numpad_type":"userid",
-        "input_time": 26000
+        "input_time": 26000
     },
     {
         "id": 4.1,
@@ -68,7 +68,7 @@
         "type": "phone",
         "description": "裝置啟動",
         "auto": true,
-        "audioFile": "assets/0926/Q4-1-1_0926.mp3",
+        "audioFile": "assets/1016/Q4-1-1.mp3",
         "nextcue": 4.2,
         "status":"intro",
         "status_delay": 3000
diff --git a/vite/src/pages/flow_free.jsx b/vite/src/pages/flow_free.jsx
index 1f9b420..8e011ce 100644
--- a/vite/src/pages/flow_free.jsx
+++ b/vite/src/pages/flow_free.jsx
@@ -116,6 +116,9 @@ export function FreeFlow(){
 
         setPadInput();
         setChoice();
+
+        if(refChatTimer.current) clearInterval(refChatTimer.current);
+
     }
 
     function onOsc(payload){
@@ -510,10 +513,18 @@ export function FreeFlow(){
         // refSpeechPaused.current=false;
     }
     function sendSpeechStart(){
+
+        if(refCurrentCue.current?.type!='chat') return;
+
+        console.log('------- speech start -------');
         sendOsc(OSC_ADDRESS.SPEECH, 'start');
         refSpeaking.current=true;
     }
     function sendSpeechEnd(){
+
+        // if(refCurrentCue.current?.type!='chat' || chatStatus!=ChatStatus.User) return;
+
+        console.log('------- speech end -------');
         sendOsc(OSC_ADDRESS.SPEECH, 'stop');
         refSpeaking.current=false;
     }
@@ -700,7 +711,9 @@ export function FreeFlow(){
         // sendOsc(OSC_ADDRESS.SPEECH_PAUSE, data.speech_idle_time.toString());
 
         refPauseTimer.current=setTimeout(()=>{
-            refSpeaking.current=false;
+
+            // sendSpeechEnd();
+
             console.log('~~~ pause timer ended, process speech');
 
             // if(refSpeechPaused.current) processSpeech();
@@ -712,10 +725,12 @@ export function FreeFlow(){
 
     function onSpeechEnd(){
 
-        sendSpeechEnd();
+        if(currentCue?.type!='chat') return; // Only process if current cue is user input
 
         if(chatStatus!=ChatStatus.User) return; // Only process if chat status is User
+
+        sendSpeechEnd();
 
         console.log('~~~ on speech end, start pause timer',data.speech_idle_time);
         // refSpeechPaused.current=true;
@@ -730,7 +745,7 @@ export function FreeFlow(){
         if(refChatTimer.current) clearInterval(refChatTimer.current);
         // sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown
 
-        sendSpeechEnd();
+        // sendSpeechEnd();
 
         if(currentCue?.type!='chat') return; // Only process if current cue is user input
@@ -810,7 +825,7 @@ export function FreeFlow(){
         recognition.onspeechstart=(e)=>{
             console.log('Speech start:', e);
-            refSpeaking.current=true;
+            sendSpeechStart();
         };
 
         // recognition.onspeechend=(e)=>{
@@ -846,8 +861,8 @@ export function FreeFlow(){
         }
 
         if(transcript.length>0){
-            sendSpeechStart();
-            if(refChatTimer.current) clearInterval(refChatTimer.current);
+            // sendSpeechStart();
+            // if(refChatTimer.current) clearInterval(refChatTimer.current);
         }
 
         sendOscStatus(OSC_ADDRESS.CLIENT_INPUT, `${data.id}#${transcript}`);
@@ -868,9 +883,11 @@ export function FreeFlow(){
             console.log('Still speaking, do not play AI audio yet');
             sendPrompt();
             return;
+        }else{
+            // play ai audio
+            console.log('AI audio ready, play it:', audioUrl);
+            playAudio(audioUrl);
         }
-        // play ai audio
-        playAudio(audioUrl);
 
     },[audioUrl]);
@@ -902,7 +919,7 @@ export function FreeFlow(){
 
         sendOsc(OSC_ADDRESS.INPUT, text);
 
-        sendSpeechEnd();
+        // sendSpeechEnd();
 
     },[chatStatus]);
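
Taken together, the flow_free.jsx hunks converge on one rule: OSC speech start/stop messages are emitted only while a 'chat' cue is active and it is the user's turn, and the end of an utterance is debounced through a pause timer before the transcript is processed. Below is a condensed, illustrative sketch of that gating pattern, not the literal patched code; it assumes the same refs and helpers as flow_free.jsx, and IDLE_MS / processSpeech are stand-ins for data.speech_idle_time and the downstream handler.

    // Sketch only (assumed names: IDLE_MS, processSpeech).
    // refCurrentCue/refSpeaking/refPauseTimer are React refs;
    // sendOsc and OSC_ADDRESS come from flow_free.jsx.
    function sendSpeechStart(){
      if(refCurrentCue.current?.type!='chat') return;   // gate: chat cues only
      sendOsc(OSC_ADDRESS.SPEECH, 'start');
      refSpeaking.current=true;
    }

    function onSpeechEnd(){
      if(refCurrentCue.current?.type!='chat') return;   // same gate on the way out
      if(chatStatus!=ChatStatus.User) return;           // gate: user's turn only
      sendOsc(OSC_ADDRESS.SPEECH, 'stop');              // i.e. sendSpeechEnd()
      refSpeaking.current=false;
      // Debounce: treat the utterance as finished only after IDLE_MS of silence.
      refPauseTimer.current=setTimeout(()=>{
        console.log('~~~ pause timer ended, process speech');
        processSpeech();
      }, IDLE_MS);
    }

Routing the recognizer's onspeechstart through sendSpeechStart(), instead of flipping refSpeaking directly, means the cue-type gate applies at every point a start event can originate.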