main
reng 2 months ago
parent e8ea4fbd66
commit 4da47a6f86
  1. BIN
      vite/public/assets/1014/Q4-1-1.mp3
  2. BIN
      vite/public/assets/1016/Q4-1-1.mp3
  3. 2
      vite/public/cuelist_1009.json
  4. 33
      vite/src/pages/flow_free.jsx

@ -68,7 +68,7 @@
"type": "phone",
"description": "裝置啟動",
"auto": true,
"audioFile": "assets/0926/Q4-1-1_0926.mp3",
"audioFile": "assets/1016/Q4-1-1.mp3",
"nextcue": 4.2,
"status":"intro",
"status_delay": 3000

@ -116,6 +116,9 @@ export function FreeFlow(){
setPadInput();
setChoice();
if(refChatTimer.current) clearInterval(refChatTimer.current);
}
function onOsc(payload){
@ -510,10 +513,18 @@ export function FreeFlow(){
// refSpeechPaused.current=false;
}
// Notify the OSC peer that the user has started speaking.
// No-op unless the currently active cue is a 'chat' cue (user-input phase).
function sendSpeechStart(){
if(refCurrentCue.current?.type!='chat') return; // only chat cues accept speech input
console.log('------- speech start -------');
sendOsc(OSC_ADDRESS.SPEECH, 'start'); // OSC speech channel: 'start' message
refSpeaking.current=true; // local flag: user is currently speaking
}
// Notify the OSC peer that speech has stopped and clear the speaking flag.
// NOTE(review): unlike sendSpeechStart, the chat-cue/chat-status guard below is
// commented out, so this fires regardless of cue type — presumably intentional
// (callers guard instead); confirm before re-enabling the guard.
function sendSpeechEnd(){
// if(refCurrentCue.current?.type!='chat' || chatStatus!=ChatStatus.User) return;
console.log('------- speech end -------');
sendOsc(OSC_ADDRESS.SPEECH, 'stop'); // OSC speech channel: 'stop' message
refSpeaking.current=false; // clear local speaking flag
}
@ -700,7 +711,9 @@ export function FreeFlow(){
// sendOsc(OSC_ADDRESS.SPEECH_PAUSE, data.speech_idle_time.toString());
refPauseTimer.current=setTimeout(()=>{
refSpeaking.current=false;
// sendSpeechEnd();
console.log('~~~ pause timer ended, process speech');
// if(refSpeechPaused.current)
processSpeech();
@ -712,11 +725,13 @@ export function FreeFlow(){
function onSpeechEnd(){
sendSpeechEnd();
if(currentCue?.type!='chat') return; // Only process if current cue is user input
if(chatStatus!=ChatStatus.User) return; // Only process if chat status is User
sendSpeechEnd();
console.log('~~~ on speech end, start pause timer',data.speech_idle_time);
// refSpeechPaused.current=true;
setPauseTimer();
@ -730,7 +745,7 @@ export function FreeFlow(){
if(refChatTimer.current) clearInterval(refChatTimer.current);
// sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown
sendSpeechEnd();
// sendSpeechEnd();
if(currentCue?.type!='chat') return; // Only process if current cue is user input
@ -810,7 +825,7 @@ export function FreeFlow(){
recognition.onspeechstart=(e)=>{
console.log('Speech start:', e);
refSpeaking.current=true;
sendSpeechStart();
};
// recognition.onspeechend=(e)=>{
@ -846,8 +861,8 @@ export function FreeFlow(){
}
if(transcript.length>0){
sendSpeechStart();
if(refChatTimer.current) clearInterval(refChatTimer.current);
// sendSpeechStart();
// if(refChatTimer.current) clearInterval(refChatTimer.current);
}
sendOscStatus(OSC_ADDRESS.CLIENT_INPUT, `${data.id}#${transcript}`);
@ -868,10 +883,12 @@ export function FreeFlow(){
console.log('Still speaking, do not play AI audio yet');
sendPrompt();
return;
}
}else{
// play ai audio
console.log('AI audio ready, play it:', audioUrl);
playAudio(audioUrl);
}
}
},[audioUrl]);
useEffect(()=>{
@ -902,7 +919,7 @@ export function FreeFlow(){
sendOsc(OSC_ADDRESS.INPUT, text);
sendSpeechEnd();
// sendSpeechEnd();
},[chatStatus]);

Loading…
Cancel
Save