diff --git a/vite/public/assets/1014/Q4-1-1.mp3 b/vite/public/assets/1014/Q4-1-1.mp3
new file mode 100644
index 0000000..ab655dd
Binary files /dev/null and b/vite/public/assets/1014/Q4-1-1.mp3 differ
diff --git a/vite/public/cuelist_1009.json b/vite/public/cuelist_1009.json
index 00d54a5..00d0d91 100644
--- a/vite/public/cuelist_1009.json
+++ b/vite/public/cuelist_1009.json
@@ -49,7 +49,7 @@
"nextcue": 4.1,
"callback":"numpad",
"numpad_type":"userid",
- "input_time": 12000
+ "input_time": 26000
},
{
"id": 4.1,
diff --git a/vite/src/comps/debug.jsx b/vite/src/comps/debug.jsx
index 25d0238..18c9342 100644
--- a/vite/src/comps/debug.jsx
+++ b/vite/src/comps/debug.jsx
@@ -14,7 +14,7 @@ export function DebugControl({onReset}){
}
return (
-
+
)
diff --git a/vite/src/pages/flow_free.jsx b/vite/src/pages/flow_free.jsx
index c2d7805..1f9b420 100644
--- a/vite/src/pages/flow_free.jsx
+++ b/vite/src/pages/flow_free.jsx
@@ -74,7 +74,6 @@ export function FreeFlow(){
// const refLight=useRef();
const refPauseTimer=useRef();
- const refSpeechPaused=useRef(false);
const refChatCueEnd=useRef(false);
const refContainer=useRef();
@@ -89,6 +88,7 @@ export function FreeFlow(){
const refVolDownInterval=useRef();
const refChatTimer=useRef();
+ const refSpeaking=useRef(false); // Tracks whether the visitor is currently speaking
const [lastOsc, setLastOsc]=useState();
@@ -112,7 +112,7 @@ export function FreeFlow(){
reset();
resetUser();
sendOsc(OSC_ADDRESS.CHOICE, 'reset');
- sendOsc(OSC_ADDRESS.SPEECH, 'stop');
+ sendSpeechEnd();
setPadInput();
setChoice();
@@ -289,9 +289,14 @@ export function FreeFlow(){
}else{
// if history contains user input, send it
-
- sendPrompt();
-
+ const last_user_input = history.slice().reverse().find(msg => msg.role === 'user');
+ console.log('Audio ended, checking for user input in history:', last_user_input);
+ if(last_user_input && last_user_input.content!='...'){
+ sendPrompt();
+ }else{
+ setChatStatus(ChatStatus.User); // No real user input yet, keep chat status on User
+ }
+
}
}
@@ -377,9 +382,10 @@ export function FreeFlow(){
sendOsc(OSC_ADDRESS.HINT, ''); // Clear hint message
sendOsc(OSC_ADDRESS.INPUT, ''); // Clear input message
- sendOsc(OSC_ADDRESS.SPEECH, 'stop');
+ sendSpeechEnd();
setPadInput();
+
switch(cue.type){
case 'chat':
@@ -488,11 +494,10 @@ export function FreeFlow(){
resetData();
}
}
- if(cue.type=='user_input') {
+ if(cue.type=='user_input' || cue.type=='chat'){
sendOsc(OSC_ADDRESS.COUNTDOWN, cue.duration || '0'); // Send OSC countdown message
}else{
- if(cue.type!='chat')
- sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown for non-chat cues
+ sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown for other cue types
}
if(cue.numpad_type=='choice'){
setChoice();
@@ -504,7 +509,14 @@ export function FreeFlow(){
if(refPauseTimer.current) clearTimeout(refPauseTimer.current);
// refSpeechPaused.current=false;
}
-
+ function sendSpeechStart(){
+ sendOsc(OSC_ADDRESS.SPEECH, 'start');
+ refSpeaking.current=true;
+ }
+ function sendSpeechEnd(){
+ sendOsc(OSC_ADDRESS.SPEECH, 'stop');
+ refSpeaking.current=false;
+ }
function onCueEnd() {
refTimer.current?.stop(); // Stop the timer when cue ends
@@ -525,8 +537,8 @@ export function FreeFlow(){
sendOsc(OSC_ADDRESS.HINT, ''); // Clear hint message
+ sendSpeechEnd();
- sendOsc(OSC_ADDRESS.SPEECH, 'stop');
if(cue.numpad_type=='choice'){
if(!choice){
@@ -577,7 +589,7 @@ export function FreeFlow(){
}
function exportFile(){
- const user_input = history.find(msg => msg.role === 'user');
+ const user_input = history.find(msg => msg.role === 'user' && msg.content!='...');
const default_image=!(user_input && user_input.content.trim() !== '');
sendOsc(OSC_ADDRESS.EXPORT, `${getUploadFolder()}#${getDataId()}#${summary||''}#${getFileId(padInput)}#${choice||''}#${default_image?'default':'generated'}`); // Send OSC export message
@@ -585,9 +597,10 @@ export function FreeFlow(){
}
function startChatTimer(){
- sendOsc(OSC_ADDRESS.COUNTDOWN, refCurrentCue.current?.chatInterval || '0'); // Send OSC countdown message
+ // sendOsc(OSC_ADDRESS.COUNTDOWN, refCurrentCue.current?.chatInterval || '0'); // Send OSC countdown message
if(refChatTimer.current) clearInterval(refChatTimer.current);
-
+ if(refCurrentCue.current?.type!='chat') return; // Chat countdown only applies to chat cues
+
let timeleft=refCurrentCue.current?.chatInterval || 0;
const endTime=new Date().getTime()+timeleft*1000;
@@ -681,26 +694,33 @@ export function FreeFlow(){
},[userId]);
- function onSpeechEnd(){
-
- sendOsc(OSC_ADDRESS.SPEECH, 'stop');
-
- if(currentCue?.type!='chat') return; // Only process if current cue is user input
- if(chatStatus!=ChatStatus.User) return; // Only process if chat status is User
-
- console.log('~~~ on speech end, start pause timer',data.speech_idle_time);
- // refSpeechPaused.current=true;
+ function setPauseTimer(){
if(refPauseTimer.current) clearTimeout(refPauseTimer.current);
- sendOsc(OSC_ADDRESS.SPEECH_PAUSE, data.speech_idle_time.toString());
+ // sendOsc(OSC_ADDRESS.SPEECH_PAUSE, data.speech_idle_time.toString());
refPauseTimer.current=setTimeout(()=>{
+ refSpeaking.current=false;
console.log('~~~ pause timer ended, process speech');
// if(refSpeechPaused.current)
processSpeech();
- sendOsc(OSC_ADDRESS.SPEECH_PAUSE, '0');
+ // sendOsc(OSC_ADDRESS.SPEECH_PAUSE, '0');
}, data.speech_idle_time);
+ }
+
+
+ function onSpeechEnd(){
+
+ sendSpeechEnd();
+
+ if(currentCue?.type!='chat') return; // Only process if the current cue is a chat cue
+ if(chatStatus!=ChatStatus.User) return; // Only process if chat status is User
+
+ console.log('~~~ on speech end, start pause timer',data.speech_idle_time);
+ // refSpeechPaused.current=true;
+ setPauseTimer();
+
}
function processSpeech(){
@@ -708,8 +728,9 @@ export function FreeFlow(){
if(refPauseTimer.current) clearTimeout(refPauseTimer.current);
if(refChatTimer.current) clearInterval(refChatTimer.current);
- sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown
+ // sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown
+ sendSpeechEnd();
if(currentCue?.type!='chat') return; // Only process if current cue is user input
@@ -789,7 +810,7 @@ export function FreeFlow(){
recognition.onspeechstart=(e)=>{
console.log('Speech start:', e);
-
+ refSpeaking.current=true;
};
// recognition.onspeechend=(e)=>{
@@ -817,14 +838,17 @@ export function FreeFlow(){
if(transcript!=finalTranscript){
refInput.current.value = transcript;
+
// clear pause timer
// console.log('~~~~ clear pause timer');
if(refPauseTimer.current) clearTimeout(refPauseTimer.current);
- refSpeechPaused.current=false;
}
- sendOsc(OSC_ADDRESS.SPEECH, 'start');
+ if(transcript.length>0){
+ sendSpeechStart();
+ if(refChatTimer.current) clearInterval(refChatTimer.current); // Stop chat countdown while the visitor speaks
+ }
sendOscStatus(OSC_ADDRESS.CLIENT_INPUT, `${data.id}#${transcript}`);
// Send current input via OSC
@@ -839,7 +863,15 @@ export function FreeFlow(){
if(refCurrentCue.current?.type!='chat') return;
- if(audioUrl) playAudio(audioUrl);
+ if(audioUrl){
+ if(refSpeaking.current) {
+ console.log('Still speaking, do not play AI audio yet');
+ sendPrompt();
+ return;
+ }
+ // play ai audio
+ playAudio(audioUrl);
+ }
},[audioUrl]);
useEffect(()=>{
@@ -867,8 +899,10 @@ export function FreeFlow(){
text = '';
break;
}
- sendOsc(OSC_ADDRESS.SPEECH, 'stop');
+
+
sendOsc(OSC_ADDRESS.INPUT, text);
+ sendSpeechEnd();
},[chatStatus]);