branch: main
author: reng (2 months ago)
parent: 961333603d
commit: e8ea4fbd66
Changed files:
  vite/public/assets/1014/Q4-1-1.mp3   BIN
  vite/public/cuelist_1009.json        2 changed lines
  vite/src/comps/debug.jsx             10 changed lines
  vite/src/pages/flow_free.jsx         96 changed lines

vite/public/cuelist_1009.json
@@ -49,7 +49,7 @@
 "nextcue": 4.1,
 "callback":"numpad",
 "numpad_type":"userid",
-"input_time": 12000
+"input_time": 26000
 },
 {
 "id": 4.1,

vite/src/comps/debug.jsx
@@ -14,7 +14,7 @@ export function DebugControl({onReset}){
 }
 return (
-<div className="grid grid-cols-3 gap-2 [&>button]:rounded-full [&>button]:bg-white bg-gray-200 p-2 w-full justify-center">
+<div className="grid grid-cols-4 gap-2 [&>button]:rounded-full [&>button]:bg-white bg-gray-200 p-2 w-full justify-center">
 <button onClick={() =>{
 sendOsc(OSC_ADDRESS.STATUS, 'reset');
 onReset();
@@ -25,8 +25,12 @@ export function DebugControl({onReset}){
 <button onClick={() => sendOsc(OSC_ADDRESS.STATUS, 'end')}>end</button>
 {/* <div className="flex flex-col gap-1"> */}
-<button className="btn btn-success" onClick={() => sendPrompt('a telephone on table with light')}>Test Prompt #1</button>
-<button className="btn btn-success" onClick={() => sendPrompt(TEST_PROMPT)}>Test Prompt #2</button>
+<div className="col-span-4 grid grid-cols-4 gap-2">
+<input type="text" className="p-1 col-span-3 bg-white" placeholder="Enter prompt here" id="prompt_input" defaultValue={TEST_PROMPT}/>
+<button className="btn btn-success" onClick={() => sendPrompt(document.getElementById('prompt_input').value)}>Send Prompt</button>
+</div>
+{/* <button className="btn btn-success" onClick={() => sendPrompt('a telephone on table with light')}>Test Prompt #1</button>
+<button className="btn btn-success" onClick={() => sendPrompt(TEST_PROMPT)}>Test Prompt #2</button> */}
 {/* </div> */}
 </div>
 )
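A minimal standalone sketch of the free-text prompt control this hunk introduces, assuming sendPrompt and TEST_PROMPT behave as in the repo; the React ref here replaces document.getElementById purely for illustration and is not what the commit does:

import { useRef } from 'react';

// Sketch: free-text prompt box with a send button (ref-based variant).
function PromptBox({ sendPrompt, defaultPrompt }) {
  const inputRef = useRef();
  return (
    <div className="col-span-4 grid grid-cols-4 gap-2">
      <input ref={inputRef} type="text" className="p-1 col-span-3 bg-white"
             placeholder="Enter prompt here" defaultValue={defaultPrompt}/>
      <button className="btn btn-success"
              onClick={() => sendPrompt(inputRef.current.value)}>Send Prompt</button>
    </div>
  );
}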

vite/src/pages/flow_free.jsx
@@ -74,7 +74,6 @@ export function FreeFlow(){
 // const refLight=useRef();
 const refPauseTimer=useRef();
 const refSpeechPaused=useRef(false);
-const refChatCueEnd=useRef(false);
 const refContainer=useRef();
@@ -89,6 +88,7 @@ export function FreeFlow(){
 const refVolDownInterval=useRef();
 const refChatTimer=useRef();
+const refSpeaking=useRef(false);
 const [lastOsc, setLastOsc]=useState();
@@ -112,7 +112,7 @@ export function FreeFlow(){
 reset();
 resetUser();
 sendOsc(OSC_ADDRESS.CHOICE, 'reset');
-sendOsc(OSC_ADDRESS.SPEECH, 'stop');
+sendSpeechEnd();
 setPadInput();
 setChoice();
@@ -289,9 +289,14 @@ export function FreeFlow(){
 }else{
 // if history contains user input, send it
-sendPrompt();
+const last_user_input = history.slice().reverse().find(msg => msg.role === 'user');
+console.log('Audio ended, checking for user input in history:', last_user_input);
+if(last_user_input.content!='...'){
+sendPrompt();
+}else{
+setChatStatus(ChatStatus.User); // Reset chat status to Clear
+}
 }
 }
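A minimal sketch of the guard this hunk adds, assuming history is an array of {role, content} messages and '...' is the placeholder used for empty input; the optional chaining is illustrative, added here to cover an empty history, and is not in the commit:

// Find the most recent user message without mutating history.
const lastUserInput = history.slice().reverse().find(msg => msg.role === 'user');

if (lastUserInput?.content && lastUserInput.content !== '...') {
  sendPrompt();                   // real input: ask the model for a reply
} else {
  setChatStatus(ChatStatus.User); // placeholder only: hand the turn back to the user
}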
@@ -377,9 +382,10 @@ export function FreeFlow(){
 sendOsc(OSC_ADDRESS.HINT, ''); // Clear hint message
 sendOsc(OSC_ADDRESS.INPUT, ''); // Clear input message
-sendOsc(OSC_ADDRESS.SPEECH, 'stop');
+sendSpeechEnd();
 setPadInput();
 switch(cue.type){
 case 'chat':
@@ -488,11 +494,10 @@ export function FreeFlow(){
 resetData();
 }
 }
-if(cue.type=='user_input') {
+if(cue.type=='user_input' || cue.type=='chat'){
 sendOsc(OSC_ADDRESS.COUNTDOWN, cue.duration || '0'); // Send OSC countdown message
 }else{
-if(cue.type!='chat')
-sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown for non-chat cues
+sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown for non-chat cues
 }
 if(cue.numpad_type=='choice'){
 setChoice();
@@ -504,7 +509,14 @@ export function FreeFlow(){
 if(refPauseTimer.current) clearTimeout(refPauseTimer.current);
 // refSpeechPaused.current=false;
 }
+function sendSpeechStart(){
+sendOsc(OSC_ADDRESS.SPEECH, 'start');
+refSpeaking.current=true;
+}
+function sendSpeechEnd(){
+sendOsc(OSC_ADDRESS.SPEECH, 'stop');
+refSpeaking.current=false;
+}
 function onCueEnd() {
 refTimer.current?.stop(); // Stop the timer when cue ends
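The new helpers pair the OSC speech message with the refSpeaking flag so the rest of the component reads a single source of truth. A minimal sketch of the same idea factored into a hook; the hook itself is hypothetical, while sendOsc and OSC_ADDRESS are assumed to be the repo's OSC helpers:

import { useRef } from 'react';

// Sketch: keep the OSC speech channel and a local "speaking" flag in sync.
function useSpeechState(sendOsc, OSC_ADDRESS) {
  const refSpeaking = useRef(false); // a ref, not state: readable inside timers without re-rendering
  const sendSpeechStart = () => {
    sendOsc(OSC_ADDRESS.SPEECH, 'start');
    refSpeaking.current = true;
  };
  const sendSpeechEnd = () => {
    sendOsc(OSC_ADDRESS.SPEECH, 'stop');
    refSpeaking.current = false;
  };
  return { refSpeaking, sendSpeechStart, sendSpeechEnd };
}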
@@ -525,8 +537,8 @@ export function FreeFlow(){
 sendOsc(OSC_ADDRESS.HINT, ''); // Clear hint message
+sendSpeechEnd();
-sendOsc(OSC_ADDRESS.SPEECH, 'stop');
 if(cue.numpad_type=='choice'){
 if(!choice){
@@ -577,7 +589,7 @@ export function FreeFlow(){
 }
 function exportFile(){
-const user_input = history.find(msg => msg.role === 'user');
+const user_input = history.find(msg => msg.role === 'user' && msg.content!='...');
 const default_image=!(user_input && user_input.content.trim() !== '');
 sendOsc(OSC_ADDRESS.EXPORT, `${getUploadFolder()}#${getDataId()}#${summary||''}#${getFileId(padInput)}#${choice||''}#${default_image?'default':'generated'}`); // Send OSC export message
@@ -585,9 +597,10 @@ export function FreeFlow(){
 }
 function startChatTimer(){
-sendOsc(OSC_ADDRESS.COUNTDOWN, refCurrentCue.current?.chatInterval || '0'); // Send OSC countdown message
+// sendOsc(OSC_ADDRESS.COUNTDOWN, refCurrentCue.current?.chatInterval || '0'); // Send OSC countdown message
 if(refChatTimer.current) clearInterval(refChatTimer.current);
+if(refCurrentCue.current?.type!='chat') return;
 let timeleft=refCurrentCue.current?.chatInterval || 0;
 const endTime=new Date().getTime()+timeleft*1000;
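A minimal sketch of the countdown this function drives, assuming chatInterval is in seconds and that computing a fixed end time (as the repo does) keeps the countdown accurate even when interval ticks are delayed; onTick and onExpire are hypothetical callbacks, not repo names:

// Count chatInterval seconds down against a fixed end time.
function startCountdown(chatInterval, onTick, onExpire) {
  const endTime = Date.now() + chatInterval * 1000;
  const timer = setInterval(() => {
    const timeleft = Math.max(0, Math.ceil((endTime - Date.now()) / 1000));
    onTick(timeleft);
    if (timeleft <= 0) {
      clearInterval(timer);
      onExpire();
    }
  }, 1000);
  return timer; // store in refChatTimer.current so it can be cleared on cue change
}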
@@ -681,26 +694,33 @@ export function FreeFlow(){
 },[userId]);
-function onSpeechEnd(){
-sendOsc(OSC_ADDRESS.SPEECH, 'stop');
-if(currentCue?.type!='chat') return; // Only process if current cue is user input
-if(chatStatus!=ChatStatus.User) return; // Only process if chat status is User
-console.log('~~~ on speech end, start pause timer',data.speech_idle_time);
-// refSpeechPaused.current=true;
+function setPauseTimer(){
 if(refPauseTimer.current) clearTimeout(refPauseTimer.current);
-sendOsc(OSC_ADDRESS.SPEECH_PAUSE, data.speech_idle_time.toString());
+// sendOsc(OSC_ADDRESS.SPEECH_PAUSE, data.speech_idle_time.toString());
 refPauseTimer.current=setTimeout(()=>{
+refSpeaking.current=false;
 console.log('~~~ pause timer ended, process speech');
 // if(refSpeechPaused.current)
 processSpeech();
-sendOsc(OSC_ADDRESS.SPEECH_PAUSE, '0');
+// sendOsc(OSC_ADDRESS.SPEECH_PAUSE, '0');
 }, data.speech_idle_time);
 }
+function onSpeechEnd(){
+sendSpeechEnd();
+if(currentCue?.type!='chat') return; // Only process if current cue is user input
+if(chatStatus!=ChatStatus.User) return; // Only process if chat status is User
+console.log('~~~ on speech end, start pause timer',data.speech_idle_time);
+// refSpeechPaused.current=true;
+setPauseTimer();
+}
 function processSpeech(){
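The extracted setPauseTimer is essentially a debounce: the onresult handler in a later hunk clears the timer whenever new speech arrives, and processSpeech fires only after speech_idle_time of silence. A minimal sketch of that pattern outside React, with hypothetical names:

// Debounce sketch: fire onIdle only after idleMs with no further speech events.
function createSpeechIdleWatcher(idleMs, onIdle) {
  let timer;
  return {
    onSpeech() {                       // call on every interim transcript or speech start
      clearTimeout(timer);
      timer = setTimeout(onIdle, idleMs);
    },
    cancel() { clearTimeout(timer); }  // call when the cue ends or the user submits
  };
}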
@@ -708,8 +728,9 @@ export function FreeFlow(){
 if(refPauseTimer.current) clearTimeout(refPauseTimer.current);
 if(refChatTimer.current) clearInterval(refChatTimer.current);
-sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown
+// sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown
+sendSpeechEnd();
 if(currentCue?.type!='chat') return; // Only process if current cue is user input
@@ -789,7 +810,7 @@ export function FreeFlow(){
 recognition.onspeechstart=(e)=>{
 console.log('Speech start:', e);
+refSpeaking.current=true;
 };
 // recognition.onspeechend=(e)=>{
@@ -817,14 +838,17 @@ export function FreeFlow(){
 if(transcript!=finalTranscript){
 refInput.current.value = transcript;
 // clear pause timer
 // console.log('~~~~ clear pause timer');
 if(refPauseTimer.current) clearTimeout(refPauseTimer.current);
 refSpeechPaused.current=false;
 }
-sendOsc(OSC_ADDRESS.SPEECH, 'start');
+if(transcript.length>0){
+sendSpeechStart();
 if(refChatTimer.current) clearInterval(refChatTimer.current);
+}
 sendOscStatus(OSC_ADDRESS.CLIENT_INPUT, `${data.id}#${transcript}`);
 // Send current input via OSC
@@ -839,7 +863,15 @@ export function FreeFlow(){
 if(refCurrentCue.current?.type!='chat') return;
-if(audioUrl) playAudio(audioUrl);
+if(audioUrl){
+if(refSpeaking.current) {
+console.log('Still speaking, do not play AI audio yet');
+sendPrompt();
+return;
+}
+// play ai audio
+playAudio(audioUrl);
+}
 },[audioUrl]);
 useEffect(()=>{
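A minimal sketch of the gate added inside the audioUrl effect, assuming refSpeaking tracks live user speech and that sendPrompt eventually yields a fresh audio URL, which re-runs the effect; the wrapper function and its parameters are illustrative, not repo names:

// Don't play the AI reply over the user; re-request instead and wait for a new URL.
function maybePlayAiAudio(audioUrl, refSpeaking, playAudio, sendPrompt) {
  if (!audioUrl) return;
  if (refSpeaking.current) {
    sendPrompt(); // user still talking: ask again so the reply reflects their latest input
    return;
  }
  playAudio(audioUrl); // safe to play the AI audio now
}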
@@ -867,8 +899,10 @@ export function FreeFlow(){
 text = '';
 break;
 }
-sendOsc(OSC_ADDRESS.SPEECH, 'stop');
 sendOsc(OSC_ADDRESS.INPUT, text);
+sendSpeechEnd();
 },[chatStatus]);
