import { invoke } from '@tauri-apps/api/core';
import { useEffect, useRef, useState } from "react";
import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';
import { Countdown } from "../comps/timer";
import { Status, useChat } from "../util/useChat";
import { getSummary } from "../util/chat";
import { saveHistory } from "../util/output";
import NumPad, { NUMPAD_TYPE } from "../comps/numpad";
import { Light } from "../comps/light";
import { useData } from "../util/useData";
import VoiceAnalysis from "../comps/voiceanalysis";
import { sendOsc, OSC_ADDRESS, updatePrompt, onOscMessageReceived, sendOscStatus } from "../util/osc";
import { DebugControl, TEST_PROMPT } from "../comps/debug";
import { useUser } from "../util/useUser";

const CUELIST_FILE = 'cuelist_1009.json';
const AUDIO_FADE_TIME = 3000; // in ms

const EmojiType = {
  phone: '📞',
  headphone: '🎧',
  speaker: '🔊',
  chat: '🤖',
  chat_end: '🤖',
  user_input: '💬',
  announce: '📢',
};

const ChatStatus = {
  System: 'system',
  User: 'user',
  Processing: 'processing',
  Clear: 'clear',
  End: 'end',
  Playing: 'playing',
  Message: 'message',
};

const Voice = {
  ONYX: 'onyx',
  SHIMMER: 'shimmer',
};

export function FreeFlow() {
  const { data } = useData();
  const [cuelist, setCuelist] = useState([]);
  const [currentCue, setCurrentCue] = useState(null);
  const [nextCue, setNextCue] = useState(null);
  const [localIP, setLocalIP] = useState(null);
  const [chatWelcome, setChatWelcome] = useState(null);
  const [audioInput, setAudioInput] = useState(true);
  const [autoSend, setAutoSend] = useState(true);
  const [chatStatus, setChatStatus] = useState(ChatStatus.System); // System, User, Processing
  const [padInput, setPadInput] = useState(null);
  const [inputReady, setInputReady] = useState(false);

  const {
    userId, setUserId, getFileId, setPassword, reset: resetUser, uploadHistory,
    setSummary, summary, setChoice, choice, getUploadFolder, getDataId, writeSheet,
  } = useUser();

  const refTimer = useRef();
  const refAudio = useRef();
  const refAudioPrompt = useRef();
  const refAudioAnnounce = useRef();
  const refInput = useRef();
  // const refLight = useRef();
  const refPauseTimer = useRef();
  const refSpeechPaused = useRef(false);
  const refChatCueEnd = useRef(false);
  const refContainer = useRef();
  const refCurrentCue = useRef(null);
  const refData = useRef(data);
  const refHintTimeout = useRef();
  const refInputTimeout = useRef();
  const refFadeOutInterval = useRef();
  const refVolDownInterval = useRef();
  const [lastOsc, setLastOsc] = useState();

  const {
    history, status, reset, sendMessage, setStatus,
    audioOutput, setAudioOutput, stop: stopChat, audioUrl,
  } = useChat();

  const {
    transcript,
    finalTranscript,
    listening,
    resetTranscript,
    browserSupportsSpeechRecognition,
    isMicrophoneAvailable,
  } = useSpeechRecognition();

  function resetData() {
    setSummary(null);
    reset();
    resetUser();
    sendOsc(OSC_ADDRESS.CHOICE, 'reset');
    sendOsc(OSC_ADDRESS.SPEECH, 'stop');
    setPadInput();
    setChoice();
  }

  function onOsc(payload) {
    console.log('onOsc', payload);
    const address = payload.addr;
    const message = payload.args[0];
    const params = message.split('#');
    if (params[0] != 'all') {
      console.log('set lastOsc', { address, params });
      setLastOsc(() => ({ address, params }));
      return;
    }
    switch (address) {
      case OSC_ADDRESS.PLAY_CUE:
        setNextCue(() => params[1]);
        break;
      case OSC_ADDRESS.STOP_CUE:
        onStop();
        break;
      case OSC_ADDRESS.RESET_CUE:
        sendOsc(OSC_ADDRESS.STATUS, 'reset');
        onStop();
        resetData();
        break;
    }
    // Handle OSC messages here
  }
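
  // Push the image prompt attached to the latest chat turn to the client
  // listening on OSC_ADDRESS.PROMPT: wrap it with the configured
  // sd_prompt_prefix / sd_prompt_suffix, broadcast it, play a short cue sound,
  // then hand the turn back to the visitor (ChatStatus.User). If the newest
  // history entry carries no prompt, only the status is switched.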
  function sendPrompt() {
    const raw_prompt = history[history.length - 1]?.prompt || '';
    if (raw_prompt && raw_prompt.trim() !== '') {
      const prompt = `${data?.sd_prompt_prefix || ''}${raw_prompt}${data?.sd_prompt_suffix || ''}`;
      updatePrompt(prompt);
      sendOsc(OSC_ADDRESS.PROMPT, prompt);
      // play audio for prompt
      refAudioPrompt.current?.play().catch(error => {
        console.error("Audio prompt playback error:", error);
      });
      refAudioPrompt.current.onended = () => {
        console.log('Audio prompt ended, setting chat status to User');
        setChatStatus(ChatStatus.User); // Set chat status to User after audio ends
        const user_input = history.find(msg => msg.role === 'user');
        if (user_input && user_input.content.trim() !== '') {
          sendOsc(OSC_ADDRESS.STATUS, 'go'); // Send OSC status message
        }
      };
    } else {
      setChatStatus(() => ChatStatus.User); // no prompt to announce, hand the turn back to the user
    }
  }

  function playAudio(url) {
    if (!url) return;
    console.log('Playing audio:', url);

    // 'announce' cues play on their own layer and duck the main audio instead of replacing it
    if (refCurrentCue.current?.layer == 'announce') {
      if (refAudioAnnounce.current) {
        refAudioAnnounce.current.pause(); // Stop any currently playing announce audio
      }
      refAudioAnnounce.current = new Audio(url);
      refAudioAnnounce.current.loop = refCurrentCue.current?.loop || false; // Set loop if defined in cue
      refAudioAnnounce.current.play().catch(error => {
        console.error("Audio announce playback error:", error);
      });
      // lower the main audio
      if (refAudio.current) {
        // fade out current audio
        if (refVolDownInterval.current) {
          clearInterval(refVolDownInterval.current);
        }
        const dest = 0.2;
        let fadeOutInterval = setInterval(() => {
          if (refAudio.current.volume > dest) {
            // Decrease volume gradually
            refAudio.current.volume = Math.max(dest, refAudio.current.volume - (1.0 - dest) / (AUDIO_FADE_TIME / 100));
            // console.log('Fading out audio volume:', refAudio.current.volume);
          } else {
            clearInterval(fadeOutInterval);
          }
        }, 100);
        refVolDownInterval.current = fadeOutInterval;
      }
      return;
    }

    if (refAudioAnnounce.current) {
      refAudioAnnounce.current.pause(); // Stop any currently playing announce audio
      refAudioAnnounce.current = null;
    }
    if (refAudio.current) {
      refAudio.current.pause(); // Stop any currently playing audio
    }

    let audioUrl = url;
    // if (voice == Voice.SHIMMER) audioUrl = url.replace(Voice.ONYX, Voice.SHIMMER);
    // console.log('Using voice:', voice, 'for audio:', audioUrl);
    const audio = new Audio(audioUrl);

    // TODO: if cue end, don't play audio
    if (refCurrentCue.current?.type == 'chat') {
      // if (refChatCueEnd.current) {
      //   console.log('Chat cue has ended, not playing audio:', url);
      //   setChatStatus(ChatStatus.Clear); // Reset chat status to Clear
      //   onCueEnd();
      //   return;
      // }
    }
    audio.loop = refCurrentCue.current?.loop || false; // Set loop if defined in cue

    audio.addEventListener("loadedmetadata", () => {
      if (refCurrentCue.current?.type != 'chat' && refCurrentCue.current?.type != 'user_input') {
        refTimer.current?.restart(audio.duration * 1000 || 0);
        audio.play().catch(error => {
          console.error("Audio playback error:", error);
        });
      } else {
        if (refCurrentCue.current?.type == 'chat') {
          if (refTimer.current?.remainingTime < audio.duration * 1000) {
            console.log('Audio duration is longer than remaining cue time, not playing audio:', url);
            // send prompt instead of playing the reply
            sendPrompt();
            return;
          } else {
            setChatStatus(() => ChatStatus.System);
            audio.play().catch(error => {
              console.error("Audio playback error:", error);
            });
          }
        } else {
          setChatStatus(() => ChatStatus.Playing);
          audio.play().catch(error => {
            console.error("Audio playback error:", error);
          });
        }
      }
    });

    audio.onended = () => {
      if (refCurrentCue.current?.type != 'chat') {
        setChatStatus(ChatStatus.End);
        onCueEnd();
        console.log('Audio ended, ending current cue');
      } else {
        // if history contains user input, send the prompt for it
        sendPrompt();
      }
    };
    refAudio.current = audio; // Store the new audio reference
  }
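
  // Fade both the main and the announce audio down to silence over
  // AUDIO_FADE_TIME, pausing each element once it reaches zero volume, then
  // invoke the optional callback. If nothing is playing, the callback fires
  // immediately.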
  function fadeOutAudio(callback) {
    if (refVolDownInterval.current) clearInterval(refVolDownInterval.current);
    if (refAudio.current || refAudioAnnounce.current) {
      console.log('Fading out audio');
      let audio = refAudio.current;
      let announce = refAudioAnnounce.current;
      if (refFadeOutInterval.current) {
        clearInterval(refFadeOutInterval.current);
        refFadeOutInterval.current = null;
      }
      let fadeOutInterval = setInterval(() => {
        if (audio) {
          if (audio.volume > 0) {
            audio.volume = Math.max(0, audio.volume - 1.0 / (AUDIO_FADE_TIME / 100)); // Decrease volume gradually
          } else {
            audio.pause();
            audio.volume = 0; // Reset volume for next play
          }
        }
        if (announce) {
          if (announce.volume > 0) {
            announce.volume = Math.max(0, announce.volume - 1.0 / (AUDIO_FADE_TIME / 100)); // Decrease volume gradually
          } else {
            // clearInterval(fadeOutInterval);
            announce.pause();
            announce.volume = 0; // Reset volume for next play
          }
        }
        if ((audio == null || audio.volume == 0) && (announce == null || announce.volume == 0)) {
          clearInterval(fadeOutInterval);
          if (callback) callback();
        }
      }, 100); // Decrease volume every 100ms
      refFadeOutInterval.current = fadeOutInterval;
    } else {
      if (callback) callback();
    }
  }

  function playCue(cue) {
    if (!cue) return;
    console.log('Playing cue:', cue);
    // stop audio
    // if (refAudio.current) refAudio.current.pause();
    setCurrentCue(cue);
    refCurrentCue.current = cue; // Store the current cue in ref

    if (parseFloat(cue.id) <= 4.2) {
      // Special case for starting a conversation
      console.log('clear conversation...');
      reset();
      const prompt = `${data?.sd_prompt_prefix || ''}${TEST_PROMPT}${data?.sd_prompt_suffix || ''}`;
      updatePrompt(prompt);
    }

    // clear unity hint
    if (refHintTimeout.current) clearTimeout(refHintTimeout.current);
    sendOsc(OSC_ADDRESS.HINT, ''); // Clear hint message
    sendOsc(OSC_ADDRESS.INPUT, ''); // Clear input message
    sendOsc(OSC_ADDRESS.SPEECH, 'stop');
    setPadInput();

    switch (cue.type) {
      case 'chat':
        // Special case for starting a conversation
        refChatCueEnd.current = false;
        resetTranscript();
        console.log('Starting conversation...');
        setChatStatus(ChatStatus.User);
        // sendMessage(null, false, false, null); // Send initial message with voice
        // setChatWelcome(true);
        // resetData(); // Reset data for new conversation
        break;
      case 'summary': {
        console.log('Getting summary...');
        setChatStatus(ChatStatus.Clear); // Clear chat status while the summary is generated
        let message = refInput.current?.value?.trim() || history.map(el => `${el.role}:${el.content}`).join('\n');
        console.log('Summary input message:', message);
        if (!message || message.length == 0) {
          setSummary();
          console.log('no message input, clear summary');
          onCueEnd(); // End the current cue after getting summary
        } else {
          getSummary(message, data).then(summary_ => {
            console.log('Summary:', summary_);
            onCueEnd(); // End the current cue after getting summary
            setSummary(() => summary_?.result);
            refContainer.current.scrollTop = refContainer.current.scrollHeight; // Scroll to bottom
          }).catch(error => {
            console.error('Error getting summary:', error);
          });
        }
        break;
      }
      case 'user_input':
        setChatStatus(ChatStatus.Message); // Set chat status to Message
        resetTranscript(); // Reset transcript for user input
        break;
      default:
        setChatStatus(ChatStatus.Clear);
        break;
    }

    // if (cue.callback == 'fade_in_light') refLight.current.fadeIn(); // Fade in light for conversation start
    // if (cue.callback == 'fade_out_light') refLight.current.fadeOut(); // Fade out light for conversation end

    if (cue.hint != null && cue.hint_time != null) {
      refHintTimeout.current = setTimeout(() => {
        sendOsc(OSC_ADDRESS.HINT, cue.hint); // Send OSC hint message
      }, cue.hint_time);
    }

    setInputReady(false);
    if (cue.input_time != null) {
      if (refInputTimeout.current) clearTimeout(refInputTimeout.current);
      refInputTimeout.current = setTimeout(() => {
        setInputReady(() => true);
      }, cue.input_time);
    }

    if (cue.audioFile) {
      playAudio(cue.audioFile);
    }
    if (cue.duration) {
      refTimer.current.restart(cue.duration * 1000, () => {
        onCueEnd(cue);
      });
    }

    switch (cue.callback) {
      case 'exportFile':
        sendOsc(OSC_ADDRESS.HINT, '');
        exportFile();
        break;
      // case 'fadeout':
      //   fadeOutAudio();
      //   break;
    }

    // control unity
    if (cue.status && cue.status != 'go') {
      if (cue.status_delay) {
        setTimeout(() => {
          sendOsc(OSC_ADDRESS.STATUS, cue.status); // Send OSC status message
        }, cue.status_delay);
      } else {
        sendOsc(OSC_ADDRESS.STATUS, cue.status); // Send OSC status message
      }
      if (cue.status == 'reset') {
        // refLight.current.set(1);
        resetData();
      }
    }

    if (cue.type == 'chat' || cue.type == 'user_input') {
      sendOsc(OSC_ADDRESS.COUNTDOWN, cue.duration || '0'); // Send OSC countdown message
    } else {
      sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown for non-chat cues
    }

    if (cue.numpad_type == 'choice') {
      setChoice();
    }

    sendOscStatus(OSC_ADDRESS.CLIENT_STATUS, `${data.id}#playcue#${cue.id}`);

    console.log('~~~~ clear pause timer');
    if (refPauseTimer.current) clearTimeout(refPauseTimer.current);
    // refSpeechPaused.current = false;
  }
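
  // Tear down the cue that just finished: stop the countdown, upload the chat
  // history for chat cues, clear the remote hint and speech messages, fall back
  // to the 'save' choice when none was picked on a choice numpad, pause any
  // audio, and auto-advance to cue.nextcue when the cue is marked auto or is
  // driven by the numpad.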
  function onCueEnd() {
    refTimer.current?.stop(); // Stop the timer when cue ends
    if (!refCurrentCue.current) return;
    const cue = refCurrentCue.current; // Get the current cue from ref

    if (cue.type == 'chat') {
      // if (chatStatus == ChatStatus.System) {
      //   console.log('Still talking...');
      //   refChatCueEnd.current = true;
      //   return;
      // }
      console.log('save chat history:', history);
      uploadHistory(history); // Save chat history when cue ends
    }

    sendOsc(OSC_ADDRESS.HINT, ''); // Clear hint message
    sendOsc(OSC_ADDRESS.SPEECH, 'stop');

    if (cue.numpad_type == 'choice') {
      if (!choice) {
        console.log('set default choice to save');
        setChoice('save');
        sendOsc(OSC_ADDRESS.CHOICE, 'save'); // Send OSC save choice message
      } else {
        // sendOsc(OSC_ADDRESS.CHOICE, choice); // Send OSC save choice message
      }
    }

    refAudio.current?.pause(); // Pause any playing audio
    console.log('onCueEnd:', cue.id);
    resetTranscript(); // Reset transcript after cue ends
    sendOscStatus(OSC_ADDRESS.CLIENT_STATUS, `${data.id}#endcue#${cue.id}`);

    if (cue.auto || cue.callback == 'numpad') {
      playCue(cuelist.find(c => c.id === cue.nextcue));
    }
  }

  function onStop() {
    console.log('Stopping current cue');
    if (refAudio.current) {
      refAudio.current.pause();
      refAudio.current = null;
    }
    if (refAudioAnnounce.current) {
      refAudioAnnounce.current.pause();
      refAudioAnnounce.current = null;
    }
    setCurrentCue(null);
    refCurrentCue.current = null; // Clear the current cue reference
    refTimer.current.restart(0);
    stopChat(); // Stop chat processing
  }

  function onNumpad(mess) {
    console.log('onNumPad', mess);
    setPadInput(() => mess);
  }

  function exportFile() {
    const user_input = history.find(msg => msg.role === 'user');
    const default_image = !(user_input && user_input.content.trim() !== '');
    // Send OSC export message
    sendOsc(OSC_ADDRESS.EXPORT, `${getUploadFolder()}#${getDataId()}#${summary || ''}#${getFileId(padInput)}#${choice || ''}#${default_image ? 'default' : 'generated'}`);
    writeSheet();
  }

  useEffect(() => {
    if (!lastOsc) return;
    console.log('Process last OSC:', lastOsc);
    if (lastOsc.params[0] != data.id) return;
    switch (lastOsc.address) {
      case OSC_ADDRESS.PLAY_CUE:
        setNextCue(() => lastOsc.params[1]);
        break;
      case OSC_ADDRESS.STOP_CUE:
        onStop();
        break;
      case OSC_ADDRESS.RESET_CUE:
        sendOsc(OSC_ADDRESS.STATUS, 'reset');
        onStop();
        resetData();
        break;
    }
  }, [lastOsc]);
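
  // React to a completed numpad entry while a 'numpad' cue is active: store the
  // user id, resolve a branch choice (and broadcast it over OSC), or set the
  // password, then jump to the cue selected for that input.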
  useEffect(() => {
    if (padInput == null) return;
    console.log('Numpad input:', padInput);
    if (refCurrentCue.current?.callback != 'numpad') return;
    let cue = refCurrentCue.current;
    let next = cue.nextcue;
    switch (cue.numpad_type) {
      case NUMPAD_TYPE.USERID:
        console.log('set id', padInput);
        setUserId(() => padInput);
        break;
      case NUMPAD_TYPE.CHOICE:
        next = cue.branch[padInput.toString()].nextcue;
        setChoice(() => cue.branch[padInput.toString()].description); // Set choice for user input
        sendOsc(OSC_ADDRESS.CHOICE, cue.branch[padInput.toString()].description);
        break;
      case NUMPAD_TYPE.PASSWORD:
        setPassword(padInput);
        // sendOsc(OSC_ADDRESS.PASSWORD, mess); // Send OSC password message
        // sendOsc(OSC_ADDRESS.CHOICE, choice); // Send OSC save choice message
        break;
    }
    if (next) {
      onStop();
      console.log('Finished entering number, next cue:', next);
      playCue(cuelist.find(c => c.id === next));
    }
  }, [padInput]);

  useEffect(() => {
    if (userId >= 1 && userId <= 24) {
      console.log('User ID set:', userId);
      // playCue(cuelist.find(c => c.id === refCurrentCue.current.nextcue)); // Play cue 5 when userId is set
    }
  }, [userId]);

  function onSpeechEnd() {
    sendOsc(OSC_ADDRESS.SPEECH, 'stop');
    if (currentCue?.type != 'chat') return; // Only process while a chat cue is active
    if (chatStatus != ChatStatus.User) return; // Only process if chat status is User
    console.log('~~~ on speech end, start pause timer', data.speech_idle_time);
    // refSpeechPaused.current = true;
    if (refPauseTimer.current) clearTimeout(refPauseTimer.current);
    refPauseTimer.current = setTimeout(() => {
      console.log('~~~ pause timer ended, process speech');
      // if (refSpeechPaused.current)
      processSpeech();
    }, data.speech_idle_time);
  }

  function processSpeech() {
    if (currentCue?.type != 'chat') return; // Only process while a chat cue is active
    console.log('processSpeech:', finalTranscript);
    if (refChatCueEnd.current) {
      console.log('Chat cue has ended, not processing speech');
      onCueEnd();
      return;
    }
    if (autoSend && transcript.trim().length > 0) {
      console.log('Auto sending transcript:', transcript);
      // onCueEnd();
      const message = refInput.current?.value?.trim();
      if (message && message.length > 0) {
        console.log('Ending conversation with message:', message);
        sendMessage(message, false, false, null);
        setChatWelcome(false);
        setChatStatus(ChatStatus.Processing); // Set chat status to Processing
      }
      resetTranscript();
    }
  }

  function manualSendMessage() {
    if (currentCue?.type != 'chat') return; // Only process while a chat cue is active
    if (chatStatus != ChatStatus.User) return; // Only process if chat status is User
    const message = refInput.current?.value?.trim();
    if (message && message.length > 0) {
      console.log('Manual sending message:', message);
      sendMessage(message, false, false, null);
      setChatWelcome(false);
      setChatStatus(ChatStatus.Processing); // Set chat status to Processing
    }
    resetTranscript();
  }

  useEffect(() => {
    console.log('Final transcript changed:', finalTranscript);
    if (finalTranscript.trim().length > 0) {
      onSpeechEnd();
    }
  }, [finalTranscript]);

  function startRecognition() {
    SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' }).then(() => {
      console.log("Speech recognition started.");
    }).catch(error => {
      console.error("Error starting speech recognition:", error);
    });
  }

  // Mask text before it is rendered on screen
  function blurText(text) {
    if (!text) return '';
    return text.replace(/./g, '*');
  }

  useEffect(() => {
    if (audioInput && isMicrophoneAvailable) {
      startRecognition();
      const recognition = SpeechRecognition.getRecognition();
      recognition.onspeechstart = (e) => {
        console.log('Speech start:', e);
      };
      // recognition.onspeechend = (e) => {
      //   console.log('Speech end:', e);
      //   startRecognition();
      // };
    } else {
      console.log('Stopping speech recognition...');
      SpeechRecognition.stopListening();
    }
  }, [audioInput]);
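
  // Mirror the live speech transcript into the text input while the visitor is
  // speaking (a chat cue in ChatStatus.User, or a user_input cue), keep the
  // pause timer from firing mid-sentence, and forward the partial transcript to
  // the control client over OSC.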
  useEffect(() => {
    if ((currentCue?.type == 'chat' && chatStatus == ChatStatus.User) || currentCue?.type == 'user_input') {
      // console.log('transcript state changed:', transcript);
      if (transcript != finalTranscript) {
        refInput.current.value = transcript;
        // clear pause timer so an unfinished sentence is not sent early
        // console.log('~~~~ clear pause timer');
        if (refPauseTimer.current) clearTimeout(refPauseTimer.current);
        refSpeechPaused.current = false;
      }
      sendOsc(OSC_ADDRESS.SPEECH, 'start');
      sendOscStatus(OSC_ADDRESS.CLIENT_INPUT, `${data.id}#${transcript}`); // Send current input via OSC
    }
  }, [transcript]);

  useEffect(() => {
    if (refCurrentCue.current?.type != 'chat') return;
    if (audioUrl) playAudio(audioUrl);
  }, [audioUrl]);

  useEffect(() => {
    resetTranscript();
    let text = '';
    switch (chatStatus) {
      case ChatStatus.System:
        text = '等我一下\n換我說囉'; // "Wait a moment, now it's my turn to speak"
        break;
      case ChatStatus.User:
        text = '換你說了'; // "Your turn to speak"
        break;
      case ChatStatus.Processing:
        text = '記憶讀取中'; // "Reading memory..."
        break;
      case ChatStatus.Message:
        text = '請留言'; // "Please leave a message"
        break;
      case ChatStatus.Clear:
      default:
        text = '';
        break;
    }
    sendOsc(OSC_ADDRESS.SPEECH, 'stop');
    sendOsc(OSC_ADDRESS.INPUT, text);
  }, [chatStatus]);

  useEffect(() => {
    switch (status) {
      case Status.SUCCESS:
        console.log('Success!');
        setStatus(Status.IDLE);
        refInput.current.value = '';
        resetTranscript();
        refContainer.current.scrollTop = refContainer.current.scrollHeight;
        break;
    }
  }, [status]);

  useEffect(() => {
    if (!nextCue) return;
    console.log('Next cue:', nextCue);
    const next = cuelist.find(c => c.name === nextCue);
    if (currentCue?.fadeout) {
      // fade out audio before switching
      fadeOutAudio(() => {
        console.log('fade out then play next cue:', next);
        playCue(next);
        setNextCue(() => null);
      });
    } else {
      playCue(next);
      setNextCue(null);
    }
  }, [nextCue]);

  useEffect(() => {
    fetch(CUELIST_FILE)
      .then(response => response.json())
      .then(json => {
        console.log('Cuelist data:', json);
        setCuelist(json.cuelist);
      })
      .catch(error => {
        console.error('Error fetching cuelist:', error);
      });
    refAudioPrompt.current = new Audio('assets/sfx/sfx-05.mp3'); // Load audio prompt if available
    onOscMessageReceived(onOsc); // Set up OSC message listener
    invoke('get_ip').then((ip) => {
      console.log('Local IP address:', ip);
      setLocalIP(ip);
    });
  }, []);
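
  // Render. NOTE: part of the original JSX markup was lost; the element types,
  // wrappers, and attributes below (keys, refs, checkbox bindings, NumPad props)
  // are reconstructed from the surviving expressions and from how the refs are
  // used above, so the real layout and styling may differ.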
  return (
    <div>
      <div>PC {data?.id} {localIP || '...'}</div>
      <div>{refCurrentCue.current?.name}</div>
      <Countdown ref={refTimer} /> {/* assumed placement: refTimer.restart()/stop()/remainingTime are used above */}
      {/* */} {/* */} {/* */} {/* */}
      <div>UserId {userId} FileId {getFileId()} Choice {choice || ''}</div>

      <table>
        <thead>
          <tr>
            <th>ID</th>
            <th>Name</th>
            <th>Description</th>
            <th>Type</th>
            <th>Auto</th>
            <th>Audio</th>
            <th>Duration</th>
            <th>Action</th>
          </tr>
        </thead>
        <tbody>
          {/* */}
          {cuelist?.map(({ id, name, description, type, auto, audioFile, ...props }, index) => (
            <tr key={index}>
              <td>{id}</td>
              <td>{name}</td>
              <td>{description}</td>
              <td>{EmojiType[type]}</td>
              <td>{auto ? '⤵️' : ''}</td>
              <td>{audioFile || ""}</td>
              <td>{props.duration || ''}</td>
              <td>{props.callback && `<${props.callback}>`}{props.status && `(${props.status})`}</td>
              {/* */}
            </tr>
          ))}
        </tbody>
      </table>
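
      {/* chat transcript and generated summary; text is masked with blurText before
          display. The wrapper is assumed to carry refContainer, which is scrolled to
          the bottom above. */}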
      <div ref={refContainer}>
        {history?.map((msg, index) => (
          <div key={index}>
            <div>{blurText(msg.content)}</div>
            {msg.prompt &&
              <div>{blurText(msg.prompt)}</div>
            }
          </div>
        ))}
        {summary &&
          <div>{summary}</div>
        }
      </div>
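
      {/* manual text input and numpad (assumed wiring: refInput, manualSendMessage and
          onNumpad are referenced above but their original markup was lost), plus
          audio/auto-send toggles and status readouts */}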
      <textarea ref={refInput} />
      <button onClick={manualSendMessage}>Send</button>
      <NumPad type={currentCue?.numpad_type} onEnter={onNumpad} /> {/* prop names assumed */}

      <label>
        <input type="checkbox" checked={audioOutput} onChange={(e) => setAudioOutput(e.target.checked)} /> audio output
      </label>
      <label>
        <input type="checkbox" checked={audioInput} onChange={(e) => setAudioInput(e.target.checked)} /> audio input
      </label>
      <label>
        <input type="checkbox" checked={autoSend} onChange={(e) => setAutoSend(e.target.checked)} /> auto send
      </label>

      <div>api_status= {status}</div>
      <div>chat_status= {chatStatus}</div>
    </div>
  );
}