import { useEffect, useRef, useState } from "react";
import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';
import { Countdown } from "../comps/timer";
import { Status, useChat } from "../util/useChat";
import { getSummary } from "../util/chat";
import { saveHistory } from "../util/output";
import NumPad from "../comps/numpad";
import { Light } from "../comps/light";
import { useData } from "../util/useData";
import VoiceAnalysis from "../comps/voiceanalysis";
import { sendOsc, OSC_ADDRESS } from "../util/osc";
import { DebugControl } from "../comps/debug";

const EmojiType = {
  phone: '📞',
  headphone: '🎧',
  speaker: '🔊',
  chat: '🤖',
  user_input: '💬',
};

const ChatStatus = {
  System: 'system',
  User: 'user',
  Processing: 'processing',
};

const Voice = {
  ONYX: 'onyx',
  SHIMMER: 'shimmer',
};

export function FreeFlow() {
  const { data } = useData();
  const [cuelist, setCuelist] = useState([]);
  const [currentCue, setCurrentCue] = useState(null);
  const [chatWelcome, setChatWelcome] = useState(null);
  const [audioInput, setAudioInput] = useState(true);
  const [autoSend, setAutoSend] = useState(true);
  const [userId, setUserId] = useState();
  const [summary, setSummary] = useState(null);
  const [voice, setVoice] = useState(Voice.ONYX);
  const [chatStatus, setChatStatus] = useState(ChatStatus.System); // System, User, Processing

  const refTimer = useRef();
  const refAudio = useRef();
  const refInput = useRef();
  const refLight = useRef();
  const refContainer = useRef();
  const refCurrentCue = useRef(null);

  const {
    history,
    status,
    reset,
    sendMessage,
    setStatus,
    audioOutput,
    setAudioOutput,
    stop: stopChat,
    audioUrl,
  } = useChat();

  const {
    transcript,
    finalTranscript,
    listening,
    resetTranscript,
    browserSupportsSpeechRecognition,
    isMicrophoneAvailable,
  } = useSpeechRecognition();

  function resetData() {
    setSummary(null);
    reset();
  }

  function playAudio(url) {
    if (!url) return;
    console.log('Playing audio:', url);
    if (refAudio.current) {
      refAudio.current.pause(); // Stop any currently playing audio
    }
    // Audio files are rendered per voice; swap the voice name in the URL if needed
    let audioUrl = url;
    if (voice === Voice.SHIMMER) audioUrl = url.replace(Voice.ONYX, Voice.SHIMMER);
    console.log('Using voice:', voice, 'for audio:', audioUrl);

    const audio = new Audio(audioUrl);
    audio.loop = refCurrentCue.current?.loop || false; // Loop if the cue asks for it
    audio.play().catch(error => {
      console.error("Audio playback error:", error);
    });
    audio.onended = () => {
      if (refCurrentCue.current?.type !== 'chat') onCueEnd();
      else setChatStatus(ChatStatus.User); // Hand the turn back to the user once the reply finishes
    };
    refAudio.current = audio; // Store the new audio reference
    audio.addEventListener("loadedmetadata", () => {
      if (refCurrentCue.current?.type !== 'chat' && refCurrentCue.current?.type !== 'user_input') {
        refTimer.current?.restart(audio.duration * 1000 || 0);
      } else {
        setChatStatus(() => ChatStatus.System);
      }
    });
  }

  function playCue(cue) {
    if (!cue) return;
    console.log('Playing cue:', cue);
    setCurrentCue(cue);
    refCurrentCue.current = cue; // Keep the current cue in a ref for audio callbacks

    if (parseFloat(cue.id) <= 4.2) { // Cues up to 4.2 start fresh: clear the previous conversation
      console.log('clear conversation...');
      reset();
    }

    switch (cue.type) {
      case 'chat': // Start a conversation
        resetTranscript();
        console.log('Starting conversation...');
        sendMessage(null, false, false, voice); // Send the initial message with the selected voice
        setChatWelcome(true);
        resetData(); // Reset data for the new conversation
        break;
      case 'chat_end': {
        const message = refInput.current?.value?.trim();
        console.log('Ending conversation with message:', message);
        sendMessage(message, false, true, voice);
        setChatWelcome(false);
        break;
      }
      case 'summary':
        console.log('Getting summary...');
        getSummary(history.map(el => `${el.role}:${el.content}`).join('\n'), data)
          .then(summary => {
            console.log('Summary:', summary);
            onCueEnd(); // End the current cue once the summary arrives
            setSummary(summary);
            refContainer.current.scrollTop = refContainer.current.scrollHeight; // Scroll to bottom
          })
          .catch(error => {
            console.error('Error getting summary:', error);
          });
        break;
      case 'user_input':
        console.log('User input cue, setting chat status to User');
        setChatStatus(ChatStatus.User); // Open the floor for user input
        resetTranscript();
        break;
    }

    if (cue.audioFile) {
      playAudio(cue.audioFile);
    }
    if (cue.duration) {
      refTimer.current.restart(cue.duration * 1000, () => {
        onCueEnd(cue);
      });
    }

    // Control Unity via OSC
    if (cue.status) {
      sendOsc(OSC_ADDRESS.STATUS, cue.status); // Report the cue's status
    }
    if (cue.type === 'chat' || cue.type === 'user_input') {
      sendOsc(OSC_ADDRESS.COUNTDOWN, cue.duration || '0'); // Show a countdown while the user can speak
    } else {
      sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset the countdown for non-interactive cues
    }
  }

  function onCueEnd() {
    refTimer.current?.stop(); // Stop the timer when the cue ends
    refAudio.current?.pause(); // Pause any playing audio
    if (!refCurrentCue.current) return;

    const cue = refCurrentCue.current;
    console.log('onCueEnd:', cue.id);
    if (cue.callback === 'start_conversation') refLight.current.fadeOut(); // Dim the light when the conversation starts
    if (cue.callback === 'summary') refLight.current.fadeIn(); // Bring the light back up after the summary
    resetTranscript(); // Reset the transcript after the cue ends
    if (cue.auto) {
      playCue(cuelist.find(c => c.id === cue.nextcue)); // Auto-advance to the next cue
    }
  }

  function onStop() {
    console.log('Stopping current cue');
    if (refAudio.current) {
      refAudio.current.pause();
      refAudio.current = null;
    }
    setCurrentCue(null);
    refCurrentCue.current = null; // Clear the current cue reference
    refTimer.current?.restart(0);
    stopChat(); // Stop chat processing
  }

  function onNumpad(mess) {
    if (refCurrentCue.current?.callback !== 'numpad') return;
    console.log('Numpad input:', mess);
    setUserId(() => mess);
  }

  function saveImage() {
    sendOsc('/export', 'output/test.png'); // Ask Unity to export the current frame
  }

  useEffect(() => {
    if (userId >= 1 && userId <= 24) {
      console.log('User ID set:', userId);
      playCue(cuelist.find(c => c.id === currentCue?.nextcue)); // Advance once a valid user ID is entered
    }
  }, [userId]);

  function onSpeechEnd() {
    console.log('onSpeechEnd:', finalTranscript);
    if (currentCue?.type !== 'chat') return; // Only auto-send while a chat cue is active
    if (autoSend && transcript.trim().length > 0) {
      console.log('Auto sending transcript:', transcript);
      const message = refInput.current?.value?.trim();
      if (message && message.length > 0) {
        console.log('Sending message:', message);
        sendMessage(message, false, false, voice);
        setChatWelcome(false);
        setChatStatus(ChatStatus.Processing); // Block further input while the reply is generated
      }
      resetTranscript();
    }
  }

  useEffect(() => {
    onSpeechEnd(); // A change in finalTranscript means the user stopped speaking
  }, [finalTranscript]);

  useEffect(() => {
    if (audioInput && isMicrophoneAvailable) {
      SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' })
        .then(() => {
          console.log("Speech recognition started.");
        })
        .catch(error => {
          console.error("Error starting speech recognition:", error);
        });
      const recognition = SpeechRecognition.getRecognition();
      recognition.onspeechstart = (e) => {
        console.log('Sound start:', e);
      };
    } else {
      console.log('Stopping speech recognition...');
      SpeechRecognition.stopListening();
    }
  }, [audioInput]);

  useEffect(() => {
    // Mirror the live transcript into the input box while the user has the floor
    if ((currentCue?.type === 'chat' && chatStatus === ChatStatus.User) || currentCue?.type === 'user_input') {
      refInput.current.value = transcript;
    }
  }, [transcript]);

  useEffect(() => {
    if (audioUrl) playAudio(audioUrl);
  }, [audioUrl]);

  useEffect(() => {
    resetTranscript();
    sendOsc(OSC_ADDRESS.INPUT, chatStatus); // Tell Unity whose turn it is
  }, [chatStatus]);

  useEffect(() => {
    switch (status) {
      case Status.SUCCESS:
        console.log('Success!');
        setStatus(Status.IDLE);
        refInput.current.value = '';
        resetTranscript();
        refContainer.current.scrollTop = refContainer.current.scrollHeight; // Scroll to bottom
        break;
    }
  }, [status]);

  useEffect(() => {
    fetch('/cuelist_free.json')
      .then(response => response.json())
      .then(data => {
        console.log('Cuelist data:', data);
        setCuelist(data.cuelist);
      })
      .catch(error => {
        console.error('Error fetching cuelist:', error);
      });
  }, []);

  // Standard react-speech-recognition guard; reconstructed here since
  // browserSupportsSpeechRecognition is destructured above but the original
  // render markup was damaged.
  if (!browserSupportsSpeechRecognition) {
    return <span>Browser doesn't support speech recognition.</span>;
  }

  // NOTE: The JSX below is a reconstruction — the original tags were lost.
  // Expressions and text content are original; element types, class names,
  // and prop wiring are best-guess assumptions.
  return (
    <div className="freeflow">
      <Light ref={refLight} />
      <Countdown ref={refTimer} />

      <h2>{refCurrentCue.current?.name}</h2>

      <table className="cuelist">
        <thead>
          <tr>
            <th>ID</th>
            <th>Name</th>
            <th>Description</th>
            <th>Type</th>
            <th>Auto</th>
            <th>Audio / Duration</th>
          </tr>
        </thead>
        <tbody>
          {cuelist?.map(({ id, name, description, type, auto, audioFile, ...props }, index) => (
            // Clicking a row fires that cue (handler wiring assumed)
            <tr key={id} onClick={() => playCue(cuelist[index])}>
              <td>{id}</td>
              <td>{name}</td>
              <td>{description}</td>
              <td>{EmojiType[type]}</td>
              <td>{auto ? '⤵️' : ''}</td>
              <td>{audioFile || props.duration} {props.callback && `<${props.callback}>`}</td>
            </tr>
          ))}
        </tbody>
      </table>
      <div className="chat" ref={refContainer}>
        {history?.map((msg, index) => (
          <div key={index} className={msg.role}>
            <div className="content">{msg.content}</div>
            {msg.prompt &&
              <div className="prompt">{msg.prompt}</div>
            }
          </div>
        ))}
        {summary &&
          <div className="summary">{summary?.result}</div>
        }
      </div>

      {/* Live input: mirrors the speech transcript, also editable by hand */}
      <textarea ref={refInput} />
      <NumPad onInput={onNumpad} /> {/* prop name assumed */}
      <VoiceAnalysis />
      <label>
        <input type="checkbox" checked={audioOutput} onChange={e => setAudioOutput(e.target.checked)} />
        audio output
      </label>
      <label>
        <input type="checkbox" checked={audioInput} onChange={e => setAudioInput(e.target.checked)} />
        audio input
      </label>
      <label>
        <input type="checkbox" checked={autoSend} onChange={e => setAutoSend(e.target.checked)} />
        auto send
      </label>

      {/* Debug helpers — exact placement and props were lost, wiring assumed */}
      <DebugControl />
      <select value={voice} onChange={e => setVoice(e.target.value)}>
        <option value={Voice.ONYX}>{Voice.ONYX}</option>
        <option value={Voice.SHIMMER}>{Voice.SHIMMER}</option>
      </select>
      <button onClick={onStop}>stop</button>
      <button onClick={saveImage}>save image</button>

      <div>api_status= {status}</div>
      <div>chat_status= {chatStatus}</div>
    </div>
  );
}
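/*
 * For reference, the shape of /cuelist_free.json this component expects,
 * inferred from the fields read above (id, name, description, type, auto,
 * audioFile, duration, loop, status, callback, nextcue). `type` takes the
 * EmojiType keys plus 'chat_end' and 'summary'; `callback` values seen in the
 * code are 'numpad', 'start_conversation', and 'summary'. The values in this
 * sample are hypothetical — only the field names come from the code:
 *
 * {
 *   "cuelist": [
 *     {
 *       "id": "1",
 *       "name": "Welcome",
 *       "description": "Opening announcement",
 *       "type": "speaker",
 *       "auto": true,
 *       "audioFile": "audio/welcome_onyx.mp3",
 *       "duration": 12,
 *       "loop": false,
 *       "status": "intro",
 *       "callback": "numpad",
 *       "nextcue": "2"
 *     }
 *   ]
 * }
 */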