@@ -1,14 +1,19 @@
import { useEffect, useRef, useState } from "react";
import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';
import { Countdown } from "../comps/timer";
import { Conversation } from "./conversation";
import { Status, useChat } from "../util/useChat";
import { getSummary } from "../util/chat";
import { saveHistory } from "../util/output";
const EmojiType = {
  phone: '📞',
  headphone: '🎧',
  speaker: '🔊',
  chat: '💬',
  chat: '🤖',
  user_input: '💬',
}
export function Flow() {
@@ -16,20 +21,56 @@ export function Flow(){
  const [cuelist, setCuelist] = useState([]);
  const [currentCue, setCurrentCue] = useState(null);
  const [chatWelcome, setChatWelcome] = useState(null);
  const [audioInput, setAudioInput] = useState(false);
  const refTimer = useRef();
  const refAudio = useRef();
  const refInput = useRef();
  const { history, status, reset, sendMessage, setStatus, audioOutput, setAudioOutput, stop: stopChat } = useChat();
  const refCurrentCue = useRef(null);
  const { history, status, reset, sendMessage, setStatus, audioOutput, setAudioOutput, stop: stopChat, audioUrl, } = useChat();
  const {
    transcript,
    finalTranscript,
    listening,
    resetTranscript,
    browserSupportsSpeechRecognition,
    isMicrophoneAvailable,
  } = useSpeechRecognition();
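  // Plays an audio URL: stops any clip that is already playing, honors the current cue's loop flag,
  // advances the flow when playback ends, and restarts the countdown from the clip's duration.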
  function playAudio(url) {
    if (!url) return;
    console.log('Playing audio:', url);
    if (refAudio.current) {
      refAudio.current.pause(); // Stop any currently playing audio
    }
    const audio = new Audio(url);
    audio.loop = refCurrentCue.current?.loop || false; // Set loop if defined in cue
    audio.play().catch(error => {
      console.error("Audio playback error:", error);
    });
    audio.onended = () => {
      onCueEnd();
    }
    refAudio.current = audio; // Store the new audio reference
    audio.addEventListener("loadedmetadata", () => {
      refTimer.current.restart(audio.duration * 1000 || 0);
    });
  }
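  // Drives a single cue: stores it in state and in refCurrentCue, resets the chat for cues up to 4.1,
  // handles 'chat' cues (start the conversation on 4.1, otherwise send the textarea content),
  // and plays the cue's audio file when one is set.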
  function playCue(cue) {
    if (!cue) return;
    console.log('Playing cue:', cue);
    setCurrentCue(cue);
    refCurrentCue.current = cue; // Store the current cue in ref
    if (parseFloat(cue.id) <= 4.1) {
      // Special case for starting a conversation
@@ -37,38 +78,29 @@ export function Flow(){
      reset();
    }
    if (cue.type == 'chat' && cue.id == '4.1') {
    if (cue.type == 'chat') {
      // Special case for starting a conversation
      console.log('Starting conversation...');
      sendMessage();
      setChatWelcome(true);
    }
    if (cue.audioFile) {
      // Stop any currently playing audio
      if (refAudio.current) {
        refAudio.current.pause();
      }
      resetTranscript();
      const audio = new Audio(cue.audioFile);
      if (cue.loop) {
        audio.loop = true;
      if (cue.id == 4.1) {
        console.log('Starting conversation...');
        sendMessage();
        setChatWelcome(true);
      } else {
        const message = refInput.current.value?.trim();
        if (message && message.length > 0) {
          sendMessage(message);
          setChatWelcome(false);
        } else {
          onCueEnd(cue); // if no message, just continue to next cue
        }
      }
    }
      audio.play().catch(error => {
        console.error('Error playing audio:', error);
      });
      audio.onended = () => {
        onCueEnd(cue);
      }
      refAudio.current = audio;
      audio.addEventListener("loadedmetadata", () => {
        refTimer.current.restart(audio.duration * 1000 || 0);
      });
    if (cue.audioFile) {
      playAudio(cue.audioFile);
    }
    if (cue.duration) {
@@ -78,30 +110,18 @@ export function Flow(){
    }
  }
  function onCueEnd(cue) {
  function onCueEnd() {
    if (!cue) return;
    console.log('onCueEnd:', cue.id);
    if (cue.type == 'chat') {
      // sendChatMessage
      const message = refInput.current.value?.trim();
      if (message && message.length > 0) {
        sendMessage(message);
        setChatWelcome(false);
    if (!refCurrentCue.current) return;
    const cue = refCurrentCue.current; // Get the current cue from ref
      } else {
        // if no message, just continue to next cue
        console.log('No message to send, continuing to next cue');
        playCue(cuelist.find(c => c.id === cue.nextcue));
      }
    console.log('onCueEnd:', cue.id);
    } else {
      if (cue.auto) {
        playCue(cuelist.find(c => c.id === cue.nextcue));
      }
    if (cue.auto) {
      playCue(cuelist.find(c => c.id === cue.nextcue));
    }
  }
  function onStop() {
@@ -110,19 +130,55 @@ export function Flow(){
      refAudio.current.pause();
      refAudio.current = null;
    }
    setCurrentCue(null);
    refCurrentCue.current = null; // Clear the current cue reference
    refTimer.current.restart(0);
    stopChat(); // Stop chat processing
  }
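  // Start or stop continuous speech recognition (zh-TW) whenever the audio_input toggle changes.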
  useEffect(() => {
    if (audioInput && isMicrophoneAvailable) {
      SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' }).then(() => {
        console.log("Speech recognition started.");
      }).catch(error => {
        console.error("Error starting speech recognition:", error);
      });
    } else {
      console.log('Stopping speech recognition...');
      SpeechRecognition.stopListening();
    }
  }, [audioInput]);
  useEffect(() => {
    // if (listening) {
    if (currentCue?.type == 'user_input') refInput.current.value = transcript;
    // }
  }, [transcript]);
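  // Play any audio URL produced by the chat hook.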
  useEffect(() => {
    if (audioUrl) playAudio(audioUrl);
  }, [audioUrl]);
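  // React to chat status changes: on SUCCESS, clear the input, reset the transcript,
  // and fetch a conversation summary when the current cue's callback asks for one.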
  useEffect(() => {
    switch (status) {
      case Status.SUCCESS:
        console.log('Success!');
        setStatus(Status.IDLE);
        refInput.current.value = ''
        refInput.current.value = '';
        resetTranscript();
        if (chatWelcome) {
          return;
@@ -130,22 +186,35 @@ export function Flow(){
        // play next cue
        if (currentCue.nextcue != 5 && currentCue.nextcue != 6) { // Q5 & Q6 wait for audio end
          if (currentCue.nextcue) {
            playCue(cuelist.find(c => c.id === currentCue.nextcue));
          } else {
            setCurrentCue(null);
          }
        // if (currentCue.nextcue != 5 && currentCue.nextcue != 6) { // Q5 & Q6 wait for audio end
        // if (currentCue.nextcue) {
        // playCue(cuelist.find(c => c.id === currentCue.nextcue));
        // } else {
        // setCurrentCue(null);
        // }
        // }
        if (refCurrentCue.current.callback == 'summary') {
          // get summary
          console.log('Getting summary...');
          getSummary(history.map(el => `${el.role}: ${el.content}`).join('\n')).then(summary => {
            console.log('Summary:', summary);
          }).catch(error => {
            console.error('Error getting summary:', error);
          });
        }
        break;
      case Status.AUDIO_ENDED:
        console.log('Audio ended');
        if (currentCue.nextcue == 5 || currentCue.nextcue == 6) { // Q5 & Q6 wait for audio end
          playCue(cuelist.find(c => c.id === currentCue.nextcue));
        }
        break;
      // case Status.AUDIO_ENDED:
      // console.log('Audio ended');
      // if (currentCue.nextcue == 5 || currentCue.nextcue == 6) { // Q5 & Q6 wait for audio end
      // playCue(cuelist.find(c => c.id === currentCue.nextcue));
      // }
      // break;
    }
  }, [status]);
@@ -170,10 +239,13 @@ export function Flow(){
      <div className="w-full p-2 flex flex-row justify-center gap-2 *:w-[10vw] *:h-[10vw]">
        <div className="bg-gray-100 text-4xl font-bold mb-4 flex justify-center items-center">
          {currentCue?.name}
          {refCurrentCue.current?.name}
        </div>
        <Countdown ref={refTimer} />
        <button className="!bg-red-300" onClick={onStop}>Stop</button>
        <button className="!bg-yellow-300" onClick={() => {
          saveHistory(history);
        }}>Save</button>
      </div>
      <div className=" max-h-[33vh] overflow-y-auto">
        <table className="border-collapse **:border-y w-full **:p-2">
@@ -218,13 +290,18 @@
        ))}
      </div>
      <textarea ref={refInput} name="message" rows={2}
        className={`w-full border-1 resize-none p-2 ${status != Status.IDLE && status != Status.AUDIO_ENDED ? 'bg-gray-500' : ''}`}
        disabled={status != Status.IDLE && status != Status.AUDIO_ENDED}></textarea>
        className={`w-full border-1 resize-none p-2 ${currentCue?.type != 'user_input' ? 'bg-gray-500' : ''}`}
        disabled={currentCue?.type != 'user_input'}></textarea>
      <div className="flex flex-row justify-end gap-2">
        <span className="flex flex-row gap-1">
          <label>audio_output</label>
          <input type='checkbox' checked={audioOutput} onChange={(e) => setAudioOutput(e.target.checked)} />
        </span>
        <span className="flex flex-row gap-1">
          <label>audio_input</label>
          <input type='checkbox' checked={audioInput} onChange={(e) => setAudioInput(e.target.checked)} />
        </span>
        <div className="rounded-2xl bg-gray-300 self-end px-4 tracking-widest">chat_status={status}</div>
      </div>
    </div>