parent 90afe5725f
commit a894548c78
21 changed files with 766 additions and 381 deletions
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,52 @@
{
  "cuelist": [
    {
      "id": 1,
      "name": "Q1",
      "type": "space",
      "description": "Annonce",
      "audioFile": "assets/q1.mp3",
      "loop": true
    },
    {
      "id": 2,
      "name": "Q2",
      "type": "headphone",
      "description": "Guide for drink",
      "auto": true,
      "audioFile": "assets/q2.mp3"
    },
    {
      "id": 3,
      "name": "Q3",
      "description": "Guide for phone",
      "type": "headphone",
      "auto": false,
      "audioFile": "assets/q3.mp3"
    },
    {
      "id": 4,
      "name": "Q4",
      "type": "phone",
      "description": "Guide to construct scene",
      "auto": true,
      "duration": 60
    },
    {
      "id": 5,
      "name": "Q5",
      "type": "phone",
      "description": "Guide to call",
      "duration": 60,
      "auto": true
    },
    {
      "id": 6,
      "name": "Q6",
      "type": "space",
      "description": "Ending",
      "audioFile": "assets/q6.mp3"
    }
  ]
}
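A minimal sketch of how the cue schema above could be consumed on the front end. The import path and helper names are assumptions for illustration; only the field names (id, name, type, description, audioFile, auto, loop, duration) come from the JSON in this commit.

// Hypothetical helpers, not part of this commit.
import cueData from '../assets/cuelist.json';

export function getCue(id) {
  // look up a cue by its numeric id
  return cueData.cuelist.find(cue => cue.id === id);
}

export function describeCue(cue) {
  // assumed meaning of the auto flag: auto cues advance on their own, the rest wait for the operator
  const mode = cue.auto ? 'auto' : 'manual';
  return `${cue.name} [${cue.type}/${mode}]: ${cue.description}`;
}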
@@ -1 +1,12 @@
@import "tailwindcss";

#root {
  @apply flex flex-col h-screen;
}
.checkbox {
  @apply flex flex-row items-center gap-1;
}

main {
  @apply flex-1 flex flex-col gap-4 justify-start p-8 overflow-y-auto;
}
Before Width: | Height: | Size: 4.0 KiB
@@ -0,0 +1,64 @@
import { useEffect, useState, useRef, forwardRef, useImperativeHandle } from "react"

const Update_Interval = 1000; // 1 second

export const Countdown = forwardRef(({time, callback, auto, ...props}, ref) => {

  const refTime = useRef(time);
  const refInterval = useRef(null);
  const refDisplay = useRef();

  function restart(newTime, callback_func) {

    console.log('restart countdown:', newTime, 'ms');

    if(refInterval.current) {
      clearInterval(refInterval.current);
    }

    refTime.current = newTime || time;
    refInterval.current = setInterval(() => {
      if(refTime.current > 0){
        refTime.current -= Update_Interval;
        // console.log('Countdown:', refTime.current/1000);
      }else{
        refTime.current = 0;
        clearInterval(refInterval.current);

        if(typeof callback_func === 'function'){
          callback_func();
        }else{
          if(callback) callback();
        }
      }

      if(refDisplay.current) refDisplay.current.innerText = (refTime.current/1000).toFixed(0);

    }, Update_Interval);

  }

  useEffect(() => {

    if(auto)
      restart(time);

    // return () => {
    //   clearInterval(refInterval.current);
    // }

  }, [time, callback]);

  useImperativeHandle(ref, () => ({
    restart,
  }));

  return (
    <div className="text-3xl rounded-full bg-gray-200 w-[10vw] h-[10vw] flex justify-center items-center"
      ref={refDisplay}></div>
  )

});
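The component exposes restart() through useImperativeHandle, so a parent can re-arm the countdown via a ref. A minimal usage sketch, assuming the same import paths as the rest of this commit; the Conversation page below uses exactly this pattern.

// Hypothetical parent; only the ref/restart pattern comes from the component above.
import { useRef } from "react";
import { Countdown } from "./comps/timer";
import { Prompt_Interval } from "./util/constant";

export function TimerDemo() {
  const refTimer = useRef(null);

  return (
    <>
      <Countdown ref={refTimer} time={Prompt_Interval} auto={false} callback={() => console.log("countdown finished")} />
      <button onClick={() => refTimer.current.restart(Prompt_Interval)}>Re-arm</button>
    </>
  );
}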
@@ -1,10 +1,24 @@
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import { BrowserRouter, Routes, Route } from "react-router";

import './index.css'
import App from './App.jsx'
import { Settings } from './pages/settings.jsx';
import { Flow } from './pages/flow.jsx';
import { Conversation } from './pages/conversation.jsx';

createRoot(document.getElementById('root')).render(
  <StrictMode>
    <App />

    <BrowserRouter>
      {/* <App /> */}
      <App />
      <Routes>
        <Route path="/" element={<Conversation />} />
        <Route path="/flow" element={<Flow />} />
        <Route path="/settings" element={<Settings />} />
      </Routes>
    </BrowserRouter>
  </StrictMode>,
)
@@ -0,0 +1,401 @@
import { useEffect, useRef, useState } from 'react';
import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';
import { gsap } from "gsap";
import { SplitText } from 'gsap/SplitText';

import Input from '../comps/input';
import { Countdown } from '../comps/timer';

import { Prompt_Count, Prompt_Interval } from '../util/constant';
import { sendChatMessage } from '../util/chat';
import { textToSpeech } from '../util/tts';

gsap.registerPlugin(SplitText);

export function Conversation() {

  const [history, setHistory] = useState([]);
  const [processing, setProcessing] = useState(false);
  const [showProcessing, setShowProcessing] = useState(false);
  const [audioOutput, setAudioOutput] = useState(false);

  const refPromptCount = useRef(0);
  const [useTimer, setUseTimer] = useState(false);

  const [prompt, setPrompt] = useState([]);

  const refHistoryContainer = useRef(null);
  const refPrompContainer = useRef(null);
  const refInput = useRef(null);
  const refTimer = useRef(null);

  const {
    transcript,
    finalTranscript,
    listening,
    resetTranscript,
    browserSupportsSpeechRecognition,
    isMicrophoneAvailable,
  } = useSpeechRecognition();

  function restart(){
    console.log("Restarting...");
    setHistory([]);
    setPrompt([]);
    refInput.current.value = '';
    resetTranscript();
    SpeechRecognition.stopListening();

    // create start message
    const startTime = Date.now();
    setProcessing(true);
    sendChatMessage([]).then(response => {
      if (!response.ok) {
        throw new Error('Network response was not ok');
      }

      let data = response;
      console.log('get reply: ', data, new Date(Date.now()-startTime).toISOString().slice(11, 19));

      // add to history
      setHistory(() => [{
        role: 'assistant',
        content: data.output_text,
      }]);
      setPrompt(() => [
        data.prompt,
      ]);

      // tts
      if(!audioOutput) {
        setProcessing(false);
      }else{
        console.log('create speech:', data.output_text);
        textToSpeech(data.output_text).then(audioUrl => {
          const audio = new Audio(audioUrl);

          console.log('play audio...', new Date(Date.now()-startTime).toISOString().slice(11, 19));

          audio.play().catch(error => {
            console.error('Audio playback failed:', error);
          });

          setProcessing(false);

        }).catch(error => {
          console.error('TTS error:', error);
        });
      }

    });
  }

  function toggleAudio(value) {
    console.log("onclickAudio", listening, browserSupportsSpeechRecognition, isMicrophoneAvailable);
    if(!browserSupportsSpeechRecognition) {
      console.warn("Browser does not support speech recognition.");
      return;
    }
    if(!isMicrophoneAvailable) {
      console.warn("Microphone is not available.");
      return;
    }

    if(!listening && value){
      SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' }).then(() => {
        console.log("Speech recognition started.");
      }).catch(error => {
        console.error("Error starting speech recognition:", error);
      });
    }else{
      SpeechRecognition.stopListening();
    }
  }

  function onTimerEnd(){
    if(!useTimer) return;

    refPromptCount.current += 1;
    if(refPromptCount.current > Prompt_Count) {
      console.warn("Maximum prompt count reached, stopping timer.");
      return;
    }
  }

  function onSubmit(event) {
    event.preventDefault();

    if(processing) {
      console.warn("Already processing, ignoring submission.");
      return;
    }

    const input = event.target.elements.input.value;

    // check the input before toggling the processing flags, so an empty
    // submission does not leave the UI stuck in the processing state
    if(!input.trim()?.length) {
      console.warn("Input is empty, ignoring submission.");
      return;
    }

    setProcessing(true);
    setShowProcessing(true);

    const startTime = Date.now();
    console.log("Submit reply:", input);

    sendChatMessage([
      ...history,
      {
        role: 'user',
        content: input,
      }
    ]).then(response => {
      if (!response.ok) {
        // clear the busy flag before bailing out
        setProcessing(false);
        throw new Error('Network response was not ok');
      }

      let data = response;
      console.log('get reply: ', data, new Date(Date.now()-startTime).toISOString().slice(11, 19));

      // add to history
      setPrompt([
        ...prompt,
        data.prompt,
      ]);

      if(!audioOutput) {
        setHistory(prev => [...prev, {
          role: 'assistant',
          content: data.output_text,
        }]);

        setProcessing(false);
        setShowProcessing(false);
      }else{
        // tts
        console.log('create speech:', data.output_text);
        textToSpeech(data.output_text).then(audioUrl => {
          const audio = new Audio(audioUrl);

          console.log('play audio...', new Date(Date.now()-startTime).toISOString().slice(11, 19));
          setShowProcessing(false);
          setHistory(prev => [...prev, {
            role: 'assistant',
            content: data.output_text,
          }]);

          audio.play().catch(error => {
            console.error('Audio playback failed:', error);
          });

          audio.addEventListener('ended', () => {
            console.log('Audio playback ended');
            setProcessing(() => false);
          });

        }).catch(error => {
          console.error('TTS error:', error);
          setProcessing(() => false);
        });
      }

    });

    // clear input
    event.target.elements.input.value = '';
    // setProcessing(()=>false);
    setHistory(prev => [...prev, {
      role: 'user',
      content: input,
    }]);

  }

  useEffect(() => {
    refHistoryContainer.current.scrollTop = refHistoryContainer.current.scrollHeight;

    // Animate the history items
    if(history.length === 0) return;

    let last_item = document.querySelector('.last_history');

    if(!last_item) return;
    if(last_item.classList.contains('user')) return;
    console.log('last_item', last_item);

    let split = SplitText.create(last_item, {
      type: "chars",
      aria: 'hidden'
    });
    console.log('split', split);
    gsap.fromTo(split.chars, {
      opacity: 0,
    }, {
      opacity: 1,
      y: 0,
      duration: 0.5,
      ease: "steps(1)",
      stagger: 0.1,
      onComplete: () => {
        if(useTimer) {
          refTimer.current.restart(Prompt_Interval);
        }
      }
    });

  }, [history]);

  useEffect(() => {
    refPrompContainer.current.scrollTop = refPrompContainer.current.scrollHeight;
  }, [prompt]);

  useEffect(() => {
    if(listening){
      refInput.current.value = transcript;
    }
  }, [transcript]);

  useEffect(() => {
    if(finalTranscript){
      refInput.current.value = finalTranscript;
      console.log('Final Transcript:', finalTranscript);

      if(processing) return; // Prevent submission if already processing

      // Submit the final transcript
      onSubmit({
        preventDefault: () => {},
        target: {
          elements: {
            input: refInput.current
          }
        }
      });
      resetTranscript(); // Clear the transcript after submission
    }
  }, [finalTranscript]);

  useEffect(() => {
    console.log('window.SpeechRecognition=', window.SpeechRecognition || window.webkitSpeechRecognition);

    // if (navigator.getUserMedia){
    //   navigator.getUserMedia({audio:true},
    //     function(stream) {
    //       // start_microphone(stream);
    //       console.log('Microphone access granted.');
    //     },
    //     function(e) {
    //       alert('Error capturing audio.');
    //     }
    //   );
    // } else { alert('getUserMedia not supported in this browser.'); }

  }, []);

  return (
    <main className=''>
      <div className='flex flex-row items-center justify-between'>
        <Input />
        <span className='checkbox'>
          <input
            type="checkbox"
            id="use_timer"
            name="use_timer"
            checked={useTimer}
            onChange={(e) => setUseTimer(e.target.checked)}/>
          <label htmlFor="use_timer">useTimer</label>
        </span>
        <Countdown ref={refTimer} time={Prompt_Interval} callback={onTimerEnd} />
      </div>
      <div ref={refPrompContainer} className='flex-1 flex flex-col gap-2 border-4 overflow-y-auto'>
        {prompt?.length==0 ? (
          <div className='p-2 border-b border-gray-200'>Prompt will appear here...</div>
        ):(
          prompt?.map((item, index) => (
            <div key={index} className='p-2 border-b border-gray-500 bg-pink-200'>
              <p className='text-lg'>{item}</p>
            </div>
          ))
        )}
      </div>
      <div ref={refHistoryContainer} className='flex-1 overflow-y-auto'>
        <div className='flex flex-col justify-end gap-2'>
          {history?.length==0 && !showProcessing ? (
            <div className='p-2'>History will appear here...</div>
          ):(
            history.map((item, index) => (
              <div key={index} className={`p-2 rounded border-4 ${item.role === 'user' ? 'bg-gray-100' : 'bg-yellow-100'}`}>
                <p className={`text-lg whitespace-pre-wrap history_item ${index==history?.length-1 && item.role!='user' && 'last_history'}`}>{item.content}</p>
              </div>
            ))
          )}
          {showProcessing && (
            <div className='p-2 rounded border-4 bg-yellow-100'>
              <span className='animate-pulse'>...</span>
            </div>
          )}
        </div>
      </div>
      <div className='flex flex-col gap-2'>
        <div className='flex flex-row justify-end gap-2 '>
          <button className='self-end' onClick={restart}>Restart</button>
          <span className='flex-1'></span>
          <button className='' onClick={()=>{
            refInput.current.value = '';
            resetTranscript();
          }}>clear</button>
          <span className='checkbox'>
            <input
              type="checkbox"
              id="audio_input"
              name="audio_input"
              checked={listening}
              onChange={(e)=>toggleAudio(e.target.checked)}
            />
            <label htmlFor="audio_input">Audio Input</label>
          </span>
          <span className='checkbox'>
            <input type="checkbox" id="audio_output" name="audio_output" checked={audioOutput} onChange={(e) => setAudioOutput(e.target.checked)} />
            <label htmlFor="audio_output">Audio Output</label>
          </span>
        </div>
        <form className='flex flex-col justify-center *:border-4 gap-4' onSubmit={onSubmit} autoComplete="off">
          <textarea ref={refInput} id="input" name="input" required className='self-stretch p-2 resize-none' rows={3} autoComplete="off"/>
          <button type="submit" className='uppercase' disabled={processing}>Send</button>
        </form>
      </div>
    </main>
  )
}
@@ -0,0 +1,9 @@
export function Settings(){

  return (
    <div className="flex flex-col items-center justify-center h-full">
      <h1 className="text-4xl font-bold mb-4">Settings</h1>
      <p className="text-lg">This page is under construction.</p>
    </div>
  );
}
@@ -0,0 +1,4 @@
export const Prompt_Count = 3;        // number of prompts
export const Prompt_Interval = 10000; // ms

export const Call_Interval = 30000;   // ms
@@ -0,0 +1,10 @@
import { invoke } from '@tauri-apps/api/core';

export async function sendOsc(key, message){
  await invoke('send_osc_message', {
    key: key,
    message: message,
    host: `0.0.0.0:0`,
    target: '127.0.0.1:8787',
  });
}
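A quick usage sketch for sendOsc. The OSC address and payload below are made up for illustration, and the Tauri backend must register a send_osc_message command (not shown in this diff) for invoke to resolve.

// Hypothetical caller; only sendOsc's signature comes from the file above.
import { sendOsc } from './util/osc';

async function triggerCue(cueId) {
  try {
    // address and value are illustrative, not part of the commit
    await sendOsc('/cue/go', String(cueId));
  } catch (err) {
    console.error('OSC send failed:', err);
  }
}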