add flow page

main
reng 6 months ago
parent 90afe5725f
commit a894548c78
  1. 54
      vite/package-lock.json
  2. 1
      vite/package.json
  3. BIN
      vite/public/assets/q1.mp3
  4. BIN
      vite/public/assets/q2.mp3
  5. BIN
      vite/public/assets/q3.mp3
  6. BIN
      vite/public/assets/q6.mp3
  7. 52
      vite/public/cuelist.json
  8. 11
      vite/src/App.css
  9. 357
      vite/src/App.jsx
  10. 1
      vite/src/assets/react.svg
  11. 19
      vite/src/comps/input.jsx
  12. 64
      vite/src/comps/timer.jsx
  13. 2
      vite/src/index.css
  14. 14
      vite/src/main.jsx
  15. 401
      vite/src/pages/conversation.jsx
  16. 126
      vite/src/pages/flow.jsx
  17. 9
      vite/src/pages/settings.jsx
  18. 10
      vite/src/util/chat.js
  19. 4
      vite/src/util/constant.js
  20. 10
      vite/src/util/osc.js
  21. 2
      vite/src/util/system_prompt.js

@ -14,6 +14,7 @@
"gsap": "^3.13.0",
"react": "^19.1.0",
"react-dom": "^19.1.0",
"react-router": "^7.6.2",
"react-speech-recognition": "^4.0.1",
"tailwindcss": "^4.1.8"
},
@ -1787,6 +1788,14 @@
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
"dev": true
},
"node_modules/cookie": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-1.0.2.tgz",
"integrity": "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==",
"engines": {
"node": ">=18"
}
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@ -2806,6 +2815,27 @@
"react": "^19.1.0"
}
},
"node_modules/react-router": {
"version": "7.6.2",
"resolved": "https://registry.npmjs.org/react-router/-/react-router-7.6.2.tgz",
"integrity": "sha512-U7Nv3y+bMimgWjhlT5CRdzHPu2/KVmqPwKUCChW8en5P3znxUqwlYFlbmyj8Rgp1SF6zs5X4+77kBVknkg6a0w==",
"dependencies": {
"cookie": "^1.0.1",
"set-cookie-parser": "^2.6.0"
},
"engines": {
"node": ">=20.0.0"
},
"peerDependencies": {
"react": ">=18",
"react-dom": ">=18"
},
"peerDependenciesMeta": {
"react-dom": {
"optional": true
}
}
},
"node_modules/react-speech-recognition": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/react-speech-recognition/-/react-speech-recognition-4.0.1.tgz",
@ -2869,6 +2899,11 @@
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz",
"integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA=="
},
"node_modules/set-cookie-parser": {
"version": "2.7.1",
"resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz",
"integrity": "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ=="
},
"node_modules/shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
@ -4047,6 +4082,11 @@
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
"dev": true
},
"cookie": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-1.0.2.tgz",
"integrity": "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="
},
"cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@ -4683,6 +4723,15 @@
"scheduler": "^0.26.0"
}
},
"react-router": {
"version": "7.6.2",
"resolved": "https://registry.npmjs.org/react-router/-/react-router-7.6.2.tgz",
"integrity": "sha512-U7Nv3y+bMimgWjhlT5CRdzHPu2/KVmqPwKUCChW8en5P3znxUqwlYFlbmyj8Rgp1SF6zs5X4+77kBVknkg6a0w==",
"requires": {
"cookie": "^1.0.1",
"set-cookie-parser": "^2.6.0"
}
},
"react-speech-recognition": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/react-speech-recognition/-/react-speech-recognition-4.0.1.tgz",
@ -4731,6 +4780,11 @@
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz",
"integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA=="
},
"set-cookie-parser": {
"version": "2.7.1",
"resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz",
"integrity": "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ=="
},
"shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",

@ -16,6 +16,7 @@
"gsap": "^3.13.0",
"react": "^19.1.0",
"react-dom": "^19.1.0",
"react-router": "^7.6.2",
"react-speech-recognition": "^4.0.1",
"tailwindcss": "^4.1.8"
},

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

@ -0,0 +1,52 @@
{
"cuelist": [
{
"id": 1,
"name": "Q1",
"type": "space",
"description": "Annonce",
"audioFile": "assets/q1.mp3",
"loop": true
},
{
"id": 2,
"name": "Q2",
"type": "headphone",
"description": "Guide for drink",
"auto": true,
"audioFile": "assets/q2.mp3"
},
{
"id": 3,
"name": "Q3",
"description": "Guide for phone",
"type": "headphone",
"auto": false,
"audioFile": "assets/q3.mp3"
},
{
"id": 4,
"name": "Q4",
"type": "phone",
"description": "Guide to construct scene",
"auto": true,
"duration": 60
},
{
"id": 5,
"name": "Q5",
"type": "phone",
"description": "Guide to call",
"duration": 60,
"auto": true
},
{
"id": 6,
"name": "Q6",
"type": "space",
"description": "Ending",
"audioFile": "assets/q6.mp3"
}
]
}
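
Each cue either carries an audioFile (Q1–Q3, Q6) or a duration in seconds (Q4, Q5); loop keeps a cue playing until stopped, and auto chains straight into the next id once a cue ends. A minimal sketch of that chaining, mirroring the onCueEnd logic added in flow.jsx below (the helper name nextCue is illustrative, not part of this commit):

// Hedged sketch: pick the cue that should follow when "auto" is set.
// Ids in cuelist.json are sequential, so the next cue is simply id + 1.
function nextCue(cuelist, cue) {
  if (!cue.auto) return null;                        // manual cues wait for the operator's "go"
  return cuelist.find(c => c.id === cue.id + 1) ?? null;
}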

@ -1 +1,12 @@
@import "tailwindcss";
#root{
@apply flex flex-col h-screen;
}
.checkbox{
@apply flex flex-row items-center gap-1;
}
main{
@apply flex-1 flex flex-col gap-4 justify-start p-8 overflow-y-auto;
}

@ -1,366 +1,17 @@
import { useEffect, useRef, useState } from 'react';
import './App.css'
import { sendChatMessage } from './util/chat';
import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';
import { textToSpeech } from './util/tts';
import { gsap } from "gsap";
import { SplitText } from 'gsap/SplitText';
import { invoke } from '@tauri-apps/api/core';
import Input from './comps/input';
gsap.registerPlugin(SplitText);
const BASE_URL='http://localhost:3333';
function App() {
const [history, setHistory] = useState([]);
const [processing, setProcessing] = useState(false);
const [showProcessing, setShowProcessing] = useState(false);
const [audioOutput, setAudioOutput] = useState(false);
const [prompt, setPrompt] = useState([]);
const refHistoryContainer= useRef(null);
const refPrompContainer= useRef(null);
const refInput=useRef(null);
const {
transcript,
finalTranscript,
listening,
resetTranscript,
browserSupportsSpeechRecognition,
isMicrophoneAvailable,
}=useSpeechRecognition();
function restart(){
console.log("Restarting...");
setHistory([]);
setPrompt([]);
refInput.current.value = '';
resetTranscript();
SpeechRecognition.stopListening();
// create start message
const startTime=Date.now();
setProcessing(true);
sendChatMessage([]).then(response => {
if (!response.ok) {
throw new Error('Network response was not ok');
}
let data=response;
console.log('get reply: ', data, new Date(Date.now()-startTime).toISOString().slice(11, 19));
// add to history
setHistory(() => [{
role: 'assistant',
content: data.output_text,
}]);
setPrompt(()=>[
data.prompt,
]);
// tts
if(!audioOutput) {
setProcessing(false);
}else{
console.log('create speech:', data.output_text);
textToSpeech(data.output_text).then(audioUrl => {
const audio = new Audio(audioUrl);
console.log('play audio...', new Date(Date.now()-startTime).toISOString().slice(11, 19));
audio.play().catch(error => {
console.error('Audio playback failed:', error);
});
setProcessing(false);
}).catch(error => {
console.error('TTS error:', error);
});
}
});
}
function toggleAudio(value) {
console.log("onclickAudio", listening, browserSupportsSpeechRecognition, isMicrophoneAvailable);
if(!browserSupportsSpeechRecognition) {
console.warn("Browser does not support speech recognition.");
return;
}
if(!isMicrophoneAvailable) {
console.warn("Microphone is not available.");
return;
}
if(!listening && value){
SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' }).then(() => {
console.log("Speech recognition started.");
}).catch(error => {
console.error("Error starting speech recognition:", error);
});
}else{
SpeechRecognition.stopListening();
}
}
function onSubmit(event) {
event.preventDefault();
if(processing) {
console.warn("Already processing, ignoring submission.");
return;
}
setProcessing(true);
setShowProcessing(true);
const input = event.target.elements.input.value;
if(!input.trim()?.length) {
console.warn("Input is empty, ignoring submission.");
return;
}
const startTime=Date.now();
console.log("Submit reply:", input);
sendChatMessage([
...history,
{
role:'user',
content: input,
}
]).then(response => {
if (!response.ok) {
throw new Error('Network response was not ok');
setProcessing(false);
}
let data=response;
console.log('get reply: ', data, new Date(Date.now()-startTime).toISOString().slice(11, 19));
// add to history
setPrompt([
...prompt,
data.prompt,
]);
if(!audioOutput) {
setHistory(prev => [...prev, {
role: 'assistant',
content: data.output_text,
}]);
setProcessing(false);
setShowProcessing(false);
}else{
// tts
console.log('create speech:', data.output_text);
textToSpeech(data.output_text).then(audioUrl => {
const audio = new Audio(audioUrl);
console.log('play audio...', new Date(Date.now()-startTime).toISOString().slice(11, 19));
setShowProcessing(false);
setHistory(prev => [...prev, {
role: 'assistant',
content: data.output_text,
}]);
audio.play().catch(error => {
console.error('Audio playback failed:', error);
});
audio.addEventListener('ended',() => {
console.log('Audio playback ended');
setProcessing(()=>false);
});
}).catch(error => {
console.error('TTS error:', error);
setProcessing(()=>false);
});
}
});
// clear input
event.target.elements.input.value = '';
// setProcessing(()=>false);
setHistory(prev => [...prev, {
role: 'user',
content:input,
}]);
}
useEffect(()=>{
refHistoryContainer.current.scrollTop = refHistoryContainer.current.scrollHeight;
// Animate the history items
if(history.length === 0) return;
let last_item=document.querySelector('.last_history');
if(!last_item) return;
if(last_item.classList.contains('user')) return;
console.log('last_item', last_item);
let split=SplitText.create(last_item, {
type: "chars",
aria:'hidden'
});
console.log('split', split);
gsap.fromTo(split.chars, {
opacity: 0,
}, {
opacity: 1,
y: 0,
duration: 0.5,
ease: "steps(1)",
stagger: 0.1
});
},[history]);
useEffect(()=>{
refPrompContainer.current.scrollTop = refPrompContainer.current.scrollHeight;
},[prompt]);
useEffect(()=>{
if(listening){
refInput.current.value = transcript;
}
},[transcript]);
useEffect(()=>{
if(finalTranscript){
refInput.current.value = finalTranscript;
console.log('Final Transcript:', finalTranscript);
if(processing) return; // Prevent submission if already processing
// Submit the final transcript
onSubmit({
preventDefault: () => {},
target: {
elements: {
input: refInput.current
}
}
});
resetTranscript(); // Clear the transcript after submission
}
},[finalTranscript]);
useEffect(()=>{
console.log('window.SpeechRecognition=', window.SpeechRecognition || window.webkitSpeechRecognition);
// if (navigator.getUserMedia){
// navigator.getUserMedia({audio:true},
// function(stream) {
// // start_microphone(stream);
// console.log('Microphone access granted.');
// },
// function(e) {
// alert('Error capturing audio.');
// }
// );
// } else { alert('getUserMedia not supported in this browser.'); }
},[]);
return (
<main className='h-screen flex flex-col gap-8 justify-end p-8'>
<Input />
<div ref={refPrompContainer} className='flex-1 flex flex-col gap-2 border-4 overflow-y-auto'>
{prompt?.length==0 ? (
<div className='p-2 border-b border-gray-200'>Promp will appear here...</div>
):(
prompt?.map((item, index) => (
<div key={index} className='p-2 border-b border-gray-500 bg-pink-200'>
<p className='text-lg'>{item}</p>
</div>
))
)}
</div>
<div ref={refHistoryContainer} className='flex-1 overflow-y-auto'>
<div className='flex flex-col justify-end gap-2'>
{history?.length==0 && !showProcessing? (
<div className='p-2'>History will appear here...</div>
):(
history.map((item, index) => (
<div key={index} className={`p-2 rounded border-4 ${item.role === 'user' ? 'bg-gray-100' : 'bg-yellow-100'}`}>
<p className={`text-lg whitespace-pre-wrap history_item ${index==history?.length-1 && item.role!='user' && 'last_history'}`}>{item.content}</p>
</div>
))
)}
{showProcessing && (
<div className='p-2 rounded border-4 bg-yellow-100'>
<span className='animate-pulse'>...</span>
</div>
)}
</div>
</div>
<div className='flex flex-col gap-2'>
<div className='flex flex-row justify-end gap-2 '>
<button className='self-end' onClick={restart}>Restart</button>
<span className='flex-1'></span>
<button className='' onClick={()=>{
refInput.current.value=''
resetTranscript();
}}>clear</button>
<span className='flex flex-row items-center gap-1'>
<input
type="checkbox"
id="audio_input"
name="audio_input"
checked={listening}
onChange={(e)=>toggleAudio(e.target.checked)}
/>
<label htmlFor="audio_input">Audio Input</label>
</span>
<span className='flex flex-row items-center gap-1'>
<input type="checkbox" id="audio_output" name="audio_output" checked={audioOutput} onChange={(e) => setAudioOutput(e.target.checked)} />
<label htmlFor="audio_output">Audio Output</label>
</span>
</div>
<form className='flex flex-col justify-center *:border-4 gap-4' onSubmit={onSubmit} autoComplete="off">
<textarea ref={refInput} id="input" name="input" required className='self-stretch p-2 resize-none' rows={3} autoComplete="off"/>
<button type="submit" className='uppercase' disabled={processing}>Send</button>
</form>
<div className='w-full flex flex-row gap-2 justify-center py-2 px-8 *:bg-pink-200 *:px-2'>
<a href="/">Conversation</a>
<a href="/flow">Flow</a>
<a href="/settings">Settings</a>
</div>
</main>
)
}

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 
307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>


@ -1,7 +1,8 @@
import { writeFile, BaseDirectory, exists, mkdir } from '@tauri-apps/plugin-fs';
import { path } from '@tauri-apps/api';
import { invoke } from '@tauri-apps/api/core';
import { sendOsc } from '../util/osc';
export default function Input(){
@ -34,12 +35,7 @@ export default function Input(){
console.log('File saved:', `${folder}/${file.name}`);
// send osc to TD
await invoke('send_osc_message', {
key: '/upload',
host:`0.0.0.0:0`,
target: '127.0.0.1:8787',
message: `${folder}/${file.name}`,
});
await sendOsc('/upload', `${folder}/${file.name}`);
} else {
@ -67,7 +63,7 @@ export default function Input(){
<div className="flex flex-col items-stretch p-2 gap-4">
<form className="flex flex-row justify-start *:border-4 gap-4" onSubmit={onUploadFile}>
<label className="border-none">File</label>
<label className="border-none">file</label>
<input type="file" accept="image/*" className="self-end" />
<button type="submit" className="uppercase">Send</button>
</form>
@ -78,12 +74,7 @@ export default function Input(){
const value = e.target.value;
console.log('Range value changed:', value);
invoke('send_osc_message', {
key:'/control_strength',
message: (value/100.0).toString(),
host:`0.0.0.0:0`,
target: '127.0.0.1:8787',
});
sendOsc('/control_strength', (value/100.0).toString());
}}
/>
</div>

@ -0,0 +1,64 @@
import { useEffect, useState, useRef, forwardRef, useImperativeHandle, } from "react"
const Update_Interval = 1000; // 1 second
export const Countdown=forwardRef(({time, callback, auto, ...props}, ref)=>{
const refTime = useRef(time);
const refInterval = useRef(null);
const refDisplay=useRef();
function restart(newTime, callback_func) {
console.log('restart countdown:', newTime, 'ms');
if(refInterval.current) {
clearInterval(refInterval.current);
}
refTime.current = newTime || time;
refInterval.current=setInterval(() => {
if(refTime.current>0){
refTime.current -= Update_Interval;
// console.log('Countdown:', refTime.current/1000);
}else{
refTime.current = 0;
clearInterval(refInterval.current);
if(typeof callback_func === 'function'){
callback_func();
}else{
if(callback) callback();
}
}
if(refDisplay.current) refDisplay.current.innerText = (refTime.current/1000).toFixed(0);
}, Update_Interval);
}
useEffect(()=>{
if(auto)
restart(time);
// return () => {
// clearInterval(refInterval.current);
// }
}, [time, callback]);
useImperativeHandle(ref, () => ({
restart,
}));
return (
<div className="text-3xl rounded-full bg-gray-200 w-[10vw] h-[10vw] flex justify-center items-center"
ref={refDisplay}></div>
)
});
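
Countdown keeps its remaining time in a ref and writes the display node directly, so parent re-renders never reset it; parents drive it through the restart method exposed via useImperativeHandle. A minimal usage sketch (hypothetical parent component; the real callers are conversation.jsx and flow.jsx in this commit):

import { useRef } from "react";
import { Countdown } from "./comps/timer";

function TimerDemo() {
  const refTimer = useRef(null);
  return (
    <div>
      {/* time/callback cover the auto case; restart(ms, cb) re-arms the timer on demand */}
      <Countdown ref={refTimer} time={10000} callback={() => console.log("timer done")} />
      <button onClick={() => refTimer.current.restart(5000, () => console.log("5s elapsed"))}>
        restart 5s
      </button>
    </div>
  );
}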

@ -3,7 +3,7 @@
@layer base{
button{
@apply rounded-full border-4 px-2 bg-slate-200;
@apply rounded-full border-4 px-2 bg-slate-200 cursor-pointer;
}
}

@ -1,10 +1,24 @@
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import { BrowserRouter, Routes, Route } from "react-router";
import './index.css'
import App from './App.jsx'
import { Settings } from './pages/settings.jsx';
import { Flow } from './pages/flow.jsx';
import { Conversation } from './pages/conversation.jsx';
createRoot(document.getElementById('root')).render(
<StrictMode>
<BrowserRouter>
{/* <App /> */}
<App />
<Routes>
<Route path="/" element={<Conversation />} />
<Route path="/flow" element={<Flow />} />
<Route path="/settings" element={<Settings />} />
</Routes>
</BrowserRouter>
</StrictMode>,
)

@ -0,0 +1,401 @@
import { useEffect, useRef, useState } from 'react';
import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';
import { gsap } from "gsap";
import { SplitText } from 'gsap/SplitText';
import Input from '../comps/input';
import { Countdown } from '../comps/timer';
import { Prompt_Count, Prompt_Interval } from '../util/constant';
import { sendChatMessage } from '../util/chat';
import { textToSpeech } from '../util/tts';
gsap.registerPlugin(SplitText);
export function Conversation() {
const [history, setHistory] = useState([]);
const [processing, setProcessing] = useState(false);
const [showProcessing, setShowProcessing] = useState(false);
const [audioOutput, setAudioOutput] = useState(false);
const refPromptCount = useRef(0);
const [useTimer, setUseTimer] = useState(false);
const [prompt, setPrompt] = useState([]);
const refHistoryContainer= useRef(null);
const refPrompContainer= useRef(null);
const refInput=useRef(null);
const refTimer=useRef(null);
const {
transcript,
finalTranscript,
listening,
resetTranscript,
browserSupportsSpeechRecognition,
isMicrophoneAvailable,
}=useSpeechRecognition();
function restart(){
console.log("Restarting...");
setHistory([]);
setPrompt([]);
refInput.current.value = '';
resetTranscript();
SpeechRecognition.stopListening();
// create start message
const startTime=Date.now();
setProcessing(true);
sendChatMessage([]).then(response => {
if (!response.ok) {
throw new Error('Network response was not ok');
}
let data=response;
console.log('get reply: ', data, new Date(Date.now()-startTime).toISOString().slice(11, 19));
// add to history
setHistory(() => [{
role: 'assistant',
content: data.output_text,
}]);
setPrompt(()=>[
data.prompt,
]);
// tts
if(!audioOutput) {
setProcessing(false);
}else{
console.log('create speech:', data.output_text);
textToSpeech(data.output_text).then(audioUrl => {
const audio = new Audio(audioUrl);
console.log('play audio...', new Date(Date.now()-startTime).toISOString().slice(11, 19));
audio.play().catch(error => {
console.error('Audio playback failed:', error);
});
setProcessing(false);
}).catch(error => {
console.error('TTS error:', error);
});
}
});
}
function toggleAudio(value) {
console.log("onclickAudio", listening, browserSupportsSpeechRecognition, isMicrophoneAvailable);
if(!browserSupportsSpeechRecognition) {
console.warn("Browser does not support speech recognition.");
return;
}
if(!isMicrophoneAvailable) {
console.warn("Microphone is not available.");
return;
}
if(!listening && value){
SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' }).then(() => {
console.log("Speech recognition started.");
}).catch(error => {
console.error("Error starting speech recognition:", error);
});
}else{
SpeechRecognition.stopListening();
}
}
function onTimerEnd(){
if(!useTimer) return;
refPromptCount.current += 1;
if(refPromptCount.current > Prompt_Count) {
console.warn("Maximum prompt count reached, stopping timer.");
return;
}
}
function onSubmit(event) {
event.preventDefault();
if(processing) {
console.warn("Already processing, ignoring submission.");
return;
}
setProcessing(true);
setShowProcessing(true);
const input = event.target.elements.input.value;
if(!input.trim()?.length) {
console.warn("Input is empty, ignoring submission.");
return;
}
const startTime=Date.now();
console.log("Submit reply:", input);
sendChatMessage([
...history,
{
role:'user',
content: input,
}
]).then(response => {
if (!response.ok) {
setProcessing(false); // reset before bailing out; a statement placed after throw would never run
throw new Error('Network response was not ok');
}
let data=response;
console.log('get reply: ', data, new Date(Date.now()-startTime).toISOString().slice(11, 19));
// add to history
setPrompt([
...prompt,
data.prompt,
]);
if(!audioOutput) {
setHistory(prev => [...prev, {
role: 'assistant',
content: data.output_text,
}]);
setProcessing(false);
setShowProcessing(false);
}else{
// tts
console.log('create speech:', data.output_text);
textToSpeech(data.output_text).then(audioUrl => {
const audio = new Audio(audioUrl);
console.log('play audio...', new Date(Date.now()-startTime).toISOString().slice(11, 19));
setShowProcessing(false);
setHistory(prev => [...prev, {
role: 'assistant',
content: data.output_text,
}]);
audio.play().catch(error => {
console.error('Audio playback failed:', error);
});
audio.addEventListener('ended',() => {
console.log('Audio playback ended');
setProcessing(()=>false);
});
}).catch(error => {
console.error('TTS error:', error);
setProcessing(()=>false);
});
}
});
// clear input
event.target.elements.input.value = '';
// setProcessing(()=>false);
setHistory(prev => [...prev, {
role: 'user',
content:input,
}]);
}
useEffect(()=>{
refHistoryContainer.current.scrollTop = refHistoryContainer.current.scrollHeight;
// Animate the history items
if(history.length === 0) return;
let last_item=document.querySelector('.last_history');
if(!last_item) return;
if(last_item.classList.contains('user')) return;
console.log('last_item', last_item);
let split=SplitText.create(last_item, {
type: "chars",
aria:'hidden'
});
console.log('split', split);
gsap.fromTo(split.chars, {
opacity: 0,
}, {
opacity: 1,
y: 0,
duration: 0.5,
ease: "steps(1)",
stagger: 0.1,
onComplete:()=>{
if(useTimer) {
refTimer.current.restart(Prompt_Interval);
}
}
});
},[history]);
useEffect(()=>{
refPrompContainer.current.scrollTop = refPrompContainer.current.scrollHeight;
},[prompt]);
useEffect(()=>{
if(listening){
refInput.current.value = transcript;
}
},[transcript]);
useEffect(()=>{
if(finalTranscript){
refInput.current.value = finalTranscript;
console.log('Final Transcript:', finalTranscript);
if(processing) return; // Prevent submission if already processing
// Submit the final transcript
onSubmit({
preventDefault: () => {},
target: {
elements: {
input: refInput.current
}
}
});
resetTranscript(); // Clear the transcript after submission
}
},[finalTranscript]);
useEffect(()=>{
console.log('window.SpeechRecognition=', window.SpeechRecognition || window.webkitSpeechRecognition);
// if (navigator.getUserMedia){
// navigator.getUserMedia({audio:true},
// function(stream) {
// // start_microphone(stream);
// console.log('Microphone access granted.');
// },
// function(e) {
// alert('Error capturing audio.');
// }
// );
// } else { alert('getUserMedia not supported in this browser.'); }
},[]);
return (
<main className=''>
<div className='flex flex-row items-center justify-between'>
<Input />
<span className='checkbox'>
<input
type="checkbox"
id="use_timer"
name="use_timer"
checked={useTimer}
onChange={(e) => setUseTimer(e.target.checked)}/>
<label htmlFor="use_timer">Use Timer</label>
</span>
<Countdown ref={refTimer} time={Prompt_Interval} callback={onTimerEnd} />
</div>
<div ref={refPrompContainer} className='flex-1 flex flex-col gap-2 border-4 overflow-y-auto'>
{prompt?.length==0 ? (
<div className='p-2 border-b border-gray-200'>Prompt will appear here...</div>
):(
prompt?.map((item, index) => (
<div key={index} className='p-2 border-b border-gray-500 bg-pink-200'>
<p className='text-lg'>{item}</p>
</div>
))
)}
</div>
<div ref={refHistoryContainer} className='flex-1 overflow-y-auto'>
<div className='flex flex-col justify-end gap-2'>
{history?.length==0 && !showProcessing? (
<div className='p-2'>History will appear here...</div>
):(
history.map((item, index) => (
<div key={index} className={`p-2 rounded border-4 ${item.role === 'user' ? 'bg-gray-100' : 'bg-yellow-100'}`}>
<p className={`text-lg whitespace-pre-wrap history_item ${index==history?.length-1 && item.role!='user' && 'last_history'}`}>{item.content}</p>
</div>
))
)}
{showProcessing && (
<div className='p-2 rounded border-4 bg-yellow-100'>
<span className='animate-pulse'>...</span>
</div>
)}
</div>
</div>
<div className='flex flex-col gap-2'>
<div className='flex flex-row justify-end gap-2 '>
<button className='self-end' onClick={restart}>Restart</button>
<span className='flex-1'></span>
<button className='' onClick={()=>{
refInput.current.value=''
resetTranscript();
}}>clear</button>
<span className='checkbox'>
<input
type="checkbox"
id="audio_input"
name="audio_input"
checked={listening}
onChange={(e)=>toggleAudio(e.target.checked)}
/>
<label htmlFor="audio_input">Audio Input</label>
</span>
<span className='checkbox'>
<input type="checkbox" id="audio_output" name="audio_output" checked={audioOutput} onChange={(e) => setAudioOutput(e.target.checked)} />
<label htmlFor="audio_output">Audio Output</label>
</span>
</div>
<form className='flex flex-col justify-center *:border-4 gap-4' onSubmit={onSubmit} autoComplete="off">
<textarea ref={refInput} id="input" name="input" required className='self-stretch p-2 resize-none' rows={3} autoComplete="off"/>
<button type="submit" className='uppercase' disabled={processing}>Send</button>
</form>
</div>
</main>
)
}

@ -0,0 +1,126 @@
import { useEffect, useRef, useState } from "react";
import { Countdown } from "../comps/timer";
export function Flow(){
const [cuelist, setCuelist] = useState([]);
const [currentCue, setCurrentCue] = useState(null);
const refTimer=useRef();
const refAudio=useRef();
function playCue(cue) {
if(!cue) return;
console.log('Playing cue:', cue);
setCurrentCue(cue.name);
if(cue.audioFile){
// Stop any currently playing audio
if(refAudio.current) {
refAudio.current.pause();
}
const audio = new Audio(cue.audioFile);
if(cue.loop){
audio.loop = true;
}
audio.play().catch(error => {
console.error('Error playing audio:', error);
});
audio.onended = () => {
onCueEnd(cue);
}
refAudio.current = audio;
audio.addEventListener("loadedmetadata", () => {
refTimer.current.restart(audio.duration*1000 || 0);
});
}
if(cue.duration){
refTimer.current.restart(cue.duration*1000, ()=>{
onCueEnd(cue);
});
}
}
function onCueEnd(cue) {
if(cue.auto) {
playCue(cuelist.find(c => c.id === cue.id+1));
}
}
function onStop(){
console.log('Stopping current cue');
if(refAudio.current) {
refAudio.current.pause();
refAudio.current = null;
}
setCurrentCue(null);
refTimer.current.restart(0);
}
useEffect(()=>{
fetch('/cuelist.json')
.then(response => response.json())
.then(data => {
console.log('Cuelist data:', data);
setCuelist(data.cuelist);
})
.catch(error => {
console.error('Error fetching cuelist:', error);
});
},[]);
return (
<main className="items-center">
<div className="w-full p-2 flex flex-row justify-center gap-2 *:w-[10vw] *:h-[10vw]">
<div className="bg-gray-100 text-4xl font-bold mb-4 flex justify-center items-center">
{currentCue}
</div>
<Countdown ref={refTimer} />
<button className="bg-red-300 border-0 font-bold uppercase"
onClick={onStop}>Stop</button>
</div>
<table className="border-collapse **:border-y w-full **:p-2">
<thead>
<tr className="text-left">
{/* <th>ID</th> */}
<th>Name</th>
<th>Description</th>
<th>Type</th>
<th>Auto</th>
<th>Audio File</th>
<th></th>
</tr>
</thead>
<tbody>
{cuelist?.map(({id, name, description, type, auto, audioFile,...props}, index) => (
<tr key={id}>
{/* <td>{id}</td> */}
<td>{name}</td>
<td>{description}</td>
<td>{type=='phone'?'📞':(type=='headphone'?'🎧':'🔊')}</td>
<td>{auto ? '↩' : ''}</td>
<td>{audioFile}</td>
<td>
<button className="rounded-full border-none bg-green-200"
onClick={()=>{
playCue({id, name, description, type, auto, audioFile, ...props});
}}>go</button>
</td>
</tr>
))}
</tbody>
</table>
</main>
);
}

@ -0,0 +1,9 @@
export function Settings(){
return (
<div className="flex flex-col items-center justify-center h-full">
<h1 className="text-4xl font-bold mb-4">Settings</h1>
<p className="text-lg">This page is under construction.</p>
</div>
);
}

@ -1,6 +1,7 @@
import { fetch } from '@tauri-apps/plugin-http';
import { invoke } from '@tauri-apps/api/core';
import { system_prompt, welcome_prompt } from './system_prompt';
import { sendOsc } from './osc';
import { invoke } from '@tauri-apps/api/core';
async function getOpenAIToken() {
return invoke('get_env',{name:'OPENAI_API_KEY'});
@ -64,12 +65,7 @@ export async function sendChatMessage(messages) {
const result=JSON.parse(choice.message.content);
// send to tauri
await invoke('send_osc_message', {
key:'/prompt',
message: result.prompt.replaceAll('"', ''), // escape quotes for OSC
host:`0.0.0.0:0`,
target: '127.0.0.1:8787'
});
await sendOsc('/prompt', result.prompt.replaceAll('"', ''));
return {

@ -0,0 +1,4 @@
export const Prompt_Count= 3; // number of prompts
export const Prompt_Interval= 10000; // ms
export const Call_Interval= 30000; // ms

@ -0,0 +1,10 @@
import { invoke } from '@tauri-apps/api/core';
export async function sendOsc(key, message){
await invoke('send_osc_message', {
key: key,
message: message,
host:`0.0.0.0:0`,
target: '127.0.0.1:8787',
});
}
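
sendOsc pins the local host and the TD target (127.0.0.1:8787) so callers only supply an OSC address and a string payload; the input.jsx and chat.js hunks above are rewritten to use it. A small self-contained sketch of the pattern (notifyUpload and pushPrompt are hypothetical helpers, reusing the addresses from those call sites):

import { sendOsc } from './util/osc';

// input.jsx pattern: forward the saved file path to TD after an upload
async function notifyUpload(savedPath) {
  await sendOsc('/upload', savedPath);
}

// chat.js pattern: push the generated prompt, with quotes stripped for OSC
async function pushPrompt(promptText) {
  await sendOsc('/prompt', promptText.replaceAll('"', ''));
}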

@ -77,3 +77,5 @@ export const welcome_prompt=[
]
export const voice_prompt="Use a calm and expressive voice, soft and poetic in feeling, but with steady, natural rhythm — not slow.";
export const summary_prompt="幫我把以下一段話整理成一段文字,以第一人稱視角作為當事人的文字紀念,文字內容 50 字以內:";