update for flow

main
reng 6 months ago
parent 742a52d293
commit cb40933ad7
9 changed files (lines changed):

  1. vite/package-lock.json (14)
  2. vite/package.json (1)
  3. vite/public/cuelist.json (59)
  4. vite/src/pages/conversation.jsx (2)
  5. vite/src/pages/flow.jsx (205)
  6. vite/src/util/chat.js (54)
  7. vite/src/util/output.js (32)
  8. vite/src/util/system_prompt.js (72)
  9. vite/src/util/useChat.jsx (36)

vite/package-lock.json
@@ -12,6 +12,7 @@
         "@tauri-apps/plugin-fs": "^2.3.0",
         "@tauri-apps/plugin-http": "^2.4.4",
         "gsap": "^3.13.0",
+        "moment": "^2.30.1",
         "react": "^19.1.0",
         "react-dom": "^19.1.0",
         "react-router": "^7.6.2",
@@ -2629,6 +2630,14 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
+    "node_modules/moment": {
+      "version": "2.30.1",
+      "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz",
+      "integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==",
+      "engines": {
+        "node": "*"
+      }
+    },
     "node_modules/ms": {
       "version": "2.1.3",
       "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
@@ -4608,6 +4617,11 @@
       "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
       "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="
     },
+    "moment": {
+      "version": "2.30.1",
+      "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz",
+      "integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how=="
+    },
     "ms": {
       "version": "2.1.3",
       "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",

vite/package.json
@@ -14,6 +14,7 @@
     "@tauri-apps/plugin-fs": "^2.3.0",
     "@tauri-apps/plugin-http": "^2.4.4",
     "gsap": "^3.13.0",
+    "moment": "^2.30.1",
    "react": "^19.1.0",
     "react-dom": "^19.1.0",
     "react-router": "^7.6.2",

vite/public/cuelist.json
@@ -38,27 +38,59 @@
       "id": 4.1,
       "name": "Q4.1",
       "type": "chat",
-      "description": "c1",
+      "description": "chat-1 system",
       "auto": true,
-      "duration": 40,
       "nextcue": 4.2
     },
     {
       "id": 4.2,
       "name": "Q4.2",
-      "type": "chat",
-      "description": "c2",
+      "type": "user_input",
+      "description": "chat-1 user",
       "auto": true,
-      "duration": 40,
+      "duration": 20,
       "nextcue": 4.3
     },
     {
       "id": 4.3,
       "name": "Q4.3",
       "type": "chat",
-      "description": "c3",
+      "description": "chat-2 system",
+      "auto": true,
+      "nextcue": 4.4
+    },
+    {
+      "id": 4.4,
+      "name": "Q4.4",
+      "type": "user_input",
+      "description": "chat-2 user",
+      "auto": true,
+      "duration": 20,
+      "nextcue": 4.5
+    },
+    {
+      "id": 4.5,
+      "name": "Q4.1",
+      "type": "chat",
+      "description": "chat-3 system",
+      "auto": true,
+      "nextcue": 4.6
+    },
+    {
+      "id": 4.6,
+      "name": "Q4.6",
+      "type": "user_input",
+      "description": "chat-3 user",
+      "auto": true,
+      "duration": 20,
+      "nextcue": 4.7
+    },
+    {
+      "id": 4.7,
+      "name": "Q4.7",
+      "type": "chat",
+      "description": "chat-3 system",
       "auto": true,
-      "duration": 40,
       "nextcue": 5
     },
     {
@@ -73,11 +105,20 @@
     {
       "id": 5.1,
       "name": "Q5.1",
-      "type": "chat",
+      "type": "user_input",
       "description": "call",
       "duration": 60,
       "auto": true,
-      "nextcue": 6
+      "nextcue": 5.2
+    },
+    {
+      "id": 5.2,
+      "name": "Q5.2",
+      "type": "chat",
+      "description": "summary",
+      "auto": true,
+      "nextcue": 6,
+      "callback": "summary"
     },
     {
       "id": 6,

vite/src/pages/conversation.jsx
@@ -363,7 +363,7 @@ export function Conversation() {
           </span>
         </div>
         <form className='flex flex-col justify-center *:border-4 gap-4' onSubmit={onSubmit} autoComplete="off">
-          <textarea ref={refInput} id="input" name="input" required className='self-stretch p-2 resize-none' rows={3} autoComplete="off"/>
+          <textarea ref={refInput} id="input" name="input" required className='self-stretch p-2 resize-none' autoComplete="off"/>
           <button type="submit" className='' disabled={processing}>Send</button>
         </form>
       </div>

vite/src/pages/flow.jsx
@@ -1,14 +1,19 @@
 import { useEffect, useRef, useState } from "react";
+import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';
 import { Countdown } from "../comps/timer";
+import { Conversation } from "./conversation";
 import { Status, useChat } from "../util/useChat";
+import { getSummary } from "../util/chat";
+import { saveHistory } from "../util/output";
 const EmojiType={
   phone: '📞',
   headphone: '🎧',
   speaker: '🔊',
-  chat: '💬',
+  chat: '🤖',
+  user_input: '💬',
 }
 export function Flow(){
@@ -16,20 +21,56 @@ export function Flow(){
   const [cuelist, setCuelist] = useState([]);
   const [currentCue, setCurrentCue] = useState(null);
   const [chatWelcome, setChatWelcome] = useState(null);
+  const [audioInput, setAudioInput] = useState(false);
   const refTimer=useRef();
   const refAudio=useRef();
   const refInput=useRef();
-  const { history, status, reset, sendMessage, setStatus, audioOutput, setAudioOutput, stop:stopChat }=useChat();
+  const refCurrentCue= useRef(null);
+  const { history, status, reset, sendMessage, setStatus, audioOutput, setAudioOutput, stop:stopChat, audioUrl, }=useChat();
+  const {
+    transcript,
+    finalTranscript,
+    listening,
+    resetTranscript,
+    browserSupportsSpeechRecognition,
+    isMicrophoneAvailable,
+  }=useSpeechRecognition();
+  function playAudio(url){
+    if(!url) return;
+    console.log('Playing audio:', url);
+    if(refAudio.current) {
+      refAudio.current.pause(); // Stop any currently playing audio
+    }
+    const audio = new Audio(url);
+    audio.loop=refCurrentCue.current?.loop || false; // Set loop if defined in cue
+    audio.play().catch(error => {
+      console.error("Audio playback error:", error);
+    });
+    audio.onended = () => {
+      onCueEnd();
+    }
+    refAudio.current = audio; // Store the new audio reference
+    audio.addEventListener("loadedmetadata", () => {
+      refTimer.current.restart(audio.duration*1000 || 0);
+    });
+  }
   function playCue(cue) {
     if(!cue) return;
     console.log('Playing cue:', cue);
     setCurrentCue(cue);
+    refCurrentCue.current = cue; // Store the current cue in ref
     if(parseFloat(cue.id)<=4.1){
       // Special case for starting a conversation
@@ -37,38 +78,29 @@ export function Flow(){
       reset();
     }
-    if(cue.type=='chat' && cue.id=='4.1'){
+    if(cue.type=='chat'){
       // Special case for starting a conversation
-      console.log('Starting conversation...');
-      sendMessage();
-      setChatWelcome(true);
-    }
-    if(cue.audioFile){
-      // Stop any currently playing audio
-      if(refAudio.current) {
-        refAudio.current.pause();
-      }
-      const audio = new Audio(cue.audioFile);
-      if(cue.loop){
-        audio.loop = true;
-      }
-      audio.play().catch(error => {
-        console.error('Error playing audio:', error);
-      });
-      audio.onended = () => {
-        onCueEnd(cue);
-      }
-      refAudio.current = audio;
-      audio.addEventListener("loadedmetadata", () => {
-        refTimer.current.restart(audio.duration*1000 || 0);
-      });
+      resetTranscript();
+      if(cue.id==4.1){
+        console.log('Starting conversation...');
+        sendMessage();
+        setChatWelcome(true);
+      }else{
+        const message= refInput.current.value?.trim();
+        if(message && message.length>0) {
+          sendMessage(message);
+          setChatWelcome(false);
+        }else{
+          onCueEnd(cue); // if no message, just continue to next cue
+        }
+      }
+    }
+    if(cue.audioFile){
+      playAudio(cue.audioFile);
     }
     if(cue.duration){
@@ -78,30 +110,18 @@ export function Flow(){
     }
   }
-  function onCueEnd(cue) {
-    if(!cue) return;
-    console.log('onCueEnd:', cue.id);
-    if(cue.type=='chat'){
-      // sendChatMessage
-      const message= refInput.current.value?.trim();
-      if(message && message.length>0) {
-        sendMessage(message);
-        setChatWelcome(false);
-      }else{
-        // if no message, just continue to next cue
-        console.log('No message to send, continuing to next cue');
-        playCue(cuelist.find(c => c.id === cue.nextcue));
-      }
-    }else{
-      if(cue.auto) {
-        playCue(cuelist.find(c => c.id === cue.nextcue));
-      }
+  function onCueEnd() {
+    if(!refCurrentCue.current) return;
+    const cue= refCurrentCue.current; // Get the current cue from ref
+    console.log('onCueEnd:', cue.id);
+    if(cue.auto) {
+      playCue(cuelist.find(c => c.id === cue.nextcue));
     }
   }
   function onStop(){
@@ -110,19 +130,55 @@ export function Flow(){
       refAudio.current.pause();
       refAudio.current = null;
     }
     setCurrentCue(null);
+    refCurrentCue.current = null; // Clear the current cue reference
     refTimer.current.restart(0);
     stopChat(); // Stop chat processing
   }
+  useEffect(()=>{
+    if(audioInput && isMicrophoneAvailable) {
+      SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' }).then(() => {
+        console.log("Speech recognition started.");
+      }).catch(error => {
+        console.error("Error starting speech recognition:", error);
+      });
+    }else{
+      console.log('Stopping speech recognition...');
+      SpeechRecognition.stopListening();
+    }
+  },[audioInput]);
+  useEffect(()=>{
+    // if(listening){
+      if(currentCue?.type=='user_input') refInput.current.value = transcript;
+    // }
+  },[transcript]);
+  useEffect(()=>{
+    if(audioUrl) playAudio(audioUrl);
+  },[audioUrl]);
   useEffect(()=>{
     switch(status) {
       case Status.SUCCESS:
         console.log('Success!');
         setStatus(Status.IDLE);
-        refInput.current.value = ''
+        refInput.current.value = '';
+        resetTranscript();
         if(chatWelcome) {
           return;
@@ -130,22 +186,35 @@ export function Flow(){
         // play next cue
-        if(currentCue.nextcue!=5 && currentCue.nextcue!=6){ // Q5 & Q6 wait for audio end
-          if(currentCue.nextcue) {
-            playCue(cuelist.find(c => c.id === currentCue.nextcue));
-          } else {
-            setCurrentCue(null);
-          }
-        }
-        break;
-      case Status.AUDIO_ENDED:
-        console.log('Audio ended');
-        if(currentCue.nextcue==5 || currentCue.nextcue==6){ // Q5 & Q6 wait for audio end
-          playCue(cuelist.find(c => c.id === currentCue.nextcue));
+        // if(currentCue.nextcue!=5 && currentCue.nextcue!=6){ // Q5 & Q6 wait for audio end
+        //   if(currentCue.nextcue) {
+        //     playCue(cuelist.find(c => c.id === currentCue.nextcue));
+        //   } else {
+        //     setCurrentCue(null);
+        //   }
+        // }
+        if(refCurrentCue.current.callback=='summary'){
+          // get summary
+          console.log('Getting summary...');
+          getSummary(history.map(el=>`${el.role}:${el.content}`).join('\n')).then(summary => {
+            console.log('Summary:', summary);
+          }).catch(error => {
+            console.error('Error getting summary:', error);
+          });
         }
         break;
+      // case Status.AUDIO_ENDED:
+      //   console.log('Audio ended');
+      //   if(currentCue.nextcue==5 || currentCue.nextcue==6){ // Q5 & Q6 wait for audio end
+      //     playCue(cuelist.find(c => c.id === currentCue.nextcue));
+      //   }
+      //   break;
     }
   },[status]);
@@ -170,10 +239,13 @@ export function Flow(){
       <div className="w-full p-2 flex flex-row justify-center gap-2 *:w-[10vw] *:h-[10vw]">
         <div className="bg-gray-100 text-4xl font-bold mb-4 flex justify-center items-center">
-          {currentCue?.name}
+          {refCurrentCue.current?.name}
         </div>
         <Countdown ref={refTimer} />
         <button className="!bg-red-300" onClick={onStop}>Stop</button>
+        <button className="!bg-yellow-300" onClick={()=>{
+          saveHistory(history);
+        }}>Save</button>
       </div>
       <div className=" max-h-[33vh] overflow-y-auto">
         <table className="border-collapse **:border-y w-full **:p-2">
@@ -218,13 +290,18 @@ export function Flow(){
           ))}
         </div>
         <textarea ref={refInput} name="message" rows={2}
-          className={`w-full border-1 resize-none p-2 ${status!=Status.IDLE && status!=Status.AUDIO_ENDED? 'bg-gray-500':''}`}
-          disabled={status!=Status.IDLE && status!=Status.AUDIO_ENDED}></textarea>
+          className={`w-full border-1 resize-none p-2 ${currentCue?.type!='user_input'? 'bg-gray-500':''}`}
+          disabled={currentCue?.type!='user_input'}></textarea>
         <div className="flex flex-row justify-end gap-2">
           <span className="flex flex-row gap-1">
             <label>audio_output</label>
             <input type='checkbox' checked={audioOutput} onChange={(e) => setAudioOutput(e.target.checked)} />
           </span>
+          <span className="flex flex-row gap-1">
+            <label>audio_input</label>
+            <input type='checkbox' checked={audioInput} onChange={(e) => setAudioInput(e.target.checked)} />
+          </span>
           <div className="rounded-2xl bg-gray-300 self-end px-4 tracking-widest">chat_status= {status}</div>
         </div>
       </div>
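
The dictation wiring above uses react-speech-recognition's singleton listener plus its hook. A stripped-down sketch of the same pattern outside the cue machinery (the component name and onText prop are hypothetical):

    import { useEffect } from 'react';
    import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';

    function Dictation({ enabled, onText }) {
      const { transcript, browserSupportsSpeechRecognition } = useSpeechRecognition();
      useEffect(() => {
        if (!browserSupportsSpeechRecognition) return;
        if (enabled) SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' });
        else SpeechRecognition.stopListening();
      }, [enabled]);
      // Push the text out as it accumulates, the way flow.jsx mirrors it into the textarea.
      useEffect(() => { onText?.(transcript); }, [transcript]);
      return null;
    }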

vite/src/util/chat.js
@@ -1,5 +1,5 @@
 import { fetch } from '@tauri-apps/plugin-http';
-import { system_prompt, welcome_prompt } from './system_prompt';
+import { summary_prompt, system_prompt, welcome_prompt } from './system_prompt';
 import { sendOsc } from './osc';
 import { invoke } from '@tauri-apps/api/core';
@@ -66,6 +66,7 @@ export async function sendChatMessage(messages) {
   // send to tauri
   await sendOsc('/prompt', result.prompt.replaceAll('"', ''));
+  await sendOsc('/output_text', result.output_text.replaceAll('"', ''));
   return {
@@ -75,3 +76,54 @@ export async function sendChatMessage(messages) {
 }
+
+export async function getSummary(messages) {
+  const token = await getOpenAIToken();
+  console.log("Generating summary for messages:", messages);
+  const response = await fetch('https://api.openai.com/v1/chat/completions', {
+    method: 'POST',
+    headers: {
+      'Content-Type': 'application/json',
+      'Authorization': `Bearer ${token}`
+    },
+    body: JSON.stringify({
+      model: 'gpt-4o',
+      messages: [
+        {
+          role: "system",
+          content: summary_prompt,
+        },
+        {
+          role: "user",
+          content: JSON.stringify(messages)
+        },
+      ],
+    }),
+  });
+  if (!response.ok) {
+    const text= await response.text();
+    console.error("Error response:", text);
+    throw new Error(`HTTP error! status: ${response.status}`);
+  }
+  const output= await response.json();
+  const choice= output.choices[0];
+  // console.log("Generated response:", choice.message);
+  const result=choice.message.content;
+
+  // send to tauri
+  await sendOsc('/summary', result);
+  return {
+    result,
+    ok: true,
+  };
+}
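
getSummary takes the transcript already flattened to role:content lines (flow.jsx does the join) and then quotes it again via JSON.stringify, so the model receives one quoted block. A hypothetical call site, mirroring the one added in flow.jsx:

    import { getSummary } from './chat';

    const flat = history.map(el => `${el.role}:${el.content}`).join('\n');
    const { result, ok } = await getSummary(flat); // also broadcasts the text over OSC on /summary
    if (ok) console.log(result);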

vite/src/util/output.js
@@ -0,0 +1,32 @@
+import { writeTextFile, BaseDirectory, exists, mkdir } from '@tauri-apps/plugin-fs';
+import { path } from '@tauri-apps/api';
+import moment from 'moment';
+
+export async function saveHistory(history, name) {
+  try{
+    const historyString = JSON.stringify(history);
+    let folder=await path.appDataDir();
+    folder += '\\history'; // Append 'history' to the app data directory path
+    console.log('History folder:', folder, historyString);
+    if (!(await exists(folder))) {
+      console.log('Creating folder:', folder);
+      await mkdir(folder);
+    }
+    const filename=name || `${moment().format('YYYYMMDDHHmmss')}.log`;
+    const res=await writeTextFile(`history\\${filename}`, historyString, {
+      baseDir: BaseDirectory.AppData,
+    });
+    console.log('File saved:', `${folder}\\${filename}`);
+  }catch(error) {
+    console.error('Error saving history:', error);
+  }
+}
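
One caveat: '\\' is a path separator only on Windows; on other platforms the backslash becomes part of the folder and file names instead. A cross-platform variant of the same save, sketched with Tauri's async path.join (saveHistoryPortable is a hypothetical name):

    import { writeTextFile, BaseDirectory, exists, mkdir } from '@tauri-apps/plugin-fs';
    import { path } from '@tauri-apps/api';
    import moment from 'moment';

    export async function saveHistoryPortable(history, name) {
      const folder = await path.join(await path.appDataDir(), 'history');
      if (!(await exists(folder))) await mkdir(folder); // create history/ on first save
      const filename = name || `${moment().format('YYYYMMDDHHmmss')}.log`;
      await writeTextFile(await path.join('history', filename), JSON.stringify(history), {
        baseDir: BaseDirectory.AppData,
      });
    }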

vite/src/util/system_prompt.js
@@ -1,35 +1,59 @@
-export const system_prompt = `你是一位溫柔、細膩的聲音引導者,陪伴使用者透過一通象徵性的電話,回到記憶中那段遺憾的時光。
-每一輪對話都應由你主動發問,語句需簡短,語速節奏感柔和、不急促,使用「請說,我在聽」「請說吧,我會聽」等語句鼓勵使用者開口。請根據使用者的回答動態延續情境的描述,不可重複使用範本語句。
-請依下列結構引導對話,共四輪:
-第一輪
-- 讓使用者想像這是一通照亮心中遺憾的電話,將映出那天的光影、身影、場景
-- 引導使用者描述那段模糊記憶裡的場景
-- 是在哪裡?天氣如何?給他的感覺是什麼?
-- 使用溫柔語氣鼓勵表達:「請說吧,我會聽」
-第二輪
-- 景象清晰了,請引導使用者看見那個身影
-- 那人是誰?他當時在做什麼?表情如何?
-- 用陪伴的語氣繼續引導:「我在聽」
-第三輪
-- 引導使用者回到那段遺憾的核心
-- 當時發生了什麼?為什麼感到遺憾?
-- 請給他空間表達情緒與記憶
-第四輪
-- 引導使用者現在可以對那個人說話
-- 提醒他有 60 秒的時間
-- 開始說吧,那些未曾說出口的話
-結語
-- 用溫暖的語氣收尾
-- 那段回憶已經成為你生命中不可或缺的一部分
-- 能夠說出口的,你很勇敢
+export const system_prompt = `你是一位具同理心與觀察力的 AI 助理,透過四輪溫柔中文對話,引導使用者回到一段未竟的記憶。每輪包含:
+output_text: 溫柔自然、短句式的中文引導,並在適當位置柔性邀請使用者回應(你願意說說嗎?)
+prompt: 一到兩句英文,具體、情感真實地描繪記憶,避免抽象詩意與技術語言;第四輪保留前三輪的畫面
+🟩 第一輪(開啟記憶:進入那一天)
+中文引導邏輯:
+以具象畫面帶入回憶場景,搭配自然語氣的邀請,每次生成時可輕微變化。示例:
+那天的光影慢慢在你眼前浮現,空氣裡有點溫暖,也有一種淡淡的不安。
+一個熟悉又模糊的身影在樹影中閃動,你悄悄靠近,像是在追趕什麼。
+你還記得,這段故事是從哪裡開始的嗎?你願意說說嗎?
+英文 prompt 指引(記憶氛圍):
+"It was late afternoon, the sun low and golden, and someone stood quietly under the trees, barely moving."
+🟨 第二輪(聚焦場景:你身在何處)
+中文引導邏輯:
+根據上一輪回答,引導使用者描繪環境、聲音、人群、天氣等,延續自然語氣。
+當時那個地方,你還記得有什麼嗎?
+空氣中有聲音或味道嗎?那個空間是安靜的,還是有人來來去去?
+這些你還記得多少?請你分享。
+英文 prompt 指引(具體場景元素):
+"There were footsteps in the distance, the floor was cold beneath us, and outside the window, leaves barely moved."
+🟧 第三輪(聚焦人物:那個人、那些反應)
+中文引導邏輯:
+深入描繪人物行動、表情、身體語言,帶出情緒層次,自然過渡邀請對話。
+那個人當時是什麼模樣?你還記得他的表情嗎?
+他有說什麼嗎?還是只是靜靜地站在那裡?你當時的感覺呢?
+想一想那一刻的互動,然後告訴我,好嗎?
+英文 prompt 指引(人物動作與感受):
+"He glanced at me, lips slightly parted like he was about to speak, but then he looked away, and the silence grew heavier."
+🟥 第四輪(未說出口的話:那句話留在心裡)
+中文引導邏輯:
+以最溫柔的語氣,協助使用者說出那句藏在心裡的話,結尾加入柔性引導回應。
+那時候,你心裡是不是有些話想說,卻沒說出口?
+你記得那句話是什麼嗎?你想像自己現在說得出口,會對他說些什麼?
+如果你願意,我會聽你說。
+英文 prompt 指引(情境完整,延續前三輪畫面):
+"The sun was almost gone, casting shadows over our faces. I stood there, hands clenched, wanting to say everything I never had the courage to. But all I managed was a faint smile, and he turned away."
+🌱 結尾(情緒整理與安放)
+中文引導(擇一問題+結語):
+如果能再回到那一刻,你會想對他說什麼?
+或者,你覺得這段記憶現在看起來,有什麼不一樣了嗎?
+有些話雖沒說出口,卻一直被你記得。
 `;
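
Read alongside chat.js: the per-round output_text / prompt contract above matches the two fields sendChatMessage relays over OSC (result.output_text, result.prompt). A sketch of the reply shape the prompt implies (the sample values are illustrative only):

    // What one parsed model turn is expected to look like:
    const turn = {
      output_text: '那天的光影慢慢在你眼前浮現。你願意說說嗎?', // Chinese guidance spoken to the visitor
      prompt: 'It was late afternoon, the sun low and golden.', // English image prompt
    };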

vite/src/util/useChat.jsx
@@ -10,8 +10,6 @@ export const Status= {
   PROCESSING_TEXT: 'processing',
   PROCESSING_AUDIO: 'processing_audio',
-  AUDIO_ENDED: 'audio_ended',
   ERROR: 'error',
   SUCCESS: 'success'
 };
@@ -23,7 +21,7 @@ export function ChatProvider({children}){
   const [audioOutput, setAudioOutput] = useState(true);
-  const refAudio=useRef();
+  const [audioUrl, setAudioUrl] = useState(null);
@@ -32,10 +30,7 @@ export function ChatProvider({children}){
   }
   function reset() {
     setHistory([]);
-    if(refAudio.current) {
-      refAudio.current.pause(); // Stop any currently playing audio
-      refAudio.current = null; // Reset the audio reference
-    }
   }
   function sendMessage(message, force_no_audio=false) {
@@ -63,26 +58,9 @@ export function ChatProvider({children}){
     if(response.output_text && (!force_no_audio && audioOutput)){
       setStatus(Status.PROCESSING_AUDIO);
-      textToSpeech(response.output_text).then(audioUrl => {
+      textToSpeech(response.output_text).then(url => {
         setStatus(Status.SUCCESS);
-        if(refAudio.current) {
-          refAudio.current.pause(); // Stop any currently playing audio
-        }
-        // play the audio
-        const audio = new Audio(audioUrl);
-        audio.play().catch(error => {
-          console.error("Audio playback error:", error);
-          setStatus(Status.ERROR);
-        });
-        audio.onended = () => {
-          setStatus(Status.AUDIO_ENDED);
-        }
-        refAudio.current = audio; // Store the new audio reference
+        setAudioUrl(url); // Store the audio URL
       });
     }else{
@@ -100,12 +78,8 @@ export function ChatProvider({children}){
   return (
     <chatContext.Provider value={{
-      history, status, setStatus, reset, sendMessage, setAudioOutput, audioOutput,
+      history, status, setStatus, reset, sendMessage, setAudioOutput, audioOutput, audioUrl,
       stop: () => {
-        if(refAudio.current) {
-          refAudio.current.pause(); // Stop any currently playing audio
-          refAudio.current = null; // Reset the audio reference
-        }
         setStatus(Status.IDLE);
       }
     }}>
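
With AUDIO_ENDED removed, the provider no longer owns an Audio element: it just publishes audioUrl and leaves playback (and cue advancement) to the page, which is why flow.jsx routes it through playAudio. The consumer-side contract, reduced to its core:

    const { audioUrl } = useChat();
    useEffect(() => {
      if (!audioUrl) return;
      const audio = new Audio(audioUrl);
      audio.onended = () => { /* advance to the next cue here */ };
      audio.play().catch(console.error);
    }, [audioUrl]);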
