update flow

main
reng 3 months ago
parent d465eadc6a
commit 3e0686a55c
  1. 2
      vite/public/default.json
  2. 3
      vite/src/comps/timer.jsx
  3. 119
      vite/src/pages/flow_free.jsx

@ -1,7 +1,5 @@
{
"system_prompt": "你是一位具有同理心的 AI 助理,透過溫柔的語氣,引導使用者回想並表達一段內心的遺憾或未竟之事,從場景、人物、動作到感受。\n你的任務是協助使用者逐步揭開這段記憶的情緒層次,並在每一階段輸出一句英文圖像生成簡短的 Prompt,讓這段過往漸漸具象為一幅畫面。\n以溫柔、自然、短問句,台灣語境的繁體中文引導,每次只回應一個問題。",
"welcome_prompt": "請開始用問句引導使用者回想一段內心的遺憾的場景。使用台灣語境的繁體中文。",
"last_prompt": "請用一句話為這段對話簡短的收尾,並邀請使用者在 60 秒的時間內,實際說出對遺憾對象想說的話。使用台灣語境的繁體中文。",
"voice": "nova",
"voice_prompt": "Speak as a gentle, grounded Taiwanese narrator with a warm local accent. Use a soft, soothing, and deeply compassionate tone, with slow and deliberate pacing. Pause often between phrases and within sentences, as if listening and breathing with the listener. Convey patient attentiveness—not rushing to comfort, but quietly staying present. Pronounce each word clearly, softly, and slightly slowly, letting every word land with warmth and care.",
"summary_prompt": "請將這段口白的核心情感,轉化為一句不超過 50 字的抽象化描述。這句話應保有距離感,只勾勒出情感的輪廓,同時暗示著一種持續前行、未完待續的狀態,語氣平實。",

@ -64,7 +64,8 @@ export const Countdown=forwardRef(({time, callback, auto,clientId, ...props}, re
useImperativeHandle(ref, () => ({
restart,
stop,
stop,
remainingTime: refTime.current,
}));
return (

@ -130,22 +130,34 @@ export function FreeFlow(){
// Handle OSC messages here
}
function sendPrompt(){
// send prompt
let raw_prompt=history[history.length-1]?.prompt || '';
if(raw_prompt && raw_prompt.trim() !== '') {
const prompt = `${data?.sd_prompt_prefix || ''}${raw_prompt}${data?.sd_prompt_suffix || ''}`;
updatePrompt(prompt);
sendOsc(OSC_ADDRESS.PROMPT, prompt);
// play audio for prompt
refAudioPrompt.current?.play().catch(error => {
console.error("Audio prompt playback error:", error);
});
function playAudio(url){
if(!url) return;
refAudioPrompt.current.onended = () => {
console.log('Audio prompt ended, setting chat status to User');
setChatStatus(ChatStatus.User); // Set chat status to User after audio ends
}
//TODO: if cue end, don't play audio
if(refCurrentCue.current?.type=='chat'){
// if(refChatCueEnd.current) {
// console.log('Chat cue has ended, not playing audio:', url);
// setChatStatus(ChatStatus.Clear); // Reset chat status to Clear
// onCueEnd();
// return;
// }
// if audio time larger than cue remaining time, don't play audio
}else{
setChatStatus(()=>ChatStatus.User); // Reset chat status to User after audio ends
}
}
function playAudio(url){
if(!url) return;
console.log('Playing audio:', url);
@ -158,18 +170,52 @@ export function FreeFlow(){
// console.log('Using voice:', voice, 'for audio:', audioUrl);
const audio = new Audio(audioUrl);
audio.loop=refCurrentCue.current?.loop || false; // Set loop if defined in cue
audio.play().catch(error => {
console.error("Audio playback error:", error);
});
//TODO: if cue end, don't play audio
if(refCurrentCue.current?.type=='chat'){
// if(refChatCueEnd.current) {
// console.log('Chat cue has ended, not playing audio:', url);
// setChatStatus(ChatStatus.Clear); // Reset chat status to Clear
// onCueEnd();
// return;
// }
}
audio.loop=refCurrentCue.current?.loop || false; // Set loop if defined in cue
audio.addEventListener("loadedmetadata", () => {
if(refCurrentCue.current?.type!='chat' && refCurrentCue.current?.type!='user_input') {
refTimer.current?.restart(audio.duration*1000 || 0);
audio.play().catch(error => {
console.error("Audio playback error:", error);
});
}else{
if(refCurrentCue.current?.type=='chat') setChatStatus(()=>ChatStatus.System);
else setChatStatus(()=>ChatStatus.Playing);
if(refCurrentCue.current?.type=='chat'){
if(refTimer.current?.remainingTime < audio.duration*1000) {
console.log('Audio duration is longer than remaining cue time, not playing audio:', url);
// send prompt
sendPrompt();
return;
}else{
setChatStatus(()=>ChatStatus.System);
audio.play().catch(error => {
console.error("Audio playback error:", error);
});
}
}else{
setChatStatus(()=>ChatStatus.Playing);
audio.play().catch(error => {
console.error("Audio playback error:", error);
});
}
}
});
@ -186,29 +232,8 @@ export function FreeFlow(){
sendOsc(OSC_ADDRESS.STATUS, 'go'); // Send OSC status message
}
// send prompt
let raw_prompt=history[history.length-1]?.prompt || '';
if(raw_prompt && raw_prompt.trim() !== '') {
const prompt = `${data?.sd_prompt_prefix || ''}${raw_prompt}${data?.sd_prompt_suffix || ''}`;
updatePrompt(prompt);
sendOsc(OSC_ADDRESS.PROMPT, prompt);
// play audio for prompt
refAudioPrompt.current?.play().catch(error => {
console.error("Audio prompt playback error:", error);
});
refAudioPrompt.current.onended = () => {
console.log('Audio prompt ended, setting chat status to User');
setChatStatus(ChatStatus.User); // Set chat status to User after audio ends
}
}else{
setChatStatus(()=>ChatStatus.User); // Reset chat status to User after audio ends
}
sendPrompt();
}
}
@ -385,11 +410,11 @@ export function FreeFlow(){
const cue= refCurrentCue.current; // Get the current cue from ref
if(cue.type=='chat'){
if(chatStatus==ChatStatus.System) {
console.log('Still talking...');
refChatCueEnd.current=true;
return;
}
// if(chatStatus==ChatStatus.System) {
// console.log('Still talking...');
// refChatCueEnd.current=true;
// return;
// }
console.log('save chat history:', history);
uploadHistory(history); // Save chat history when cue ends
}
@ -631,6 +656,8 @@ export function FreeFlow(){
useEffect(()=>{
if(refCurrentCue.current?.type!='chat') return;
if(audioUrl) playAudio(audioUrl);
},[audioUrl]);

Loading…
Cancel
Save