Branch: main
Author: ULTRACOMBOS-DEV, 5 months ago
Parent: d87e628386
Commit: db7194a33e
Changed files:
1. vite/src-tauri/Cargo.toml (2 lines changed)
2. vite/src-tauri/src/lib.rs (2 lines changed)
3. vite/src/comps/numpad.jsx (16 lines changed)
4. vite/src/pages/flow_free.jsx (29 lines changed)

@@ -31,4 +31,4 @@ webview2-com = "0.37.0"
 windows = "0.61.1"
 tauri-plugin-fs = "2"
 tauri-plugin-opener = "2"
-enttecopendmx ="0.1.0"
+enttecopendmx ="0.1.0"

@@ -119,7 +119,7 @@ fn send_dmx_message(message: &str) -> Result<(), String> {
 }
 if let Some(ref mut dmx) = LIGHT {
-dmx.set_channel(1, val);
+dmx.set_channel(3, val);
 dmx.render().unwrap();
 } else {
 return Err("DMX interface not available".into());
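
For context, a minimal sketch of how the web side could call this handler, assuming send_dmx_message is registered as a #[tauri::command] (its registration is not part of this diff); setLightLevel and the value 127 are illustrative only:

    import { invoke } from "@tauri-apps/api/core"; // Tauri v2 JS API

    // Hypothetical helper: forwards a level string to the Rust command shown above.
    async function setLightLevel(value) {
      try {
        await invoke("send_dmx_message", { message: String(value) });
      } catch (err) {
        // The Rust side returns Err("DMX interface not available") when no Enttec device is open.
        console.error("DMX send failed:", err);
      }
    }

    setLightLevel(127); // example call with a placeholder level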

@@ -3,6 +3,16 @@ import { use, useEffect, useRef, useState } from "react";
+const KEY_ENTER='q';
+const KEY_BACKSPACE='a';
+const TMP_MAP_KEY={
+1:3,
+3:1,
+4:6,
+6:4,
+9:7,
+7:9,
+}
 export default function NumPad({onSend}){
 const [input, _setInput]=useState();
@@ -20,7 +30,7 @@ export default function NumPad({onSend}){
 const refAudio=useRef([]);
 function onkeydown(e){
-// console.log(e.key);
+console.log(e.key);
 if(e.key===KEY_ENTER){
@@ -35,11 +45,11 @@ export default function NumPad({onSend}){
 return;
 }
-const numKey = parseInt(e.key);
+const numKey = TMP_MAP_KEY[parseInt(e.key)] || parseInt(e.key);
 if(isNaN(numKey) || numKey < 0 || numKey > 9) return; // Ignore non-numeric keys
 refAudio.current[numKey]?.play();
-setInput((prev)=>`${prev||''}${e.key}`);
+setInput((prev)=>`${prev||''}${numKey}`);
 }
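
For clarity, the remapping those new lines perform, pulled out into a standalone sketch; remapDigit is a hypothetical helper, not part of the component:

    // TMP_MAP_KEY swaps the 1/3, 4/6 and 7/9 keys (e.g. to compensate for a rotated
    // physical keypad); any other digit falls through unchanged.
    const TMP_MAP_KEY = { 1: 3, 3: 1, 4: 6, 6: 4, 9: 7, 7: 9 };

    function remapDigit(key) {
      const n = parseInt(key);
      if (isNaN(n) || n < 0 || n > 9) return null; // ignore non-numeric keys, as onkeydown does
      return TMP_MAP_KEY[n] || n;                  // same fallback the diff uses
    }

    // remapDigit('1') === 3, remapDigit('5') === 5, remapDigit('x') === null

The component then appends the remapped digit (numKey) rather than the raw e.key, so the accumulated input matches the remapped value that triggered the audio cue.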

@@ -46,7 +46,7 @@ export function FreeFlow(){
const [userId, setUserId] = useState();
const [summary, setSummary] = useState(null);
const [voice, setVoice] = useState(Voice.ONYX);
//const [speechPaused, setSpeechPaused]=useState(false);
const [chatStatus, setChatStatus] = useState(ChatStatus.System); // System, User, Processing
@@ -55,6 +55,9 @@ export function FreeFlow(){
const refInput=useRef();
const refLight=useRef();
const refPauseTimer=useRef();
const refSpeechPaused=useRef(false);
const refContainer=useRef();
const refCurrentCue= useRef(null);
@@ -194,6 +197,9 @@ export function FreeFlow(){
}else{
sendOsc(OSC_ADDRESS.COUNTDOWN, '0'); // Reset countdown for non-chat cues
}
if(refPauseTimer.current) clearTimeout(refPauseTimer.current);
refSpeechPaused.current=false;
}
function onCueEnd() {
@@ -232,6 +238,8 @@ export function FreeFlow(){
function onNumpad(mess){
console.log('Numpad input:', mess);
if(refCurrentCue.current?.callback!='numpad') return;
console.log('Numpad input:', mess);
@@ -252,9 +260,22 @@ export function FreeFlow(){
function onSpeechEnd(){
if(currentCue?.type!='chat') return; // Only process if current cue is user input
console.log('onSpeechEnd:', finalTranscript);
console.log('on speech end, start timer');
refSpeechPaused.current=true;
if(refPauseTimer.current) clearTimeout(refPauseTimer.current);
refPauseTimer.current=setTimeout(()=>{
if(refSpeechPaused.current) processSpeech();
}, data.speech_idle_time);
}
function processSpeech(){
if(currentCue?.type!='chat') return; // Only process if current cue is user input
console.log('processSpeech:', finalTranscript);
if(autoSend && transcript.trim().length > 0) {
console.log('Auto sending transcript:', transcript);
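
The hunk above splits the old onSpeechEnd into two steps: arming an idle timer, and a separate processSpeech that only fires after data.speech_idle_time of silence. A standalone sketch of that pattern, with hypothetical names (createSpeechIdleWatchdog is not in the codebase):

    // Arms a timer when speech ends; if speech resumes before idleMs elapses, the
    // paused flag is cleared and the pending callback becomes a no-op, mirroring how
    // refSpeechPaused / refPauseTimer are used in FreeFlow.
    function createSpeechIdleWatchdog(onIdle, idleMs) {
      let timer = null;
      let paused = false;
      return {
        speechEnded() {            // wire to recognition.onspeechend
          paused = true;
          if (timer) clearTimeout(timer);
          timer = setTimeout(() => { if (paused) onIdle(); }, idleMs);
        },
        speechStarted() {          // wire to recognition.onspeechstart
          paused = false;          // the pending timer now does nothing when it fires
        },
      };
    }

    // Usage sketch: const watchdog = createSpeechIdleWatchdog(processSpeech, data.speech_idle_time);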
@@ -289,7 +310,7 @@ export function FreeFlow(){
const recognition= SpeechRecognition.getRecognition();
recognition.onspeechstart=(e)=>{
console.log('Sound start:', e);
refSpeechPaused.current=false;
};
}else{
