Branch: main
reng committed 6 months ago
parent 1e4857a8a2
commit e7b0c6b8d8
1. vite/src-tauri/Cargo.lock (2 changes)
2. vite/src-tauri/Cargo.toml (2 changes)
3. vite/src-tauri/capabilities/default.json (4 changes)
4. vite/src-tauri/src/lib.rs (33 changes)
5. vite/src-tauri/tauri.conf.json (2 changes)
6. vite/src/App.jsx (69 changes)

vite/src-tauri/Cargo.lock

@@ -104,6 +104,8 @@ dependencies = [
  "tauri-plugin-http",
  "tauri-plugin-log",
  "tokio",
+ "webview2-com",
+ "windows",
 ]

 [[package]]

vite/src-tauri/Cargo.toml

@@ -27,3 +27,5 @@ tauri-plugin-http = "2"
 dotenv = "0.15.0"
 rosc = "0.11.4"
 tokio = { version = "1.45.1", features = ["net"] }
+webview2-com = "0.37.0"
+windows = "0.61.1"

vite/src-tauri/capabilities/default.json

@@ -7,6 +7,10 @@
   ],
   "permissions": [
     "core:default",
+    "core:window:default",
+    "core:app:default",
+    "core:resources:default",
+    "core:webview:default",
     {
       "identifier": "http:default",
       "allow": [{ "url": "https://*.openai.com" }]

vite/src-tauri/src/lib.rs

@@ -3,7 +3,12 @@ use std::env;
 use rosc::{encoder, OscMessage, OscPacket, OscType};
 use std::{net::SocketAddrV4, str::FromStr};
 use tokio::net::UdpSocket;
+use webview2_com::Microsoft::Web::WebView2::Win32::{
+    ICoreWebView2Profile4, ICoreWebView2_13, COREWEBVIEW2_PERMISSION_KIND_MICROPHONE,
+    COREWEBVIEW2_PERMISSION_STATE_DEFAULT,
+};
+use windows::core::{Interface, PCWSTR};
 use tauri::{AppHandle, Manager};

 #[tauri::command]
 fn get_env(name: &str) -> String {
@@ -46,6 +51,30 @@ async fn send_osc_message(
     Ok(())
 }

+#[tauri::command]
+fn reset_permission(origin: &str, app: AppHandle) {
+    let webview = app.get_webview_window("main").unwrap();
+    let mut origin = origin.to_string();
+    origin.push('\0');
+    let origin = origin.encode_utf16().collect::<Vec<u16>>();
+    webview
+        .with_webview(move |webview| unsafe {
+            let core = webview.controller().CoreWebView2().unwrap();
+            let core = Interface::cast::<ICoreWebView2_13>(&core).unwrap();
+            let profile = core.Profile().unwrap();
+            let profile = Interface::cast::<ICoreWebView2Profile4>(&profile).unwrap();
+            let origin = PCWSTR::from_raw(origin.as_ptr());
+            profile
+                .SetPermissionState(
+                    COREWEBVIEW2_PERMISSION_KIND_MICROPHONE,
+                    origin,
+                    COREWEBVIEW2_PERMISSION_STATE_DEFAULT,
+                    None,
+                )
+                .unwrap();
+        })
+        .unwrap();
+}
+
 #[cfg_attr(mobile, tauri::mobile_entry_point)]
 pub fn run() {
@@ -53,7 +82,7 @@ pub fn run() {
     dotenv().ok();
     tauri::Builder::default()
-        .invoke_handler(tauri::generate_handler![get_env, send_osc_message])
+        .invoke_handler(tauri::generate_handler![get_env, send_osc_message, reset_permission])
         .plugin(tauri_plugin_http::init())
         .setup(|app| {
             if cfg!(debug_assertions) {
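The new reset_permission command casts the WebView2 core to ICoreWebView2_13, walks to its profile, and sets the microphone permission for the given origin back to COREWEBVIEW2_PERMISSION_STATE_DEFAULT, so the next getUserMedia() call prompts the user again instead of reusing a remembered decision. A minimal sketch of how the frontend could call it (the helper name and the use of window.location.origin are assumptions, not part of this commit):

    import { invoke } from '@tauri-apps/api/core';

    // Hypothetical helper (not in this commit): clear the remembered mic
    // decision for the current page origin so WebView2 prompts again.
    async function resetMicPermission() {
      await invoke('reset_permission', { origin: window.location.origin });
    }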

vite/src-tauri/tauri.conf.json

@@ -20,7 +20,7 @@
       }
     ],
     "security": {
-      "csp": null
+      "csp": "default-src blob: data: filesystem: ws: http: https: 'self'"
     }
   },
   "bundle": {

vite/src/App.jsx

@@ -5,7 +5,7 @@ import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';
 import { textToSpeech } from './util/tts';
 import { gsap } from "gsap";
 import { SplitText } from 'gsap/SplitText';
-import { set } from 'zod';
+import { invoke } from '@tauri-apps/api/core';

 gsap.registerPlugin(SplitText);
@@ -28,6 +28,8 @@ function App() {
     finalTranscript,
     listening,
     resetTranscript,
+    browserSupportsSpeechRecognition,
+    isMicrophoneAvailable,
   }=useSpeechRecognition();
@@ -52,12 +54,11 @@ function App() {
       // add to history
-      setHistory(prev => [...prev, {
+      setHistory(() => [{
         role: 'assistant',
         content: data.output_text,
       }]);
-      setPrompt([
-        ...prompt,
+      setPrompt(()=>[
         data.prompt,
       ]);
@@ -81,9 +82,22 @@ function App() {
   }

   function toggleAudio() {
     // console.log("onclickAudio");
+    console.log("onclickAudio", listening, browserSupportsSpeechRecognition, isMicrophoneAvailable);
+    if(!browserSupportsSpeechRecognition) {
+      console.warn("Browser does not support speech recognition.");
+      return;
+    }
+    if(!isMicrophoneAvailable) {
+      console.warn("Microphone is not available.");
+      return;
+    }
     if(!listening){
-      SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' });
+      SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' }).then(() => {
+        console.log("Speech recognition started.");
+      }).catch(error => {
+        console.error("Error starting speech recognition:", error);
+      });
     }else{
       SpeechRecognition.stopListening();
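One caveat with the new guards: react-speech-recognition starts with isMicrophoneAvailable as true and only flips it to false after a start attempt fails, so the first click can still hit a permission error inside the .catch(). The commented-out getUserMedia probe added further down suggests the same concern; a minimal modern-API sketch of such a probe (ensureMicAccess is a hypothetical helper, not part of this commit):

    // Hypothetical helper (not in this commit): request the mic explicitly so
    // any permission error surfaces before startListening() is attempted.
    async function ensureMicAccess() {
      try {
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        stream.getTracks().forEach(track => track.stop()); // release the device
        return true;
      } catch (err) {
        console.warn('Microphone access denied:', err);
        return false;
      }
    }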
@@ -129,17 +143,14 @@ function App() {
       console.log('get reply: ', data, new Date(Date.now()-startTime).toISOString().slice(11, 19));
       // add to history
-      setHistory(prev => [...prev, {
-        role: 'assistant',
-        content: data.output_text,
-      }]);
       setPrompt([
         ...prompt,
         data.prompt,
       ]);
-      setShowProcessing(false);
       // tts
       console.log('create speech:', data.output_text);
@@ -147,6 +158,11 @@ function App() {
       const audio = new Audio(audioUrl);
       console.log('play audio...', new Date(Date.now()-startTime).toISOString().slice(11, 19));
+      setShowProcessing(false);
+      setHistory(prev => [...prev, {
+        role: 'assistant',
+        content: data.output_text,
+      }]);
       audio.play().catch(error => {
         console.error('Audio playback failed:', error);
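Presumably the point of moving setShowProcessing and the setHistory append down here is to delay showing the reply until the TTS audio is ready, so the SplitText reveal of the new history item starts roughly in sync with playback.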
@@ -182,8 +198,10 @@ function App() {
     if(history.length === 0) return;
     let last_item=document.querySelector('.last_history');
-    console.log('last_item', last_item);
+    if(!last_item) return;
+    if(last_item.classList.contains('user')) return;
+    console.log('last_item', last_item);
     let split=SplitText.create(last_item, {
       type: "chars",
@@ -195,9 +213,9 @@ function App() {
     }, {
       opacity: 1,
       y: 0,
-      duration: 1,
+      duration: 0.5,
       ease: "steps(1)",
-      stagger: 0.05
+      stagger: 0.1
     });
@@ -239,6 +257,27 @@ function App() {
     }
   },[finalTranscript]);

+  useEffect(()=>{
+    console.log('window.SpeechRecognition=', window.SpeechRecognition || window.webkitSpeechRecognition);
+    // if (navigator.getUserMedia){
+    //   navigator.getUserMedia({audio:true},
+    //     function(stream) {
+    //       // start_microphone(stream);
+    //       console.log('Microphone access granted.');
+    //     },
+    //     function(e) {
+    //       alert('Error capturing audio.');
+    //     }
+    //   );
+    // } else { alert('getUserMedia not supported in this browser.'); }
+  },[]);
+
   return (
@@ -261,7 +300,7 @@ function App() {
       ):(
         history.map((item, index) => (
           <div key={index} className={`p-2 rounded border-4 ${item.role === 'user' ? 'bg-gray-100' : 'bg-yellow-100'}`}>
-            <p className={`text-lg whitespace-pre-wrap history_item ${index==history?.length-1 && 'last_history'}`}>{item.content}</p>
+            <p className={`text-lg whitespace-pre-wrap history_item ${index==history?.length-1 && item.role!='user' && 'last_history'}`}>{item.content}</p>
           </div>
         ))
       )}
