diff --git a/DEPLOYMENT.md b/DEPLOYMENT.md new file mode 100644 index 00000000..69c93206 --- /dev/null +++ b/DEPLOYMENT.md @@ -0,0 +1,113 @@ +# Deployment Guide: Running Amica Locally + +This guide provides step-by-step instructions for setting up and running the Rust-powered version of Amica on your local machine. + +## 1. Prerequisites + +Before you begin, you need to have the following software installed on your system: + +* **Node.js:** Amica's user interface is built with Node.js. You will need version `18.18.0` or newer. You can download it from the [official Node.js website](https://nodejs.org/). +* **Rust:** The new backend is written in Rust. The easiest way to install Rust is by using `rustup`. You can find instructions at the [official Rust website](https://www.rust-lang.org/tools/install). +* **`text-generation-webui`:** You must have a working, pre-compiled version of `text-generation-webui`. You can find releases and setup instructions on its [GitHub repository](https://github.com/oobabooga/text-generation-webui). + > **Important:** When you launch `text-generation-webui`, you must enable the API with the `--api` flag. For example: `./start_linux.sh --api`. +* **(Linux Only) Build Dependencies:** On Linux, you will need to install a few extra packages for Tauri to build correctly. You can install them with the following command: + ```bash + sudo apt-get update + sudo apt-get install -y libwebkit2gtk-4.1-dev build-essential curl wget libssl-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev + ``` + > **Note:** This project uses Tauri v2 (see the versions pinned in `package.json` and `src-tauri/Cargo.toml`), which requires `libwebkit2gtk-4.1-dev`. If you are working on a project that still uses Tauri v1, you will need to use `libwebkit2gtk-4.0-dev` instead. + +## 2. Installation and Configuration + +Follow these steps to get the Amica project set up. 
+ +#### Step 1: Clone the Amica Repository + +Open your terminal, navigate to where you want to store the project, and run the following command: + +```bash +git clone https://github.com/semperai/amica +cd amica +``` + +#### Step 2: Install JavaScript Dependencies + +Once you are in the `amica` directory, run this command to install all the necessary frontend packages: + +```bash +npm install +``` + +#### Step 3: Configure the `text-generation-webui` Path + +Amica needs to know where to find your `text-generation-webui` executable. This is configured in a `settings.json` file. + +##### How Configuration Works + +Amica uses a default, bundled configuration file to start. To customize the settings, you must create your own `settings.json` file and place it in the correct application configuration directory for your operating system. + +When Amica starts, it looks for `settings.json` in this order: +1. **Your Custom `settings.json`:** It checks for the file in your OS's standard application config directory. +2. **Default `settings.json`:** If no custom file is found, it falls back to the default settings bundled inside the application. The default has an empty path, so you **must** create a custom file. + +##### Creating Your Custom `settings.json` + +1. First, you need to find your application's configuration directory. The paths are typically: + * **Windows:** `%APPDATA%\\com.heyamica.dev` (you can paste this into the Explorer address bar) + * **macOS:** `~/Library/Application Support/com.heyamica.dev` + * **Linux:** `~/.config/com.heyamica.dev` + + *(Note: The `com.heyamica.dev` directory might not exist until you run Amica at least once.)* + +2. Create a new file named `settings.json` inside that directory. + +3. Copy and paste the following content into your new `settings.json` file: + ```json + { + "text_generation_webui_path": "" + } + ``` + +4. Add the **full path** to your `text-generation-webui` executable inside the quotes. 
+ + * **Windows Example:** + ```json + { + "text_generation_webui_path": "C:\\Users\\YourUser\\Desktop\\text-generation-webui\\start.bat" + } + ``` + *(Note the double backslashes `\\`)* + + * **Linux/macOS Example:** + ```json + { + "text_generation_webui_path": "/home/youruser/text-generation-webui/start.sh" + } + ``` + +If Amica ever has trouble starting, it will show a dialog box explaining the configuration error. This usually means there's a typo in your `settings.json` file or the path to the executable is incorrect. + +## 3. Building the Application + +Now that everything is configured, you can build the final, standalone executable. + +Run the following command in your terminal. This process will compile the Rust backend and package it with the frontend into a single application. It may take several minutes. + +```bash +npm run tauri build +``` + +Once the build is complete, you will find the final application inside the `src-tauri/target/release/bundle/` directory, organized by platform and package type: +* **Windows:** The installer can be found under the `msi/` subdirectory, and the portable `.exe` under the `nsis/` subdirectory (or a similar name). +* **Linux:** The `.AppImage` can be found under the `appimage/` subdirectory, and the `.deb` package under the `deb/` subdirectory. +* **macOS:** The `.app` file is under the `macos/` subdirectory, and the `.dmg` installer is under the `dmg/` subdirectory. + +## 4. Running Amica + +You can now run this executable file directly! There is no need for any further commands. + +On the first run, be sure to open the in-app settings and configure the following: +* **Chatbot Backend:** Select **ChatGPT**. +* In the ChatGPT settings, you may need to enter a dummy API key (e.g., "123") for the UI to proceed, but the key itself is not used by the proxy. + +That's it! Your self-contained, Rust-powered Amica application is now ready to use. 
diff --git a/package.json b/package.json index abe8c621..018c502f 100644 --- a/package.json +++ b/package.json @@ -71,8 +71,8 @@ }, "devDependencies": { "@gltf-transform/core": "^4.0.10", - "@tauri-apps/api": "^1.6.0", - "@tauri-apps/cli": "^1.6.2", + "@tauri-apps/api": "2.0.0-beta.13", + "@tauri-apps/cli": "2.0.0-beta.8", "@types/dom-speech-recognition": "^0.0.4", "@types/file-saver": "^2.0.7", "@types/formidable": "^3.4.5", diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index e1516c74..215215c0 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -17,7 +17,9 @@ tauri-build = { version = "1.5.5", features = [] } [dependencies] serde_json = "1.0.128" serde = { version = "1.0.210", features = ["derive"] } -tauri = { version = "1.8.0", features = [ "macos-private-api", "system-tray", "shell-open"] } +tauri = { version = "2.0.0-beta.21", features = [ "macos-private-api", "system-tray"] } +reqwest = { version = "0.12.5", default-features = false, features = ["json", "rustls-tls"] } +futures-util = "0.3.30" [features] # this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled. 
diff --git a/src-tauri/resources/settings.json b/src-tauri/resources/settings.json new file mode 100644 index 00000000..ed9d2e1f --- /dev/null +++ b/src-tauri/resources/settings.json @@ -0,0 +1,3 @@ +{ + "text_generation_webui_path": "" +} diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs index ae4e63ae..cb414fed 100644 --- a/src-tauri/src/main.rs +++ b/src-tauri/src/main.rs @@ -2,52 +2,357 @@ #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] use tauri::{ - CustomMenuItem, - SystemTray, - SystemTrayEvent, - SystemTrayMenu, - SystemTrayMenuItem, + api::process::{Command, CommandEvent}, + CustomMenuItem, Manager, SystemTray, SystemTrayEvent, SystemTrayMenu, SystemTrayMenuItem, }; -use tauri::Manager; +use futures_util::StreamExt; +use std::collections::HashSet; +use std::fs; +use std::path::PathBuf; +use std::sync::{ + atomic::{AtomicBool, Ordering}, + Arc, Mutex, +}; +use tauri::api::{dialog, path}; + +#[derive(serde::Deserialize, Clone)] +struct Settings { + text_generation_webui_path: String, +} + +// the payload type must implement `Serialize` and `Clone`. +#[derive(Clone, serde::Serialize)] +struct Payload { + message: String, +} + +struct AppState { + child_process: Mutex>, + is_terminating: Arc, +} + +fn show_error_and_exit(handle: &tauri::AppHandle, title: &str, message: &str) { + dialog::message(handle.get_window("main").as_ref(), title, message); + std::process::exit(1); +} + +fn shutdown_sidecar(handle: &tauri::AppHandle) { + let app_state = handle.state::(); + + // Use compare_exchange to ensure the shutdown logic runs only once. + if app_state + .is_terminating + .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst) + .is_ok() + { + if let Some(mut child) = app_state.child_process.lock().unwrap().take() { + // First, try to see if the process has already exited. + match child.try_wait() { + Ok(Some(_)) => { + // Process already exited, nothing to do. 
+ } + Ok(None) => { + // Process is still running, so kill it and wait for it to be reaped. + if let Err(e) = child.kill() { + eprintln!("Failed to kill sidecar process: {}", e); + } + if let Err(e) = child.wait() { + eprintln!("Failed to wait for sidecar process to exit: {}", e); + } + } + Err(e) => { + eprintln!("Error calling try_wait on sidecar process: {}", e); + } + } + } + } +} + +fn validate_and_sanitize_path(path: &str) -> Result { + // Reject any input that contains "://" or starts with "http" or contains ".." or null bytes + if path.contains("://") || path.contains("..") || path.contains('\0') || path.trim().to_lowercase().starts_with("http") { + return Err(format!("Invalid path '{}': contains malicious patterns.", path)); + } + + // Normalize/removing leading slashes + let sanitized_path = path.trim_start_matches('/').to_string(); + + // Enforce an allowlist of known good endpoints + let allowlist: HashSet<&str> = [ + "v1/chat/completions", + ].iter().cloned().collect(); + + if !allowlist.contains(sanitized_path.as_str()) { + return Err(format!("Invalid path '{}': not in allowlist.", path)); + } + + Ok(sanitized_path) +} + +#[tauri::command] +async fn quit_app(handle: tauri::AppHandle) { + shutdown_sidecar(&handle); + handle.exit(0); +} #[tauri::command] async fn close_splashscreen(window: tauri::Window) { // Close splashscreen - window.get_window("splashscreen").expect("no window labeled 'splashscreen' found").close().unwrap(); + if let Some(splashscreen) = window.get_window("splashscreen") { + splashscreen.close().unwrap(); + } // Show main window - window.get_window("main").expect("no window labeled 'main' found").show().unwrap(); + window.get_window("main").unwrap().show().unwrap(); } +#[derive(serde::Deserialize)] +struct ProxyRequestPayload { + path: String, + body: serde_json::Value, + authorization: Option, +} -fn main() { - tauri::Builder::default() - .system_tray(SystemTray::new() - .with_menu(SystemTrayMenu::new() - 
.add_item(CustomMenuItem::new("checkforupdates".to_string(), "Check for updates")) - .add_native_item(SystemTrayMenuItem::Separator) - .add_item(CustomMenuItem::new("help".to_string(), "Help")) - .add_native_item(SystemTrayMenuItem::Separator) - .add_item(CustomMenuItem::new("quit".to_string(), "Quit")) - ) - ) - .on_system_tray_event(|app, event| match event { - SystemTrayEvent::MenuItemClick { id, .. } => { - match id.as_str() { - "quit" => { - std::process::exit(0); - } - "checkforupdates" => { - tauri::api::shell::open(&app.shell_scope(), "https://github.com/semperai/amica/releases/latest", None).expect("failed to open url"); - } - "help" => { - tauri::api::shell::open(&app.shell_scope(), "https://docs.heyamica.com", None).expect("failed to open url"); - } - _ => {} +#[derive(Clone, serde::Serialize)] +struct StreamChunkPayload { + chunk: String, +} + +#[derive(Clone, serde::Serialize)] +struct StreamErrorPayload { + error: String, +} + +#[tauri::command] +async fn proxy_request_streaming( + handle: tauri::AppHandle, + payload: ProxyRequestPayload, +) -> Result<(), String> { + let sanitized_path = validate_and_sanitize_path(&payload.path)?; + let client = reqwest::Client::new(); + let url = format!("http://127.0.0.1:5000/{}", sanitized_path); + + let mut request_builder = client.post(&url); + if let Some(auth) = payload.authorization { + request_builder = request_builder.header("Authorization", format!("Bearer {}", auth)); + } + + let res = request_builder + .json(&payload.body) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + let status = res.status(); + let text = res.text().await.map_err(|e| e.to_string())?; + return Err(format!( + "API request to {} failed with status {}: {}", + url, status, text + )); + } + + let mut stream = res.bytes_stream(); + + tauri::async_runtime::spawn(async move { + while let Some(chunk_result) = stream.next().await { + match chunk_result { + Ok(chunk) => { + let s = 
String::from_utf8_lossy(&chunk).to_string(); + if let Err(e) = handle.emit_all("stream-chunk", StreamChunkPayload { chunk: s }) { + eprintln!("Failed to emit stream chunk: {}", e); + break; + } + } + Err(e) => { + let error_message = format!("Error reading stream: {}", e); + let _ = handle.emit_all("stream-error", StreamErrorPayload { error: error_message }); + break; + } + } } - } - _ => {} - }) - .invoke_handler(tauri::generate_handler![close_splashscreen]) - .run(tauri::generate_context!()) - .expect("error while running tauri application"); + let _ = handle.emit_all("stream-end", ()); + }); + + Ok(()) +} + +#[tauri::command] +async fn proxy_request_blocking(payload: ProxyRequestPayload) -> Result { + let sanitized_path = validate_and_sanitize_path(&payload.path)?; + let client = reqwest::Client::new(); + // This port should be configurable in the future. + let url = format!("http://127.0.0.1:5000/{}", sanitized_path); + + let mut request_builder = client.post(&url); + if let Some(auth) = payload.authorization { + request_builder = request_builder.header("Authorization", format!("Bearer {}", auth)); + } + + let res = request_builder + .json(&payload.body) + .send() + .await + .map_err(|e| e.to_string())?; + + if res.status().is_success() { + res.json::() + .await + .map_err(|e| e.to_string()) + } else { + let status = res.status(); + let text = res.text().await.map_err(|e| e.to_string())?; + Err(format!( + "API request to {} failed with status {}: {}", + url, status, text + )) + } +} + +fn main() { + let app_state = AppState { + child_process: Mutex::new(None), + is_terminating: Arc::new(AtomicBool::new(false)), + }; + + tauri::Builder::default() + .manage(app_state) + .setup(|app| { + let handle = app.handle().clone(); + + // Load settings + let config_dir = match path::app_config_dir(&handle.config()) { + Some(dir) => dir, + None => { + show_error_and_exit(&handle, "Fatal Error", "Could not determine the application config directory."); + return Ok(()); // 
Unreachable but needed for type check + } + }; + + let settings_path_in_config = config_dir.join("settings.json"); + + let settings_str = if settings_path_in_config.exists() { + match fs::read_to_string(&settings_path_in_config) { + Ok(s) => s, + Err(e) => { + let msg = format!("Failed to read settings.json from config directory ({}): {}", settings_path_in_config.display(), e); + show_error_and_exit(&handle, "Configuration Error", &msg); + return Ok(()); + } + } + } else { + let resource_path = match handle.path_resolver().resolve_resource("resources/settings.json") { + Some(path) => path, + None => { + show_error_and_exit(&handle, "Fatal Error", "Could not resolve bundled settings.json path."); + return Ok(()); + } + }; + match fs::read_to_string(resource_path) { + Ok(s) => s, + Err(e) => { + let msg = format!("Failed to read bundled settings.json: {}", e); + show_error_and_exit(&handle, "Fatal Error", &msg); + return Ok(()); + } + } + }; + + let settings: Settings = match serde_json::from_str(&settings_str) { + Ok(s) => s, + Err(e) => { + let msg = format!("Failed to parse settings.json: {}. 
Please check for syntax errors.", e); + show_error_and_exit(&handle, "Configuration Error", &msg); + return Ok(()); + } + }; + + // Validate path + let executable_path = PathBuf::from(&settings.text_generation_webui_path); + if settings.text_generation_webui_path.is_empty() || !executable_path.is_file() { + let msg = format!("The path specified in settings.json is either empty or does not point to a valid file: '{}'", settings.text_generation_webui_path); + show_error_and_exit(&handle, "Configuration Error", &msg); + return Ok(()); + } + + // Launch the external process + tauri::async_runtime::spawn(async move { + let (mut rx, child) = match Command::new(&settings.text_generation_webui_path).spawn() { + Ok(c) => c, + Err(e) => { + let msg = format!( + "Failed to spawn the external process at '{}': {}", + settings.text_generation_webui_path, e + ); + // Show a user-facing dialog. + dialog::message(handle.get_window("main").as_ref(), "Process Error", &msg); + // And gracefully exit the app. + handle.exit(1); + return; + } + }; + + // Reacquire state inside the async task to prevent lifetime issues + let app_state = handle.state::(); + *app_state.child_process.lock().unwrap() = Some(child); + + while let Some(event) = rx.recv().await { + if let CommandEvent::Stdout(line) = event { + if let Err(e) = handle.emit_all("sidecar-output", Payload { message: line.into() }) { + eprintln!("Failed to emit sidecar output: {}", e); + break; + } + } + } + }); + + Ok(()) + }) + .system_tray( + SystemTray::new().with_menu( + SystemTrayMenu::new() + .add_item(CustomMenuItem::new("checkforupdates".to_string(), "Check for updates")) + .add_native_item(SystemTrayMenuItem::Separator) + .add_item(CustomMenuItem::new("help".to_string(), "Help")) + .add_native_item(SystemTrayMenuItem::Separator) + .add_item(CustomMenuItem::new("quit".to_string(), "Quit")), + ), + ) + .on_system_tray_event(|app, event| match event { + SystemTrayEvent::MenuItemClick { id, .. 
} => match id.as_str() { + "quit" => { + app.app_handle().emit_all("confirm-close", ()).unwrap(); + } + "checkforupdates" => { + app.shell() + .open("https://github.com/semperai/amica/releases/latest", None) + .expect("failed to open url"); + } + "help" => { + app.shell() + .open("https://docs.heyamica.com", None) + .expect("failed to open url"); + } + _ => {} + }, + _ => {} + }) + .on_window_event(|event| { + if let tauri::WindowEvent::CloseRequested { api, .. } = event.event() { + api.prevent_close(); + event.window().emit("confirm-close", ()).unwrap(); + } + }) + .invoke_handler(tauri::generate_handler![ + close_splashscreen, + proxy_request_blocking, + proxy_request_streaming, + quit_app + ]) + .build(tauri::generate_context!()) + .expect("error while building tauri application") + .run(|app_handle, event| { + if let tauri::RunEvent::ExitRequested { .. } = event { + shutdown_sidecar(app_handle); + } + }); } diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 8e49480d..247bd829 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -3,64 +3,25 @@ "build": { "beforeBuildCommand": "export NEXT_OUTPUT=\"export\" && npm run build && npm run splash", "beforeDevCommand": "npm run dev", - "devPath": "http://localhost:3000", - "distDir": "../out" + "devUrl": "http://localhost:3000", + "frontendDist": "../out" }, - "package": { + "app": { "productName": "amica", - "version": "0.2.1" - }, - "tauri": { - "allowlist": { - "all": false, - "shell": { - "open": true - } - }, - "bundle": { - "active": true, - "category": "DeveloperTool", - "copyright": "", - "deb": { - "depends": [] - }, - "externalBin": [], - "icon": [ - "icons/32x32.png", - "icons/128x128.png", - "icons/128x128@2x.png", - "icons/icon.icns", - "icons/icon.ico" - ], - "identifier": "com.heyamica.dev", - "longDescription": "", - "macOS": { - "entitlements": "Release.entitlements", - "exceptionDomain": "", - "frameworks": [], - "providerShortName": null, - 
"signingIdentity": null - }, - "resources": [], - "shortDescription": "", - "targets": "all", - "windows": { - "certificateThumbprint": null, - "digestAlgorithm": "sha256", - "timestampUrl": "" - } - }, - "macOSPrivateApi": true, + "version": "0.2.1", + "identifier": "com.heyamica.dev", "security": { - "csp": null + "csp": null, + "allowlist": { + "shell": { + "open": true + } + } }, "systemTray": { "iconPath": "icons/icon.png", "iconAsTemplate": true }, - "updater": { - "active": false - }, "windows": [ { "width": 800, @@ -83,5 +44,40 @@ "label": "splashscreen" } ] + }, + "bundle": { + "active": true, + "category": "DeveloperTool", + "copyright": "", + "deb": { + "depends": [] + }, + "externalBin": [], + "icon": [ + "icons/32x32.png", + "icons/128x128.png", + "icons/128x128@2x.png", + "icons/icon.icns", + "icons/icon.ico" + ], + "longDescription": "", + "macOS": { + "entitlements": "Release.entitlements", + "exceptionDomain": "", + "frameworks": [], + "providerShortName": null, + "signingIdentity": null, + "usePrivateApi": true + }, + "resources": [ + "resources" + ], + "shortDescription": "", + "targets": "all", + "windows": { + "certificateThumbprint": null, + "digestAlgorithm": "sha256", + "timestampUrl": "" + } } } diff --git a/src/features/chat/openAiChat.ts b/src/features/chat/openAiChat.ts index c4b59935..54b8de84 100644 --- a/src/features/chat/openAiChat.ts +++ b/src/features/chat/openAiChat.ts @@ -1,5 +1,17 @@ import { Message } from "./messages"; import { config } from '@/utils/config'; +import { invoke } from "@tauri-apps/api/tauri"; +import { listen, Event } from "@tauri-apps/api/event"; + +interface OpenAIChoice { + message: { + content: string; + }; +} + +interface OpenAIResponse { + choices: OpenAIChoice[]; +} function getApiKey(configKey: string) { const apiKey = config(configKey); @@ -9,87 +21,73 @@ function getApiKey(configKey: string) { return apiKey; } -async function getResponseStream( +function getResponseStream( messages: Message[], - url: 
string, + _url: string, // url is now handled by the proxy model: string, apiKey: string, ) { - const headers: Record = { - "Content-Type": "application/json", - "Authorization": `Bearer ${apiKey}`, - "HTTP-Referer": "https://amica.arbius.ai", - "X-Title": "Amica", - }; - - const res = await fetch(`${url}/v1/chat/completions`, { - headers: headers, - method: "POST", - body: JSON.stringify({ - model, - messages, - stream: true, - max_tokens: 200, - }), - }); - - const reader = res.body?.getReader(); - if (res.status !== 200 || ! reader) { - if (res.status === 401) { - throw new Error('Invalid OpenAI authentication'); - } - if (res.status === 402) { - throw new Error('Payment required'); - } - - throw new Error(`OpenAI chat error (${res.status})`); - } + let cleanup = () => {}; const stream = new ReadableStream({ async start(controller: ReadableStreamDefaultController) { - const decoder = new TextDecoder("utf-8"); + const unlistens: Array<() => void> = []; + cleanup = () => unlistens.forEach(fn => fn()); + try { - // sometimes the response is chunked, so we need to combine the chunks - let combined = ""; - while (true) { - const { done, value } = await reader.read(); - if (done) break; - const data = decoder.decode(value); - const chunks = data - .split("data:") - .filter((val) => !!val && val.trim() !== "[DONE]"); - - for (const chunk of chunks) { - // skip comments - if (chunk.length > 0 && chunk[0] === ":") { - continue; + unlistens.push(await listen("stream-chunk", (event: Event) => { + const chunk = event.payload.chunk; + const lines = chunk.split('\n').filter((line: string) => line.startsWith('data: ')); + for (const line of lines) { + const data = line.substring(6); + if (data.trim() === '[DONE]') { + return; } - combined += chunk; - try { - const json = JSON.parse(combined); + const json = JSON.parse(data); const messagePiece = json.choices[0].delta.content; - combined = ""; - if (!!messagePiece) { + if (messagePiece) { controller.enqueue(messagePiece); } } 
catch (error) { - console.error(error); + console.error("Failed to parse stream chunk:", error, "in chunk:", data); } } - } - } catch (error) { - console.error(error); - controller.error(error); - } finally { - reader.releaseLock(); - controller.close(); + })); + + unlistens.push(await listen("stream-error", (event: Event) => { + console.error("Stream error from backend:", event.payload.error); + controller.error(new Error(event.payload.error)); + cleanup(); + })); + + unlistens.push(await listen("stream-end", () => { + controller.close(); + cleanup(); + })); + + await invoke("proxy_request_streaming", { + payload: { + path: "v1/chat/completions", + authorization: apiKey, + body: { + model, + messages, + stream: true, + max_tokens: 200, + } + } + }); + } catch (e) { + const msg = e instanceof Error ? e.message : String(e); + controller.error(new Error(`Failed to invoke streaming request: ${msg}`)); + cleanup(); } }, - async cancel() { - await reader?.cancel(); - reader.releaseLock(); - } + cancel(reason) { + console.log("Stream cancelled:", reason); + cleanup(); + }, }); return stream; @@ -102,20 +100,34 @@ export async function getOpenAiChatResponseStream(messages: Message[]) { return getResponseStream(messages, url, model, apiKey); } -export async function getOpenAiVisionChatResponse(messages: Message[],) { +export async function getOpenAiVisionChatResponse(messages: Message[]): Promise { const apiKey = getApiKey("vision_openai_apikey"); - const url = config("vision_openai_url"); const model = config("vision_openai_model"); - const stream = await getResponseStream(messages, url, model, apiKey); - const sreader = await stream.getReader(); + let json: OpenAIResponse; + try { + // This is a non-streaming request. + json = await invoke("proxy_request_blocking", { + payload: { + path: "v1/chat/completions", + authorization: apiKey, + body: { + model, + messages, + stream: false, + max_tokens: 200, + } + } + }); + } catch (e) { + const msg = e instanceof Error ? 
e.message : String(e); + throw new Error(`OpenAI proxy request failed: ${msg}`); + } + - let combined = ""; - while (true) { - const { done, value } = await sreader.read(); - if (done) break; - combined += value; + if (json.choices && json.choices.length > 0 && json.choices[0].message && json.choices[0].message.content) { + return json.choices[0].message.content; } - return combined; + throw new Error("Invalid response structure from OpenAI-compatible API"); } diff --git a/src/pages/_app.tsx b/src/pages/_app.tsx index 59d0c78b..2d9a4585 100644 --- a/src/pages/_app.tsx +++ b/src/pages/_app.tsx @@ -3,7 +3,31 @@ import '@/i18n'; import "@/styles/globals.css"; import "@charcoal-ui/icons"; import type { AppProps } from "next/app"; +import { useEffect } from 'react'; +import { listen } from '@tauri-apps/api/event'; +import { invoke } from '@tauri-apps/api/tauri'; + export default function App({ Component, pageProps }: AppProps) { + useEffect(() => { + let unlisten: (() => void) | undefined; + + const setupListener = async () => { + unlisten = await listen('confirm-close', () => { + if (window.confirm('Are you sure you want to quit?')) { + invoke('quit_app'); + } + }); + }; + + setupListener(); + + return () => { + if (unlisten) { + unlisten(); + } + }; + }, []); + return ( );