Replace bespoke agent service with rig-backed Z.AI chat

This commit is contained in:
2026-04-03 22:22:16 -04:00
parent eeab5be37f
commit a6731e1034
23 changed files with 1145 additions and 207 deletions

View File

@@ -2303,6 +2303,7 @@ dependencies = [
name = "mosaiciq"
version = "0.1.0"
dependencies = [
"futures",
"rig-core",
"serde",
"serde_json",

View File

@@ -25,3 +25,7 @@ serde_json = "1"
rig-core = "0.34.0"
tauri-plugin-store = "2"
tokio = { version = "1", features = ["time"] }
futures = "0.3"
[dev-dependencies]
tauri = { version = "2", features = ["test"] }

View File

@@ -10,4 +10,4 @@
"opener:default",
"store:default"
]
}
}

View File

@@ -0,0 +1,71 @@
use std::pin::Pin;
use futures::{future::BoxFuture, Stream, StreamExt};
use rig::{
client::completion::CompletionClient,
completion::{CompletionModel, Message},
providers::openai,
streaming::StreamedAssistantContent,
};
use crate::agent::AgentRuntimeConfig;
use crate::error::AppError;
const SYSTEM_PROMPT: &str = "You are MosaicIQ's terminal chat assistant. Answer concisely in plain text. Do not claim to run tools, commands, or file operations. If the request is unclear, ask a short clarifying question.";
/// Streaming text output from the upstream chat provider.
pub type ChatGatewayStream = Pin<Box<dyn Stream<Item = Result<String, AppError>> + Send>>;
/// Trait used by the agent service so tests can inject a deterministic gateway.
/// Trait used by the agent service so tests can inject a deterministic gateway.
pub trait ChatGateway: Clone + Send + Sync + 'static {
    /// Start a streaming chat turn for the given config, prompt, and prior history.
    ///
    /// `history` carries the messages recorded before this turn and is sent
    /// upstream ahead of the new `prompt`. The returned future resolves to a
    /// stream of plain-text deltas (one `String` per streamed chunk).
    fn stream_chat(
        &self,
        runtime: AgentRuntimeConfig,
        prompt: String,
        history: Vec<Message>,
    ) -> BoxFuture<'static, Result<ChatGatewayStream, AppError>>;
}
/// Production Rig-backed gateway using the OpenAI-compatible chat completions API.
#[derive(Debug, Clone, Default)]
pub struct RigChatGateway;

impl ChatGateway for RigChatGateway {
    fn stream_chat(
        &self,
        runtime: AgentRuntimeConfig,
        prompt: String,
        history: Vec<Message>,
    ) -> BoxFuture<'static, Result<ChatGatewayStream, AppError>> {
        Box::pin(async move {
            // Build a fresh client per turn so settings saved between turns
            // (API key, base URL) take effect without restarting the app.
            let client = openai::CompletionsClient::builder()
                .api_key(runtime.api_key)
                .base_url(&runtime.base_url)
                .build()
                .map_err(|error| AppError::ProviderInit(error.to_string()))?;
            let model = client.completion_model(runtime.model);
            let upstream = model
                .completion_request(Message::user(prompt))
                // Prior session messages are sent ahead of the new prompt.
                .messages(history)
                .preamble(SYSTEM_PROMPT.to_string())
                .temperature(0.2)
                .stream()
                .await
                .map_err(|error| AppError::ProviderRequest(error.to_string()))?;
            // Forward only text deltas; any non-text streamed content is
            // silently dropped, and upstream errors are surfaced in-stream.
            let stream = upstream.filter_map(|item| async move {
                match item {
                    Ok(StreamedAssistantContent::Text(text)) => Some(Ok(text.text)),
                    Ok(_) => None,
                    Err(error) => Some(Err(AppError::ProviderRequest(error.to_string()))),
                }
            });
            let stream: ChatGatewayStream = Box::pin(stream);
            Ok(stream)
        })
    }
}

View File

@@ -1,10 +1,15 @@
//! Agent domain logic and request/response types.
mod gateway;
mod service;
mod settings;
mod types;
pub use gateway::{ChatGateway, RigChatGateway};
pub use service::AgentService;
pub use types::{
AgentDeltaEvent, AgentErrorEvent, AgentResultEvent, ChatPromptRequest, ChatStreamStart,
PreparedChatTurn,
AgentConfigStatus, AgentDeltaEvent, AgentErrorEvent, AgentResultEvent, AgentRuntimeConfig,
AgentStoredSettings, ChatPromptRequest, ChatStreamStart, PreparedChatTurn,
SaveAgentSettingsRequest, UpdateAgentApiKeyRequest, AGENT_SETTINGS_STORE_PATH,
DEFAULT_AGENT_BASE_URL, DEFAULT_AGENT_MODEL,
};

View File

@@ -2,25 +2,30 @@
use std::collections::HashMap;
use crate::agent::{ChatPromptRequest, PreparedChatTurn};
use rig::completion::Message;
use tauri::{AppHandle, Runtime};
use crate::agent::{
AgentConfigStatus, AgentRuntimeConfig, AgentStoredSettings, ChatPromptRequest,
PreparedChatTurn, RigChatGateway, SaveAgentSettingsRequest, UpdateAgentApiKeyRequest,
};
use crate::error::AppError;
/// Maintains prompt history per session for the in-process backend agent.
#[derive(Default)]
pub struct AgentService {
sessions: HashMap<String, Vec<String>>,
use super::gateway::ChatGateway;
use super::settings::AgentSettingsService;
/// Per-session message history for the in-process backend agent.
#[derive(Debug, Default)]
struct SessionManager {
    // Full transcript (user and assistant messages) keyed by session id.
    sessions: HashMap<String, Vec<Message>>,
    // Monotonic counter used to mint "session-{n}" ids for new sessions.
    next_session_id: u64,
}
impl AgentService {
/// Validates an incoming prompt, appends it to the session history, and
/// prepares the reply content for the streaming bridge.
///
/// # Errors
///
/// Returns [`AppError::EmptyPrompt`] when the request does not include a
/// non-whitespace prompt.
pub fn prepare_turn(&mut self, request: ChatPromptRequest) -> Result<PreparedChatTurn, AppError> {
impl SessionManager {
fn prepare_turn(
&mut self,
request: ChatPromptRequest,
runtime: AgentRuntimeConfig,
) -> Result<PreparedChatTurn, AppError> {
let prompt = request.prompt.trim();
if prompt.is_empty() {
return Err(AppError::EmptyPrompt);
@@ -31,92 +36,376 @@ impl AgentService {
format!("session-{}", self.next_session_id)
});
// Persist session-local history now so future implementations can build
// context without changing the command contract.
let history = self.sessions.entry(session_id.clone()).or_default();
history.push(prompt.to_string());
let history_length = history.len();
let prior_history = history.clone();
history.push(Message::user(prompt));
Ok(PreparedChatTurn {
workspace_id: request.workspace_id,
session_id,
prompt: prompt.to_string(),
reply: build_reply(prompt, history_length),
history: prior_history,
runtime,
})
}
/// Append the assistant's final reply to an existing session transcript.
///
/// # Errors
///
/// Returns [`AppError::UnknownSession`] when no history exists for `session_id`.
fn record_assistant_reply(&mut self, session_id: &str, reply: &str) -> Result<(), AppError> {
    let history = self
        .sessions
        .get_mut(session_id)
        .ok_or_else(|| AppError::UnknownSession(session_id.to_string()))?;
    history.push(Message::assistant(reply));
    Ok(())
}
}
/// Stateful backend agent service combining settings, plaintext key storage, and session history.
#[derive(Debug)]
pub struct AgentService<R: Runtime, G: ChatGateway = RigChatGateway> {
    // Tracks per-session message transcripts and mints new session ids.
    session_manager: SessionManager,
    // Loads and persists provider settings (base URL, model, API key).
    settings: AgentSettingsService<R>,
    // Streaming chat backend; defaults to the Rig-based production gateway.
    gateway: G,
}
impl<R: Runtime> AgentService<R, RigChatGateway> {
    /// Create a new agent service bound to the current Tauri application.
    ///
    /// Uses the production [`RigChatGateway`]; tests can substitute a
    /// deterministic gateway via [`AgentService::new_with_gateway`].
    pub fn new(app_handle: &AppHandle<R>) -> Result<Self, AppError> {
        Self::new_with_gateway(app_handle, RigChatGateway)
    }
}
impl<R: Runtime, G: ChatGateway> AgentService<R, G> {
    /// Create a new agent service with a caller-supplied gateway.
    pub fn new_with_gateway(app_handle: &AppHandle<R>, gateway: G) -> Result<Self, AppError> {
        Ok(Self {
            session_manager: SessionManager::default(),
            settings: AgentSettingsService::new(app_handle),
            gateway,
        })
    }

    /// Clone the configured chat gateway for work that must outlive the state lock.
    pub fn gateway(&self) -> G {
        self.gateway.clone()
    }

    /// Prepare a new chat turn, resolving provider settings and the stored API key.
    ///
    /// # Errors
    ///
    /// Returns [`AppError::AgentNotConfigured`] when no API key is stored, or
    /// [`AppError::EmptyPrompt`] when the request prompt is blank.
    pub fn prepare_turn(
        &mut self,
        request: ChatPromptRequest,
    ) -> Result<PreparedChatTurn, AppError> {
        // Resolve settings first so a missing API key fails before any
        // session history is created or mutated.
        let runtime = self.resolve_runtime()?;
        self.session_manager.prepare_turn(request, runtime)
    }

    /// Record the assistant reply after the stream completes successfully.
    ///
    /// # Errors
    ///
    /// Returns [`AppError::UnknownSession`] when the session id has no history.
    pub fn record_assistant_reply(
        &mut self,
        session_id: &str,
        reply: &str,
    ) -> Result<(), AppError> {
        self.session_manager
            .record_assistant_reply(session_id, reply)
    }

    /// Return the current public agent configuration status.
    pub fn get_config_status(&self) -> Result<AgentConfigStatus, AppError> {
        let settings = self.settings.load()?;
        Ok(self.build_status(settings))
    }

    /// Persist the base URL and model.
    ///
    /// # Errors
    ///
    /// Returns [`AppError::InvalidSettings`] when either field is blank after trimming.
    pub fn save_settings(
        &mut self,
        request: SaveAgentSettingsRequest,
    ) -> Result<AgentConfigStatus, AppError> {
        let base_url = request.base_url.trim();
        let model = request.model.trim();
        if base_url.is_empty() {
            return Err(AppError::InvalidSettings(
                "base URL cannot be empty".to_string(),
            ));
        }
        if model.is_empty() {
            return Err(AppError::InvalidSettings(
                "model cannot be empty".to_string(),
            ));
        }
        // Load-modify-save keeps the stored API key untouched.
        let mut settings = self.settings.load()?;
        settings.base_url = base_url.to_string();
        settings.model = model.to_string();
        let persisted = self.settings.save(settings)?;
        Ok(self.build_status(persisted))
    }

    /// Save or replace the plaintext API key.
    ///
    /// # Errors
    ///
    /// Returns [`AppError::ApiKeyMissing`] when the submitted key is blank.
    pub fn update_api_key(
        &mut self,
        request: UpdateAgentApiKeyRequest,
    ) -> Result<AgentConfigStatus, AppError> {
        let api_key = request.api_key.trim().to_string();
        if api_key.is_empty() {
            return Err(AppError::ApiKeyMissing);
        }
        let settings = self.settings.set_api_key(api_key)?;
        Ok(self.build_status(settings))
    }

    /// Remove the stored API key.
    pub fn clear_api_key(&mut self) -> Result<AgentConfigStatus, AppError> {
        // An empty string is the store's representation of "no key".
        let settings = self.settings.set_api_key(String::new())?;
        Ok(self.build_status(settings))
    }

    /// Build the webview-facing status view of the stored settings.
    fn build_status(&self, settings: AgentStoredSettings) -> AgentConfigStatus {
        let has_api_key = !settings.api_key.trim().is_empty();
        AgentConfigStatus {
            // A stored API key is currently the only prerequisite for chat.
            configured: has_api_key,
            has_api_key,
            base_url: settings.base_url,
            model: settings.model,
        }
    }

    /// Resolve stored settings into a per-turn runtime config.
    ///
    /// # Errors
    ///
    /// Returns [`AppError::AgentNotConfigured`] when no API key is stored.
    fn resolve_runtime(&self) -> Result<AgentRuntimeConfig, AppError> {
        let settings = self.settings.load()?;
        let api_key = settings.api_key.trim().to_string();
        if api_key.is_empty() {
            return Err(AppError::AgentNotConfigured);
        }
        Ok(AgentRuntimeConfig {
            base_url: settings.base_url,
            model: settings.model,
            api_key,
        })
    }
}
fn build_reply(prompt: &str, history_length: usize) -> String {
if history_length == 1 {
return format!(
"Backend agent received: {prompt}\n\nStreaming is now active for plain-text chat. Ask a follow-up question to continue this workspace session."
);
}
format!(
"Backend agent received: {prompt}\n\nContinuing the existing workspace conversation. This is turn {history_length} in the current session."
)
}
#[cfg(test)]
mod tests {
use super::AgentService;
use crate::agent::ChatPromptRequest;
use std::env;
use std::fs;
use std::path::PathBuf;
use std::sync::{Mutex, OnceLock};
use std::time::{SystemTime, UNIX_EPOCH};
use super::SessionManager;
use crate::agent::{
AgentRuntimeConfig, AgentService, ChatPromptRequest, SaveAgentSettingsRequest,
UpdateAgentApiKeyRequest, DEFAULT_AGENT_BASE_URL, DEFAULT_AGENT_MODEL,
};
use crate::error::AppError;
mod prepare_turn {
use super::{AgentService, AppError, ChatPromptRequest};
use rig::completion::Message;
use tauri::test::{mock_builder, mock_context, noop_assets, MockRuntime};
#[test]
fn returns_empty_prompt_error_when_request_contains_only_whitespace() {
let mut service = AgentService::default();
#[test]
fn returns_empty_prompt_error_when_request_contains_only_whitespace() {
let mut sessions = SessionManager::default();
let result = sessions.prepare_turn(
ChatPromptRequest {
workspace_id: "workspace-1".to_string(),
session_id: None,
prompt: " ".to_string(),
},
AgentRuntimeConfig {
base_url: "https://example.com".to_string(),
model: "glm-5.1".to_string(),
api_key: "key".to_string(),
},
);
assert_eq!(result.unwrap_err(), AppError::EmptyPrompt);
}
#[test]
fn creates_new_session_when_request_does_not_provide_one() {
let mut sessions = SessionManager::default();
let result = sessions
.prepare_turn(
ChatPromptRequest {
workspace_id: "workspace-1".to_string(),
session_id: None,
prompt: "Summarize AAPL".to_string(),
},
AgentRuntimeConfig {
base_url: "https://example.com".to_string(),
model: "glm-5.1".to_string(),
api_key: "key".to_string(),
},
)
.unwrap();
assert_eq!(result.session_id, "session-1");
assert!(result.history.is_empty());
}
#[test]
fn reuses_existing_session_and_returns_prior_history() {
let mut sessions = SessionManager::default();
let session_id = "session-42".to_string();
sessions
.prepare_turn(
ChatPromptRequest {
workspace_id: "workspace-1".to_string(),
session_id: Some(session_id.clone()),
prompt: "First prompt".to_string(),
},
AgentRuntimeConfig {
base_url: "https://example.com".to_string(),
model: "glm-5.1".to_string(),
api_key: "key".to_string(),
},
)
.unwrap();
sessions
.record_assistant_reply(&session_id, "First reply")
.unwrap();
let result = sessions
.prepare_turn(
ChatPromptRequest {
workspace_id: "workspace-1".to_string(),
session_id: Some(session_id),
prompt: "Second prompt".to_string(),
},
AgentRuntimeConfig {
base_url: "https://example.com".to_string(),
model: "glm-5.1".to_string(),
api_key: "key".to_string(),
},
)
.unwrap();
assert_eq!(result.history.len(), 2);
assert_eq!(result.history[0], Message::user("First prompt"));
assert_eq!(result.history[1], Message::assistant("First reply"));
}
#[test]
fn persists_settings_and_plaintext_api_key_in_store() {
with_test_home("config", || {
let app = build_test_app();
let mut service = AgentService::new(&app.handle()).unwrap();
let initial = service.get_config_status().unwrap();
assert!(!initial.configured);
assert!(!initial.has_api_key);
assert_eq!(initial.base_url, DEFAULT_AGENT_BASE_URL);
assert_eq!(initial.model, DEFAULT_AGENT_MODEL);
let saved = service
.save_settings(SaveAgentSettingsRequest {
base_url: "https://example.test/v4".to_string(),
model: "glm-test".to_string(),
})
.unwrap();
assert_eq!(saved.base_url, "https://example.test/v4");
assert_eq!(saved.model, "glm-test");
assert!(!saved.has_api_key);
let updated = service
.update_api_key(UpdateAgentApiKeyRequest {
api_key: "z-ai-key-1".to_string(),
})
.unwrap();
assert!(updated.configured);
assert!(updated.has_api_key);
let prepared = service
.prepare_turn(ChatPromptRequest {
workspace_id: "workspace-1".to_string(),
session_id: None,
prompt: "hello".to_string(),
})
.unwrap();
assert_eq!(prepared.runtime.base_url, "https://example.test/v4");
assert_eq!(prepared.runtime.model, "glm-test");
assert_eq!(prepared.runtime.api_key, "z-ai-key-1");
});
}
#[test]
fn clears_plaintext_api_key_from_store() {
with_test_home("clear", || {
let app = build_test_app();
let mut service = AgentService::new(&app.handle()).unwrap();
service
.update_api_key(UpdateAgentApiKeyRequest {
api_key: "z-ai-key-1".to_string(),
})
.unwrap();
let cleared = service.clear_api_key().unwrap();
assert!(!cleared.configured);
assert!(!cleared.has_api_key);
let result = service.prepare_turn(ChatPromptRequest {
workspace_id: "workspace-1".to_string(),
session_id: None,
prompt: " ".to_string(),
prompt: "hello".to_string(),
});
assert_eq!(result.unwrap_err(), AppError::AgentNotConfigured);
});
}
assert_eq!(result.unwrap_err(), AppError::EmptyPrompt);
fn build_test_app() -> tauri::App<MockRuntime> {
mock_builder()
.plugin(tauri_plugin_store::Builder::new().build())
.build(mock_context(noop_assets()))
.unwrap()
}
fn unique_identifier(prefix: &str) -> String {
let nanos = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_nanos();
format!("com.mosaiciq.tests.{prefix}.{nanos}")
}
fn with_test_home<T>(prefix: &str, test: impl FnOnce() -> T) -> T {
let _lock = env_lock().lock().unwrap();
let home = env::temp_dir().join(unique_identifier(prefix));
fs::create_dir_all(&home).unwrap();
let original_home = env::var_os("HOME");
env::set_var("HOME", &home);
let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(test));
match original_home {
Some(value) => env::set_var("HOME", value),
None => env::remove_var("HOME"),
}
#[test]
fn creates_new_session_when_request_does_not_provide_one() {
let mut service = AgentService::default();
cleanup_test_data_dir(home);
let result = service
.prepare_turn(ChatPromptRequest {
workspace_id: "workspace-1".to_string(),
session_id: None,
prompt: "Summarize AAPL".to_string(),
})
.unwrap();
assert_eq!(result.session_id, "session-1");
}
#[test]
fn increments_history_length_when_request_reuses_existing_session() {
let mut service = AgentService::default();
let session_id = "session-42".to_string();
let _ = service
.prepare_turn(ChatPromptRequest {
workspace_id: "workspace-1".to_string(),
session_id: Some(session_id.clone()),
prompt: "First prompt".to_string(),
})
.unwrap();
let result = service
.prepare_turn(ChatPromptRequest {
workspace_id: "workspace-1".to_string(),
session_id: Some(session_id),
prompt: "Second prompt".to_string(),
})
.unwrap();
assert!(result.reply.contains("turn 2"));
match result {
Ok(value) => value,
Err(payload) => std::panic::resume_unwind(payload),
}
}
fn env_lock() -> &'static Mutex<()> {
static LOCK: OnceLock<Mutex<()>> = OnceLock::new();
LOCK.get_or_init(|| Mutex::new(()))
}
fn cleanup_test_data_dir(path: PathBuf) {
let _ = fs::remove_dir_all(path);
}
}

View File

@@ -0,0 +1,80 @@
use serde_json::json;
use tauri::{AppHandle, Runtime};
use tauri_plugin_store::StoreExt;
use crate::agent::{
AgentStoredSettings, AGENT_SETTINGS_STORE_PATH, DEFAULT_AGENT_BASE_URL, DEFAULT_AGENT_MODEL,
};
use crate::error::AppError;
const BASE_URL_KEY: &str = "baseUrl";
const MODEL_KEY: &str = "model";
const API_KEY_KEY: &str = "apiKey";
/// Manages the provider settings and plaintext API key stored through the Tauri store plugin.
#[derive(Debug, Clone)]
pub struct AgentSettingsService<R: Runtime> {
    // Handle used to open the shared application store on each operation.
    app_handle: AppHandle<R>,
}

impl<R: Runtime> AgentSettingsService<R> {
    /// Create a new settings service for the provided application handle.
    pub fn new(app_handle: &AppHandle<R>) -> Self {
        Self {
            app_handle: app_handle.clone(),
        }
    }

    /// Load the current agent settings, falling back to app defaults when unset.
    ///
    /// # Errors
    ///
    /// Returns [`AppError::SettingsStore`] when the store cannot be opened.
    pub fn load(&self) -> Result<AgentStoredSettings, AppError> {
        let store = self
            .app_handle
            .store(AGENT_SETTINGS_STORE_PATH)
            .map_err(|error| AppError::SettingsStore(error.to_string()))?;
        // Shared reader for string-valued keys: missing or non-string entries
        // fall back to the per-field default below.
        let read_string = |key: &str| {
            store
                .get(key)
                .and_then(|value| value.as_str().map(ToOwned::to_owned))
        };
        Ok(AgentStoredSettings {
            base_url: read_string(BASE_URL_KEY)
                .unwrap_or_else(|| DEFAULT_AGENT_BASE_URL.to_string()),
            model: read_string(MODEL_KEY).unwrap_or_else(|| DEFAULT_AGENT_MODEL.to_string()),
            api_key: read_string(API_KEY_KEY).unwrap_or_default(),
        })
    }

    /// Persist the current settings, including the plaintext API key.
    ///
    /// # Errors
    ///
    /// Returns [`AppError::SettingsStore`] when the store cannot be opened or saved.
    pub fn save(&self, settings: AgentStoredSettings) -> Result<AgentStoredSettings, AppError> {
        self.save_inner(&settings)?;
        Ok(settings)
    }

    /// Update only the plaintext API key, preserving the other settings.
    ///
    /// # Errors
    ///
    /// Returns [`AppError::SettingsStore`] when the store cannot be opened or saved.
    pub fn set_api_key(&self, api_key: String) -> Result<AgentStoredSettings, AppError> {
        let mut settings = self.load()?;
        settings.api_key = api_key;
        self.save_inner(&settings)?;
        Ok(settings)
    }

    /// Write all settings fields to the backing store and flush them to disk.
    fn save_inner(&self, settings: &AgentStoredSettings) -> Result<(), AppError> {
        let store = self
            .app_handle
            .store(AGENT_SETTINGS_STORE_PATH)
            .map_err(|error| AppError::SettingsStore(error.to_string()))?;
        // The API key is intentionally persisted in plain text per the current
        // product requirement, so it lives in the same store as the runtime config.
        store.set(BASE_URL_KEY.to_string(), json!(settings.base_url));
        store.set(MODEL_KEY.to_string(), json!(settings.model));
        store.set(API_KEY_KEY.to_string(), json!(settings.api_key));
        store
            .save()
            .map_err(|error| AppError::SettingsStore(error.to_string()))
    }
}

View File

@@ -1,5 +1,13 @@
use rig::completion::Message;
use serde::{Deserialize, Serialize};
/// Default Z.AI coding plan endpoint used by the app.
pub const DEFAULT_AGENT_BASE_URL: &str = "https://api.z.ai/api/coding/paas/v4";
/// Default model used for plain-text terminal chat.
pub const DEFAULT_AGENT_MODEL: &str = "glm-5.1";
/// Store file used for agent settings and plaintext API key storage.
pub const AGENT_SETTINGS_STORE_PATH: &str = "agent-settings.json";
/// Request payload for an interactive chat turn.
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -12,7 +20,18 @@ pub struct ChatPromptRequest {
pub prompt: String,
}
/// Synchronous chat turn preparation result used by the streaming command.
/// Runtime provider configuration after settings resolution.
#[derive(Debug, Clone)]
pub struct AgentRuntimeConfig {
/// OpenAI-compatible base URL.
pub base_url: String,
/// Upstream model identifier.
pub model: String,
/// Runtime API key loaded from plaintext application storage.
pub api_key: String,
}
/// Prepared chat turn after validation and session history lookup.
#[derive(Debug, Clone)]
pub struct PreparedChatTurn {
/// Workspace identifier associated with the turn.
@@ -21,8 +40,10 @@ pub struct PreparedChatTurn {
pub session_id: String,
/// Prompt content after validation and normalization.
pub prompt: String,
/// Fully prepared reply text that will be chunked into stream events.
pub reply: String,
/// History to send upstream before the new prompt.
pub history: Vec<Message>,
/// Resolved provider config for this turn.
pub runtime: AgentRuntimeConfig,
}
/// Immediate response returned when a chat stream starts.
@@ -76,3 +97,57 @@ pub struct AgentErrorEvent {
/// User-visible error message for the failed stream.
pub message: String,
}
/// Persisted settings for the chat provider, including the plaintext API key.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct AgentStoredSettings {
/// OpenAI-compatible base URL.
pub base_url: String,
/// Upstream model identifier.
pub model: String,
/// Plaintext API key saved in the application store.
pub api_key: String,
}
impl Default for AgentStoredSettings {
    /// Defaults to the app's Z.AI endpoint and model with no API key stored.
    fn default() -> Self {
        Self {
            base_url: DEFAULT_AGENT_BASE_URL.to_string(),
            model: DEFAULT_AGENT_MODEL.to_string(),
            // Empty string represents "no key stored yet".
            api_key: String::new(),
        }
    }
}
/// Public configuration status returned to the webview.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct AgentConfigStatus {
/// Whether the app has everything needed to start chat immediately.
pub configured: bool,
/// Whether the app currently has an API key stored.
pub has_api_key: bool,
/// Current provider base URL.
pub base_url: String,
/// Current provider model.
pub model: String,
}
/// Request payload for updating persisted non-secret settings.
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SaveAgentSettingsRequest {
/// OpenAI-compatible base URL.
pub base_url: String,
/// Upstream model identifier.
pub model: String,
}
/// Request payload for rotating the stored API key.
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdateAgentApiKeyRequest {
/// Replacement plaintext API key to store.
pub api_key: String,
}

View File

@@ -1,22 +0,0 @@
use crate::agent::{AgentPromptRequest, AgentPromptResponse};
use crate::state::AppState;
/// Handles interactive agent prompts from the frontend.
///
/// # Errors
///
/// Returns an error string when shared backend state is unavailable or when
/// the request fails validation in the agent layer.
#[tauri::command]
pub async fn agent_prompt(
    state: tauri::State<'_, AppState>,
    request: AgentPromptRequest,
) -> Result<AgentPromptResponse, String> {
    // Convert a poisoned mutex into a user-visible error instead of panicking.
    let mut agent = match state.agent.lock() {
        Ok(guard) => guard,
        Err(_) => return Err("agent state is unavailable".to_string()),
    };
    match agent.prompt(request) {
        Ok(response) => Ok(response),
        Err(error) => Err(error.to_string()),
    }
}

View File

@@ -1,3 +1,4 @@
//! Tauri command handlers.
pub mod settings;
pub mod terminal;

View File

@@ -1,19 +0,0 @@
/// Handles the search command from the frontend, which performs a search query against both yahoo finance and the sec api
///
/// # Errors
///
/// Returns an error string when shared backend state is unavailable or when
/// the request fails validation in the agent layer.
// NOTE(review): despite the doc summary, the body only delegates to
// `agent.prompt` — no Yahoo Finance or SEC query happens here. Confirm intent.
#[tauri::command]
pub async fn search_ticker(
    state: tauri::State<'_, AppState>,
    request: AgentPromptRequest,
) -> Result<AgentPromptResponse, String> {
    // Convert a poisoned mutex into a user-visible error instead of panicking.
    let mut agent = state
        .agent
        .lock()
        .map_err(|_| "agent state is unavailable".to_string())?;
    agent.prompt(request).map_err(|error| error.to_string())
}

View File

@@ -0,0 +1,60 @@
use crate::agent::{AgentConfigStatus, SaveAgentSettingsRequest, UpdateAgentApiKeyRequest};
use crate::state::AppState;
/// Return the current public configuration state for the AI chat runtime.
///
/// # Errors
///
/// Returns an error string when shared backend state is unavailable or when
/// the settings store cannot be read.
#[tauri::command]
pub async fn get_agent_config_status(
    state: tauri::State<'_, AppState>,
) -> Result<AgentConfigStatus, String> {
    // A poisoned lock becomes a user-visible error instead of a panic.
    let agent = match state.agent.lock() {
        Ok(guard) => guard,
        Err(_) => return Err("agent state is unavailable".to_string()),
    };
    agent.get_config_status().map_err(|error| error.to_string())
}
/// Persist the non-secret base URL and model settings.
///
/// # Errors
///
/// Returns an error string when shared backend state is unavailable or when
/// the submitted settings fail validation in the agent layer.
#[tauri::command]
pub async fn save_agent_settings(
    state: tauri::State<'_, AppState>,
    request: SaveAgentSettingsRequest,
) -> Result<AgentConfigStatus, String> {
    // Convert a poisoned mutex into a user-visible error instead of panicking.
    let mut agent = state
        .agent
        .lock()
        .map_err(|_| "agent state is unavailable".to_string())?;
    agent
        .save_settings(request)
        .map_err(|error| error.to_string())
}
/// Save or replace the plaintext API key.
///
/// # Errors
///
/// Returns an error string when shared backend state is unavailable or when
/// the submitted key fails validation in the agent layer.
#[tauri::command]
pub async fn update_agent_api_key(
    state: tauri::State<'_, AppState>,
    request: UpdateAgentApiKeyRequest,
) -> Result<AgentConfigStatus, String> {
    // A poisoned lock becomes a user-visible error instead of a panic.
    let mut agent = match state.agent.lock() {
        Ok(guard) => guard,
        Err(_) => return Err("agent state is unavailable".to_string()),
    };
    match agent.update_api_key(request) {
        Ok(status) => Ok(status),
        Err(error) => Err(error.to_string()),
    }
}
/// Remove the stored plaintext API key.
///
/// # Errors
///
/// Returns an error string when shared backend state is unavailable or when
/// the agent layer fails to clear the stored key.
#[tauri::command]
pub async fn clear_agent_api_key(
    state: tauri::State<'_, AppState>,
) -> Result<AgentConfigStatus, String> {
    // Convert a poisoned mutex into a user-visible error instead of panicking.
    let mut agent = state
        .agent
        .lock()
        .map_err(|_| "agent state is unavailable".to_string())?;
    agent.clear_api_key().map_err(|error| error.to_string())
}

View File

@@ -1,9 +1,11 @@
use std::time::Duration;
use tauri::Emitter;
use futures::StreamExt;
use tauri::{Emitter, Manager};
use crate::agent::{
AgentDeltaEvent, AgentErrorEvent, AgentResultEvent, ChatPromptRequest, ChatStreamStart,
AgentDeltaEvent, AgentErrorEvent, AgentResultEvent, ChatGateway, ChatPromptRequest,
ChatStreamStart,
};
use crate::state::AppState;
use crate::terminal::{ExecuteTerminalCommandRequest, TerminalCommandResponse};
@@ -25,15 +27,17 @@ pub async fn start_chat_stream(
request: ChatPromptRequest,
) -> Result<ChatStreamStart, String> {
let request_id = state.next_request_id();
let prepared_turn = {
let (prepared_turn, gateway) = {
let mut agent = state
.agent
.lock()
.map_err(|_| "agent state is unavailable".to_string())?;
agent
let gateway = agent.gateway();
let prepared_turn = agent
.prepare_turn(request)
.map_err(|error| error.to_string())?
.map_err(|error| error.to_string())?;
(prepared_turn, gateway)
};
let start = ChatStreamStart {
@@ -41,48 +45,78 @@ pub async fn start_chat_stream(
session_id: prepared_turn.session_id.clone(),
};
let workspace_id = prepared_turn.workspace_id.clone();
let session_id = prepared_turn.session_id.clone();
let reply = prepared_turn.reply.clone();
let should_fail = prepared_turn.prompt.contains("__simulate_stream_error__");
let app_handle = app.clone();
tauri::async_runtime::spawn(async move {
// Delay the first event slightly so the frontend can register callbacks for the new request id.
tokio::time::sleep(Duration::from_millis(30)).await;
if should_fail {
let _ = app.emit(
"agent_error",
AgentErrorEvent {
workspace_id,
request_id,
session_id,
message: "Simulated backend stream failure.".to_string(),
},
);
return;
// Resolve the upstream stream outside the mutex so long-running provider I/O
// does not block other settings reads or chat requests.
let mut stream = match gateway
.stream_chat(
prepared_turn.runtime.clone(),
prepared_turn.prompt.clone(),
prepared_turn.history.clone(),
)
.await
{
Ok(stream) => stream,
Err(error) => {
let _ = app_handle.emit(
"agent_error",
AgentErrorEvent {
workspace_id: prepared_turn.workspace_id,
request_id,
session_id: prepared_turn.session_id,
message: error.to_string(),
},
);
return;
}
};
let mut reply = String::new();
while let Some(chunk) = stream.next().await {
match chunk {
Ok(delta) => {
reply.push_str(&delta);
let _ = app_handle.emit(
"agent_delta",
AgentDeltaEvent {
workspace_id: prepared_turn.workspace_id.clone(),
request_id: request_id.clone(),
session_id: prepared_turn.session_id.clone(),
delta,
},
);
}
Err(error) => {
let _ = app_handle.emit(
"agent_error",
AgentErrorEvent {
workspace_id: prepared_turn.workspace_id,
request_id,
session_id: prepared_turn.session_id,
message: error.to_string(),
},
);
return;
}
}
}
// Emit coarse-grained deltas for now; the event contract remains stable when a real model streams tokens.
for chunk in chunk_reply(&reply) {
let _ = app.emit(
"agent_delta",
AgentDeltaEvent {
workspace_id: workspace_id.clone(),
request_id: request_id.clone(),
session_id: session_id.clone(),
delta: chunk,
},
);
tokio::time::sleep(Duration::from_millis(60)).await;
// Store the final assistant message after the stream completes so the next
// conversational turn reuses the full transcript.
if let Ok(mut agent) = app_handle.state::<AppState>().agent.lock() {
let _ = agent.record_assistant_reply(&prepared_turn.session_id, &reply);
}
let _ = app.emit(
let _ = app_handle.emit(
"agent_result",
AgentResultEvent {
workspace_id,
workspace_id: prepared_turn.workspace_id,
request_id,
session_id,
session_id: prepared_turn.session_id,
reply,
},
);
@@ -90,21 +124,3 @@ pub async fn start_chat_stream(
Ok(start)
}
/// Splits a reply into small word groups to simulate incremental streaming.
///
/// Every chunk (including the last) carries a trailing space so concatenating
/// the deltas reproduces a space-separated reply. An empty or whitespace-only
/// reply yields a single empty chunk.
fn chunk_reply(reply: &str) -> Vec<String> {
    let words: Vec<&str> = reply.split_whitespace().collect();
    if words.is_empty() {
        return vec![String::new()];
    }
    let mut deltas = Vec::with_capacity((words.len() + 2) / 3);
    for group in words.chunks(3) {
        let mut delta = String::new();
        for (index, word) in group.iter().enumerate() {
            if index > 0 {
                delta.push(' ');
            }
            delta.push_str(word);
        }
        delta.push(' ');
        deltas.push(delta);
    }
    deltas
}

View File

@@ -1,16 +1,40 @@
use std::error::Error;
use std::fmt::{Display, Formatter};
/// Backend error type for application-level validation failures.
/// Backend error type for application-level validation and runtime failures.
#[derive(Debug, PartialEq, Eq)]
pub enum AppError {
    /// The chat prompt was empty or whitespace-only.
    EmptyPrompt,
    /// No API key is stored yet, so chat cannot start.
    AgentNotConfigured,
    /// An API key update was submitted with an empty value.
    ApiKeyMissing,
    /// Provider settings failed validation; carries the reason text.
    InvalidSettings(String),
    /// A session id was referenced that has no recorded history.
    UnknownSession(String),
    /// The Tauri settings store failed to load or persist.
    SettingsStore(String),
    /// The upstream AI provider client could not be constructed.
    ProviderInit(String),
    /// A request to the upstream AI provider failed.
    ProviderRequest(String),
}
impl Display for AppError {
    /// Render the user-visible message for each error variant.
    fn fmt(&self, formatter: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::EmptyPrompt => write!(formatter, "prompt cannot be empty"),
            Self::AgentNotConfigured => write!(
                formatter,
                "AI chat is not configured yet. Open AI Settings to save a model and API key.",
            ),
            Self::ApiKeyMissing => write!(formatter, "API key cannot be empty"),
            // The validation message is already user-facing; pass it through as-is.
            Self::InvalidSettings(message) => write!(formatter, "{message}"),
            Self::UnknownSession(session_id) => write!(formatter, "unknown session: {session_id}"),
            Self::SettingsStore(message) => write!(formatter, "settings store error: {message}"),
            Self::ProviderInit(message) => {
                write!(formatter, "AI provider initialization failed: {message}")
            }
            Self::ProviderRequest(message) => {
                write!(formatter, "AI provider request failed: {message}")
            }
        }
    }
}

View File

@@ -10,17 +10,28 @@ mod error;
mod state;
mod terminal;
use tauri::Manager;
/// Starts the Tauri application and registers the backend command surface.
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
tauri::Builder::default()
// Keep shared backend services in managed state so commands stay thin.
.manage(state::AppState::default())
.plugin(tauri_plugin_store::Builder::new().build())
.setup(|app| {
let state = state::AppState::new(&app.handle())
.map_err(|error| -> Box<dyn std::error::Error> { Box::new(error) })?;
app.manage(state);
Ok(())
})
.plugin(tauri_plugin_opener::init())
.invoke_handler(tauri::generate_handler![
commands::terminal::execute_terminal_command,
commands::terminal::start_chat_stream
commands::terminal::start_chat_stream,
commands::settings::get_agent_config_status,
commands::settings::save_agent_settings,
commands::settings::update_agent_api_key,
commands::settings::clear_agent_api_key
])
.run(tauri::generate_context!())
.expect("error while running tauri application");

View File

@@ -1,34 +1,36 @@
//! Shared application state managed by Tauri.
use std::sync::Mutex;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Mutex;
use tauri::{AppHandle, Wry};
use crate::agent::AgentService;
use crate::error::AppError;
use crate::terminal::TerminalCommandService;
/// Runtime services shared across Tauri commands.
pub struct AppState {
/// Stateful chat service used for per-session conversation history.
pub agent: Mutex<AgentService>,
/// Stateful chat service used for per-session conversation history and agent config.
pub agent: Mutex<AgentService<Wry>>,
/// Slash-command executor backed by shared mock data.
pub command_service: TerminalCommandService,
next_request_id: AtomicU64,
}
impl AppState {
    /// Create a new application state for the current Tauri app.
    ///
    /// # Errors
    ///
    /// Propagates any [`AppError`] from [`AgentService::new`].
    pub fn new(app_handle: &AppHandle<Wry>) -> Result<Self, AppError> {
        Ok(Self {
            agent: Mutex::new(AgentService::new(app_handle)?),
            command_service: TerminalCommandService::default(),
            // Start at 1 so the first generated id is "request-1".
            next_request_id: AtomicU64::new(1),
        })
    }
    /// Generates a unique request id for correlating stream events with frontend listeners.
    pub fn next_request_id(&self) -> String {
        // Relaxed ordering suffices: the counter only needs uniqueness, not
        // any cross-thread ordering guarantees.
        let id = self.next_request_id.fetch_add(1, Ordering::Relaxed);
        format!("request-{id}")
    }
}
impl Default for AppState {
    // Handle-free fallback used where no `AppHandle` is available; the agent
    // service starts from its own `Default` here instead of the handle-based
    // constructor used by `AppState::new`. NOTE(review): presumably this
    // yields an unconfigured agent (no persisted settings loaded) — confirm
    // against `AgentService::default`.
    fn default() -> Self {
        Self {
            agent: Mutex::new(AgentService::default()),
            command_service: TerminalCommandService::default(),
            next_request_id: AtomicU64::new(1),
        }
    }
}

View File

@@ -33,11 +33,7 @@ impl TerminalCommandService {
"/analyze" => self.analyze(command.args.first().map(String::as_str)),
"/help" => help_response(),
_ => TerminalCommandResponse::Text {
content: format!(
"Unknown command: {}\n\n{}",
command.command,
help_text()
),
content: format!("Unknown command: {}\n\n{}", command.command, help_text()),
},
}
}
@@ -132,10 +128,7 @@ impl TerminalCommandService {
.companies
.iter()
.filter(|company| {
company
.symbol
.to_lowercase()
.contains(&normalized_query)
company.symbol.to_lowercase().contains(&normalized_query)
|| company.name.to_lowercase().contains(&normalized_query)
})
.cloned()

View File

@@ -37,13 +37,19 @@ pub enum TerminalCommandResponse {
#[derive(Debug, Clone, Serialize, PartialEq)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum PanelPayload {
Company { data: Company },
Portfolio { data: Portfolio },
Company {
data: Company,
},
Portfolio {
data: Portfolio,
},
News {
data: Vec<NewsItem>,
ticker: Option<String>,
},
Analysis { data: StockAnalysis },
Analysis {
data: StockAnalysis,
},
}
/// Company snapshot used by the company panel.

View File

@@ -2,15 +2,20 @@ import React, { useEffect, useCallback, useRef } from 'react';
import { Terminal } from './components/Terminal/Terminal';
import { Sidebar } from './components/Sidebar/Sidebar';
import { TabBar } from './components/TabBar/TabBar';
import { AgentSettingsModal } from './components/Settings/AgentSettingsModal';
import { useTabs } from './hooks/useTabs';
import { createEntry } from './hooks/useTerminal';
import { agentSettingsBridge } from './lib/agentSettingsBridge';
import { terminalBridge } from './lib/terminalBridge';
import { AgentConfigStatus } from './types/agentSettings';
import './App.css';
function App() {
const tabs = useTabs();
const [sidebarOpen, setSidebarOpen] = React.useState(true);
const [isProcessing, setIsProcessing] = React.useState(false);
const [agentStatus, setAgentStatus] = React.useState<AgentConfigStatus | null>(null);
const [isSettingsOpen, setIsSettingsOpen] = React.useState(false);
const commandHistoryRefs = useRef<Record<string, string[]>>({});
const commandIndexRefs = useRef<Record<string, number>>({});
@@ -36,6 +41,24 @@ function App() {
commandIndexRefs.current[workspaceId] = -1;
}, [tabs]);
const refreshAgentStatus = useCallback(async () => {
const status = await agentSettingsBridge.getStatus();
setAgentStatus(status);
return status;
}, []);
const handleAgentStatusChange = useCallback((status: AgentConfigStatus) => {
setAgentStatus(status);
}, []);
const handleOpenSettings = useCallback(async () => {
try {
await refreshAgentStatus();
} finally {
setIsSettingsOpen(true);
}
}, [refreshAgentStatus]);
const handleCommand = useCallback(async (command: string) => {
const trimmedCommand = command.trim();
const workspaceId = tabs.activeWorkspaceId;
@@ -187,6 +210,26 @@ function App() {
const outputRef = useRef<HTMLDivElement | null>(null);
useEffect(() => {
let active = true;
void refreshAgentStatus()
.then(() => {
if (!active) {
return;
}
})
.catch(() => {
if (active) {
setAgentStatus(null);
}
});
return () => {
active = false;
};
}, [refreshAgentStatus]);
useEffect(() => {
tabs.workspaces.forEach((workspace) => {
commandHistoryRefs.current[workspace.id] ??= [];
@@ -259,6 +302,10 @@ function App() {
onTabClick={(id) => tabs.setActiveWorkspace(id)}
onTabClose={(id) => tabs.closeWorkspace(id)}
onNewTab={handleCreateWorkspace}
onOpenSettings={() => {
void handleOpenSettings();
}}
isAgentReady={Boolean(agentStatus?.configured)}
onTabRename={(id, name) => tabs.renameWorkspace(id, name)}
/>
@@ -273,6 +320,13 @@ function App() {
resetCommandIndex={resetCommandIndex}
/>
</div>
<AgentSettingsModal
isOpen={isSettingsOpen}
status={agentStatus}
onClose={() => setIsSettingsOpen(false)}
onStatusChange={handleAgentStatusChange}
/>
</div>
);
}

View File

@@ -0,0 +1,229 @@
import React, { useEffect, useState } from 'react';
import { agentSettingsBridge } from '../../lib/agentSettingsBridge';
import { AgentConfigStatus } from '../../types/agentSettings';
/** Props for the AI settings modal. */
interface AgentSettingsModalProps {
  /** Whether the modal is visible; the component renders nothing when false. */
  isOpen: boolean;
  /** Latest known agent status; the modal renders nothing until this is loaded. */
  status: AgentConfigStatus | null;
  /** Invoked when the user dismisses the modal. */
  onClose: () => void;
  /** Invoked with the updated status after each successful backend call. */
  onStatusChange: (status: AgentConfigStatus) => void;
}
// Shared styling for the modal's text inputs (dark theme, monospace, focus accent).
const inputClassName =
  'w-full rounded border border-[#2a2a2a] bg-[#111111] px-3 py-2 text-sm font-mono text-[#e0e0e0] outline-none transition-colors focus:border-[#58a6ff]';
/**
 * Modal dialog for configuring the AI agent runtime (base URL + model) and
 * the provider API key. All persistence goes through `agentSettingsBridge`;
 * the parent owns the latest status and is notified via `onStatusChange`
 * after every successful backend call.
 */
export const AgentSettingsModal: React.FC<AgentSettingsModalProps> = ({
  isOpen,
  status,
  onClose,
  onStatusChange,
}) => {
  // Local form state, seeded from `status` each time the modal opens.
  const [baseUrl, setBaseUrl] = useState('');
  const [model, setModel] = useState('');
  const [apiKey, setApiKey] = useState('');
  // Transient feedback about the most recent save/clear action.
  const [error, setError] = useState<string | null>(null);
  const [success, setSuccess] = useState<string | null>(null);
  // Disables all action buttons while a backend call is in flight.
  const [isBusy, setIsBusy] = useState(false);

  // Re-seed the form fields and clear stale feedback whenever the modal is
  // (re)opened with a known status.
  useEffect(() => {
    if (!status || !isOpen) {
      return;
    }
    setBaseUrl(status.baseUrl);
    setModel(status.model);
    setApiKey('');
    setError(null);
    setSuccess(null);
  }, [isOpen, status]);

  // Render nothing until the modal is open and a status has been loaded.
  if (!isOpen || !status) {
    return null;
  }

  const saveRuntimeSettings = async () => {
    // Runtime config and API key are saved through separate backend commands, so
    // key actions persist the latest base URL/model first to keep them in sync.
    const nextStatus = await agentSettingsBridge.saveSettings({ baseUrl, model });
    onStatusChange(nextStatus);
    return nextStatus;
  };

  // Persists only the runtime (base URL + model) pair.
  const handleSaveSettings = async () => {
    setIsBusy(true);
    setError(null);
    setSuccess(null);
    try {
      await saveRuntimeSettings();
      setSuccess('Runtime settings saved.');
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to save settings.');
    } finally {
      setIsBusy(false);
    }
  };

  // Persists runtime settings first, then stores the entered API key.
  const handleSaveApiKey = async () => {
    setIsBusy(true);
    setError(null);
    setSuccess(null);
    try {
      const savedStatus = await saveRuntimeSettings();
      const nextStatus = await agentSettingsBridge.updateApiKey({ apiKey });
      // Merge so fields from the runtime save are not lost if the key response
      // carries a partial status.
      onStatusChange({ ...savedStatus, ...nextStatus });
      setApiKey('');
      setSuccess(status.hasApiKey ? 'Plaintext API key updated.' : 'Plaintext API key saved.');
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to save API key.');
    } finally {
      setIsBusy(false);
    }
  };

  // Persists runtime settings first, then removes the stored API key.
  const handleClearApiKey = async () => {
    setIsBusy(true);
    setError(null);
    setSuccess(null);
    try {
      const savedStatus = await saveRuntimeSettings();
      const nextStatus = await agentSettingsBridge.clearApiKey();
      onStatusChange({ ...savedStatus, ...nextStatus });
      setApiKey('');
      setSuccess('Plaintext API key cleared.');
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to clear API key.');
    } finally {
      setIsBusy(false);
    }
  };

  return (
    <div className="fixed inset-0 z-40 flex items-center justify-center bg-black/70 px-4">
      <div className="w-full max-w-2xl rounded-xl border border-[#2a2a2a] bg-[#0a0a0a] shadow-2xl">
        {/* Header: title plus close control */}
        <div className="flex items-center justify-between border-b border-[#2a2a2a] px-5 py-4">
          <div>
            <h2 className="text-sm font-mono font-semibold text-[#e0e0e0]">AI Settings</h2>
            <p className="mt-1 text-xs font-mono text-[#888888]">
              Configure the Z.AI coding endpoint and store the API key in plain text.
            </p>
          </div>
          <button
            type="button"
            onClick={onClose}
            className="rounded px-2 py-1 text-xs font-mono text-[#888888] transition-colors hover:bg-[#1a1a1a] hover:text-[#e0e0e0]"
          >
            Close
          </button>
        </div>
        <div className="space-y-5 px-5 py-5">
          {/* Runtime section: base URL + model with readiness indicators */}
          <section className="rounded-lg border border-[#2a2a2a] bg-[#111111] p-4">
            <div className="mb-4 flex items-center justify-between">
              <div>
                <h3 className="text-xs font-mono uppercase tracking-wide text-[#888888]">
                  Runtime
                </h3>
                <p className="mt-1 text-sm font-mono text-[#e0e0e0]">
                  {status.configured ? 'Configured' : 'API key required'}
                </p>
              </div>
              <div className="text-right text-xs font-mono text-[#888888]">
                <div>Configured: {status.configured ? 'yes' : 'no'}</div>
                <div>API key stored: {status.hasApiKey ? 'yes' : 'no'}</div>
              </div>
            </div>
            <div className="grid gap-4 md:grid-cols-2">
              <label className="block">
                <span className="mb-2 block text-xs font-mono text-[#888888]">Base URL</span>
                <input
                  className={inputClassName}
                  value={baseUrl}
                  onChange={(event) => setBaseUrl(event.target.value)}
                  placeholder="https://api.z.ai/api/coding/paas/v4"
                />
              </label>
              <label className="block">
                <span className="mb-2 block text-xs font-mono text-[#888888]">Model</span>
                <input
                  className={inputClassName}
                  value={model}
                  onChange={(event) => setModel(event.target.value)}
                  placeholder="glm-5.1"
                />
              </label>
            </div>
            <div className="mt-4 flex justify-end">
              <button
                type="button"
                onClick={handleSaveSettings}
                disabled={isBusy}
                className="rounded border border-[#2a2a2a] bg-[#151515] px-3 py-2 text-xs font-mono text-[#e0e0e0] transition-colors hover:border-[#58a6ff] hover:text-[#58a6ff] disabled:cursor-not-allowed disabled:opacity-50"
              >
                Save Runtime
              </button>
            </div>
          </section>
          {/* API key section: save/replace and (when stored) clear */}
          <section className="rounded-lg border border-[#2a2a2a] bg-[#111111] p-4">
            <h3 className="text-xs font-mono uppercase tracking-wide text-[#888888]">
              {status.hasApiKey ? 'Plaintext API Key' : 'Save Plaintext API Key'}
            </h3>
            <p className="mt-2 text-xs font-mono text-[#888888]">
              This stores your provider key in plain text in the app settings file.
            </p>
            <label className="mt-4 block">
              <span className="mb-2 block text-xs font-mono text-[#888888]">
                {status.hasApiKey ? 'Replace API Key' : 'API Key'}
              </span>
              <input
                type="password"
                className={inputClassName}
                value={apiKey}
                onChange={(event) => setApiKey(event.target.value)}
                placeholder="Enter API key"
              />
            </label>
            <div className="mt-4 flex justify-between">
              <div>
                {status.hasApiKey ? (
                  <button
                    type="button"
                    onClick={handleClearApiKey}
                    disabled={isBusy}
                    className="rounded border border-[#2a2a2a] bg-[#151515] px-3 py-2 text-xs font-mono text-[#ff7b72] transition-colors hover:border-[#ff7b72] disabled:cursor-not-allowed disabled:opacity-50"
                  >
                    Clear Key
                  </button>
                ) : null}
              </div>
              <button
                type="button"
                onClick={handleSaveApiKey}
                disabled={isBusy}
                className="rounded border border-[#2a2a2a] bg-[#151515] px-3 py-2 text-xs font-mono text-[#e0e0e0] transition-colors hover:border-[#58a6ff] hover:text-[#58a6ff] disabled:cursor-not-allowed disabled:opacity-50"
              >
                {status.hasApiKey ? 'Save Runtime & Update Key' : 'Save Runtime & Save Key'}
              </button>
            </div>
          </section>
          {/* Transient success / error feedback */}
          {success ? (
            <div className="rounded border border-[#214f31] bg-[#102417] px-3 py-2 text-xs font-mono text-[#9ee6b3]">
              {success}
            </div>
          ) : null}
          {error ? (
            <div className="rounded border border-[#5c2b2b] bg-[#211313] px-3 py-2 text-xs font-mono text-[#ffb4b4]">
              {error}
            </div>
          ) : null}
        </div>
      </div>
    </div>
  );
};

View File

@@ -11,6 +11,8 @@ interface TabBarProps {
onTabClick: (id: string) => void;
onTabClose: (id: string) => void;
onNewTab: () => void;
onOpenSettings: () => void;
isAgentReady?: boolean;
onTabRename?: (id: string, newName: string) => void;
}
@@ -19,6 +21,8 @@ export const TabBar: React.FC<TabBarProps> = ({
onTabClick,
onTabClose,
onNewTab,
onOpenSettings,
isAgentReady = false,
onTabRename
}) => {
const [editingId, setEditingId] = useState<string | null>(null);
@@ -118,6 +122,19 @@ export const TabBar: React.FC<TabBarProps> = ({
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 4v16m8-8H4" />
</svg>
</button>
<button
onClick={onOpenSettings}
className={`ml-1 flex items-center gap-1 rounded px-2 py-1 text-[10px] font-mono transition-colors ${
isAgentReady
? 'text-[#00d26a] hover:bg-[#102417]'
: 'text-[#ffb000] hover:bg-[#241b08]'
}`}
title="AI settings"
>
<span className={`inline-block h-2 w-2 rounded-full ${isAgentReady ? 'bg-[#00d26a]' : 'bg-[#ffb000]'}`} />
AI
</button>
</div>
);
};

View File

@@ -0,0 +1,26 @@
import { invoke } from '@tauri-apps/api/core';
import {
AgentConfigStatus,
SaveAgentSettingsRequest,
UpdateAgentApiKeyRequest,
} from '../types/agentSettings';
/**
 * Typed frontend facade over the Tauri agent-settings commands. Each method
 * forwards to the matching backend command and resolves with the updated
 * configuration status.
 */
export const agentSettingsBridge = {
  /** Fetches the current agent configuration status. */
  async getStatus(): Promise<AgentConfigStatus> {
    return invoke<AgentConfigStatus>('get_agent_config_status');
  },
  /** Persists the runtime settings (base URL + model). */
  async saveSettings(request: SaveAgentSettingsRequest): Promise<AgentConfigStatus> {
    return invoke<AgentConfigStatus>('save_agent_settings', { request });
  },
  /** Stores or replaces the provider API key. */
  async updateApiKey(request: UpdateAgentApiKeyRequest): Promise<AgentConfigStatus> {
    return invoke<AgentConfigStatus>('update_agent_api_key', { request });
  },
  /** Removes the stored provider API key. */
  async clearApiKey(): Promise<AgentConfigStatus> {
    return invoke<AgentConfigStatus>('clear_agent_api_key');
  },
};

View File

@@ -0,0 +1,15 @@
/** Agent configuration snapshot returned by the `get_agent_config_status` command. */
export interface AgentConfigStatus {
  /** Whether the agent is ready for use (UI shows "API key required" when false). */
  configured: boolean;
  /** Whether a provider API key is currently stored (in plain text) in app settings. */
  hasApiKey: boolean;
  /** Chat endpoint base URL (e.g. the Z.AI coding endpoint). */
  baseUrl: string;
  /** Model identifier sent with chat requests. */
  model: string;
}
/** Payload for `save_agent_settings`: persists the runtime base URL and model. */
export interface SaveAgentSettingsRequest {
  baseUrl: string;
  model: string;
}
/** Payload for `update_agent_api_key`: the plaintext key to store. */
export interface UpdateAgentApiKeyRequest {
  apiKey: string;
}