feat: add support for API chats

This commit is contained in:
2026-02-04 13:44:44 -08:00
parent a0804ed32a
commit ae06cc301d
6 changed files with 1108 additions and 2 deletions
+90
View File
@@ -36,6 +36,33 @@ pub struct ClaudeStartOptions {
#[serde(default)]
pub ollama_model: Option<String>,
// OpenAI-specific options
#[serde(default)]
pub openai_api_key: Option<String>,
#[serde(default = "default_openai_base_url")]
pub openai_base_url: String,
#[serde(default)]
pub openai_model: Option<String>,
// Anthropic-specific options
#[serde(default)]
pub anthropic_api_key: Option<String>,
#[serde(default = "default_anthropic_base_url")]
pub anthropic_base_url: String,
#[serde(default)]
pub anthropic_model: Option<String>,
// Gemini-specific options
#[serde(default)]
pub gemini_api_key: Option<String>,
#[serde(default)]
pub gemini_model: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -65,6 +92,33 @@ pub struct HikariConfig {
#[serde(default)]
pub ollama_model: Option<String>,
// OpenAI-specific settings
#[serde(default)]
pub openai_api_key: Option<String>,
#[serde(default = "default_openai_base_url")]
pub openai_base_url: String,
#[serde(default)]
pub openai_model: Option<String>,
// Anthropic-specific settings
#[serde(default)]
pub anthropic_api_key: Option<String>,
#[serde(default = "default_anthropic_base_url")]
pub anthropic_base_url: String,
#[serde(default)]
pub anthropic_model: Option<String>,
// Gemini-specific settings
#[serde(default)]
pub gemini_api_key: Option<String>,
#[serde(default)]
pub gemini_model: Option<String>,
#[serde(default)]
pub theme: Theme,
@@ -130,6 +184,14 @@ impl Default for HikariConfig {
auto_granted_tools: Vec::new(),
ollama_base_url: default_ollama_base_url(),
ollama_model: None,
openai_api_key: None,
openai_base_url: default_openai_base_url(),
openai_model: None,
anthropic_api_key: None,
anthropic_base_url: default_anthropic_base_url(),
anthropic_model: None,
gemini_api_key: None,
gemini_model: None,
theme: Theme::default(),
greeting_enabled: true,
greeting_custom_prompt: None,
@@ -175,6 +237,14 @@ fn default_ollama_base_url() -> String {
"http://localhost:11434".to_string()
}
/// Serde default for the OpenAI base URL (official hosted endpoint).
fn default_openai_base_url() -> String {
    String::from("https://api.openai.com/v1")
}
/// Serde default for the Anthropic base URL (official hosted endpoint).
fn default_anthropic_base_url() -> String {
    String::from("https://api.anthropic.com")
}
#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Theme {
@@ -221,6 +291,18 @@ mod tests {
assert!(config.auto_granted_tools.is_empty());
assert_eq!(config.ollama_base_url, "http://localhost:11434");
assert!(config.ollama_model.is_none());
// OpenAI defaults
assert!(config.openai_api_key.is_none());
assert_eq!(config.openai_base_url, "https://api.openai.com/v1");
assert!(config.openai_model.is_none());
// Anthropic defaults
assert!(config.anthropic_api_key.is_none());
assert_eq!(config.anthropic_base_url, "https://api.anthropic.com");
assert!(config.anthropic_model.is_none());
// Gemini defaults
assert!(config.gemini_api_key.is_none());
assert!(config.gemini_model.is_none());
// Other settings
assert_eq!(config.theme, Theme::Dark);
assert!(config.greeting_enabled);
assert!(config.greeting_custom_prompt.is_none());
@@ -249,6 +331,14 @@ mod tests {
auto_granted_tools: vec!["Read".to_string(), "Glob".to_string()],
ollama_base_url: "http://localhost:11434".to_string(),
ollama_model: None,
openai_api_key: None,
openai_base_url: "https://api.openai.com/v1".to_string(),
openai_model: None,
anthropic_api_key: None,
anthropic_base_url: "https://api.anthropic.com".to_string(),
anthropic_model: None,
gemini_api_key: None,
gemini_model: None,
theme: Theme::Light,
greeting_enabled: true,
greeting_custom_prompt: Some("Hello!".to_string()),
+732 -1
View File
@@ -14,10 +14,13 @@ use crate::types::{
};
use crate::wsl_bridge::WslBridge;
/// A unified bridge that can wrap either the Claude CLI or Ollama provider
/// A unified bridge that can wrap different LLM providers
pub enum ProviderBridge {
    /// Claude Code CLI driven through the WSL bridge.
    ClaudeCli(WslBridge),
    /// Locally running Ollama server.
    Ollama(OllamaBridge),
    /// Direct OpenAI chat-completions API.
    OpenAi(OpenAiBridge),
    /// Direct Anthropic Messages API.
    Anthropic(AnthropicBridge),
    /// Direct Google Gemini API.
    Gemini(GeminiBridge),
}
impl ProviderBridge {
@@ -29,6 +32,15 @@ impl ProviderBridge {
ProviderType::Ollama => {
ProviderBridge::Ollama(OllamaBridge::new(conversation_id))
}
ProviderType::OpenAi => {
ProviderBridge::OpenAi(OpenAiBridge::new(conversation_id))
}
ProviderType::Anthropic => {
ProviderBridge::Anthropic(AnthropicBridge::new(conversation_id))
}
ProviderType::Gemini => {
ProviderBridge::Gemini(GeminiBridge::new(conversation_id))
}
}
}
@@ -36,6 +48,9 @@ impl ProviderBridge {
match self {
ProviderBridge::ClaudeCli(bridge) => bridge.start(app, options),
ProviderBridge::Ollama(bridge) => bridge.start(app, options),
ProviderBridge::OpenAi(bridge) => bridge.start(app, options),
ProviderBridge::Anthropic(bridge) => bridge.start(app, options),
ProviderBridge::Gemini(bridge) => bridge.start(app, options),
}
}
@@ -43,6 +58,9 @@ impl ProviderBridge {
match self {
ProviderBridge::ClaudeCli(bridge) => bridge.stop(app),
ProviderBridge::Ollama(bridge) => bridge.stop(app),
ProviderBridge::OpenAi(bridge) => bridge.stop(app),
ProviderBridge::Anthropic(bridge) => bridge.stop(app),
ProviderBridge::Gemini(bridge) => bridge.stop(app),
}
}
@@ -50,6 +68,9 @@ impl ProviderBridge {
match self {
ProviderBridge::ClaudeCli(bridge) => bridge.interrupt(app),
ProviderBridge::Ollama(bridge) => bridge.interrupt(app),
ProviderBridge::OpenAi(bridge) => bridge.interrupt(app),
ProviderBridge::Anthropic(bridge) => bridge.interrupt(app),
ProviderBridge::Gemini(bridge) => bridge.interrupt(app),
}
}
@@ -57,6 +78,9 @@ impl ProviderBridge {
match self {
ProviderBridge::ClaudeCli(bridge) => bridge.send_message(message),
ProviderBridge::Ollama(bridge) => bridge.send_message(message),
ProviderBridge::OpenAi(bridge) => bridge.send_message(message),
ProviderBridge::Anthropic(bridge) => bridge.send_message(message),
ProviderBridge::Gemini(bridge) => bridge.send_message(message),
}
}
@@ -68,6 +92,9 @@ impl ProviderBridge {
match self {
ProviderBridge::ClaudeCli(bridge) => bridge.send_tool_result(tool_use_id, result),
ProviderBridge::Ollama(bridge) => bridge.send_tool_result(tool_use_id, result),
ProviderBridge::OpenAi(bridge) => bridge.send_tool_result(tool_use_id, result),
ProviderBridge::Anthropic(bridge) => bridge.send_tool_result(tool_use_id, result),
ProviderBridge::Gemini(bridge) => bridge.send_tool_result(tool_use_id, result),
}
}
@@ -75,6 +102,9 @@ impl ProviderBridge {
match self {
ProviderBridge::ClaudeCli(bridge) => bridge.is_running(),
ProviderBridge::Ollama(bridge) => bridge.is_running(),
ProviderBridge::OpenAi(bridge) => bridge.is_running(),
ProviderBridge::Anthropic(bridge) => bridge.is_running(),
ProviderBridge::Gemini(bridge) => bridge.is_running(),
}
}
@@ -82,6 +112,9 @@ impl ProviderBridge {
match self {
ProviderBridge::ClaudeCli(bridge) => bridge.get_working_directory(),
ProviderBridge::Ollama(bridge) => bridge.get_working_directory(),
ProviderBridge::OpenAi(bridge) => bridge.get_working_directory(),
ProviderBridge::Anthropic(bridge) => bridge.get_working_directory(),
ProviderBridge::Gemini(bridge) => bridge.get_working_directory(),
}
}
@@ -89,6 +122,9 @@ impl ProviderBridge {
match self {
ProviderBridge::ClaudeCli(bridge) => bridge.get_stats(),
ProviderBridge::Ollama(bridge) => bridge.get_stats(),
ProviderBridge::OpenAi(bridge) => bridge.get_stats(),
ProviderBridge::Anthropic(bridge) => bridge.get_stats(),
ProviderBridge::Gemini(bridge) => bridge.get_stats(),
}
}
@@ -96,6 +132,9 @@ impl ProviderBridge {
match self {
ProviderBridge::ClaudeCli(_) => ProviderType::ClaudeCli,
ProviderBridge::Ollama(_) => ProviderType::Ollama,
ProviderBridge::OpenAi(_) => ProviderType::OpenAi,
ProviderBridge::Anthropic(_) => ProviderType::Anthropic,
ProviderBridge::Gemini(_) => ProviderType::Gemini,
}
}
}
@@ -495,6 +534,680 @@ fn emit_connection_status(
);
}
/// Bridge for OpenAI API provider
///
/// Talks directly to the OpenAI chat-completions endpoint rather than going
/// through the Claude CLI. One instance serves one conversation.
pub struct OpenAiBridge {
    // Conversation this bridge belongs to; attached to every emitted event.
    conversation_id: String,
    // Copied from ClaudeStartOptions.working_dir in `start`.
    working_directory: String,
    // Set true by `start`, false by `stop`/`interrupt`.
    is_running: bool,
    // Session usage counters, shared with the async streaming task.
    stats: Arc<RwLock<UsageStats>>,
    // Endpoint, credentials and model; populated from options in `start`.
    api_key: String,
    base_url: String,
    model: String,
    // Chat history in OpenAI message format (system + user turns).
    messages: Vec<serde_json::Value>,
    // Cooperative cancellation flag checked by the streaming loop.
    cancel_flag: Arc<std::sync::atomic::AtomicBool>,
    // Handle for emitting Tauri events; present only while running.
    app_handle: Option<AppHandle>,
}
impl OpenAiBridge {
    /// Create an idle bridge for `conversation_id`; endpoint, key and model
    /// receive their real values in `start`.
    pub fn new(conversation_id: String) -> Self {
        OpenAiBridge {
            conversation_id,
            working_directory: String::new(),
            is_running: false,
            stats: Arc::new(RwLock::new(UsageStats::new())),
            api_key: String::new(),
            base_url: "https://api.openai.com/v1".to_string(),
            model: "gpt-4o".to_string(),
            messages: Vec::new(),
            cancel_flag: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            app_handle: None,
        }
    }

    /// Configure the bridge from `options` and mark it running.
    ///
    /// Fails if already running or if no OpenAI API key was provided. Seeds
    /// the history with a system message when `custom_instructions` is
    /// non-empty, then emits Connecting -> Connected -> Idle. No network
    /// request happens here; requests are made per message in `send_message`.
    pub fn start(&mut self, app: AppHandle, options: ClaudeStartOptions) -> Result<(), String> {
        if self.is_running {
            return Err("OpenAI bridge already running".to_string());
        }
        self.working_directory = options.working_dir.clone();
        self.api_key = options.openai_api_key.clone().unwrap_or_default();
        self.base_url = options.openai_base_url.clone();
        self.model = options.openai_model.clone().unwrap_or_else(|| "gpt-4o".to_string());
        self.app_handle = Some(app.clone());
        self.cancel_flag.store(false, std::sync::atomic::Ordering::SeqCst);
        // NOTE(review): the fields above are already mutated when this check
        // fails; harmless today because the bridge stays not-running.
        if self.api_key.is_empty() {
            return Err("OpenAI API key is required".to_string());
        }
        self.messages.clear();
        if let Some(ref instructions) = options.custom_instructions {
            if !instructions.is_empty() {
                self.messages.push(serde_json::json!({
                    "role": "system",
                    "content": instructions
                }));
            }
        }
        emit_connection_status(&app, ConnectionStatus::Connecting, Some(self.conversation_id.clone()));
        self.is_running = true;
        emit_connection_status(&app, ConnectionStatus::Connected, Some(self.conversation_id.clone()));
        emit_state_change(&app, CharacterState::Idle, None, Some(self.conversation_id.clone()));
        Ok(())
    }

    /// Stop the bridge: request cancellation of any in-flight stream,
    /// persist a stats snapshot asynchronously, reset session counters and
    /// emit Disconnected.
    pub fn stop(&mut self, app: &AppHandle) {
        self.cancel_flag.store(true, std::sync::atomic::Ordering::SeqCst);
        self.is_running = false;
        self.app_handle = None;
        // Snapshot first so the save task doesn't race the reset below.
        let stats_snapshot = self.stats.read().clone();
        let app_clone = app.clone();
        tauri::async_runtime::spawn(async move {
            if let Err(e) = crate::stats::save_stats(&app_clone, &stats_snapshot).await {
                eprintln!("Failed to save stats: {}", e);
            }
        });
        self.stats.write().reset_session();
        emit_connection_status(app, ConnectionStatus::Disconnected, Some(self.conversation_id.clone()));
    }

    /// Cancel the current stream and mark the bridge disconnected. Unlike
    /// `stop`, this does not persist stats or drop the app handle.
    pub fn interrupt(&mut self, app: &AppHandle) -> Result<(), String> {
        self.cancel_flag.store(true, std::sync::atomic::Ordering::SeqCst);
        self.is_running = false;
        emit_connection_status(app, ConnectionStatus::Disconnected, Some(self.conversation_id.clone()));
        Ok(())
    }

    /// Append `message` as a user turn and spawn a background task that
    /// streams the reply, emitting `claude:stream` deltas and a final
    /// `claude:output` event (or an error event + Error state on failure).
    ///
    /// NOTE(review): the streamed assistant reply is never appended back to
    /// `self.messages` (the spawned task works on a clone), so follow-up
    /// requests omit prior assistant turns — confirm whether multi-turn
    /// context is intended here.
    pub fn send_message(&mut self, message: &str) -> Result<(), String> {
        if !self.is_running {
            return Err("OpenAI bridge not running".to_string());
        }
        let app = self.app_handle.clone().ok_or("App handle not available")?;
        self.messages.push(serde_json::json!({
            "role": "user",
            "content": message
        }));
        self.stats.write().increment_messages();
        emit_state_change(&app, CharacterState::Thinking, None, Some(self.conversation_id.clone()));
        // Clone everything the async task needs; it must not borrow `self`.
        let api_key = self.api_key.clone();
        let base_url = self.base_url.clone();
        let model = self.model.clone();
        let messages = self.messages.clone();
        let stats = self.stats.clone();
        let conv_id = self.conversation_id.clone();
        let cancel_flag = self.cancel_flag.clone();
        tauri::async_runtime::spawn(async move {
            match stream_openai_chat(&base_url, &api_key, &model, &messages, &app, &stats, &conv_id, &cancel_flag).await {
                Ok(_) => {}
                Err(e) => {
                    let _ = app.emit(
                        "claude:output",
                        OutputEvent {
                            line_type: "error".to_string(),
                            content: format!("OpenAI error: {}", e),
                            tool_name: None,
                            conversation_id: Some(conv_id.clone()),
                        },
                    );
                    emit_state_change(&app, CharacterState::Error, None, Some(conv_id));
                }
            }
        });
        Ok(())
    }

    /// Tool use is a CLI-provider feature; the raw chat API path rejects it.
    pub fn send_tool_result(&mut self, _tool_use_id: &str, _result: serde_json::Value) -> Result<(), String> {
        Err("Tool results are not supported with OpenAI provider".to_string())
    }

    /// Whether `start` has succeeded and `stop`/`interrupt` has not been called.
    pub fn is_running(&self) -> bool { self.is_running }
    /// Working directory captured from the start options.
    pub fn get_working_directory(&self) -> &str { &self.working_directory }
    /// Snapshot of the current session usage stats.
    pub fn get_stats(&self) -> UsageStats { self.stats.read().clone() }
}
/// Bridge for Anthropic API provider
///
/// Talks directly to the Anthropic Messages API (not the Claude CLI).
/// One instance serves one conversation.
pub struct AnthropicBridge {
    // Conversation this bridge belongs to; attached to every emitted event.
    conversation_id: String,
    // Copied from ClaudeStartOptions.working_dir in `start`.
    working_directory: String,
    // Set true by `start`, false by `stop`/`interrupt`.
    is_running: bool,
    // Session usage counters, shared with the async streaming task.
    stats: Arc<RwLock<UsageStats>>,
    // Endpoint, credentials and model; populated from options in `start`.
    api_key: String,
    base_url: String,
    model: String,
    // Chat history in Anthropic message format (content-block arrays).
    messages: Vec<serde_json::Value>,
    // Cooperative cancellation flag checked by the streaming loop.
    cancel_flag: Arc<std::sync::atomic::AtomicBool>,
    // Handle for emitting Tauri events; present only while running.
    app_handle: Option<AppHandle>,
}
impl AnthropicBridge {
    /// Create an idle bridge for `conversation_id`; endpoint, key and model
    /// receive their real values in `start`.
    pub fn new(conversation_id: String) -> Self {
        AnthropicBridge {
            conversation_id,
            working_directory: String::new(),
            is_running: false,
            stats: Arc::new(RwLock::new(UsageStats::new())),
            api_key: String::new(),
            base_url: "https://api.anthropic.com".to_string(),
            model: "claude-sonnet-4-5-20250514".to_string(),
            messages: Vec::new(),
            cancel_flag: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            app_handle: None,
        }
    }

    /// Configure the bridge from `options` and mark it running.
    ///
    /// Fails if already running or if no Anthropic API key was provided,
    /// then emits Connecting -> Connected -> Idle.
    ///
    /// NOTE(review): unlike the OpenAI bridge, `options.custom_instructions`
    /// is ignored here (the Messages API would take it as a top-level
    /// `system` field) — confirm whether that omission is intentional.
    pub fn start(&mut self, app: AppHandle, options: ClaudeStartOptions) -> Result<(), String> {
        if self.is_running {
            return Err("Anthropic bridge already running".to_string());
        }
        self.working_directory = options.working_dir.clone();
        self.api_key = options.anthropic_api_key.clone().unwrap_or_default();
        self.base_url = options.anthropic_base_url.clone();
        self.model = options.anthropic_model.clone().unwrap_or_else(|| "claude-sonnet-4-5-20250514".to_string());
        self.app_handle = Some(app.clone());
        self.cancel_flag.store(false, std::sync::atomic::Ordering::SeqCst);
        if self.api_key.is_empty() {
            return Err("Anthropic API key is required".to_string());
        }
        self.messages.clear();
        emit_connection_status(&app, ConnectionStatus::Connecting, Some(self.conversation_id.clone()));
        self.is_running = true;
        emit_connection_status(&app, ConnectionStatus::Connected, Some(self.conversation_id.clone()));
        emit_state_change(&app, CharacterState::Idle, None, Some(self.conversation_id.clone()));
        Ok(())
    }

    /// Stop the bridge: request cancellation of any in-flight stream,
    /// persist a stats snapshot asynchronously, reset session counters and
    /// emit Disconnected.
    pub fn stop(&mut self, app: &AppHandle) {
        self.cancel_flag.store(true, std::sync::atomic::Ordering::SeqCst);
        self.is_running = false;
        self.app_handle = None;
        // Snapshot first so the save task doesn't race the reset below.
        let stats_snapshot = self.stats.read().clone();
        let app_clone = app.clone();
        tauri::async_runtime::spawn(async move {
            if let Err(e) = crate::stats::save_stats(&app_clone, &stats_snapshot).await {
                eprintln!("Failed to save stats: {}", e);
            }
        });
        self.stats.write().reset_session();
        emit_connection_status(app, ConnectionStatus::Disconnected, Some(self.conversation_id.clone()));
    }

    /// Cancel the current stream and mark the bridge disconnected. Unlike
    /// `stop`, this does not persist stats or drop the app handle.
    pub fn interrupt(&mut self, app: &AppHandle) -> Result<(), String> {
        self.cancel_flag.store(true, std::sync::atomic::Ordering::SeqCst);
        self.is_running = false;
        emit_connection_status(app, ConnectionStatus::Disconnected, Some(self.conversation_id.clone()));
        Ok(())
    }

    /// Append `message` as a user turn (Anthropic content-block format) and
    /// spawn a background task that streams the reply, emitting
    /// `claude:stream` deltas and a final `claude:output` event.
    ///
    /// NOTE(review): the streamed assistant reply is never appended back to
    /// `self.messages`, so follow-up requests omit prior assistant turns —
    /// confirm whether multi-turn context is intended here.
    pub fn send_message(&mut self, message: &str) -> Result<(), String> {
        if !self.is_running {
            return Err("Anthropic bridge not running".to_string());
        }
        let app = self.app_handle.clone().ok_or("App handle not available")?;
        self.messages.push(serde_json::json!({
            "role": "user",
            "content": [{ "type": "text", "text": message }]
        }));
        self.stats.write().increment_messages();
        emit_state_change(&app, CharacterState::Thinking, None, Some(self.conversation_id.clone()));
        // Clone everything the async task needs; it must not borrow `self`.
        let api_key = self.api_key.clone();
        let base_url = self.base_url.clone();
        let model = self.model.clone();
        let messages = self.messages.clone();
        let stats = self.stats.clone();
        let conv_id = self.conversation_id.clone();
        let cancel_flag = self.cancel_flag.clone();
        tauri::async_runtime::spawn(async move {
            match stream_anthropic_chat(&base_url, &api_key, &model, &messages, &app, &stats, &conv_id, &cancel_flag).await {
                Ok(_) => {}
                Err(e) => {
                    let _ = app.emit(
                        "claude:output",
                        OutputEvent {
                            line_type: "error".to_string(),
                            content: format!("Anthropic error: {}", e),
                            tool_name: None,
                            conversation_id: Some(conv_id.clone()),
                        },
                    );
                    emit_state_change(&app, CharacterState::Error, None, Some(conv_id));
                }
            }
        });
        Ok(())
    }

    /// Tool use is a CLI-provider feature; the raw API path rejects it.
    pub fn send_tool_result(&mut self, _tool_use_id: &str, _result: serde_json::Value) -> Result<(), String> {
        Err("Tool results are not supported with direct Anthropic API provider".to_string())
    }

    /// Whether `start` has succeeded and `stop`/`interrupt` has not been called.
    pub fn is_running(&self) -> bool { self.is_running }
    /// Working directory captured from the start options.
    pub fn get_working_directory(&self) -> &str { &self.working_directory }
    /// Snapshot of the current session usage stats.
    pub fn get_stats(&self) -> UsageStats { self.stats.read().clone() }
}
/// Bridge for Google Gemini API provider
///
/// Talks directly to the Gemini generateContent streaming endpoint. Unlike
/// the other bridges there is no configurable base URL; the endpoint is
/// built per request in `stream_gemini_chat`.
pub struct GeminiBridge {
    // Conversation this bridge belongs to; attached to every emitted event.
    conversation_id: String,
    // Copied from ClaudeStartOptions.working_dir in `start`.
    working_directory: String,
    // Set true by `start`, false by `stop`/`interrupt`.
    is_running: bool,
    // Session usage counters, shared with the async streaming task.
    stats: Arc<RwLock<UsageStats>>,
    // Credentials and model; populated from options in `start`.
    api_key: String,
    model: String,
    // Chat history in Gemini `contents` format (role + parts).
    messages: Vec<serde_json::Value>,
    // Cooperative cancellation flag checked by the streaming loop.
    cancel_flag: Arc<std::sync::atomic::AtomicBool>,
    // Handle for emitting Tauri events; present only while running.
    app_handle: Option<AppHandle>,
}
impl GeminiBridge {
    /// Create an idle bridge for `conversation_id`; key and model receive
    /// their real values in `start`.
    pub fn new(conversation_id: String) -> Self {
        GeminiBridge {
            conversation_id,
            working_directory: String::new(),
            is_running: false,
            stats: Arc::new(RwLock::new(UsageStats::new())),
            api_key: String::new(),
            model: "gemini-2.0-flash".to_string(),
            messages: Vec::new(),
            cancel_flag: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            app_handle: None,
        }
    }

    /// Configure the bridge from `options` and mark it running.
    ///
    /// Fails if already running or if no Gemini API key was provided, then
    /// emits Connecting -> Connected -> Idle.
    ///
    /// NOTE(review): `options.custom_instructions` is not forwarded here
    /// (the OpenAI bridge does forward it) — confirm intentional.
    pub fn start(&mut self, app: AppHandle, options: ClaudeStartOptions) -> Result<(), String> {
        if self.is_running {
            return Err("Gemini bridge already running".to_string());
        }
        self.working_directory = options.working_dir.clone();
        self.api_key = options.gemini_api_key.clone().unwrap_or_default();
        self.model = options.gemini_model.clone().unwrap_or_else(|| "gemini-2.0-flash".to_string());
        self.app_handle = Some(app.clone());
        self.cancel_flag.store(false, std::sync::atomic::Ordering::SeqCst);
        if self.api_key.is_empty() {
            return Err("Gemini API key is required".to_string());
        }
        self.messages.clear();
        emit_connection_status(&app, ConnectionStatus::Connecting, Some(self.conversation_id.clone()));
        self.is_running = true;
        emit_connection_status(&app, ConnectionStatus::Connected, Some(self.conversation_id.clone()));
        emit_state_change(&app, CharacterState::Idle, None, Some(self.conversation_id.clone()));
        Ok(())
    }

    /// Stop the bridge: request cancellation of any in-flight stream,
    /// persist a stats snapshot asynchronously, reset session counters and
    /// emit Disconnected.
    pub fn stop(&mut self, app: &AppHandle) {
        self.cancel_flag.store(true, std::sync::atomic::Ordering::SeqCst);
        self.is_running = false;
        self.app_handle = None;
        // Snapshot first so the save task doesn't race the reset below.
        let stats_snapshot = self.stats.read().clone();
        let app_clone = app.clone();
        tauri::async_runtime::spawn(async move {
            if let Err(e) = crate::stats::save_stats(&app_clone, &stats_snapshot).await {
                eprintln!("Failed to save stats: {}", e);
            }
        });
        self.stats.write().reset_session();
        emit_connection_status(app, ConnectionStatus::Disconnected, Some(self.conversation_id.clone()));
    }

    /// Cancel the current stream and mark the bridge disconnected. Unlike
    /// `stop`, this does not persist stats or drop the app handle.
    pub fn interrupt(&mut self, app: &AppHandle) -> Result<(), String> {
        self.cancel_flag.store(true, std::sync::atomic::Ordering::SeqCst);
        self.is_running = false;
        emit_connection_status(app, ConnectionStatus::Disconnected, Some(self.conversation_id.clone()));
        Ok(())
    }

    /// Append `message` as a user turn (Gemini `parts` format) and spawn a
    /// background task that streams the reply, emitting `claude:stream`
    /// deltas and a final `claude:output` event.
    ///
    /// NOTE(review): the streamed assistant reply is never appended back to
    /// `self.messages`, so follow-up requests omit prior model turns —
    /// confirm whether multi-turn context is intended here.
    pub fn send_message(&mut self, message: &str) -> Result<(), String> {
        if !self.is_running {
            return Err("Gemini bridge not running".to_string());
        }
        let app = self.app_handle.clone().ok_or("App handle not available")?;
        self.messages.push(serde_json::json!({
            "role": "user",
            "parts": [{ "text": message }]
        }));
        self.stats.write().increment_messages();
        emit_state_change(&app, CharacterState::Thinking, None, Some(self.conversation_id.clone()));
        // Clone everything the async task needs; it must not borrow `self`.
        let api_key = self.api_key.clone();
        let model = self.model.clone();
        let messages = self.messages.clone();
        let stats = self.stats.clone();
        let conv_id = self.conversation_id.clone();
        let cancel_flag = self.cancel_flag.clone();
        tauri::async_runtime::spawn(async move {
            match stream_gemini_chat(&api_key, &model, &messages, &app, &stats, &conv_id, &cancel_flag).await {
                Ok(_) => {}
                Err(e) => {
                    let _ = app.emit(
                        "claude:output",
                        OutputEvent {
                            line_type: "error".to_string(),
                            content: format!("Gemini error: {}", e),
                            tool_name: None,
                            conversation_id: Some(conv_id.clone()),
                        },
                    );
                    emit_state_change(&app, CharacterState::Error, None, Some(conv_id));
                }
            }
        });
        Ok(())
    }

    /// Tool use is a CLI-provider feature; the raw API path rejects it.
    pub fn send_tool_result(&mut self, _tool_use_id: &str, _result: serde_json::Value) -> Result<(), String> {
        Err("Tool results are not supported with Gemini provider".to_string())
    }

    /// Whether `start` has succeeded and `stop`/`interrupt` has not been called.
    pub fn is_running(&self) -> bool { self.is_running }
    /// Working directory captured from the start options.
    pub fn get_working_directory(&self) -> &str { &self.working_directory }
    /// Snapshot of the current session usage stats.
    pub fn get_stats(&self) -> UsageStats { self.stats.read().clone() }
}
// Stream functions for the new providers
async fn stream_openai_chat(
base_url: &str,
api_key: &str,
model: &str,
messages: &[serde_json::Value],
app: &AppHandle,
stats: &Arc<RwLock<UsageStats>>,
conversation_id: &str,
cancel_flag: &Arc<std::sync::atomic::AtomicBool>,
) -> Result<String, String> {
let client = reqwest::Client::new();
let url = format!("{}/chat/completions", base_url);
emit_state_change(app, CharacterState::Typing, None, Some(conversation_id.to_string()));
let request_body = serde_json::json!({
"model": model,
"messages": messages,
"stream": true
});
let response = client
.post(&url)
.header("Authorization", format!("Bearer {}", api_key))
.header("Content-Type", "application/json")
.json(&request_body)
.send()
.await
.map_err(|e| format!("Failed to send request to OpenAI: {}", e))?;
if !response.status().is_success() {
let error_text = response.text().await.unwrap_or_default();
return Err(format!("OpenAI API error: {}", error_text));
}
let mut full_response = String::new();
let mut stream = response.bytes_stream();
use futures_util::StreamExt;
while let Some(chunk_result) = stream.next().await {
if cancel_flag.load(std::sync::atomic::Ordering::SeqCst) {
break;
}
match chunk_result {
Ok(chunk) => {
let chunk_str = String::from_utf8_lossy(&chunk);
for line in chunk_str.lines() {
if line.starts_with("data: ") {
let data = &line[6..];
if data == "[DONE]" {
break;
}
if let Ok(parsed) = serde_json::from_str::<serde_json::Value>(data) {
if let Some(content) = parsed["choices"][0]["delta"]["content"].as_str() {
full_response.push_str(content);
let _ = app.emit("claude:stream", content.to_string());
}
}
}
}
}
Err(e) => return Err(format!("Error reading stream: {}", e)),
}
}
stats.write().increment_messages();
finalize_response(app, &full_response, stats, conversation_id, model).await;
Ok(full_response)
}
async fn stream_anthropic_chat(
base_url: &str,
api_key: &str,
model: &str,
messages: &[serde_json::Value],
app: &AppHandle,
stats: &Arc<RwLock<UsageStats>>,
conversation_id: &str,
cancel_flag: &Arc<std::sync::atomic::AtomicBool>,
) -> Result<String, String> {
let client = reqwest::Client::new();
let url = format!("{}/v1/messages", base_url);
emit_state_change(app, CharacterState::Typing, None, Some(conversation_id.to_string()));
let request_body = serde_json::json!({
"model": model,
"messages": messages,
"max_tokens": 4096,
"stream": true
});
let response = client
.post(&url)
.header("x-api-key", api_key)
.header("anthropic-version", "2023-06-01")
.header("Content-Type", "application/json")
.json(&request_body)
.send()
.await
.map_err(|e| format!("Failed to send request to Anthropic: {}", e))?;
if !response.status().is_success() {
let error_text = response.text().await.unwrap_or_default();
return Err(format!("Anthropic API error: {}", error_text));
}
let mut full_response = String::new();
let mut stream = response.bytes_stream();
use futures_util::StreamExt;
while let Some(chunk_result) = stream.next().await {
if cancel_flag.load(std::sync::atomic::Ordering::SeqCst) {
break;
}
match chunk_result {
Ok(chunk) => {
let chunk_str = String::from_utf8_lossy(&chunk);
for line in chunk_str.lines() {
if line.starts_with("data: ") {
let data = &line[6..];
if let Ok(parsed) = serde_json::from_str::<serde_json::Value>(data) {
if parsed["type"] == "content_block_delta" {
if let Some(text) = parsed["delta"]["text"].as_str() {
full_response.push_str(text);
let _ = app.emit("claude:stream", text.to_string());
}
}
}
}
}
}
Err(e) => return Err(format!("Error reading stream: {}", e)),
}
}
stats.write().increment_messages();
finalize_response(app, &full_response, stats, conversation_id, model).await;
Ok(full_response)
}
async fn stream_gemini_chat(
api_key: &str,
model: &str,
messages: &[serde_json::Value],
app: &AppHandle,
stats: &Arc<RwLock<UsageStats>>,
conversation_id: &str,
cancel_flag: &Arc<std::sync::atomic::AtomicBool>,
) -> Result<String, String> {
let client = reqwest::Client::new();
let url = format!(
"https://generativelanguage.googleapis.com/v1beta/models/{}:streamGenerateContent?alt=sse&key={}",
model, api_key
);
emit_state_change(app, CharacterState::Typing, None, Some(conversation_id.to_string()));
let request_body = serde_json::json!({
"contents": messages,
"generationConfig": {
"maxOutputTokens": 4096
}
});
let response = client
.post(&url)
.header("Content-Type", "application/json")
.json(&request_body)
.send()
.await
.map_err(|e| format!("Failed to send request to Gemini: {}", e))?;
if !response.status().is_success() {
let error_text = response.text().await.unwrap_or_default();
return Err(format!("Gemini API error: {}", error_text));
}
let mut full_response = String::new();
let mut stream = response.bytes_stream();
use futures_util::StreamExt;
while let Some(chunk_result) = stream.next().await {
if cancel_flag.load(std::sync::atomic::Ordering::SeqCst) {
break;
}
match chunk_result {
Ok(chunk) => {
let chunk_str = String::from_utf8_lossy(&chunk);
for line in chunk_str.lines() {
if line.starts_with("data: ") {
let data = &line[6..];
if let Ok(parsed) = serde_json::from_str::<serde_json::Value>(data) {
if let Some(text) = parsed["candidates"][0]["content"]["parts"][0]["text"].as_str() {
full_response.push_str(text);
let _ = app.emit("claude:stream", text.to_string());
}
}
}
}
}
Err(e) => return Err(format!("Error reading stream: {}", e)),
}
}
stats.write().increment_messages();
finalize_response(app, &full_response, stats, conversation_id, model).await;
Ok(full_response)
}
/// Emit the completed assistant turn, fold its usage into the shared stats,
/// surface any newly unlocked achievements, and publish a stats update.
///
/// Fix: usage (estimated tokens + code-block count) is now recorded BEFORE
/// the achievement check and the `claude:stats` emit. Previously
/// `add_usage` ran last, so the emitted snapshot was stale and achievements
/// never saw the final message's usage.
async fn finalize_response(
    app: &AppHandle,
    full_response: &str,
    stats: &Arc<RwLock<UsageStats>>,
    conversation_id: &str,
    model: &str,
) {
    // Emit the complete response
    let _ = app.emit(
        "claude:output",
        OutputEvent {
            line_type: "assistant".to_string(),
            content: full_response.to_string(),
            tool_name: None,
            conversation_id: Some(conversation_id.to_string()),
        },
    );

    // Record usage under a single write lock. Tokens are a rough estimate
    // (~4 chars per token); no input-token figure is available from the
    // stream, hence 0. Each fenced code block contributes one pair of ```.
    let estimated_tokens = (full_response.len() / 4) as u64;
    let code_blocks = full_response.matches("```").count() / 2;
    {
        let mut stats_guard = stats.write();
        stats_guard.add_usage(0, estimated_tokens, model);
        for _ in 0..code_blocks {
            stats_guard.increment_code_blocks();
        }
    }

    // Check achievements now that usage is up to date.
    let newly_unlocked = {
        let mut stats_guard = stats.write();
        stats_guard.check_achievements()
    };
    for achievement_id in &newly_unlocked {
        let info = get_achievement_info(achievement_id);
        let _ = app.emit(
            "achievement:unlocked",
            AchievementUnlockedEvent { achievement: info },
        );
    }
    if !newly_unlocked.is_empty() {
        // Persist achievement progress off the current task.
        let app_clone = app.clone();
        let achievements_progress = stats.read().achievements.clone();
        tauri::async_runtime::spawn(async move {
            if let Err(e) = crate::achievements::save_achievements(&app_clone, &achievements_progress).await {
                eprintln!("Failed to save achievements: {}", e);
            }
        });
    }

    // Emit stats update — snapshot now includes the usage recorded above.
    let current_stats = stats.read().clone();
    let _ = app.emit("claude:stats", StatsUpdateEvent { stats: current_stats });
    emit_state_change(app, CharacterState::Success, None, Some(conversation_id.to_string()));
}
#[cfg(test)]
mod tests {
use super::*;
@@ -514,6 +1227,15 @@ mod tests {
let ollama = ProviderBridge::new(ProviderType::Ollama, "conv2".to_string());
assert_eq!(ollama.provider_type(), ProviderType::Ollama);
let openai = ProviderBridge::new(ProviderType::OpenAi, "conv3".to_string());
assert_eq!(openai.provider_type(), ProviderType::OpenAi);
let anthropic = ProviderBridge::new(ProviderType::Anthropic, "conv4".to_string());
assert_eq!(anthropic.provider_type(), ProviderType::Anthropic);
let gemini = ProviderBridge::new(ProviderType::Gemini, "conv5".to_string());
assert_eq!(gemini.provider_type(), ProviderType::Gemini);
}
#[test]
@@ -523,6 +1245,15 @@ mod tests {
let bridge = ProviderBridge::new(ProviderType::Ollama, "test".to_string());
assert!(!bridge.is_running());
let bridge = ProviderBridge::new(ProviderType::OpenAi, "test".to_string());
assert!(!bridge.is_running());
let bridge = ProviderBridge::new(ProviderType::Anthropic, "test".to_string());
assert!(!bridge.is_running());
let bridge = ProviderBridge::new(ProviderType::Gemini, "test".to_string());
assert!(!bridge.is_running());
}
#[test]
+59
View File
@@ -21,6 +21,9 @@ pub enum ProviderType {
#[default]
ClaudeCli,
Ollama,
OpenAi,
Anthropic,
Gemini,
}
#[allow(dead_code)]
@@ -29,6 +32,9 @@ impl ProviderType {
match self {
ProviderType::ClaudeCli => "Claude CLI",
ProviderType::Ollama => "Ollama (Local)",
ProviderType::OpenAi => "OpenAI API",
ProviderType::Anthropic => "Anthropic API",
ProviderType::Gemini => "Google Gemini",
}
}
@@ -36,10 +42,24 @@ impl ProviderType {
match self {
ProviderType::ClaudeCli => "Use Claude Code CLI for AI assistance",
ProviderType::Ollama => "Use locally running Ollama models",
ProviderType::OpenAi => "Direct OpenAI API access (GPT-4, etc.)",
ProviderType::Anthropic => "Direct Anthropic API access (Claude models)",
ProviderType::Gemini => "Direct Google Gemini API access",
}
}
/// Whether this provider needs a user-supplied API key (true only for the
/// hosted OpenAI/Anthropic/Gemini API providers).
pub fn requires_api_key(&self) -> bool {
    matches!(
        self,
        ProviderType::OpenAi | ProviderType::Anthropic | ProviderType::Gemini
    )
}
}
// Note: The new providers (OpenAI, Anthropic, Gemini) are implemented directly
// in provider_bridge.rs using the Bridge pattern rather than the LlmProvider trait.
// This simplifies the architecture while still providing full functionality.
#[allow(dead_code)]
pub fn create_provider(
provider_type: ProviderType,
@@ -48,6 +68,12 @@ pub fn create_provider(
match provider_type {
ProviderType::ClaudeCli => Box::new(ClaudeCliProvider::new(config)),
ProviderType::Ollama => Box::new(OllamaProvider::new(config)),
// The new API-based providers are handled in provider_bridge.rs
ProviderType::OpenAi | ProviderType::Anthropic | ProviderType::Gemini => {
// These providers use the Bridge pattern in provider_bridge.rs
// Fall back to Claude CLI for trait-based usage
Box::new(ClaudeCliProvider::new(config))
}
}
}
@@ -59,6 +85,9 @@ mod tests {
fn test_provider_type_display_name() {
assert_eq!(ProviderType::ClaudeCli.display_name(), "Claude CLI");
assert_eq!(ProviderType::Ollama.display_name(), "Ollama (Local)");
assert_eq!(ProviderType::OpenAi.display_name(), "OpenAI API");
assert_eq!(ProviderType::Anthropic.display_name(), "Anthropic API");
assert_eq!(ProviderType::Gemini.display_name(), "Google Gemini");
}
#[test]
@@ -76,6 +105,18 @@ mod tests {
let ollama = ProviderType::Ollama;
let json = serde_json::to_string(&ollama).unwrap();
assert_eq!(json, "\"ollama\"");
let openai = ProviderType::OpenAi;
let json = serde_json::to_string(&openai).unwrap();
assert_eq!(json, "\"open_ai\"");
let anthropic = ProviderType::Anthropic;
let json = serde_json::to_string(&anthropic).unwrap();
assert_eq!(json, "\"anthropic\"");
let gemini = ProviderType::Gemini;
let json = serde_json::to_string(&gemini).unwrap();
assert_eq!(json, "\"gemini\"");
}
#[test]
@@ -85,5 +126,23 @@ mod tests {
let ollama: ProviderType = serde_json::from_str("\"ollama\"").unwrap();
assert_eq!(ollama, ProviderType::Ollama);
let openai: ProviderType = serde_json::from_str("\"open_ai\"").unwrap();
assert_eq!(openai, ProviderType::OpenAi);
let anthropic: ProviderType = serde_json::from_str("\"anthropic\"").unwrap();
assert_eq!(anthropic, ProviderType::Anthropic);
let gemini: ProviderType = serde_json::from_str("\"gemini\"").unwrap();
assert_eq!(gemini, ProviderType::Gemini);
}
#[test]
// Only the hosted API providers need a key; Claude CLI and Ollama do not.
fn test_provider_type_requires_api_key() {
    assert!(!ProviderType::ClaudeCli.requires_api_key());
    assert!(!ProviderType::Ollama.requires_api_key());
    assert!(ProviderType::OpenAi.requires_api_key());
    assert!(ProviderType::Anthropic.requires_api_key());
    assert!(ProviderType::Gemini.requires_api_key());
}
}
+190
View File
@@ -23,6 +23,14 @@
auto_granted_tools: [],
ollama_base_url: "http://localhost:11434",
ollama_model: null,
openai_api_key: null,
openai_base_url: "https://api.openai.com/v1",
openai_model: null,
anthropic_api_key: null,
anthropic_base_url: "https://api.anthropic.com",
anthropic_model: null,
gemini_api_key: null,
gemini_model: null,
theme: "dark",
greeting_enabled: true,
greeting_custom_prompt: null,
@@ -79,6 +87,9 @@
const availableProviders: { value: ProviderType; label: string; description: string }[] = [
{ value: "claude_cli", label: "Claude CLI", description: "Use Claude Code CLI for AI assistance" },
{ value: "ollama", label: "Ollama (Local)", description: "Use locally running Ollama models" },
{ value: "open_ai", label: "OpenAI API", description: "Direct OpenAI API access (GPT-4o, etc.)" },
{ value: "anthropic", label: "Anthropic API", description: "Direct Anthropic API access (Claude models)" },
{ value: "gemini", label: "Google Gemini", description: "Direct Google Gemini API access" },
];
const availableModels = [
@@ -97,6 +108,29 @@
{ value: "gemma2", label: "Gemma 2" },
];
const openaiModels = [
{ value: "", label: "Default (gpt-4o)" },
{ value: "gpt-4o", label: "GPT-4o" },
{ value: "gpt-4o-mini", label: "GPT-4o Mini" },
{ value: "gpt-4-turbo", label: "GPT-4 Turbo" },
{ value: "gpt-3.5-turbo", label: "GPT-3.5 Turbo" },
];
const anthropicModels = [
{ value: "", label: "Default (Claude Sonnet 4.5)" },
{ value: "claude-sonnet-4-5-20250514", label: "Claude Sonnet 4.5" },
{ value: "claude-opus-4-20250514", label: "Claude Opus 4" },
{ value: "claude-3-5-sonnet-20241022", label: "Claude 3.5 Sonnet" },
{ value: "claude-3-5-haiku-20241022", label: "Claude 3.5 Haiku" },
];
const geminiModels = [
{ value: "", label: "Default (gemini-2.0-flash)" },
{ value: "gemini-2.0-flash", label: "Gemini 2.0 Flash" },
{ value: "gemini-1.5-pro", label: "Gemini 1.5 Pro" },
{ value: "gemini-1.5-flash", label: "Gemini 1.5 Flash" },
];
const commonTools = [
"Read",
"Write",
@@ -297,6 +331,162 @@
</div>
</div>
{/if}
<!-- OpenAI-specific settings -->
{#if config.provider_type === "open_ai"}
<div class="mt-4 p-3 bg-[var(--bg-primary)] rounded-lg border border-[var(--border-color)]">
<h4 class="text-sm font-medium text-[var(--text-primary)] mb-3">OpenAI Settings</h4>
<!-- OpenAI API Key -->
<div class="mb-3">
<label for="openai-api-key" class="block text-xs text-[var(--text-secondary)] mb-1">
API Key
</label>
<input
id="openai-api-key"
type="password"
bind:value={config.openai_api_key}
placeholder="sk-..."
class="w-full px-3 py-2 text-sm bg-[var(--bg-secondary)] border border-[var(--border-color)] rounded-lg text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent-primary)]"
/>
</div>
<!-- OpenAI Base URL -->
<div class="mb-3">
<label for="openai-url" class="block text-xs text-[var(--text-secondary)] mb-1">
Base URL <span class="text-[var(--text-tertiary)]">(for OpenAI-compatible APIs)</span>
</label>
<input
id="openai-url"
type="text"
bind:value={config.openai_base_url}
placeholder="https://api.openai.com/v1"
class="w-full px-3 py-2 text-sm bg-[var(--bg-secondary)] border border-[var(--border-color)] rounded-lg text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent-primary)]"
/>
</div>
<!-- OpenAI Model Selection -->
<div class="mb-3">
<label for="openai-model" class="block text-xs text-[var(--text-secondary)] mb-1">
Model
</label>
<select
id="openai-model"
bind:value={config.openai_model}
class="w-full px-3 py-2 text-sm bg-[var(--bg-secondary)] border border-[var(--border-color)] rounded-lg text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent-primary)]"
>
{#each openaiModels as model (model.value)}
<option value={model.value}>{model.label}</option>
{/each}
</select>
</div>
<div class="p-2 bg-blue-500/10 border border-blue-500/30 rounded text-xs text-blue-400">
<strong>Tip:</strong> You can use this with any OpenAI-compatible API (Groq, Together AI, etc.)
by changing the Base URL.
</div>
</div>
{/if}
<!-- Anthropic-specific settings -->
{#if config.provider_type === "anthropic"}
<div class="mt-4 p-3 bg-[var(--bg-primary)] rounded-lg border border-[var(--border-color)]">
<h4 class="text-sm font-medium text-[var(--text-primary)] mb-3">Anthropic Settings</h4>
<!-- Anthropic API Key -->
<div class="mb-3">
<label for="anthropic-api-key" class="block text-xs text-[var(--text-secondary)] mb-1">
API Key
</label>
<input
id="anthropic-api-key"
type="password"
bind:value={config.anthropic_api_key}
placeholder="sk-ant-..."
class="w-full px-3 py-2 text-sm bg-[var(--bg-secondary)] border border-[var(--border-color)] rounded-lg text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent-primary)]"
/>
</div>
<!-- Anthropic Base URL -->
<div class="mb-3">
<label for="anthropic-url" class="block text-xs text-[var(--text-secondary)] mb-1">
Base URL <span class="text-[var(--text-tertiary)]">(optional)</span>
</label>
<input
id="anthropic-url"
type="text"
bind:value={config.anthropic_base_url}
placeholder="https://api.anthropic.com"
class="w-full px-3 py-2 text-sm bg-[var(--bg-secondary)] border border-[var(--border-color)] rounded-lg text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent-primary)]"
/>
</div>
<!-- Anthropic Model Selection -->
<div class="mb-3">
<label for="anthropic-model" class="block text-xs text-[var(--text-secondary)] mb-1">
Model
</label>
<select
id="anthropic-model"
bind:value={config.anthropic_model}
class="w-full px-3 py-2 text-sm bg-[var(--bg-secondary)] border border-[var(--border-color)] rounded-lg text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent-primary)]"
>
{#each anthropicModels as model (model.value)}
<option value={model.value}>{model.label}</option>
{/each}
</select>
</div>
<div class="p-2 bg-purple-500/10 border border-purple-500/30 rounded text-xs text-purple-400">
<strong>Note:</strong> This uses the Anthropic API directly without Claude Code CLI features
like tools, MCP, or thinking blocks.
</div>
</div>
{/if}
<!-- Gemini-specific settings -->
{#if config.provider_type === "gemini"}
<div class="mt-4 p-3 bg-[var(--bg-primary)] rounded-lg border border-[var(--border-color)]">
<h4 class="text-sm font-medium text-[var(--text-primary)] mb-3">Gemini Settings</h4>
<!-- Gemini API Key -->
<div class="mb-3">
<label for="gemini-api-key" class="block text-xs text-[var(--text-secondary)] mb-1">
API Key
</label>
<input
id="gemini-api-key"
type="password"
bind:value={config.gemini_api_key}
placeholder="AIza..."
class="w-full px-3 py-2 text-sm bg-[var(--bg-secondary)] border border-[var(--border-color)] rounded-lg text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent-primary)]"
/>
<p class="text-xs text-[var(--text-tertiary)] mt-1">
Get your API key from <a href="https://aistudio.google.com/apikey" target="_blank" class="text-[var(--accent-secondary)] hover:underline">Google AI Studio</a>
</p>
</div>
<!-- Gemini Model Selection -->
<div class="mb-3">
<label for="gemini-model" class="block text-xs text-[var(--text-secondary)] mb-1">
Model
</label>
<select
id="gemini-model"
bind:value={config.gemini_model}
class="w-full px-3 py-2 text-sm bg-[var(--bg-secondary)] border border-[var(--border-color)] rounded-lg text-[var(--text-primary)] focus:outline-none focus:border-[var(--accent-primary)]"
>
{#each geminiModels as model (model.value)}
<option value={model.value}>{model.label}</option>
{/each}
</select>
</div>
<div class="p-2 bg-green-500/10 border border-green-500/30 rounded text-xs text-green-400">
<strong>Note:</strong> Gemini has a generous free tier! Great for experimenting.
</div>
</div>
{/if}
</section>
<!-- Agent Settings Section -->
+16
View File
@@ -51,6 +51,14 @@
auto_granted_tools: [],
ollama_base_url: "http://localhost:11434",
ollama_model: null,
openai_api_key: null,
openai_base_url: "https://api.openai.com/v1",
openai_model: null,
anthropic_api_key: null,
anthropic_base_url: "https://api.anthropic.com",
anthropic_model: null,
gemini_api_key: null,
gemini_model: null,
theme: "dark",
greeting_enabled: true,
greeting_custom_prompt: null,
@@ -157,6 +165,14 @@
allowed_tools: allAllowedTools,
ollama_base_url: currentConfig.ollama_base_url || "http://localhost:11434",
ollama_model: currentConfig.ollama_model || null,
openai_api_key: currentConfig.openai_api_key || null,
openai_base_url: currentConfig.openai_base_url || "https://api.openai.com/v1",
openai_model: currentConfig.openai_model || null,
anthropic_api_key: currentConfig.anthropic_api_key || null,
anthropic_base_url: currentConfig.anthropic_base_url || "https://api.anthropic.com",
anthropic_model: currentConfig.anthropic_model || null,
gemini_api_key: currentConfig.gemini_api_key || null,
gemini_model: currentConfig.gemini_model || null,
},
});
} catch (error) {
+21 -1
View File
@@ -2,7 +2,7 @@ import { writable, derived } from "svelte/store";
import { invoke } from "@tauri-apps/api/core";
export type Theme = "dark" | "light" | "high-contrast" | "custom";
export type ProviderType = "claude_cli" | "ollama";
export type ProviderType = "claude_cli" | "ollama" | "open_ai" | "anthropic" | "gemini";
export interface CustomThemeColors {
bg_primary: string | null;
@@ -22,8 +22,20 @@ export interface HikariConfig {
custom_instructions: string | null;
mcp_servers_json: string | null;
auto_granted_tools: string[];
// Ollama settings
ollama_base_url: string;
ollama_model: string | null;
// OpenAI settings
openai_api_key: string | null;
openai_base_url: string;
openai_model: string | null;
// Anthropic settings
anthropic_api_key: string | null;
anthropic_base_url: string;
anthropic_model: string | null;
// Gemini settings
gemini_api_key: string | null;
gemini_model: string | null;
theme: Theme;
greeting_enabled: boolean;
greeting_custom_prompt: string | null;
@@ -52,6 +64,14 @@ const defaultConfig: HikariConfig = {
auto_granted_tools: [],
ollama_base_url: "http://localhost:11434",
ollama_model: null,
openai_api_key: null,
openai_base_url: "https://api.openai.com/v1",
openai_model: null,
anthropic_api_key: null,
anthropic_base_url: "https://api.anthropic.com",
anthropic_model: null,
gemini_api_key: null,
gemini_model: null,
theme: "dark",
greeting_enabled: true,
greeting_custom_prompt: null,