generated from nhcarrigan/template
645f5037d3
dirs::home_dir() returns the Windows home (C:\Users\accou) on Windows builds, but the global CLAUDE.md lives in the WSL home directory. Route both get_global_claude_md and save_global_claude_md through WSL on Windows, matching the pattern used by list_skills and list_memory_files.
3486 lines
111 KiB
Rust
3486 lines
111 KiB
Rust
use std::path::PathBuf;
|
||
use serde::{Deserialize, Serialize};
|
||
use tauri::{AppHandle, Manager, State};
|
||
use tauri_plugin_http::reqwest;
|
||
use tauri_plugin_store::StoreExt;
|
||
|
||
use crate::achievements::{get_achievement_info, load_achievements, AchievementUnlockedEvent};
|
||
use crate::bridge_manager::SharedBridgeManager;
|
||
use crate::config::{ClaudeStartOptions, HikariConfig};
|
||
use crate::process_ext::HideWindow;
|
||
use crate::stats::UsageStats;
|
||
use crate::temp_manager::SharedTempFileManager;
|
||
|
||
/// Key under which the serialized `HikariConfig` lives in hikari-config.json.
const CONFIG_STORE_KEY: &str = "config";
|
||
|
||
/// Convert a Windows path to a WSL path
/// Example: C:\Users\accou\Documents\item.txt -> /mnt/c/Users/accou/Documents/item.txt
///
/// Returns `None` when the input is not an absolute Windows path with an
/// ASCII drive letter (e.g. a relative path or an already-Unix path).
fn windows_path_to_wsl(windows_path: &str) -> Option<String> {
    // Require "X:..." where X is an ASCII letter. The previous check accepted
    // any first character, so inputs like "1:\x" were "converted", and a
    // multi-byte first char would make the byte slice below start mid-char.
    let mut chars = windows_path.chars();
    let drive = chars.next()?;
    if !drive.is_ascii_alphabetic() || chars.next() != Some(':') {
        return None;
    }
    // Keep the original minimum-length requirement (drive + ':' + something).
    if windows_path.len() < 3 {
        return None;
    }
    let path_without_drive = &windows_path[2..]; // Remove "C:" (safe: both chars are ASCII)

    // Replace backslashes with forward slashes and convert to WSL mount point
    let wsl_path = path_without_drive.replace('\\', "/");
    Some(format!("/mnt/{}{}", drive.to_ascii_lowercase(), wsl_path))
}
|
||
|
||
/// Convert a WSL path to a Windows path
/// Example: /mnt/c/Users/accou/Documents/item.txt -> C:\Users\accou\Documents\item.txt
///
/// Returns `None` for anything that is not a `/mnt/<drive>/...` mount path.
#[allow(dead_code)]
fn wsl_path_to_windows(wsl_path: &str) -> Option<String> {
    // Expect "/mnt/<letter>/rest". The previous version accepted any char in
    // the drive position and never checked for the separator after it, so
    // "/mnt/cfoo" was wrongly converted to "C:foo".
    let rest = wsl_path.strip_prefix("/mnt/")?;
    let mut chars = rest.chars();
    let drive_letter = chars.next()?;
    if !drive_letter.is_ascii_alphabetic() {
        return None;
    }
    let path_after_drive = &rest[1..]; // Remove drive letter (safe: ASCII)
    if !path_after_drive.starts_with('/') {
        // Either bare "/mnt/c" (drive-relative, ambiguous — matches the old
        // behavior of rejecting it) or a non-mount name like "/mnt/cfoo".
        return None;
    }

    // Convert to Windows path with backslashes
    let windows_path = path_after_drive.replace('/', "\\");
    Some(format!("{}:{}", drive_letter.to_ascii_uppercase(), windows_path))
}
|
||
|
||
/// Create a Command instance for executing Claude CLI commands
/// On Windows, this will use WSL to execute the command
/// On other platforms, it executes directly
///
/// Resolution strategy on both platforms: ask `which` for the absolute path
/// of `claude` first, and fall back to the bare name if that fails, relying
/// on the spawning shell's PATH at execution time.
fn create_claude_command() -> std::process::Command {
    #[cfg(target_os = "windows")]
    {
        // Use `which` inside WSL to find the claude binary dynamically
        // Non-login shells launched by `wsl` don't inherit the full user PATH,
        // so we need to use a login shell to get the correct PATH
        let which_output = std::process::Command::new("wsl")
            .hide_window()
            .args(["-e", "bash", "-l", "-c", "which claude"])
            .output();

        match which_output {
            Ok(output) if output.status.success() => {
                // `which` prints the path followed by a newline — trim it off.
                let claude_path = String::from_utf8_lossy(&output.stdout).trim().to_string();
                let mut cmd = std::process::Command::new("wsl");
                cmd.hide_window();
                cmd.arg(claude_path);
                cmd
            }
            _ => {
                // Fallback to just "claude" if which fails
                // This maintains backwards compatibility
                let mut cmd = std::process::Command::new("wsl");
                cmd.hide_window();
                cmd.arg("claude");
                cmd
            }
        }
    }

    #[cfg(not(target_os = "windows"))]
    {
        // Use `which` to find the claude binary dynamically
        // This works regardless of how Claude Code was installed (standalone, npm, etc.)
        // and avoids hardcoding paths
        let which_output = std::process::Command::new("which")
            .hide_window()
            .arg("claude")
            .output();

        match which_output {
            Ok(output) if output.status.success() => {
                // Use the resolved absolute path directly as the program name.
                let claude_path = String::from_utf8_lossy(&output.stdout).trim().to_string();
                let mut cmd = std::process::Command::new(claude_path);
                cmd.hide_window();
                cmd
            }
            _ => {
                // Fallback to just "claude" if which fails
                // This maintains backwards compatibility
                let mut cmd = std::process::Command::new("claude");
                cmd.hide_window();
                cmd
            }
        }
    }
}
|
||
|
||
#[tauri::command]
|
||
pub async fn start_claude(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
options: ClaudeStartOptions,
|
||
) -> Result<(), String> {
|
||
let mut manager = bridge_manager.lock();
|
||
manager.start_claude(&conversation_id, options)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn stop_claude(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
) -> Result<(), String> {
|
||
let mut manager = bridge_manager.lock();
|
||
manager.stop_claude(&conversation_id)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn interrupt_claude(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
) -> Result<(), String> {
|
||
let mut manager = bridge_manager.lock();
|
||
manager.interrupt_claude(&conversation_id)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn send_prompt(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
message: String,
|
||
) -> Result<(), String> {
|
||
let mut manager = bridge_manager.lock();
|
||
manager.send_prompt(&conversation_id, message)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn is_claude_running(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
) -> Result<bool, String> {
|
||
let manager = bridge_manager.lock();
|
||
Ok(manager.is_claude_running(&conversation_id))
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_working_directory(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
) -> Result<String, String> {
|
||
let manager = bridge_manager.lock();
|
||
manager.get_working_directory(&conversation_id)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn select_wsl_directory() -> Result<String, String> {
|
||
// Return the user's home directory cross-platform
|
||
dirs::home_dir()
|
||
.ok_or_else(|| "Could not determine home directory".to_string())
|
||
.map(|p| p.to_string_lossy().to_string())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_config(app: AppHandle) -> Result<HikariConfig, String> {
|
||
let store = app.store("hikari-config.json").map_err(|e| e.to_string())?;
|
||
|
||
match store.get(CONFIG_STORE_KEY) {
|
||
Some(value) => serde_json::from_value(value.clone()).map_err(|e| e.to_string()),
|
||
None => Ok(HikariConfig::default()),
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn save_config(app: AppHandle, config: HikariConfig) -> Result<(), String> {
|
||
let store = app.store("hikari-config.json").map_err(|e| e.to_string())?;
|
||
|
||
let value = serde_json::to_value(&config).map_err(|e| e.to_string())?;
|
||
store.set(CONFIG_STORE_KEY, value);
|
||
store.save().map_err(|e| e.to_string())?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_usage_stats(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
) -> Result<UsageStats, String> {
|
||
let manager = bridge_manager.lock();
|
||
manager.get_usage_stats(&conversation_id)
|
||
}
|
||
|
||
/// Load persisted lifetime stats from store (no bridge required)
|
||
#[tauri::command]
|
||
pub async fn get_persisted_stats(app: AppHandle) -> Result<UsageStats, String> {
|
||
let mut stats = UsageStats::new();
|
||
|
||
// Load persisted stats if available
|
||
if let Some(persisted) = crate::stats::load_stats(&app).await {
|
||
stats.apply_persisted(persisted);
|
||
}
|
||
|
||
Ok(stats)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn validate_directory(
|
||
path: String,
|
||
current_dir: Option<String>,
|
||
) -> Result<String, String> {
|
||
use std::path::{Path, PathBuf};
|
||
|
||
// Detect if we're dealing with a WSL path (starts with / on Windows, or current_dir is a WSL path)
|
||
let is_wsl_path = cfg!(windows) && (path.starts_with('/') || current_dir.as_ref().is_some_and(|p| p.starts_with('/')));
|
||
|
||
if is_wsl_path {
|
||
// WSL path - handle as Unix-style path without filesystem validation
|
||
// since the Windows binary can't validate WSL filesystem paths
|
||
let resolved = if path.starts_with('/') {
|
||
// Absolute WSL path - use as-is
|
||
path
|
||
} else if let Some(ref cwd) = current_dir {
|
||
// Relative path - resolve manually using Unix path logic
|
||
if path == "." {
|
||
cwd.clone()
|
||
} else if path == ".." {
|
||
// Go up one directory
|
||
cwd.rsplit_once('/').map(|x| x.0).unwrap_or("/").to_string()
|
||
} else if path.starts_with("../") {
|
||
// Handle ../ prefix
|
||
let parent = cwd.rsplit_once('/').map(|x| x.0).unwrap_or("/");
|
||
let remainder = path.strip_prefix("../").unwrap();
|
||
if remainder.is_empty() {
|
||
parent.to_string()
|
||
} else {
|
||
format!("{}/{}", parent, remainder)
|
||
}
|
||
} else if path.starts_with("./") {
|
||
// Handle ./ prefix
|
||
format!("{}/{}", cwd, path.strip_prefix("./").unwrap())
|
||
} else {
|
||
// Regular relative path
|
||
format!("{}/{}", cwd, path)
|
||
}
|
||
} else {
|
||
return Err("Cannot resolve relative WSL path without current directory".to_string());
|
||
};
|
||
|
||
// Normalize the path (remove duplicate slashes, etc.)
|
||
let normalized = resolved.split('/').filter(|s| !s.is_empty()).collect::<Vec<_>>().join("/");
|
||
Ok(if normalized.is_empty() { "/".to_string() } else { format!("/{}", normalized) })
|
||
} else {
|
||
// Native path (Windows on Windows, Unix on Unix) - validate normally
|
||
let path = Path::new(&path);
|
||
|
||
let expanded_path = if path.starts_with("~") {
|
||
if let Some(home) = dirs::home_dir() {
|
||
if path == Path::new("~") {
|
||
home
|
||
} else {
|
||
home.join(path.strip_prefix("~").unwrap())
|
||
}
|
||
} else {
|
||
return Err("Could not determine home directory".to_string());
|
||
}
|
||
} else if path.is_relative() {
|
||
if let Some(ref cwd) = current_dir {
|
||
let cwd_path = PathBuf::from(cwd);
|
||
cwd_path.join(path)
|
||
} else {
|
||
path.to_path_buf()
|
||
}
|
||
} else {
|
||
path.to_path_buf()
|
||
};
|
||
|
||
// Check if the path exists and is a directory
|
||
if !expanded_path.exists() {
|
||
return Err(format!(
|
||
"Directory does not exist: {}",
|
||
expanded_path.display()
|
||
));
|
||
}
|
||
|
||
if !expanded_path.is_dir() {
|
||
return Err(format!(
|
||
"Path is not a directory: {}",
|
||
expanded_path.display()
|
||
));
|
||
}
|
||
|
||
// Return the canonicalized (absolute) path
|
||
expanded_path
|
||
.canonicalize()
|
||
.map(|p| p.to_string_lossy().to_string())
|
||
.map_err(|e| format!("Failed to resolve path: {}", e))
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn load_saved_achievements(
|
||
app: AppHandle,
|
||
) -> Result<Vec<AchievementUnlockedEvent>, String> {
|
||
use chrono::Utc;
|
||
|
||
// Load achievements from persistent store
|
||
let progress = load_achievements(&app).await;
|
||
|
||
// Create events for all previously unlocked achievements
|
||
let mut events = Vec::new();
|
||
for achievement_id in &progress.unlocked {
|
||
let mut info = get_achievement_info(achievement_id);
|
||
info.unlocked_at = Some(Utc::now()); // We don't store timestamps, so just use now
|
||
events.push(AchievementUnlockedEvent { achievement: info });
|
||
}
|
||
|
||
Ok(events)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn answer_question(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
tool_use_id: String,
|
||
answers: serde_json::Value,
|
||
) -> Result<(), String> {
|
||
let mut manager = bridge_manager.lock();
|
||
manager.send_tool_result(&conversation_id, &tool_use_id, answers)
|
||
}
|
||
|
||
/// Summary of workspace "trust concerns" found before launching Claude,
/// serialized to the frontend so it can show its own trust gate.
#[derive(Debug, Serialize)]
pub struct WorkspaceHookInfo {
    // True when any hooks, MCP servers, or custom commands were found.
    pub has_concerns: bool,
    // Hook event names found in .claude/settings(.local).json.
    pub hook_types: Vec<String>,
    // MCP server names found in the workspace settings files.
    pub mcp_servers: Vec<String>,
    // Names of custom slash commands (.claude/commands/*.md stems).
    pub custom_commands: Vec<String>,
}
|
||
|
||
/// Check whether a working directory has Claude Code hooks, MCP servers, or custom commands.
///
/// Hikari Desktop runs Claude in `--output-format stream-json` (non-interactive mode),
/// which bypasses Claude's own workspace trust dialog. We therefore check for these
/// ourselves so the frontend can show its own trust gate before launching.
#[tauri::command]
pub async fn check_workspace_hooks(working_dir: String) -> WorkspaceHookInfo {
    // On Windows, an absolute Unix path means the workspace lives inside WSL.
    let use_wsl = cfg!(windows) && working_dir.starts_with('/');

    // Both the shared and the local (machine-specific) settings files can
    // declare hooks and MCP servers.
    let settings_paths = [
        format!("{}/.claude/settings.json", working_dir),
        format!("{}/.claude/settings.local.json", working_dir),
    ];

    // BTreeSet: dedupe across both files and keep the output sorted.
    let mut all_hook_types: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
    let mut all_mcp_servers: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();

    for path in &settings_paths {
        // A missing or unreadable settings file is simply skipped — absence
        // is not an error here.
        let content = if use_wsl {
            match read_file_via_wsl(path).await {
                Ok(c) => c,
                Err(_) => continue,
            }
        } else {
            match std::fs::read_to_string(path) {
                Ok(c) => c,
                Err(_) => continue,
            }
        };

        // Malformed JSON is ignored as well.
        let settings: serde_json::Value = match serde_json::from_str(&content) {
            Ok(v) => v,
            Err(_) => continue,
        };

        // Collect hook event names from the "hooks" object's keys.
        if let Some(hooks) = settings.get("hooks").and_then(|h| h.as_object()) {
            for key in hooks.keys() {
                all_hook_types.insert(key.clone());
            }
        }

        // Collect configured MCP server names from "mcpServers".
        if let Some(servers) = settings.get("mcpServers").and_then(|s| s.as_object()) {
            for key in servers.keys() {
                all_mcp_servers.insert(key.clone());
            }
        }
    }

    let custom_commands = list_workspace_commands(&working_dir, use_wsl).await;
    let hook_types: Vec<String> = all_hook_types.into_iter().collect();
    let mcp_servers: Vec<String> = all_mcp_servers.into_iter().collect();
    // Any of the three categories is enough to warrant a trust prompt.
    let has_concerns = !hook_types.is_empty() || !mcp_servers.is_empty() || !custom_commands.is_empty();

    WorkspaceHookInfo {
        has_concerns,
        hook_types,
        mcp_servers,
        custom_commands,
    }
}
|
||
|
||
/// List the workspace's custom slash commands: the stems of `*.md` files in
/// `<working_dir>/.claude/commands`. The native branch sorts the result; the
/// WSL branch relies on the shell glob's expansion order.
async fn list_workspace_commands(working_dir: &str, use_wsl: bool) -> Vec<String> {
    let commands_dir = format!("{}/.claude/commands", working_dir);

    if use_wsl {
        // Enumerate inside WSL: print each markdown file's basename sans ".md".
        // NOTE(review): `commands_dir` is interpolated into a single-quoted
        // shell string — a path containing a single quote would break the
        // script; confirm callers never pass such paths.
        let script = format!(
            "if [ -d '{0}' ]; then for f in '{0}'/*.md; do [ -f \"$f\" ] && basename \"$f\" .md; done; fi",
            commands_dir
        );
        let Ok(output) = std::process::Command::new("wsl")
            .hide_window()
            .args(["-e", "sh", "-c", &script])
            .output()
        else {
            // WSL unavailable: treat as "no custom commands" rather than error.
            return vec![];
        };
        String::from_utf8_lossy(&output.stdout)
            .lines()
            .filter(|l| !l.is_empty())
            .map(str::to_string)
            .collect()
    } else {
        // Native filesystem: collect .md file stems, then sort.
        let dir = std::path::Path::new(&commands_dir);
        if !dir.exists() {
            return vec![];
        }
        let Ok(entries) = std::fs::read_dir(dir) else {
            return vec![];
        };
        let mut names: Vec<String> = entries
            .filter_map(|e| e.ok())
            .filter(|e| {
                e.path()
                    .extension()
                    .is_some_and(|ext| ext.eq_ignore_ascii_case("md"))
            })
            .filter_map(|e| {
                e.path()
                    .file_stem()
                    .map(|s| s.to_string_lossy().to_string())
            })
            .collect();
        names.sort();
        names
    }
}
|
||
|
||
#[tauri::command]
|
||
pub async fn list_skills() -> Result<Vec<String>, String> {
|
||
// On Windows, we need to use WSL to access the skills directory
|
||
// since skills are stored in the WSL home directory
|
||
if cfg!(windows) {
|
||
return list_skills_via_wsl().await;
|
||
}
|
||
|
||
// On Unix systems, use the native filesystem
|
||
use std::fs;
|
||
|
||
let home = dirs::home_dir().ok_or_else(|| "Could not determine home directory".to_string())?;
|
||
let skills_dir = home.join(".claude").join("skills");
|
||
|
||
if !skills_dir.exists() {
|
||
return Ok(Vec::new());
|
||
}
|
||
|
||
let mut skills = Vec::new();
|
||
let entries =
|
||
fs::read_dir(&skills_dir).map_err(|e| format!("Failed to read skills directory: {}", e))?;
|
||
|
||
for entry in entries {
|
||
let entry = entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
|
||
let path = entry.path();
|
||
|
||
if path.is_dir() {
|
||
let skill_file = path.join("SKILL.md");
|
||
if skill_file.exists() {
|
||
if let Some(name) = path.file_name() {
|
||
skills.push(name.to_string_lossy().to_string());
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
skills.sort();
|
||
Ok(skills)
|
||
}
|
||
|
||
/// List skills by executing commands through WSL (for Windows)
///
/// Runs a small shell loop in WSL that prints, one per line, the name of
/// every directory under ~/.claude/skills that contains a SKILL.md file.
#[allow(dead_code)]
async fn list_skills_via_wsl() -> Result<Vec<String>, String> {
    use std::process::Command;

    // Use WSL to list directories in ~/.claude/skills that contain SKILL.md
    let output = Command::new("wsl")
        .hide_window()
        .args([
            "-e",
            "sh",
            "-c",
            "if [ -d ~/.claude/skills ]; then for d in ~/.claude/skills/*/; do [ -f \"${d}SKILL.md\" ] && basename \"$d\"; done; fi",
        ])
        .output()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        // A missing skills directory is not an error — report no skills.
        if stderr.contains("not found") || stderr.contains("No such file") {
            return Ok(Vec::new());
        }
        return Err(format!("WSL command failed: {}", stderr));
    }

    // One skill name per non-empty line of stdout.
    let stdout = String::from_utf8_lossy(&output.stdout);
    let mut skills: Vec<String> = stdout
        .lines()
        .filter(|line| !line.is_empty())
        .map(|line| line.to_string())
        .collect();

    skills.sort();
    Ok(skills)
}
|
||
|
||
/// Result of an application update check, serialized to the frontend.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct UpdateInfo {
    // Version compiled into this binary (CARGO_PKG_VERSION).
    pub current_version: String,
    // Tag name of the selected release (may retain its 'v' prefix).
    pub latest_version: String,
    // True when the latest release is semver-newer than the current version.
    pub has_update: bool,
    // Web URL of the release page.
    pub release_url: String,
    // Release notes body, if the release has one.
    pub release_notes: Option<String>,
}
|
||
|
||
/// Subset of the Gitea release API payload needed for update checks.
#[derive(Debug, serde::Deserialize)]
struct GiteaRelease {
    // e.g. "v1.2.3"
    tag_name: String,
    // Release page URL.
    html_url: String,
    // Release notes body, if any.
    body: Option<String>,
    // Prereleases are deprioritized when picking the latest release.
    prerelease: bool,
}
|
||
|
||
/// Compare the running app version against the newest release on Gitea.
///
/// Prefers the latest stable release and falls back to the first release in
/// the API response when only prereleases exist.
#[tauri::command]
pub async fn check_for_updates() -> Result<UpdateInfo, String> {
    const CURRENT_VERSION: &str = env!("CARGO_PKG_VERSION");
    const RELEASES_API: &str =
        "https://git.nhcarrigan.com/api/v1/repos/nhcarrigan/hikari-desktop/releases";

    // Fetch releases from Gitea API
    let client = reqwest::Client::new();
    let response = client
        .get(RELEASES_API)
        .header("Accept", "application/json")
        .send()
        .await
        .map_err(|e| format!("Failed to fetch releases: {}", e))?;

    if !response.status().is_success() {
        return Err(format!("API returned status: {}", response.status()));
    }

    // Read the body as text first so parse errors are reported separately
    // from transport errors.
    let text = response
        .text()
        .await
        .map_err(|e| format!("Failed to read response: {}", e))?;

    let releases: Vec<GiteaRelease> =
        serde_json::from_str(&text).map_err(|e| format!("Failed to parse releases: {}", e))?;

    // Find the latest non-prerelease, or fall back to latest prerelease
    // NOTE(review): this assumes the API returns releases newest-first —
    // confirm against the Gitea API ordering guarantees.
    let latest = releases
        .iter()
        .find(|r| !r.prerelease)
        .or_else(|| releases.first());

    let latest = match latest {
        Some(r) => r,
        None => return Err("No releases found".to_string()),
    };

    // Parse version strings (remove 'v' prefix if present)
    let current = semver::Version::parse(CURRENT_VERSION)
        .map_err(|e| format!("Failed to parse current version: {}", e))?;

    let latest_tag = latest.tag_name.trim_start_matches('v');
    let latest_ver = semver::Version::parse(latest_tag)
        .map_err(|e| format!("Failed to parse latest version: {}", e))?;

    Ok(UpdateInfo {
        current_version: CURRENT_VERSION.to_string(),
        latest_version: latest.tag_name.clone(),
        has_update: latest_ver > current,
        release_url: latest.html_url.clone(),
        release_notes: latest.body.clone(),
    })
}
|
||
|
||
/// Subset of the Gitea release API payload used to build changelog entries.
#[derive(Debug, serde::Deserialize)]
struct GiteaChangelogRelease {
    // e.g. "v1.2.3"
    tag_name: String,
    // Release page URL.
    html_url: String,
    // Release notes body, if any.
    body: Option<String>,
    // Whether this release is marked as a prerelease.
    prerelease: bool,
    // Creation timestamp as reported by the API (kept as a raw string).
    created_at: String,
}
|
||
|
||
/// A single changelog item sent to the frontend, derived from a Gitea release.
#[derive(Debug, Clone, serde::Serialize)]
pub struct ChangelogEntry {
    // Release tag name (may retain its 'v' prefix).
    pub version: String,
    // Release page URL.
    pub url: String,
    // Release notes body, if any.
    pub notes: Option<String>,
    // Whether the release was marked as a prerelease.
    pub prerelease: bool,
    // Creation timestamp as reported by the API.
    pub created_at: String,
}
|
||
|
||
/// Fetch up to 50 releases from Gitea and map them into changelog entries.
#[tauri::command]
pub async fn fetch_changelog() -> Result<Vec<ChangelogEntry>, String> {
    const RELEASES_API: &str =
        "https://git.nhcarrigan.com/api/v1/repos/nhcarrigan/hikari-desktop/releases";

    let client = reqwest::Client::new();
    let response = client
        .get(RELEASES_API)
        .header("Accept", "application/json")
        // Cap the page size; only recent history is shown in the app.
        .query(&[("limit", "50")])
        .send()
        .await
        .map_err(|e| format!("Failed to fetch releases: {}", e))?;

    if !response.status().is_success() {
        return Err(format!("API returned status: {}", response.status()));
    }

    // Read as text first so parse failures report separately from transport errors.
    let text = response
        .text()
        .await
        .map_err(|e| format!("Failed to read response: {}", e))?;

    let releases: Vec<GiteaChangelogRelease> =
        serde_json::from_str(&text).map_err(|e| format!("Failed to parse releases: {}", e))?;

    // Entries are emitted in API response order — no re-sorting is done here.
    Ok(releases
        .into_iter()
        .map(|r| ChangelogEntry {
            version: r.tag_name,
            url: r.html_url,
            notes: r.body,
            prerelease: r.prerelease,
            created_at: r.created_at,
        })
        .collect())
}
|
||
|
||
fn parse_npm_cli_version(json: &str) -> Result<String, String> {
|
||
let data: serde_json::Value =
|
||
serde_json::from_str(json).map_err(|e| format!("Failed to parse response: {}", e))?;
|
||
data.get("version")
|
||
.and_then(|v| v.as_str())
|
||
.map(|s| s.to_string())
|
||
.ok_or_else(|| "No version field in response".to_string())
|
||
}
|
||
|
||
/// Fetch the latest published version of the Claude Code CLI from the npm registry.
#[tauri::command]
pub async fn check_cli_latest_version() -> Result<String, String> {
    let client = reqwest::Client::new();
    let response = client
        .get("https://registry.npmjs.org/@anthropic-ai/claude-code/latest")
        .header("Accept", "application/json")
        .send()
        .await
        .map_err(|e| format!("Failed to fetch CLI version: {}", e))?;

    if !response.status().is_success() {
        return Err(format!("Registry returned status: {}", response.status()));
    }

    let body = response
        .text()
        .await
        .map_err(|e| format!("Failed to read response: {}", e))?;

    // Parsing is factored out so it can be unit-tested without network access.
    parse_npm_cli_version(&body)
}
|
||
|
||
/// Path information returned to the frontend after saving an attachment.
#[derive(Debug, Clone, serde::Serialize)]
pub struct SavedFileInfo {
    // Absolute path to the saved file (WSL-style /mnt/... form on Windows).
    pub path: String,
    // Basename of the saved file.
    pub filename: String,
}
|
||
|
||
#[tauri::command]
|
||
pub async fn save_temp_file(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
conversation_id: String,
|
||
data: Vec<u8>,
|
||
filename: Option<String>,
|
||
) -> Result<SavedFileInfo, String> {
|
||
let mut manager = temp_manager.lock();
|
||
let path = manager.save_file(&conversation_id, &data, filename.as_deref())?;
|
||
|
||
let filename = path
|
||
.file_name()
|
||
.map(|n| n.to_string_lossy().to_string())
|
||
.unwrap_or_else(|| "unknown".to_string());
|
||
|
||
let path_string = path.to_string_lossy().to_string();
|
||
|
||
// On Windows, convert the path to WSL format if needed
|
||
// so Claude Code (running in WSL) can access it via /mnt/c/...
|
||
let final_path = if cfg!(windows) {
|
||
windows_path_to_wsl(&path_string).unwrap_or(path_string)
|
||
} else {
|
||
path_string
|
||
};
|
||
|
||
Ok(SavedFileInfo {
|
||
path: final_path,
|
||
filename,
|
||
})
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn register_temp_file(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
conversation_id: String,
|
||
file_path: String,
|
||
) -> Result<(), String> {
|
||
let mut manager = temp_manager.lock();
|
||
manager.register_file(&conversation_id, PathBuf::from(file_path));
|
||
Ok(())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_temp_files(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
conversation_id: String,
|
||
) -> Result<Vec<String>, String> {
|
||
let manager = temp_manager.lock();
|
||
let files = manager.get_files_for_conversation(&conversation_id);
|
||
Ok(files.iter().map(|p| p.to_string_lossy().to_string()).collect())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn cleanup_temp_files(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
conversation_id: String,
|
||
) -> Result<(), String> {
|
||
let mut manager = temp_manager.lock();
|
||
manager.cleanup_conversation(&conversation_id)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn cleanup_all_temp_files(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
) -> Result<(), String> {
|
||
let mut manager = temp_manager.lock();
|
||
manager.cleanup_all()
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn cleanup_orphaned_temp_files(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
) -> Result<usize, String> {
|
||
let mut manager = temp_manager.lock();
|
||
manager.cleanup_orphaned_files()
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_file_size(file_path: String) -> Result<u64, String> {
|
||
let metadata = std::fs::metadata(&file_path)
|
||
.map_err(|e| format!("Failed to get file metadata: {}", e))?;
|
||
Ok(metadata.len())
|
||
}
|
||
|
||
// ==================== Editor File Operations ====================
|
||
|
||
/// A single directory entry returned to the in-app editor's file browser.
#[derive(Debug, Clone, serde::Serialize)]
pub struct FileEntry {
    // Basename of the entry.
    pub name: String,
    // Full path (native or WSL-style, matching the listing request).
    pub path: String,
    // camelCase rename so the frontend sees `isDirectory`.
    #[serde(rename = "isDirectory")]
    pub is_directory: bool,
}
|
||
|
||
/// List the entries of a directory for the in-app editor.
///
/// Writes best-effort debug traces to hikari_editor_debug.log in the app
/// data directory (or the CWD as a fallback). On Windows, absolute Unix
/// paths are delegated to WSL.
#[tauri::command]
pub async fn list_directory(app: AppHandle, path: String) -> Result<Vec<FileEntry>, String> {
    // Set up logging
    let log_path = if let Ok(app_data_dir) = app.path().app_data_dir() {
        let _ = std::fs::create_dir_all(&app_data_dir);
        app_data_dir.join("hikari_editor_debug.log")
    } else {
        PathBuf::from("hikari_editor_debug.log")
    };

    let mut log_file = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(&log_path)
        .ok();

    // Best-effort logger: silently a no-op if the log file failed to open;
    // write errors are deliberately ignored.
    let mut log = |msg: String| {
        if let Some(ref mut file) = log_file {
            use std::io::Write;
            let timestamp = chrono::Local::now().format("%Y-%m-%d %H:%M:%S");
            let _ = writeln!(file, "[{}] {}", timestamp, msg);
        }
    };

    log(format!("list_directory called with path: {}", path));
    log(format!("cfg!(windows) = {}", cfg!(windows)));
    log(format!("path.starts_with('/') = {}", path.starts_with('/')));

    // On Windows with a WSL path (starts with /), use WSL to list the directory
    if cfg!(windows) && path.starts_with('/') {
        log("Using WSL path".to_string());
        return list_directory_via_wsl(&path).await;
    }

    log("Using native filesystem access".to_string());

    // Native filesystem access
    use std::fs;
    use std::path::Path;

    let dir_path = Path::new(&path);

    if !dir_path.exists() {
        let err = format!("Directory does not exist: {}", path);
        log(format!("ERROR: {}", err));
        return Err(err);
    }

    if !dir_path.is_dir() {
        let err = format!("Path is not a directory: {}", path);
        log(format!("ERROR: {}", err));
        return Err(err);
    }

    let entries = fs::read_dir(dir_path)
        .map_err(|e| {
            let err = format!("Failed to read directory: {}", e);
            log(format!("ERROR: {}", err));
            err
        })?;

    let mut file_entries = Vec::new();

    for entry in entries {
        // Any unreadable entry aborts the whole listing with an error.
        let entry = entry.map_err(|e| {
            let err = format!("Failed to read entry: {}", e);
            log(format!("ERROR: {}", err));
            err
        })?;
        let path = entry.path();
        let name = entry
            .file_name()
            .to_string_lossy()
            .to_string();

        file_entries.push(FileEntry {
            name: name.clone(),
            path: path.to_string_lossy().to_string(),
            is_directory: path.is_dir(),
        });
    }

    log(format!("Successfully listed {} entries", file_entries.len()));
    Ok(file_entries)
}
|
||
|
||
/// List directory contents via WSL (for Windows with WSL paths)
|
||
#[allow(dead_code)]
|
||
async fn list_directory_via_wsl(path: &str) -> Result<Vec<FileEntry>, String> {
|
||
use std::process::Command;
|
||
|
||
// Use WSL to list directory contents
|
||
// Output format: type<tab>name (d for directory, f for file)
|
||
let script = format!(
|
||
r#"if [ -d '{}' ]; then for f in '{}'/* '{}'/.* ; do [ -e "$f" ] || continue; name=$(basename "$f"); if [ "$name" = "." ] || [ "$name" = ".." ]; then continue; fi; if [ -d "$f" ]; then echo "d $name"; else echo "f $name"; fi; done; else echo "ERROR: Directory does not exist"; exit 1; fi"#,
|
||
path, path, path
|
||
);
|
||
|
||
let output = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "sh", "-c", &script])
|
||
.output()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||
|
||
if !output.status.success() || stdout.starts_with("ERROR:") {
|
||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||
if stdout.starts_with("ERROR:") {
|
||
return Err(stdout.trim().to_string());
|
||
}
|
||
return Err(format!("WSL command failed: {}", stderr));
|
||
}
|
||
|
||
let mut file_entries = Vec::new();
|
||
|
||
for line in stdout.lines() {
|
||
if line.is_empty() {
|
||
continue;
|
||
}
|
||
|
||
let parts: Vec<&str> = line.splitn(2, '\t').collect();
|
||
if parts.len() != 2 {
|
||
continue;
|
||
}
|
||
|
||
let is_directory = parts[0] == "d";
|
||
let name = parts[1].to_string();
|
||
let entry_path = if path == "/" {
|
||
format!("/{}", name)
|
||
} else {
|
||
format!("{}/{}", path, name)
|
||
};
|
||
|
||
file_entries.push(FileEntry {
|
||
name,
|
||
path: entry_path,
|
||
is_directory,
|
||
});
|
||
}
|
||
|
||
Ok(file_entries)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn read_file_content(path: String) -> Result<String, String> {
|
||
// On Windows with a WSL path, use WSL to read the file
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return read_file_via_wsl(&path).await;
|
||
}
|
||
|
||
use std::fs;
|
||
fs::read_to_string(&path)
|
||
.map_err(|e| format!("Failed to read file: {}", e))
|
||
}
|
||
|
||
/// Read the first `# Heading` from a WSL file path (for Windows).
/// Returns `None` if the file cannot be read or has no top-level heading.
#[cfg(target_os = "windows")]
fn read_wsl_file_first_heading(path: &str) -> Option<String> {
    use std::process::Command;

    // Only fetch the first 20 lines — enough to find a top-of-file heading
    // without streaming large files across the WSL boundary.
    // NOTE(review): `path` is interpolated into single quotes; a path with an
    // embedded single quote would break the command — confirm callers never
    // pass such paths.
    let output = Command::new("wsl")
        .hide_window()
        .args(["-e", "bash", "-c", &format!("head -20 '{}'", path)])
        .output()
        .ok()?;

    if !output.status.success() {
        return None;
    }

    let content = String::from_utf8_lossy(&output.stdout);
    extract_first_heading(&content)
}
|
||
|
||
/// Read file content via WSL (for Windows with WSL paths)
#[allow(dead_code)]
async fn read_file_via_wsl(path: &str) -> Result<String, String> {
    use std::process::Command;

    // `wsl -e cat <path>` passes the path as a real argv element, so no
    // shell quoting is needed here.
    let output = Command::new("wsl")
        .hide_window()
        .args(["-e", "cat", path])
        .output()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(format!("Failed to read file: {}", stderr));
    }

    // Lossy conversion: invalid UTF-8 bytes become U+FFFD instead of failing.
    Ok(String::from_utf8_lossy(&output.stdout).to_string())
}
|
||
|
||
#[tauri::command]
|
||
pub async fn write_file_content(path: String, content: String) -> Result<(), String> {
|
||
// On Windows with a WSL path, use WSL to write the file
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return write_file_via_wsl(&path, &content).await;
|
||
}
|
||
|
||
use std::fs;
|
||
fs::write(&path, content)
|
||
.map_err(|e| format!("Failed to write file: {}", e))
|
||
}
|
||
|
||
/// Write file content via WSL (for Windows with WSL paths)
|
||
#[allow(dead_code)]
|
||
async fn write_file_via_wsl(path: &str, content: &str) -> Result<(), String> {
|
||
use std::io::Write;
|
||
use std::process::{Command, Stdio};
|
||
|
||
let mut child = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "sh", "-c", &format!("cat > '{}'", path)])
|
||
.stdin(Stdio::piped())
|
||
.spawn()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if let Some(mut stdin) = child.stdin.take() {
|
||
stdin.write_all(content.as_bytes())
|
||
.map_err(|e| format!("Failed to write to stdin: {}", e))?;
|
||
}
|
||
|
||
let status = child.wait()
|
||
.map_err(|e| format!("Failed to wait for WSL command: {}", e))?;
|
||
|
||
if !status.success() {
|
||
return Err("Failed to write file via WSL".to_string());
|
||
}
|
||
|
||
Ok(())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn create_file(path: String) -> Result<(), String> {
|
||
// On Windows with a WSL path, use WSL to create the file
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return create_file_via_wsl(&path).await;
|
||
}
|
||
|
||
use std::fs::File;
|
||
use std::path::Path;
|
||
|
||
let file_path = Path::new(&path);
|
||
|
||
if file_path.exists() {
|
||
return Err("File already exists".to_string());
|
||
}
|
||
|
||
File::create(file_path).map_err(|e| format!("Failed to create file: {}", e))?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Create file via WSL (for Windows with WSL paths)
|
||
#[allow(dead_code)]
|
||
async fn create_file_via_wsl(path: &str) -> Result<(), String> {
|
||
use std::process::Command;
|
||
|
||
// Check if file exists first
|
||
let check = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "test", "-e", path])
|
||
.status()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if check.success() {
|
||
return Err("File already exists".to_string());
|
||
}
|
||
|
||
let output = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "touch", path])
|
||
.output()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if !output.status.success() {
|
||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||
return Err(format!("Failed to create file: {}", stderr));
|
||
}
|
||
|
||
Ok(())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn create_directory(path: String) -> Result<(), String> {
|
||
// On Windows with a WSL path, use WSL to create the directory
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return create_directory_via_wsl(&path).await;
|
||
}
|
||
|
||
use std::fs;
|
||
use std::path::Path;
|
||
|
||
let dir_path = Path::new(&path);
|
||
|
||
if dir_path.exists() {
|
||
return Err("Directory already exists".to_string());
|
||
}
|
||
|
||
fs::create_dir_all(dir_path).map_err(|e| format!("Failed to create directory: {}", e))?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Create directory via WSL (for Windows with WSL paths)
|
||
#[allow(dead_code)]
|
||
async fn create_directory_via_wsl(path: &str) -> Result<(), String> {
|
||
use std::process::Command;
|
||
|
||
// Check if directory exists first
|
||
let check = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "test", "-e", path])
|
||
.status()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if check.success() {
|
||
return Err("Directory already exists".to_string());
|
||
}
|
||
|
||
let output = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "mkdir", "-p", path])
|
||
.output()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if !output.status.success() {
|
||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||
return Err(format!("Failed to create directory: {}", stderr));
|
||
}
|
||
|
||
Ok(())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn delete_file(path: String) -> Result<(), String> {
|
||
// On Windows with a WSL path, use WSL to delete the file
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return delete_file_via_wsl(&path).await;
|
||
}
|
||
|
||
use std::fs;
|
||
use std::path::Path;
|
||
|
||
let file_path = Path::new(&path);
|
||
|
||
if !file_path.exists() {
|
||
return Err("File does not exist".to_string());
|
||
}
|
||
|
||
if file_path.is_dir() {
|
||
return Err("Path is a directory, use delete_directory instead".to_string());
|
||
}
|
||
|
||
fs::remove_file(file_path).map_err(|e| format!("Failed to delete file: {}", e))?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Delete file via WSL (for Windows with WSL paths)
|
||
#[allow(dead_code)]
|
||
async fn delete_file_via_wsl(path: &str) -> Result<(), String> {
|
||
use std::process::Command;
|
||
|
||
// Check if path exists
|
||
let check_exists = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "test", "-e", path])
|
||
.status()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if !check_exists.success() {
|
||
return Err("File does not exist".to_string());
|
||
}
|
||
|
||
// Check if path is a directory
|
||
let check_dir = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "test", "-d", path])
|
||
.status()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if check_dir.success() {
|
||
return Err("Path is a directory, use delete_directory instead".to_string());
|
||
}
|
||
|
||
let output = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "rm", path])
|
||
.output()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if !output.status.success() {
|
||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||
return Err(format!("Failed to delete file: {}", stderr));
|
||
}
|
||
|
||
Ok(())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn delete_directory(path: String) -> Result<(), String> {
|
||
// On Windows with a WSL path, use WSL to delete the directory
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return delete_directory_via_wsl(&path).await;
|
||
}
|
||
|
||
use std::fs;
|
||
use std::path::Path;
|
||
|
||
let dir_path = Path::new(&path);
|
||
|
||
if !dir_path.exists() {
|
||
return Err("Directory does not exist".to_string());
|
||
}
|
||
|
||
if !dir_path.is_dir() {
|
||
return Err("Path is not a directory".to_string());
|
||
}
|
||
|
||
fs::remove_dir_all(dir_path).map_err(|e| format!("Failed to delete directory: {}", e))?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Delete directory via WSL (for Windows with WSL paths)
|
||
#[allow(dead_code)]
|
||
async fn delete_directory_via_wsl(path: &str) -> Result<(), String> {
|
||
use std::process::Command;
|
||
|
||
// Check if path exists
|
||
let check_exists = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "test", "-e", path])
|
||
.status()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if !check_exists.success() {
|
||
return Err("Directory does not exist".to_string());
|
||
}
|
||
|
||
// Check if path is a directory
|
||
let check_dir = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "test", "-d", path])
|
||
.status()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if !check_dir.success() {
|
||
return Err("Path is not a directory".to_string());
|
||
}
|
||
|
||
let output = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "rm", "-rf", path])
|
||
.output()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if !output.status.success() {
|
||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||
return Err(format!("Failed to delete directory: {}", stderr));
|
||
}
|
||
|
||
Ok(())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn rename_path(old_path: String, new_path: String) -> Result<(), String> {
|
||
// On Windows with WSL paths, use WSL to rename
|
||
if cfg!(windows) && old_path.starts_with('/') {
|
||
return rename_path_via_wsl(&old_path, &new_path).await;
|
||
}
|
||
|
||
use std::fs;
|
||
use std::path::Path;
|
||
|
||
let old = Path::new(&old_path);
|
||
let new = Path::new(&new_path);
|
||
|
||
if !old.exists() {
|
||
return Err("Path does not exist".to_string());
|
||
}
|
||
|
||
if new.exists() {
|
||
return Err("Destination already exists".to_string());
|
||
}
|
||
|
||
fs::rename(old, new).map_err(|e| format!("Failed to rename: {}", e))?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Rename path via WSL (for Windows with WSL paths)
|
||
#[allow(dead_code)]
|
||
async fn rename_path_via_wsl(old_path: &str, new_path: &str) -> Result<(), String> {
|
||
use std::process::Command;
|
||
|
||
// Check if old path exists
|
||
let check_old = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "test", "-e", old_path])
|
||
.status()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if !check_old.success() {
|
||
return Err("Path does not exist".to_string());
|
||
}
|
||
|
||
// Check if new path already exists
|
||
let check_new = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "test", "-e", new_path])
|
||
.status()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if check_new.success() {
|
||
return Err("Destination already exists".to_string());
|
||
}
|
||
|
||
let output = Command::new("wsl")
|
||
.hide_window()
|
||
.args(["-e", "mv", old_path, new_path])
|
||
.output()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
if !output.status.success() {
|
||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||
return Err(format!("Failed to rename: {}", stderr));
|
||
}
|
||
|
||
Ok(())
|
||
}
|
||
|
||
// ==================== Cost Tracking Commands ====================
|
||
|
||
// Key under which the serialized cost history is stored inside the
// "hikari-cost-history.json" Tauri store (see load/save_cost_history).
const COST_HISTORY_STORE_KEY: &str = "cost_history";
|
||
|
||
#[tauri::command]
|
||
pub async fn get_cost_summary(app: AppHandle, days: u32) -> Result<crate::cost_tracking::CostSummary, String> {
|
||
let history = load_cost_history(&app).await;
|
||
Ok(history.get_summary(days))
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_cost_alerts(app: AppHandle) -> Result<Vec<crate::cost_tracking::CostAlert>, String> {
|
||
let mut history = load_cost_history(&app).await;
|
||
let alerts = history.check_alerts();
|
||
|
||
// Save updated alert state
|
||
save_cost_history(&app, &history).await?;
|
||
|
||
Ok(alerts)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn set_cost_alert_thresholds(
|
||
app: AppHandle,
|
||
daily: Option<f64>,
|
||
weekly: Option<f64>,
|
||
monthly: Option<f64>,
|
||
) -> Result<(), String> {
|
||
let mut history = load_cost_history(&app).await;
|
||
history.set_alert_thresholds(daily, weekly, monthly);
|
||
save_cost_history(&app, &history).await
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn export_cost_csv(app: AppHandle, days: u32) -> Result<String, String> {
|
||
let history = load_cost_history(&app).await;
|
||
Ok(history.export_csv(days))
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_today_cost(app: AppHandle) -> Result<f64, String> {
|
||
let history = load_cost_history(&app).await;
|
||
Ok(history.get_today_cost())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_week_cost(app: AppHandle) -> Result<f64, String> {
|
||
let history = load_cost_history(&app).await;
|
||
Ok(history.get_week_cost())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_month_cost(app: AppHandle) -> Result<f64, String> {
|
||
let history = load_cost_history(&app).await;
|
||
Ok(history.get_month_cost())
|
||
}
|
||
|
||
/// Add cost to history (called internally when stats are updated)
|
||
pub async fn record_cost(app: &AppHandle, input_tokens: u64, output_tokens: u64, cost_usd: f64) {
|
||
let mut history = load_cost_history(app).await;
|
||
history.add_cost(input_tokens, output_tokens, cost_usd);
|
||
let _ = save_cost_history(app, &history).await;
|
||
}
|
||
|
||
/// Record a new session
|
||
pub async fn record_session(app: &AppHandle) {
|
||
let mut history = load_cost_history(app).await;
|
||
history.increment_sessions();
|
||
let _ = save_cost_history(app, &history).await;
|
||
}
|
||
|
||
async fn load_cost_history(app: &AppHandle) -> crate::cost_tracking::CostHistory {
|
||
let store = match app.store("hikari-cost-history.json") {
|
||
Ok(s) => s,
|
||
Err(_) => return crate::cost_tracking::CostHistory::new(),
|
||
};
|
||
|
||
match store.get(COST_HISTORY_STORE_KEY) {
|
||
Some(value) => serde_json::from_value(value.clone()).unwrap_or_default(),
|
||
None => crate::cost_tracking::CostHistory::new(),
|
||
}
|
||
}
|
||
|
||
async fn save_cost_history(app: &AppHandle, history: &crate::cost_tracking::CostHistory) -> Result<(), String> {
|
||
let store = app.store("hikari-cost-history.json").map_err(|e| e.to_string())?;
|
||
let value = serde_json::to_value(history).map_err(|e| e.to_string())?;
|
||
store.set(COST_HISTORY_STORE_KEY, value);
|
||
store.save().map_err(|e| e.to_string())?;
|
||
Ok(())
|
||
}
|
||
|
||
/// Initialize Discord Rich Presence for the current session.
///
/// Thin delegation to the shared `DiscordRpcManager`.
/// `started_at` is an integer timestamp used by the manager — presumably
/// Unix seconds for the elapsed-time display; confirm in `discord_rpc`.
#[tauri::command]
pub async fn init_discord_rpc(
    discord_rpc: State<'_, std::sync::Arc<crate::discord_rpc::DiscordRpcManager>>,
    session_name: String,
    model: String,
    started_at: i64,
) -> Result<(), String> {
    discord_rpc.init(session_name, model, started_at)
}
|
||
|
||
/// Update the existing Discord Rich Presence with new session details.
///
/// Thin delegation to the shared `DiscordRpcManager`; same parameters as
/// `init_discord_rpc`.
#[tauri::command]
pub async fn update_discord_rpc(
    discord_rpc: State<'_, std::sync::Arc<crate::discord_rpc::DiscordRpcManager>>,
    session_name: String,
    model: String,
    started_at: i64,
) -> Result<(), String> {
    discord_rpc.update(session_name, model, started_at)
}
|
||
|
||
/// Stop (clear) the Discord Rich Presence.
///
/// Thin delegation to the shared `DiscordRpcManager`.
#[tauri::command]
pub async fn stop_discord_rpc(
    discord_rpc: State<'_, std::sync::Arc<crate::discord_rpc::DiscordRpcManager>>,
) -> Result<(), String> {
    discord_rpc.stop()
}
|
||
|
||
#[tauri::command]
|
||
pub async fn close_application(app_handle: AppHandle) -> Result<(), String> {
|
||
// Get the main window
|
||
if let Some(window) = app_handle.get_webview_window("main") {
|
||
// Hide the window first for a smoother close
|
||
let _ = window.hide();
|
||
}
|
||
|
||
// Exit the application
|
||
app_handle.exit(0);
|
||
Ok(())
|
||
}
|
||
|
||
/// A single Claude memory file surfaced to the frontend.
#[derive(serde::Serialize)]
pub struct MemoryFileInfo {
    // Absolute path to the file (a WSL path on Windows builds).
    pub path: String,
    // First `# Heading` found in the file, if any (see extract_first_heading).
    pub heading: Option<String>,
    pub last_modified: Option<String>, // Unix timestamp in seconds as a string
}
|
||
|
||
/// Response payload for `list_memory_files`.
#[derive(serde::Serialize)]
pub struct MemoryFilesResponse {
    // All discovered memory files, sorted by path.
    pub files: Vec<MemoryFileInfo>,
}
|
||
|
||
/// Extract the first `# Heading` from a string of file content.
///
/// Only level-1 Markdown headings (`# ` after trimming) count; blank
/// headings (`# ` with nothing after it) are skipped. Returns `None` when
/// no qualifying heading exists.
fn extract_first_heading(content: &str) -> Option<String> {
    for line in content.lines() {
        if let Some(rest) = line.trim().strip_prefix("# ") {
            let title = rest.trim();
            if !title.is_empty() {
                return Some(title.to_string());
            }
        }
    }
    None
}
|
||
|
||
/// List all Claude memory files for the frontend.
///
/// Dispatches at compile time: Windows builds look in the WSL home
/// directory (where the Claude CLI lives), all other platforms use the
/// native home directory.
#[tauri::command]
pub async fn list_memory_files() -> Result<MemoryFilesResponse, String> {
    // On Windows, we need to look in the WSL home directory
    // On Linux/Mac, use the native home directory
    #[cfg(target_os = "windows")]
    {
        list_memory_files_via_wsl().await
    }

    #[cfg(not(target_os = "windows"))]
    {
        list_memory_files_native().await
    }
}
|
||
|
||
/// List memory files via WSL (for Windows)
#[cfg(target_os = "windows")]
async fn list_memory_files_via_wsl() -> Result<MemoryFilesResponse, String> {
    use std::process::Command;

    // Enumerate every file directly inside any "memory" directory under
    // ~/.claude/projects in the WSL home, sorted for stable ordering.
    let script = r#"
        find ~/.claude/projects -type d -name memory 2>/dev/null | while read dir; do
            find "$dir" -maxdepth 1 -type f 2>/dev/null
        done | sort
    "#;

    // `bash -l` so ~ expands against the WSL login environment.
    let output = Command::new("wsl")
        .hide_window()
        .args(["-e", "bash", "-l", "-c", script])
        .output()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;

    if !output.status.success() {
        return Err(format!(
            "Failed to list memory files: {}",
            String::from_utf8_lossy(&output.stderr)
        ));
    }

    let listing = String::from_utf8_lossy(&output.stdout);
    let files = listing
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        .map(|line| {
            let path = line.to_string();
            // One extra WSL round-trip per file to grab its first heading.
            let heading = read_wsl_file_first_heading(&path);
            MemoryFileInfo {
                path,
                heading,
                // Modification times are not collected over WSL.
                last_modified: None,
            }
        })
        .collect();

    Ok(MemoryFilesResponse { files })
}
|
||
|
||
/// List memory files using native filesystem (for Linux/Mac)
///
/// Walks `~/.claude/projects` recursively, collecting every regular file
/// directly inside a directory named `memory`, then attaches each file's
/// first `# Heading` and mtime (Unix seconds, as a string).
async fn list_memory_files_native() -> Result<MemoryFilesResponse, String> {
    use std::fs;

    // Get the .claude directory in the user's home
    let home_dir = match dirs::home_dir() {
        Some(dir) => dir,
        None => return Err("Could not find home directory".to_string()),
    };

    let claude_dir = home_dir.join(".claude");
    let projects_dir = claude_dir.join("projects");

    // No projects directory simply means no memory files yet — not an error.
    if !projects_dir.exists() {
        return Ok(MemoryFilesResponse { files: Vec::new() });
    }

    let mut memory_paths = Vec::new();

    // Recursively find all memory directories.
    // NOTE: any single I/O error aborts the whole traversal via `?`;
    // paths that are not valid UTF-8 are silently skipped.
    fn find_memory_files(
        dir: &std::path::Path,
        files: &mut Vec<String>,
    ) -> std::io::Result<()> {
        if !dir.is_dir() {
            return Ok(());
        }

        for entry in fs::read_dir(dir)? {
            let entry = entry?;
            let path = entry.path();

            if path.is_dir() {
                // Check if this is a "memory" directory
                if path.file_name().and_then(|n| n.to_str()) == Some("memory") {
                    // List all files in the memory directory (non-recursive:
                    // only direct children count).
                    for mem_entry in fs::read_dir(&path)? {
                        let mem_entry = mem_entry?;
                        let mem_path = mem_entry.path();

                        if mem_path.is_file() {
                            if let Some(path_str) = mem_path.to_str() {
                                files.push(path_str.to_string());
                            }
                        }
                    }
                } else {
                    // Recurse into subdirectories
                    find_memory_files(&path, files)?;
                }
            }
        }

        Ok(())
    }

    if let Err(e) = find_memory_files(&projects_dir, &mut memory_paths) {
        return Err(format!("Failed to list memory files: {}", e));
    }

    // Sort files alphabetically
    memory_paths.sort();

    // Read first heading and modification time from each file.
    // Read/metadata failures degrade to None rather than failing the command.
    let files = memory_paths
        .into_iter()
        .map(|path| {
            let heading = fs::read_to_string(&path)
                .ok()
                .and_then(|content| extract_first_heading(&content));
            let last_modified = fs::metadata(&path)
                .ok()
                .and_then(|m| m.modified().ok())
                .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
                .map(|d| d.as_secs().to_string());
            MemoryFileInfo {
                path,
                heading,
                last_modified,
            }
        })
        .collect();

    Ok(MemoryFilesResponse { files })
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_claude_version() -> Result<String, String> {
|
||
tracing::debug!("Getting Claude CLI version");
|
||
|
||
let output = create_claude_command()
|
||
.arg("--version")
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let version = String::from_utf8_lossy(&output.stdout)
|
||
.trim()
|
||
.to_string();
|
||
tracing::info!("Claude CLI version: {}", version);
|
||
Ok(version)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to get Claude version: {}", error);
|
||
Err(format!("Failed to get Claude version: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude --version: {}", e);
|
||
Err(format!("Failed to execute claude --version: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
// ==================== Auth Commands ====================
|
||
|
||
/// Authentication status reported by `claude auth status`.
///
/// All optional fields come from the CLI's JSON output and are `None`
/// when the CLI prints non-JSON text (see `get_auth_status`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClaudeAuthStatus {
    pub is_logged_in: bool,
    pub email: Option<String>,
    pub org_id: Option<String>,
    pub org_name: Option<String>,
    pub api_key_source: Option<String>,
    pub api_provider: Option<String>,
    pub subscription_type: Option<String>,
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_auth_status() -> Result<ClaudeAuthStatus, String> {
|
||
tracing::debug!("Getting Claude auth status");
|
||
|
||
let output = create_claude_command()
|
||
.args(["auth", "status"])
|
||
.output()
|
||
.map_err(|e| format!("Failed to run claude auth status: {}", e))?;
|
||
|
||
let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
|
||
let raw = if stdout.is_empty() { &stderr } else { &stdout };
|
||
|
||
if let Ok(json) = serde_json::from_str::<serde_json::Value>(raw) {
|
||
let is_logged_in = json
|
||
.get("loggedIn")
|
||
.and_then(|v| v.as_bool())
|
||
.unwrap_or(false);
|
||
|
||
let email = json
|
||
.get("email")
|
||
.and_then(|v| v.as_str())
|
||
.map(String::from);
|
||
|
||
let org_id = json
|
||
.get("orgId")
|
||
.and_then(|v| v.as_str())
|
||
.map(String::from);
|
||
|
||
let org_name = json
|
||
.get("orgName")
|
||
.and_then(|v| v.as_str())
|
||
.map(String::from);
|
||
|
||
let api_key_source = json
|
||
.get("apiKeySource")
|
||
.and_then(|v| v.as_str())
|
||
.map(String::from);
|
||
|
||
let api_provider = json
|
||
.get("apiProvider")
|
||
.and_then(|v| v.as_str())
|
||
.map(String::from);
|
||
|
||
let subscription_type = json
|
||
.get("subscriptionType")
|
||
.and_then(|v| v.as_str())
|
||
.map(String::from);
|
||
|
||
tracing::info!("Claude auth status: logged_in={}", is_logged_in);
|
||
Ok(ClaudeAuthStatus {
|
||
is_logged_in,
|
||
email,
|
||
org_id,
|
||
org_name,
|
||
api_key_source,
|
||
api_provider,
|
||
subscription_type,
|
||
})
|
||
} else {
|
||
// Non-JSON output: fall back to heuristic
|
||
let lower = raw.to_lowercase();
|
||
let is_logged_in = output.status.success()
|
||
&& !lower.contains("not logged in")
|
||
&& !lower.contains("not authenticated")
|
||
&& !lower.contains("no account");
|
||
tracing::info!("Claude auth status (non-JSON): logged_in={}", is_logged_in);
|
||
Ok(ClaudeAuthStatus {
|
||
is_logged_in,
|
||
email: None,
|
||
org_id: None,
|
||
org_name: None,
|
||
api_key_source: None,
|
||
api_provider: None,
|
||
subscription_type: None,
|
||
})
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn auth_login() -> Result<String, String> {
|
||
tracing::info!("Running claude auth login");
|
||
|
||
let output = create_claude_command()
|
||
.args(["auth", "login"])
|
||
.output()
|
||
.map_err(|e| format!("Failed to run claude auth login: {}", e))?;
|
||
|
||
let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
|
||
|
||
if output.status.success() {
|
||
let message = if stdout.is_empty() { "Login successful".to_string() } else { stdout };
|
||
tracing::info!("Claude auth login succeeded");
|
||
Ok(message)
|
||
} else {
|
||
let error = if stderr.is_empty() { stdout } else { stderr };
|
||
tracing::error!("Claude auth login failed: {}", error);
|
||
Err(format!("Login failed: {}", error))
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn auth_logout() -> Result<String, String> {
|
||
tracing::info!("Running claude auth logout");
|
||
|
||
let output = create_claude_command()
|
||
.args(["auth", "logout"])
|
||
.output()
|
||
.map_err(|e| format!("Failed to run claude auth logout: {}", e))?;
|
||
|
||
let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
|
||
|
||
if output.status.success() {
|
||
let message = if stdout.is_empty() { "Logged out successfully".to_string() } else { stdout };
|
||
tracing::info!("Claude auth logout succeeded");
|
||
Ok(message)
|
||
} else {
|
||
let error = if stderr.is_empty() { stdout } else { stderr };
|
||
tracing::error!("Claude auth logout failed: {}", error);
|
||
Err(format!("Logout failed: {}", error))
|
||
}
|
||
}
|
||
|
||
// ==================== Plugin Management Commands ====================
|
||
|
||
/// A Claude Code plugin as reported by `claude plugin list`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginInfo {
    pub name: String,
    pub version: String,
    // Not provided by the CLI's list output; always None today
    // (see parse_plugin_list).
    pub description: Option<String>,
    pub enabled: bool,
}
|
||
|
||
/// Parse plugin list output from Claude CLI
|
||
fn parse_plugin_list(stdout: &str) -> Vec<PluginInfo> {
|
||
let mut plugins = Vec::new();
|
||
|
||
// Parse text output format:
|
||
// ❯ macrodata@macrodata
|
||
// Version: 0.1.3
|
||
// Scope: user
|
||
// Status: ✔ enabled
|
||
|
||
let lines: Vec<&str> = stdout.lines().collect();
|
||
let mut i = 0;
|
||
while i < lines.len() {
|
||
let line = lines[i].trim();
|
||
|
||
// Look for plugin name line (starts with ❯)
|
||
if line.starts_with("❯") {
|
||
let name = line.trim_start_matches("❯").trim().to_string();
|
||
let mut version = String::new();
|
||
let mut enabled = false;
|
||
|
||
// Parse following lines for metadata
|
||
i += 1;
|
||
while i < lines.len() {
|
||
let meta_line = lines[i].trim();
|
||
if meta_line.is_empty() || meta_line.starts_with("❯") {
|
||
break;
|
||
}
|
||
|
||
if meta_line.starts_with("Version:") {
|
||
version = meta_line.trim_start_matches("Version:").trim().to_string();
|
||
} else if meta_line.starts_with("Status:") {
|
||
enabled = meta_line.contains("enabled");
|
||
}
|
||
i += 1;
|
||
}
|
||
|
||
plugins.push(PluginInfo {
|
||
name,
|
||
version,
|
||
description: None,
|
||
enabled,
|
||
});
|
||
continue;
|
||
}
|
||
i += 1;
|
||
}
|
||
|
||
plugins
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn list_plugins() -> Result<Vec<PluginInfo>, String> {
|
||
tracing::debug!("Listing Claude Code plugins");
|
||
|
||
let output = create_claude_command()
|
||
.arg("plugin")
|
||
.arg("list")
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||
let plugins = parse_plugin_list(&stdout);
|
||
tracing::info!("Listed {} plugins", plugins.len());
|
||
Ok(plugins)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to list plugins: {}", error);
|
||
Err(format!("Failed to list plugins: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin list: {}", e);
|
||
Err(format!("Failed to execute claude plugin list: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn install_plugin(plugin_name: String) -> Result<String, String> {
|
||
tracing::debug!("Installing plugin: {}", plugin_name);
|
||
|
||
let output = create_claude_command()
|
||
.arg("plugin")
|
||
.arg("install")
|
||
.arg(&plugin_name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully installed plugin: {}", plugin_name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to install plugin {}: {}", plugin_name, error);
|
||
Err(format!("Failed to install plugin: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin install: {}", e);
|
||
Err(format!("Failed to execute claude plugin install: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn uninstall_plugin(plugin_name: String) -> Result<String, String> {
|
||
tracing::debug!("Uninstalling plugin: {}", plugin_name);
|
||
|
||
let output = create_claude_command()
|
||
.arg("plugin")
|
||
.arg("uninstall")
|
||
.arg(&plugin_name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully uninstalled plugin: {}", plugin_name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to uninstall plugin {}: {}", plugin_name, error);
|
||
Err(format!("Failed to uninstall plugin: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin uninstall: {}", e);
|
||
Err(format!("Failed to execute claude plugin uninstall: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn enable_plugin(plugin_name: String) -> Result<String, String> {
|
||
tracing::debug!("Enabling plugin: {}", plugin_name);
|
||
|
||
let output = create_claude_command()
|
||
.arg("plugin")
|
||
.arg("enable")
|
||
.arg(&plugin_name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully enabled plugin: {}", plugin_name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to enable plugin {}: {}", plugin_name, error);
|
||
Err(format!("Failed to enable plugin: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin enable: {}", e);
|
||
Err(format!("Failed to execute claude plugin enable: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn disable_plugin(plugin_name: String) -> Result<String, String> {
|
||
tracing::debug!("Disabling plugin: {}", plugin_name);
|
||
|
||
let output = create_claude_command()
|
||
.arg("plugin")
|
||
.arg("disable")
|
||
.arg(&plugin_name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully disabled plugin: {}", plugin_name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to disable plugin {}: {}", plugin_name, error);
|
||
Err(format!("Failed to disable plugin: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin disable: {}", e);
|
||
Err(format!("Failed to execute claude plugin disable: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn update_plugin(plugin_name: String) -> Result<String, String> {
|
||
tracing::debug!("Updating plugin: {}", plugin_name);
|
||
|
||
let output = create_claude_command()
|
||
.arg("plugin")
|
||
.arg("update")
|
||
.arg(&plugin_name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully updated plugin: {}", plugin_name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to update plugin {}: {}", plugin_name, error);
|
||
Err(format!("Failed to update plugin: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin update: {}", e);
|
||
Err(format!("Failed to execute claude plugin update: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
// ==================== Plugin Marketplace Commands ====================
|
||
|
||
/// A configured plugin marketplace as reported by
/// `claude plugin marketplace list`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MarketplaceInfo {
    pub name: String,
    // Human-readable source, e.g. "GitHub (anthropics/claude-plugins-official)".
    pub source: String,
}
|
||
|
||
/// Parse marketplace list output from Claude CLI
|
||
fn parse_marketplace_list(stdout: &str) -> Vec<MarketplaceInfo> {
|
||
let mut marketplaces = Vec::new();
|
||
|
||
// Parse format:
|
||
// Configured marketplaces:
|
||
//
|
||
// ❯ claude-plugins-official
|
||
// Source: GitHub (anthropics/claude-plugins-official)
|
||
//
|
||
// ❯ macrodata
|
||
// Source: GitHub (ascorbic/macrodata)
|
||
|
||
let mut current_name: Option<String> = None;
|
||
|
||
for line in stdout.lines() {
|
||
let trimmed = line.trim();
|
||
|
||
// Look for marketplace names starting with ❯
|
||
if trimmed.starts_with("❯") {
|
||
current_name = Some(trimmed.trim_start_matches("❯").trim().to_string());
|
||
}
|
||
// Look for Source line
|
||
else if trimmed.starts_with("Source:") && current_name.is_some() {
|
||
let source = trimmed.trim_start_matches("Source:").trim().to_string();
|
||
marketplaces.push(MarketplaceInfo {
|
||
name: current_name.take().unwrap(),
|
||
source,
|
||
});
|
||
}
|
||
}
|
||
|
||
marketplaces
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn list_marketplaces() -> Result<Vec<MarketplaceInfo>, String> {
|
||
tracing::debug!("Listing plugin marketplaces");
|
||
|
||
let output = create_claude_command()
|
||
.arg("plugin")
|
||
.arg("marketplace")
|
||
.arg("list")
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if !output.status.success() {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to list marketplaces: {}", error);
|
||
return Err(format!("Failed to list marketplaces: {}", error));
|
||
}
|
||
|
||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||
let marketplaces = parse_marketplace_list(&stdout);
|
||
tracing::info!("Found {} marketplaces", marketplaces.len());
|
||
Ok(marketplaces)
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin marketplace list: {}", e);
|
||
Err(format!(
|
||
"Failed to execute claude plugin marketplace list: {}",
|
||
e
|
||
))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn add_marketplace(source: String) -> Result<String, String> {
|
||
tracing::debug!("Adding marketplace: {}", source);
|
||
|
||
let output = create_claude_command()
|
||
.arg("plugin")
|
||
.arg("marketplace")
|
||
.arg("add")
|
||
.arg(&source)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully added marketplace: {}", source);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to add marketplace {}: {}", source, error);
|
||
Err(format!("Failed to add marketplace: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin marketplace add: {}", e);
|
||
Err(format!(
|
||
"Failed to execute claude plugin marketplace add: {}",
|
||
e
|
||
))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn remove_marketplace(name: String) -> Result<String, String> {
|
||
tracing::debug!("Removing marketplace: {}", name);
|
||
|
||
let output = create_claude_command()
|
||
.arg("plugin")
|
||
.arg("marketplace")
|
||
.arg("remove")
|
||
.arg(&name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully removed marketplace: {}", name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to remove marketplace {}: {}", name, error);
|
||
Err(format!("Failed to remove marketplace: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin marketplace remove: {}", e);
|
||
Err(format!(
|
||
"Failed to execute claude plugin marketplace remove: {}",
|
||
e
|
||
))
|
||
}
|
||
}
|
||
}
|
||
|
||
// ==================== MCP Management Commands ====================
|
||
|
||
/// One MCP server entry parsed from `claude mcp list` output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpServerInfo {
    /// Server name as printed by the CLI; may itself contain colons for
    /// plugin-provided servers (e.g. "plugin:macrodata:macrodata").
    pub name: String,
    /// Launch command line for stdio servers; `None` for URL-based servers.
    pub command: Option<String>,
    /// Endpoint URL for HTTP/SSE servers; `None` for stdio servers.
    pub url: Option<String>,
    pub transport: String, // "stdio", "http", or "sse"
    /// Extra environment variables; `parse_mcp_server_list` always leaves
    /// this `None` (the CLI listing does not expose them).
    pub env: Option<serde_json::Value>,
    pub status: Option<String>, // "Connected" or "Failed to connect"
}
|
||
|
||
/// Parse MCP server list output from Claude CLI
///
/// Handles three observed line shapes: URL-based servers (HTTP/SSE) with a
/// "(TRANSPORT)" suffix, stdio servers with a launch command, and
/// plugin-provided servers whose names themselves contain colons.
fn parse_mcp_server_list(stdout: &str) -> Vec<McpServerInfo> {
    let mut servers = Vec::new();

    // Parse text output format:
    // asana: https://mcp.asana.com/sse (SSE) - ✓ Connected
    // gitea: gitea-mcp -t stdio --host https://git.nhcarrigan.com - ✓ Connected
    // plugin:macrodata:macrodata: ... - ✓ Connected

    for line in stdout.lines() {
        let line = line.trim();
        // Skip blank lines and the CLI's "Checking..." progress line.
        if line.is_empty() || line.starts_with("Checking") {
            continue;
        }

        // Find the last occurrence of " - ✓" or " - ✗" to split status from the rest
        // `pos + 3` skips the 3-byte " - " separator; the ✓/✗ mark itself is
        // removed by the trim_start_matches calls below.
        let (content, status) = if let Some(pos) = line.rfind(" - ✓").or_else(|| line.rfind(" - ✗")) {
            let status_str = line[pos + 3..].trim().trim_start_matches("✓").trim_start_matches("✗").trim();
            (line[..pos].trim(), Some(status_str.to_string()))
        } else {
            (line, None)
        };

        // Now find the name by looking for the first colon followed by either http or a command
        // The format is: "name: command/url"
        // But name can contain colons (e.g., "plugin:macrodata:macrodata")
        // Strategy: Find the colon that separates name from content
        // - If content after colon starts with "http", it's a URL (name is before first colon)
        // - If content is a command, name might have colons, so find the last colon before a non-URL space-separated part

        let (name, rest) = if let Some(first_colon) = content.find(':') {
            let after_first_colon = content[first_colon + 1..].trim_start();

            // Check if it's a URL (starts with http)
            if after_first_colon.starts_with("http") {
                // Name is everything before the first colon
                (content[..first_colon].to_string(), after_first_colon.to_string())
            } else {
                // It's a command - name might contain colons (like plugin:foo:bar)
                // Strategy: Commands start with a letter/word, not with a colon
                // Find the rightmost colon that has whitespace after it (indicating start of command)
                // NOTE(review): this keeps the LAST qualifying colon, so a colon
                // inside the command's arguments (other than "://") would shift
                // the split — assumed not to occur in real CLI output; verify.
                let mut split_pos = first_colon;
                for (idx, _) in content.match_indices(':') {
                    let after = content[idx + 1..].trim_start();
                    // If what comes after this colon is NOT another colon-prefixed part,
                    // and doesn't start with "//" (part of URL), this is our split point
                    if !after.is_empty() && !after.starts_with(':') && !after.starts_with("//") {
                        // Check if this looks like a command (starts with letter/number)
                        if after.chars().next().map(|c| c.is_alphanumeric()).unwrap_or(false) {
                            split_pos = idx;
                        }
                    }
                }

                (content[..split_pos].to_string(), content[split_pos + 1..].trim_start().to_string())
            }
        } else {
            continue; // Skip lines without colons
        };

        let name = name.trim().to_string();
        let rest = rest.trim();

        // Determine if it's a URL or command
        let (url, command, transport) = if rest.starts_with("http") {
            // HTTP/SSE server: "https://mcp.asana.com/sse (SSE)"
            // Extract URL and transport type from the trailing "(...)" group;
            // default to "http" when no transport suffix is present.
            let (url, transport) = if let Some((url_part, transport_part)) = rest.rsplit_once('(') {
                let url = url_part.trim().to_string();
                let transport = transport_part.trim_end_matches(')').trim().to_lowercase();
                (Some(url), transport)
            } else {
                (Some(rest.to_string()), "http".to_string())
            };

            (url, None, transport)
        } else {
            // stdio server: "gitea-mcp -t stdio --host https://git.nhcarrigan.com"
            // Command is everything in rest
            (None, Some(rest.to_string()), "stdio".to_string())
        };

        servers.push(McpServerInfo {
            name,
            command,
            url,
            transport,
            env: None, // never present in the plain-text listing
            status,
        });
    }

    servers
}
|
||
|
||
#[tauri::command]
|
||
pub async fn list_mcp_servers() -> Result<Vec<McpServerInfo>, String> {
|
||
tracing::debug!("Listing MCP servers");
|
||
|
||
let output = create_claude_command()
|
||
.arg("mcp")
|
||
.arg("list")
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||
let servers = parse_mcp_server_list(&stdout);
|
||
tracing::info!("Listed {} MCP servers", servers.len());
|
||
Ok(servers)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to list MCP servers: {}", error);
|
||
Err(format!("Failed to list MCP servers: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude mcp list: {}", e);
|
||
Err(format!("Failed to execute claude mcp list: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_mcp_server(name: String) -> Result<McpServerInfo, String> {
|
||
tracing::debug!("Getting MCP server details: {}", name);
|
||
|
||
// Get all servers and find the matching one
|
||
let servers = list_mcp_servers().await?;
|
||
|
||
servers
|
||
.into_iter()
|
||
.find(|s| s.name == name)
|
||
.ok_or_else(|| format!("MCP server '{}' not found", name))
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn remove_mcp_server(name: String) -> Result<String, String> {
|
||
tracing::debug!("Removing MCP server: {}", name);
|
||
|
||
let output = create_claude_command()
|
||
.arg("mcp")
|
||
.arg("remove")
|
||
.arg(&name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully removed MCP server: {}", name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to remove MCP server {}: {}", name, error);
|
||
Err(format!("Failed to remove MCP server: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude mcp remove: {}", e);
|
||
Err(format!("Failed to execute claude mcp remove: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn add_mcp_server(
|
||
name: String,
|
||
command_or_url: String,
|
||
transport: String,
|
||
env_vars: Option<Vec<String>>,
|
||
headers: Option<Vec<String>>,
|
||
) -> Result<String, String> {
|
||
tracing::debug!("Adding MCP server: {} with transport {}", name, transport);
|
||
|
||
let mut cmd = create_claude_command();
|
||
cmd.arg("mcp").arg("add");
|
||
|
||
// Add transport flag
|
||
cmd.arg("--transport").arg(&transport);
|
||
|
||
// Add environment variables if provided
|
||
if let Some(env_vars) = env_vars {
|
||
for env_var in env_vars {
|
||
cmd.arg("-e").arg(env_var);
|
||
}
|
||
}
|
||
|
||
// Add headers if provided (for HTTP/SSE)
|
||
if let Some(headers) = headers {
|
||
for header in headers {
|
||
cmd.arg("-H").arg(header);
|
||
}
|
||
}
|
||
|
||
// Add name and command/URL
|
||
cmd.arg(&name).arg(&command_or_url);
|
||
|
||
let output = cmd.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully added MCP server: {}", name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to add MCP server {}: {}", name, error);
|
||
Err(format!("Failed to add MCP server: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude mcp add: {}", e);
|
||
Err(format!("Failed to execute claude mcp add: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_mcp_server_details(name: String) -> Result<String, String> {
|
||
tracing::debug!("Getting detailed info for MCP server: {}", name);
|
||
|
||
let output = create_claude_command()
|
||
.arg("mcp")
|
||
.arg("get")
|
||
.arg(&name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let details = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::debug!("Got MCP server details: {}", details);
|
||
Ok(details)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to get MCP server details for {}: {}", name, error);
|
||
Err(format!("Failed to get server details: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude mcp get: {}", e);
|
||
Err(format!("Failed to execute claude mcp get: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
// ==================== Codebase Mapper ====================
|
||
|
||
/// Directories to skip when scanning (always ignored regardless of .gitignore)
///
/// Entries are matched by exact directory name; the tree builder lists them
/// as "<name>/ (skipped)" but never recurses into them. Covers VCS metadata,
/// dependency/build output, caches, IDE settings, and Python virtualenvs.
const SCAN_SKIP_DIRS: &[&str] = &[
    ".git",
    "node_modules",
    "target",
    ".next",
    "dist",
    "build",
    "out",
    "__pycache__",
    ".cache",
    ".pytest_cache",
    "vendor",
    ".idea",
    ".vscode",
    "coverage",
    ".nyc_output",
    "venv",
    ".venv",
    "env",
    ".tox",
];
|
||
|
||
/// Files that indicate the project type
///
/// Checked in declaration order: the first marker present in the project
/// root decides the reported project type; every present marker is still
/// recorded as a key file.
///
/// NOTE(review): "*.csproj" is a glob pattern, not a literal file name — a
/// plain `dir.join(marker).exists()` check can never match it; the consumer
/// must expand it against directory entries. Verify the caller handles this.
const PROJECT_MARKERS: &[(&str, &str)] = &[
    ("Cargo.toml", "Rust"),
    ("package.json", "Node.js"),
    ("pyproject.toml", "Python"),
    ("requirements.txt", "Python"),
    ("go.mod", "Go"),
    ("pom.xml", "Java (Maven)"),
    ("build.gradle", "Java (Gradle)"),
    ("Gemfile", "Ruby"),
    ("composer.json", "PHP"),
    ("*.csproj", "C#/.NET"),
    ("CMakeLists.txt", "C/C++ (CMake)"),
    ("Makefile", "C/C++"),
];
|
||
|
||
/// Result of scanning a project directory for the codebase mapper.
#[derive(Debug, Serialize)]
pub struct ProjectScan {
    /// The directory that was scanned, as supplied by the caller.
    pub working_dir: String,
    /// Rendered tree of the directory contents (one entry per line).
    pub file_tree: String,
    /// Project type inferred from the first matching PROJECT_MARKERS entry,
    /// or "Unknown" when none match.
    pub detected_type: String,
    /// Marker files and notable root-level files found in the directory.
    pub key_files: Vec<String>,
}
|
||
|
||
/// Recursively build a file tree string, respecting skip dirs, up to `max_depth` levels.
|
||
fn build_file_tree(
|
||
dir: &std::path::Path,
|
||
prefix: &str,
|
||
depth: usize,
|
||
max_depth: usize,
|
||
lines: &mut Vec<String>,
|
||
) {
|
||
if depth > max_depth {
|
||
lines.push(format!("{}...", prefix));
|
||
return;
|
||
}
|
||
|
||
let Ok(entries) = std::fs::read_dir(dir) else {
|
||
return;
|
||
};
|
||
|
||
let mut items: Vec<std::fs::DirEntry> = entries
|
||
.filter_map(|e| e.ok())
|
||
.collect();
|
||
items.sort_by_key(|e| {
|
||
let name = e.file_name().to_string_lossy().to_lowercase();
|
||
// Sort: hidden last, directories first
|
||
let is_hidden = name.starts_with('.');
|
||
let is_dir = e.path().is_dir();
|
||
(is_hidden, !is_dir, name)
|
||
});
|
||
|
||
let count = items.len();
|
||
for (i, entry) in items.iter().enumerate() {
|
||
let name = entry.file_name().to_string_lossy().to_string();
|
||
let is_last = i == count - 1;
|
||
let connector = if is_last { "└── " } else { "├── " };
|
||
let child_prefix = if is_last {
|
||
format!("{} ", prefix)
|
||
} else {
|
||
format!("{}│ ", prefix)
|
||
};
|
||
|
||
let path = entry.path();
|
||
if path.is_dir() {
|
||
if SCAN_SKIP_DIRS.contains(&name.as_str()) {
|
||
lines.push(format!("{}{}{}/ (skipped)", prefix, connector, name));
|
||
continue;
|
||
}
|
||
lines.push(format!("{}{}{}/", prefix, connector, name));
|
||
build_file_tree(&path, &child_prefix, depth + 1, max_depth, lines);
|
||
} else {
|
||
lines.push(format!("{}{}{}", prefix, connector, name));
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn scan_project(working_dir: String) -> Result<ProjectScan, String> {
|
||
let dir_path = std::path::Path::new(&working_dir);
|
||
|
||
if !dir_path.exists() {
|
||
return Err(format!("Directory does not exist: {}", working_dir));
|
||
}
|
||
if !dir_path.is_dir() {
|
||
return Err(format!("Path is not a directory: {}", working_dir));
|
||
}
|
||
|
||
// Detect project type by checking for marker files
|
||
let mut detected_type = "Unknown".to_string();
|
||
let mut key_files: Vec<String> = Vec::new();
|
||
|
||
for (marker, project_type) in PROJECT_MARKERS {
|
||
let marker_path = dir_path.join(marker);
|
||
if marker_path.exists() {
|
||
if detected_type == "Unknown" {
|
||
detected_type = project_type.to_string();
|
||
}
|
||
key_files.push(marker.to_string());
|
||
}
|
||
}
|
||
|
||
// Also collect other notable root-level files
|
||
let notable_root_files = &[
|
||
"README.md", "CLAUDE.md", "LICENSE", ".env.example",
|
||
"docker-compose.yml", "Dockerfile", ".github",
|
||
"tsconfig.json", "vitest.config.ts", "eslint.config.js",
|
||
"check-all.sh", "tauri.conf.json",
|
||
];
|
||
for file in notable_root_files {
|
||
let file_path = dir_path.join(file);
|
||
if file_path.exists() && !key_files.contains(&file.to_string()) {
|
||
key_files.push(file.to_string());
|
||
}
|
||
}
|
||
|
||
// Build file tree (max 4 levels deep)
|
||
let mut lines: Vec<String> = vec![format!("{}/", working_dir)];
|
||
build_file_tree(dir_path, "", 0, 4, &mut lines);
|
||
let file_tree = lines.join("\n");
|
||
|
||
Ok(ProjectScan {
|
||
working_dir,
|
||
file_tree,
|
||
detected_type,
|
||
key_files,
|
||
})
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn open_binary_file(app: AppHandle, path: String) -> Result<(), String> {
|
||
use tauri_plugin_opener::OpenerExt;
|
||
|
||
#[cfg(target_os = "windows")]
|
||
{
|
||
// Convert the WSL Linux path (e.g. /tmp/file.pdf) to a Windows UNC path
|
||
// (e.g. \\wsl.localhost\Ubuntu\tmp\file.pdf) so the Windows shell can open it.
|
||
let output = std::process::Command::new("wsl")
|
||
.args(["wslpath", "-w", &path])
|
||
.output()
|
||
.map_err(|e| e.to_string())?;
|
||
let windows_path = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
app.opener()
|
||
.open_path(windows_path, None::<&str>)
|
||
.map_err(|e| e.to_string())
|
||
}
|
||
|
||
#[cfg(not(target_os = "windows"))]
|
||
{
|
||
app.opener()
|
||
.open_path(path, None::<&str>)
|
||
.map_err(|e| e.to_string())
|
||
}
|
||
}
|
||
|
||
/// Read `~/.claude/CLAUDE.md` via WSL (for Windows).
/// Returns an empty string if the file does not exist.
///
/// The `|| true` in the shell command turns a missing file into a success
/// with empty stdout; `bash -l` runs a login shell so `~` resolves to the
/// WSL user's home directory.
#[cfg(target_os = "windows")]
async fn get_global_claude_md_via_wsl() -> Result<String, String> {
    use std::process::Command;

    let result = Command::new("wsl")
        .hide_window()
        .args(["-e", "bash", "-l", "-c", "cat ~/.claude/CLAUDE.md 2>/dev/null || true"])
        .output();

    match result {
        Ok(output) => Ok(String::from_utf8_lossy(&output.stdout).to_string()),
        Err(e) => Err(format!("Failed to execute WSL command: {}", e)),
    }
}
|
||
|
||
/// Write content to `~/.claude/CLAUDE.md` via WSL (for Windows).
/// Creates the file (and `~/.claude/` directory) if they do not exist.
///
/// The content is streamed through the child's stdin into a shell redirect
/// (`cat > ~/.claude/CLAUDE.md`) running inside WSL.
#[cfg(target_os = "windows")]
async fn save_global_claude_md_via_wsl(content: String) -> Result<(), String> {
    use std::io::Write;
    use std::process::{Command, Stdio};

    let mut child = Command::new("wsl")
        .hide_window()
        .args([
            "-e",
            "bash",
            "-l",
            "-c",
            "mkdir -p ~/.claude && cat > ~/.claude/CLAUDE.md",
        ])
        .stdin(Stdio::piped())
        .spawn()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;

    if let Some(stdin) = child.stdin.as_mut() {
        stdin
            .write_all(content.as_bytes())
            .map_err(|e| format!("Failed to write content to WSL stdin: {}", e))?;
    }

    // `wait` closes the child's stdin before waiting, so `cat` sees EOF.
    match child.wait() {
        Ok(status) if status.success() => Ok(()),
        Ok(_) => Err("Failed to save CLAUDE.md via WSL".to_string()),
        Err(e) => Err(format!("Failed to wait for WSL command: {}", e)),
    }
}
|
||
|
||
/// Read the contents of `~/.claude/CLAUDE.md`.
|
||
/// Returns an empty string if the file does not exist.
|
||
#[tauri::command]
|
||
pub async fn get_global_claude_md() -> Result<String, String> {
|
||
#[cfg(target_os = "windows")]
|
||
return get_global_claude_md_via_wsl().await;
|
||
|
||
#[cfg(not(target_os = "windows"))]
|
||
{
|
||
let path = dirs::home_dir()
|
||
.ok_or_else(|| "Could not determine home directory".to_string())?
|
||
.join(".claude")
|
||
.join("CLAUDE.md");
|
||
|
||
if !path.exists() {
|
||
return Ok(String::new());
|
||
}
|
||
|
||
std::fs::read_to_string(&path).map_err(|e| format!("Failed to read CLAUDE.md: {}", e))
|
||
}
|
||
}
|
||
|
||
/// Write content to `~/.claude/CLAUDE.md`.
|
||
/// Creates the file (and `~/.claude/` directory) if they do not exist.
|
||
#[tauri::command]
|
||
pub async fn save_global_claude_md(content: String) -> Result<(), String> {
|
||
#[cfg(target_os = "windows")]
|
||
return save_global_claude_md_via_wsl(content).await;
|
||
|
||
#[cfg(not(target_os = "windows"))]
|
||
{
|
||
let claude_dir = dirs::home_dir()
|
||
.ok_or_else(|| "Could not determine home directory".to_string())?
|
||
.join(".claude");
|
||
|
||
if !claude_dir.exists() {
|
||
std::fs::create_dir_all(&claude_dir)
|
||
.map_err(|e| format!("Failed to create ~/.claude directory: {}", e))?;
|
||
}
|
||
|
||
let path = claude_dir.join("CLAUDE.md");
|
||
std::fs::write(&path, content).map_err(|e| format!("Failed to write CLAUDE.md: {}", e))
|
||
}
|
||
}
|
||
|
||
#[cfg(test)]
|
||
mod tests {
|
||
use super::*;
|
||
use std::fs::{self, File};
|
||
use std::io::Write;
|
||
use tempfile::TempDir;
|
||
|
||
    // Helper to run async tests
    //
    // Spins up a fresh Tokio runtime per call and blocks on the future, so
    // async Tauri commands can be driven from synchronous #[test] functions.
    fn run_async<F: std::future::Future>(f: F) -> F::Output {
        tokio::runtime::Runtime::new().unwrap().block_on(f)
    }
|
||
|
||
// ==================== create_claude_command tests ====================
|
||
|
||
    // Windows builds must route claude through WSL: program is "wsl" with a
    // single argument naming the claude binary inside the distro.
    #[test]
    #[cfg(target_os = "windows")]
    fn test_create_claude_command_windows() {
        // On Windows, should create a command that uses wsl with full path to claude
        // The path is resolved dynamically via `which` in a login shell
        let cmd = create_claude_command();
        let program = cmd.get_program();

        assert_eq!(program, "wsl");

        // Verify the first argument is a path to claude (full path from `which`)
        // or fallback to just "claude" if which fails
        let args: Vec<&std::ffi::OsStr> = cmd.get_args().collect();
        assert_eq!(args.len(), 1);

        let arg_str = args[0].to_string_lossy();
        assert!(
            arg_str.contains("claude"),
            "Expected argument to contain 'claude', got: {}",
            arg_str
        );
    }
|
||
|
||
    // Non-Windows builds invoke claude directly; the program path is resolved
    // at runtime, so only its suffix is asserted (environment-dependent).
    #[test]
    #[cfg(not(target_os = "windows"))]
    fn test_create_claude_command_linux() {
        // On Linux/Mac, should create a command that uses the full path to claude
        // (resolved via `which` command)
        let cmd = create_claude_command();
        let program = cmd.get_program();

        // The program should be the full path to claude (from `which`)
        // or fallback to "claude" if which fails
        let program_str = program.to_string_lossy();
        assert!(
            program_str.ends_with("claude"),
            "Expected program to end with 'claude', got: {}",
            program_str
        );
    }
|
||
|
||
// ==================== validate_directory tests ====================
|
||
|
||
    // Happy path: an existing absolute directory validates and the returned
    // (canonicalized) path still contains the directory's basename.
    #[test]
    fn test_validate_directory_absolute_path_exists() {
        let temp_dir = TempDir::new().unwrap();
        let path = temp_dir.path().to_string_lossy().to_string();

        let result = run_async(validate_directory(path.clone(), None));
        assert!(result.is_ok());
        // Canonicalized path should be returned
        assert!(result.unwrap().contains(&temp_dir.path().file_name().unwrap().to_string_lossy().to_string()));
    }

    // A path that does not exist is rejected with a "does not exist" message.
    #[test]
    fn test_validate_directory_path_not_exists() {
        let result = run_async(validate_directory(
            "/nonexistent/path/that/does/not/exist".to_string(),
            None,
        ));
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("does not exist"));
    }

    // A regular file is rejected with a "not a directory" message.
    #[test]
    fn test_validate_directory_path_is_file() {
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("test_file.txt");
        File::create(&file_path).unwrap();

        let result = run_async(validate_directory(
            file_path.to_string_lossy().to_string(),
            None,
        ));
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("not a directory"));
    }

    // "~" alone expands to the home directory (only checked when HOME is set).
    #[test]
    fn test_validate_directory_home_expansion() {
        // This test assumes HOME is set (which it should be on most systems)
        if std::env::var_os("HOME").is_some() {
            let result = run_async(validate_directory("~".to_string(), None));
            assert!(result.is_ok());
            // Should not contain ~ after expansion
            assert!(!result.unwrap().contains("~"));
        }
    }

    // "~/<subdir>" expands against HOME; uses the first real subdirectory found.
    #[test]
    fn test_validate_directory_home_subpath_expansion() {
        // This test assumes HOME is set and has some subdirectory
        if let Some(home) = std::env::var_os("HOME") {
            let home_path = std::path::Path::new(&home);
            // Find any subdirectory in home
            if let Ok(entries) = fs::read_dir(home_path) {
                for entry in entries.flatten() {
                    if entry.path().is_dir() {
                        let subdir_name = entry.file_name().to_string_lossy().to_string();
                        let tilde_path = format!("~/{}", subdir_name);
                        let result = run_async(validate_directory(tilde_path, None));
                        assert!(result.is_ok());
                        assert!(!result.unwrap().contains("~"));
                        break;
                    }
                }
            }
        }
    }

    // Relative paths resolve against the supplied current_dir argument.
    #[test]
    fn test_validate_directory_relative_path_with_current_dir() {
        let temp_dir = TempDir::new().unwrap();
        let subdir = temp_dir.path().join("subdir");
        fs::create_dir(&subdir).unwrap();

        let result = run_async(validate_directory(
            "subdir".to_string(),
            Some(temp_dir.path().to_string_lossy().to_string()),
        ));
        assert!(result.is_ok());
        assert!(result.unwrap().contains("subdir"));
    }

    // "." resolves to the current_dir itself.
    #[test]
    fn test_validate_directory_dot_path() {
        let temp_dir = TempDir::new().unwrap();

        let result = run_async(validate_directory(
            ".".to_string(),
            Some(temp_dir.path().to_string_lossy().to_string()),
        ));
        assert!(result.is_ok());
    }

    // ".." resolves to the parent of current_dir.
    #[test]
    fn test_validate_directory_dotdot_path() {
        let temp_dir = TempDir::new().unwrap();
        let subdir = temp_dir.path().join("subdir");
        fs::create_dir(&subdir).unwrap();

        let result = run_async(validate_directory(
            "..".to_string(),
            Some(subdir.to_string_lossy().to_string()),
        ));
        assert!(result.is_ok());
        // Should resolve to parent
        let resolved = result.unwrap();
        assert!(resolved.contains(&temp_dir.path().file_name().unwrap().to_string_lossy().to_string()));
    }

    // With no current_dir a relative path is resolved as-is and must exist.
    #[test]
    fn test_validate_directory_relative_without_current_dir() {
        // Relative path without current_dir - should fail since relative path likely won't exist
        let result = run_async(validate_directory(
            "some_random_nonexistent_relative_path".to_string(),
            None,
        ));
        assert!(result.is_err());
    }
|
||
|
||
// ==================== get_file_size tests ====================
|
||
|
||
    // An empty file reports size 0.
    #[test]
    fn test_get_file_size_empty_file() {
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("empty.txt");
        File::create(&file_path).unwrap();

        let result = run_async(get_file_size(file_path.to_string_lossy().to_string()));
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), 0);
    }

    // Size reflects the exact byte count written.
    #[test]
    fn test_get_file_size_with_content() {
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("content.txt");
        let mut file = File::create(&file_path).unwrap();
        file.write_all(b"Hello, Hikari!").unwrap();

        let result = run_async(get_file_size(file_path.to_string_lossy().to_string()));
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), 14); // "Hello, Hikari!" is 14 bytes
    }

    // A larger payload still reports the exact byte count.
    #[test]
    fn test_get_file_size_larger_file() {
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("large.txt");
        let mut file = File::create(&file_path).unwrap();
        // Write 1000 bytes
        let data = vec![b'x'; 1000];
        file.write_all(&data).unwrap();

        let result = run_async(get_file_size(file_path.to_string_lossy().to_string()));
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), 1000);
    }

    // A missing file surfaces a metadata error.
    #[test]
    fn test_get_file_size_nonexistent_file() {
        let result = run_async(get_file_size(
            "/nonexistent/path/file.txt".to_string(),
        ));
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("Failed to get file metadata"));
    }

    #[test]
    fn test_get_file_size_directory() {
        let temp_dir = TempDir::new().unwrap();

        // Getting "size" of a directory should work but return directory metadata
        // This is actually valid - directories have metadata too
        let result = run_async(get_file_size(temp_dir.path().to_string_lossy().to_string()));
        assert!(result.is_ok());
        // Directory size is platform-dependent, just check it returns something
    }
|
||
|
||
// ==================== list_skills tests ====================
|
||
|
||
    // Smoke test only: list_skills reads from the real HOME, so asserting on
    // its contents would depend on the machine running the tests.
    #[test]
    fn test_list_skills_no_skills_dir() {
        // This test is tricky because it depends on HOME being set
        // and potentially affecting real user data, so we'll just
        // verify the function doesn't panic
        let result = run_async(list_skills());
        // Should either return Ok with a list or Ok with empty vec
        assert!(result.is_ok());
    }
|
||
|
||
// ==================== select_wsl_directory tests ====================
|
||
|
||
    // select_wsl_directory is expected to yield the current user's home path.
    #[test]
    fn test_select_wsl_directory_returns_home() {
        let result = run_async(select_wsl_directory());
        assert!(result.is_ok());

        // Should return the user's home directory
        let home_dir = result.unwrap();
        assert!(home_dir.starts_with("/home/") || home_dir == "/root");
    }
|
||
|
||
// ==================== UpdateInfo struct tests ====================
|
||
|
||
    // Serialization round-trip sanity: all populated fields appear in the
    // JSON. (Version values here are arbitrary fixtures, not a real
    // current/latest relationship.)
    #[test]
    fn test_update_info_serialization() {
        let info = UpdateInfo {
            current_version: "1.0.0".to_string(),
            latest_version: "0.4.0".to_string(),
            has_update: true,
            release_url: "https://example.com/release".to_string(),
            release_notes: Some("New features!".to_string()),
        };

        let json = serde_json::to_string(&info).unwrap();
        assert!(json.contains("1.0.0"));
        assert!(json.contains("0.4.0"));
        assert!(json.contains("true"));
        assert!(json.contains("New features!"));
    }

    // release_notes: None must still serialize without panicking.
    #[test]
    fn test_update_info_without_notes() {
        let info = UpdateInfo {
            current_version: "1.0.0".to_string(),
            latest_version: "1.0.0".to_string(),
            has_update: false,
            release_url: "https://example.com/release".to_string(),
            release_notes: None,
        };

        let json = serde_json::to_string(&info).unwrap();
        assert!(json.contains("null") || json.contains("release_notes"));
    }
|
||
|
||
// ==================== parse_npm_cli_version tests ====================
|
||
|
||
    // Valid npm registry payload: the "version" string is extracted verbatim.
    #[test]
    fn test_parse_npm_cli_version_valid() {
        let json = r#"{"name":"@anthropic-ai/claude-code","version":"2.1.72","description":"Claude Code"}"#;
        let result = parse_npm_cli_version(json).unwrap();
        assert_eq!(result, "2.1.72");
    }

    // Missing "version" key is an error, not a default.
    #[test]
    fn test_parse_npm_cli_version_missing_field() {
        let json = r#"{"name":"@anthropic-ai/claude-code","description":"no version here"}"#;
        let result = parse_npm_cli_version(json);
        assert!(result.is_err());
    }

    // Malformed JSON is rejected.
    #[test]
    fn test_parse_npm_cli_version_invalid_json() {
        let result = parse_npm_cli_version("not json at all");
        assert!(result.is_err());
    }

    // A non-string "version" value is rejected.
    #[test]
    fn test_parse_npm_cli_version_non_string_version() {
        let json = r#"{"version":123}"#;
        let result = parse_npm_cli_version(json);
        assert!(result.is_err());
    }
|
||
|
||
// ==================== SavedFileInfo struct tests ====================
|
||
|
||
#[test]
fn test_saved_file_info_serialization() {
    // Both the path and the filename should survive serde serialization.
    let saved = SavedFileInfo {
        path: String::from("/tmp/test.txt"),
        filename: String::from("test.txt"),
    };

    let serialized = serde_json::to_string(&saved).unwrap();
    assert!(serialized.contains("test.txt"));
    assert!(serialized.contains("/tmp/test.txt"));
}
|
||
|
||
// ==================== CLI Parser Tests ====================
|
||
|
||
#[test]
fn test_parse_plugin_list_single_enabled() {
    // One enabled plugin with every field present; `Scope` lines are not
    // mapped to any PluginInfo field.
    let cli_output = r#"❯ macrodata@macrodata
Version: 0.1.3
Scope: user
Status: ✔ enabled"#;

    let parsed = parse_plugin_list(cli_output);
    assert_eq!(parsed.len(), 1);
    let plugin = &parsed[0];
    assert_eq!(plugin.name, "macrodata@macrodata");
    assert_eq!(plugin.version, "0.1.3");
    assert!(plugin.enabled);
    assert_eq!(plugin.description, None);
}
|
||
|
||
#[test]
fn test_parse_plugin_list_single_disabled() {
    // A plugin whose status line reads "✘ disabled" parses with enabled == false.
    let cli_output = r#"❯ test-plugin@official
Version: 2.0.0
Status: ✘ disabled"#;

    let parsed = parse_plugin_list(cli_output);
    assert_eq!(parsed.len(), 1);
    assert!(!parsed[0].enabled);
    assert_eq!(parsed[0].version, "2.0.0");
    assert_eq!(parsed[0].name, "test-plugin@official");
}
|
||
|
||
#[test]
fn test_parse_plugin_list_multiple() {
    // Blank-line-separated entries each become one plugin record, in order.
    let cli_output = r#"❯ macrodata@macrodata
Version: 0.1.3
Status: ✔ enabled

❯ another-plugin@official
Version: 1.5.0
Status: ✘ disabled

❯ third-plugin@test
Version: 3.0.0-beta
Status: ✔ enabled"#;

    let parsed = parse_plugin_list(cli_output);
    assert_eq!(parsed.len(), 3);

    // (name, version, enabled) expected per entry, in listing order.
    let expected = [
        ("macrodata@macrodata", "0.1.3", true),
        ("another-plugin@official", "1.5.0", false),
        ("third-plugin@test", "3.0.0-beta", true),
    ];
    for (plugin, (name, version, enabled)) in parsed.iter().zip(expected) {
        assert_eq!(plugin.name, name);
        assert_eq!(plugin.version, version);
        assert_eq!(plugin.enabled, enabled);
    }
}
|
||
|
||
#[test]
fn test_parse_plugin_list_empty() {
    // Empty CLI output parses to an empty plugin list.
    assert!(parse_plugin_list("").is_empty());
}
|
||
|
||
#[test]
fn test_parse_marketplace_list_single() {
    // The "Configured marketplaces:" header is skipped; one entry yields
    // one marketplace with its name and source.
    let cli_output = r#"Configured marketplaces:

❯ claude-plugins-official
Source: GitHub (anthropics/claude-plugins-official)"#;

    let parsed = parse_marketplace_list(cli_output);
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0].source, "GitHub (anthropics/claude-plugins-official)");
    assert_eq!(parsed[0].name, "claude-plugins-official");
}
|
||
|
||
#[test]
fn test_parse_marketplace_list_multiple() {
    // Three entries should parse into three marketplaces, preserving order.
    let cli_output = r#"Configured marketplaces:

❯ claude-plugins-official
Source: GitHub (anthropics/claude-plugins-official)

❯ macrodata
Source: GitHub (ascorbic/macrodata)

❯ custom-marketplace
Source: GitHub (user/custom-marketplace)"#;

    let parsed = parse_marketplace_list(cli_output);
    let names: Vec<&str> = parsed.iter().map(|m| m.name.as_str()).collect();
    assert_eq!(
        names,
        ["claude-plugins-official", "macrodata", "custom-marketplace"]
    );
}
|
||
|
||
#[test]
fn test_parse_marketplace_list_empty() {
    // A header with no entries yields an empty list.
    assert!(parse_marketplace_list("Configured marketplaces:\n\n").is_empty());
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_sse_connected() {
    // An "(SSE)" server line carries a URL (no command), the lowercase
    // "sse" transport tag, and a "Connected" status.
    let line = "asana: https://mcp.asana.com/sse (SSE) - ✓ Connected";

    let parsed = parse_mcp_server_list(line);
    assert_eq!(parsed.len(), 1);
    let server = &parsed[0];
    assert_eq!(server.name, "asana");
    assert_eq!(server.transport, "sse");
    assert_eq!(server.url.as_deref(), Some("https://mcp.asana.com/sse"));
    assert_eq!(server.command, None);
    assert_eq!(server.status.as_deref(), Some("Connected"));
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_http_connected() {
    // "(HTTP)" maps to the lowercase "http" transport tag.
    let line = "test-server: https://api.example.com/mcp (HTTP) - ✓ Connected";

    let parsed = parse_mcp_server_list(line);
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0].transport, "http");
    assert_eq!(parsed[0].url.as_deref(), Some("https://api.example.com/mcp"));
    assert_eq!(parsed[0].status.as_deref(), Some("Connected"));
    assert_eq!(parsed[0].name, "test-server");
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_stdio_connected() {
    // A command-style entry (no URL) is treated as stdio transport, and the
    // full command string is preserved verbatim.
    let line = "gitea: gitea-mcp -t stdio --host https://git.nhcarrigan.com - ✓ Connected";

    let parsed = parse_mcp_server_list(line);
    assert_eq!(parsed.len(), 1);
    let server = &parsed[0];
    assert_eq!(server.name, "gitea");
    assert_eq!(server.url, None);
    assert_eq!(
        server.command.as_deref(),
        Some("gitea-mcp -t stdio --host https://git.nhcarrigan.com")
    );
    assert_eq!(server.transport, "stdio");
    assert_eq!(server.status.as_deref(), Some("Connected"));
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_failed_connection() {
    // A "✗" marker parses into a "Failed to connect" status string.
    let line = "broken-server: https://invalid.com (SSE) - ✗ Failed to connect";

    let parsed = parse_mcp_server_list(line);
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0].status.as_deref(), Some("Failed to connect"));
    assert_eq!(parsed[0].name, "broken-server");
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_multiple() {
    // One server per line; each line's transport is detected independently.
    let output = r#"asana: https://mcp.asana.com/sse (SSE) - ✓ Connected
gitea: gitea-mcp -t stdio (STDIO) - ✓ Connected
notion: https://mcp.notion.so (HTTP) - ✓ Connected"#;

    let parsed = parse_mcp_server_list(output);
    assert_eq!(parsed.len(), 3);

    let summary: Vec<(&str, &str)> = parsed
        .iter()
        .map(|s| (s.name.as_str(), s.transport.as_str()))
        .collect();
    assert_eq!(
        summary,
        [("asana", "sse"), ("gitea", "stdio"), ("notion", "http")]
    );
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_with_checking_line() {
    // The leading progress line ("Checking ...") must not become a server.
    let output = r#"Checking MCP servers...
asana: https://mcp.asana.com/sse (SSE) - ✓ Connected"#;

    let parsed = parse_mcp_server_list(output);
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0].name, "asana");
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_empty() {
    // No CLI output means no servers.
    assert!(parse_mcp_server_list("").is_empty());
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_plugin_provided() {
    // Plugin-provided servers have colons inside their name; the name must
    // still parse intact with the remainder treated as a stdio command.
    let line = "plugin:macrodata:macrodata: plugin macrodata - ✗ Failed to connect";

    let parsed = parse_mcp_server_list(line);
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0].name, "plugin:macrodata:macrodata");
    assert_eq!(parsed[0].command.as_deref(), Some("plugin macrodata"));
    assert_eq!(parsed[0].transport, "stdio");
}
|
||
|
||
// ==================== Edge Case Tests ====================
|
||
|
||
#[test]
fn test_parse_plugin_list_with_unicode_names() {
    // Non-ASCII plugin names (CJK, accented letters, emoji) parse intact.
    let cli_output = r#"❯ 日本語-plugin@marketplace
Version: 1.0.0
Status: ✔ enabled

❯ émoji-🎉-plugin@marketplace
Version: 2.0.0
Status: ✗ disabled"#;

    let parsed = parse_plugin_list(cli_output);
    assert_eq!(parsed.len(), 2);
    assert_eq!(parsed[0].name, "日本語-plugin@marketplace");
    assert!(parsed[0].enabled);
    assert_eq!(parsed[1].name, "émoji-🎉-plugin@marketplace");
    assert!(!parsed[1].enabled);
}
|
||
|
||
#[test]
fn test_parse_plugin_list_missing_version() {
    // A missing Version line leaves the version field as the empty string.
    let cli_output = r#"❯ broken-plugin@marketplace
Status: ✔ enabled"#;

    let parsed = parse_plugin_list(cli_output);
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0].version, "");
    assert!(parsed[0].enabled);
    assert_eq!(parsed[0].name, "broken-plugin@marketplace");
}
|
||
|
||
#[test]
fn test_parse_plugin_list_missing_status() {
    // Without a Status line the plugin defaults to disabled.
    let cli_output = r#"❯ incomplete-plugin@marketplace
Version: 1.0.0"#;

    let parsed = parse_plugin_list(cli_output);
    assert_eq!(parsed.len(), 1);
    assert!(!parsed[0].enabled);
    assert_eq!(parsed[0].version, "1.0.0");
    assert_eq!(parsed[0].name, "incomplete-plugin@marketplace");
}
|
||
|
||
#[test]
fn test_parse_marketplace_list_with_unicode() {
    // Marketplace names and sources may contain non-ASCII characters.
    let cli_output = r#"❯ 日本語-marketplace
Source: github/日本語/repo

❯ emoji-🚀-marketplace
Source: github/emoji/🚀-repo"#;

    let parsed = parse_marketplace_list(cli_output);
    assert_eq!(parsed.len(), 2);
    assert_eq!(parsed[0].name, "日本語-marketplace");
    assert_eq!(parsed[0].source, "github/日本語/repo");
    assert_eq!(parsed[1].name, "emoji-🚀-marketplace");
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_with_unicode_names() {
    // Unicode in server names and URLs must survive parsing.
    let line = "日本語-server: https://example.com/日本語 (SSE) - ✓ Connected";

    let parsed = parse_mcp_server_list(line);
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0].name, "日本語-server");
    assert_eq!(parsed[0].url.as_deref(), Some("https://example.com/日本語"));
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_very_long_command() {
    // Long stdio command strings are kept whole, not truncated.
    let line = "long-cmd: some-binary --flag1 value1 --flag2 value2 --flag3 value3 --flag4 value4 --flag5 value5 --very-long-option with-a-very-long-value - ✓ Connected";

    let parsed = parse_mcp_server_list(line);
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0].name, "long-cmd");
    assert_eq!(
        parsed[0].command.as_deref(),
        Some("some-binary --flag1 value1 --flag2 value2 --flag3 value3 --flag4 value4 --flag5 value5 --very-long-option with-a-very-long-value")
    );
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_no_status() {
    // A line without the " - <status>" suffix yields status == None.
    let line = "pending-server: https://example.com (HTTP)";

    let parsed = parse_mcp_server_list(line);
    assert_eq!(parsed.len(), 1);
    assert_eq!(parsed[0].status, None);
    assert_eq!(parsed[0].name, "pending-server");
}
|
||
|
||
#[test]
fn test_parse_plugin_list_with_extra_whitespace() {
    // Trailing whitespace after the status marker must be tolerated.
    let cli_output = r#"❯ whitespace-plugin@marketplace
Version: 1.0.0
Status: ✔ enabled    "#;

    let parsed = parse_plugin_list(cli_output);
    assert_eq!(parsed.len(), 1);
    assert!(parsed[0].enabled);
    assert_eq!(parsed[0].version, "1.0.0");
    assert_eq!(parsed[0].name, "whitespace-plugin@marketplace");
}
|
||
|
||
#[test]
fn test_parse_mcp_server_list_multiple_with_checking() {
    // The progress line is skipped even when several server lines follow.
    let output = r#"Checking connections...
asana: https://mcp.asana.com/sse (SSE) - ✓ Connected
gitea: gitea-mcp -t stdio (STDIO) - ✓ Connected"#;

    let parsed = parse_mcp_server_list(output);
    assert_eq!(parsed.len(), 2);
    assert_eq!(parsed[0].name, "asana");
    assert_eq!(parsed[1].name, "gitea");
}
|
||
|
||
// ==================== extract_first_heading tests ====================
|
||
|
||
#[test]
fn test_extract_first_heading_returns_heading() {
    // A leading "# " heading is returned without the marker.
    let markdown = "# My Memory File\n\nSome content here.";
    assert_eq!(
        extract_first_heading(markdown),
        Some(String::from("My Memory File"))
    );
}
|
||
|
||
#[test]
fn test_extract_first_heading_ignores_non_h1() {
    // H2/H3 markers do not count as the document title.
    let markdown = "## Section Header\n### Sub-section\nSome content.";
    assert_eq!(extract_first_heading(markdown), None);
}
|
||
|
||
#[test]
fn test_extract_first_heading_finds_first_h1_after_other_lines() {
    // The first H1 is found even when preceded by plain text.
    let markdown = "Some intro text\n\n# The Real Title\n\nMore content.";
    assert_eq!(
        extract_first_heading(markdown),
        Some(String::from("The Real Title"))
    );
}
|
||
|
||
#[test]
fn test_extract_first_heading_trims_whitespace() {
    // Trailing whitespace after the heading text is stripped.
    let markdown = "# Trimmed Heading   \n\nContent.";
    assert_eq!(
        extract_first_heading(markdown),
        Some(String::from("Trimmed Heading"))
    );
}
|
||
|
||
#[test]
fn test_extract_first_heading_returns_none_for_empty_content() {
    // Empty input has no heading at all.
    assert_eq!(extract_first_heading(""), None);
}
|
||
|
||
#[test]
fn test_extract_first_heading_returns_none_for_empty_heading() {
    // "# " with no text after it is not a usable heading.
    let markdown = "# \n\nContent after empty heading.";
    assert_eq!(extract_first_heading(markdown), None);
}
|
||
|
||
#[test]
fn test_extract_first_heading_returns_none_when_no_headings() {
    // Plain prose without any "# " lines yields None.
    let markdown = "Just some plain text.\nNo headings here at all.";
    assert_eq!(extract_first_heading(markdown), None);
}
|
||
|
||
#[test]
fn test_extract_first_heading_handles_leading_whitespace_on_line() {
    // A heading indented with spaces still counts as the first H1.
    let markdown = "  # Indented Heading\n\nContent.";
    assert_eq!(
        extract_first_heading(markdown),
        Some(String::from("Indented Heading"))
    );
}
|
||
|
||
// ==================== open_binary_file E2E path conversion tests ====================
|
||
|
||
/// Build the wslpath command structure without executing it, for cross-platform CI testing.
///
/// Returns the program name ("wsl") and its argument vector
/// (`wslpath -w <path>`), mirroring the invocation used to convert a
/// WSL path to its Windows form.
#[cfg(test)]
fn build_wslpath_command(path: &str) -> (String, Vec<String>) {
    let args = vec![
        "wslpath".to_string(),
        "-w".to_string(),
        path.to_string(),
    ];
    ("wsl".to_string(), args)
}
|
||
|
||
#[test]
fn test_e2e_wslpath_command_structure_pdf() {
    // The converter must shell out via `wsl wslpath -w <path>`.
    let (program, args) = build_wslpath_command("/tmp/mcp_output_abc123.pdf");
    assert_eq!(program, "wsl");
    assert_eq!(args.len(), 3);
    assert_eq!(args, ["wslpath", "-w", "/tmp/mcp_output_abc123.pdf"]);
}
|
||
|
||
#[test]
fn test_e2e_wslpath_command_structure_audio() {
    // Audio files take the same conversion invocation as documents.
    let (program, args) = build_wslpath_command("/tmp/mcp_output_xyz789.mp3");
    assert_eq!(program, "wsl");
    assert_eq!(args[2], "/tmp/mcp_output_xyz789.mp3");
}
|
||
|
||
#[test]
fn test_e2e_wslpath_command_structure_preserves_path() {
    // A path containing spaces is passed through as one argument, unaltered.
    let original = "/home/naomi/documents/report with spaces.pdf";
    let (_, args) = build_wslpath_command(original);
    assert_eq!(args[2], original);
}
|
||
|
||
// On Windows builds, get_global_claude_md resolves the global CLAUDE.md
// inside the WSL home directory, not via dirs::home_dir() (which points at
// the Windows profile, e.g. C:\Users\accou). The dirs-based construction
// below therefore only reflects production behavior on non-Windows targets,
// so the check is gated out of Windows test runs.
#[cfg(not(target_os = "windows"))]
#[test]
fn test_get_global_claude_md_path_construction() {
    // Verify that home_dir() resolves successfully on the test platform
    // and that the joined path has the expected ~/.claude/CLAUDE.md shape.
    let home = dirs::home_dir();
    assert!(home.is_some(), "home_dir() should be available in test environment");
    let expected = home.unwrap().join(".claude").join("CLAUDE.md");
    assert!(expected.to_string_lossy().contains(".claude"));
    assert!(expected.to_string_lossy().ends_with("CLAUDE.md"));
}
|
||
|
||
// save_global_claude_md writes under the WSL home on Windows, so the
// dirs::home_dir()-based directory construction below is only meaningful
// on non-Windows targets (dirs::home_dir() on Windows returns the Windows
// profile directory, not the WSL home where the file actually lives).
#[cfg(not(target_os = "windows"))]
#[test]
fn test_save_global_claude_md_dir_path_construction() {
    // The save directory is ~/.claude on Unix platforms.
    let home = dirs::home_dir();
    assert!(home.is_some());
    let claude_dir = home.unwrap().join(".claude");
    assert!(claude_dir.to_string_lossy().contains(".claude"));
}
|
||
}
|