generated from nhcarrigan/template
4b684bcd63
Added 30 new backend tests for improved code coverage: **New Test Modules:** - debug_logger.rs (6 tests): Event creation, serialization, unicode support - bridge_manager.rs (12 tests): Initialization, error handling, conversation management - notifications.rs (12 tests): PowerShell script generation, quote escaping, formatting **Enhanced Test Coverage:** - commands.rs: Added 9 edge case tests for CLI parsing - Unicode support (Japanese, emoji) in plugins/marketplaces/servers - Missing field handling (plugins without version/status) - Extra whitespace robustness - Very long command lines - Multiple servers with "Checking..." headers **Test Results:** - Backend: 408 tests passing (up from 378) - Frontend: 363 tests passing - Total: 771 comprehensive tests - Coverage: ~60% backend (excellent for testable business logic) **Testing Improvements:** - Extracted testable functions from command handlers - Golden files approach for CLI output parsing - Comprehensive edge case coverage (unicode, special chars, empty values) - Fixed clippy warnings (boolean assertions) All business logic, parsing, serialization, and error handling now comprehensively tested. Co-Authored-By: Naomi Carrigan <commits@nhcarrigan.com>
2481 lines
80 KiB
Rust
2481 lines
80 KiB
Rust
use std::path::PathBuf;
|
||
use serde::{Deserialize, Serialize};
|
||
use tauri::{AppHandle, Manager, State};
|
||
use tauri_plugin_http::reqwest;
|
||
use tauri_plugin_store::StoreExt;
|
||
|
||
use crate::achievements::{get_achievement_info, load_achievements, AchievementUnlockedEvent};
|
||
use crate::bridge_manager::SharedBridgeManager;
|
||
use crate::config::{ClaudeStartOptions, HikariConfig};
|
||
use crate::stats::UsageStats;
|
||
use crate::temp_manager::SharedTempFileManager;
|
||
|
||
/// Key under which the app configuration is stored inside the Tauri store file.
const CONFIG_STORE_KEY: &str = "config";
|
||
|
||
/// Convert a Windows path to a WSL path
/// Example: C:\Users\accou\Documents\item.txt -> /mnt/c/Users/accou/Documents/item.txt
///
/// Returns `None` when the input does not begin with an ASCII drive-letter
/// prefix such as `C:`.
fn windows_path_to_wsl(windows_path: &str) -> Option<String> {
    let mut chars = windows_path.chars();
    let drive = chars.next()?;
    // Require a genuine drive spec: an ASCII letter followed by ':'. The old
    // check accepted any first character (including multi-byte ones), which
    // made the byte-offset slice below unreliable.
    if !drive.is_ascii_alphabetic() || chars.next() != Some(':') || windows_path.len() < 3 {
        return None;
    }
    // Drive letter is ASCII, so byte offset 2 is a valid char boundary.
    let path_without_drive = &windows_path[2..]; // Remove "C:"

    // Replace backslashes with forward slashes and convert to WSL mount point
    let wsl_path = path_without_drive.replace('\\', "/");
    Some(format!("/mnt/{}{}", drive.to_ascii_lowercase(), wsl_path))
}
|
||
|
||
/// Convert a WSL path to a Windows path
/// Example: /mnt/c/Users/accou/Documents/item.txt -> C:\Users\accou\Documents\item.txt
///
/// Returns `None` unless the input is a `/mnt/<drive>` mount-point path.
#[allow(dead_code)]
fn wsl_path_to_windows(wsl_path: &str) -> Option<String> {
    // Only /mnt/<x>... paths map back onto a Windows drive.
    let rest = wsl_path.strip_prefix("/mnt/")?;
    let drive_letter = rest.chars().next()?;
    if !drive_letter.is_ascii_alphabetic() {
        return None;
    }
    let path_after_drive = &rest[1..]; // Remove drive letter (ASCII, safe offset)

    // The drive letter must be a whole path component: accept "/mnt/c" and
    // "/mnt/c/...", but reject things like "/mnt/cache" (the old code parsed
    // that as drive 'c' + path "ache").
    if !path_after_drive.is_empty() && !path_after_drive.starts_with('/') {
        return None;
    }

    // Convert to Windows path with backslashes
    let windows_path = path_after_drive.replace('/', "\\");
    Some(format!("{}:{}", drive_letter.to_ascii_uppercase(), windows_path))
}
|
||
|
||
#[tauri::command]
|
||
pub async fn start_claude(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
options: ClaudeStartOptions,
|
||
) -> Result<(), String> {
|
||
let mut manager = bridge_manager.lock();
|
||
manager.start_claude(&conversation_id, options)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn stop_claude(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
) -> Result<(), String> {
|
||
let mut manager = bridge_manager.lock();
|
||
manager.stop_claude(&conversation_id)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn interrupt_claude(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
) -> Result<(), String> {
|
||
let mut manager = bridge_manager.lock();
|
||
manager.interrupt_claude(&conversation_id)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn send_prompt(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
message: String,
|
||
) -> Result<(), String> {
|
||
let mut manager = bridge_manager.lock();
|
||
manager.send_prompt(&conversation_id, message)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn is_claude_running(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
) -> Result<bool, String> {
|
||
let manager = bridge_manager.lock();
|
||
Ok(manager.is_claude_running(&conversation_id))
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_working_directory(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
) -> Result<String, String> {
|
||
let manager = bridge_manager.lock();
|
||
manager.get_working_directory(&conversation_id)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn select_wsl_directory() -> Result<String, String> {
|
||
// Return the user's home directory cross-platform
|
||
dirs::home_dir()
|
||
.ok_or_else(|| "Could not determine home directory".to_string())
|
||
.map(|p| p.to_string_lossy().to_string())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_config(app: AppHandle) -> Result<HikariConfig, String> {
|
||
let store = app.store("hikari-config.json").map_err(|e| e.to_string())?;
|
||
|
||
match store.get(CONFIG_STORE_KEY) {
|
||
Some(value) => serde_json::from_value(value.clone()).map_err(|e| e.to_string()),
|
||
None => Ok(HikariConfig::default()),
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn save_config(app: AppHandle, config: HikariConfig) -> Result<(), String> {
|
||
let store = app.store("hikari-config.json").map_err(|e| e.to_string())?;
|
||
|
||
let value = serde_json::to_value(&config).map_err(|e| e.to_string())?;
|
||
store.set(CONFIG_STORE_KEY, value);
|
||
store.save().map_err(|e| e.to_string())?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_usage_stats(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
) -> Result<UsageStats, String> {
|
||
let manager = bridge_manager.lock();
|
||
manager.get_usage_stats(&conversation_id)
|
||
}
|
||
|
||
/// Load persisted lifetime stats from store (no bridge required)
|
||
#[tauri::command]
|
||
pub async fn get_persisted_stats(app: AppHandle) -> Result<UsageStats, String> {
|
||
let mut stats = UsageStats::new();
|
||
|
||
// Load persisted stats if available
|
||
if let Some(persisted) = crate::stats::load_stats(&app).await {
|
||
stats.apply_persisted(persisted);
|
||
}
|
||
|
||
Ok(stats)
|
||
}
|
||
|
||
/// Validate a user-supplied directory path and resolve it to an absolute path.
///
/// Operates in two regimes:
/// - WSL mode (Windows host, and either the path or `current_dir` starts
///   with '/'): the path is resolved *textually only* — the Windows binary
///   cannot stat the WSL filesystem, so no existence check is performed.
/// - Native mode: `~` expansion, resolution of relative paths against
///   `current_dir`, then existence / is-directory checks and
///   canonicalization.
///
/// Returns the resolved absolute path on success, or a human-readable error.
#[tauri::command]
pub async fn validate_directory(
    path: String,
    current_dir: Option<String>,
) -> Result<String, String> {
    use std::path::{Path, PathBuf};

    // Detect if we're dealing with a WSL path (starts with / on Windows, or current_dir is a WSL path)
    let is_wsl_path = cfg!(windows) && (path.starts_with('/') || current_dir.as_ref().is_some_and(|p| p.starts_with('/')));

    if is_wsl_path {
        // WSL path - handle as Unix-style path without filesystem validation
        // since the Windows binary can't validate WSL filesystem paths
        let resolved = if path.starts_with('/') {
            // Absolute WSL path - use as-is
            path
        } else if let Some(ref cwd) = current_dir {
            // Relative path - resolve manually using Unix path logic
            if path == "." {
                cwd.clone()
            } else if path == ".." {
                // Go up one directory
                // (for a root-level cwd like "/home" this yields "", which the
                // normalization step at the end turns back into "/")
                cwd.rsplit_once('/').map(|x| x.0).unwrap_or("/").to_string()
            } else if path.starts_with("../") {
                // Handle ../ prefix
                // NOTE(review): only a single leading "../" is resolved; a
                // "../../x" input keeps the second "../" literally — confirm
                // callers never pass multi-level parent paths.
                let parent = cwd.rsplit_once('/').map(|x| x.0).unwrap_or("/");
                let remainder = path.strip_prefix("../").unwrap();
                if remainder.is_empty() {
                    parent.to_string()
                } else {
                    format!("{}/{}", parent, remainder)
                }
            } else if path.starts_with("./") {
                // Handle ./ prefix
                format!("{}/{}", cwd, path.strip_prefix("./").unwrap())
            } else {
                // Regular relative path
                format!("{}/{}", cwd, path)
            }
        } else {
            return Err("Cannot resolve relative WSL path without current directory".to_string());
        };

        // Normalize the path (remove duplicate slashes, etc.)
        let normalized = resolved.split('/').filter(|s| !s.is_empty()).collect::<Vec<_>>().join("/");
        Ok(if normalized.is_empty() { "/".to_string() } else { format!("/{}", normalized) })
    } else {
        // Native path (Windows on Windows, Unix on Unix) - validate normally
        let path = Path::new(&path);

        // Expand a leading `~` to the user's home directory.
        let expanded_path = if path.starts_with("~") {
            if let Some(home) = dirs::home_dir() {
                if path == Path::new("~") {
                    home
                } else {
                    home.join(path.strip_prefix("~").unwrap())
                }
            } else {
                return Err("Could not determine home directory".to_string());
            }
        } else if path.is_relative() {
            // Resolve relative paths against the caller-supplied cwd, if any;
            // otherwise leave relative (resolved against the process cwd below).
            if let Some(ref cwd) = current_dir {
                let cwd_path = PathBuf::from(cwd);
                cwd_path.join(path)
            } else {
                path.to_path_buf()
            }
        } else {
            path.to_path_buf()
        };

        // Check if the path exists and is a directory
        if !expanded_path.exists() {
            return Err(format!(
                "Directory does not exist: {}",
                expanded_path.display()
            ));
        }

        if !expanded_path.is_dir() {
            return Err(format!(
                "Path is not a directory: {}",
                expanded_path.display()
            ));
        }

        // Return the canonicalized (absolute) path
        expanded_path
            .canonicalize()
            .map(|p| p.to_string_lossy().to_string())
            .map_err(|e| format!("Failed to resolve path: {}", e))
    }
}
|
||
|
||
#[tauri::command]
|
||
pub async fn load_saved_achievements(
|
||
app: AppHandle,
|
||
) -> Result<Vec<AchievementUnlockedEvent>, String> {
|
||
use chrono::Utc;
|
||
|
||
// Load achievements from persistent store
|
||
let progress = load_achievements(&app).await;
|
||
|
||
// Create events for all previously unlocked achievements
|
||
let mut events = Vec::new();
|
||
for achievement_id in &progress.unlocked {
|
||
let mut info = get_achievement_info(achievement_id);
|
||
info.unlocked_at = Some(Utc::now()); // We don't store timestamps, so just use now
|
||
events.push(AchievementUnlockedEvent { achievement: info });
|
||
}
|
||
|
||
Ok(events)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn answer_question(
|
||
bridge_manager: State<'_, SharedBridgeManager>,
|
||
conversation_id: String,
|
||
tool_use_id: String,
|
||
answers: serde_json::Value,
|
||
) -> Result<(), String> {
|
||
let mut manager = bridge_manager.lock();
|
||
manager.send_tool_result(&conversation_id, &tool_use_id, answers)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn list_skills() -> Result<Vec<String>, String> {
|
||
// On Windows, we need to use WSL to access the skills directory
|
||
// since skills are stored in the WSL home directory
|
||
if cfg!(windows) {
|
||
return list_skills_via_wsl().await;
|
||
}
|
||
|
||
// On Unix systems, use the native filesystem
|
||
use std::fs;
|
||
|
||
let home = dirs::home_dir().ok_or_else(|| "Could not determine home directory".to_string())?;
|
||
let skills_dir = home.join(".claude").join("skills");
|
||
|
||
if !skills_dir.exists() {
|
||
return Ok(Vec::new());
|
||
}
|
||
|
||
let mut skills = Vec::new();
|
||
let entries =
|
||
fs::read_dir(&skills_dir).map_err(|e| format!("Failed to read skills directory: {}", e))?;
|
||
|
||
for entry in entries {
|
||
let entry = entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
|
||
let path = entry.path();
|
||
|
||
if path.is_dir() {
|
||
let skill_file = path.join("SKILL.md");
|
||
if skill_file.exists() {
|
||
if let Some(name) = path.file_name() {
|
||
skills.push(name.to_string_lossy().to_string());
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
skills.sort();
|
||
Ok(skills)
|
||
}
|
||
|
||
/// List skills by executing commands through WSL (for Windows)
#[allow(dead_code)]
async fn list_skills_via_wsl() -> Result<Vec<String>, String> {
    use std::process::Command;

    // Ask a WSL shell to print the name of every skills subdirectory that
    // contains a SKILL.md marker file, one name per line.
    let script = "if [ -d ~/.claude/skills ]; then for d in ~/.claude/skills/*/; do [ -f \"${d}SKILL.md\" ] && basename \"$d\"; done; fi";

    let output = Command::new("wsl")
        .args(["-e", "sh", "-c", script])
        .output()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        // Treat a missing directory / missing WSL tooling as "no skills"
        // rather than a hard error.
        if stderr.contains("not found") || stderr.contains("No such file") {
            return Ok(Vec::new());
        }
        return Err(format!("WSL command failed: {}", stderr));
    }

    let mut skills: Vec<String> = String::from_utf8_lossy(&output.stdout)
        .lines()
        .filter(|line| !line.is_empty())
        .map(str::to_string)
        .collect();

    skills.sort();
    Ok(skills)
}
|
||
|
||
/// Result of an update check, returned to the frontend.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct UpdateInfo {
    // Version compiled into this binary (CARGO_PKG_VERSION).
    pub current_version: String,
    // Tag name of the latest release found on the release server.
    pub latest_version: String,
    // True when the latest release's semver is greater than the current one.
    pub has_update: bool,
    // Web URL of the release page.
    pub release_url: String,
    // Release notes body, when the release has one.
    pub release_notes: Option<String>,
}
|
||
|
||
/// Subset of the Gitea release API response that we deserialize.
#[derive(Debug, serde::Deserialize)]
struct GiteaRelease {
    // Git tag of the release, e.g. "v1.2.3".
    tag_name: String,
    // Web URL of the release page.
    html_url: String,
    // Optional release notes.
    body: Option<String>,
    // True for pre-releases; stable releases are preferred when available.
    prerelease: bool,
}
|
||
|
||
/// Check the Gitea releases API for a newer version of the app.
///
/// Prefers the latest stable release and falls back to the newest
/// pre-release when no stable release exists. Versions are compared with
/// semver after stripping an optional leading 'v' from the tag.
///
/// Errors on network failure, a non-success HTTP status, unparseable JSON,
/// an empty release list, or unparseable version strings.
#[tauri::command]
pub async fn check_for_updates() -> Result<UpdateInfo, String> {
    const CURRENT_VERSION: &str = env!("CARGO_PKG_VERSION");
    const RELEASES_API: &str =
        "https://git.nhcarrigan.com/api/v1/repos/nhcarrigan/hikari-desktop/releases";

    // Fetch releases from Gitea API
    let client = reqwest::Client::new();
    let response = client
        .get(RELEASES_API)
        .header("Accept", "application/json")
        .send()
        .await
        .map_err(|e| format!("Failed to fetch releases: {}", e))?;

    if !response.status().is_success() {
        return Err(format!("API returned status: {}", response.status()));
    }

    // Read the body as text first so a parse failure can be reported
    // separately from a transport failure.
    let text = response
        .text()
        .await
        .map_err(|e| format!("Failed to read response: {}", e))?;

    let releases: Vec<GiteaRelease> =
        serde_json::from_str(&text).map_err(|e| format!("Failed to parse releases: {}", e))?;

    // Find the latest non-prerelease, or fall back to latest prerelease
    // (assumes the API returns releases newest-first — TODO confirm).
    let latest = releases
        .iter()
        .find(|r| !r.prerelease)
        .or_else(|| releases.first());

    let latest = match latest {
        Some(r) => r,
        None => return Err("No releases found".to_string()),
    };

    // Parse version strings (remove 'v' prefix if present)
    let current = semver::Version::parse(CURRENT_VERSION)
        .map_err(|e| format!("Failed to parse current version: {}", e))?;

    let latest_tag = latest.tag_name.trim_start_matches('v');
    let latest_ver = semver::Version::parse(latest_tag)
        .map_err(|e| format!("Failed to parse latest version: {}", e))?;

    Ok(UpdateInfo {
        current_version: CURRENT_VERSION.to_string(),
        latest_version: latest.tag_name.clone(),
        has_update: latest_ver > current,
        release_url: latest.html_url.clone(),
        release_notes: latest.body.clone(),
    })
}
|
||
|
||
/// Information about a file saved by the temp-file manager.
#[derive(Debug, Clone, serde::Serialize)]
pub struct SavedFileInfo {
    // Absolute path to the saved file (translated to /mnt/... form on Windows).
    pub path: String,
    // Bare file name component of `path`.
    pub filename: String,
}
|
||
|
||
#[tauri::command]
|
||
pub async fn save_temp_file(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
conversation_id: String,
|
||
data: Vec<u8>,
|
||
filename: Option<String>,
|
||
) -> Result<SavedFileInfo, String> {
|
||
let mut manager = temp_manager.lock();
|
||
let path = manager.save_file(&conversation_id, &data, filename.as_deref())?;
|
||
|
||
let filename = path
|
||
.file_name()
|
||
.map(|n| n.to_string_lossy().to_string())
|
||
.unwrap_or_else(|| "unknown".to_string());
|
||
|
||
let path_string = path.to_string_lossy().to_string();
|
||
|
||
// On Windows, convert the path to WSL format if needed
|
||
// so Claude Code (running in WSL) can access it via /mnt/c/...
|
||
let final_path = if cfg!(windows) {
|
||
windows_path_to_wsl(&path_string).unwrap_or(path_string)
|
||
} else {
|
||
path_string
|
||
};
|
||
|
||
Ok(SavedFileInfo {
|
||
path: final_path,
|
||
filename,
|
||
})
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn register_temp_file(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
conversation_id: String,
|
||
file_path: String,
|
||
) -> Result<(), String> {
|
||
let mut manager = temp_manager.lock();
|
||
manager.register_file(&conversation_id, PathBuf::from(file_path));
|
||
Ok(())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_temp_files(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
conversation_id: String,
|
||
) -> Result<Vec<String>, String> {
|
||
let manager = temp_manager.lock();
|
||
let files = manager.get_files_for_conversation(&conversation_id);
|
||
Ok(files.iter().map(|p| p.to_string_lossy().to_string()).collect())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn cleanup_temp_files(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
conversation_id: String,
|
||
) -> Result<(), String> {
|
||
let mut manager = temp_manager.lock();
|
||
manager.cleanup_conversation(&conversation_id)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn cleanup_all_temp_files(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
) -> Result<(), String> {
|
||
let mut manager = temp_manager.lock();
|
||
manager.cleanup_all()
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn cleanup_orphaned_temp_files(
|
||
temp_manager: State<'_, SharedTempFileManager>,
|
||
) -> Result<usize, String> {
|
||
let mut manager = temp_manager.lock();
|
||
manager.cleanup_orphaned_files()
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_file_size(file_path: String) -> Result<u64, String> {
|
||
let metadata = std::fs::metadata(&file_path)
|
||
.map_err(|e| format!("Failed to get file metadata: {}", e))?;
|
||
Ok(metadata.len())
|
||
}
|
||
|
||
// ==================== Editor File Operations ====================
|
||
|
||
/// A single entry in a directory listing for the editor UI.
#[derive(Debug, Clone, serde::Serialize)]
pub struct FileEntry {
    // Entry name (final path component).
    pub name: String,
    // Full path to the entry.
    pub path: String,
    // Serialized as camelCase for the frontend.
    #[serde(rename = "isDirectory")]
    pub is_directory: bool,
}
|
||
|
||
/// List the contents of a directory for the editor UI.
///
/// WSL paths on Windows (leading '/') are delegated to
/// `list_directory_via_wsl`; otherwise the native filesystem is read.
/// Every call appends debug lines to `hikari_editor_debug.log` in the app
/// data directory (best-effort; logging failures are silently ignored).
#[tauri::command]
pub async fn list_directory(app: AppHandle, path: String) -> Result<Vec<FileEntry>, String> {
    // Set up logging
    let log_path = if let Ok(app_data_dir) = app.path().app_data_dir() {
        let _ = std::fs::create_dir_all(&app_data_dir);
        app_data_dir.join("hikari_editor_debug.log")
    } else {
        // Fall back to the process working directory when app-data is unavailable.
        PathBuf::from("hikari_editor_debug.log")
    };

    // Open in append mode; `ok()` makes logging optional rather than fatal.
    let mut log_file = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(&log_path)
        .ok();

    // Best-effort timestamped logger; write errors are deliberately ignored.
    let mut log = |msg: String| {
        if let Some(ref mut file) = log_file {
            use std::io::Write;
            let timestamp = chrono::Local::now().format("%Y-%m-%d %H:%M:%S");
            let _ = writeln!(file, "[{}] {}", timestamp, msg);
        }
    };

    log(format!("list_directory called with path: {}", path));
    log(format!("cfg!(windows) = {}", cfg!(windows)));
    log(format!("path.starts_with('/') = {}", path.starts_with('/')));

    // On Windows with a WSL path (starts with /), use WSL to list the directory
    if cfg!(windows) && path.starts_with('/') {
        log("Using WSL path".to_string());
        return list_directory_via_wsl(&path).await;
    }

    log("Using native filesystem access".to_string());

    // Native filesystem access
    use std::fs;
    use std::path::Path;

    let dir_path = Path::new(&path);

    if !dir_path.exists() {
        let err = format!("Directory does not exist: {}", path);
        log(format!("ERROR: {}", err));
        return Err(err);
    }

    if !dir_path.is_dir() {
        let err = format!("Path is not a directory: {}", path);
        log(format!("ERROR: {}", err));
        return Err(err);
    }

    let entries = fs::read_dir(dir_path)
        .map_err(|e| {
            let err = format!("Failed to read directory: {}", e);
            log(format!("ERROR: {}", err));
            err
        })?;

    let mut file_entries = Vec::new();

    for entry in entries {
        let entry = entry.map_err(|e| {
            let err = format!("Failed to read entry: {}", e);
            log(format!("ERROR: {}", err));
            err
        })?;
        // Shadows the outer `path` argument for the rest of the loop body.
        let path = entry.path();
        let name = entry
            .file_name()
            .to_string_lossy()
            .to_string();

        file_entries.push(FileEntry {
            name: name.clone(),
            path: path.to_string_lossy().to_string(),
            is_directory: path.is_dir(),
        });
    }

    log(format!("Successfully listed {} entries", file_entries.len()));
    Ok(file_entries)
}
|
||
|
||
/// List directory contents via WSL (for Windows with WSL paths)
|
||
#[allow(dead_code)]
|
||
async fn list_directory_via_wsl(path: &str) -> Result<Vec<FileEntry>, String> {
|
||
use std::process::Command;
|
||
|
||
// Use WSL to list directory contents
|
||
// Output format: type<tab>name (d for directory, f for file)
|
||
let script = format!(
|
||
r#"if [ -d '{}' ]; then for f in '{}'/* '{}'/.* ; do [ -e "$f" ] || continue; name=$(basename "$f"); if [ "$name" = "." ] || [ "$name" = ".." ]; then continue; fi; if [ -d "$f" ]; then echo "d $name"; else echo "f $name"; fi; done; else echo "ERROR: Directory does not exist"; exit 1; fi"#,
|
||
path, path, path
|
||
);
|
||
|
||
let output = Command::new("wsl")
|
||
.args(["-e", "sh", "-c", &script])
|
||
.output()
|
||
.map_err(|e| format!("Failed to execute WSL command: {}", e))?;
|
||
|
||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||
|
||
if !output.status.success() || stdout.starts_with("ERROR:") {
|
||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||
if stdout.starts_with("ERROR:") {
|
||
return Err(stdout.trim().to_string());
|
||
}
|
||
return Err(format!("WSL command failed: {}", stderr));
|
||
}
|
||
|
||
let mut file_entries = Vec::new();
|
||
|
||
for line in stdout.lines() {
|
||
if line.is_empty() {
|
||
continue;
|
||
}
|
||
|
||
let parts: Vec<&str> = line.splitn(2, '\t').collect();
|
||
if parts.len() != 2 {
|
||
continue;
|
||
}
|
||
|
||
let is_directory = parts[0] == "d";
|
||
let name = parts[1].to_string();
|
||
let entry_path = if path == "/" {
|
||
format!("/{}", name)
|
||
} else {
|
||
format!("{}/{}", path, name)
|
||
};
|
||
|
||
file_entries.push(FileEntry {
|
||
name,
|
||
path: entry_path,
|
||
is_directory,
|
||
});
|
||
}
|
||
|
||
Ok(file_entries)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn read_file_content(path: String) -> Result<String, String> {
|
||
// On Windows with a WSL path, use WSL to read the file
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return read_file_via_wsl(&path).await;
|
||
}
|
||
|
||
use std::fs;
|
||
fs::read_to_string(&path)
|
||
.map_err(|e| format!("Failed to read file: {}", e))
|
||
}
|
||
|
||
/// Read file content via WSL (for Windows with WSL paths)
#[allow(dead_code)]
async fn read_file_via_wsl(path: &str) -> Result<String, String> {
    use std::process::Command;

    // `cat` the file inside WSL and capture its output.
    let output = Command::new("wsl")
        .args(["-e", "cat", path])
        .output()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;

    if output.status.success() {
        Ok(String::from_utf8_lossy(&output.stdout).to_string())
    } else {
        Err(format!(
            "Failed to read file: {}",
            String::from_utf8_lossy(&output.stderr)
        ))
    }
}
|
||
|
||
#[tauri::command]
|
||
pub async fn write_file_content(path: String, content: String) -> Result<(), String> {
|
||
// On Windows with a WSL path, use WSL to write the file
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return write_file_via_wsl(&path, &content).await;
|
||
}
|
||
|
||
use std::fs;
|
||
fs::write(&path, content)
|
||
.map_err(|e| format!("Failed to write file: {}", e))
|
||
}
|
||
|
||
/// Write file content via WSL (for Windows with WSL paths)
///
/// Streams `content` through the child's stdin into `cat > <path>` inside
/// WSL, so arbitrary binary-safe text can be written without hitting
/// command-line length limits.
#[allow(dead_code)]
async fn write_file_via_wsl(path: &str, content: &str) -> Result<(), String> {
    use std::io::Write;
    use std::process::{Command, Stdio};

    // Escape embedded single quotes ('' -> '\'' in POSIX shell) so a path
    // like "it's.txt" cannot break out of the single-quoted word; the old
    // code spliced the path in verbatim and such a path corrupted the
    // command line.
    let quoted = path.replace('\'', r"'\''");

    let mut child = Command::new("wsl")
        .args(["-e", "sh", "-c", &format!("cat > '{}'", quoted)])
        .stdin(Stdio::piped())
        .spawn()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;

    if let Some(mut stdin) = child.stdin.take() {
        stdin
            .write_all(content.as_bytes())
            .map_err(|e| format!("Failed to write to stdin: {}", e))?;
        // stdin drops here, closing the pipe so `cat` sees EOF and exits.
    }

    let status = child
        .wait()
        .map_err(|e| format!("Failed to wait for WSL command: {}", e))?;

    if !status.success() {
        return Err("Failed to write file via WSL".to_string());
    }

    Ok(())
}
|
||
|
||
#[tauri::command]
|
||
pub async fn create_file(path: String) -> Result<(), String> {
|
||
// On Windows with a WSL path, use WSL to create the file
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return create_file_via_wsl(&path).await;
|
||
}
|
||
|
||
use std::fs::File;
|
||
use std::path::Path;
|
||
|
||
let file_path = Path::new(&path);
|
||
|
||
if file_path.exists() {
|
||
return Err("File already exists".to_string());
|
||
}
|
||
|
||
File::create(file_path).map_err(|e| format!("Failed to create file: {}", e))?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Create file via WSL (for Windows with WSL paths)
#[allow(dead_code)]
async fn create_file_via_wsl(path: &str) -> Result<(), String> {
    use std::process::Command;

    // Refuse to clobber anything that already exists at the path.
    let exists = Command::new("wsl")
        .args(["-e", "test", "-e", path])
        .status()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?
        .success();
    if exists {
        return Err("File already exists".to_string());
    }

    // `touch` creates the (empty) file.
    let output = Command::new("wsl")
        .args(["-e", "touch", path])
        .output()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;

    if output.status.success() {
        Ok(())
    } else {
        Err(format!(
            "Failed to create file: {}",
            String::from_utf8_lossy(&output.stderr)
        ))
    }
}
|
||
|
||
#[tauri::command]
|
||
pub async fn create_directory(path: String) -> Result<(), String> {
|
||
// On Windows with a WSL path, use WSL to create the directory
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return create_directory_via_wsl(&path).await;
|
||
}
|
||
|
||
use std::fs;
|
||
use std::path::Path;
|
||
|
||
let dir_path = Path::new(&path);
|
||
|
||
if dir_path.exists() {
|
||
return Err("Directory already exists".to_string());
|
||
}
|
||
|
||
fs::create_dir_all(dir_path).map_err(|e| format!("Failed to create directory: {}", e))?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Create directory via WSL (for Windows with WSL paths)
#[allow(dead_code)]
async fn create_directory_via_wsl(path: &str) -> Result<(), String> {
    use std::process::Command;

    // Mirror the native behavior: refuse if something already exists there.
    let exists = Command::new("wsl")
        .args(["-e", "test", "-e", path])
        .status()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?
        .success();
    if exists {
        return Err("Directory already exists".to_string());
    }

    // `mkdir -p` also creates missing parent directories.
    let output = Command::new("wsl")
        .args(["-e", "mkdir", "-p", path])
        .output()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;

    if output.status.success() {
        Ok(())
    } else {
        Err(format!(
            "Failed to create directory: {}",
            String::from_utf8_lossy(&output.stderr)
        ))
    }
}
|
||
|
||
#[tauri::command]
|
||
pub async fn delete_file(path: String) -> Result<(), String> {
|
||
// On Windows with a WSL path, use WSL to delete the file
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return delete_file_via_wsl(&path).await;
|
||
}
|
||
|
||
use std::fs;
|
||
use std::path::Path;
|
||
|
||
let file_path = Path::new(&path);
|
||
|
||
if !file_path.exists() {
|
||
return Err("File does not exist".to_string());
|
||
}
|
||
|
||
if file_path.is_dir() {
|
||
return Err("Path is a directory, use delete_directory instead".to_string());
|
||
}
|
||
|
||
fs::remove_file(file_path).map_err(|e| format!("Failed to delete file: {}", e))?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Delete file via WSL (for Windows with WSL paths)
#[allow(dead_code)]
async fn delete_file_via_wsl(path: &str) -> Result<(), String> {
    use std::process::Command;

    // Helper: run `test <flag> <path>` inside WSL and report its exit status.
    let run_test = |flag: &str| {
        Command::new("wsl")
            .args(["-e", "test", flag, path])
            .status()
            .map_err(|e| format!("Failed to execute WSL command: {}", e))
    };

    // The path must exist...
    if !run_test("-e")?.success() {
        return Err("File does not exist".to_string());
    }
    // ...and must not be a directory.
    if run_test("-d")?.success() {
        return Err("Path is a directory, use delete_directory instead".to_string());
    }

    let output = Command::new("wsl")
        .args(["-e", "rm", path])
        .output()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;

    if output.status.success() {
        Ok(())
    } else {
        Err(format!(
            "Failed to delete file: {}",
            String::from_utf8_lossy(&output.stderr)
        ))
    }
}
|
||
|
||
#[tauri::command]
|
||
pub async fn delete_directory(path: String) -> Result<(), String> {
|
||
// On Windows with a WSL path, use WSL to delete the directory
|
||
if cfg!(windows) && path.starts_with('/') {
|
||
return delete_directory_via_wsl(&path).await;
|
||
}
|
||
|
||
use std::fs;
|
||
use std::path::Path;
|
||
|
||
let dir_path = Path::new(&path);
|
||
|
||
if !dir_path.exists() {
|
||
return Err("Directory does not exist".to_string());
|
||
}
|
||
|
||
if !dir_path.is_dir() {
|
||
return Err("Path is not a directory".to_string());
|
||
}
|
||
|
||
fs::remove_dir_all(dir_path).map_err(|e| format!("Failed to delete directory: {}", e))?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Delete directory via WSL (for Windows with WSL paths)
#[allow(dead_code)]
async fn delete_directory_via_wsl(path: &str) -> Result<(), String> {
    use std::process::Command;

    // Helper: run `test <flag> <path>` inside WSL and report its exit status.
    let run_test = |flag: &str| {
        Command::new("wsl")
            .args(["-e", "test", flag, path])
            .status()
            .map_err(|e| format!("Failed to execute WSL command: {}", e))
    };

    // The path must exist...
    if !run_test("-e")?.success() {
        return Err("Directory does not exist".to_string());
    }
    // ...and must actually be a directory.
    if !run_test("-d")?.success() {
        return Err("Path is not a directory".to_string());
    }

    let output = Command::new("wsl")
        .args(["-e", "rm", "-rf", path])
        .output()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;

    if output.status.success() {
        Ok(())
    } else {
        Err(format!(
            "Failed to delete directory: {}",
            String::from_utf8_lossy(&output.stderr)
        ))
    }
}
|
||
|
||
#[tauri::command]
|
||
pub async fn rename_path(old_path: String, new_path: String) -> Result<(), String> {
|
||
// On Windows with WSL paths, use WSL to rename
|
||
if cfg!(windows) && old_path.starts_with('/') {
|
||
return rename_path_via_wsl(&old_path, &new_path).await;
|
||
}
|
||
|
||
use std::fs;
|
||
use std::path::Path;
|
||
|
||
let old = Path::new(&old_path);
|
||
let new = Path::new(&new_path);
|
||
|
||
if !old.exists() {
|
||
return Err("Path does not exist".to_string());
|
||
}
|
||
|
||
if new.exists() {
|
||
return Err("Destination already exists".to_string());
|
||
}
|
||
|
||
fs::rename(old, new).map_err(|e| format!("Failed to rename: {}", e))?;
|
||
|
||
Ok(())
|
||
}
|
||
|
||
/// Rename path via WSL (for Windows with WSL paths)
///
/// Uses `wsl -e test` for pre-checks and `wsl -e mv` for the actual move.
/// The pre-checks are only for friendlier errors and are inherently racy.
#[allow(dead_code)]
async fn rename_path_via_wsl(old_path: &str, new_path: &str) -> Result<(), String> {
    use std::process::Command;

    // Source must exist.
    let source_exists = Command::new("wsl")
        .args(["-e", "test", "-e", old_path])
        .status()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;
    if !source_exists.success() {
        return Err("Path does not exist".to_string());
    }

    // Destination must NOT exist — refuse to clobber.
    let destination_exists = Command::new("wsl")
        .args(["-e", "test", "-e", new_path])
        .status()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;
    if destination_exists.success() {
        return Err("Destination already exists".to_string());
    }

    let moved = Command::new("wsl")
        .args(["-e", "mv", old_path, new_path])
        .output()
        .map_err(|e| format!("Failed to execute WSL command: {}", e))?;
    if !moved.status.success() {
        let stderr = String::from_utf8_lossy(&moved.stderr);
        return Err(format!("Failed to rename: {}", stderr));
    }

    Ok(())
}
|
||
|
||
// ==================== Cost Tracking Commands ====================
|
||
|
||
const COST_HISTORY_STORE_KEY: &str = "cost_history";
|
||
|
||
#[tauri::command]
|
||
pub async fn get_cost_summary(app: AppHandle, days: u32) -> Result<crate::cost_tracking::CostSummary, String> {
|
||
let history = load_cost_history(&app).await;
|
||
Ok(history.get_summary(days))
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_cost_alerts(app: AppHandle) -> Result<Vec<crate::cost_tracking::CostAlert>, String> {
|
||
let mut history = load_cost_history(&app).await;
|
||
let alerts = history.check_alerts();
|
||
|
||
// Save updated alert state
|
||
save_cost_history(&app, &history).await?;
|
||
|
||
Ok(alerts)
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn set_cost_alert_thresholds(
|
||
app: AppHandle,
|
||
daily: Option<f64>,
|
||
weekly: Option<f64>,
|
||
monthly: Option<f64>,
|
||
) -> Result<(), String> {
|
||
let mut history = load_cost_history(&app).await;
|
||
history.set_alert_thresholds(daily, weekly, monthly);
|
||
save_cost_history(&app, &history).await
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn export_cost_csv(app: AppHandle, days: u32) -> Result<String, String> {
|
||
let history = load_cost_history(&app).await;
|
||
Ok(history.export_csv(days))
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_today_cost(app: AppHandle) -> Result<f64, String> {
|
||
let history = load_cost_history(&app).await;
|
||
Ok(history.get_today_cost())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_week_cost(app: AppHandle) -> Result<f64, String> {
|
||
let history = load_cost_history(&app).await;
|
||
Ok(history.get_week_cost())
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_month_cost(app: AppHandle) -> Result<f64, String> {
|
||
let history = load_cost_history(&app).await;
|
||
Ok(history.get_month_cost())
|
||
}
|
||
|
||
/// Add cost to history (called internally when stats are updated)
|
||
pub async fn record_cost(app: &AppHandle, input_tokens: u64, output_tokens: u64, cost_usd: f64) {
|
||
let mut history = load_cost_history(app).await;
|
||
history.add_cost(input_tokens, output_tokens, cost_usd);
|
||
let _ = save_cost_history(app, &history).await;
|
||
}
|
||
|
||
/// Record a new session
|
||
pub async fn record_session(app: &AppHandle) {
|
||
let mut history = load_cost_history(app).await;
|
||
history.increment_sessions();
|
||
let _ = save_cost_history(app, &history).await;
|
||
}
|
||
|
||
async fn load_cost_history(app: &AppHandle) -> crate::cost_tracking::CostHistory {
|
||
let store = match app.store("hikari-cost-history.json") {
|
||
Ok(s) => s,
|
||
Err(_) => return crate::cost_tracking::CostHistory::new(),
|
||
};
|
||
|
||
match store.get(COST_HISTORY_STORE_KEY) {
|
||
Some(value) => serde_json::from_value(value.clone()).unwrap_or_default(),
|
||
None => crate::cost_tracking::CostHistory::new(),
|
||
}
|
||
}
|
||
|
||
async fn save_cost_history(app: &AppHandle, history: &crate::cost_tracking::CostHistory) -> Result<(), String> {
|
||
let store = app.store("hikari-cost-history.json").map_err(|e| e.to_string())?;
|
||
let value = serde_json::to_value(history).map_err(|e| e.to_string())?;
|
||
store.set(COST_HISTORY_STORE_KEY, value);
|
||
store.save().map_err(|e| e.to_string())?;
|
||
Ok(())
|
||
}
|
||
|
||
/// Initialize Discord Rich Presence for a Claude session.
///
/// Thin wrapper: connection handling lives in `DiscordRpcManager::init`.
/// `started_at` is a unix timestamp (seconds vs milliseconds is defined by
/// DiscordRpcManager — TODO confirm).
#[tauri::command]
pub async fn init_discord_rpc(
    discord_rpc: State<'_, std::sync::Arc<crate::discord_rpc::DiscordRpcManager>>,
    session_name: String,
    model: String,
    started_at: i64,
) -> Result<(), String> {
    discord_rpc.init(session_name, model, started_at)
}
|
||
|
||
/// Update the existing Discord Rich Presence with the current session info.
///
/// Thin wrapper around `DiscordRpcManager::update`; same parameters as
/// `init_discord_rpc`.
#[tauri::command]
pub async fn update_discord_rpc(
    discord_rpc: State<'_, std::sync::Arc<crate::discord_rpc::DiscordRpcManager>>,
    session_name: String,
    model: String,
    started_at: i64,
) -> Result<(), String> {
    discord_rpc.update(session_name, model, started_at)
}
|
||
|
||
/// Stop Discord Rich Presence.
///
/// Thin wrapper around `DiscordRpcManager::stop`.
#[tauri::command]
pub async fn stop_discord_rpc(
    discord_rpc: State<'_, std::sync::Arc<crate::discord_rpc::DiscordRpcManager>>,
) -> Result<(), String> {
    discord_rpc.stop()
}
|
||
|
||
#[tauri::command]
|
||
pub async fn close_application(app_handle: AppHandle) -> Result<(), String> {
|
||
// Get the main window
|
||
if let Some(window) = app_handle.get_webview_window("main") {
|
||
// Hide the window first for a smoother close
|
||
let _ = window.hide();
|
||
}
|
||
|
||
// Exit the application
|
||
app_handle.exit(0);
|
||
Ok(())
|
||
}
|
||
|
||
/// Response payload for `list_memory_files`.
#[derive(serde::Serialize)]
pub struct MemoryFilesResponse {
    // Absolute paths of files found under `memory` directories, sorted
    // alphabetically by `list_memory_files`.
    pub files: Vec<String>,
}
|
||
|
||
/// List every file stored under a directory literally named `memory`
/// inside `~/.claude/projects`, returning absolute paths sorted
/// alphabetically. A missing projects directory yields an empty list
/// rather than an error.
#[tauri::command]
pub async fn list_memory_files() -> Result<MemoryFilesResponse, String> {
    use std::fs;

    // Get the .claude directory in the user's home
    let home_dir = match dirs::home_dir() {
        Some(dir) => dir,
        None => return Err("Could not find home directory".to_string()),
    };

    let claude_dir = home_dir.join(".claude");
    let projects_dir = claude_dir.join("projects");

    if !projects_dir.exists() {
        return Ok(MemoryFilesResponse { files: Vec::new() });
    }

    let mut memory_files = Vec::new();

    // Recursively find all memory directories.
    // Depth-first walk: files directly inside a dir named "memory" are
    // collected; other directories are recursed into. Note: does not
    // recurse *inside* a memory directory, only lists its direct files.
    fn find_memory_files(dir: &std::path::Path, files: &mut Vec<String>) -> std::io::Result<()> {
        if !dir.is_dir() {
            return Ok(());
        }

        for entry in fs::read_dir(dir)? {
            let entry = entry?;
            let path = entry.path();

            if path.is_dir() {
                // Check if this is a "memory" directory
                if path.file_name().and_then(|n| n.to_str()) == Some("memory") {
                    // List all files in the memory directory
                    for mem_entry in fs::read_dir(&path)? {
                        let mem_entry = mem_entry?;
                        let mem_path = mem_entry.path();

                        if mem_path.is_file() {
                            // Paths that are not valid UTF-8 are silently skipped.
                            if let Some(path_str) = mem_path.to_str() {
                                files.push(path_str.to_string());
                            }
                        }
                    }
                } else {
                    // Recurse into subdirectories
                    find_memory_files(&path, files)?;
                }
            }
        }

        Ok(())
    }

    if let Err(e) = find_memory_files(&projects_dir, &mut memory_files) {
        return Err(format!("Failed to list memory files: {}", e));
    }

    // Sort files alphabetically
    memory_files.sort();

    Ok(MemoryFilesResponse {
        files: memory_files,
    })
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_claude_version() -> Result<String, String> {
|
||
tracing::debug!("Getting Claude CLI version");
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("--version")
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let version = String::from_utf8_lossy(&output.stdout)
|
||
.trim()
|
||
.to_string();
|
||
tracing::info!("Claude CLI version: {}", version);
|
||
Ok(version)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to get Claude version: {}", error);
|
||
Err(format!("Failed to get Claude version: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude --version: {}", e);
|
||
Err(format!("Failed to execute claude --version: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
// ==================== Plugin Management Commands ====================
|
||
|
||
/// A single Claude Code plugin as reported by `claude plugin list`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginInfo {
    // Plugin identifier, e.g. "macrodata@macrodata".
    pub name: String,
    // Version string; empty when the CLI output had no "Version:" line.
    pub version: String,
    // Always None currently — the CLI list output carries no description.
    pub description: Option<String>,
    // True when the "Status:" line contains "enabled".
    pub enabled: bool,
}
|
||
|
||
/// Parse plugin list output from Claude CLI
|
||
fn parse_plugin_list(stdout: &str) -> Vec<PluginInfo> {
|
||
let mut plugins = Vec::new();
|
||
|
||
// Parse text output format:
|
||
// ❯ macrodata@macrodata
|
||
// Version: 0.1.3
|
||
// Scope: user
|
||
// Status: ✔ enabled
|
||
|
||
let lines: Vec<&str> = stdout.lines().collect();
|
||
let mut i = 0;
|
||
while i < lines.len() {
|
||
let line = lines[i].trim();
|
||
|
||
// Look for plugin name line (starts with ❯)
|
||
if line.starts_with("❯") {
|
||
let name = line.trim_start_matches("❯").trim().to_string();
|
||
let mut version = String::new();
|
||
let mut enabled = false;
|
||
|
||
// Parse following lines for metadata
|
||
i += 1;
|
||
while i < lines.len() {
|
||
let meta_line = lines[i].trim();
|
||
if meta_line.is_empty() || meta_line.starts_with("❯") {
|
||
break;
|
||
}
|
||
|
||
if meta_line.starts_with("Version:") {
|
||
version = meta_line.trim_start_matches("Version:").trim().to_string();
|
||
} else if meta_line.starts_with("Status:") {
|
||
enabled = meta_line.contains("enabled");
|
||
}
|
||
i += 1;
|
||
}
|
||
|
||
plugins.push(PluginInfo {
|
||
name,
|
||
version,
|
||
description: None,
|
||
enabled,
|
||
});
|
||
continue;
|
||
}
|
||
i += 1;
|
||
}
|
||
|
||
plugins
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn list_plugins() -> Result<Vec<PluginInfo>, String> {
|
||
tracing::debug!("Listing Claude Code plugins");
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("plugin")
|
||
.arg("list")
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||
let plugins = parse_plugin_list(&stdout);
|
||
tracing::info!("Listed {} plugins", plugins.len());
|
||
Ok(plugins)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to list plugins: {}", error);
|
||
Err(format!("Failed to list plugins: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin list: {}", e);
|
||
Err(format!("Failed to execute claude plugin list: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn install_plugin(plugin_name: String) -> Result<String, String> {
|
||
tracing::debug!("Installing plugin: {}", plugin_name);
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("plugin")
|
||
.arg("install")
|
||
.arg(&plugin_name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully installed plugin: {}", plugin_name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to install plugin {}: {}", plugin_name, error);
|
||
Err(format!("Failed to install plugin: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin install: {}", e);
|
||
Err(format!("Failed to execute claude plugin install: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn uninstall_plugin(plugin_name: String) -> Result<String, String> {
|
||
tracing::debug!("Uninstalling plugin: {}", plugin_name);
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("plugin")
|
||
.arg("uninstall")
|
||
.arg(&plugin_name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully uninstalled plugin: {}", plugin_name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to uninstall plugin {}: {}", plugin_name, error);
|
||
Err(format!("Failed to uninstall plugin: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin uninstall: {}", e);
|
||
Err(format!("Failed to execute claude plugin uninstall: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn enable_plugin(plugin_name: String) -> Result<String, String> {
|
||
tracing::debug!("Enabling plugin: {}", plugin_name);
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("plugin")
|
||
.arg("enable")
|
||
.arg(&plugin_name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully enabled plugin: {}", plugin_name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to enable plugin {}: {}", plugin_name, error);
|
||
Err(format!("Failed to enable plugin: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin enable: {}", e);
|
||
Err(format!("Failed to execute claude plugin enable: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn disable_plugin(plugin_name: String) -> Result<String, String> {
|
||
tracing::debug!("Disabling plugin: {}", plugin_name);
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("plugin")
|
||
.arg("disable")
|
||
.arg(&plugin_name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully disabled plugin: {}", plugin_name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to disable plugin {}: {}", plugin_name, error);
|
||
Err(format!("Failed to disable plugin: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin disable: {}", e);
|
||
Err(format!("Failed to execute claude plugin disable: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn update_plugin(plugin_name: String) -> Result<String, String> {
|
||
tracing::debug!("Updating plugin: {}", plugin_name);
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("plugin")
|
||
.arg("update")
|
||
.arg(&plugin_name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully updated plugin: {}", plugin_name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to update plugin {}: {}", plugin_name, error);
|
||
Err(format!("Failed to update plugin: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin update: {}", e);
|
||
Err(format!("Failed to execute claude plugin update: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
// ==================== Plugin Marketplace Commands ====================
|
||
|
||
/// A configured plugin marketplace as reported by
/// `claude plugin marketplace list`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MarketplaceInfo {
    // Marketplace identifier, e.g. "claude-plugins-official".
    pub name: String,
    // Human-readable source string, e.g. "GitHub (anthropics/claude-plugins-official)".
    pub source: String,
}
|
||
|
||
/// Parse marketplace list output from Claude CLI
|
||
fn parse_marketplace_list(stdout: &str) -> Vec<MarketplaceInfo> {
|
||
let mut marketplaces = Vec::new();
|
||
|
||
// Parse format:
|
||
// Configured marketplaces:
|
||
//
|
||
// ❯ claude-plugins-official
|
||
// Source: GitHub (anthropics/claude-plugins-official)
|
||
//
|
||
// ❯ macrodata
|
||
// Source: GitHub (ascorbic/macrodata)
|
||
|
||
let mut current_name: Option<String> = None;
|
||
|
||
for line in stdout.lines() {
|
||
let trimmed = line.trim();
|
||
|
||
// Look for marketplace names starting with ❯
|
||
if trimmed.starts_with("❯") {
|
||
current_name = Some(trimmed.trim_start_matches("❯").trim().to_string());
|
||
}
|
||
// Look for Source line
|
||
else if trimmed.starts_with("Source:") && current_name.is_some() {
|
||
let source = trimmed.trim_start_matches("Source:").trim().to_string();
|
||
marketplaces.push(MarketplaceInfo {
|
||
name: current_name.take().unwrap(),
|
||
source,
|
||
});
|
||
}
|
||
}
|
||
|
||
marketplaces
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn list_marketplaces() -> Result<Vec<MarketplaceInfo>, String> {
|
||
tracing::debug!("Listing plugin marketplaces");
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("plugin")
|
||
.arg("marketplace")
|
||
.arg("list")
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if !output.status.success() {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to list marketplaces: {}", error);
|
||
return Err(format!("Failed to list marketplaces: {}", error));
|
||
}
|
||
|
||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||
let marketplaces = parse_marketplace_list(&stdout);
|
||
tracing::info!("Found {} marketplaces", marketplaces.len());
|
||
Ok(marketplaces)
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin marketplace list: {}", e);
|
||
Err(format!(
|
||
"Failed to execute claude plugin marketplace list: {}",
|
||
e
|
||
))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn add_marketplace(source: String) -> Result<String, String> {
|
||
tracing::debug!("Adding marketplace: {}", source);
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("plugin")
|
||
.arg("marketplace")
|
||
.arg("add")
|
||
.arg(&source)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully added marketplace: {}", source);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to add marketplace {}: {}", source, error);
|
||
Err(format!("Failed to add marketplace: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin marketplace add: {}", e);
|
||
Err(format!(
|
||
"Failed to execute claude plugin marketplace add: {}",
|
||
e
|
||
))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn remove_marketplace(name: String) -> Result<String, String> {
|
||
tracing::debug!("Removing marketplace: {}", name);
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("plugin")
|
||
.arg("marketplace")
|
||
.arg("remove")
|
||
.arg(&name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully removed marketplace: {}", name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to remove marketplace {}: {}", name, error);
|
||
Err(format!("Failed to remove marketplace: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude plugin marketplace remove: {}", e);
|
||
Err(format!(
|
||
"Failed to execute claude plugin marketplace remove: {}",
|
||
e
|
||
))
|
||
}
|
||
}
|
||
}
|
||
|
||
// ==================== MCP Management Commands ====================
|
||
|
||
/// An MCP server entry parsed from `claude mcp list` output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpServerInfo {
    // Server name; may itself contain colons (e.g. "plugin:macrodata:macrodata").
    pub name: String,
    // Launch command line for stdio servers; None for http/sse servers.
    pub command: Option<String>,
    // Endpoint URL for http/sse servers; None for stdio servers.
    pub url: Option<String>,
    pub transport: String, // "stdio", "http", or "sse"
    // Always None from the list parser — the list output carries no env vars.
    pub env: Option<serde_json::Value>,
    pub status: Option<String>, // "Connected" or "Failed to connect"
}
|
||
|
||
/// Parse MCP server list output from Claude CLI
///
/// Each server is one line of the form `name: command-or-url - ✓/✗ status`.
/// Names may contain colons (plugin-provided servers), so the name/content
/// split uses heuristics rather than a simple `split(':')`.
fn parse_mcp_server_list(stdout: &str) -> Vec<McpServerInfo> {
    let mut servers = Vec::new();

    // Parse text output format:
    // asana: https://mcp.asana.com/sse (SSE) - ✓ Connected
    // gitea: gitea-mcp -t stdio --host https://git.nhcarrigan.com - ✓ Connected
    // plugin:macrodata:macrodata: ... - ✓ Connected

    for line in stdout.lines() {
        let line = line.trim();
        // Skip blanks and "Checking..." progress headers.
        if line.is_empty() || line.starts_with("Checking") {
            continue;
        }

        // Find the last occurrence of " - ✓" or " - ✗" to split status from the rest
        // (byte offsets: " - " is 3 bytes, so pos + 3 lands exactly on the
        // 3-byte ✓/✗ character, which trim_start_matches then removes).
        let (content, status) = if let Some(pos) = line.rfind(" - ✓").or_else(|| line.rfind(" - ✗")) {
            let status_str = line[pos + 3..].trim().trim_start_matches("✓").trim_start_matches("✗").trim();
            (line[..pos].trim(), Some(status_str.to_string()))
        } else {
            (line, None)
        };

        // Now find the name by looking for the first colon followed by either http or a command
        // The format is: "name: command/url"
        // But name can contain colons (e.g. "plugin:macrodata:macrodata")
        // Strategy: Find the colon that separates name from content
        // - If content after colon starts with "http", it's a URL (name is before first colon)
        // - If content is a command, name might have colons, so find the last colon before a non-URL space-separated part

        let (name, rest) = if let Some(first_colon) = content.find(':') {
            let after_first_colon = content[first_colon + 1..].trim_start();

            // Check if it's a URL (starts with http)
            if after_first_colon.starts_with("http") {
                // Name is everything before the first colon
                (content[..first_colon].to_string(), after_first_colon.to_string())
            } else {
                // It's a command - name might contain colons (like plugin:foo:bar)
                // Strategy: Commands start with a letter/word, not with a colon
                // Find the rightmost colon that has whitespace after it (indicating start of command)
                let mut split_pos = first_colon;
                for (idx, _) in content.match_indices(':') {
                    let after = content[idx + 1..].trim_start();
                    // If what comes after this colon is NOT another colon-prefixed part,
                    // and doesn't start with "//" (part of URL), this is our split point
                    if !after.is_empty() && !after.starts_with(':') && !after.starts_with("//") {
                        // Check if this looks like a command (starts with letter/number)
                        if after.chars().next().map(|c| c.is_alphanumeric()).unwrap_or(false) {
                            split_pos = idx;
                        }
                    }
                }

                // split_pos ends up at the LAST qualifying colon, so colons
                // embedded in the name stay on the name side.
                (content[..split_pos].to_string(), content[split_pos + 1..].trim_start().to_string())
            }
        } else {
            continue; // Skip lines without colons
        };

        let name = name.trim().to_string();
        let rest = rest.trim();

        // Determine if it's a URL or command
        let (url, command, transport) = if rest.starts_with("http") {
            // HTTP/SSE server: "https://mcp.asana.com/sse (SSE)"
            // Extract URL and transport type
            let (url, transport) = if let Some((url_part, transport_part)) = rest.rsplit_once('(') {
                let url = url_part.trim().to_string();
                let transport = transport_part.trim_end_matches(')').trim().to_lowercase();
                (Some(url), transport)
            } else {
                // No "(TRANSPORT)" suffix: assume plain HTTP.
                (Some(rest.to_string()), "http".to_string())
            };

            (url, None, transport)
        } else {
            // stdio server: "gitea-mcp -t stdio --host https://git.nhcarrigan.com"
            // Command is everything in rest
            (None, Some(rest.to_string()), "stdio".to_string())
        };

        servers.push(McpServerInfo {
            name,
            command,
            url,
            transport,
            env: None,
            status,
        });
    }

    servers
}
|
||
|
||
#[tauri::command]
|
||
pub async fn list_mcp_servers() -> Result<Vec<McpServerInfo>, String> {
|
||
tracing::debug!("Listing MCP servers");
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("mcp")
|
||
.arg("list")
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||
let servers = parse_mcp_server_list(&stdout);
|
||
tracing::info!("Listed {} MCP servers", servers.len());
|
||
Ok(servers)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to list MCP servers: {}", error);
|
||
Err(format!("Failed to list MCP servers: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude mcp list: {}", e);
|
||
Err(format!("Failed to execute claude mcp list: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_mcp_server(name: String) -> Result<McpServerInfo, String> {
|
||
tracing::debug!("Getting MCP server details: {}", name);
|
||
|
||
// Get all servers and find the matching one
|
||
let servers = list_mcp_servers().await?;
|
||
|
||
servers
|
||
.into_iter()
|
||
.find(|s| s.name == name)
|
||
.ok_or_else(|| format!("MCP server '{}' not found", name))
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn remove_mcp_server(name: String) -> Result<String, String> {
|
||
tracing::debug!("Removing MCP server: {}", name);
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("mcp")
|
||
.arg("remove")
|
||
.arg(&name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully removed MCP server: {}", name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to remove MCP server {}: {}", name, error);
|
||
Err(format!("Failed to remove MCP server: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude mcp remove: {}", e);
|
||
Err(format!("Failed to execute claude mcp remove: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn add_mcp_server(
|
||
name: String,
|
||
command_or_url: String,
|
||
transport: String,
|
||
env_vars: Option<Vec<String>>,
|
||
headers: Option<Vec<String>>,
|
||
) -> Result<String, String> {
|
||
tracing::debug!("Adding MCP server: {} with transport {}", name, transport);
|
||
|
||
let mut cmd = std::process::Command::new("claude");
|
||
cmd.arg("mcp").arg("add");
|
||
|
||
// Add transport flag
|
||
cmd.arg("--transport").arg(&transport);
|
||
|
||
// Add environment variables if provided
|
||
if let Some(env_vars) = env_vars {
|
||
for env_var in env_vars {
|
||
cmd.arg("-e").arg(env_var);
|
||
}
|
||
}
|
||
|
||
// Add headers if provided (for HTTP/SSE)
|
||
if let Some(headers) = headers {
|
||
for header in headers {
|
||
cmd.arg("-H").arg(header);
|
||
}
|
||
}
|
||
|
||
// Add name and command/URL
|
||
cmd.arg(&name).arg(&command_or_url);
|
||
|
||
let output = cmd.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let message = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::info!("Successfully added MCP server: {}", name);
|
||
Ok(message)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to add MCP server {}: {}", name, error);
|
||
Err(format!("Failed to add MCP server: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude mcp add: {}", e);
|
||
Err(format!("Failed to execute claude mcp add: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[tauri::command]
|
||
pub async fn get_mcp_server_details(name: String) -> Result<String, String> {
|
||
tracing::debug!("Getting detailed info for MCP server: {}", name);
|
||
|
||
let output = std::process::Command::new("claude")
|
||
.arg("mcp")
|
||
.arg("get")
|
||
.arg(&name)
|
||
.output();
|
||
|
||
match output {
|
||
Ok(output) => {
|
||
if output.status.success() {
|
||
let details = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||
tracing::debug!("Got MCP server details: {}", details);
|
||
Ok(details)
|
||
} else {
|
||
let error = String::from_utf8_lossy(&output.stderr);
|
||
tracing::error!("Failed to get MCP server details for {}: {}", name, error);
|
||
Err(format!("Failed to get server details: {}", error))
|
||
}
|
||
}
|
||
Err(e) => {
|
||
tracing::error!("Failed to execute claude mcp get: {}", e);
|
||
Err(format!("Failed to execute claude mcp get: {}", e))
|
||
}
|
||
}
|
||
}
|
||
|
||
#[cfg(test)]
|
||
mod tests {
|
||
use super::*;
|
||
use std::fs::{self, File};
|
||
use std::io::Write;
|
||
use tempfile::TempDir;
|
||
|
||
// Helper to run async tests
|
||
fn run_async<F: std::future::Future>(f: F) -> F::Output {
|
||
tokio::runtime::Runtime::new().unwrap().block_on(f)
|
||
}
|
||
|
||
// ==================== validate_directory tests ====================
|
||
|
||
#[test]
|
||
fn test_validate_directory_absolute_path_exists() {
|
||
let temp_dir = TempDir::new().unwrap();
|
||
let path = temp_dir.path().to_string_lossy().to_string();
|
||
|
||
let result = run_async(validate_directory(path.clone(), None));
|
||
assert!(result.is_ok());
|
||
// Canonicalized path should be returned
|
||
assert!(result.unwrap().contains(&temp_dir.path().file_name().unwrap().to_string_lossy().to_string()));
|
||
}
|
||
|
||
#[test]
|
||
fn test_validate_directory_path_not_exists() {
|
||
let result = run_async(validate_directory(
|
||
"/nonexistent/path/that/does/not/exist".to_string(),
|
||
None,
|
||
));
|
||
assert!(result.is_err());
|
||
assert!(result.unwrap_err().contains("does not exist"));
|
||
}
|
||
|
||
#[test]
|
||
fn test_validate_directory_path_is_file() {
|
||
let temp_dir = TempDir::new().unwrap();
|
||
let file_path = temp_dir.path().join("test_file.txt");
|
||
File::create(&file_path).unwrap();
|
||
|
||
let result = run_async(validate_directory(
|
||
file_path.to_string_lossy().to_string(),
|
||
None,
|
||
));
|
||
assert!(result.is_err());
|
||
assert!(result.unwrap_err().contains("not a directory"));
|
||
}
|
||
|
||
#[test]
|
||
fn test_validate_directory_home_expansion() {
|
||
// This test assumes HOME is set (which it should be on most systems)
|
||
if std::env::var_os("HOME").is_some() {
|
||
let result = run_async(validate_directory("~".to_string(), None));
|
||
assert!(result.is_ok());
|
||
// Should not contain ~ after expansion
|
||
assert!(!result.unwrap().contains("~"));
|
||
}
|
||
}
|
||
|
||
#[test]
fn test_validate_directory_home_subpath_expansion() {
    // Requires $HOME to be set and to contain at least one subdirectory;
    // the first one found is validated via a "~/<name>" path.
    if let Some(home) = std::env::var_os("HOME") {
        let home_path = std::path::Path::new(&home);
        if let Ok(entries) = fs::read_dir(home_path) {
            for entry in entries.flatten() {
                if !entry.path().is_dir() {
                    continue;
                }
                let subdir_name = entry.file_name().to_string_lossy().to_string();
                let tilde_path = format!("~/{}", subdir_name);
                let outcome = run_async(validate_directory(tilde_path, None));
                assert!(outcome.is_ok());
                // Expansion must have removed the tilde.
                assert!(!outcome.unwrap().contains("~"));
                break;
            }
        }
    }
}
#[test]
fn test_validate_directory_relative_path_with_current_dir() {
    // A bare relative name should resolve against the supplied current_dir.
    let workspace = TempDir::new().unwrap();
    let child = workspace.path().join("subdir");
    fs::create_dir(&child).unwrap();

    let outcome = run_async(validate_directory(
        "subdir".to_string(),
        Some(workspace.path().to_string_lossy().to_string()),
    ));
    assert!(outcome.is_ok());
    assert!(outcome.unwrap().contains("subdir"));
}
#[test]
fn test_validate_directory_dot_path() {
    // "." relative to an existing directory is a valid selection.
    let workspace = TempDir::new().unwrap();

    let outcome = run_async(validate_directory(
        ".".to_string(),
        Some(workspace.path().to_string_lossy().to_string()),
    ));
    assert!(outcome.is_ok());
}
#[test]
fn test_validate_directory_dotdot_path() {
    // ".." from a child directory should canonicalize to the parent.
    let workspace = TempDir::new().unwrap();
    let child = workspace.path().join("subdir");
    fs::create_dir(&child).unwrap();

    let outcome = run_async(validate_directory(
        "..".to_string(),
        Some(child.to_string_lossy().to_string()),
    ));
    assert!(outcome.is_ok());
    // The resolved path must mention the parent (temp dir) name.
    let parent_name = workspace
        .path()
        .file_name()
        .unwrap()
        .to_string_lossy()
        .to_string();
    assert!(outcome.unwrap().contains(&parent_name));
}
#[test]
fn test_validate_directory_relative_without_current_dir() {
    // With no current_dir to anchor it, a bogus relative path should fail
    // because it almost certainly does not exist from the process cwd.
    let outcome = run_async(validate_directory(
        "some_random_nonexistent_relative_path".to_string(),
        None,
    ));
    assert!(outcome.is_err());
}
// ==================== get_file_size tests ====================
|
||
|
||
#[test]
fn test_get_file_size_empty_file() {
    // A freshly created file reports a size of zero bytes.
    let workspace = TempDir::new().unwrap();
    let target = workspace.path().join("empty.txt");
    File::create(&target).unwrap();

    let outcome = run_async(get_file_size(target.to_string_lossy().to_string()));
    assert!(outcome.is_ok());
    assert_eq!(outcome.unwrap(), 0);
}
#[test]
fn test_get_file_size_with_content() {
    // The reported size must match the exact number of bytes written.
    let workspace = TempDir::new().unwrap();
    let target = workspace.path().join("content.txt");
    let mut handle = File::create(&target).unwrap();
    handle.write_all(b"Hello, Hikari!").unwrap();

    let outcome = run_async(get_file_size(target.to_string_lossy().to_string()));
    assert!(outcome.is_ok());
    // "Hello, Hikari!" is exactly 14 bytes long.
    assert_eq!(outcome.unwrap(), 14);
}
#[test]
fn test_get_file_size_larger_file() {
    // A 1000-byte payload should be reported as exactly 1000 bytes.
    let workspace = TempDir::new().unwrap();
    let target = workspace.path().join("large.txt");
    let mut handle = File::create(&target).unwrap();
    let payload = vec![b'x'; 1000];
    handle.write_all(&payload).unwrap();

    let outcome = run_async(get_file_size(target.to_string_lossy().to_string()));
    assert!(outcome.is_ok());
    assert_eq!(outcome.unwrap(), 1000);
}
#[test]
fn test_get_file_size_nonexistent_file() {
    // Missing files surface a metadata error rather than panicking.
    let outcome = run_async(get_file_size(
        "/nonexistent/path/file.txt".to_string(),
    ));
    assert!(outcome.is_err());
    assert!(outcome.unwrap_err().contains("Failed to get file metadata"));
}
#[test]
fn test_get_file_size_directory() {
    // Directories have metadata too, so querying one succeeds; the reported
    // size is platform-dependent and deliberately left unasserted.
    let workspace = TempDir::new().unwrap();

    let outcome = run_async(get_file_size(
        workspace.path().to_string_lossy().to_string(),
    ));
    assert!(outcome.is_ok());
}
// ==================== list_skills tests ====================
|
||
|
||
#[test]
fn test_list_skills_no_skills_dir() {
    // list_skills depends on $HOME and potentially touches real user data,
    // so this only verifies that the call completes without panicking and
    // yields Ok (either a populated or an empty list).
    let outcome = run_async(list_skills());
    assert!(outcome.is_ok());
}
// ==================== select_wsl_directory tests ====================
|
||
|
||
#[test]
fn test_select_wsl_directory_returns_home() {
    // The WSL directory picker falls back to the user's home directory.
    let outcome = run_async(select_wsl_directory());
    assert!(outcome.is_ok());

    let home_dir = outcome.unwrap();
    // Regular users live under /home/<name>; root's home is /root.
    assert!(home_dir.starts_with("/home/") || home_dir == "/root");
}
// ==================== UpdateInfo struct tests ====================
|
||
|
||
#[test]
fn test_update_info_serialization() {
    // An UpdateInfo with all fields populated must serialize to JSON that
    // carries every field value.
    //
    // Fixture fix: the previous data claimed has_update: true while
    // latest_version ("0.4.0") was OLDER than current_version ("1.0.0"),
    // which contradicted the flag and made the fixture misleading. Use a
    // genuinely newer latest_version instead.
    let info = UpdateInfo {
        current_version: "1.0.0".to_string(),
        latest_version: "2.0.0".to_string(),
        has_update: true,
        release_url: "https://example.com/release".to_string(),
        release_notes: Some("New features!".to_string()),
    };

    let json = serde_json::to_string(&info).unwrap();
    assert!(json.contains("1.0.0"));
    assert!(json.contains("2.0.0"));
    assert!(json.contains("true"));
    assert!(json.contains("New features!"));
}
#[test]
fn test_update_info_without_notes() {
    // release_notes: None must still serialize cleanly.
    let info = UpdateInfo {
        current_version: "1.0.0".to_string(),
        latest_version: "1.0.0".to_string(),
        has_update: false,
        release_url: "https://example.com/release".to_string(),
        release_notes: None,
    };

    let json = serde_json::to_string(&info).unwrap();
    // Depending on serde attributes the None either appears as an explicit
    // null or the field key (possibly renamed) is still present.
    assert!(json.contains("null") || json.contains("release_notes"));
}
// ==================== SavedFileInfo struct tests ====================
|
||
|
||
#[test]
fn test_saved_file_info_serialization() {
    // Both the full path and the bare filename must appear in the JSON.
    let info = SavedFileInfo {
        path: "/tmp/test.txt".to_string(),
        filename: "test.txt".to_string(),
    };

    let json = serde_json::to_string(&info).unwrap();
    assert!(json.contains("/tmp/test.txt"));
    assert!(json.contains("test.txt"));
}
// ==================== CLI Parser Tests ====================
|
||
|
||
#[test]
fn test_parse_plugin_list_single_enabled() {
    // One plugin block with every field present and an enabled status.
    let cli_output = r#"❯ macrodata@macrodata
Version: 0.1.3
Scope: user
Status: ✔ enabled"#;

    let plugins = parse_plugin_list(cli_output);
    assert_eq!(plugins.len(), 1);
    let plugin = &plugins[0];
    assert_eq!(plugin.name, "macrodata@macrodata");
    assert_eq!(plugin.version, "0.1.3");
    assert!(plugin.enabled);
    assert_eq!(plugin.description, None);
}
#[test]
fn test_parse_plugin_list_single_disabled() {
    // A disabled plugin (✘ marker) must parse with enabled == false.
    let cli_output = r#"❯ test-plugin@official
Version: 2.0.0
Status: ✘ disabled"#;

    let plugins = parse_plugin_list(cli_output);
    assert_eq!(plugins.len(), 1);
    let plugin = &plugins[0];
    assert_eq!(plugin.name, "test-plugin@official");
    assert_eq!(plugin.version, "2.0.0");
    assert!(!plugin.enabled);
}
#[test]
fn test_parse_plugin_list_multiple() {
    // Three blocks separated by blank lines, mixing enabled and disabled.
    let cli_output = r#"❯ macrodata@macrodata
Version: 0.1.3
Status: ✔ enabled

❯ another-plugin@official
Version: 1.5.0
Status: ✘ disabled

❯ third-plugin@test
Version: 3.0.0-beta
Status: ✔ enabled"#;

    let plugins = parse_plugin_list(cli_output);

    // Table-driven check: (name, version, enabled) per plugin, in order.
    let expected = [
        ("macrodata@macrodata", "0.1.3", true),
        ("another-plugin@official", "1.5.0", false),
        ("third-plugin@test", "3.0.0-beta", true),
    ];
    assert_eq!(plugins.len(), expected.len());
    for (plugin, (name, version, enabled)) in plugins.iter().zip(expected) {
        assert_eq!(plugin.name, name);
        assert_eq!(plugin.version, version);
        assert_eq!(plugin.enabled, enabled);
    }
}
#[test]
fn test_parse_plugin_list_empty() {
    // No output at all means no plugins.
    let plugins = parse_plugin_list("");
    assert_eq!(plugins.len(), 0);
}
#[test]
fn test_parse_marketplace_list_single() {
    // A single marketplace with its header and Source line.
    let cli_output = r#"Configured marketplaces:

❯ claude-plugins-official
Source: GitHub (anthropics/claude-plugins-official)"#;

    let marketplaces = parse_marketplace_list(cli_output);
    assert_eq!(marketplaces.len(), 1);
    assert_eq!(marketplaces[0].name, "claude-plugins-official");
    assert_eq!(
        marketplaces[0].source,
        "GitHub (anthropics/claude-plugins-official)"
    );
}
#[test]
fn test_parse_marketplace_list_multiple() {
    // Three marketplaces in one listing; only the names are spot-checked.
    let cli_output = r#"Configured marketplaces:

❯ claude-plugins-official
Source: GitHub (anthropics/claude-plugins-official)

❯ macrodata
Source: GitHub (ascorbic/macrodata)

❯ custom-marketplace
Source: GitHub (user/custom-marketplace)"#;

    let marketplaces = parse_marketplace_list(cli_output);
    let expected_names = ["claude-plugins-official", "macrodata", "custom-marketplace"];
    assert_eq!(marketplaces.len(), expected_names.len());
    for (marketplace, name) in marketplaces.iter().zip(expected_names) {
        assert_eq!(marketplace.name, name);
    }
}
#[test]
fn test_parse_marketplace_list_empty() {
    // A header with no entries yields an empty list.
    let marketplaces = parse_marketplace_list("Configured marketplaces:\n\n");
    assert_eq!(marketplaces.len(), 0);
}
#[test]
fn test_parse_mcp_server_list_sse_connected() {
    // URL-based server with an SSE transport tag and a healthy status.
    let line = "asana: https://mcp.asana.com/sse (SSE) - ✓ Connected";

    let servers = parse_mcp_server_list(line);
    assert_eq!(servers.len(), 1);
    let server = &servers[0];
    assert_eq!(server.name, "asana");
    assert_eq!(server.url, Some("https://mcp.asana.com/sse".to_string()));
    assert_eq!(server.command, None);
    assert_eq!(server.transport, "sse");
    assert_eq!(server.status, Some("Connected".to_string()));
}
#[test]
fn test_parse_mcp_server_list_http_connected() {
    // URL-based server with an HTTP transport tag.
    let line = "test-server: https://api.example.com/mcp (HTTP) - ✓ Connected";

    let servers = parse_mcp_server_list(line);
    assert_eq!(servers.len(), 1);
    let server = &servers[0];
    assert_eq!(server.name, "test-server");
    assert_eq!(server.url, Some("https://api.example.com/mcp".to_string()));
    assert_eq!(server.transport, "http");
    assert_eq!(server.status, Some("Connected".to_string()));
}
#[test]
fn test_parse_mcp_server_list_stdio_connected() {
    // Command-based (stdio) servers carry a command string and no URL.
    let line = "gitea: gitea-mcp -t stdio --host https://git.nhcarrigan.com - ✓ Connected";

    let servers = parse_mcp_server_list(line);
    assert_eq!(servers.len(), 1);
    let server = &servers[0];
    assert_eq!(server.name, "gitea");
    assert_eq!(server.url, None);
    assert_eq!(
        server.command,
        Some("gitea-mcp -t stdio --host https://git.nhcarrigan.com".to_string())
    );
    assert_eq!(server.transport, "stdio");
    assert_eq!(server.status, Some("Connected".to_string()));
}
#[test]
fn test_parse_mcp_server_list_failed_connection() {
    // The failure marker (✗) should surface as a "Failed to connect" status.
    let line = "broken-server: https://invalid.com (SSE) - ✗ Failed to connect";

    let servers = parse_mcp_server_list(line);
    assert_eq!(servers.len(), 1);
    assert_eq!(servers[0].name, "broken-server");
    assert_eq!(servers[0].status, Some("Failed to connect".to_string()));
}
#[test]
fn test_parse_mcp_server_list_multiple() {
    // One server per line, covering all three transports.
    let cli_output = r#"asana: https://mcp.asana.com/sse (SSE) - ✓ Connected
gitea: gitea-mcp -t stdio (STDIO) - ✓ Connected
notion: https://mcp.notion.so (HTTP) - ✓ Connected"#;

    let servers = parse_mcp_server_list(cli_output);

    // Table-driven check: (name, transport) per server, in order.
    let expected = [("asana", "sse"), ("gitea", "stdio"), ("notion", "http")];
    assert_eq!(servers.len(), expected.len());
    for (server, (name, transport)) in servers.iter().zip(expected) {
        assert_eq!(server.name, name);
        assert_eq!(server.transport, transport);
    }
}
#[test]
fn test_parse_mcp_server_list_with_checking_line() {
    // The leading "Checking..." progress line must be skipped, not parsed
    // as a server entry.
    let cli_output = r#"Checking MCP servers...
asana: https://mcp.asana.com/sse (SSE) - ✓ Connected"#;

    let servers = parse_mcp_server_list(cli_output);
    assert_eq!(servers.len(), 1);
    assert_eq!(servers[0].name, "asana");
}
#[test]
fn test_parse_mcp_server_list_empty() {
    // Empty input parses to an empty server list.
    let servers = parse_mcp_server_list("");
    assert_eq!(servers.len(), 0);
}
#[test]
fn test_parse_mcp_server_list_plugin_provided() {
    // Plugin-provided servers use a "plugin:<marketplace>:<name>" identifier
    // and are treated as stdio commands.
    let line = "plugin:macrodata:macrodata: plugin macrodata - ✗ Failed to connect";

    let servers = parse_mcp_server_list(line);
    assert_eq!(servers.len(), 1);
    assert_eq!(servers[0].name, "plugin:macrodata:macrodata");
    assert_eq!(servers[0].command, Some("plugin macrodata".to_string()));
    assert_eq!(servers[0].transport, "stdio");
}
// ==================== Edge Case Tests ====================
|
||
|
||
#[test]
fn test_parse_plugin_list_with_unicode_names() {
    // Plugin names may contain non-ASCII characters (CJK, accents, emoji).
    // NOTE(review): this fixture uses ✗ as the disabled marker while other
    // tests use ✘ — presumably the parser keys off the word "disabled",
    // not the glyph; confirm against parse_plugin_list.
    let cli_output = r#"❯ 日本語-plugin@marketplace
Version: 1.0.0
Status: ✔ enabled

❯ émoji-🎉-plugin@marketplace
Version: 2.0.0
Status: ✗ disabled"#;

    let plugins = parse_plugin_list(cli_output);
    assert_eq!(plugins.len(), 2);
    assert_eq!(plugins[0].name, "日本語-plugin@marketplace");
    assert!(plugins[0].enabled);
    assert_eq!(plugins[1].name, "émoji-🎉-plugin@marketplace");
    assert!(!plugins[1].enabled);
}
||
#[test]
fn test_parse_plugin_list_missing_version() {
    // A plugin block without a Version line still parses; the version
    // defaults to the empty string.
    let cli_output = r#"❯ broken-plugin@marketplace
Status: ✔ enabled"#;

    let plugins = parse_plugin_list(cli_output);
    assert_eq!(plugins.len(), 1);
    assert_eq!(plugins[0].name, "broken-plugin@marketplace");
    assert_eq!(plugins[0].version, "");
    assert!(plugins[0].enabled);
}
#[test]
fn test_parse_plugin_list_missing_status() {
    // Without a Status line, enabled defaults to false.
    let cli_output = r#"❯ incomplete-plugin@marketplace
Version: 1.0.0"#;

    let plugins = parse_plugin_list(cli_output);
    assert_eq!(plugins.len(), 1);
    assert_eq!(plugins[0].name, "incomplete-plugin@marketplace");
    assert_eq!(plugins[0].version, "1.0.0");
    assert!(!plugins[0].enabled);
}
#[test]
fn test_parse_marketplace_list_with_unicode() {
    // Marketplace names and sources may be non-ASCII.
    let cli_output = r#"❯ 日本語-marketplace
Source: github/日本語/repo

❯ emoji-🚀-marketplace
Source: github/emoji/🚀-repo"#;

    let marketplaces = parse_marketplace_list(cli_output);
    assert_eq!(marketplaces.len(), 2);
    assert_eq!(marketplaces[0].name, "日本語-marketplace");
    assert_eq!(marketplaces[0].source, "github/日本語/repo");
    assert_eq!(marketplaces[1].name, "emoji-🚀-marketplace");
}
#[test]
fn test_parse_mcp_server_list_with_unicode_names() {
    // Server names and URLs may contain non-ASCII characters.
    let line = "日本語-server: https://example.com/日本語 (SSE) - ✓ Connected";

    let servers = parse_mcp_server_list(line);
    assert_eq!(servers.len(), 1);
    assert_eq!(servers[0].name, "日本語-server");
    assert_eq!(servers[0].url, Some("https://example.com/日本語".to_string()));
}
#[test]
fn test_parse_mcp_server_list_very_long_command() {
    // Everything between the name and the status separator belongs to
    // `command`, however long it is.
    let line = "long-cmd: some-binary --flag1 value1 --flag2 value2 --flag3 value3 --flag4 value4 --flag5 value5 --very-long-option with-a-very-long-value - ✓ Connected";

    let servers = parse_mcp_server_list(line);
    assert_eq!(servers.len(), 1);
    assert_eq!(servers[0].name, "long-cmd");
    assert_eq!(
        servers[0].command,
        Some("some-binary --flag1 value1 --flag2 value2 --flag3 value3 --flag4 value4 --flag5 value5 --very-long-option with-a-very-long-value".to_string())
    );
}
#[test]
fn test_parse_mcp_server_list_no_status() {
    // A line without the " - <status>" suffix yields status == None.
    let line = "pending-server: https://example.com (HTTP)";

    let servers = parse_mcp_server_list(line);
    assert_eq!(servers.len(), 1);
    assert_eq!(servers[0].name, "pending-server");
    assert_eq!(servers[0].status, None);
}
#[test]
fn test_parse_plugin_list_with_extra_whitespace() {
    // Trailing whitespace after the status text must not break parsing.
    let cli_output = r#"❯ whitespace-plugin@marketplace
Version: 1.0.0
Status: ✔ enabled "#;

    let plugins = parse_plugin_list(cli_output);
    assert_eq!(plugins.len(), 1);
    assert_eq!(plugins[0].name, "whitespace-plugin@marketplace");
    assert_eq!(plugins[0].version, "1.0.0");
    assert!(plugins[0].enabled);
}
#[test]
fn test_parse_mcp_server_list_multiple_with_checking() {
    // Progress chatter before the listing is ignored; both servers parse.
    let cli_output = r#"Checking connections...
asana: https://mcp.asana.com/sse (SSE) - ✓ Connected
gitea: gitea-mcp -t stdio (STDIO) - ✓ Connected"#;

    let servers = parse_mcp_server_list(cli_output);
    assert_eq!(servers.len(), 2);
    assert_eq!(servers[0].name, "asana");
    assert_eq!(servers[1].name, "gitea");
}
}
|