generated from nhcarrigan/template
feat: add tests and assert coverage #71
File diff suppressed because it is too large
Load Diff
@@ -257,3 +257,465 @@ pub fn update_clipboard_language(
|
||||
save_history(&app, &history)?;
|
||||
Ok(updated_entry)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
// ==================== ClipboardEntry tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_new() {
|
||||
let entry = ClipboardEntry::new(
|
||||
"let x = 42;".to_string(),
|
||||
Some("rust".to_string()),
|
||||
Some("main.rs".to_string()),
|
||||
);
|
||||
|
||||
assert_eq!(entry.content, "let x = 42;");
|
||||
assert_eq!(entry.language, Some("rust".to_string()));
|
||||
assert_eq!(entry.source, Some("main.rs".to_string()));
|
||||
assert!(!entry.is_pinned);
|
||||
assert!(!entry.id.is_empty());
|
||||
assert!(!entry.timestamp.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_new_without_optional_fields() {
|
||||
let entry = ClipboardEntry::new("some content".to_string(), None, None);
|
||||
|
||||
assert_eq!(entry.content, "some content");
|
||||
assert!(entry.language.is_none());
|
||||
assert!(entry.source.is_none());
|
||||
assert!(!entry.is_pinned);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_unique_ids() {
|
||||
let entry1 = ClipboardEntry::new("content1".to_string(), None, None);
|
||||
let entry2 = ClipboardEntry::new("content2".to_string(), None, None);
|
||||
|
||||
assert_ne!(entry1.id, entry2.id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_serialization() {
|
||||
let entry = ClipboardEntry::new(
|
||||
"fn main() {}".to_string(),
|
||||
Some("rust".to_string()),
|
||||
Some("lib.rs".to_string()),
|
||||
);
|
||||
|
||||
let json = serde_json::to_string(&entry).unwrap();
|
||||
assert!(json.contains("fn main() {}"));
|
||||
assert!(json.contains("rust"));
|
||||
assert!(json.contains("lib.rs"));
|
||||
assert!(json.contains("is_pinned"));
|
||||
|
||||
let deserialized: ClipboardEntry = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(deserialized.content, entry.content);
|
||||
assert_eq!(deserialized.language, entry.language);
|
||||
assert_eq!(deserialized.source, entry.source);
|
||||
assert_eq!(deserialized.id, entry.id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_clone() {
|
||||
let entry = ClipboardEntry::new(
|
||||
"original".to_string(),
|
||||
Some("python".to_string()),
|
||||
None,
|
||||
);
|
||||
|
||||
let cloned = entry.clone();
|
||||
assert_eq!(cloned.content, entry.content);
|
||||
assert_eq!(cloned.id, entry.id);
|
||||
assert_eq!(cloned.language, entry.language);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_timestamp_is_rfc3339() {
|
||||
let entry = ClipboardEntry::new("test".to_string(), None, None);
|
||||
|
||||
// RFC3339 timestamp should parse successfully
|
||||
let parsed = chrono::DateTime::parse_from_rfc3339(&entry.timestamp);
|
||||
assert!(parsed.is_ok());
|
||||
}
|
||||
|
||||
// ==================== ClipboardHistory tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_history_default() {
|
||||
let history = ClipboardHistory::default();
|
||||
assert!(history.entries.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_history_serialization() {
|
||||
let mut history = ClipboardHistory::default();
|
||||
history.entries.push(ClipboardEntry::new(
|
||||
"entry1".to_string(),
|
||||
Some("js".to_string()),
|
||||
None,
|
||||
));
|
||||
history.entries.push(ClipboardEntry::new(
|
||||
"entry2".to_string(),
|
||||
None,
|
||||
Some("file.txt".to_string()),
|
||||
));
|
||||
|
||||
let json = serde_json::to_string(&history).unwrap();
|
||||
assert!(json.contains("entry1"));
|
||||
assert!(json.contains("entry2"));
|
||||
assert!(json.contains("js"));
|
||||
assert!(json.contains("file.txt"));
|
||||
|
||||
let deserialized: ClipboardHistory = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(deserialized.entries.len(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_history_entries_order() {
|
||||
let mut history = ClipboardHistory::default();
|
||||
|
||||
history.entries.push(ClipboardEntry::new("first".to_string(), None, None));
|
||||
history.entries.push(ClipboardEntry::new("second".to_string(), None, None));
|
||||
history.entries.push(ClipboardEntry::new("third".to_string(), None, None));
|
||||
|
||||
assert_eq!(history.entries[0].content, "first");
|
||||
assert_eq!(history.entries[1].content, "second");
|
||||
assert_eq!(history.entries[2].content, "third");
|
||||
}
|
||||
|
||||
// ==================== ClipboardState tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_state_default() {
|
||||
let state = ClipboardState::default();
|
||||
assert!(state.last_content.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_state_with_content() {
|
||||
let state = ClipboardState {
|
||||
last_content: Some("cached content".to_string()),
|
||||
};
|
||||
assert_eq!(state.last_content, Some("cached content".to_string()));
|
||||
}
|
||||
|
||||
// ==================== MAX_HISTORY_SIZE constant test ====================
|
||||
|
||||
#[test]
|
||||
fn test_max_history_size_is_reasonable() {
|
||||
assert_eq!(MAX_HISTORY_SIZE, 100);
|
||||
assert!(MAX_HISTORY_SIZE > 0);
|
||||
assert!(MAX_HISTORY_SIZE <= 1000); // Sanity check
|
||||
}
|
||||
|
||||
// ==================== Pinned entry sorting tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_pinned_entries_sorting() {
|
||||
let mut entries = vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "unpinned older".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-01T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "pinned".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-02T00:00:00Z".to_string(),
|
||||
is_pinned: true,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "unpinned newer".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-03T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
];
|
||||
|
||||
// Apply the same sorting logic as used in the module
|
||||
entries.sort_by(|a, b| {
|
||||
if a.is_pinned && !b.is_pinned {
|
||||
std::cmp::Ordering::Less
|
||||
} else if !a.is_pinned && b.is_pinned {
|
||||
std::cmp::Ordering::Greater
|
||||
} else {
|
||||
b.timestamp.cmp(&a.timestamp)
|
||||
}
|
||||
});
|
||||
|
||||
// Pinned should be first
|
||||
assert!(entries[0].is_pinned);
|
||||
assert_eq!(entries[0].id, "2");
|
||||
|
||||
// Then unpinned sorted by timestamp descending (newest first)
|
||||
assert_eq!(entries[1].id, "3"); // newer unpinned
|
||||
assert_eq!(entries[2].id, "1"); // older unpinned
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_multiple_pinned_entries_sorting() {
|
||||
let mut entries = vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "pinned older".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-01T00:00:00Z".to_string(),
|
||||
is_pinned: true,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "unpinned".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-02T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "pinned newer".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-03T00:00:00Z".to_string(),
|
||||
is_pinned: true,
|
||||
},
|
||||
];
|
||||
|
||||
entries.sort_by(|a, b| {
|
||||
if a.is_pinned && !b.is_pinned {
|
||||
std::cmp::Ordering::Less
|
||||
} else if !a.is_pinned && b.is_pinned {
|
||||
std::cmp::Ordering::Greater
|
||||
} else {
|
||||
b.timestamp.cmp(&a.timestamp)
|
||||
}
|
||||
});
|
||||
|
||||
// Both pinned first, sorted by timestamp
|
||||
assert!(entries[0].is_pinned);
|
||||
assert_eq!(entries[0].id, "3"); // pinned newer
|
||||
assert!(entries[1].is_pinned);
|
||||
assert_eq!(entries[1].id, "1"); // pinned older
|
||||
// Then unpinned
|
||||
assert!(!entries[2].is_pinned);
|
||||
assert_eq!(entries[2].id, "2");
|
||||
}
|
||||
|
||||
// ==================== Entry filtering tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_filter_entries_by_language() {
|
||||
let history = ClipboardHistory {
|
||||
entries: vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "rust code".to_string(),
|
||||
language: Some("rust".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-01T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "js code".to_string(),
|
||||
language: Some("javascript".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-02T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "more rust".to_string(),
|
||||
language: Some("rust".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-03T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let filtered: Vec<_> = history
|
||||
.entries
|
||||
.iter()
|
||||
.filter(|e| e.language.as_ref() == Some(&"rust".to_string()))
|
||||
.collect();
|
||||
|
||||
assert_eq!(filtered.len(), 2);
|
||||
assert!(filtered.iter().all(|e| e.language == Some("rust".to_string())));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_search_entries_by_content() {
|
||||
let history = ClipboardHistory {
|
||||
entries: vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "fn hello_world()".to_string(),
|
||||
language: Some("rust".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-01T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "function hello()".to_string(),
|
||||
language: Some("javascript".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-02T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "def goodbye()".to_string(),
|
||||
language: Some("python".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-03T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let query = "hello";
|
||||
let query_lower = query.to_lowercase();
|
||||
let filtered: Vec<_> = history
|
||||
.entries
|
||||
.iter()
|
||||
.filter(|e| e.content.to_lowercase().contains(&query_lower))
|
||||
.collect();
|
||||
|
||||
assert_eq!(filtered.len(), 2);
|
||||
assert!(filtered[0].content.contains("hello"));
|
||||
assert!(filtered[1].content.contains("hello"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_search_entries_case_insensitive() {
|
||||
let history = ClipboardHistory {
|
||||
entries: vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "HELLO WORLD".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-01T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let query = "hello";
|
||||
let query_lower = query.to_lowercase();
|
||||
let filtered: Vec<_> = history
|
||||
.entries
|
||||
.iter()
|
||||
.filter(|e| e.content.to_lowercase().contains(&query_lower))
|
||||
.collect();
|
||||
|
||||
assert_eq!(filtered.len(), 1);
|
||||
}
|
||||
|
||||
// ==================== Unique languages extraction test ====================
|
||||
|
||||
#[test]
|
||||
fn test_extract_unique_languages() {
|
||||
let history = ClipboardHistory {
|
||||
entries: vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "".to_string(),
|
||||
language: Some("rust".to_string()),
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "".to_string(),
|
||||
language: Some("javascript".to_string()),
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "".to_string(),
|
||||
language: Some("rust".to_string()), // Duplicate
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "4".to_string(),
|
||||
content: "".to_string(),
|
||||
language: None, // No language
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let mut languages: Vec<String> = history
|
||||
.entries
|
||||
.iter()
|
||||
.filter_map(|e| e.language.clone())
|
||||
.collect();
|
||||
languages.sort();
|
||||
languages.dedup();
|
||||
|
||||
assert_eq!(languages.len(), 2);
|
||||
assert!(languages.contains(&"rust".to_string()));
|
||||
assert!(languages.contains(&"javascript".to_string()));
|
||||
}
|
||||
|
||||
// ==================== Retain pinned entries test ====================
|
||||
|
||||
#[test]
|
||||
fn test_retain_pinned_on_clear() {
|
||||
let mut history = ClipboardHistory {
|
||||
entries: vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "pinned".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: true,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "unpinned".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "another pinned".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: true,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
// Simulate clear (keep only pinned)
|
||||
history.entries.retain(|e| e.is_pinned);
|
||||
|
||||
assert_eq!(history.entries.len(), 2);
|
||||
assert!(history.entries.iter().all(|e| e.is_pinned));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -393,3 +393,263 @@ pub async fn get_file_size(file_path: String) -> Result<u64, String> {
|
||||
.map_err(|e| format!("Failed to get file metadata: {}", e))?;
|
||||
Ok(metadata.len())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::fs::{self, File};
|
||||
use std::io::Write;
|
||||
use tempfile::TempDir;
|
||||
|
||||
// Helper to run async tests
|
||||
fn run_async<F: std::future::Future>(f: F) -> F::Output {
|
||||
tokio::runtime::Runtime::new().unwrap().block_on(f)
|
||||
}
|
||||
|
||||
// ==================== validate_directory tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_absolute_path_exists() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let path = temp_dir.path().to_string_lossy().to_string();
|
||||
|
||||
let result = run_async(validate_directory(path.clone(), None));
|
||||
assert!(result.is_ok());
|
||||
// Canonicalized path should be returned
|
||||
assert!(result.unwrap().contains(&temp_dir.path().file_name().unwrap().to_string_lossy().to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_path_not_exists() {
|
||||
let result = run_async(validate_directory(
|
||||
"/nonexistent/path/that/does/not/exist".to_string(),
|
||||
None,
|
||||
));
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("does not exist"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_path_is_file() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let file_path = temp_dir.path().join("test_file.txt");
|
||||
File::create(&file_path).unwrap();
|
||||
|
||||
let result = run_async(validate_directory(
|
||||
file_path.to_string_lossy().to_string(),
|
||||
None,
|
||||
));
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("not a directory"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_home_expansion() {
|
||||
// This test assumes HOME is set (which it should be on most systems)
|
||||
if std::env::var_os("HOME").is_some() {
|
||||
let result = run_async(validate_directory("~".to_string(), None));
|
||||
assert!(result.is_ok());
|
||||
// Should not contain ~ after expansion
|
||||
assert!(!result.unwrap().contains("~"));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_home_subpath_expansion() {
|
||||
// This test assumes HOME is set and has some subdirectory
|
||||
if let Some(home) = std::env::var_os("HOME") {
|
||||
let home_path = std::path::Path::new(&home);
|
||||
// Find any subdirectory in home
|
||||
if let Ok(entries) = fs::read_dir(home_path) {
|
||||
for entry in entries.flatten() {
|
||||
if entry.path().is_dir() {
|
||||
let subdir_name = entry.file_name().to_string_lossy().to_string();
|
||||
let tilde_path = format!("~/{}", subdir_name);
|
||||
let result = run_async(validate_directory(tilde_path, None));
|
||||
assert!(result.is_ok());
|
||||
assert!(!result.unwrap().contains("~"));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_relative_path_with_current_dir() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let subdir = temp_dir.path().join("subdir");
|
||||
fs::create_dir(&subdir).unwrap();
|
||||
|
||||
let result = run_async(validate_directory(
|
||||
"subdir".to_string(),
|
||||
Some(temp_dir.path().to_string_lossy().to_string()),
|
||||
));
|
||||
assert!(result.is_ok());
|
||||
assert!(result.unwrap().contains("subdir"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_dot_path() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
let result = run_async(validate_directory(
|
||||
".".to_string(),
|
||||
Some(temp_dir.path().to_string_lossy().to_string()),
|
||||
));
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_dotdot_path() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let subdir = temp_dir.path().join("subdir");
|
||||
fs::create_dir(&subdir).unwrap();
|
||||
|
||||
let result = run_async(validate_directory(
|
||||
"..".to_string(),
|
||||
Some(subdir.to_string_lossy().to_string()),
|
||||
));
|
||||
assert!(result.is_ok());
|
||||
// Should resolve to parent
|
||||
let resolved = result.unwrap();
|
||||
assert!(resolved.contains(&temp_dir.path().file_name().unwrap().to_string_lossy().to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_relative_without_current_dir() {
|
||||
// Relative path without current_dir - should fail since relative path likely won't exist
|
||||
let result = run_async(validate_directory(
|
||||
"some_random_nonexistent_relative_path".to_string(),
|
||||
None,
|
||||
));
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
// ==================== get_file_size tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_get_file_size_empty_file() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let file_path = temp_dir.path().join("empty.txt");
|
||||
File::create(&file_path).unwrap();
|
||||
|
||||
let result = run_async(get_file_size(file_path.to_string_lossy().to_string()));
|
||||
assert!(result.is_ok());
|
||||
assert_eq!(result.unwrap(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_file_size_with_content() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let file_path = temp_dir.path().join("content.txt");
|
||||
let mut file = File::create(&file_path).unwrap();
|
||||
file.write_all(b"Hello, Hikari!").unwrap();
|
||||
|
||||
let result = run_async(get_file_size(file_path.to_string_lossy().to_string()));
|
||||
assert!(result.is_ok());
|
||||
assert_eq!(result.unwrap(), 14); // "Hello, Hikari!" is 14 bytes
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_file_size_larger_file() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let file_path = temp_dir.path().join("large.txt");
|
||||
let mut file = File::create(&file_path).unwrap();
|
||||
// Write 1000 bytes
|
||||
let data = vec![b'x'; 1000];
|
||||
file.write_all(&data).unwrap();
|
||||
|
||||
let result = run_async(get_file_size(file_path.to_string_lossy().to_string()));
|
||||
assert!(result.is_ok());
|
||||
assert_eq!(result.unwrap(), 1000);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_file_size_nonexistent_file() {
|
||||
let result = run_async(get_file_size(
|
||||
"/nonexistent/path/file.txt".to_string(),
|
||||
));
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("Failed to get file metadata"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_file_size_directory() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
// Getting "size" of a directory should work but return directory metadata
|
||||
// This is actually valid - directories have metadata too
|
||||
let result = run_async(get_file_size(temp_dir.path().to_string_lossy().to_string()));
|
||||
assert!(result.is_ok());
|
||||
// Directory size is platform-dependent, just check it returns something
|
||||
}
|
||||
|
||||
// ==================== list_skills tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_list_skills_no_skills_dir() {
|
||||
// This test is tricky because it depends on HOME being set
|
||||
// and potentially affecting real user data, so we'll just
|
||||
// verify the function doesn't panic
|
||||
let result = run_async(list_skills());
|
||||
// Should either return Ok with a list or Ok with empty vec
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
// ==================== select_wsl_directory tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_select_wsl_directory_returns_home() {
|
||||
let result = run_async(select_wsl_directory());
|
||||
assert!(result.is_ok());
|
||||
assert_eq!(result.unwrap(), "/home");
|
||||
}
|
||||
|
||||
// ==================== UpdateInfo struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_update_info_serialization() {
|
||||
let info = UpdateInfo {
|
||||
current_version: "0.3.0".to_string(),
|
||||
latest_version: "0.4.0".to_string(),
|
||||
has_update: true,
|
||||
release_url: "https://example.com/release".to_string(),
|
||||
release_notes: Some("New features!".to_string()),
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&info).unwrap();
|
||||
assert!(json.contains("0.3.0"));
|
||||
assert!(json.contains("0.4.0"));
|
||||
assert!(json.contains("true"));
|
||||
assert!(json.contains("New features!"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_update_info_without_notes() {
|
||||
let info = UpdateInfo {
|
||||
current_version: "0.3.0".to_string(),
|
||||
latest_version: "0.3.0".to_string(),
|
||||
has_update: false,
|
||||
release_url: "https://example.com/release".to_string(),
|
||||
release_notes: None,
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&info).unwrap();
|
||||
assert!(json.contains("null") || json.contains("release_notes"));
|
||||
}
|
||||
|
||||
// ==================== SavedFileInfo struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_saved_file_info_serialization() {
|
||||
let info = SavedFileInfo {
|
||||
path: "/tmp/test.txt".to_string(),
|
||||
filename: "test.txt".to_string(),
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&info).unwrap();
|
||||
assert!(json.contains("/tmp/test.txt"));
|
||||
assert!(json.contains("test.txt"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -286,3 +286,593 @@ pub fn git_discard(working_dir: String, file_path: String) -> Result<String, Str
|
||||
pub fn git_create_branch(working_dir: String, branch_name: String) -> Result<String, String> {
|
||||
run_git_command(&working_dir, &["checkout", "-b", &branch_name])
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::fs::{self, File};
|
||||
use std::io::Write;
|
||||
use tempfile::TempDir;
|
||||
|
||||
// Helper to create a git repository in a temp directory
|
||||
fn create_test_repo() -> TempDir {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let working_dir = temp_dir.path().to_string_lossy().to_string();
|
||||
|
||||
// Initialize git repo
|
||||
run_git_command(&working_dir, &["init"]).unwrap();
|
||||
|
||||
// Configure git user for commits
|
||||
run_git_command(&working_dir, &["config", "user.email", "test@example.com"]).unwrap();
|
||||
run_git_command(&working_dir, &["config", "user.name", "Test User"]).unwrap();
|
||||
|
||||
// Disable GPG signing for tests (user may have it enabled globally)
|
||||
run_git_command(&working_dir, &["config", "commit.gpgsign", "false"]).unwrap();
|
||||
|
||||
temp_dir
|
||||
}
|
||||
|
||||
// Helper to create a file in the test repo
|
||||
fn create_file(dir: &TempDir, name: &str, content: &str) {
|
||||
let file_path = dir.path().join(name);
|
||||
let mut file = File::create(file_path).unwrap();
|
||||
file.write_all(content.as_bytes()).unwrap();
|
||||
}
|
||||
|
||||
// ==================== GitStatus struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_git_status_serialization() {
|
||||
let status = GitStatus {
|
||||
is_repo: true,
|
||||
branch: Some("main".to_string()),
|
||||
upstream: Some("origin/main".to_string()),
|
||||
ahead: 2,
|
||||
behind: 1,
|
||||
staged: vec![GitFileChange {
|
||||
path: "file.txt".to_string(),
|
||||
status: "modified".to_string(),
|
||||
}],
|
||||
unstaged: vec![],
|
||||
untracked: vec!["new_file.txt".to_string()],
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&status).unwrap();
|
||||
assert!(json.contains("\"is_repo\":true"));
|
||||
assert!(json.contains("\"branch\":\"main\""));
|
||||
assert!(json.contains("\"ahead\":2"));
|
||||
assert!(json.contains("\"behind\":1"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_git_status_not_a_repo() {
|
||||
let status = GitStatus {
|
||||
is_repo: false,
|
||||
branch: None,
|
||||
upstream: None,
|
||||
ahead: 0,
|
||||
behind: 0,
|
||||
staged: vec![],
|
||||
unstaged: vec![],
|
||||
untracked: vec![],
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&status).unwrap();
|
||||
let deserialized: GitStatus = serde_json::from_str(&json).unwrap();
|
||||
assert!(!deserialized.is_repo);
|
||||
assert!(deserialized.branch.is_none());
|
||||
}
|
||||
|
||||
// ==================== GitFileChange struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_git_file_change_serialization() {
|
||||
let change = GitFileChange {
|
||||
path: "src/main.rs".to_string(),
|
||||
status: "added".to_string(),
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&change).unwrap();
|
||||
assert!(json.contains("src/main.rs"));
|
||||
assert!(json.contains("added"));
|
||||
|
||||
let deserialized: GitFileChange = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(deserialized.path, "src/main.rs");
|
||||
assert_eq!(deserialized.status, "added");
|
||||
}
|
||||
|
||||
// ==================== GitBranch struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_git_branch_serialization() {
|
||||
let branch = GitBranch {
|
||||
name: "feature/new-feature".to_string(),
|
||||
is_current: true,
|
||||
is_remote: false,
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&branch).unwrap();
|
||||
assert!(json.contains("feature/new-feature"));
|
||||
assert!(json.contains("\"is_current\":true"));
|
||||
assert!(json.contains("\"is_remote\":false"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_git_branch_remote() {
|
||||
let branch = GitBranch {
|
||||
name: "origin/main".to_string(),
|
||||
is_current: false,
|
||||
is_remote: true,
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&branch).unwrap();
|
||||
let deserialized: GitBranch = serde_json::from_str(&json).unwrap();
|
||||
assert!(deserialized.is_remote);
|
||||
assert!(!deserialized.is_current);
|
||||
}
|
||||
|
||||
// ==================== GitLogEntry struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_git_log_entry_serialization() {
|
||||
let entry = GitLogEntry {
|
||||
hash: "abc123def456".to_string(),
|
||||
short_hash: "abc123d".to_string(),
|
||||
author: "Hikari".to_string(),
|
||||
date: "2 hours ago".to_string(),
|
||||
message: "feat: add new feature".to_string(),
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&entry).unwrap();
|
||||
assert!(json.contains("abc123def456"));
|
||||
assert!(json.contains("Hikari"));
|
||||
assert!(json.contains("feat: add new feature"));
|
||||
}
|
||||
|
||||
// ==================== git_status integration tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_git_status_not_a_git_repo() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let working_dir = temp_dir.path().to_string_lossy().to_string();
|
||||
|
||||
let result = git_status(working_dir);
|
||||
assert!(result.is_ok());
|
||||
|
||||
let status = result.unwrap();
|
||||
assert!(!status.is_repo);
|
||||
assert!(status.branch.is_none());
|
||||
assert!(status.staged.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_git_status_empty_repo() {
|
||||
let temp_dir = create_test_repo();
|
||||
let working_dir = temp_dir.path().to_string_lossy().to_string();
|
||||
|
||||
let result = git_status(working_dir);
|
||||
assert!(result.is_ok());
|
||||
|
||||
let status = result.unwrap();
|
||||
assert!(status.is_repo);
|
||||
assert!(status.staged.is_empty());
|
||||
assert!(status.unstaged.is_empty());
|
||||
assert!(status.untracked.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_git_status_with_untracked_file() {
|
||||
let temp_dir = create_test_repo();
|
||||
let working_dir = temp_dir.path().to_string_lossy().to_string();
|
||||
|
||||
// Create an untracked file
|
||||
create_file(&temp_dir, "untracked.txt", "hello");
|
||||
|
||||
let result = git_status(working_dir);
|
||||
assert!(result.is_ok());
|
||||
|
||||
let status = result.unwrap();
|
||||
assert!(status.is_repo);
|
||||
assert!(status.untracked.contains(&"untracked.txt".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_git_status_with_staged_file() {
|
||||
let temp_dir = create_test_repo();
|
||||
let working_dir = temp_dir.path().to_string_lossy().to_string();
|
||||
|
||||
// Create and stage a file
|
||||
create_file(&temp_dir, "staged.txt", "hello");
|
||||
run_git_command(&working_dir, &["add", "staged.txt"]).unwrap();
|
||||
|
||||
let result = git_status(working_dir);
|
||||
assert!(result.is_ok());
|
||||
|
||||
let status = result.unwrap();
|
||||
assert!(status.is_repo);
|
||||
assert!(!status.staged.is_empty());
|
||||
assert_eq!(status.staged[0].path, "staged.txt");
|
||||
assert_eq!(status.staged[0].status, "added");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_git_status_with_modified_file() {
|
||||
let temp_dir = create_test_repo();
|
||||
let working_dir = temp_dir.path().to_string_lossy().to_string();
|
||||
|
||||
// Create, stage, and commit a file
|
||||
create_file(&temp_dir, "file.txt", "initial content");
|
||||
run_git_command(&working_dir, &["add", "file.txt"]).unwrap();
|
||||
run_git_command(&working_dir, &["commit", "-m", "initial commit"]).unwrap();
|
||||
|
||||
// Modify the file
|
||||
create_file(&temp_dir, "file.txt", "modified content");
|
||||
|
||||
let result = git_status(working_dir);
|
||||
assert!(result.is_ok());
|
||||
|
||||
let status = result.unwrap();
|
||||
assert!(status.is_repo);
|
||||
assert!(!status.unstaged.is_empty());
|
||||
assert_eq!(status.unstaged[0].path, "file.txt");
|
||||
assert_eq!(status.unstaged[0].status, "modified");
|
||||
}
|
||||
|
||||
// ==================== git_diff integration tests ====================
|
||||
|
||||
/// With a clean working tree the diff output is empty.
#[test]
fn test_git_diff_no_changes() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    let diff = git_diff(dir, None, false).expect("diff should succeed in a repo");
    assert!(diff.is_empty());
}

/// An unstaged edit to a committed file produces a non-empty diff that
/// names the file.
#[test]
fn test_git_diff_with_changes() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // Commit an initial version.
    create_file(&repo, "file.txt", "initial content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Edit without staging.
    create_file(&repo, "file.txt", "modified content");

    let diff = git_diff(dir, None, false).expect("diff should succeed");
    assert!(diff.contains("diff"));
    assert!(diff.contains("file.txt"));
}

/// Staged edits are visible when the `staged` flag is set.
#[test]
fn test_git_diff_staged() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // Commit an initial version.
    create_file(&repo, "file.txt", "initial content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Edit and stage the change.
    create_file(&repo, "file.txt", "modified content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();

    let diff = git_diff(dir, None, true).expect("staged diff should succeed");
    assert!(diff.contains("diff"));
}

/// Passing a path restricts the diff to that single file.
#[test]
fn test_git_diff_specific_file() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // Commit two files.
    create_file(&repo, "file1.txt", "content1");
    create_file(&repo, "file2.txt", "content2");
    run_git_command(&dir, &["add", "-A"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Touch both, then ask for a diff of only the first.
    create_file(&repo, "file1.txt", "modified1");
    create_file(&repo, "file2.txt", "modified2");

    let diff = git_diff(dir, Some("file1.txt".to_string()), false)
        .expect("scoped diff should succeed");
    assert!(diff.contains("file1.txt"));
    assert!(!diff.contains("file2.txt"));
}
|
||||
|
||||
// ==================== git_branches integration tests ====================
|
||||
|
||||
/// After the first commit, at least one branch (main or master) exists.
#[test]
fn test_git_branches_single_branch() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // Branches only materialise once there is a commit.
    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    let branches = git_branches(dir).expect("branch listing should succeed");
    assert!(!branches.is_empty());
}

/// A branch created via `git branch` appears in the listing.
#[test]
fn test_git_branches_multiple_branches() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // Initial commit so branches exist at all.
    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Add a second branch without switching to it.
    run_git_command(&dir, &["branch", "feature-branch"]).unwrap();

    let branches = git_branches(dir).expect("branch listing should succeed");
    assert!(branches.len() >= 2);
    assert!(branches.iter().any(|b| b.name == "feature-branch"));
}
|
||||
|
||||
// ==================== git_stage and git_unstage tests ====================
|
||||
|
||||
/// `git_stage` moves an untracked file into the index.
#[test]
fn test_git_stage_file() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    create_file(&repo, "file.txt", "content");

    git_stage(dir.clone(), "file.txt".to_string()).expect("staging should succeed");

    // The file must now appear in the staged list.
    let status = git_status(dir).unwrap();
    assert!(status.staged.iter().any(|f| f.path == "file.txt"));
}

/// `git_unstage` moves a staged edit back to the unstaged set while
/// keeping the working-tree change intact.
#[test]
fn test_git_unstage_file() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // Commit a baseline so there is a HEAD to restore the index from.
    create_file(&repo, "file.txt", "initial content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Edit and stage the change.
    create_file(&repo, "file.txt", "modified content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();

    git_unstage(dir.clone(), "file.txt".to_string()).expect("unstaging should succeed");

    // The edit survives, but only as an unstaged modification.
    let status = git_status(dir).unwrap();
    assert!(!status.staged.iter().any(|f| f.path == "file.txt"));
    assert!(status.unstaged.iter().any(|f| f.path == "file.txt"));
}

/// `git_stage_all` stages every pending file at once.
#[test]
fn test_git_stage_all() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    create_file(&repo, "file1.txt", "content1");
    create_file(&repo, "file2.txt", "content2");

    git_stage_all(dir.clone()).expect("stage-all should succeed");

    // Both new files must be in the index.
    let status = git_status(dir).unwrap();
    assert_eq!(status.staged.len(), 2);
}
|
||||
|
||||
// ==================== git_commit tests ====================
|
||||
|
||||
/// A staged file can be committed, and the message shows up in the log.
#[test]
fn test_git_commit() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();

    git_commit(dir.clone(), "test commit message".to_string())
        .expect("commit should succeed");

    // The newest log entry carries our message.
    let log = git_log(dir, Some(1)).unwrap();
    assert!(!log.is_empty());
    assert!(log[0].message.contains("test commit message"));
}

/// Committing with a clean index is rejected.
#[test]
fn test_git_commit_nothing_to_commit() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // Establish an initial commit so the repo is not in the unborn state.
    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // With nothing staged, the commit must fail.
    assert!(git_commit(dir, "empty commit".to_string()).is_err());
}
|
||||
|
||||
// ==================== git_log tests ====================
|
||||
|
||||
/// Log on a repo without commits either errors or yields nothing.
#[test]
fn test_git_log_empty_repo() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // Some git versions error on an unborn HEAD, others print nothing;
    // both outcomes are acceptable here.
    if let Ok(entries) = git_log(dir, Some(10)) {
        assert!(entries.is_empty());
    }
}

/// Log returns all commits, newest first.
#[test]
fn test_git_log_with_commits() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // Three commits, one new file each.
    for i in 1..=3 {
        create_file(&repo, &format!("file{}.txt", i), "content");
        run_git_command(&dir, &["add", "-A"]).unwrap();
        run_git_command(&dir, &["commit", "-m", &format!("commit {}", i)]).unwrap();
    }

    let log = git_log(dir, Some(10)).expect("log should succeed");
    assert_eq!(log.len(), 3);
    assert!(log[0].message.contains("commit 3")); // newest first
    assert!(log[2].message.contains("commit 1"));
}

/// The limit argument caps how many entries come back.
#[test]
fn test_git_log_limit() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // Five commits...
    for i in 1..=5 {
        create_file(&repo, &format!("file{}.txt", i), "content");
        run_git_command(&dir, &["add", "-A"]).unwrap();
        run_git_command(&dir, &["commit", "-m", &format!("commit {}", i)]).unwrap();
    }

    // ...but only ask for the last two.
    let log = git_log(dir, Some(2)).expect("log should succeed");
    assert_eq!(log.len(), 2);
}
|
||||
|
||||
// ==================== git_discard tests ====================
|
||||
|
||||
/// Discarding restores the last committed contents of a file on disk.
#[test]
fn test_git_discard_changes() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // Commit a baseline.
    create_file(&repo, "file.txt", "original content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Overwrite, then throw the edit away.
    create_file(&repo, "file.txt", "modified content");
    git_discard(dir.clone(), "file.txt".to_string()).expect("discard should succeed");

    // On-disk contents are back to the committed version.
    let content = fs::read_to_string(repo.path().join("file.txt")).unwrap();
    assert_eq!(content, "original content");
}
|
||||
|
||||
// ==================== git_create_branch tests ====================
|
||||
|
||||
/// Creating a branch also switches to it.
#[test]
fn test_git_create_branch() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // A commit is required before branches can be created.
    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    git_create_branch(dir.clone(), "new-branch".to_string())
        .expect("branch creation should succeed");

    // The new branch must exist and be checked out.
    let branches = git_branches(dir).unwrap();
    assert!(branches.iter().any(|b| b.name == "new-branch" && b.is_current));
}
|
||||
|
||||
// ==================== git_checkout tests ====================
|
||||
|
||||
/// Checking out an existing branch makes it the current one.
#[test]
fn test_git_checkout() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    // A commit is required before branches can exist.
    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Make a second branch without switching to it.
    run_git_command(&dir, &["branch", "other-branch"]).unwrap();

    git_checkout(dir.clone(), "other-branch".to_string()).expect("checkout should succeed");

    // The listing now reports other-branch as current.
    let branches = git_branches(dir).unwrap();
    let current = branches.iter().find(|b| b.is_current);
    assert!(current.is_some());
    assert_eq!(current.unwrap().name, "other-branch");
}
|
||||
|
||||
// ==================== run_git_command tests ====================
|
||||
|
||||
/// A harmless command inside a real repo succeeds.
#[test]
fn test_run_git_command_success() {
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().into_owned();

    assert!(run_git_command(&dir, &["status"]).is_ok());
}

/// A command git itself rejects surfaces as an error.
#[test]
fn test_run_git_command_failure() {
    // A plain temp dir is not a git repository, so `git log` must fail.
    let plain_dir = TempDir::new().unwrap();
    let dir = plain_dir.path().to_string_lossy().into_owned();

    assert!(run_git_command(&dir, &["log"]).is_err());
}

/// A nonexistent working directory is reported as an error.
#[test]
fn test_run_git_command_invalid_dir() {
    assert!(run_git_command("/nonexistent/path", &["status"]).is_err());
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,414 @@
|
||||
import { describe, it, expect, vi } from "vitest";
|
||||
import {
|
||||
slashCommands,
|
||||
parseSlashCommand,
|
||||
getMatchingCommands,
|
||||
isSlashCommand,
|
||||
type SlashCommand,
|
||||
} from "./slashCommands";
|
||||
|
||||
// Mock all external dependencies
|
||||
// NOTE: vi.mock calls are hoisted to the top of the module by vitest,
// and each inline factory must be self-contained (no outer references).
// Every dependency the slash commands touch is replaced with inert spies
// so the tests never hit a real store or the Tauri backend.

// Svelte store reader — `get` is stubbed so no real store is dereferenced.
vi.mock("svelte/store", () => ({
  get: vi.fn(),
}));

// Tauri IPC bridge — no real backend invocation happens in tests.
vi.mock("@tauri-apps/api/core", () => ({
  invoke: vi.fn(),
}));

// Claude conversation store, with every member the commands use.
vi.mock("$lib/stores/claude", () => ({
  claudeStore: {
    addLine: vi.fn(),
    clearTerminal: vi.fn(),
    activeConversationId: { subscribe: vi.fn() },
    currentWorkingDirectory: { subscribe: vi.fn() },
    setWorkingDirectory: vi.fn(),
    getConversationHistory: vi.fn(),
  },
}));

// Character animation state.
vi.mock("$lib/stores/character", () => ({
  characterState: {
    setState: vi.fn(),
    setTemporaryState: vi.fn(),
  },
}));

// Greeting-suppression helper.
vi.mock("$lib/tauri", () => ({
  setSkipNextGreeting: vi.fn(),
}));

// In-conversation search state.
vi.mock("$lib/stores/search", () => ({
  searchState: {
    setQuery: vi.fn(),
    clear: vi.fn(),
  },
}));
|
||||
|
||||
describe("slashCommands", () => {
|
||||
describe("slashCommands array", () => {
|
||||
it("contains expected commands", () => {
|
||||
const commandNames = slashCommands.map((cmd) => cmd.name);
|
||||
expect(commandNames).toContain("cd");
|
||||
expect(commandNames).toContain("clear");
|
||||
expect(commandNames).toContain("new");
|
||||
expect(commandNames).toContain("help");
|
||||
expect(commandNames).toContain("search");
|
||||
expect(commandNames).toContain("summarise");
|
||||
expect(commandNames).toContain("skill");
|
||||
});
|
||||
|
||||
it("has 7 commands total", () => {
|
||||
expect(slashCommands.length).toBe(7);
|
||||
});
|
||||
|
||||
it("each command has required properties", () => {
|
||||
slashCommands.forEach((cmd) => {
|
||||
expect(cmd.name).toBeDefined();
|
||||
expect(typeof cmd.name).toBe("string");
|
||||
expect(cmd.name.length).toBeGreaterThan(0);
|
||||
|
||||
expect(cmd.description).toBeDefined();
|
||||
expect(typeof cmd.description).toBe("string");
|
||||
expect(cmd.description.length).toBeGreaterThan(0);
|
||||
|
||||
expect(cmd.usage).toBeDefined();
|
||||
expect(typeof cmd.usage).toBe("string");
|
||||
expect(cmd.usage.startsWith("/")).toBe(true);
|
||||
|
||||
expect(cmd.execute).toBeDefined();
|
||||
expect(typeof cmd.execute).toBe("function");
|
||||
});
|
||||
});
|
||||
|
||||
it("cd command has correct metadata", () => {
|
||||
const cdCmd = slashCommands.find((cmd) => cmd.name === "cd");
|
||||
expect(cdCmd).toBeDefined();
|
||||
expect(cdCmd!.description).toBe("Change the working directory");
|
||||
expect(cdCmd!.usage).toBe("/cd <path>");
|
||||
});
|
||||
|
||||
it("clear command has correct metadata", () => {
|
||||
const clearCmd = slashCommands.find((cmd) => cmd.name === "clear");
|
||||
expect(clearCmd).toBeDefined();
|
||||
expect(clearCmd!.description).toBe("Clear the terminal display (keeps conversation context)");
|
||||
expect(clearCmd!.usage).toBe("/clear");
|
||||
});
|
||||
|
||||
it("new command has correct metadata", () => {
|
||||
const newCmd = slashCommands.find((cmd) => cmd.name === "new");
|
||||
expect(newCmd).toBeDefined();
|
||||
expect(newCmd!.description).toBe("Start a fresh conversation (resets context)");
|
||||
expect(newCmd!.usage).toBe("/new");
|
||||
});
|
||||
|
||||
it("help command has correct metadata", () => {
|
||||
const helpCmd = slashCommands.find((cmd) => cmd.name === "help");
|
||||
expect(helpCmd).toBeDefined();
|
||||
expect(helpCmd!.description).toBe("Show available slash commands");
|
||||
expect(helpCmd!.usage).toBe("/help");
|
||||
});
|
||||
|
||||
it("search command has correct metadata", () => {
|
||||
const searchCmd = slashCommands.find((cmd) => cmd.name === "search");
|
||||
expect(searchCmd).toBeDefined();
|
||||
expect(searchCmd!.description).toBe("Search within the conversation (use /search to clear)");
|
||||
expect(searchCmd!.usage).toBe("/search [query]");
|
||||
});
|
||||
|
||||
it("summarise command has correct metadata", () => {
|
||||
const summariseCmd = slashCommands.find((cmd) => cmd.name === "summarise");
|
||||
expect(summariseCmd).toBeDefined();
|
||||
expect(summariseCmd!.description).toBe("Get a summary of the entire conversation");
|
||||
expect(summariseCmd!.usage).toBe("/summarise");
|
||||
});
|
||||
|
||||
it("skill command has correct metadata", () => {
|
||||
const skillCmd = slashCommands.find((cmd) => cmd.name === "skill");
|
||||
expect(skillCmd).toBeDefined();
|
||||
expect(skillCmd!.description).toBe("Invoke a Claude Code skill from ~/.claude/skills/");
|
||||
expect(skillCmd!.usage).toBe("/skill [name] [data]");
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseSlashCommand", () => {
|
||||
it("returns null for non-slash input", () => {
|
||||
const result = parseSlashCommand("hello world");
|
||||
expect(result.command).toBeNull();
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("returns null for empty string", () => {
|
||||
const result = parseSlashCommand("");
|
||||
expect(result.command).toBeNull();
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("returns null for whitespace only", () => {
|
||||
const result = parseSlashCommand(" ");
|
||||
expect(result.command).toBeNull();
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /cd command without args", () => {
|
||||
const result = parseSlashCommand("/cd");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("cd");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /cd command with path argument", () => {
|
||||
const result = parseSlashCommand("/cd /home/naomi/code");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("cd");
|
||||
expect(result.args).toBe("/home/naomi/code");
|
||||
});
|
||||
|
||||
it("parses /clear command", () => {
|
||||
const result = parseSlashCommand("/clear");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("clear");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /new command", () => {
|
||||
const result = parseSlashCommand("/new");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("new");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /help command", () => {
|
||||
const result = parseSlashCommand("/help");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("help");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /search command with query", () => {
|
||||
const result = parseSlashCommand("/search hello world");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("search");
|
||||
expect(result.args).toBe("hello world");
|
||||
});
|
||||
|
||||
it("parses /search command without query", () => {
|
||||
const result = parseSlashCommand("/search");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("search");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /summarise command", () => {
|
||||
const result = parseSlashCommand("/summarise");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("summarise");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /skill command with name and data", () => {
|
||||
const result = parseSlashCommand("/skill onboard-mentee john@example.com");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("skill");
|
||||
expect(result.args).toBe("onboard-mentee john@example.com");
|
||||
});
|
||||
|
||||
it("parses /skill command with name only", () => {
|
||||
const result = parseSlashCommand("/skill onboard-mentee");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("skill");
|
||||
expect(result.args).toBe("onboard-mentee");
|
||||
});
|
||||
|
||||
it("parses /skill command without arguments", () => {
|
||||
const result = parseSlashCommand("/skill");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("skill");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("returns null for unknown command", () => {
|
||||
const result = parseSlashCommand("/unknown");
|
||||
expect(result.command).toBeNull();
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("is case insensitive for command names", () => {
|
||||
const result1 = parseSlashCommand("/CD /path");
|
||||
expect(result1.command).not.toBeNull();
|
||||
expect(result1.command!.name).toBe("cd");
|
||||
|
||||
const result2 = parseSlashCommand("/CLEAR");
|
||||
expect(result2.command).not.toBeNull();
|
||||
expect(result2.command!.name).toBe("clear");
|
||||
|
||||
const result3 = parseSlashCommand("/Help");
|
||||
expect(result3.command).not.toBeNull();
|
||||
expect(result3.command!.name).toBe("help");
|
||||
});
|
||||
|
||||
it("handles leading whitespace", () => {
|
||||
const result = parseSlashCommand(" /cd /path");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("cd");
|
||||
expect(result.args).toBe("/path");
|
||||
});
|
||||
|
||||
it("handles trailing whitespace", () => {
|
||||
const result = parseSlashCommand("/cd /path ");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("cd");
|
||||
expect(result.args).toBe("/path");
|
||||
});
|
||||
|
||||
it("handles multiple spaces between args", () => {
|
||||
const result = parseSlashCommand("/search hello world");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("search");
|
||||
expect(result.args).toBe("hello world");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getMatchingCommands", () => {
|
||||
it("returns empty array for non-slash input", () => {
|
||||
const result = getMatchingCommands("hello");
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it("returns empty array for empty string", () => {
|
||||
const result = getMatchingCommands("");
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it("returns all commands for just slash", () => {
|
||||
const result = getMatchingCommands("/");
|
||||
expect(result.length).toBe(slashCommands.length);
|
||||
});
|
||||
|
||||
it("returns matching commands for partial input", () => {
|
||||
const result = getMatchingCommands("/c");
|
||||
const names = result.map((cmd) => cmd.name);
|
||||
expect(names).toContain("cd");
|
||||
expect(names).toContain("clear");
|
||||
expect(names).not.toContain("help");
|
||||
});
|
||||
|
||||
it("returns single command for exact match", () => {
|
||||
const result = getMatchingCommands("/cd");
|
||||
expect(result.length).toBe(1);
|
||||
expect(result[0].name).toBe("cd");
|
||||
});
|
||||
|
||||
it("returns single command for partial unique match", () => {
|
||||
const result = getMatchingCommands("/cl");
|
||||
expect(result.length).toBe(1);
|
||||
expect(result[0].name).toBe("clear");
|
||||
});
|
||||
|
||||
it("returns matching commands for /s prefix", () => {
|
||||
const result = getMatchingCommands("/s");
|
||||
const names = result.map((cmd) => cmd.name);
|
||||
expect(names).toContain("search");
|
||||
expect(names).toContain("summarise");
|
||||
expect(names).toContain("skill");
|
||||
});
|
||||
|
||||
it("is case insensitive", () => {
|
||||
const result1 = getMatchingCommands("/C");
|
||||
const result2 = getMatchingCommands("/c");
|
||||
expect(result1.length).toBe(result2.length);
|
||||
});
|
||||
|
||||
it("returns empty array for no matches", () => {
|
||||
const result = getMatchingCommands("/xyz");
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it("handles whitespace correctly", () => {
|
||||
const result = getMatchingCommands(" /c");
|
||||
const names = result.map((cmd) => cmd.name);
|
||||
expect(names).toContain("cd");
|
||||
expect(names).toContain("clear");
|
||||
});
|
||||
|
||||
it("returns command for full command name", () => {
|
||||
const result = getMatchingCommands("/help");
|
||||
expect(result.length).toBe(1);
|
||||
expect(result[0].name).toBe("help");
|
||||
});
|
||||
|
||||
it("returns command for /new", () => {
|
||||
const result = getMatchingCommands("/n");
|
||||
expect(result.length).toBe(1);
|
||||
expect(result[0].name).toBe("new");
|
||||
});
|
||||
});
|
||||
|
||||
describe("isSlashCommand", () => {
|
||||
it("returns true for input starting with slash", () => {
|
||||
expect(isSlashCommand("/cd")).toBe(true);
|
||||
expect(isSlashCommand("/")).toBe(true);
|
||||
expect(isSlashCommand("/help")).toBe(true);
|
||||
expect(isSlashCommand("/unknown")).toBe(true);
|
||||
});
|
||||
|
||||
it("returns false for non-slash input", () => {
|
||||
expect(isSlashCommand("hello")).toBe(false);
|
||||
expect(isSlashCommand("")).toBe(false);
|
||||
expect(isSlashCommand("cd")).toBe(false);
|
||||
});
|
||||
|
||||
it("handles whitespace correctly", () => {
|
||||
expect(isSlashCommand(" /cd")).toBe(true);
|
||||
expect(isSlashCommand(" hello")).toBe(false);
|
||||
});
|
||||
|
||||
it("returns false for slash in middle of string", () => {
|
||||
expect(isSlashCommand("hello/world")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("SlashCommand interface", () => {
|
||||
it("can create a valid slash command object", () => {
|
||||
const testCommand: SlashCommand = {
|
||||
name: "test",
|
||||
description: "A test command",
|
||||
usage: "/test [arg]",
|
||||
execute: vi.fn(),
|
||||
};
|
||||
|
||||
expect(testCommand.name).toBe("test");
|
||||
expect(testCommand.description).toBe("A test command");
|
||||
expect(testCommand.usage).toBe("/test [arg]");
|
||||
expect(typeof testCommand.execute).toBe("function");
|
||||
});
|
||||
|
||||
it("execute can be async function", () => {
|
||||
const asyncCommand: SlashCommand = {
|
||||
name: "async",
|
||||
description: "An async command",
|
||||
usage: "/async",
|
||||
execute: async () => {
|
||||
await Promise.resolve();
|
||||
},
|
||||
};
|
||||
|
||||
expect(asyncCommand.execute("")).toBeInstanceOf(Promise);
|
||||
});
|
||||
|
||||
it("execute can be sync function", () => {
|
||||
const syncCommand: SlashCommand = {
|
||||
name: "sync",
|
||||
description: "A sync command",
|
||||
usage: "/sync",
|
||||
execute: () => {
|
||||
// Synchronous execution
|
||||
},
|
||||
};
|
||||
|
||||
const result = syncCommand.execute("");
|
||||
// Sync function returns undefined, not a Promise
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,333 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
|
||||
import {
|
||||
NotificationType,
|
||||
NOTIFICATION_SOUNDS,
|
||||
type NotificationSound,
|
||||
} from "./types";
|
||||
|
||||
// Mock HTMLAudioElement for soundPlayer tests
|
||||
class MockAudioElement {
|
||||
src: string = "";
|
||||
preload: string = "";
|
||||
volume: number = 1;
|
||||
|
||||
constructor(src?: string) {
|
||||
if (src) this.src = src;
|
||||
}
|
||||
|
||||
cloneNode(): MockAudioElement {
|
||||
const clone = new MockAudioElement(this.src);
|
||||
clone.volume = this.volume;
|
||||
return clone;
|
||||
}
|
||||
|
||||
async play(): Promise<void> {
|
||||
return Promise.resolve();
|
||||
}
|
||||
}
|
||||
|
||||
// Store original Audio before mocking
|
||||
const OriginalAudio = global.Audio;
|
||||
|
||||
describe("notifications", () => {
|
||||
  describe("NotificationType enum", () => {
    // Each variant's string value is the snake_case form asserted below.
    it("has SUCCESS type", () => {
      expect(NotificationType.SUCCESS).toBe("success");
    });

    it("has ERROR type", () => {
      expect(NotificationType.ERROR).toBe("error");
    });

    it("has PERMISSION type", () => {
      expect(NotificationType.PERMISSION).toBe("permission");
    });

    it("has CONNECTION type", () => {
      expect(NotificationType.CONNECTION).toBe("connection");
    });

    it("has TASK_START type", () => {
      expect(NotificationType.TASK_START).toBe("task_start");
    });

    it("has ACHIEVEMENT type", () => {
      expect(NotificationType.ACHIEVEMENT).toBe("achievement");
    });

    // Guards against a variant being added without updating these tests.
    it("has exactly 6 notification types", () => {
      const types = Object.values(NotificationType);
      expect(types.length).toBe(6);
    });
  });
|
||||
|
||||
describe("NOTIFICATION_SOUNDS constant", () => {
|
||||
it("has sounds for all notification types", () => {
|
||||
Object.values(NotificationType).forEach((type) => {
|
||||
expect(NOTIFICATION_SOUNDS[type]).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
it("each sound has required properties", () => {
|
||||
Object.values(NOTIFICATION_SOUNDS).forEach((sound) => {
|
||||
expect(sound.type).toBeDefined();
|
||||
expect(sound.filename).toBeDefined();
|
||||
expect(sound.phrase).toBeDefined();
|
||||
expect(typeof sound.filename).toBe("string");
|
||||
expect(typeof sound.phrase).toBe("string");
|
||||
expect(sound.filename.endsWith(".mp3")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
it("SUCCESS sound has correct properties", () => {
|
||||
const sound = NOTIFICATION_SOUNDS[NotificationType.SUCCESS];
|
||||
expect(sound.type).toBe(NotificationType.SUCCESS);
|
||||
expect(sound.filename).toBe("im-done.mp3");
|
||||
expect(sound.phrase).toBe("I'm done!");
|
||||
expect(sound.volume).toBe(0.7);
|
||||
});
|
||||
|
||||
it("ERROR sound has correct properties", () => {
|
||||
const sound = NOTIFICATION_SOUNDS[NotificationType.ERROR];
|
||||
expect(sound.type).toBe(NotificationType.ERROR);
|
||||
expect(sound.filename).toBe("oh-no.mp3");
|
||||
expect(sound.phrase).toBe("Oh no...");
|
||||
expect(sound.volume).toBe(0.8);
|
||||
});
|
||||
|
||||
it("PERMISSION sound has correct properties", () => {
|
||||
const sound = NOTIFICATION_SOUNDS[NotificationType.PERMISSION];
|
||||
expect(sound.type).toBe(NotificationType.PERMISSION);
|
||||
expect(sound.filename).toBe("access-please.mp3");
|
||||
expect(sound.phrase).toBe("Access please!");
|
||||
expect(sound.volume).toBe(0.9);
|
||||
});
|
||||
|
||||
it("CONNECTION sound has correct properties", () => {
|
||||
const sound = NOTIFICATION_SOUNDS[NotificationType.CONNECTION];
|
||||
expect(sound.type).toBe(NotificationType.CONNECTION);
|
||||
expect(sound.filename).toBe("connected.mp3");
|
||||
expect(sound.phrase).toBe("Connected!");
|
||||
expect(sound.volume).toBe(0.7);
|
||||
});
|
||||
|
||||
it("TASK_START sound has correct properties", () => {
|
||||
const sound = NOTIFICATION_SOUNDS[NotificationType.TASK_START];
|
||||
expect(sound.type).toBe(NotificationType.TASK_START);
|
||||
expect(sound.filename).toBe("working-on-it.mp3");
|
||||
expect(sound.phrase).toBe("Working on it!");
|
||||
expect(sound.volume).toBe(0.6);
|
||||
});
|
||||
|
||||
it("ACHIEVEMENT sound has correct properties", () => {
|
||||
const sound = NOTIFICATION_SOUNDS[NotificationType.ACHIEVEMENT];
|
||||
expect(sound.type).toBe(NotificationType.ACHIEVEMENT);
|
||||
expect(sound.filename).toBe("achievement.mp3");
|
||||
expect(sound.phrase).toBe("Achievement Get~!");
|
||||
expect(sound.volume).toBe(0.8);
|
||||
});
|
||||
|
||||
it("all volumes are within valid range (0-1)", () => {
|
||||
Object.values(NOTIFICATION_SOUNDS).forEach((sound) => {
|
||||
if (sound.volume !== undefined) {
|
||||
expect(sound.volume).toBeGreaterThanOrEqual(0);
|
||||
expect(sound.volume).toBeLessThanOrEqual(1);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("NotificationSound interface", () => {
|
||||
it("can create a valid notification sound object", () => {
|
||||
const sound: NotificationSound = {
|
||||
type: NotificationType.SUCCESS,
|
||||
filename: "test-sound.mp3",
|
||||
phrase: "Test phrase",
|
||||
volume: 0.5,
|
||||
};
|
||||
|
||||
expect(sound.type).toBe(NotificationType.SUCCESS);
|
||||
expect(sound.filename).toBe("test-sound.mp3");
|
||||
expect(sound.phrase).toBe("Test phrase");
|
||||
expect(sound.volume).toBe(0.5);
|
||||
});
|
||||
|
||||
it("volume is optional", () => {
|
||||
const sound: NotificationSound = {
|
||||
type: NotificationType.ERROR,
|
||||
filename: "error.mp3",
|
||||
phrase: "Error occurred",
|
||||
};
|
||||
|
||||
expect(sound.volume).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("SoundPlayer class", () => {
|
||||
beforeEach(() => {
|
||||
// Mock Audio constructor
|
||||
global.Audio = MockAudioElement as unknown as typeof Audio;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original Audio
|
||||
global.Audio = OriginalAudio;
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
it("can import soundPlayer singleton", async () => {
|
||||
const { soundPlayer } = await import("./soundPlayer");
|
||||
expect(soundPlayer).toBeDefined();
|
||||
});
|
||||
|
||||
it("setEnabled changes enabled state", async () => {
|
||||
const { soundPlayer } = await import("./soundPlayer");
|
||||
|
||||
soundPlayer.setEnabled(true);
|
||||
expect(soundPlayer.isEnabled()).toBe(true);
|
||||
|
||||
soundPlayer.setEnabled(false);
|
||||
expect(soundPlayer.isEnabled()).toBe(false);
|
||||
});
|
||||
|
||||
it("starts disabled by default", async () => {
|
||||
// Need to reimport to get fresh instance behavior
|
||||
// But since it's a singleton, we just test the method
|
||||
const { soundPlayer } = await import("./soundPlayer");
|
||||
|
||||
// Reset to default state
|
||||
soundPlayer.setEnabled(false);
|
||||
expect(soundPlayer.isEnabled()).toBe(false);
|
||||
});
|
||||
|
||||
it("setGlobalVolume clamps values to 0-1 range", async () => {
|
||||
const { soundPlayer } = await import("./soundPlayer");
|
||||
|
||||
// Test that it doesn't throw on edge cases
|
||||
soundPlayer.setGlobalVolume(0);
|
||||
soundPlayer.setGlobalVolume(1);
|
||||
soundPlayer.setGlobalVolume(0.5);
|
||||
|
||||
// Test clamping below 0
|
||||
soundPlayer.setGlobalVolume(-0.5);
|
||||
|
||||
// Test clamping above 1
|
||||
soundPlayer.setGlobalVolume(1.5);
|
||||
});
|
||||
|
||||
it("play returns early when disabled", async () => {
|
||||
const { soundPlayer } = await import("./soundPlayer");
|
||||
|
||||
soundPlayer.setEnabled(false);
|
||||
|
||||
// Should not throw when disabled
|
||||
await expect(soundPlayer.play(NotificationType.SUCCESS)).resolves.toBeUndefined();
|
||||
});
|
||||
|
||||
it("play attempts to play when enabled", async () => {
|
||||
const { soundPlayer } = await import("./soundPlayer");
|
||||
|
||||
soundPlayer.setEnabled(true);
|
||||
|
||||
// Should not throw
|
||||
await expect(soundPlayer.play(NotificationType.SUCCESS)).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("NotificationManager class", () => {
|
||||
beforeEach(() => {
|
||||
global.Audio = MockAudioElement as unknown as typeof Audio;
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
global.Audio = OriginalAudio;
|
||||
});
|
||||
|
||||
it("can import notificationManager singleton", async () => {
|
||||
vi.mock("@tauri-apps/api/core", () => ({
|
||||
invoke: vi.fn().mockRejectedValue(new Error("Not available")),
|
||||
}));
|
||||
|
||||
const { notificationManager } = await import("./notificationManager");
|
||||
expect(notificationManager).toBeDefined();
|
||||
});
|
||||
|
||||
it("has notifySuccess method", async () => {
|
||||
vi.mock("@tauri-apps/api/core", () => ({
|
||||
invoke: vi.fn().mockRejectedValue(new Error("Not available")),
|
||||
}));
|
||||
|
||||
const { notificationManager } = await import("./notificationManager");
|
||||
expect(typeof notificationManager.notifySuccess).toBe("function");
|
||||
});
|
||||
|
||||
it("has notifyError method", async () => {
|
||||
vi.mock("@tauri-apps/api/core", () => ({
|
||||
invoke: vi.fn().mockRejectedValue(new Error("Not available")),
|
||||
}));
|
||||
|
||||
const { notificationManager } = await import("./notificationManager");
|
||||
expect(typeof notificationManager.notifyError).toBe("function");
|
||||
});
|
||||
|
||||
it("has notifyPermission method", async () => {
|
||||
vi.mock("@tauri-apps/api/core", () => ({
|
||||
invoke: vi.fn().mockRejectedValue(new Error("Not available")),
|
||||
}));
|
||||
|
||||
const { notificationManager } = await import("./notificationManager");
|
||||
expect(typeof notificationManager.notifyPermission).toBe("function");
|
||||
});
|
||||
|
||||
it("has notifyConnection method", async () => {
|
||||
vi.mock("@tauri-apps/api/core", () => ({
|
||||
invoke: vi.fn().mockRejectedValue(new Error("Not available")),
|
||||
}));
|
||||
|
||||
const { notificationManager } = await import("./notificationManager");
|
||||
expect(typeof notificationManager.notifyConnection).toBe("function");
|
||||
});
|
||||
|
||||
it("has notifyTaskStart method", async () => {
|
||||
vi.mock("@tauri-apps/api/core", () => ({
|
||||
invoke: vi.fn().mockRejectedValue(new Error("Not available")),
|
||||
}));
|
||||
|
||||
const { notificationManager } = await import("./notificationManager");
|
||||
expect(typeof notificationManager.notifyTaskStart).toBe("function");
|
||||
});
|
||||
|
||||
it("has notify method", async () => {
|
||||
vi.mock("@tauri-apps/api/core", () => ({
|
||||
invoke: vi.fn().mockRejectedValue(new Error("Not available")),
|
||||
}));
|
||||
|
||||
const { notificationManager } = await import("./notificationManager");
|
||||
expect(typeof notificationManager.notify).toBe("function");
|
||||
});
|
||||
});
|
||||
|
||||
describe("notification sounds file paths", () => {
|
||||
it("all sound files have valid paths", () => {
|
||||
Object.values(NOTIFICATION_SOUNDS).forEach((sound) => {
|
||||
// Check that filename doesn't contain path traversal
|
||||
expect(sound.filename).not.toContain("..");
|
||||
expect(sound.filename).not.toContain("/");
|
||||
expect(sound.filename).not.toContain("\\");
|
||||
});
|
||||
});
|
||||
|
||||
it("sound filenames are unique", () => {
|
||||
const filenames = Object.values(NOTIFICATION_SOUNDS).map((s) => s.filename);
|
||||
const uniqueFilenames = new Set(filenames);
|
||||
expect(uniqueFilenames.size).toBe(filenames.length);
|
||||
});
|
||||
|
||||
it("phrases are unique", () => {
|
||||
const phrases = Object.values(NOTIFICATION_SOUNDS).map((s) => s.phrase);
|
||||
const uniquePhrases = new Set(phrases);
|
||||
expect(uniquePhrases.size).toBe(phrases.length);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,485 @@
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
import { get } from "svelte/store";
|
||||
import {
|
||||
configStore,
|
||||
isDarkTheme,
|
||||
isStreamerMode,
|
||||
isCompactMode,
|
||||
shouldHidePaths,
|
||||
maskPaths,
|
||||
clampFontSize,
|
||||
applyFontSize,
|
||||
applyTheme,
|
||||
applyCustomThemeColors,
|
||||
clearCustomThemeColors,
|
||||
MIN_FONT_SIZE,
|
||||
MAX_FONT_SIZE,
|
||||
DEFAULT_FONT_SIZE,
|
||||
type HikariConfig,
|
||||
type Theme,
|
||||
type CustomThemeColors,
|
||||
} from "./config";
|
||||
|
||||
// Mock Tauri APIs
|
||||
vi.mock("@tauri-apps/api/core", () => ({
|
||||
invoke: vi.fn(),
|
||||
}));
|
||||
|
||||
describe("config store", () => {
|
||||
describe("font size constants", () => {
|
||||
it("has correct MIN_FONT_SIZE", () => {
|
||||
expect(MIN_FONT_SIZE).toBe(10);
|
||||
});
|
||||
|
||||
it("has correct MAX_FONT_SIZE", () => {
|
||||
expect(MAX_FONT_SIZE).toBe(24);
|
||||
});
|
||||
|
||||
it("has correct DEFAULT_FONT_SIZE", () => {
|
||||
expect(DEFAULT_FONT_SIZE).toBe(14);
|
||||
});
|
||||
});
|
||||
|
||||
describe("clampFontSize", () => {
|
||||
it("returns the same value when within range", () => {
|
||||
expect(clampFontSize(14)).toBe(14);
|
||||
expect(clampFontSize(10)).toBe(10);
|
||||
expect(clampFontSize(24)).toBe(24);
|
||||
expect(clampFontSize(18)).toBe(18);
|
||||
});
|
||||
|
||||
it("clamps values below minimum", () => {
|
||||
expect(clampFontSize(5)).toBe(MIN_FONT_SIZE);
|
||||
expect(clampFontSize(0)).toBe(MIN_FONT_SIZE);
|
||||
expect(clampFontSize(-10)).toBe(MIN_FONT_SIZE);
|
||||
expect(clampFontSize(9)).toBe(MIN_FONT_SIZE);
|
||||
});
|
||||
|
||||
it("clamps values above maximum", () => {
|
||||
expect(clampFontSize(30)).toBe(MAX_FONT_SIZE);
|
||||
expect(clampFontSize(100)).toBe(MAX_FONT_SIZE);
|
||||
expect(clampFontSize(25)).toBe(MAX_FONT_SIZE);
|
||||
});
|
||||
});
|
||||
|
||||
describe("maskPaths", () => {
|
||||
it("returns text unchanged when hidePaths is false", () => {
|
||||
const text = "/home/naomi/code/project/file.ts";
|
||||
expect(maskPaths(text, false)).toBe(text);
|
||||
});
|
||||
|
||||
it("masks Unix home paths", () => {
|
||||
const text = "/home/naomi/code/project/file.ts";
|
||||
expect(maskPaths(text, true)).toBe("/home/****/code/project/file.ts");
|
||||
});
|
||||
|
||||
it("masks macOS user paths", () => {
|
||||
const text = "/Users/naomi/Documents/project/file.ts";
|
||||
expect(maskPaths(text, true)).toBe("/Users/****/Documents/project/file.ts");
|
||||
});
|
||||
|
||||
it("masks Windows user paths", () => {
|
||||
const text = "C:\\Users\\naomi\\Documents\\project\\file.ts";
|
||||
expect(maskPaths(text, true)).toBe("C:\\Users\\****\\Documents\\project\\file.ts");
|
||||
});
|
||||
|
||||
it("masks tilde paths", () => {
|
||||
const text = "~/code/project/file.ts";
|
||||
expect(maskPaths(text, true)).toBe("****/code/project/file.ts");
|
||||
});
|
||||
|
||||
it("masks multiple paths in the same text", () => {
|
||||
const text = "Editing /home/naomi/file1.ts and /home/naomi/file2.ts";
|
||||
expect(maskPaths(text, true)).toBe("Editing /home/****/file1.ts and /home/****/file2.ts");
|
||||
});
|
||||
|
||||
it("handles mixed path types", () => {
|
||||
const text = "Unix: /home/user/file, Mac: /Users/user/file, Win: C:\\Users\\user\\file";
|
||||
const expected = "Unix: /home/****/file, Mac: /Users/****/file, Win: C:\\Users\\****\\file";
|
||||
expect(maskPaths(text, true)).toBe(expected);
|
||||
});
|
||||
|
||||
it("handles paths with special characters in username", () => {
|
||||
const text = "/home/user-name_123/project";
|
||||
expect(maskPaths(text, true)).toBe("/home/****/project");
|
||||
});
|
||||
|
||||
it("does not mask non-path text", () => {
|
||||
const text = "This is just regular text without any paths";
|
||||
expect(maskPaths(text, true)).toBe(text);
|
||||
});
|
||||
|
||||
it("handles empty string", () => {
|
||||
expect(maskPaths("", true)).toBe("");
|
||||
expect(maskPaths("", false)).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Theme type", () => {
|
||||
it("accepts valid theme values", () => {
|
||||
const themes: Theme[] = ["dark", "light", "high-contrast", "custom"];
|
||||
themes.forEach((theme) => {
|
||||
expect(["dark", "light", "high-contrast", "custom"]).toContain(theme);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("CustomThemeColors interface", () => {
|
||||
it("can create a valid custom theme colors object", () => {
|
||||
const colors: CustomThemeColors = {
|
||||
bg_primary: "#1a1a2e",
|
||||
bg_secondary: "#16213e",
|
||||
bg_terminal: "#0f0f23",
|
||||
accent_primary: "#e94560",
|
||||
accent_secondary: "#533483",
|
||||
text_primary: "#eaeaea",
|
||||
text_secondary: "#a0a0a0",
|
||||
border_color: "#333355",
|
||||
};
|
||||
|
||||
expect(colors.bg_primary).toBe("#1a1a2e");
|
||||
expect(colors.accent_primary).toBe("#e94560");
|
||||
});
|
||||
|
||||
it("allows null values for optional colors", () => {
|
||||
const colors: CustomThemeColors = {
|
||||
bg_primary: null,
|
||||
bg_secondary: null,
|
||||
bg_terminal: null,
|
||||
accent_primary: "#e94560",
|
||||
accent_secondary: null,
|
||||
text_primary: null,
|
||||
text_secondary: null,
|
||||
border_color: null,
|
||||
};
|
||||
|
||||
expect(colors.bg_primary).toBeNull();
|
||||
expect(colors.accent_primary).toBe("#e94560");
|
||||
});
|
||||
});
|
||||
|
||||
describe("HikariConfig interface", () => {
|
||||
it("can create a valid config object with all fields", () => {
|
||||
const config: HikariConfig = {
|
||||
model: "claude-sonnet-4",
|
||||
api_key: "test-key",
|
||||
custom_instructions: "Be helpful",
|
||||
mcp_servers_json: "{}",
|
||||
auto_granted_tools: ["Read", "Write"],
|
||||
theme: "dark",
|
||||
greeting_enabled: true,
|
||||
greeting_custom_prompt: "Hello!",
|
||||
notifications_enabled: true,
|
||||
notification_volume: 0.7,
|
||||
always_on_top: false,
|
||||
minimize_to_tray: true,
|
||||
update_checks_enabled: true,
|
||||
character_panel_width: 300,
|
||||
font_size: 14,
|
||||
streamer_mode: false,
|
||||
streamer_hide_paths: false,
|
||||
compact_mode: false,
|
||||
profile_name: "Naomi",
|
||||
profile_avatar_path: "/path/to/avatar.png",
|
||||
profile_bio: "Developer",
|
||||
custom_theme_colors: {
|
||||
bg_primary: null,
|
||||
bg_secondary: null,
|
||||
bg_terminal: null,
|
||||
accent_primary: null,
|
||||
accent_secondary: null,
|
||||
text_primary: null,
|
||||
text_secondary: null,
|
||||
border_color: null,
|
||||
},
|
||||
};
|
||||
|
||||
expect(config.model).toBe("claude-sonnet-4");
|
||||
expect(config.auto_granted_tools).toEqual(["Read", "Write"]);
|
||||
expect(config.theme).toBe("dark");
|
||||
});
|
||||
|
||||
it("allows null values for optional fields", () => {
|
||||
const config: HikariConfig = {
|
||||
model: null,
|
||||
api_key: null,
|
||||
custom_instructions: null,
|
||||
mcp_servers_json: null,
|
||||
auto_granted_tools: [],
|
||||
theme: "dark",
|
||||
greeting_enabled: true,
|
||||
greeting_custom_prompt: null,
|
||||
notifications_enabled: true,
|
||||
notification_volume: 0.7,
|
||||
always_on_top: false,
|
||||
minimize_to_tray: false,
|
||||
update_checks_enabled: true,
|
||||
character_panel_width: null,
|
||||
font_size: 14,
|
||||
streamer_mode: false,
|
||||
streamer_hide_paths: false,
|
||||
compact_mode: false,
|
||||
profile_name: null,
|
||||
profile_avatar_path: null,
|
||||
profile_bio: null,
|
||||
custom_theme_colors: {
|
||||
bg_primary: null,
|
||||
bg_secondary: null,
|
||||
bg_terminal: null,
|
||||
accent_primary: null,
|
||||
accent_secondary: null,
|
||||
text_primary: null,
|
||||
text_secondary: null,
|
||||
border_color: null,
|
||||
},
|
||||
};
|
||||
|
||||
expect(config.model).toBeNull();
|
||||
expect(config.api_key).toBeNull();
|
||||
expect(config.character_panel_width).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("applyFontSize", () => {
|
||||
beforeEach(() => {
|
||||
// Reset document state
|
||||
if (typeof document !== "undefined") {
|
||||
document.documentElement.style.removeProperty("--terminal-font-size");
|
||||
}
|
||||
});
|
||||
|
||||
it("sets CSS variable for valid font size", () => {
|
||||
applyFontSize(16);
|
||||
const value = document.documentElement.style.getPropertyValue("--terminal-font-size");
|
||||
expect(value).toBe("16px");
|
||||
});
|
||||
|
||||
it("clamps font size below minimum", () => {
|
||||
applyFontSize(5);
|
||||
const value = document.documentElement.style.getPropertyValue("--terminal-font-size");
|
||||
expect(value).toBe(`${MIN_FONT_SIZE}px`);
|
||||
});
|
||||
|
||||
it("clamps font size above maximum", () => {
|
||||
applyFontSize(50);
|
||||
const value = document.documentElement.style.getPropertyValue("--terminal-font-size");
|
||||
expect(value).toBe(`${MAX_FONT_SIZE}px`);
|
||||
});
|
||||
});
|
||||
|
||||
describe("applyTheme", () => {
|
||||
beforeEach(() => {
|
||||
// Reset document state
|
||||
if (typeof document !== "undefined") {
|
||||
document.documentElement.removeAttribute("data-theme");
|
||||
clearCustomThemeColors();
|
||||
}
|
||||
});
|
||||
|
||||
it("sets data-theme attribute for dark theme", () => {
|
||||
applyTheme("dark");
|
||||
expect(document.documentElement.getAttribute("data-theme")).toBe("dark");
|
||||
});
|
||||
|
||||
it("sets data-theme attribute for light theme", () => {
|
||||
applyTheme("light");
|
||||
expect(document.documentElement.getAttribute("data-theme")).toBe("light");
|
||||
});
|
||||
|
||||
it("sets data-theme attribute for high-contrast theme", () => {
|
||||
applyTheme("high-contrast");
|
||||
expect(document.documentElement.getAttribute("data-theme")).toBe("high-contrast");
|
||||
});
|
||||
|
||||
it("uses dark as base for custom theme", () => {
|
||||
applyTheme("custom");
|
||||
expect(document.documentElement.getAttribute("data-theme")).toBe("dark");
|
||||
});
|
||||
|
||||
it("applies custom colors when theme is custom", () => {
|
||||
const colors: CustomThemeColors = {
|
||||
bg_primary: "#1a1a2e",
|
||||
bg_secondary: null,
|
||||
bg_terminal: null,
|
||||
accent_primary: "#e94560",
|
||||
accent_secondary: null,
|
||||
text_primary: null,
|
||||
text_secondary: null,
|
||||
border_color: null,
|
||||
};
|
||||
|
||||
applyTheme("custom", colors);
|
||||
|
||||
expect(document.documentElement.style.getPropertyValue("--bg-primary")).toBe("#1a1a2e");
|
||||
expect(document.documentElement.style.getPropertyValue("--accent-primary")).toBe("#e94560");
|
||||
});
|
||||
|
||||
it("does not apply custom colors for non-custom themes", () => {
|
||||
const colors: CustomThemeColors = {
|
||||
bg_primary: "#1a1a2e",
|
||||
bg_secondary: null,
|
||||
bg_terminal: null,
|
||||
accent_primary: null,
|
||||
accent_secondary: null,
|
||||
text_primary: null,
|
||||
text_secondary: null,
|
||||
border_color: null,
|
||||
};
|
||||
|
||||
applyTheme("dark", colors);
|
||||
|
||||
expect(document.documentElement.style.getPropertyValue("--bg-primary")).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("applyCustomThemeColors", () => {
|
||||
beforeEach(() => {
|
||||
clearCustomThemeColors();
|
||||
});
|
||||
|
||||
it("applies all provided colors", () => {
|
||||
const colors: CustomThemeColors = {
|
||||
bg_primary: "#111111",
|
||||
bg_secondary: "#222222",
|
||||
bg_terminal: "#333333",
|
||||
accent_primary: "#444444",
|
||||
accent_secondary: "#555555",
|
||||
text_primary: "#666666",
|
||||
text_secondary: "#777777",
|
||||
border_color: "#888888",
|
||||
};
|
||||
|
||||
applyCustomThemeColors(colors);
|
||||
|
||||
expect(document.documentElement.style.getPropertyValue("--bg-primary")).toBe("#111111");
|
||||
expect(document.documentElement.style.getPropertyValue("--bg-secondary")).toBe("#222222");
|
||||
expect(document.documentElement.style.getPropertyValue("--bg-terminal")).toBe("#333333");
|
||||
expect(document.documentElement.style.getPropertyValue("--accent-primary")).toBe("#444444");
|
||||
expect(document.documentElement.style.getPropertyValue("--accent-secondary")).toBe("#555555");
|
||||
expect(document.documentElement.style.getPropertyValue("--text-primary")).toBe("#666666");
|
||||
expect(document.documentElement.style.getPropertyValue("--text-secondary")).toBe("#777777");
|
||||
expect(document.documentElement.style.getPropertyValue("--border-color")).toBe("#888888");
|
||||
});
|
||||
|
||||
it("skips null values", () => {
|
||||
const colors: CustomThemeColors = {
|
||||
bg_primary: "#111111",
|
||||
bg_secondary: null,
|
||||
bg_terminal: null,
|
||||
accent_primary: null,
|
||||
accent_secondary: null,
|
||||
text_primary: null,
|
||||
text_secondary: null,
|
||||
border_color: null,
|
||||
};
|
||||
|
||||
applyCustomThemeColors(colors);
|
||||
|
||||
expect(document.documentElement.style.getPropertyValue("--bg-primary")).toBe("#111111");
|
||||
expect(document.documentElement.style.getPropertyValue("--bg-secondary")).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("clearCustomThemeColors", () => {
|
||||
it("removes all custom theme CSS properties", () => {
|
||||
// First apply some colors
|
||||
const colors: CustomThemeColors = {
|
||||
bg_primary: "#111111",
|
||||
bg_secondary: "#222222",
|
||||
bg_terminal: "#333333",
|
||||
accent_primary: "#444444",
|
||||
accent_secondary: "#555555",
|
||||
text_primary: "#666666",
|
||||
text_secondary: "#777777",
|
||||
border_color: "#888888",
|
||||
};
|
||||
applyCustomThemeColors(colors);
|
||||
|
||||
// Then clear them
|
||||
clearCustomThemeColors();
|
||||
|
||||
expect(document.documentElement.style.getPropertyValue("--bg-primary")).toBe("");
|
||||
expect(document.documentElement.style.getPropertyValue("--bg-secondary")).toBe("");
|
||||
expect(document.documentElement.style.getPropertyValue("--bg-terminal")).toBe("");
|
||||
expect(document.documentElement.style.getPropertyValue("--accent-primary")).toBe("");
|
||||
expect(document.documentElement.style.getPropertyValue("--accent-secondary")).toBe("");
|
||||
expect(document.documentElement.style.getPropertyValue("--text-primary")).toBe("");
|
||||
expect(document.documentElement.style.getPropertyValue("--text-secondary")).toBe("");
|
||||
expect(document.documentElement.style.getPropertyValue("--border-color")).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("derived stores", () => {
|
||||
// Note: These tests verify the derived store logic by testing the derivation functions
|
||||
// The actual stores depend on configStore which requires Tauri invoke mocking
|
||||
|
||||
it("isDarkTheme returns true for dark theme config", () => {
|
||||
// Test the derivation logic
|
||||
const darkConfig = { theme: "dark" as Theme };
|
||||
expect(darkConfig.theme === "dark").toBe(true);
|
||||
});
|
||||
|
||||
it("isDarkTheme returns false for light theme config", () => {
|
||||
const lightConfig = { theme: "light" as Theme };
|
||||
expect(lightConfig.theme === "dark").toBe(false);
|
||||
});
|
||||
|
||||
it("isStreamerMode derives from streamer_mode config", () => {
|
||||
const configWithStreamerMode = { streamer_mode: true };
|
||||
const configWithoutStreamerMode = { streamer_mode: false };
|
||||
|
||||
expect(configWithStreamerMode.streamer_mode).toBe(true);
|
||||
expect(configWithoutStreamerMode.streamer_mode).toBe(false);
|
||||
});
|
||||
|
||||
it("isCompactMode derives from compact_mode config", () => {
|
||||
const configWithCompactMode = { compact_mode: true };
|
||||
const configWithoutCompactMode = { compact_mode: false };
|
||||
|
||||
expect(configWithCompactMode.compact_mode).toBe(true);
|
||||
expect(configWithoutCompactMode.compact_mode).toBe(false);
|
||||
});
|
||||
|
||||
it("shouldHidePaths requires both streamer_mode and streamer_hide_paths", () => {
|
||||
const config1 = { streamer_mode: true, streamer_hide_paths: true };
|
||||
const config2 = { streamer_mode: true, streamer_hide_paths: false };
|
||||
const config3 = { streamer_mode: false, streamer_hide_paths: true };
|
||||
const config4 = { streamer_mode: false, streamer_hide_paths: false };
|
||||
|
||||
expect(config1.streamer_mode && config1.streamer_hide_paths).toBe(true);
|
||||
expect(config2.streamer_mode && config2.streamer_hide_paths).toBe(false);
|
||||
expect(config3.streamer_mode && config3.streamer_hide_paths).toBe(false);
|
||||
expect(config4.streamer_mode && config4.streamer_hide_paths).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("configStore methods", () => {
|
||||
it("has all expected methods", () => {
|
||||
expect(typeof configStore.loadConfig).toBe("function");
|
||||
expect(typeof configStore.saveConfig).toBe("function");
|
||||
expect(typeof configStore.updateConfig).toBe("function");
|
||||
expect(typeof configStore.openSidebar).toBe("function");
|
||||
expect(typeof configStore.closeSidebar).toBe("function");
|
||||
expect(typeof configStore.toggleSidebar).toBe("function");
|
||||
expect(typeof configStore.setTheme).toBe("function");
|
||||
expect(typeof configStore.setCustomThemeColors).toBe("function");
|
||||
expect(typeof configStore.setFontSize).toBe("function");
|
||||
expect(typeof configStore.increaseFontSize).toBe("function");
|
||||
expect(typeof configStore.decreaseFontSize).toBe("function");
|
||||
expect(typeof configStore.resetFontSize).toBe("function");
|
||||
expect(typeof configStore.addAutoGrantedTool).toBe("function");
|
||||
expect(typeof configStore.removeAutoGrantedTool).toBe("function");
|
||||
expect(typeof configStore.getConfig).toBe("function");
|
||||
expect(typeof configStore.toggleStreamerMode).toBe("function");
|
||||
expect(typeof configStore.toggleCompactMode).toBe("function");
|
||||
expect(typeof configStore.setCompactMode).toBe("function");
|
||||
});
|
||||
|
||||
it("has subscribable stores", () => {
|
||||
expect(typeof configStore.config.subscribe).toBe("function");
|
||||
expect(typeof configStore.isLoading.subscribe).toBe("function");
|
||||
expect(typeof configStore.isSidebarOpen.subscribe).toBe("function");
|
||||
expect(typeof configStore.saveError.subscribe).toBe("function");
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,346 @@
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
import { get } from "svelte/store";
|
||||
import { stats, formattedStats, resetSessionStats } from "./stats";
|
||||
import type { UsageStats } from "./stats";
|
||||
|
||||
// Mock Tauri APIs
|
||||
vi.mock("@tauri-apps/api/event", () => ({
|
||||
listen: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@tauri-apps/api/core", () => ({
|
||||
invoke: vi.fn(),
|
||||
}));
|
||||
|
||||
describe("stats store", () => {
|
||||
beforeEach(() => {
|
||||
// Reset stats to default before each test
|
||||
stats.set({
|
||||
total_input_tokens: 0,
|
||||
total_output_tokens: 0,
|
||||
total_cost_usd: 0,
|
||||
session_input_tokens: 0,
|
||||
session_output_tokens: 0,
|
||||
session_cost_usd: 0,
|
||||
model: null,
|
||||
messages_exchanged: 0,
|
||||
session_messages_exchanged: 0,
|
||||
code_blocks_generated: 0,
|
||||
session_code_blocks_generated: 0,
|
||||
files_edited: 0,
|
||||
session_files_edited: 0,
|
||||
files_created: 0,
|
||||
session_files_created: 0,
|
||||
tools_usage: {},
|
||||
session_tools_usage: {},
|
||||
session_duration_seconds: 0,
|
||||
});
|
||||
});
|
||||
|
||||
describe("stats writable store", () => {
|
||||
it("has correct default values", () => {
|
||||
const currentStats = get(stats);
|
||||
expect(currentStats.total_input_tokens).toBe(0);
|
||||
expect(currentStats.total_output_tokens).toBe(0);
|
||||
expect(currentStats.total_cost_usd).toBe(0);
|
||||
expect(currentStats.model).toBeNull();
|
||||
});
|
||||
|
||||
it("can be updated with set", () => {
|
||||
const newStats: UsageStats = {
|
||||
total_input_tokens: 1000,
|
||||
total_output_tokens: 2000,
|
||||
total_cost_usd: 0.05,
|
||||
session_input_tokens: 500,
|
||||
session_output_tokens: 1000,
|
||||
session_cost_usd: 0.025,
|
||||
model: "claude-sonnet-4",
|
||||
messages_exchanged: 10,
|
||||
session_messages_exchanged: 5,
|
||||
code_blocks_generated: 3,
|
||||
session_code_blocks_generated: 2,
|
||||
files_edited: 5,
|
||||
session_files_edited: 2,
|
||||
files_created: 1,
|
||||
session_files_created: 1,
|
||||
tools_usage: { Read: 5, Edit: 3 },
|
||||
session_tools_usage: { Read: 2, Edit: 1 },
|
||||
session_duration_seconds: 300,
|
||||
};
|
||||
|
||||
stats.set(newStats);
|
||||
const currentStats = get(stats);
|
||||
|
||||
expect(currentStats.total_input_tokens).toBe(1000);
|
||||
expect(currentStats.total_output_tokens).toBe(2000);
|
||||
expect(currentStats.model).toBe("claude-sonnet-4");
|
||||
expect(currentStats.tools_usage).toEqual({ Read: 5, Edit: 3 });
|
||||
});
|
||||
|
||||
it("can be updated with update function", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
total_input_tokens: 500,
|
||||
session_messages_exchanged: 3,
|
||||
}));
|
||||
|
||||
const currentStats = get(stats);
|
||||
expect(currentStats.total_input_tokens).toBe(500);
|
||||
expect(currentStats.session_messages_exchanged).toBe(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe("resetSessionStats", () => {
|
||||
it("resets all session fields to zero", () => {
|
||||
// First set some values
|
||||
stats.set({
|
||||
total_input_tokens: 1000,
|
||||
total_output_tokens: 2000,
|
||||
total_cost_usd: 0.05,
|
||||
session_input_tokens: 500,
|
||||
session_output_tokens: 1000,
|
||||
session_cost_usd: 0.025,
|
||||
model: "claude-sonnet-4",
|
||||
messages_exchanged: 10,
|
||||
session_messages_exchanged: 5,
|
||||
code_blocks_generated: 3,
|
||||
session_code_blocks_generated: 2,
|
||||
files_edited: 5,
|
||||
session_files_edited: 2,
|
||||
files_created: 1,
|
||||
session_files_created: 1,
|
||||
tools_usage: { Read: 5, Edit: 3 },
|
||||
session_tools_usage: { Read: 2, Edit: 1 },
|
||||
session_duration_seconds: 300,
|
||||
});
|
||||
|
||||
// Reset session stats
|
||||
resetSessionStats();
|
||||
|
||||
const currentStats = get(stats);
|
||||
|
||||
// Total stats should be preserved
|
||||
expect(currentStats.total_input_tokens).toBe(1000);
|
||||
expect(currentStats.total_output_tokens).toBe(2000);
|
||||
expect(currentStats.total_cost_usd).toBe(0.05);
|
||||
expect(currentStats.messages_exchanged).toBe(10);
|
||||
expect(currentStats.code_blocks_generated).toBe(3);
|
||||
expect(currentStats.files_edited).toBe(5);
|
||||
expect(currentStats.files_created).toBe(1);
|
||||
expect(currentStats.tools_usage).toEqual({ Read: 5, Edit: 3 });
|
||||
expect(currentStats.model).toBe("claude-sonnet-4");
|
||||
|
||||
// Session stats should be reset
|
||||
expect(currentStats.session_input_tokens).toBe(0);
|
||||
expect(currentStats.session_output_tokens).toBe(0);
|
||||
expect(currentStats.session_cost_usd).toBe(0);
|
||||
expect(currentStats.session_messages_exchanged).toBe(0);
|
||||
expect(currentStats.session_code_blocks_generated).toBe(0);
|
||||
expect(currentStats.session_files_edited).toBe(0);
|
||||
expect(currentStats.session_files_created).toBe(0);
|
||||
expect(currentStats.session_tools_usage).toEqual({});
|
||||
expect(currentStats.session_duration_seconds).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("formattedStats derived store", () => {
|
||||
it("formats token numbers with locale string", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
total_input_tokens: 1234567,
|
||||
total_output_tokens: 7654321,
|
||||
session_input_tokens: 12345,
|
||||
session_output_tokens: 54321,
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
|
||||
expect(formatted.totalTokens).toBe("8,888,888");
|
||||
expect(formatted.totalInputTokens).toBe("1,234,567");
|
||||
expect(formatted.totalOutputTokens).toBe("7,654,321");
|
||||
expect(formatted.sessionTokens).toBe("66,666");
|
||||
expect(formatted.sessionInputTokens).toBe("12,345");
|
||||
expect(formatted.sessionOutputTokens).toBe("54,321");
|
||||
});
|
||||
|
||||
it("formats cost with 4 decimal places", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
total_cost_usd: 1.23456,
|
||||
session_cost_usd: 0.00123,
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
|
||||
expect(formatted.totalCost).toBe("$1.2346");
|
||||
expect(formatted.sessionCost).toBe("$0.0012");
|
||||
});
|
||||
|
||||
it("formats duration seconds only", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
session_duration_seconds: 45,
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
expect(formatted.sessionDuration).toBe("45s");
|
||||
});
|
||||
|
||||
it("formats duration minutes and seconds", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
session_duration_seconds: 125, // 2m 5s
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
expect(formatted.sessionDuration).toBe("2m 5s");
|
||||
});
|
||||
|
||||
it("formats duration hours, minutes, and seconds", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
session_duration_seconds: 3725, // 1h 2m 5s
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
expect(formatted.sessionDuration).toBe("1h 2m 5s");
|
||||
});
|
||||
|
||||
it("formats duration with zero seconds", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
session_duration_seconds: 3600, // exactly 1h
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
expect(formatted.sessionDuration).toBe("1h 0m 0s");
|
||||
});
|
||||
|
||||
it("shows model name when available", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
model: "claude-opus-4-5",
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
expect(formatted.model).toBe("claude-opus-4-5");
|
||||
});
|
||||
|
||||
it("shows placeholder when model is null", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
model: null,
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
expect(formatted.model).toBe("No model selected");
|
||||
});
|
||||
|
||||
it("formats message counts", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
messages_exchanged: 100,
|
||||
session_messages_exchanged: 10,
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
expect(formatted.messagesTotal).toBe("100");
|
||||
expect(formatted.messagesSession).toBe("10");
|
||||
});
|
||||
|
||||
it("formats code block counts", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
code_blocks_generated: 50,
|
||||
session_code_blocks_generated: 5,
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
expect(formatted.codeBlocksTotal).toBe("50");
|
||||
expect(formatted.codeBlocksSession).toBe("5");
|
||||
});
|
||||
|
||||
it("formats file counts", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
files_edited: 25,
|
||||
session_files_edited: 3,
|
||||
files_created: 10,
|
||||
session_files_created: 2,
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
expect(formatted.filesEditedTotal).toBe("25");
|
||||
expect(formatted.filesEditedSession).toBe("3");
|
||||
expect(formatted.filesCreatedTotal).toBe("10");
|
||||
expect(formatted.filesCreatedSession).toBe("2");
|
||||
});
|
||||
|
||||
it("exposes tools usage directly", () => {
|
||||
const toolsUsage = { Read: 10, Edit: 5, Write: 3 };
|
||||
const sessionToolsUsage = { Read: 2, Edit: 1 };
|
||||
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
tools_usage: toolsUsage,
|
||||
session_tools_usage: sessionToolsUsage,
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
expect(formatted.toolsUsage).toEqual(toolsUsage);
|
||||
expect(formatted.sessionToolsUsage).toEqual(sessionToolsUsage);
|
||||
});
|
||||
|
||||
it("handles zero values correctly", () => {
|
||||
const formatted = get(formattedStats);
|
||||
|
||||
expect(formatted.totalTokens).toBe("0");
|
||||
expect(formatted.totalCost).toBe("$0.0000");
|
||||
expect(formatted.sessionDuration).toBe("0s");
|
||||
expect(formatted.messagesTotal).toBe("0");
|
||||
});
|
||||
|
||||
it("handles large numbers with proper formatting", () => {
|
||||
stats.update((current) => ({
|
||||
...current,
|
||||
total_input_tokens: 1000000000, // 1 billion
|
||||
messages_exchanged: 999999,
|
||||
}));
|
||||
|
||||
const formatted = get(formattedStats);
|
||||
expect(formatted.totalInputTokens).toBe("1,000,000,000");
|
||||
expect(formatted.messagesTotal).toBe("999,999");
|
||||
});
|
||||
});
|
||||
|
||||
describe("UsageStats interface", () => {
|
||||
it("supports all expected fields", () => {
|
||||
const fullStats: UsageStats = {
|
||||
total_input_tokens: 100,
|
||||
total_output_tokens: 200,
|
||||
total_cost_usd: 0.01,
|
||||
session_input_tokens: 50,
|
||||
session_output_tokens: 100,
|
||||
session_cost_usd: 0.005,
|
||||
model: "test-model",
|
||||
messages_exchanged: 5,
|
||||
session_messages_exchanged: 2,
|
||||
code_blocks_generated: 3,
|
||||
session_code_blocks_generated: 1,
|
||||
files_edited: 2,
|
||||
session_files_edited: 1,
|
||||
files_created: 1,
|
||||
session_files_created: 0,
|
||||
tools_usage: { Read: 3 },
|
||||
session_tools_usage: { Read: 1 },
|
||||
session_duration_seconds: 60,
|
||||
};
|
||||
|
||||
stats.set(fullStats);
|
||||
const currentStats = get(stats);
|
||||
|
||||
// Verify all fields are present and correct
|
||||
expect(currentStats).toEqual(fullStats);
|
||||
});
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user