generated from nhcarrigan/template
feat: add tests and assert coverage (#71)
### Explanation _No response_ ### Issue _No response_ ### Attestations - [ ] I have read and agree to the [Code of Conduct](https://docs.nhcarrigan.com/community/coc/) - [ ] I have read and agree to the [Community Guidelines](https://docs.nhcarrigan.com/community/guide/). - [ ] My contribution complies with the [Contributor Covenant](https://docs.nhcarrigan.com/dev/covenant/). ### Dependencies - [ ] I have pinned the dependencies to a specific patch version. ### Style - [ ] I have run the linter and resolved any errors. - [ ] My pull request uses an appropriate title, matching the conventional commit standards. - [ ] My scope of feat/fix/chore/etc. correctly matches the nature of changes in my pull request. ### Tests - [ ] My contribution adds new code, and I have added tests to cover it. - [ ] My contribution modifies existing code, and I have updated the tests to reflect these changes. - [ ] All new and existing tests pass locally with my changes. - [ ] Code coverage remains at or above the configured threshold. ### Documentation _No response_ ### Versioning _No response_ Co-authored-by: Hikari <hikari@nhcarrigan.com> Reviewed-on: #71 Co-authored-by: Naomi Carrigan <commits@nhcarrigan.com> Co-committed-by: Naomi Carrigan <commits@nhcarrigan.com>
This commit was merged in pull request #71.
This commit is contained in:
File diff suppressed because it is too large
Load Diff
@@ -257,3 +257,468 @@ pub fn update_clipboard_language(
|
||||
save_history(&app, &history)?;
|
||||
Ok(updated_entry)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
// ==================== ClipboardEntry tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_new() {
|
||||
let entry = ClipboardEntry::new(
|
||||
"let x = 42;".to_string(),
|
||||
Some("rust".to_string()),
|
||||
Some("main.rs".to_string()),
|
||||
);
|
||||
|
||||
assert_eq!(entry.content, "let x = 42;");
|
||||
assert_eq!(entry.language, Some("rust".to_string()));
|
||||
assert_eq!(entry.source, Some("main.rs".to_string()));
|
||||
assert!(!entry.is_pinned);
|
||||
assert!(!entry.id.is_empty());
|
||||
assert!(!entry.timestamp.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_new_without_optional_fields() {
|
||||
let entry = ClipboardEntry::new("some content".to_string(), None, None);
|
||||
|
||||
assert_eq!(entry.content, "some content");
|
||||
assert!(entry.language.is_none());
|
||||
assert!(entry.source.is_none());
|
||||
assert!(!entry.is_pinned);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_unique_ids() {
|
||||
let entry1 = ClipboardEntry::new("content1".to_string(), None, None);
|
||||
let entry2 = ClipboardEntry::new("content2".to_string(), None, None);
|
||||
|
||||
assert_ne!(entry1.id, entry2.id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_serialization() {
|
||||
let entry = ClipboardEntry::new(
|
||||
"fn main() {}".to_string(),
|
||||
Some("rust".to_string()),
|
||||
Some("lib.rs".to_string()),
|
||||
);
|
||||
|
||||
let json = serde_json::to_string(&entry).unwrap();
|
||||
assert!(json.contains("fn main() {}"));
|
||||
assert!(json.contains("rust"));
|
||||
assert!(json.contains("lib.rs"));
|
||||
assert!(json.contains("is_pinned"));
|
||||
|
||||
let deserialized: ClipboardEntry = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(deserialized.content, entry.content);
|
||||
assert_eq!(deserialized.language, entry.language);
|
||||
assert_eq!(deserialized.source, entry.source);
|
||||
assert_eq!(deserialized.id, entry.id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_clone() {
|
||||
let entry = ClipboardEntry::new(
|
||||
"original".to_string(),
|
||||
Some("python".to_string()),
|
||||
None,
|
||||
);
|
||||
|
||||
let cloned = entry.clone();
|
||||
assert_eq!(cloned.content, entry.content);
|
||||
assert_eq!(cloned.id, entry.id);
|
||||
assert_eq!(cloned.language, entry.language);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_timestamp_is_rfc3339() {
|
||||
let entry = ClipboardEntry::new("test".to_string(), None, None);
|
||||
|
||||
// RFC3339 timestamp should parse successfully
|
||||
let parsed = chrono::DateTime::parse_from_rfc3339(&entry.timestamp);
|
||||
assert!(parsed.is_ok());
|
||||
}
|
||||
|
||||
// ==================== ClipboardHistory tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_history_default() {
|
||||
let history = ClipboardHistory::default();
|
||||
assert!(history.entries.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_history_serialization() {
|
||||
let mut history = ClipboardHistory::default();
|
||||
history.entries.push(ClipboardEntry::new(
|
||||
"entry1".to_string(),
|
||||
Some("js".to_string()),
|
||||
None,
|
||||
));
|
||||
history.entries.push(ClipboardEntry::new(
|
||||
"entry2".to_string(),
|
||||
None,
|
||||
Some("file.txt".to_string()),
|
||||
));
|
||||
|
||||
let json = serde_json::to_string(&history).unwrap();
|
||||
assert!(json.contains("entry1"));
|
||||
assert!(json.contains("entry2"));
|
||||
assert!(json.contains("js"));
|
||||
assert!(json.contains("file.txt"));
|
||||
|
||||
let deserialized: ClipboardHistory = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(deserialized.entries.len(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_history_entries_order() {
|
||||
let mut history = ClipboardHistory::default();
|
||||
|
||||
history.entries.push(ClipboardEntry::new("first".to_string(), None, None));
|
||||
history.entries.push(ClipboardEntry::new("second".to_string(), None, None));
|
||||
history.entries.push(ClipboardEntry::new("third".to_string(), None, None));
|
||||
|
||||
assert_eq!(history.entries[0].content, "first");
|
||||
assert_eq!(history.entries[1].content, "second");
|
||||
assert_eq!(history.entries[2].content, "third");
|
||||
}
|
||||
|
||||
// ==================== ClipboardState tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_state_default() {
|
||||
let state = ClipboardState::default();
|
||||
assert!(state.last_content.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_state_with_content() {
|
||||
let state = ClipboardState {
|
||||
last_content: Some("cached content".to_string()),
|
||||
};
|
||||
assert_eq!(state.last_content, Some("cached content".to_string()));
|
||||
}
|
||||
|
||||
// ==================== MAX_HISTORY_SIZE constant test ====================
|
||||
|
||||
#[test]
|
||||
fn test_max_history_size_is_reasonable() {
|
||||
assert_eq!(MAX_HISTORY_SIZE, 100);
|
||||
// Compile-time assertions for constant bounds
|
||||
const _: () = assert!(MAX_HISTORY_SIZE > 0);
|
||||
const _: () = assert!(MAX_HISTORY_SIZE <= 1000); // Sanity check
|
||||
}
|
||||
|
||||
// ==================== Pinned entry sorting tests ====================
|
||||
|
||||
#[test]
|
||||
#[allow(clippy::useless_vec)]
|
||||
fn test_pinned_entries_sorting() {
|
||||
let mut entries = vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "unpinned older".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-01T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "pinned".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-02T00:00:00Z".to_string(),
|
||||
is_pinned: true,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "unpinned newer".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-03T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
];
|
||||
|
||||
// Apply the same sorting logic as used in the module
|
||||
entries.sort_by(|a, b| {
|
||||
if a.is_pinned && !b.is_pinned {
|
||||
std::cmp::Ordering::Less
|
||||
} else if !a.is_pinned && b.is_pinned {
|
||||
std::cmp::Ordering::Greater
|
||||
} else {
|
||||
b.timestamp.cmp(&a.timestamp)
|
||||
}
|
||||
});
|
||||
|
||||
// Pinned should be first
|
||||
assert!(entries[0].is_pinned);
|
||||
assert_eq!(entries[0].id, "2");
|
||||
|
||||
// Then unpinned sorted by timestamp descending (newest first)
|
||||
assert_eq!(entries[1].id, "3"); // newer unpinned
|
||||
assert_eq!(entries[2].id, "1"); // older unpinned
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[allow(clippy::useless_vec)]
|
||||
fn test_multiple_pinned_entries_sorting() {
|
||||
let mut entries = vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "pinned older".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-01T00:00:00Z".to_string(),
|
||||
is_pinned: true,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "unpinned".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-02T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "pinned newer".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-03T00:00:00Z".to_string(),
|
||||
is_pinned: true,
|
||||
},
|
||||
];
|
||||
|
||||
entries.sort_by(|a, b| {
|
||||
if a.is_pinned && !b.is_pinned {
|
||||
std::cmp::Ordering::Less
|
||||
} else if !a.is_pinned && b.is_pinned {
|
||||
std::cmp::Ordering::Greater
|
||||
} else {
|
||||
b.timestamp.cmp(&a.timestamp)
|
||||
}
|
||||
});
|
||||
|
||||
// Both pinned first, sorted by timestamp
|
||||
assert!(entries[0].is_pinned);
|
||||
assert_eq!(entries[0].id, "3"); // pinned newer
|
||||
assert!(entries[1].is_pinned);
|
||||
assert_eq!(entries[1].id, "1"); // pinned older
|
||||
// Then unpinned
|
||||
assert!(!entries[2].is_pinned);
|
||||
assert_eq!(entries[2].id, "2");
|
||||
}
|
||||
|
||||
// ==================== Entry filtering tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_filter_entries_by_language() {
|
||||
let history = ClipboardHistory {
|
||||
entries: vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "rust code".to_string(),
|
||||
language: Some("rust".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-01T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "js code".to_string(),
|
||||
language: Some("javascript".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-02T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "more rust".to_string(),
|
||||
language: Some("rust".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-03T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let filtered: Vec<_> = history
|
||||
.entries
|
||||
.iter()
|
||||
.filter(|e| e.language.as_ref() == Some(&"rust".to_string()))
|
||||
.collect();
|
||||
|
||||
assert_eq!(filtered.len(), 2);
|
||||
assert!(filtered.iter().all(|e| e.language == Some("rust".to_string())));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_search_entries_by_content() {
|
||||
let history = ClipboardHistory {
|
||||
entries: vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "fn hello_world()".to_string(),
|
||||
language: Some("rust".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-01T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "function hello()".to_string(),
|
||||
language: Some("javascript".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-02T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "def goodbye()".to_string(),
|
||||
language: Some("python".to_string()),
|
||||
source: None,
|
||||
timestamp: "2024-01-03T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let query = "hello";
|
||||
let query_lower = query.to_lowercase();
|
||||
let filtered: Vec<_> = history
|
||||
.entries
|
||||
.iter()
|
||||
.filter(|e| e.content.to_lowercase().contains(&query_lower))
|
||||
.collect();
|
||||
|
||||
assert_eq!(filtered.len(), 2);
|
||||
assert!(filtered[0].content.contains("hello"));
|
||||
assert!(filtered[1].content.contains("hello"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_search_entries_case_insensitive() {
|
||||
let history = ClipboardHistory {
|
||||
entries: vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "HELLO WORLD".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "2024-01-01T00:00:00Z".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let query = "hello";
|
||||
let query_lower = query.to_lowercase();
|
||||
let filtered: Vec<_> = history
|
||||
.entries
|
||||
.iter()
|
||||
.filter(|e| e.content.to_lowercase().contains(&query_lower))
|
||||
.collect();
|
||||
|
||||
assert_eq!(filtered.len(), 1);
|
||||
}
|
||||
|
||||
// ==================== Unique languages extraction test ====================
|
||||
|
||||
#[test]
|
||||
fn test_extract_unique_languages() {
|
||||
let history = ClipboardHistory {
|
||||
entries: vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "".to_string(),
|
||||
language: Some("rust".to_string()),
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "".to_string(),
|
||||
language: Some("javascript".to_string()),
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "".to_string(),
|
||||
language: Some("rust".to_string()), // Duplicate
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "4".to_string(),
|
||||
content: "".to_string(),
|
||||
language: None, // No language
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let mut languages: Vec<String> = history
|
||||
.entries
|
||||
.iter()
|
||||
.filter_map(|e| e.language.clone())
|
||||
.collect();
|
||||
languages.sort();
|
||||
languages.dedup();
|
||||
|
||||
assert_eq!(languages.len(), 2);
|
||||
assert!(languages.contains(&"rust".to_string()));
|
||||
assert!(languages.contains(&"javascript".to_string()));
|
||||
}
|
||||
|
||||
// ==================== Retain pinned entries test ====================
|
||||
|
||||
#[test]
|
||||
fn test_retain_pinned_on_clear() {
|
||||
let mut history = ClipboardHistory {
|
||||
entries: vec![
|
||||
ClipboardEntry {
|
||||
id: "1".to_string(),
|
||||
content: "pinned".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: true,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "2".to_string(),
|
||||
content: "unpinned".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: false,
|
||||
},
|
||||
ClipboardEntry {
|
||||
id: "3".to_string(),
|
||||
content: "another pinned".to_string(),
|
||||
language: None,
|
||||
source: None,
|
||||
timestamp: "".to_string(),
|
||||
is_pinned: true,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
// Simulate clear (keep only pinned)
|
||||
history.entries.retain(|e| e.is_pinned);
|
||||
|
||||
assert_eq!(history.entries.len(), 2);
|
||||
assert!(history.entries.iter().all(|e| e.is_pinned));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -393,3 +393,263 @@ pub async fn get_file_size(file_path: String) -> Result<u64, String> {
|
||||
.map_err(|e| format!("Failed to get file metadata: {}", e))?;
|
||||
Ok(metadata.len())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::fs::{self, File};
|
||||
use std::io::Write;
|
||||
use tempfile::TempDir;
|
||||
|
||||
// Helper to run async tests
|
||||
fn run_async<F: std::future::Future>(f: F) -> F::Output {
|
||||
tokio::runtime::Runtime::new().unwrap().block_on(f)
|
||||
}
|
||||
|
||||
// ==================== validate_directory tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_absolute_path_exists() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let path = temp_dir.path().to_string_lossy().to_string();
|
||||
|
||||
let result = run_async(validate_directory(path.clone(), None));
|
||||
assert!(result.is_ok());
|
||||
// Canonicalized path should be returned
|
||||
assert!(result.unwrap().contains(&temp_dir.path().file_name().unwrap().to_string_lossy().to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_path_not_exists() {
|
||||
let result = run_async(validate_directory(
|
||||
"/nonexistent/path/that/does/not/exist".to_string(),
|
||||
None,
|
||||
));
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("does not exist"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_path_is_file() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let file_path = temp_dir.path().join("test_file.txt");
|
||||
File::create(&file_path).unwrap();
|
||||
|
||||
let result = run_async(validate_directory(
|
||||
file_path.to_string_lossy().to_string(),
|
||||
None,
|
||||
));
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("not a directory"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_home_expansion() {
|
||||
// This test assumes HOME is set (which it should be on most systems)
|
||||
if std::env::var_os("HOME").is_some() {
|
||||
let result = run_async(validate_directory("~".to_string(), None));
|
||||
assert!(result.is_ok());
|
||||
// Should not contain ~ after expansion
|
||||
assert!(!result.unwrap().contains("~"));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_home_subpath_expansion() {
|
||||
// This test assumes HOME is set and has some subdirectory
|
||||
if let Some(home) = std::env::var_os("HOME") {
|
||||
let home_path = std::path::Path::new(&home);
|
||||
// Find any subdirectory in home
|
||||
if let Ok(entries) = fs::read_dir(home_path) {
|
||||
for entry in entries.flatten() {
|
||||
if entry.path().is_dir() {
|
||||
let subdir_name = entry.file_name().to_string_lossy().to_string();
|
||||
let tilde_path = format!("~/{}", subdir_name);
|
||||
let result = run_async(validate_directory(tilde_path, None));
|
||||
assert!(result.is_ok());
|
||||
assert!(!result.unwrap().contains("~"));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_relative_path_with_current_dir() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let subdir = temp_dir.path().join("subdir");
|
||||
fs::create_dir(&subdir).unwrap();
|
||||
|
||||
let result = run_async(validate_directory(
|
||||
"subdir".to_string(),
|
||||
Some(temp_dir.path().to_string_lossy().to_string()),
|
||||
));
|
||||
assert!(result.is_ok());
|
||||
assert!(result.unwrap().contains("subdir"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_dot_path() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
let result = run_async(validate_directory(
|
||||
".".to_string(),
|
||||
Some(temp_dir.path().to_string_lossy().to_string()),
|
||||
));
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_dotdot_path() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let subdir = temp_dir.path().join("subdir");
|
||||
fs::create_dir(&subdir).unwrap();
|
||||
|
||||
let result = run_async(validate_directory(
|
||||
"..".to_string(),
|
||||
Some(subdir.to_string_lossy().to_string()),
|
||||
));
|
||||
assert!(result.is_ok());
|
||||
// Should resolve to parent
|
||||
let resolved = result.unwrap();
|
||||
assert!(resolved.contains(&temp_dir.path().file_name().unwrap().to_string_lossy().to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_directory_relative_without_current_dir() {
|
||||
// Relative path without current_dir - should fail since relative path likely won't exist
|
||||
let result = run_async(validate_directory(
|
||||
"some_random_nonexistent_relative_path".to_string(),
|
||||
None,
|
||||
));
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
// ==================== get_file_size tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_get_file_size_empty_file() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let file_path = temp_dir.path().join("empty.txt");
|
||||
File::create(&file_path).unwrap();
|
||||
|
||||
let result = run_async(get_file_size(file_path.to_string_lossy().to_string()));
|
||||
assert!(result.is_ok());
|
||||
assert_eq!(result.unwrap(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_file_size_with_content() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let file_path = temp_dir.path().join("content.txt");
|
||||
let mut file = File::create(&file_path).unwrap();
|
||||
file.write_all(b"Hello, Hikari!").unwrap();
|
||||
|
||||
let result = run_async(get_file_size(file_path.to_string_lossy().to_string()));
|
||||
assert!(result.is_ok());
|
||||
assert_eq!(result.unwrap(), 14); // "Hello, Hikari!" is 14 bytes
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_file_size_larger_file() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let file_path = temp_dir.path().join("large.txt");
|
||||
let mut file = File::create(&file_path).unwrap();
|
||||
// Write 1000 bytes
|
||||
let data = vec![b'x'; 1000];
|
||||
file.write_all(&data).unwrap();
|
||||
|
||||
let result = run_async(get_file_size(file_path.to_string_lossy().to_string()));
|
||||
assert!(result.is_ok());
|
||||
assert_eq!(result.unwrap(), 1000);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_file_size_nonexistent_file() {
|
||||
let result = run_async(get_file_size(
|
||||
"/nonexistent/path/file.txt".to_string(),
|
||||
));
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("Failed to get file metadata"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_file_size_directory() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
// Getting "size" of a directory should work but return directory metadata
|
||||
// This is actually valid - directories have metadata too
|
||||
let result = run_async(get_file_size(temp_dir.path().to_string_lossy().to_string()));
|
||||
assert!(result.is_ok());
|
||||
// Directory size is platform-dependent, just check it returns something
|
||||
}
|
||||
|
||||
// ==================== list_skills tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_list_skills_no_skills_dir() {
|
||||
// This test is tricky because it depends on HOME being set
|
||||
// and potentially affecting real user data, so we'll just
|
||||
// verify the function doesn't panic
|
||||
let result = run_async(list_skills());
|
||||
// Should either return Ok with a list or Ok with empty vec
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
// ==================== select_wsl_directory tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_select_wsl_directory_returns_home() {
|
||||
let result = run_async(select_wsl_directory());
|
||||
assert!(result.is_ok());
|
||||
assert_eq!(result.unwrap(), "/home");
|
||||
}
|
||||
|
||||
// ==================== UpdateInfo struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_update_info_serialization() {
|
||||
let info = UpdateInfo {
|
||||
current_version: "0.3.0".to_string(),
|
||||
latest_version: "0.4.0".to_string(),
|
||||
has_update: true,
|
||||
release_url: "https://example.com/release".to_string(),
|
||||
release_notes: Some("New features!".to_string()),
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&info).unwrap();
|
||||
assert!(json.contains("0.3.0"));
|
||||
assert!(json.contains("0.4.0"));
|
||||
assert!(json.contains("true"));
|
||||
assert!(json.contains("New features!"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_update_info_without_notes() {
|
||||
let info = UpdateInfo {
|
||||
current_version: "0.3.0".to_string(),
|
||||
latest_version: "0.3.0".to_string(),
|
||||
has_update: false,
|
||||
release_url: "https://example.com/release".to_string(),
|
||||
release_notes: None,
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&info).unwrap();
|
||||
assert!(json.contains("null") || json.contains("release_notes"));
|
||||
}
|
||||
|
||||
// ==================== SavedFileInfo struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_saved_file_info_serialization() {
|
||||
let info = SavedFileInfo {
|
||||
path: "/tmp/test.txt".to_string(),
|
||||
filename: "test.txt".to_string(),
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&info).unwrap();
|
||||
assert!(json.contains("/tmp/test.txt"));
|
||||
assert!(json.contains("test.txt"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -286,3 +286,593 @@ pub fn git_discard(working_dir: String, file_path: String) -> Result<String, Str
|
||||
pub fn git_create_branch(working_dir: String, branch_name: String) -> Result<String, String> {
|
||||
run_git_command(&working_dir, &["checkout", "-b", &branch_name])
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::fs::{self, File};
|
||||
use std::io::Write;
|
||||
use tempfile::TempDir;
|
||||
|
||||
// Helper to create a git repository in a temp directory
|
||||
fn create_test_repo() -> TempDir {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let working_dir = temp_dir.path().to_string_lossy().to_string();
|
||||
|
||||
// Initialize git repo
|
||||
run_git_command(&working_dir, &["init"]).unwrap();
|
||||
|
||||
// Configure git user for commits
|
||||
run_git_command(&working_dir, &["config", "user.email", "test@example.com"]).unwrap();
|
||||
run_git_command(&working_dir, &["config", "user.name", "Test User"]).unwrap();
|
||||
|
||||
// Disable GPG signing for tests (user may have it enabled globally)
|
||||
run_git_command(&working_dir, &["config", "commit.gpgsign", "false"]).unwrap();
|
||||
|
||||
temp_dir
|
||||
}
|
||||
|
||||
// Helper to create a file in the test repo
|
||||
fn create_file(dir: &TempDir, name: &str, content: &str) {
|
||||
let file_path = dir.path().join(name);
|
||||
let mut file = File::create(file_path).unwrap();
|
||||
file.write_all(content.as_bytes()).unwrap();
|
||||
}
|
||||
|
||||
// ==================== GitStatus struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_git_status_serialization() {
|
||||
let status = GitStatus {
|
||||
is_repo: true,
|
||||
branch: Some("main".to_string()),
|
||||
upstream: Some("origin/main".to_string()),
|
||||
ahead: 2,
|
||||
behind: 1,
|
||||
staged: vec![GitFileChange {
|
||||
path: "file.txt".to_string(),
|
||||
status: "modified".to_string(),
|
||||
}],
|
||||
unstaged: vec![],
|
||||
untracked: vec!["new_file.txt".to_string()],
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&status).unwrap();
|
||||
assert!(json.contains("\"is_repo\":true"));
|
||||
assert!(json.contains("\"branch\":\"main\""));
|
||||
assert!(json.contains("\"ahead\":2"));
|
||||
assert!(json.contains("\"behind\":1"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_git_status_not_a_repo() {
|
||||
let status = GitStatus {
|
||||
is_repo: false,
|
||||
branch: None,
|
||||
upstream: None,
|
||||
ahead: 0,
|
||||
behind: 0,
|
||||
staged: vec![],
|
||||
unstaged: vec![],
|
||||
untracked: vec![],
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&status).unwrap();
|
||||
let deserialized: GitStatus = serde_json::from_str(&json).unwrap();
|
||||
assert!(!deserialized.is_repo);
|
||||
assert!(deserialized.branch.is_none());
|
||||
}
|
||||
|
||||
// ==================== GitFileChange struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_git_file_change_serialization() {
|
||||
let change = GitFileChange {
|
||||
path: "src/main.rs".to_string(),
|
||||
status: "added".to_string(),
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&change).unwrap();
|
||||
assert!(json.contains("src/main.rs"));
|
||||
assert!(json.contains("added"));
|
||||
|
||||
let deserialized: GitFileChange = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(deserialized.path, "src/main.rs");
|
||||
assert_eq!(deserialized.status, "added");
|
||||
}
|
||||
|
||||
// ==================== GitBranch struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_git_branch_serialization() {
|
||||
let branch = GitBranch {
|
||||
name: "feature/new-feature".to_string(),
|
||||
is_current: true,
|
||||
is_remote: false,
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&branch).unwrap();
|
||||
assert!(json.contains("feature/new-feature"));
|
||||
assert!(json.contains("\"is_current\":true"));
|
||||
assert!(json.contains("\"is_remote\":false"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_git_branch_remote() {
|
||||
let branch = GitBranch {
|
||||
name: "origin/main".to_string(),
|
||||
is_current: false,
|
||||
is_remote: true,
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&branch).unwrap();
|
||||
let deserialized: GitBranch = serde_json::from_str(&json).unwrap();
|
||||
assert!(deserialized.is_remote);
|
||||
assert!(!deserialized.is_current);
|
||||
}
|
||||
|
||||
// ==================== GitLogEntry struct tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_git_log_entry_serialization() {
|
||||
let entry = GitLogEntry {
|
||||
hash: "abc123def456".to_string(),
|
||||
short_hash: "abc123d".to_string(),
|
||||
author: "Hikari".to_string(),
|
||||
date: "2 hours ago".to_string(),
|
||||
message: "feat: add new feature".to_string(),
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&entry).unwrap();
|
||||
assert!(json.contains("abc123def456"));
|
||||
assert!(json.contains("Hikari"));
|
||||
assert!(json.contains("feat: add new feature"));
|
||||
}
|
||||
|
||||
// ==================== git_status integration tests ====================

#[test]
fn test_git_status_not_a_git_repo() {
    // A plain temp directory is not a repository: the call still succeeds,
    // reporting is_repo = false with no branch and nothing staged.
    let dir = TempDir::new().unwrap();
    let cwd = dir.path().to_string_lossy().to_string();

    let res = git_status(cwd);
    assert!(res.is_ok());

    let status = res.unwrap();
    assert!(!status.is_repo);
    assert!(status.branch.is_none());
    assert!(status.staged.is_empty());
}

#[test]
fn test_git_status_empty_repo() {
    // A freshly initialised repo has no staged, unstaged or untracked files.
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    let res = git_status(cwd);
    assert!(res.is_ok());

    let status = res.unwrap();
    assert!(status.is_repo);
    assert!(status.staged.is_empty());
    assert!(status.unstaged.is_empty());
    assert!(status.untracked.is_empty());
}

#[test]
fn test_git_status_with_untracked_file() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // A file that was never `git add`ed must be reported as untracked.
    create_file(&repo, "untracked.txt", "hello");

    let res = git_status(cwd);
    assert!(res.is_ok());

    let status = res.unwrap();
    assert!(status.is_repo);
    assert!(status.untracked.contains(&"untracked.txt".to_string()));
}

#[test]
fn test_git_status_with_staged_file() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // Stage a brand-new file; it should be reported as "added".
    create_file(&repo, "staged.txt", "hello");
    run_git_command(&cwd, &["add", "staged.txt"]).unwrap();

    let res = git_status(cwd);
    assert!(res.is_ok());

    let status = res.unwrap();
    assert!(status.is_repo);
    assert!(!status.staged.is_empty());
    assert_eq!(status.staged[0].path, "staged.txt");
    assert_eq!(status.staged[0].status, "added");
}

#[test]
fn test_git_status_with_modified_file() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // Commit a baseline, then edit the working copy without staging.
    create_file(&repo, "file.txt", "initial content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();
    run_git_command(&cwd, &["commit", "-m", "initial commit"]).unwrap();

    create_file(&repo, "file.txt", "modified content");

    let res = git_status(cwd);
    assert!(res.is_ok());

    let status = res.unwrap();
    assert!(status.is_repo);
    assert!(!status.unstaged.is_empty());
    assert_eq!(status.unstaged[0].path, "file.txt");
    assert_eq!(status.unstaged[0].status, "modified");
}
|
||||
|
||||
// ==================== git_diff integration tests ====================

#[test]
fn test_git_diff_no_changes() {
    // A clean repository produces an empty diff.
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    let res = git_diff(cwd, None, false);
    assert!(res.is_ok());
    assert!(res.unwrap().is_empty());
}

#[test]
fn test_git_diff_with_changes() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // Commit a baseline, then edit the working copy.
    create_file(&repo, "file.txt", "initial content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();
    run_git_command(&cwd, &["commit", "-m", "initial"]).unwrap();

    create_file(&repo, "file.txt", "modified content");

    let res = git_diff(cwd, None, false);
    assert!(res.is_ok());

    let diff = res.unwrap();
    assert!(diff.contains("diff"));
    assert!(diff.contains("file.txt"));
}

#[test]
fn test_git_diff_staged() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    create_file(&repo, "file.txt", "initial content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();
    run_git_command(&cwd, &["commit", "-m", "initial"]).unwrap();

    // Stage a modification so it shows up in the staged (cached) diff.
    create_file(&repo, "file.txt", "modified content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();

    let res = git_diff(cwd, None, true);
    assert!(res.is_ok());

    let diff = res.unwrap();
    assert!(diff.contains("diff"));
}

#[test]
fn test_git_diff_specific_file() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // Commit two files, then modify both.
    create_file(&repo, "file1.txt", "content1");
    create_file(&repo, "file2.txt", "content2");
    run_git_command(&cwd, &["add", "-A"]).unwrap();
    run_git_command(&cwd, &["commit", "-m", "initial"]).unwrap();

    create_file(&repo, "file1.txt", "modified1");
    create_file(&repo, "file2.txt", "modified2");

    // Restricting the diff to file1.txt must exclude file2.txt entirely.
    let res = git_diff(cwd, Some("file1.txt".to_string()), false);
    assert!(res.is_ok());

    let diff = res.unwrap();
    assert!(diff.contains("file1.txt"));
    assert!(!diff.contains("file2.txt"));
}
|
||||
|
||||
// ==================== git_branches integration tests ====================

#[test]
fn test_git_branches_single_branch() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // A branch only exists once there is at least one commit.
    create_file(&repo, "file.txt", "content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();
    run_git_command(&cwd, &["commit", "-m", "initial"]).unwrap();

    let res = git_branches(cwd);
    assert!(res.is_ok());

    // At least the default branch (main or master) should be listed.
    let branches = res.unwrap();
    assert!(!branches.is_empty());
}

#[test]
fn test_git_branches_multiple_branches() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    create_file(&repo, "file.txt", "content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();
    run_git_command(&cwd, &["commit", "-m", "initial"]).unwrap();

    // Add a second branch alongside the default one.
    run_git_command(&cwd, &["branch", "feature-branch"]).unwrap();

    let res = git_branches(cwd);
    assert!(res.is_ok());

    let branches = res.unwrap();
    assert!(branches.len() >= 2);
    assert!(branches.iter().any(|b| b.name == "feature-branch"));
}
|
||||
|
||||
// ==================== git_stage and git_unstage tests ====================

#[test]
fn test_git_stage_file() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    create_file(&repo, "file.txt", "content");

    let res = git_stage(cwd.clone(), "file.txt".to_string());
    assert!(res.is_ok());

    // The file must now appear in the staged list.
    let status = git_status(cwd).unwrap();
    assert!(status.staged.iter().any(|f| f.path == "file.txt"));
}

#[test]
fn test_git_unstage_file() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // Commit a baseline so unstaging has a HEAD to restore from.
    create_file(&repo, "file.txt", "initial content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();
    run_git_command(&cwd, &["commit", "-m", "initial"]).unwrap();

    // Modify and stage the file again.
    create_file(&repo, "file.txt", "modified content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();

    let res = git_unstage(cwd.clone(), "file.txt".to_string());
    assert!(res.is_ok());

    // The change should have moved from staged back to unstaged.
    let status = git_status(cwd).unwrap();
    assert!(!status.staged.iter().any(|f| f.path == "file.txt"));
    assert!(status.unstaged.iter().any(|f| f.path == "file.txt"));
}

#[test]
fn test_git_stage_all() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    create_file(&repo, "file1.txt", "content1");
    create_file(&repo, "file2.txt", "content2");

    let res = git_stage_all(cwd.clone());
    assert!(res.is_ok());

    // Both new files should now be staged.
    let status = git_status(cwd).unwrap();
    assert_eq!(status.staged.len(), 2);
}
|
||||
|
||||
// ==================== git_commit tests ====================

#[test]
fn test_git_commit() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // Stage one file, then commit it via the command under test.
    create_file(&repo, "file.txt", "content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();

    let res = git_commit(cwd.clone(), "test commit message".to_string());
    assert!(res.is_ok());

    // The commit should now be at the head of the log.
    let log = git_log(cwd, Some(1)).unwrap();
    assert!(!log.is_empty());
    assert!(log[0].message.contains("test commit message"));
}

#[test]
fn test_git_commit_nothing_to_commit() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // Make an initial commit so the failure is "nothing staged", not "no HEAD".
    create_file(&repo, "file.txt", "content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();
    run_git_command(&cwd, &["commit", "-m", "initial"]).unwrap();

    // Committing with a clean index must be rejected.
    let res = git_commit(cwd, "empty commit".to_string());
    assert!(res.is_err());
}
|
||||
|
||||
// ==================== git_log tests ====================

#[test]
fn test_git_log_empty_repo() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // With no commits yet, git may either error or report an empty log;
    // only the Ok case carries a value worth checking.
    if let Ok(commits) = git_log(cwd, Some(10)) {
        assert!(commits.is_empty());
    }
}

#[test]
fn test_git_log_with_commits() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // Three commits, oldest to newest.
    for i in 1..=3 {
        create_file(&repo, &format!("file{}.txt", i), "content");
        run_git_command(&cwd, &["add", "-A"]).unwrap();
        run_git_command(&cwd, &["commit", "-m", &format!("commit {}", i)]).unwrap();
    }

    let res = git_log(cwd, Some(10));
    assert!(res.is_ok());

    let log = res.unwrap();
    assert_eq!(log.len(), 3);
    // git log lists the most recent commit first.
    assert!(log[0].message.contains("commit 3"));
    assert!(log[2].message.contains("commit 1"));
}

#[test]
fn test_git_log_limit() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // Five commits in total...
    for i in 1..=5 {
        create_file(&repo, &format!("file{}.txt", i), "content");
        run_git_command(&cwd, &["add", "-A"]).unwrap();
        run_git_command(&cwd, &["commit", "-m", &format!("commit {}", i)]).unwrap();
    }

    // ...but only the two most recent entries are requested.
    let res = git_log(cwd, Some(2));
    assert!(res.is_ok());

    let log = res.unwrap();
    assert_eq!(log.len(), 2);
}
|
||||
|
||||
// ==================== git_discard tests ====================

#[test]
fn test_git_discard_changes() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // Commit a baseline version of the file.
    create_file(&repo, "file.txt", "original content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();
    run_git_command(&cwd, &["commit", "-m", "initial"]).unwrap();

    // Dirty the working copy, then throw the edit away.
    create_file(&repo, "file.txt", "modified content");

    let res = git_discard(cwd.clone(), "file.txt".to_string());
    assert!(res.is_ok());

    // The on-disk contents must be back to the committed version.
    let content = fs::read_to_string(repo.path().join("file.txt")).unwrap();
    assert_eq!(content, "original content");
}
|
||||
|
||||
// ==================== git_create_branch tests ====================

#[test]
fn test_git_create_branch() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // Branch creation needs an initial commit to point at.
    create_file(&repo, "file.txt", "content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();
    run_git_command(&cwd, &["commit", "-m", "initial"]).unwrap();

    let res = git_create_branch(cwd.clone(), "new-branch".to_string());
    assert!(res.is_ok());

    // The new branch must exist and be checked out.
    let branches = git_branches(cwd).unwrap();
    assert!(branches.iter().any(|b| b.name == "new-branch" && b.is_current));
}
|
||||
|
||||
// ==================== git_checkout tests ====================

#[test]
fn test_git_checkout() {
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    // An initial commit so branches can exist.
    create_file(&repo, "file.txt", "content");
    run_git_command(&cwd, &["add", "file.txt"]).unwrap();
    run_git_command(&cwd, &["commit", "-m", "initial"]).unwrap();

    // Create a second branch, then switch to it via the command under test.
    run_git_command(&cwd, &["branch", "other-branch"]).unwrap();

    let res = git_checkout(cwd.clone(), "other-branch".to_string());
    assert!(res.is_ok());

    // HEAD must now be on the new branch.
    let branches = git_branches(cwd).unwrap();
    let current = branches.iter().find(|b| b.is_current);
    assert!(current.is_some());
    assert_eq!(current.unwrap().name, "other-branch");
}
|
||||
|
||||
// ==================== run_git_command tests ====================

#[test]
fn test_run_git_command_success() {
    // `git status` inside a valid repository succeeds.
    let repo = create_test_repo();
    let cwd = repo.path().to_string_lossy().to_string();

    assert!(run_git_command(&cwd, &["status"]).is_ok());
}

#[test]
fn test_run_git_command_failure() {
    // `git log` errors out when run outside any repository.
    let dir = TempDir::new().unwrap();
    let cwd = dir.path().to_string_lossy().to_string();

    assert!(run_git_command(&cwd, &["log"]).is_err());
}

#[test]
fn test_run_git_command_invalid_dir() {
    // A working directory that does not exist yields an error.
    assert!(run_git_command("/nonexistent/path", &["status"]).is_err());
}
|
||||
}
|
||||
|
||||
@@ -171,6 +171,18 @@ pub async fn reset_default_quick_actions(app: AppHandle) -> Result<(), String> {
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn create_test_action(id: &str, name: &str, is_default: bool) -> QuickAction {
|
||||
QuickAction {
|
||||
id: id.to_string(),
|
||||
name: name.to_string(),
|
||||
prompt: "Test prompt".to_string(),
|
||||
icon: "star".to_string(),
|
||||
is_default,
|
||||
created_at: Utc::now(),
|
||||
updated_at: Utc::now(),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_default_quick_actions_exist() {
|
||||
let defaults = get_default_quick_actions();
|
||||
@@ -188,4 +200,174 @@ mod tests {
|
||||
assert!(!action.icon.is_empty());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
fn test_default_quick_actions_count() {
    // The app ships exactly 6 built-in quick actions.
    let defaults = get_default_quick_actions();
    assert_eq!(defaults.len(), 6);
}

#[test]
fn test_default_quick_actions_have_unique_ids() {
    // Sorting then deduplicating removes nothing iff every id is unique.
    let defaults = get_default_quick_actions();
    let mut ids: Vec<&String> = defaults.iter().map(|a| &a.id).collect();
    ids.sort();
    ids.dedup();
    assert_eq!(ids.len(), defaults.len());
}

#[test]
fn test_default_quick_actions_ids_start_with_default() {
    // Built-in actions are namespaced with a "default-" id prefix.
    let defaults = get_default_quick_actions();
    assert!(defaults.iter().all(|a| a.id.starts_with("default-")));
}
|
||||
|
||||
#[test]
fn test_quick_action_serialization() {
    // A JSON round-trip must preserve every field.
    let action = create_test_action("test-1", "Test Action", false);
    let json = serde_json::to_string(&action).expect("Failed to serialize");
    let decoded: QuickAction = serde_json::from_str(&json).expect("Failed to deserialize");

    assert_eq!(decoded.id, action.id);
    assert_eq!(decoded.name, action.name);
    assert_eq!(decoded.prompt, action.prompt);
    assert_eq!(decoded.icon, action.icon);
    assert_eq!(decoded.is_default, action.is_default);
}

#[test]
fn test_quick_action_clone() {
    // Clone must be a faithful copy of the source action.
    let src = create_test_action("clone-test", "Clone Test", true);
    let copy = src.clone();

    assert_eq!(src.id, copy.id);
    assert_eq!(src.name, copy.name);
    assert_eq!(src.is_default, copy.is_default);
}
|
||||
|
||||
#[test]
#[allow(clippy::useless_vec)]
fn test_quick_action_sorting_defaults_first() {
    let mut actions = vec![
        create_test_action("custom-z", "Zebra", false),
        create_test_action("default-a", "Apple", true),
        create_test_action("custom-a", "Alpha", false),
        create_test_action("default-z", "Zulu", true),
    ];

    // Expected order: default actions first, then alphabetical by name
    // within each group. `Ordering::then_with` expresses the tiebreak.
    actions.sort_by(|a, b| b.is_default.cmp(&a.is_default).then_with(|| a.name.cmp(&b.name)));

    let order: Vec<(bool, &str)> = actions
        .iter()
        .map(|a| (a.is_default, a.name.as_str()))
        .collect();

    assert_eq!(
        order,
        vec![
            (true, "Apple"),
            (true, "Zulu"),
            (false, "Alpha"),
            (false, "Zebra"),
        ]
    );
}
|
||||
|
||||
#[test]
fn test_known_default_actions() {
    // Every expected built-in action id must be present.
    let defaults = get_default_quick_actions();
    let ids: Vec<&str> = defaults.iter().map(|a| a.id.as_str()).collect();

    for expected in [
        "default-review-pr",
        "default-run-tests",
        "default-explain-file",
        "default-fix-error",
        "default-write-tests",
        "default-refactor",
    ] {
        assert!(ids.contains(&expected));
    }
}

#[test]
fn test_default_action_icons() {
    // Each built-in action carries one of the known icon names.
    let defaults = get_default_quick_actions();
    let icons: Vec<&str> = defaults.iter().map(|a| a.icon.as_str()).collect();

    for expected in [
        "git-pull-request",
        "play",
        "file-text",
        "alert-circle",
        "check-square",
        "refresh-cw",
    ] {
        assert!(icons.contains(&expected));
    }
}
|
||||
|
||||
#[test]
fn test_quick_action_prompts_not_empty() {
    // A built-in prompt shorter than ~10 chars is almost certainly broken.
    for action in get_default_quick_actions() {
        assert!(
            action.prompt.len() > 10,
            "Prompt should be meaningful: {}",
            action.name
        );
    }
}

#[test]
fn test_quick_action_timestamps() {
    // created_at is captured no later than updated_at.
    let action = create_test_action("time-test", "Time Test", false);
    assert!(action.created_at <= action.updated_at);
}

#[test]
fn test_default_actions_have_same_timestamps() {
    // All built-in actions are stamped with one shared creation instant.
    let defaults = get_default_quick_actions();
    let first_created = defaults[0].created_at;
    let first_updated = defaults[0].updated_at;

    for action in &defaults {
        assert_eq!(action.created_at, first_created);
        assert_eq!(action.updated_at, first_updated);
    }
}
|
||||
|
||||
#[test]
fn test_action_retain_non_default() {
    let mut actions = vec![
        create_test_action("default-1", "Default 1", true),
        create_test_action("custom-1", "Custom 1", false),
        create_test_action("default-2", "Default 2", true),
        create_test_action("custom-2", "Custom 2", false),
    ];

    // reset_default_quick_actions keeps only the user-created actions.
    actions.retain(|a| !a.is_default);

    assert_eq!(actions.len(), 2);
    assert!(actions.iter().all(|a| !a.is_default));
}

#[test]
#[allow(clippy::useless_vec)]
fn test_action_find_by_id() {
    let actions = vec![
        create_test_action("action-1", "First", false),
        create_test_action("action-2", "Second", false),
        create_test_action("action-3", "Third", false),
    ];

    // An existing id resolves to the matching action...
    let hit = actions.iter().find(|a| a.id == "action-2");
    assert!(hit.is_some());
    assert_eq!(hit.unwrap().name, "Second");

    // ...and an unknown id resolves to nothing.
    let miss = actions.iter().find(|a| a.id == "action-999");
    assert!(miss.is_none());
}
|
||||
}
|
||||
|
||||
@@ -145,6 +145,30 @@ pub async fn clear_all_sessions(app: AppHandle) -> Result<(), String> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use chrono::TimeZone;
|
||||
|
||||
fn create_test_session(id: &str, name: &str) -> SavedSession {
|
||||
SavedSession {
|
||||
id: id.to_string(),
|
||||
name: name.to_string(),
|
||||
created_at: Utc::now(),
|
||||
last_activity_at: Utc::now(),
|
||||
working_directory: "/home/test".to_string(),
|
||||
message_count: 5,
|
||||
preview: "Hello world".to_string(),
|
||||
messages: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
fn create_test_message(id: &str, content: &str, msg_type: &str) -> SavedMessage {
|
||||
SavedMessage {
|
||||
id: id.to_string(),
|
||||
message_type: msg_type.to_string(),
|
||||
content: content.to_string(),
|
||||
timestamp: Utc::now(),
|
||||
tool_name: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_session_list_item_from_saved_session() {
|
||||
@@ -164,4 +188,187 @@ mod tests {
|
||||
assert_eq!(item.name, "Test Session");
|
||||
assert_eq!(item.message_count, 5);
|
||||
}
|
||||
|
||||
#[test]
fn test_session_list_item_preserves_all_fields() {
    // Conversion from SavedSession must carry every summary field across.
    let created = Utc.with_ymd_and_hms(2024, 1, 15, 10, 30, 0).unwrap();
    let last_activity = Utc.with_ymd_and_hms(2024, 1, 15, 14, 45, 0).unwrap();

    let session = SavedSession {
        id: "sess-123".to_string(),
        name: "My Chat".to_string(),
        created_at: created,
        last_activity_at: last_activity,
        working_directory: "/home/naomi/project".to_string(),
        message_count: 42,
        preview: "What is the meaning of life?".to_string(),
        messages: vec![],
    };

    let summary = SessionListItem::from(&session);

    assert_eq!(summary.id, "sess-123");
    assert_eq!(summary.name, "My Chat");
    assert_eq!(summary.created_at, created);
    assert_eq!(summary.last_activity_at, last_activity);
    assert_eq!(summary.working_directory, "/home/naomi/project");
    assert_eq!(summary.message_count, 42);
    assert_eq!(summary.preview, "What is the meaning of life?");
}
|
||||
|
||||
#[test]
fn test_saved_session_serialization() {
    // A JSON round-trip preserves the identifying fields.
    let session = create_test_session("test-1", "Test Session");
    let json = serde_json::to_string(&session).expect("Failed to serialize");
    let decoded: SavedSession = serde_json::from_str(&json).expect("Failed to deserialize");

    assert_eq!(decoded.id, session.id);
    assert_eq!(decoded.name, session.name);
    assert_eq!(decoded.working_directory, session.working_directory);
}

#[test]
fn test_saved_message_serialization() {
    // A JSON round-trip preserves id, content and type.
    let message = create_test_message("msg-1", "Hello!", "user");
    let json = serde_json::to_string(&message).expect("Failed to serialize");
    let decoded: SavedMessage = serde_json::from_str(&json).expect("Failed to deserialize");

    assert_eq!(decoded.id, message.id);
    assert_eq!(decoded.content, message.content);
    assert_eq!(decoded.message_type, "user");
}

#[test]
fn test_saved_message_with_tool_name() {
    // The optional tool_name must survive serialization.
    let message = SavedMessage {
        id: "msg-tool-1".to_string(),
        message_type: "tool".to_string(),
        content: "File read successfully".to_string(),
        timestamp: Utc::now(),
        tool_name: Some("Read".to_string()),
    };

    let json = serde_json::to_string(&message).expect("Failed to serialize");
    let decoded: SavedMessage = serde_json::from_str(&json).expect("Failed to deserialize");

    assert_eq!(decoded.tool_name, Some("Read".to_string()));
}
|
||||
|
||||
#[test]
fn test_session_with_messages_serialization() {
    // A session carrying messages round-trips with message order intact.
    let mut session = create_test_session("sess-full", "Full Session");
    session.messages = vec![
        create_test_message("msg-1", "Hello!", "user"),
        create_test_message("msg-2", "Hi there!", "assistant"),
        create_test_message("msg-3", "Read file", "tool"),
    ];
    session.message_count = 3;

    let json = serde_json::to_string(&session).expect("Failed to serialize");
    let decoded: SavedSession = serde_json::from_str(&json).expect("Failed to deserialize");

    assert_eq!(decoded.messages.len(), 3);
    assert_eq!(decoded.messages[0].content, "Hello!");
    assert_eq!(decoded.messages[1].message_type, "assistant");
    assert_eq!(decoded.messages[2].message_type, "tool");
}

#[test]
fn test_session_list_item_serialization() {
    // The lightweight list item also round-trips through JSON.
    let item = SessionListItem {
        id: "list-item-1".to_string(),
        name: "Quick Chat".to_string(),
        created_at: Utc::now(),
        last_activity_at: Utc::now(),
        working_directory: "/tmp".to_string(),
        message_count: 10,
        preview: "Short preview...".to_string(),
    };

    let json = serde_json::to_string(&item).expect("Failed to serialize");
    let decoded: SessionListItem = serde_json::from_str(&json).expect("Failed to deserialize");

    assert_eq!(decoded.id, item.id);
    assert_eq!(decoded.name, item.name);
    assert_eq!(decoded.preview, item.preview);
}
|
||||
|
||||
#[test]
fn test_message_type_field_rename() {
    // serde renames the `message_type` field to "type" on the wire.
    let message = create_test_message("msg-1", "Test", "assistant");
    let json = serde_json::to_string(&message).expect("Failed to serialize");

    assert!(json.contains("\"type\":"));
    assert!(!json.contains("\"message_type\":"));
}

#[test]
fn test_session_default_empty_messages() {
    // A zero-message session is representable and self-consistent.
    let session = SavedSession {
        id: "empty".to_string(),
        name: "Empty".to_string(),
        created_at: Utc::now(),
        last_activity_at: Utc::now(),
        working_directory: "/".to_string(),
        message_count: 0,
        preview: "".to_string(),
        messages: vec![],
    };

    assert!(session.messages.is_empty());
    assert_eq!(session.message_count, 0);
}
|
||||
|
||||
#[test]
#[allow(clippy::useless_vec)]
fn test_session_sorting_by_activity() {
    let old_time = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
    let new_time = Utc.with_ymd_and_hms(2024, 6, 15, 12, 0, 0).unwrap();

    // Small constructor closure keeps the two fixtures readable.
    let item = |id: &str, name: &str, dir: &str, preview: &str, ts| SessionListItem {
        id: id.to_string(),
        name: name.to_string(),
        created_at: ts,
        last_activity_at: ts,
        working_directory: dir.to_string(),
        message_count: 1,
        preview: preview.to_string(),
    };

    let mut sessions = vec![
        item("old", "Old Session", "/old", "Old", old_time),
        item("new", "New Session", "/new", "New", new_time),
    ];

    // list_sessions orders by last activity, most recent first.
    sessions.sort_by(|a, b| b.last_activity_at.cmp(&a.last_activity_at));

    assert_eq!(sessions[0].id, "new");
    assert_eq!(sessions[1].id, "old");
}
|
||||
|
||||
#[test]
fn test_session_clone() {
    // Cloning a session copies its identifying fields.
    let src = create_test_session("clone-test", "Clone Test");
    let copy = src.clone();

    assert_eq!(src.id, copy.id);
    assert_eq!(src.name, copy.name);
}

#[test]
fn test_message_clone() {
    // Cloning a message copies id and content.
    let src = create_test_message("msg-clone", "Content", "user");
    let copy = src.clone();

    assert_eq!(src.id, copy.id);
    assert_eq!(src.content, copy.content);
}
|
||||
}
|
||||
|
||||
@@ -205,6 +205,19 @@ pub async fn reset_default_snippets(app: AppHandle) -> Result<(), String> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::collections::HashSet;
|
||||
|
||||
fn create_test_snippet(id: &str, name: &str, category: &str, is_default: bool) -> Snippet {
|
||||
Snippet {
|
||||
id: id.to_string(),
|
||||
name: name.to_string(),
|
||||
content: "Test content".to_string(),
|
||||
category: category.to_string(),
|
||||
is_default,
|
||||
created_at: Utc::now(),
|
||||
updated_at: Utc::now(),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_default_snippets_exist() {
|
||||
@@ -223,4 +236,204 @@ mod tests {
|
||||
assert!(!snippet.category.is_empty());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
fn test_default_snippets_count() {
    // The app ships exactly 8 built-in snippets.
    let defaults = get_default_snippets();
    assert_eq!(defaults.len(), 8);
}

#[test]
fn test_default_snippets_have_unique_ids() {
    // A set drops duplicates, so the sizes match iff every id is unique.
    let defaults = get_default_snippets();
    let ids: HashSet<&String> = defaults.iter().map(|s| &s.id).collect();
    assert_eq!(ids.len(), defaults.len());
}

#[test]
fn test_default_snippets_ids_start_with_default() {
    // Built-in snippets are namespaced with a "default-" id prefix.
    let defaults = get_default_snippets();
    assert!(defaults.iter().all(|s| s.id.starts_with("default-")));
}
|
||||
|
||||
#[test]
fn test_snippet_serialization() {
    // A JSON round-trip must preserve every field.
    let snippet = create_test_snippet("test-1", "Test Snippet", "Testing", false);
    let json = serde_json::to_string(&snippet).expect("Failed to serialize");
    let decoded: Snippet = serde_json::from_str(&json).expect("Failed to deserialize");

    assert_eq!(decoded.id, snippet.id);
    assert_eq!(decoded.name, snippet.name);
    assert_eq!(decoded.content, snippet.content);
    assert_eq!(decoded.category, snippet.category);
    assert_eq!(decoded.is_default, snippet.is_default);
}

#[test]
fn test_snippet_clone() {
    // Clone must be a faithful copy of the source snippet.
    let src = create_test_snippet("clone-test", "Clone Test", "Category", true);
    let copy = src.clone();

    assert_eq!(src.id, copy.id);
    assert_eq!(src.name, copy.name);
    assert_eq!(src.is_default, copy.is_default);
}
|
||||
|
||||
#[test]
#[allow(clippy::useless_vec)]
fn test_snippet_sorting_by_category_then_name() {
    let mut snippets = vec![
        create_test_snippet("s1", "Zebra", "B-Category", false),
        create_test_snippet("s2", "Apple", "A-Category", false),
        create_test_snippet("s3", "Banana", "B-Category", false),
        create_test_snippet("s4", "Alpha", "A-Category", false),
    ];

    // list_snippets orders by category first, then by name inside a
    // category. `Ordering::then_with` expresses the tiebreak.
    snippets.sort_by(|a, b| a.category.cmp(&b.category).then_with(|| a.name.cmp(&b.name)));

    let order: Vec<(&str, &str)> = snippets
        .iter()
        .map(|s| (s.category.as_str(), s.name.as_str()))
        .collect();

    assert_eq!(
        order,
        vec![
            ("A-Category", "Alpha"),
            ("A-Category", "Apple"),
            ("B-Category", "Banana"),
            ("B-Category", "Zebra"),
        ]
    );
}
|
||||
|
||||
#[test]
fn test_known_default_snippets() {
    // Spot-check that every expected built-in snippet id ships with the app.
    let defaults = get_default_snippets();
    let expected_ids = [
        "default-explain-code",
        "default-fix-error",
        "default-write-tests",
        "default-refactor",
        "default-optimize",
        "default-review-pr",
        "default-add-comments",
        "default-security-review",
    ];

    for id in expected_ids {
        assert!(defaults.iter().any(|snippet| snippet.id == id));
    }
}

#[test]
fn test_default_snippet_categories() {
    // The bundled snippets must cover all the major workflow categories.
    let defaults = get_default_snippets();
    let categories: HashSet<&str> = defaults.iter().map(|s| s.category.as_str()).collect();

    for expected in [
        "Code Review",
        "Debugging",
        "Testing",
        "Performance",
        "Documentation",
        "Security",
    ] {
        assert!(categories.contains(expected));
    }
}
|
||||
|
||||
#[test]
fn test_snippet_content_not_empty() {
    // Each bundled prompt should carry real content, not a stub.
    for snippet in get_default_snippets() {
        assert!(
            snippet.content.len() > 10,
            "Content should be meaningful: {}",
            snippet.name
        );
    }
}

#[test]
fn test_snippet_timestamps() {
    // A freshly built snippet can never be updated before it was created.
    let snippet = create_test_snippet("time-test", "Time Test", "Cat", false);
    assert!(snippet.created_at <= snippet.updated_at);
}

#[test]
fn test_default_snippets_have_same_timestamps() {
    // The defaults are generated in a single pass, so they all share the
    // same creation and update timestamps.
    let defaults = get_default_snippets();
    let (created, updated) = (defaults[0].created_at, defaults[0].updated_at);

    for snippet in &defaults {
        assert_eq!(snippet.created_at, created);
        assert_eq!(snippet.updated_at, updated);
    }
}
|
||||
|
||||
#[test]
fn test_snippet_retain_non_default() {
    // Models reset_default_snippets: only user-created snippets survive.
    let mut snippets = vec![
        create_test_snippet("default-1", "Default 1", "Cat", true),
        create_test_snippet("custom-1", "Custom 1", "Cat", false),
        create_test_snippet("default-2", "Default 2", "Cat", true),
        create_test_snippet("custom-2", "Custom 2", "Cat", false),
    ];

    snippets.retain(|snippet| !snippet.is_default);

    assert_eq!(snippets.len(), 2);
    assert!(snippets.iter().all(|snippet| !snippet.is_default));
}

#[test]
#[allow(clippy::useless_vec)]
fn test_snippet_find_by_id() {
    let snippets = vec![
        create_test_snippet("snippet-1", "First", "Cat", false),
        create_test_snippet("snippet-2", "Second", "Cat", false),
        create_test_snippet("snippet-3", "Third", "Cat", false),
    ];

    // A known id resolves to the matching snippet.
    match snippets.iter().find(|snippet| snippet.id == "snippet-2") {
        Some(snippet) => assert_eq!(snippet.name, "Second"),
        None => panic!("expected snippet-2 to be found"),
    }

    // An unknown id yields nothing.
    assert!(!snippets.iter().any(|snippet| snippet.id == "snippet-999"));
}
|
||||
|
||||
#[test]
#[allow(clippy::useless_vec)]
fn test_extract_categories_sorted_and_deduped() {
    let snippets = vec![
        create_test_snippet("s1", "S1", "Zebra", false),
        create_test_snippet("s2", "S2", "Alpha", false),
        create_test_snippet("s3", "S3", "Beta", false),
        create_test_snippet("s4", "S4", "Alpha", false), // deliberate duplicate
    ];

    // Mirrors the category-extraction logic: collect, sort, then dedup.
    let mut categories: Vec<String> = snippets
        .iter()
        .map(|snippet| snippet.category.clone())
        .collect();
    categories.sort();
    categories.dedup();

    // The duplicate "Alpha" collapses, leaving three sorted categories.
    assert_eq!(categories, ["Alpha", "Beta", "Zebra"]);
}

#[test]
fn test_snippet_category_code_review_count() {
    // "Code Review" is a flagship category; expect at least two entries.
    let code_review_count = get_default_snippets()
        .iter()
        .filter(|snippet| snippet.category == "Code Review")
        .count();

    assert!(code_review_count >= 2);
}
|
||||
}
|
||||
|
||||
@@ -369,6 +369,36 @@ mod tests {
|
||||
assert!((cost - 0.165).abs() < 0.0001);
|
||||
}
|
||||
|
||||
// Tolerance used by the floating-point pricing assertions below.
const COST_EPS: f64 = 0.0001;

#[test]
fn test_cost_calculation_opus_45() {
    // Opus 4.5 is billed identically to Opus 4.
    let cost = calculate_cost(1000, 2000, "claude-opus-4-5-20251101");
    assert!((cost - 0.165).abs() < COST_EPS);
}

#[test]
fn test_cost_calculation_haiku() {
    // 1000 input at $1/M ($0.001) plus 2000 output at $5/M ($0.010) = $0.011.
    let cost = calculate_cost(1000, 2000, "claude-3-5-haiku-20241022");
    assert!((cost - 0.011).abs() < COST_EPS);
}

#[test]
fn test_cost_calculation_unknown_defaults_to_sonnet() {
    // Unrecognized model ids fall back to Sonnet pricing.
    let cost = calculate_cost(1000, 2000, "some-unknown-model");
    assert!((cost - 0.033).abs() < COST_EPS);
}

#[test]
fn test_cost_calculation_legacy_sonnet() {
    // Legacy Claude 3.5 Sonnet shares Sonnet 4 pricing.
    let cost = calculate_cost(1000, 2000, "claude-3-5-sonnet-20241022");
    assert!((cost - 0.033).abs() < COST_EPS);
}
|
||||
|
||||
#[test]
|
||||
fn test_usage_stats_accumulation() {
|
||||
let mut stats = UsageStats::new();
|
||||
@@ -381,6 +411,28 @@ mod tests {
|
||||
assert!((stats.total_cost_usd - 0.033).abs() < 0.0001);
|
||||
}
|
||||
|
||||
#[test]
fn test_usage_stats_multiple_accumulations() {
    // Two usage reports should sum into both lifetime and session counters.
    let mut stats = UsageStats::new();
    stats.add_usage(1000, 1000, "claude-sonnet-4-20250514");
    stats.add_usage(500, 500, "claude-sonnet-4-20250514");

    for counter in [
        stats.total_input_tokens,
        stats.total_output_tokens,
        stats.session_input_tokens,
        stats.session_output_tokens,
    ] {
        assert_eq!(counter, 1500);
    }
}

#[test]
fn test_usage_stats_model_updated() {
    // The tracked model always reflects the most recent add_usage call.
    let mut stats = UsageStats::new();

    stats.add_usage(1000, 1000, "claude-sonnet-4-20250514");
    assert_eq!(stats.model.as_deref(), Some("claude-sonnet-4-20250514"));

    stats.add_usage(500, 500, "claude-opus-4-20250514");
    assert_eq!(stats.model.as_deref(), Some("claude-opus-4-20250514"));
}
|
||||
|
||||
#[test]
|
||||
fn test_session_reset() {
|
||||
let mut stats = UsageStats::new();
|
||||
@@ -394,4 +446,230 @@ mod tests {
|
||||
assert_eq!(stats.session_cost_usd, 0.0);
|
||||
assert!(stats.total_cost_usd > 0.0);
|
||||
}
|
||||
|
||||
#[test]
fn test_session_reset_clears_session_stats() {
    // Build up a variety of session counters, then wipe the session.
    let mut stats = UsageStats::new();
    for _ in 0..2 {
        stats.increment_messages();
    }
    stats.increment_code_blocks();
    stats.increment_files_edited();
    stats.increment_files_created();
    stats.increment_tool_usage("Read");

    stats.reset_session();

    // Every session-scoped counter must be back at zero.
    assert_eq!(stats.session_messages_exchanged, 0);
    assert_eq!(stats.session_code_blocks_generated, 0);
    assert_eq!(stats.session_files_edited, 0);
    assert_eq!(stats.session_files_created, 0);
    assert!(stats.session_tools_usage.is_empty());
}
|
||||
|
||||
#[test]
fn test_increment_messages() {
    let mut stats = UsageStats::new();
    for _ in 0..3 {
        stats.increment_messages();
    }

    // Lifetime and session counters advance in lockstep.
    assert_eq!(stats.messages_exchanged, 3);
    assert_eq!(stats.session_messages_exchanged, 3);
}

#[test]
fn test_increment_code_blocks() {
    let mut stats = UsageStats::new();
    for _ in 0..2 {
        stats.increment_code_blocks();
    }

    assert_eq!(stats.code_blocks_generated, 2);
    assert_eq!(stats.session_code_blocks_generated, 2);
}

#[test]
fn test_increment_files_edited() {
    let mut stats = UsageStats::new();
    stats.increment_files_edited();

    assert_eq!(stats.files_edited, 1);
    assert_eq!(stats.session_files_edited, 1);
}

#[test]
fn test_increment_files_created() {
    let mut stats = UsageStats::new();
    stats.increment_files_created();

    assert_eq!(stats.files_created, 1);
    assert_eq!(stats.session_files_created, 1);
}
|
||||
|
||||
#[test]
fn test_increment_tool_usage() {
    let mut stats = UsageStats::new();
    stats.increment_tool_usage("Read");
    stats.increment_tool_usage("Read");
    stats.increment_tool_usage("Write");

    // Both the lifetime and session maps track per-tool call counts.
    for map in [&stats.tools_usage, &stats.session_tools_usage] {
        assert_eq!(map.get("Read"), Some(&2));
        assert_eq!(map.get("Write"), Some(&1));
    }
}

#[test]
fn test_session_duration_tracking() {
    // With a live start instant, querying the duration must not panic.
    // (u64 is always non-negative, so there is nothing further to assert.)
    let mut stats = UsageStats::new();
    stats.session_start = Some(Instant::now());

    let _duration = stats.get_session_duration();
}

#[test]
fn test_session_duration_without_start() {
    // Without a start instant, the stored duration is returned verbatim.
    let mut stats = UsageStats::new();
    stats.session_start = None;
    stats.session_duration_seconds = 100;

    assert_eq!(stats.get_session_duration(), 100);
}
|
||||
|
||||
#[test]
fn test_is_consecutive_day_true() {
    // Adjacent days, including across a year boundary.
    assert!(is_consecutive_day("2024-01-15", "2024-01-16"));
    assert!(is_consecutive_day("2024-12-31", "2025-01-01"));
}

#[test]
fn test_is_consecutive_day_false() {
    // Same day, a gap of more than one day, and going backwards
    // are all non-consecutive.
    for (first, second) in [
        ("2024-01-15", "2024-01-15"),
        ("2024-01-15", "2024-01-17"),
        ("2024-01-15", "2024-01-14"),
    ] {
        assert!(!is_consecutive_day(first, second));
    }
}

#[test]
fn test_is_consecutive_day_invalid_dates() {
    // Unparseable dates are treated as non-consecutive rather than panicking.
    assert!(!is_consecutive_day("invalid", "2024-01-01"));
    assert!(!is_consecutive_day("2024-01-01", "invalid"));
    assert!(!is_consecutive_day("invalid", "also-invalid"));
}
|
||||
|
||||
#[test]
fn test_persisted_stats_from_usage_stats() {
    // Conversion to the persisted form must carry the lifetime fields over.
    let mut stats = UsageStats::new();
    stats.total_input_tokens = 5000;
    stats.total_output_tokens = 10000;
    stats.total_cost_usd = 1.23;
    stats.messages_exchanged = 50;
    stats.sessions_started = 5;
    stats.consecutive_days = 3;

    let persisted = PersistedStats::from(&stats);

    assert_eq!(persisted.total_input_tokens, 5000);
    assert_eq!(persisted.total_output_tokens, 10000);
    assert_eq!(persisted.total_cost_usd, 1.23);
    assert_eq!(persisted.messages_exchanged, 50);
    assert_eq!(persisted.sessions_started, 5);
    assert_eq!(persisted.consecutive_days, 3);
}

#[test]
fn test_apply_persisted_stats() {
    // Build the tool-usage map up front instead of inline.
    let mut tools = HashMap::new();
    tools.insert("Read".to_string(), 50);

    let persisted = PersistedStats {
        total_input_tokens: 10000,
        total_output_tokens: 20000,
        total_cost_usd: 5.50,
        messages_exchanged: 100,
        code_blocks_generated: 25,
        files_edited: 10,
        files_created: 5,
        tools_usage: tools,
        sessions_started: 10,
        consecutive_days: 7,
        total_days_used: 14,
        morning_sessions: 3,
        night_sessions: 2,
        last_session_date: Some("2024-06-15".to_string()),
    };

    let mut stats = UsageStats::new();
    stats.apply_persisted(persisted);

    // Spot-check that the persisted values landed in the live stats.
    assert_eq!(stats.total_input_tokens, 10000);
    assert_eq!(stats.total_output_tokens, 20000);
    assert_eq!(stats.total_cost_usd, 5.50);
    assert_eq!(stats.messages_exchanged, 100);
    assert_eq!(stats.tools_usage.get("Read"), Some(&50));
    assert_eq!(stats.consecutive_days, 7);
    assert_eq!(stats.morning_sessions, 3);
    assert_eq!(stats.last_session_date.as_deref(), Some("2024-06-15"));
}
|
||||
|
||||
#[test]
fn test_usage_stats_default() {
    // A defaulted UsageStats starts from a fully zeroed state.
    let stats = UsageStats::default();

    assert_eq!(stats.total_input_tokens, 0);
    assert_eq!(stats.total_output_tokens, 0);
    assert_eq!(stats.total_cost_usd, 0.0);
    assert!(stats.model.is_none());
}

#[test]
fn test_persisted_stats_default() {
    // The persisted form also defaults to an empty record.
    let persisted = PersistedStats::default();

    assert_eq!(persisted.total_input_tokens, 0);
    assert!(persisted.last_session_date.is_none());
}
|
||||
|
||||
#[test]
fn test_usage_stats_serialization() {
    // UsageStats is emitted in events, so it must serialize to JSON.
    let mut stats = UsageStats::new();
    stats.add_usage(1000, 2000, "claude-sonnet-4-20250514");
    stats.increment_messages();

    let json = serde_json::to_string(&stats).expect("Failed to serialize");

    assert!(json.contains("total_input_tokens"));
    assert!(json.contains("1000"));
}

#[test]
fn test_persisted_stats_serialization() {
    // Persisted stats must survive a JSON round trip.
    let before = PersistedStats {
        total_input_tokens: 1234,
        total_output_tokens: 5678,
        total_cost_usd: 0.99,
        ..Default::default()
    };

    let json = serde_json::to_string(&before).expect("Failed to serialize");
    let after: PersistedStats = serde_json::from_str(&json).expect("Failed to deserialize");

    assert_eq!(after.total_input_tokens, 1234);
    assert_eq!(after.total_output_tokens, 5678);
    assert!((after.total_cost_usd - 0.99).abs() < 0.0001);
}

#[test]
fn test_stats_update_event_serialization() {
    // The wrapper event must nest the stats payload under a "stats" key.
    let mut stats = UsageStats::new();
    stats.add_usage(100, 200, "claude-sonnet-4-20250514");

    let event = StatsUpdateEvent { stats };
    let json = serde_json::to_string(&event).expect("Failed to serialize");

    assert!(json.contains("stats"));
    assert!(json.contains("total_input_tokens"));
}
|
||||
}
|
||||
|
||||
@@ -137,3 +137,290 @@ pub type SharedTempFileManager = Arc<Mutex<TempFileManager>>;
|
||||
pub fn create_shared_temp_manager() -> Result<SharedTempFileManager, String> {
|
||||
Ok(Arc::new(Mutex::new(TempFileManager::new()?)))
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    /// Builds a manager rooted at `base_dir`, creating the directory first so
    /// tests never touch the real application temp location.
    fn create_test_manager(base_dir: PathBuf) -> TempFileManager {
        if !base_dir.exists() {
            fs::create_dir_all(&base_dir).expect("Failed to create test temp dir");
        }
        TempFileManager {
            base_dir,
            files: HashMap::new(),
        }
    }

    /// Convenience: a manager in a brand-new scratch directory. The returned
    /// `TempDir` guard must stay alive for the duration of the test, or the
    /// directory is deleted out from under the manager.
    fn scratch_manager() -> (TempDir, TempFileManager) {
        let guard = TempDir::new().expect("Failed to create temp dir");
        let base = guard.path().join("hikari-test");
        let manager = create_test_manager(base);
        (guard, manager)
    }

    #[test]
    fn test_new_creates_base_directory() {
        let mgr = TempFileManager::new().expect("Failed to create TempFileManager");
        assert!(mgr.base_dir.exists());
    }

    #[test]
    fn test_get_base_dir_returns_correct_path() {
        let guard = TempDir::new().expect("Failed to create temp dir");
        let base = guard.path().join("hikari-test");
        let mgr = create_test_manager(base.clone());

        assert_eq!(mgr.get_base_dir(), base.as_path());
    }

    #[test]
    fn test_save_file_creates_file_with_content() {
        let (_guard, mut mgr) = scratch_manager();

        let payload = b"Hello, world!";
        let saved = mgr
            .save_file("conv-1", payload, Some("test.txt"))
            .expect("save_file should succeed");

        assert!(saved.exists());
        assert_eq!(fs::read(&saved).expect("Failed to read file"), payload);
    }

    #[test]
    fn test_save_file_uses_correct_extension() {
        let (_guard, mut mgr) = scratch_manager();

        let saved = mgr
            .save_file("conv-1", b"test data", Some("document.pdf"))
            .expect("save_file should succeed");

        assert_eq!(saved.extension().unwrap(), "pdf");
    }

    #[test]
    fn test_save_file_uses_bin_extension_when_no_filename() {
        let (_guard, mut mgr) = scratch_manager();

        let saved = mgr
            .save_file("conv-1", b"binary data", None)
            .expect("save_file should succeed");

        assert_eq!(saved.extension().unwrap(), "bin");
    }

    #[test]
    fn test_register_file_tracks_file_path() {
        let (_guard, mut mgr) = scratch_manager();

        let tracked = PathBuf::from("/some/path/file.txt");
        mgr.register_file("conv-1", tracked.clone());

        let files = mgr.get_files_for_conversation("conv-1");
        assert_eq!(files.len(), 1);
        assert_eq!(files[0], tracked);
    }

    #[test]
    fn test_get_files_for_conversation_returns_empty_for_unknown() {
        let (_guard, mgr) = scratch_manager();

        assert!(mgr.get_files_for_conversation("unknown-conv").is_empty());
    }

    #[test]
    fn test_get_files_for_conversation_returns_all_files() {
        let (_guard, mut mgr) = scratch_manager();

        let payload = b"test";
        mgr.save_file("conv-1", payload, Some("file1.txt")).unwrap();
        mgr.save_file("conv-1", payload, Some("file2.txt")).unwrap();
        mgr.save_file("conv-2", payload, Some("file3.txt")).unwrap();

        // Files are grouped strictly by conversation id.
        assert_eq!(mgr.get_files_for_conversation("conv-1").len(), 2);
        assert_eq!(mgr.get_files_for_conversation("conv-2").len(), 1);
    }

    #[test]
    fn test_cleanup_conversation_removes_files() {
        let (_guard, mut mgr) = scratch_manager();

        let saved = mgr.save_file("conv-1", b"test", Some("test.txt")).unwrap();
        assert!(saved.exists());

        mgr.cleanup_conversation("conv-1")
            .expect("cleanup should succeed");

        assert!(!saved.exists());
        assert!(mgr.get_files_for_conversation("conv-1").is_empty());
    }

    #[test]
    fn test_cleanup_conversation_handles_missing_files() {
        let (_guard, mut mgr) = scratch_manager();

        // Track a path that was never written; cleanup should skip it quietly.
        mgr.register_file("conv-1", PathBuf::from("/nonexistent/file.txt"));

        assert!(mgr.cleanup_conversation("conv-1").is_ok());
    }

    #[test]
    fn test_cleanup_conversation_for_unknown_returns_ok() {
        let (_guard, mut mgr) = scratch_manager();

        assert!(mgr.cleanup_conversation("unknown-conv").is_ok());
    }

    #[test]
    fn test_cleanup_all_removes_all_files() {
        let (_guard, mut mgr) = scratch_manager();

        let first = mgr.save_file("conv-1", b"test", Some("f1.txt")).unwrap();
        let second = mgr.save_file("conv-2", b"test", Some("f2.txt")).unwrap();
        assert!(first.exists());
        assert!(second.exists());

        mgr.cleanup_all().expect("cleanup_all should succeed");

        assert!(!first.exists());
        assert!(!second.exists());
        assert!(mgr.files.is_empty());
    }

    #[test]
    fn test_cleanup_orphaned_files_removes_untracked() {
        let (_guard, mut mgr) = scratch_manager();
        let base = mgr.base_dir.clone();

        // One file the manager knows about...
        let tracked = mgr
            .save_file("conv-1", b"tracked", Some("tracked.txt"))
            .unwrap();
        // ...and one dropped into the directory behind its back.
        let orphan = base.join("orphan.txt");
        fs::write(&orphan, b"orphan").expect("Failed to create orphan file");

        assert!(tracked.exists());
        assert!(orphan.exists());

        let removed = mgr.cleanup_orphaned_files().expect("cleanup should succeed");
        assert_eq!(removed, 1); // Exactly the one orphan

        assert!(tracked.exists()); // Tracked file survives
        assert!(!orphan.exists()); // Orphan is gone
    }

    #[test]
    fn test_cleanup_orphaned_returns_zero_when_none() {
        let (_guard, mut mgr) = scratch_manager();
        mgr.save_file("conv-1", b"test", Some("test.txt")).unwrap();

        let removed = mgr.cleanup_orphaned_files().expect("cleanup should succeed");
        assert_eq!(removed, 0);
    }

    #[test]
    fn test_cleanup_orphaned_returns_zero_when_dir_missing() {
        // A missing base directory is treated as "nothing to clean".
        let mut mgr = TempFileManager {
            base_dir: PathBuf::from("/nonexistent/dir"),
            files: HashMap::new(),
        };

        let removed = mgr.cleanup_orphaned_files().expect("cleanup should succeed");
        assert_eq!(removed, 0);
    }

    #[test]
    fn test_default_creates_manager() {
        // Default should work as long as we can create temp directories.
        let mgr = TempFileManager::default();
        assert!(mgr.base_dir.exists());
    }

    #[test]
    fn test_create_shared_temp_manager() {
        let shared = create_shared_temp_manager().expect("should build shared manager");

        let mgr = shared.lock();
        assert!(mgr.base_dir.exists());
    }

    #[test]
    fn test_multiple_files_same_conversation() {
        let (_guard, mut mgr) = scratch_manager();

        // Save several files to the same conversation.
        for i in 0..5 {
            let body = format!("content {}", i);
            mgr.save_file("conv-1", body.as_bytes(), Some(&format!("file{}.txt", i)))
                .unwrap();
        }

        let files = mgr.get_files_for_conversation("conv-1");
        assert_eq!(files.len(), 5);

        // Each file keeps its own content, indexed in save order.
        for (i, path) in files.iter().enumerate() {
            let body = fs::read_to_string(path).expect("Failed to read");
            assert_eq!(body, format!("content {}", i));
        }
    }

    #[test]
    fn test_file_paths_contain_conversation_id() {
        let (_guard, mut mgr) = scratch_manager();

        let saved = mgr
            .save_file("my-conversation-id", b"test", Some("test.txt"))
            .unwrap();

        let name = saved.file_name().unwrap().to_str().unwrap();
        assert!(name.starts_with("my-conversation-id_"));
    }
}
|
||||
|
||||
Reference in New Issue
Block a user