Compare commits
20 Commits
v0.2.0
...
feat/providers
| Author | SHA1 | Date | |
|---|---|---|---|
|
ae06cc301d
|
|||
|
a0804ed32a
|
|||
|
daedbfd865
|
|||
| 7093e58fe4 | |||
|
cab759ec61
|
|||
| e45a1a1c98 | |||
| edc863e020 | |||
| b006f571bf | |||
| ea3cc8b26c | |||
| 2bb541fba6 | |||
|
bebf1552a6
|
|||
| b3d79a82ef | |||
| 4c46d4c8fd | |||
| 852a4d6661 | |||
|
bbeff7ae2e
|
|||
| 3f30997f0e | |||
| 06810537a9 | |||
| 94991796be | |||
| 947e56ef41 | |||
| 9fe4e8a48a |
@@ -49,13 +49,13 @@ jobs:
|
||||
- name: Run Svelte Check
|
||||
run: pnpm check
|
||||
|
||||
- name: Run frontend tests
|
||||
run: pnpm test
|
||||
- name: Run frontend tests with coverage
|
||||
run: pnpm test:coverage
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: clippy
|
||||
components: clippy, llvm-tools-preview
|
||||
|
||||
- name: Cache Rust dependencies
|
||||
uses: actions/cache@v4
|
||||
@@ -68,13 +68,16 @@ jobs:
|
||||
src-tauri/target/
|
||||
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Install cargo-llvm-cov
|
||||
run: cargo install cargo-llvm-cov --locked
|
||||
|
||||
- name: Run Clippy
|
||||
working-directory: src-tauri
|
||||
run: cargo clippy --all-targets --all-features -- -D warnings
|
||||
|
||||
- name: Run Rust tests
|
||||
- name: Run Rust tests with coverage
|
||||
working-directory: src-tauri
|
||||
run: cargo test
|
||||
run: cargo llvm-cov --fail-under-lines 50
|
||||
|
||||
build-linux:
|
||||
name: Build Linux
|
||||
|
||||
@@ -8,3 +8,6 @@ node_modules
|
||||
!.env.example
|
||||
vite.config.js.timestamp-*
|
||||
vite.config.ts.timestamp-*
|
||||
|
||||
# Coverage reports
|
||||
/coverage
|
||||
|
||||
@@ -2,6 +2,7 @@ build/
|
||||
.svelte-kit/
|
||||
dist/
|
||||
src-tauri/target/
|
||||
src-tauri/gen/
|
||||
node_modules/
|
||||
.pnpm-store/
|
||||
pnpm-lock.yaml
|
||||
|
||||
@@ -1 +1,29 @@
|
||||
tem
|
||||
# hikari-desktop
|
||||
|
||||
Desktop companion application featuring Hikari.
|
||||
|
||||
## Live Version
|
||||
|
||||
This page is currently deployed. [View the live website.](https://git.nhcarrigan.com/nhcarrigan/hikari-desktop/releases)
|
||||
|
||||
## Feedback and Bugs
|
||||
|
||||
If you have feedback or a bug report, please [log a ticket on our forum](https://support.nhcarrigan.com).
|
||||
|
||||
## Contributing
|
||||
|
||||
If you would like to contribute to the project, you may create a Pull Request containing your proposed changes and we will review it as soon as we are able! Please review our [contributing guidelines](CONTRIBUTING.md) first.
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
Before interacting with our community, please read our [Code of Conduct](CODE_OF_CONDUCT.md).
|
||||
|
||||
## License
|
||||
|
||||
This software is licensed under our [global software license](https://docs.nhcarrigan.com/#/license).
|
||||
|
||||
Copyright held by Naomi Carrigan.
|
||||
|
||||
## Contact
|
||||
|
||||
We may be contacted through our [Chat Server](http://chat.nhcarrigan.com) or via email at `contact@nhcarrigan.com`
|
||||
|
||||
@@ -27,6 +27,6 @@ export default tseslint.config(
|
||||
},
|
||||
},
|
||||
{
|
||||
ignores: ["build/", ".svelte-kit/", "dist/", "src-tauri/target/", "node_modules/"],
|
||||
ignores: ["build/", ".svelte-kit/", "dist/", "src-tauri/target/", "node_modules/", "coverage/"],
|
||||
}
|
||||
);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "hikari-desktop",
|
||||
"version": "0.2.0",
|
||||
"version": "1.1.1",
|
||||
"description": "",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
@@ -16,6 +16,10 @@
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest",
|
||||
"test:coverage": "vitest run --coverage",
|
||||
"test:backend": "cd src-tauri && cargo test",
|
||||
"test:backend:coverage": "cd src-tauri && cargo llvm-cov --text",
|
||||
"test:all": "pnpm test && pnpm test:backend",
|
||||
"coverage:all": "pnpm test:coverage && pnpm test:backend:coverage",
|
||||
"lint": "eslint .",
|
||||
"lint:fix": "eslint . --fix",
|
||||
"format": "prettier --write .",
|
||||
@@ -23,13 +27,44 @@
|
||||
},
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/commands": "6.8.1",
|
||||
"@codemirror/lang-angular": "^0.1.4",
|
||||
"@codemirror/lang-cpp": "^6.0.3",
|
||||
"@codemirror/lang-css": "^6.3.1",
|
||||
"@codemirror/lang-go": "^6.0.1",
|
||||
"@codemirror/lang-html": "^6.4.11",
|
||||
"@codemirror/lang-java": "^6.0.2",
|
||||
"@codemirror/lang-javascript": "^6.2.4",
|
||||
"@codemirror/lang-json": "^6.0.2",
|
||||
"@codemirror/lang-less": "^6.0.2",
|
||||
"@codemirror/lang-markdown": "^6.5.0",
|
||||
"@codemirror/lang-php": "^6.0.2",
|
||||
"@codemirror/lang-python": "^6.2.1",
|
||||
"@codemirror/lang-rust": "^6.0.2",
|
||||
"@codemirror/lang-sass": "^6.0.2",
|
||||
"@codemirror/lang-sql": "^6.10.0",
|
||||
"@codemirror/lang-vue": "^0.1.3",
|
||||
"@codemirror/lang-wast": "^6.0.2",
|
||||
"@codemirror/lang-xml": "^6.1.0",
|
||||
"@codemirror/lang-yaml": "^6.1.2",
|
||||
"@codemirror/language": "^6.12.1",
|
||||
"@codemirror/legacy-modes": "^6.5.2",
|
||||
"@codemirror/state": "^6.5.4",
|
||||
"@codemirror/theme-one-dark": "^6.1.3",
|
||||
"@codemirror/view": "^6.39.11",
|
||||
"@lezer/highlight": "^1.2.3",
|
||||
"@tauri-apps/api": "^2",
|
||||
"@tauri-apps/plugin-clipboard-manager": "^2.3.2",
|
||||
"@tauri-apps/plugin-dialog": "^2",
|
||||
"@tauri-apps/plugin-fs": "^2.4.5",
|
||||
"@tauri-apps/plugin-notification": "^2",
|
||||
"@tauri-apps/plugin-opener": "^2",
|
||||
"@tauri-apps/plugin-os": "^2",
|
||||
"@tauri-apps/plugin-shell": "^2.3.4",
|
||||
"@tauri-apps/plugin-store": "^2",
|
||||
"@tauri-apps/plugin-notification": "^2",
|
||||
"@tauri-apps/plugin-os": "^2"
|
||||
"codemirror": "^6.0.2",
|
||||
"highlight.js": "^11.11.1",
|
||||
"marked": "^17.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.39.2",
|
||||
@@ -40,6 +75,7 @@
|
||||
"@tauri-apps/cli": "^2",
|
||||
"@testing-library/jest-dom": "^6.9.1",
|
||||
"@testing-library/svelte": "^5.3.1",
|
||||
"@vitest/coverage-v8": "^4.0.18",
|
||||
"eslint": "^9.39.2",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-plugin-svelte": "^3.14.0",
|
||||
|
||||
@@ -8,12 +8,96 @@ importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
'@codemirror/commands':
|
||||
specifier: 6.8.1
|
||||
version: 6.8.1
|
||||
'@codemirror/lang-angular':
|
||||
specifier: ^0.1.4
|
||||
version: 0.1.4
|
||||
'@codemirror/lang-cpp':
|
||||
specifier: ^6.0.3
|
||||
version: 6.0.3
|
||||
'@codemirror/lang-css':
|
||||
specifier: ^6.3.1
|
||||
version: 6.3.1
|
||||
'@codemirror/lang-go':
|
||||
specifier: ^6.0.1
|
||||
version: 6.0.1
|
||||
'@codemirror/lang-html':
|
||||
specifier: ^6.4.11
|
||||
version: 6.4.11
|
||||
'@codemirror/lang-java':
|
||||
specifier: ^6.0.2
|
||||
version: 6.0.2
|
||||
'@codemirror/lang-javascript':
|
||||
specifier: ^6.2.4
|
||||
version: 6.2.4
|
||||
'@codemirror/lang-json':
|
||||
specifier: ^6.0.2
|
||||
version: 6.0.2
|
||||
'@codemirror/lang-less':
|
||||
specifier: ^6.0.2
|
||||
version: 6.0.2
|
||||
'@codemirror/lang-markdown':
|
||||
specifier: ^6.5.0
|
||||
version: 6.5.0
|
||||
'@codemirror/lang-php':
|
||||
specifier: ^6.0.2
|
||||
version: 6.0.2
|
||||
'@codemirror/lang-python':
|
||||
specifier: ^6.2.1
|
||||
version: 6.2.1
|
||||
'@codemirror/lang-rust':
|
||||
specifier: ^6.0.2
|
||||
version: 6.0.2
|
||||
'@codemirror/lang-sass':
|
||||
specifier: ^6.0.2
|
||||
version: 6.0.2
|
||||
'@codemirror/lang-sql':
|
||||
specifier: ^6.10.0
|
||||
version: 6.10.0
|
||||
'@codemirror/lang-vue':
|
||||
specifier: ^0.1.3
|
||||
version: 0.1.3
|
||||
'@codemirror/lang-wast':
|
||||
specifier: ^6.0.2
|
||||
version: 6.0.2
|
||||
'@codemirror/lang-xml':
|
||||
specifier: ^6.1.0
|
||||
version: 6.1.0
|
||||
'@codemirror/lang-yaml':
|
||||
specifier: ^6.1.2
|
||||
version: 6.1.2
|
||||
'@codemirror/language':
|
||||
specifier: ^6.12.1
|
||||
version: 6.12.1
|
||||
'@codemirror/legacy-modes':
|
||||
specifier: ^6.5.2
|
||||
version: 6.5.2
|
||||
'@codemirror/state':
|
||||
specifier: ^6.5.4
|
||||
version: 6.5.4
|
||||
'@codemirror/theme-one-dark':
|
||||
specifier: ^6.1.3
|
||||
version: 6.1.3
|
||||
'@codemirror/view':
|
||||
specifier: ^6.39.11
|
||||
version: 6.39.11
|
||||
'@lezer/highlight':
|
||||
specifier: ^1.2.3
|
||||
version: 1.2.3
|
||||
'@tauri-apps/api':
|
||||
specifier: ^2
|
||||
version: 2.9.1
|
||||
'@tauri-apps/plugin-clipboard-manager':
|
||||
specifier: ^2.3.2
|
||||
version: 2.3.2
|
||||
'@tauri-apps/plugin-dialog':
|
||||
specifier: ^2
|
||||
version: 2.6.0
|
||||
'@tauri-apps/plugin-fs':
|
||||
specifier: ^2.4.5
|
||||
version: 2.4.5
|
||||
'@tauri-apps/plugin-notification':
|
||||
specifier: ^2
|
||||
version: 2.3.3
|
||||
@@ -29,6 +113,15 @@ importers:
|
||||
'@tauri-apps/plugin-store':
|
||||
specifier: ^2
|
||||
version: 2.4.2
|
||||
codemirror:
|
||||
specifier: ^6.0.2
|
||||
version: 6.0.2
|
||||
highlight.js:
|
||||
specifier: ^11.11.1
|
||||
version: 11.11.1
|
||||
marked:
|
||||
specifier: ^17.0.1
|
||||
version: 17.0.1
|
||||
devDependencies:
|
||||
'@eslint/js':
|
||||
specifier: ^9.39.2
|
||||
@@ -54,6 +147,9 @@ importers:
|
||||
'@testing-library/svelte':
|
||||
specifier: ^5.3.1
|
||||
version: 5.3.1(svelte@5.46.3)(vite@6.4.1(jiti@2.6.1)(lightningcss@1.30.2))(vitest@4.0.17(jiti@2.6.1)(jsdom@27.4.0)(lightningcss@1.30.2))
|
||||
'@vitest/coverage-v8':
|
||||
specifier: ^4.0.18
|
||||
version: 4.0.18(vitest@4.0.17(jiti@2.6.1)(jsdom@27.4.0)(lightningcss@1.30.2))
|
||||
eslint:
|
||||
specifier: ^9.39.2
|
||||
version: 9.39.2(jiti@2.6.1)
|
||||
@@ -118,14 +214,115 @@ packages:
|
||||
resolution: {integrity: sha512-JYgintcMjRiCvS8mMECzaEn+m3PfoQiyqukOMCCVQtoJGYJw8j/8LBJEiqkHLkfwCcs74E3pbAUFNg7d9VNJ+Q==}
|
||||
engines: {node: '>=6.9.0'}
|
||||
|
||||
'@babel/helper-string-parser@7.27.1':
|
||||
resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==}
|
||||
engines: {node: '>=6.9.0'}
|
||||
|
||||
'@babel/helper-validator-identifier@7.28.5':
|
||||
resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==}
|
||||
engines: {node: '>=6.9.0'}
|
||||
|
||||
'@babel/parser@7.28.6':
|
||||
resolution: {integrity: sha512-TeR9zWR18BvbfPmGbLampPMW+uW1NZnJlRuuHso8i87QZNq2JRF9i6RgxRqtEq+wQGsS19NNTWr2duhnE49mfQ==}
|
||||
engines: {node: '>=6.0.0'}
|
||||
hasBin: true
|
||||
|
||||
'@babel/runtime@7.28.6':
|
||||
resolution: {integrity: sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==}
|
||||
engines: {node: '>=6.9.0'}
|
||||
|
||||
'@babel/types@7.28.6':
|
||||
resolution: {integrity: sha512-0ZrskXVEHSWIqZM/sQZ4EV3jZJXRkio/WCxaqKZP1g//CEWEPSfeZFcms4XeKBCHU0ZKnIkdJeU/kF+eRp5lBg==}
|
||||
engines: {node: '>=6.9.0'}
|
||||
|
||||
'@bcoe/v8-coverage@1.0.2':
|
||||
resolution: {integrity: sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
'@codemirror/autocomplete@6.20.0':
|
||||
resolution: {integrity: sha512-bOwvTOIJcG5FVo5gUUupiwYh8MioPLQ4UcqbcRf7UQ98X90tCa9E1kZ3Z7tqwpZxYyOvh1YTYbmZE9RTfTp5hg==}
|
||||
|
||||
'@codemirror/commands@6.8.1':
|
||||
resolution: {integrity: sha512-KlGVYufHMQzxbdQONiLyGQDUW0itrLZwq3CcY7xpv9ZLRHqzkBSoteocBHtMCoY7/Ci4xhzSrToIeLg7FxHuaw==}
|
||||
|
||||
'@codemirror/lang-angular@0.1.4':
|
||||
resolution: {integrity: sha512-oap+gsltb/fzdlTQWD6BFF4bSLKcDnlxDsLdePiJpCVNKWXSTAbiiQeYI3UmES+BLAdkmIC1WjyztC1pi/bX4g==}
|
||||
|
||||
'@codemirror/lang-cpp@6.0.3':
|
||||
resolution: {integrity: sha512-URM26M3vunFFn9/sm6rzqrBzDgfWuDixp85uTY49wKudToc2jTHUrKIGGKs+QWND+YLofNNZpxcNGRynFJfvgA==}
|
||||
|
||||
'@codemirror/lang-css@6.3.1':
|
||||
resolution: {integrity: sha512-kr5fwBGiGtmz6l0LSJIbno9QrifNMUusivHbnA1H6Dmqy4HZFte3UAICix1VuKo0lMPKQr2rqB+0BkKi/S3Ejg==}
|
||||
|
||||
'@codemirror/lang-go@6.0.1':
|
||||
resolution: {integrity: sha512-7fNvbyNylvqCphW9HD6WFnRpcDjr+KXX/FgqXy5H5ZS0eC5edDljukm/yNgYkwTsgp2busdod50AOTIy6Jikfg==}
|
||||
|
||||
'@codemirror/lang-html@6.4.11':
|
||||
resolution: {integrity: sha512-9NsXp7Nwp891pQchI7gPdTwBuSuT3K65NGTHWHNJ55HjYcHLllr0rbIZNdOzas9ztc1EUVBlHou85FFZS4BNnw==}
|
||||
|
||||
'@codemirror/lang-java@6.0.2':
|
||||
resolution: {integrity: sha512-m5Nt1mQ/cznJY7tMfQTJchmrjdjQ71IDs+55d1GAa8DGaB8JXWsVCkVT284C3RTASaY43YknrK2X3hPO/J3MOQ==}
|
||||
|
||||
'@codemirror/lang-javascript@6.2.4':
|
||||
resolution: {integrity: sha512-0WVmhp1QOqZ4Rt6GlVGwKJN3KW7Xh4H2q8ZZNGZaP6lRdxXJzmjm4FqvmOojVj6khWJHIb9sp7U/72W7xQgqAA==}
|
||||
|
||||
'@codemirror/lang-json@6.0.2':
|
||||
resolution: {integrity: sha512-x2OtO+AvwEHrEwR0FyyPtfDUiloG3rnVTSZV1W8UteaLL8/MajQd8DpvUb2YVzC+/T18aSDv0H9mu+xw0EStoQ==}
|
||||
|
||||
'@codemirror/lang-less@6.0.2':
|
||||
resolution: {integrity: sha512-EYdQTG22V+KUUk8Qq582g7FMnCZeEHsyuOJisHRft/mQ+ZSZ2w51NupvDUHiqtsOy7It5cHLPGfHQLpMh9bqpQ==}
|
||||
|
||||
'@codemirror/lang-markdown@6.5.0':
|
||||
resolution: {integrity: sha512-0K40bZ35jpHya6FriukbgaleaqzBLZfOh7HuzqbMxBXkbYMJDxfF39c23xOgxFezR+3G+tR2/Mup+Xk865OMvw==}
|
||||
|
||||
'@codemirror/lang-php@6.0.2':
|
||||
resolution: {integrity: sha512-ZKy2v1n8Fc8oEXj0Th0PUMXzQJ0AIR6TaZU+PbDHExFwdu+guzOA4jmCHS1Nz4vbFezwD7LyBdDnddSJeScMCA==}
|
||||
|
||||
'@codemirror/lang-python@6.2.1':
|
||||
resolution: {integrity: sha512-IRjC8RUBhn9mGR9ywecNhB51yePWCGgvHfY1lWN/Mrp3cKuHr0isDKia+9HnvhiWNnMpbGhWrkhuWOc09exRyw==}
|
||||
|
||||
'@codemirror/lang-rust@6.0.2':
|
||||
resolution: {integrity: sha512-EZaGjCUegtiU7kSMvOfEZpaCReowEf3yNidYu7+vfuGTm9ow4mthAparY5hisJqOHmJowVH3Upu+eJlUji6qqA==}
|
||||
|
||||
'@codemirror/lang-sass@6.0.2':
|
||||
resolution: {integrity: sha512-l/bdzIABvnTo1nzdY6U+kPAC51czYQcOErfzQ9zSm9D8GmNPD0WTW8st/CJwBTPLO8jlrbyvlSEcN20dc4iL0Q==}
|
||||
|
||||
'@codemirror/lang-sql@6.10.0':
|
||||
resolution: {integrity: sha512-6ayPkEd/yRw0XKBx5uAiToSgGECo/GY2NoJIHXIIQh1EVwLuKoU8BP/qK0qH5NLXAbtJRLuT73hx7P9X34iO4w==}
|
||||
|
||||
'@codemirror/lang-vue@0.1.3':
|
||||
resolution: {integrity: sha512-QSKdtYTDRhEHCfo5zOShzxCmqKJvgGrZwDQSdbvCRJ5pRLWBS7pD/8e/tH44aVQT6FKm0t6RVNoSUWHOI5vNug==}
|
||||
|
||||
'@codemirror/lang-wast@6.0.2':
|
||||
resolution: {integrity: sha512-Imi2KTpVGm7TKuUkqyJ5NRmeFWF7aMpNiwHnLQe0x9kmrxElndyH0K6H/gXtWwY6UshMRAhpENsgfpSwsgmC6Q==}
|
||||
|
||||
'@codemirror/lang-xml@6.1.0':
|
||||
resolution: {integrity: sha512-3z0blhicHLfwi2UgkZYRPioSgVTo9PV5GP5ducFH6FaHy0IAJRg+ixj5gTR1gnT/glAIC8xv4w2VL1LoZfs+Jg==}
|
||||
|
||||
'@codemirror/lang-yaml@6.1.2':
|
||||
resolution: {integrity: sha512-dxrfG8w5Ce/QbT7YID7mWZFKhdhsaTNOYjOkSIMt1qmC4VQnXSDSYVHHHn8k6kJUfIhtLo8t1JJgltlxWdsITw==}
|
||||
|
||||
'@codemirror/language@6.12.1':
|
||||
resolution: {integrity: sha512-Fa6xkSiuGKc8XC8Cn96T+TQHYj4ZZ7RdFmXA3i9xe/3hLHfwPZdM+dqfX0Cp0zQklBKhVD8Yzc8LS45rkqcwpQ==}
|
||||
|
||||
'@codemirror/legacy-modes@6.5.2':
|
||||
resolution: {integrity: sha512-/jJbwSTazlQEDOQw2FJ8LEEKVS72pU0lx6oM54kGpL8t/NJ2Jda3CZ4pcltiKTdqYSRk3ug1B3pil1gsjA6+8Q==}
|
||||
|
||||
'@codemirror/lint@6.9.3':
|
||||
resolution: {integrity: sha512-y3YkYhdnhjDBAe0VIA0c4wVoFOvnp8CnAvfLqi0TqotIv92wIlAAP7HELOpLBsKwjAX6W92rSflA6an/2zBvXw==}
|
||||
|
||||
'@codemirror/search@6.6.0':
|
||||
resolution: {integrity: sha512-koFuNXcDvyyotWcgOnZGmY7LZqEOXZaaxD/j6n18TCLx2/9HieZJ5H6hs1g8FiRxBD0DNfs0nXn17g872RmYdw==}
|
||||
|
||||
'@codemirror/state@6.5.4':
|
||||
resolution: {integrity: sha512-8y7xqG/hpB53l25CIoit9/ngxdfoG+fx+V3SHBrinnhOtLvKHRyAJJuHzkWrR4YXXLX8eXBsejgAAxHUOdW1yw==}
|
||||
|
||||
'@codemirror/theme-one-dark@6.1.3':
|
||||
resolution: {integrity: sha512-NzBdIvEJmx6fjeremiGp3t/okrLPYT0d9orIc7AFun8oZcRk58aejkqhv6spnz4MLAevrKNPMQYXEWMg4s+sKA==}
|
||||
|
||||
'@codemirror/view@6.39.11':
|
||||
resolution: {integrity: sha512-bWdeR8gWM87l4DB/kYSF9A+dVackzDb/V56Tq7QVrQ7rn86W0rgZFtlL3g3pem6AeGcb9NQNoy3ao4WpW4h5tQ==}
|
||||
|
||||
'@csstools/color-helpers@5.1.0':
|
||||
resolution: {integrity: sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==}
|
||||
engines: {node: '>=18'}
|
||||
@@ -393,6 +590,60 @@ packages:
|
||||
'@jridgewell/trace-mapping@0.3.31':
|
||||
resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==}
|
||||
|
||||
'@lezer/common@1.5.0':
|
||||
resolution: {integrity: sha512-PNGcolp9hr4PJdXR4ix7XtixDrClScvtSCYW3rQG106oVMOOI+jFb+0+J3mbeL/53g1Zd6s0kJzaw6Ri68GmAA==}
|
||||
|
||||
'@lezer/cpp@1.1.5':
|
||||
resolution: {integrity: sha512-DIhSXmYtJKLehrjzDFN+2cPt547ySQ41nA8yqcDf/GxMc+YM736xqltFkvADL2M0VebU5I+3+4ks2Vv+Kyq3Aw==}
|
||||
|
||||
'@lezer/css@1.3.0':
|
||||
resolution: {integrity: sha512-pBL7hup88KbI7hXnZV3PQsn43DHy6TWyzuyk2AO9UyoXcDltvIdqWKE1dLL/45JVZ+YZkHe1WVHqO6wugZZWcw==}
|
||||
|
||||
'@lezer/go@1.0.1':
|
||||
resolution: {integrity: sha512-xToRsYxwsgJNHTgNdStpcvmbVuKxTapV0dM0wey1geMMRc9aggoVyKgzYp41D2/vVOx+Ii4hmE206kvxIXBVXQ==}
|
||||
|
||||
'@lezer/highlight@1.2.3':
|
||||
resolution: {integrity: sha512-qXdH7UqTvGfdVBINrgKhDsVTJTxactNNxLk7+UMwZhU13lMHaOBlJe9Vqp907ya56Y3+ed2tlqzys7jDkTmW0g==}
|
||||
|
||||
'@lezer/html@1.3.13':
|
||||
resolution: {integrity: sha512-oI7n6NJml729m7pjm9lvLvmXbdoMoi2f+1pwSDJkl9d68zGr7a9Btz8NdHTGQZtW2DA25ybeuv/SyDb9D5tseg==}
|
||||
|
||||
'@lezer/java@1.1.3':
|
||||
resolution: {integrity: sha512-yHquUfujwg6Yu4Fd1GNHCvidIvJwi/1Xu2DaKl/pfWIA2c1oXkVvawH3NyXhCaFx4OdlYBVX5wvz2f7Aoa/4Xw==}
|
||||
|
||||
'@lezer/javascript@1.5.4':
|
||||
resolution: {integrity: sha512-vvYx3MhWqeZtGPwDStM2dwgljd5smolYD2lR2UyFcHfxbBQebqx8yjmFmxtJ/E6nN6u1D9srOiVWm3Rb4tmcUA==}
|
||||
|
||||
'@lezer/json@1.0.3':
|
||||
resolution: {integrity: sha512-BP9KzdF9Y35PDpv04r0VeSTKDeox5vVr3efE7eBbx3r4s3oNLfunchejZhjArmeieBH+nVOpgIiBJpEAv8ilqQ==}
|
||||
|
||||
'@lezer/lr@1.4.8':
|
||||
resolution: {integrity: sha512-bPWa0Pgx69ylNlMlPvBPryqeLYQjyJjqPx+Aupm5zydLIF3NE+6MMLT8Yi23Bd9cif9VS00aUebn+6fDIGBcDA==}
|
||||
|
||||
'@lezer/markdown@1.6.3':
|
||||
resolution: {integrity: sha512-jpGm5Ps+XErS+xA4urw7ogEGkeZOahVQF21Z6oECF0sj+2liwZopd2+I8uH5I/vZsRuuze3OxBREIANLf6KKUw==}
|
||||
|
||||
'@lezer/php@1.0.5':
|
||||
resolution: {integrity: sha512-W7asp9DhM6q0W6DYNwIkLSKOvxlXRrif+UXBMxzsJUuqmhE7oVU+gS3THO4S/Puh7Xzgm858UNaFi6dxTP8dJA==}
|
||||
|
||||
'@lezer/python@1.1.18':
|
||||
resolution: {integrity: sha512-31FiUrU7z9+d/ElGQLJFXl+dKOdx0jALlP3KEOsGTex8mvj+SoE1FgItcHWK/axkxCHGUSpqIHt6JAWfWu9Rhg==}
|
||||
|
||||
'@lezer/rust@1.0.2':
|
||||
resolution: {integrity: sha512-Lz5sIPBdF2FUXcWeCu1//ojFAZqzTQNRga0aYv6dYXqJqPfMdCAI0NzajWUd4Xijj1IKJLtjoXRPMvTKWBcqKg==}
|
||||
|
||||
'@lezer/sass@1.1.0':
|
||||
resolution: {integrity: sha512-3mMGdCTUZ/84ArHOuXWQr37pnf7f+Nw9ycPUeKX+wu19b7pSMcZGLbaXwvD2APMBDOGxPmpK/O6S1v1EvLoqgQ==}
|
||||
|
||||
'@lezer/xml@1.0.6':
|
||||
resolution: {integrity: sha512-CdDwirL0OEaStFue/66ZmFSeppuL6Dwjlk8qk153mSQwiSH/Dlri4GNymrNWnUmPl2Um7QfV1FO9KFUyX3Twww==}
|
||||
|
||||
'@lezer/yaml@1.0.3':
|
||||
resolution: {integrity: sha512-GuBLekbw9jDBDhGur82nuwkxKQ+a3W5H0GfaAthDXcAu+XdpS43VlnxA9E9hllkpSP5ellRDKjLLj7Lu9Wr6xA==}
|
||||
|
||||
'@marijn/find-cluster-break@1.0.2':
|
||||
resolution: {integrity: sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==}
|
||||
|
||||
'@polka/url@1.0.0-next.29':
|
||||
resolution: {integrity: sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==}
|
||||
|
||||
@@ -729,9 +980,15 @@ packages:
|
||||
engines: {node: '>= 10'}
|
||||
hasBin: true
|
||||
|
||||
'@tauri-apps/plugin-clipboard-manager@2.3.2':
|
||||
resolution: {integrity: sha512-CUlb5Hqi2oZbcZf4VUyUH53XWPPdtpw43EUpCza5HWZJwxEoDowFzNUDt1tRUXA8Uq+XPn17Ysfptip33sG4eQ==}
|
||||
|
||||
'@tauri-apps/plugin-dialog@2.6.0':
|
||||
resolution: {integrity: sha512-q4Uq3eY87TdcYzXACiYSPhmpBA76shgmQswGkSVio4C82Sz2W4iehe9TnKYwbq7weHiL88Yw19XZm7v28+Micg==}
|
||||
|
||||
'@tauri-apps/plugin-fs@2.4.5':
|
||||
resolution: {integrity: sha512-dVxWWGE6VrOxC7/jlhyE+ON/Cc2REJlM35R3PJX3UvFw2XwYhLGQVAIyrehenDdKjotipjYEVc4YjOl3qq90fA==}
|
||||
|
||||
'@tauri-apps/plugin-notification@2.3.3':
|
||||
resolution: {integrity: sha512-Zw+ZH18RJb41G4NrfHgIuofJiymusqN+q8fGUIIV7vyCH+5sSn5coqRv/MWB9qETsUs97vmU045q7OyseCV3Qg==}
|
||||
|
||||
@@ -851,6 +1108,15 @@ packages:
|
||||
resolution: {integrity: sha512-LZ2NqIHFhvFwxG0qZeLL9DvdNAHPGCY5dIRwBhyYeU+LfLhcStE1ImjsuTG/WaVh3XysGaeLW8Rqq7cGkPCFvw==}
|
||||
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
|
||||
|
||||
'@vitest/coverage-v8@4.0.18':
|
||||
resolution: {integrity: sha512-7i+N2i0+ME+2JFZhfuz7Tg/FqKtilHjGyGvoHYQ6iLV0zahbsJ9sljC9OcFcPDbhYKCet+sG8SsVqlyGvPflZg==}
|
||||
peerDependencies:
|
||||
'@vitest/browser': 4.0.18
|
||||
vitest: 4.0.18
|
||||
peerDependenciesMeta:
|
||||
'@vitest/browser':
|
||||
optional: true
|
||||
|
||||
'@vitest/expect@4.0.17':
|
||||
resolution: {integrity: sha512-mEoqP3RqhKlbmUmntNDDCJeTDavDR+fVYkSOw8qRwJFaW/0/5zA9zFeTrHqNtcmwh6j26yMmwx2PqUDPzt5ZAQ==}
|
||||
|
||||
@@ -868,6 +1134,9 @@ packages:
|
||||
'@vitest/pretty-format@4.0.17':
|
||||
resolution: {integrity: sha512-Ah3VAYmjcEdHg6+MwFE17qyLqBHZ+ni2ScKCiW2XrlSBV4H3Z7vYfPfz7CWQ33gyu76oc0Ai36+kgLU3rfF4nw==}
|
||||
|
||||
'@vitest/pretty-format@4.0.18':
|
||||
resolution: {integrity: sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==}
|
||||
|
||||
'@vitest/runner@4.0.17':
|
||||
resolution: {integrity: sha512-JmuQyf8aMWoo/LmNFppdpkfRVHJcsgzkbCA+/Bk7VfNH7RE6Ut2qxegeyx2j3ojtJtKIbIGy3h+KxGfYfk28YQ==}
|
||||
|
||||
@@ -880,6 +1149,9 @@ packages:
|
||||
'@vitest/utils@4.0.17':
|
||||
resolution: {integrity: sha512-RG6iy+IzQpa9SB8HAFHJ9Y+pTzI+h8553MrciN9eC6TFBErqrQaTas4vG+MVj8S4uKk8uTT2p0vgZPnTdxd96w==}
|
||||
|
||||
'@vitest/utils@4.0.18':
|
||||
resolution: {integrity: sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==}
|
||||
|
||||
acorn-jsx@5.3.2:
|
||||
resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==}
|
||||
peerDependencies:
|
||||
@@ -923,6 +1195,9 @@ packages:
|
||||
resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
ast-v8-to-istanbul@0.3.10:
|
||||
resolution: {integrity: sha512-p4K7vMz2ZSk3wN8l5o3y2bJAoZXT3VuJI5OLTATY/01CYWumWvwkUw0SqDBnNq6IiTO3qDa1eSQDibAV8g7XOQ==}
|
||||
|
||||
axobject-query@4.1.0:
|
||||
resolution: {integrity: sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@@ -959,6 +1234,9 @@ packages:
|
||||
resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==}
|
||||
engines: {node: '>=6'}
|
||||
|
||||
codemirror@6.0.2:
|
||||
resolution: {integrity: sha512-VhydHotNW5w1UGK0Qj96BwSk/Zqbp9WbnyK2W/eVMv4QyF41INRGpjUhFJY7/uDNuudSc33a/PKr4iDqRduvHw==}
|
||||
|
||||
color-convert@2.0.1:
|
||||
resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
|
||||
engines: {node: '>=7.0.0'}
|
||||
@@ -973,6 +1251,9 @@ packages:
|
||||
resolution: {integrity: sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
crelt@1.0.6:
|
||||
resolution: {integrity: sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==}
|
||||
|
||||
cross-spawn@7.0.6:
|
||||
resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==}
|
||||
engines: {node: '>= 8'}
|
||||
@@ -1185,10 +1466,17 @@ packages:
|
||||
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
highlight.js@11.11.1:
|
||||
resolution: {integrity: sha512-Xwwo44whKBVCYoliBQwaPvtd/2tYFkRQtXDWj1nackaV2JPXx3L0+Jvd8/qCJ2p+ML0/XVkJ2q+Mr+UVdpJK5w==}
|
||||
engines: {node: '>=12.0.0'}
|
||||
|
||||
html-encoding-sniffer@6.0.0:
|
||||
resolution: {integrity: sha512-CV9TW3Y3f8/wT0BRFc1/KAVQ3TUHiXmaAb6VW9vtiMFf7SLoMd1PdAc4W3KFOFETBJUb90KatHqlsZMWV+R9Gg==}
|
||||
engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0}
|
||||
|
||||
html-escaper@2.0.2:
|
||||
resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==}
|
||||
|
||||
http-proxy-agent@7.0.2:
|
||||
resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==}
|
||||
engines: {node: '>= 14'}
|
||||
@@ -1234,6 +1522,18 @@ packages:
|
||||
isexe@2.0.0:
|
||||
resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
|
||||
|
||||
istanbul-lib-coverage@3.2.2:
|
||||
resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
istanbul-lib-report@3.0.1:
|
||||
resolution: {integrity: sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
istanbul-reports@3.2.0:
|
||||
resolution: {integrity: sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
jiti@2.6.1:
|
||||
resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==}
|
||||
hasBin: true
|
||||
@@ -1241,6 +1541,9 @@ packages:
|
||||
js-tokens@4.0.0:
|
||||
resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
|
||||
|
||||
js-tokens@9.0.1:
|
||||
resolution: {integrity: sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==}
|
||||
|
||||
js-yaml@4.1.1:
|
||||
resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==}
|
||||
hasBin: true
|
||||
@@ -1372,6 +1675,18 @@ packages:
|
||||
magic-string@0.30.21:
|
||||
resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==}
|
||||
|
||||
magicast@0.5.1:
|
||||
resolution: {integrity: sha512-xrHS24IxaLrvuo613F719wvOIv9xPHFWQHuvGUBmPnCA/3MQxKI3b+r7n1jAoDHmsbC5bRhTZYR77invLAxVnw==}
|
||||
|
||||
make-dir@4.0.0:
|
||||
resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
marked@17.0.1:
|
||||
resolution: {integrity: sha512-boeBdiS0ghpWcSwoNm/jJBwdpFaMnZWRzjA6SkUMYb40SVaN1x7mmfGKp0jvexGcx+7y2La5zRZsYFZI6Qpypg==}
|
||||
engines: {node: '>= 20'}
|
||||
hasBin: true
|
||||
|
||||
mdn-data@2.12.2:
|
||||
resolution: {integrity: sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==}
|
||||
|
||||
@@ -1573,6 +1888,9 @@ packages:
|
||||
resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
style-mod@4.1.3:
|
||||
resolution: {integrity: sha512-i/n8VsZydrugj3Iuzll8+x/00GH2vnYsk1eomD8QiRrSAeW6ItbCQDtfXCeJHd0iwiNagqjQkvpvREEPtW3IoQ==}
|
||||
|
||||
supports-color@7.2.0:
|
||||
resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
|
||||
engines: {node: '>=8'}
|
||||
@@ -1752,6 +2070,9 @@ packages:
|
||||
jsdom:
|
||||
optional: true
|
||||
|
||||
w3c-keyname@2.2.8:
|
||||
resolution: {integrity: sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==}
|
||||
|
||||
w3c-xmlserializer@5.0.0:
|
||||
resolution: {integrity: sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==}
|
||||
engines: {node: '>=18'}
|
||||
@@ -1842,10 +2163,233 @@ snapshots:
|
||||
js-tokens: 4.0.0
|
||||
picocolors: 1.1.1
|
||||
|
||||
'@babel/helper-string-parser@7.27.1': {}
|
||||
|
||||
'@babel/helper-validator-identifier@7.28.5': {}
|
||||
|
||||
'@babel/parser@7.28.6':
|
||||
dependencies:
|
||||
'@babel/types': 7.28.6
|
||||
|
||||
'@babel/runtime@7.28.6': {}
|
||||
|
||||
'@babel/types@7.28.6':
|
||||
dependencies:
|
||||
'@babel/helper-string-parser': 7.27.1
|
||||
'@babel/helper-validator-identifier': 7.28.5
|
||||
|
||||
'@bcoe/v8-coverage@1.0.2': {}
|
||||
|
||||
'@codemirror/autocomplete@6.20.0':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@codemirror/view': 6.39.11
|
||||
'@lezer/common': 1.5.0
|
||||
|
||||
'@codemirror/commands@6.8.1':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@codemirror/view': 6.39.11
|
||||
'@lezer/common': 1.5.0
|
||||
|
||||
'@codemirror/lang-angular@0.1.4':
|
||||
dependencies:
|
||||
'@codemirror/lang-html': 6.4.11
|
||||
'@codemirror/lang-javascript': 6.2.4
|
||||
'@codemirror/language': 6.12.1
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@codemirror/lang-cpp@6.0.3':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.12.1
|
||||
'@lezer/cpp': 1.1.5
|
||||
|
||||
'@codemirror/lang-css@6.3.1':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.20.0
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/css': 1.3.0
|
||||
|
||||
'@codemirror/lang-go@6.0.1':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.20.0
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/go': 1.0.1
|
||||
|
||||
'@codemirror/lang-html@6.4.11':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.20.0
|
||||
'@codemirror/lang-css': 6.3.1
|
||||
'@codemirror/lang-javascript': 6.2.4
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@codemirror/view': 6.39.11
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/css': 1.3.0
|
||||
'@lezer/html': 1.3.13
|
||||
|
||||
'@codemirror/lang-java@6.0.2':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.12.1
|
||||
'@lezer/java': 1.1.3
|
||||
|
||||
'@codemirror/lang-javascript@6.2.4':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.20.0
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/lint': 6.9.3
|
||||
'@codemirror/state': 6.5.4
|
||||
'@codemirror/view': 6.39.11
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/javascript': 1.5.4
|
||||
|
||||
'@codemirror/lang-json@6.0.2':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.12.1
|
||||
'@lezer/json': 1.0.3
|
||||
|
||||
'@codemirror/lang-less@6.0.2':
|
||||
dependencies:
|
||||
'@codemirror/lang-css': 6.3.1
|
||||
'@codemirror/language': 6.12.1
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@codemirror/lang-markdown@6.5.0':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.20.0
|
||||
'@codemirror/lang-html': 6.4.11
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@codemirror/view': 6.39.11
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/markdown': 1.6.3
|
||||
|
||||
'@codemirror/lang-php@6.0.2':
|
||||
dependencies:
|
||||
'@codemirror/lang-html': 6.4.11
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/php': 1.0.5
|
||||
|
||||
'@codemirror/lang-python@6.2.1':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.20.0
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/python': 1.1.18
|
||||
|
||||
'@codemirror/lang-rust@6.0.2':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.12.1
|
||||
'@lezer/rust': 1.0.2
|
||||
|
||||
'@codemirror/lang-sass@6.0.2':
|
||||
dependencies:
|
||||
'@codemirror/lang-css': 6.3.1
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/sass': 1.1.0
|
||||
|
||||
'@codemirror/lang-sql@6.10.0':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.20.0
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@codemirror/lang-vue@0.1.3':
|
||||
dependencies:
|
||||
'@codemirror/lang-html': 6.4.11
|
||||
'@codemirror/lang-javascript': 6.2.4
|
||||
'@codemirror/language': 6.12.1
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@codemirror/lang-wast@6.0.2':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.12.1
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@codemirror/lang-xml@6.1.0':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.20.0
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@codemirror/view': 6.39.11
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/xml': 1.0.6
|
||||
|
||||
'@codemirror/lang-yaml@6.1.2':
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.20.0
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
'@lezer/yaml': 1.0.3
|
||||
|
||||
'@codemirror/language@6.12.1':
|
||||
dependencies:
|
||||
'@codemirror/state': 6.5.4
|
||||
'@codemirror/view': 6.39.11
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
style-mod: 4.1.3
|
||||
|
||||
'@codemirror/legacy-modes@6.5.2':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.12.1
|
||||
|
||||
'@codemirror/lint@6.9.3':
|
||||
dependencies:
|
||||
'@codemirror/state': 6.5.4
|
||||
'@codemirror/view': 6.39.11
|
||||
crelt: 1.0.6
|
||||
|
||||
'@codemirror/search@6.6.0':
|
||||
dependencies:
|
||||
'@codemirror/state': 6.5.4
|
||||
'@codemirror/view': 6.39.11
|
||||
crelt: 1.0.6
|
||||
|
||||
'@codemirror/state@6.5.4':
|
||||
dependencies:
|
||||
'@marijn/find-cluster-break': 1.0.2
|
||||
|
||||
'@codemirror/theme-one-dark@6.1.3':
|
||||
dependencies:
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/state': 6.5.4
|
||||
'@codemirror/view': 6.39.11
|
||||
'@lezer/highlight': 1.2.3
|
||||
|
||||
'@codemirror/view@6.39.11':
|
||||
dependencies:
|
||||
'@codemirror/state': 6.5.4
|
||||
crelt: 1.0.6
|
||||
style-mod: 4.1.3
|
||||
w3c-keyname: 2.2.8
|
||||
|
||||
'@csstools/color-helpers@5.1.0': {}
|
||||
|
||||
'@csstools/css-calc@2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)':
|
||||
@@ -2024,6 +2568,101 @@ snapshots:
|
||||
'@jridgewell/resolve-uri': 3.1.2
|
||||
'@jridgewell/sourcemap-codec': 1.5.5
|
||||
|
||||
'@lezer/common@1.5.0': {}
|
||||
|
||||
'@lezer/cpp@1.1.5':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/css@1.3.0':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/go@1.0.1':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/highlight@1.2.3':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
|
||||
'@lezer/html@1.3.13':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/java@1.1.3':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/javascript@1.5.4':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/json@1.0.3':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/lr@1.4.8':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
|
||||
'@lezer/markdown@1.6.3':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
|
||||
'@lezer/php@1.0.5':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/python@1.1.18':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/rust@1.0.2':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/sass@1.1.0':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/xml@1.0.6':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@lezer/yaml@1.0.3':
|
||||
dependencies:
|
||||
'@lezer/common': 1.5.0
|
||||
'@lezer/highlight': 1.2.3
|
||||
'@lezer/lr': 1.4.8
|
||||
|
||||
'@marijn/find-cluster-break@1.0.2': {}
|
||||
|
||||
'@polka/url@1.0.0-next.29': {}
|
||||
|
||||
'@rollup/rollup-android-arm-eabi@4.55.1':
|
||||
@@ -2271,10 +2910,18 @@ snapshots:
|
||||
'@tauri-apps/cli-win32-ia32-msvc': 2.9.6
|
||||
'@tauri-apps/cli-win32-x64-msvc': 2.9.6
|
||||
|
||||
'@tauri-apps/plugin-clipboard-manager@2.3.2':
|
||||
dependencies:
|
||||
'@tauri-apps/api': 2.9.1
|
||||
|
||||
'@tauri-apps/plugin-dialog@2.6.0':
|
||||
dependencies:
|
||||
'@tauri-apps/api': 2.9.1
|
||||
|
||||
'@tauri-apps/plugin-fs@2.4.5':
|
||||
dependencies:
|
||||
'@tauri-apps/api': 2.9.1
|
||||
|
||||
'@tauri-apps/plugin-notification@2.3.3':
|
||||
dependencies:
|
||||
'@tauri-apps/api': 2.9.1
|
||||
@@ -2434,6 +3081,20 @@ snapshots:
|
||||
'@typescript-eslint/types': 8.53.0
|
||||
eslint-visitor-keys: 4.2.1
|
||||
|
||||
'@vitest/coverage-v8@4.0.18(vitest@4.0.17(jiti@2.6.1)(jsdom@27.4.0)(lightningcss@1.30.2))':
|
||||
dependencies:
|
||||
'@bcoe/v8-coverage': 1.0.2
|
||||
'@vitest/utils': 4.0.18
|
||||
ast-v8-to-istanbul: 0.3.10
|
||||
istanbul-lib-coverage: 3.2.2
|
||||
istanbul-lib-report: 3.0.1
|
||||
istanbul-reports: 3.2.0
|
||||
magicast: 0.5.1
|
||||
obug: 2.1.1
|
||||
std-env: 3.10.0
|
||||
tinyrainbow: 3.0.3
|
||||
vitest: 4.0.17(jiti@2.6.1)(jsdom@27.4.0)(lightningcss@1.30.2)
|
||||
|
||||
'@vitest/expect@4.0.17':
|
||||
dependencies:
|
||||
'@standard-schema/spec': 1.1.0
|
||||
@@ -2455,6 +3116,10 @@ snapshots:
|
||||
dependencies:
|
||||
tinyrainbow: 3.0.3
|
||||
|
||||
'@vitest/pretty-format@4.0.18':
|
||||
dependencies:
|
||||
tinyrainbow: 3.0.3
|
||||
|
||||
'@vitest/runner@4.0.17':
|
||||
dependencies:
|
||||
'@vitest/utils': 4.0.17
|
||||
@@ -2473,6 +3138,11 @@ snapshots:
|
||||
'@vitest/pretty-format': 4.0.17
|
||||
tinyrainbow: 3.0.3
|
||||
|
||||
'@vitest/utils@4.0.18':
|
||||
dependencies:
|
||||
'@vitest/pretty-format': 4.0.18
|
||||
tinyrainbow: 3.0.3
|
||||
|
||||
acorn-jsx@5.3.2(acorn@8.15.0):
|
||||
dependencies:
|
||||
acorn: 8.15.0
|
||||
@@ -2506,6 +3176,12 @@ snapshots:
|
||||
|
||||
assertion-error@2.0.1: {}
|
||||
|
||||
ast-v8-to-istanbul@0.3.10:
|
||||
dependencies:
|
||||
'@jridgewell/trace-mapping': 0.3.31
|
||||
estree-walker: 3.0.3
|
||||
js-tokens: 9.0.1
|
||||
|
||||
axobject-query@4.1.0: {}
|
||||
|
||||
balanced-match@1.0.2: {}
|
||||
@@ -2538,6 +3214,16 @@ snapshots:
|
||||
|
||||
clsx@2.1.1: {}
|
||||
|
||||
codemirror@6.0.2:
|
||||
dependencies:
|
||||
'@codemirror/autocomplete': 6.20.0
|
||||
'@codemirror/commands': 6.8.1
|
||||
'@codemirror/language': 6.12.1
|
||||
'@codemirror/lint': 6.9.3
|
||||
'@codemirror/search': 6.6.0
|
||||
'@codemirror/state': 6.5.4
|
||||
'@codemirror/view': 6.39.11
|
||||
|
||||
color-convert@2.0.1:
|
||||
dependencies:
|
||||
color-name: 1.1.4
|
||||
@@ -2548,6 +3234,8 @@ snapshots:
|
||||
|
||||
cookie@0.6.0: {}
|
||||
|
||||
crelt@1.0.6: {}
|
||||
|
||||
cross-spawn@7.0.6:
|
||||
dependencies:
|
||||
path-key: 3.1.1
|
||||
@@ -2780,12 +3468,16 @@ snapshots:
|
||||
|
||||
has-flag@4.0.0: {}
|
||||
|
||||
highlight.js@11.11.1: {}
|
||||
|
||||
html-encoding-sniffer@6.0.0:
|
||||
dependencies:
|
||||
'@exodus/bytes': 1.8.0
|
||||
transitivePeerDependencies:
|
||||
- '@exodus/crypto'
|
||||
|
||||
html-escaper@2.0.2: {}
|
||||
|
||||
http-proxy-agent@7.0.2:
|
||||
dependencies:
|
||||
agent-base: 7.1.4
|
||||
@@ -2827,10 +3519,25 @@ snapshots:
|
||||
|
||||
isexe@2.0.0: {}
|
||||
|
||||
istanbul-lib-coverage@3.2.2: {}
|
||||
|
||||
istanbul-lib-report@3.0.1:
|
||||
dependencies:
|
||||
istanbul-lib-coverage: 3.2.2
|
||||
make-dir: 4.0.0
|
||||
supports-color: 7.2.0
|
||||
|
||||
istanbul-reports@3.2.0:
|
||||
dependencies:
|
||||
html-escaper: 2.0.2
|
||||
istanbul-lib-report: 3.0.1
|
||||
|
||||
jiti@2.6.1: {}
|
||||
|
||||
js-tokens@4.0.0: {}
|
||||
|
||||
js-tokens@9.0.1: {}
|
||||
|
||||
js-yaml@4.1.1:
|
||||
dependencies:
|
||||
argparse: 2.0.1
|
||||
@@ -2949,6 +3656,18 @@ snapshots:
|
||||
dependencies:
|
||||
'@jridgewell/sourcemap-codec': 1.5.5
|
||||
|
||||
magicast@0.5.1:
|
||||
dependencies:
|
||||
'@babel/parser': 7.28.6
|
||||
'@babel/types': 7.28.6
|
||||
source-map-js: 1.2.1
|
||||
|
||||
make-dir@4.0.0:
|
||||
dependencies:
|
||||
semver: 7.7.3
|
||||
|
||||
marked@17.0.1: {}
|
||||
|
||||
mdn-data@2.12.2: {}
|
||||
|
||||
min-indent@1.0.1: {}
|
||||
@@ -3133,6 +3852,8 @@ snapshots:
|
||||
|
||||
strip-json-comments@3.1.1: {}
|
||||
|
||||
style-mod@4.1.3: {}
|
||||
|
||||
supports-color@7.2.0:
|
||||
dependencies:
|
||||
has-flag: 4.0.0
|
||||
@@ -3292,6 +4013,8 @@ snapshots:
|
||||
- tsx
|
||||
- yaml
|
||||
|
||||
w3c-keyname@2.2.8: {}
|
||||
|
||||
w3c-xmlserializer@5.0.0:
|
||||
dependencies:
|
||||
xml-name-validator: 5.0.0
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "hikari-desktop"
|
||||
version = "0.2.0"
|
||||
version = "1.1.1"
|
||||
description = "Hikari - Claude Code Visual Assistant"
|
||||
authors = ["Naomi Carrigan"]
|
||||
edition = "2021"
|
||||
@@ -13,7 +13,7 @@ crate-type = ["staticlib", "cdylib", "rlib"]
|
||||
tauri-build = { version = "2", features = [] }
|
||||
|
||||
[dependencies]
|
||||
tauri = { version = "2", features = [] }
|
||||
tauri = { version = "2", features = ["tray-icon", "image-png"] }
|
||||
tauri-plugin-dialog = "2"
|
||||
tauri-plugin-opener = "2"
|
||||
tauri-plugin-shell = "2"
|
||||
@@ -25,8 +25,15 @@ uuid = { version = "1", features = ["v4"] }
|
||||
tauri-plugin-store = "2.4.2"
|
||||
tauri-plugin-notification = "2"
|
||||
tauri-plugin-os = "2"
|
||||
tauri-plugin-http = "2"
|
||||
tauri-plugin-clipboard-manager = "2"
|
||||
tauri-plugin-fs = "2"
|
||||
tempfile = "3"
|
||||
semver = "1"
|
||||
chrono = { version = "0.4.43", features = ["serde"] }
|
||||
async-trait = "0.1"
|
||||
reqwest = { version = "0.12", features = ["json", "stream"] }
|
||||
futures-util = "0.3"
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
windows = { version = "0.62", features = [
|
||||
|
||||
@@ -13,6 +13,23 @@
|
||||
"notification:default",
|
||||
"notification:allow-is-permission-granted",
|
||||
"notification:allow-request-permission",
|
||||
"notification:allow-notify"
|
||||
"notification:allow-notify",
|
||||
"clipboard-manager:default",
|
||||
"clipboard-manager:allow-read-image",
|
||||
"core:tray:default",
|
||||
"fs:default",
|
||||
"fs:allow-read-text-file",
|
||||
"fs:allow-write-text-file",
|
||||
{
|
||||
"identifier": "fs:allow-read-file",
|
||||
"allow": [{ "path": "**" }]
|
||||
},
|
||||
{
|
||||
"identifier": "fs:allow-write-file",
|
||||
"allow": [{ "path": "**" }]
|
||||
},
|
||||
"core:window:allow-set-size",
|
||||
"core:window:allow-set-always-on-top",
|
||||
"core:window:allow-inner-size"
|
||||
]
|
||||
}
|
||||
|
||||
|
Before Width: | Height: | Size: 9.2 KiB After Width: | Height: | Size: 36 KiB |
|
Before Width: | Height: | Size: 19 KiB After Width: | Height: | Size: 127 KiB |
|
Before Width: | Height: | Size: 1.9 KiB After Width: | Height: | Size: 2.8 KiB |
|
Before Width: | Height: | Size: 4.4 KiB After Width: | Height: | Size: 10 KiB |
|
Before Width: | Height: | Size: 7.5 KiB After Width: | Height: | Size: 26 KiB |
|
Before Width: | Height: | Size: 10 KiB After Width: | Height: | Size: 43 KiB |
|
Before Width: | Height: | Size: 11 KiB After Width: | Height: | Size: 47 KiB |
|
Before Width: | Height: | Size: 22 KiB After Width: | Height: | Size: 154 KiB |
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 2.5 KiB |
|
Before Width: | Height: | Size: 24 KiB After Width: | Height: | Size: 181 KiB |
|
Before Width: | Height: | Size: 2.9 KiB After Width: | Height: | Size: 5.1 KiB |
|
Before Width: | Height: | Size: 5.0 KiB After Width: | Height: | Size: 12 KiB |
|
Before Width: | Height: | Size: 6.4 KiB After Width: | Height: | Size: 18 KiB |
|
Before Width: | Height: | Size: 3.3 KiB After Width: | Height: | Size: 6.4 KiB |
|
Before Width: | Height: | Size: 3.6 KiB After Width: | Height: | Size: 4.6 KiB |
|
Before Width: | Height: | Size: 12 KiB After Width: | Height: | Size: 54 KiB |
|
Before Width: | Height: | Size: 4.2 KiB After Width: | Height: | Size: 4.7 KiB |
|
Before Width: | Height: | Size: 3.4 KiB After Width: | Height: | Size: 4.4 KiB |
|
Before Width: | Height: | Size: 7.6 KiB After Width: | Height: | Size: 26 KiB |
|
Before Width: | Height: | Size: 4.0 KiB After Width: | Height: | Size: 4.5 KiB |
|
Before Width: | Height: | Size: 8.2 KiB After Width: | Height: | Size: 16 KiB |
|
Before Width: | Height: | Size: 16 KiB After Width: | Height: | Size: 92 KiB |
|
Before Width: | Height: | Size: 9.2 KiB After Width: | Height: | Size: 16 KiB |
|
Before Width: | Height: | Size: 12 KiB After Width: | Height: | Size: 32 KiB |
|
Before Width: | Height: | Size: 25 KiB After Width: | Height: | Size: 196 KiB |
|
Before Width: | Height: | Size: 14 KiB After Width: | Height: | Size: 32 KiB |
|
Before Width: | Height: | Size: 17 KiB After Width: | Height: | Size: 55 KiB |
|
Before Width: | Height: | Size: 35 KiB After Width: | Height: | Size: 338 KiB |
|
Before Width: | Height: | Size: 19 KiB After Width: | Height: | Size: 54 KiB |
|
Before Width: | Height: | Size: 32 KiB After Width: | Height: | Size: 149 KiB |
|
Before Width: | Height: | Size: 14 KiB After Width: | Height: | Size: 466 KiB |
|
Before Width: | Height: | Size: 878 B After Width: | Height: | Size: 1.2 KiB |
|
Before Width: | Height: | Size: 2.2 KiB After Width: | Height: | Size: 4.3 KiB |
|
Before Width: | Height: | Size: 2.2 KiB After Width: | Height: | Size: 4.3 KiB |
|
Before Width: | Height: | Size: 3.7 KiB After Width: | Height: | Size: 9.0 KiB |
|
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 2.4 KiB |
|
Before Width: | Height: | Size: 3.5 KiB After Width: | Height: | Size: 8.4 KiB |
|
Before Width: | Height: | Size: 3.5 KiB After Width: | Height: | Size: 8.4 KiB |
|
Before Width: | Height: | Size: 5.6 KiB After Width: | Height: | Size: 18 KiB |
|
Before Width: | Height: | Size: 2.2 KiB After Width: | Height: | Size: 4.3 KiB |
|
Before Width: | Height: | Size: 5.0 KiB After Width: | Height: | Size: 15 KiB |
|
Before Width: | Height: | Size: 5.0 KiB After Width: | Height: | Size: 15 KiB |
|
Before Width: | Height: | Size: 7.7 KiB After Width: | Height: | Size: 32 KiB |
|
Before Width: | Height: | Size: 123 KiB After Width: | Height: | Size: 1.7 MiB |
|
Before Width: | Height: | Size: 7.7 KiB After Width: | Height: | Size: 32 KiB |
|
Before Width: | Height: | Size: 12 KiB After Width: | Height: | Size: 66 KiB |
|
Before Width: | Height: | Size: 4.8 KiB After Width: | Height: | Size: 14 KiB |
|
Before Width: | Height: | Size: 9.8 KiB After Width: | Height: | Size: 48 KiB |
|
Before Width: | Height: | Size: 11 KiB After Width: | Height: | Size: 58 KiB |
@@ -4,11 +4,11 @@ use std::sync::Arc;
|
||||
use tauri::AppHandle;
|
||||
|
||||
use crate::config::ClaudeStartOptions;
|
||||
use crate::provider_bridge::ProviderBridge;
|
||||
use crate::stats::UsageStats;
|
||||
use crate::wsl_bridge::WslBridge;
|
||||
|
||||
pub struct BridgeManager {
|
||||
bridges: HashMap<String, WslBridge>,
|
||||
bridges: HashMap<String, ProviderBridge>,
|
||||
app_handle: Option<AppHandle>,
|
||||
}
|
||||
|
||||
@@ -29,30 +29,52 @@ impl BridgeManager {
|
||||
conversation_id: &str,
|
||||
options: ClaudeStartOptions,
|
||||
) -> Result<(), String> {
|
||||
// Check if a bridge already exists for this conversation
|
||||
if self.bridges.get(conversation_id).map(|b| b.is_running()).unwrap_or(false) {
|
||||
// Check if a bridge already exists and is running for this conversation
|
||||
if self
|
||||
.bridges
|
||||
.get(conversation_id)
|
||||
.map(|b| b.is_running())
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return Err("Claude is already running for this conversation".to_string());
|
||||
}
|
||||
|
||||
let app = self.app_handle.as_ref()
|
||||
let app = self
|
||||
.app_handle
|
||||
.as_ref()
|
||||
.ok_or_else(|| "App handle not set".to_string())?
|
||||
.clone();
|
||||
|
||||
// Create a new bridge for this conversation
|
||||
let mut bridge = WslBridge::new_with_conversation_id(conversation_id.to_string());
|
||||
// Check if existing bridge matches the requested provider type
|
||||
// If provider type changed, create a new bridge
|
||||
let should_recreate = self.bridges.get(conversation_id).map_or(false, |bridge| {
|
||||
bridge.provider_type() != options.provider_type
|
||||
});
|
||||
|
||||
if should_recreate {
|
||||
// Remove existing bridge if provider type changed
|
||||
self.bridges.remove(conversation_id);
|
||||
}
|
||||
|
||||
// Reuse existing bridge if it exists (preserves stats across reconnects)
|
||||
// Only create a new bridge if one doesn't exist for this conversation
|
||||
let provider_type = options.provider_type;
|
||||
let bridge = self
|
||||
.bridges
|
||||
.entry(conversation_id.to_string())
|
||||
.or_insert_with(|| ProviderBridge::new(provider_type, conversation_id.to_string()));
|
||||
|
||||
// Start the Claude process
|
||||
bridge.start(app, options)?;
|
||||
|
||||
// Store the bridge
|
||||
self.bridges.insert(conversation_id.to_string(), bridge);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn stop_claude(&mut self, conversation_id: &str) -> Result<(), String> {
|
||||
if let Some(bridge) = self.bridges.get_mut(conversation_id) {
|
||||
let app = self.app_handle.as_ref()
|
||||
let app = self
|
||||
.app_handle
|
||||
.as_ref()
|
||||
.ok_or_else(|| "App handle not set".to_string())?;
|
||||
bridge.stop(app);
|
||||
Ok(())
|
||||
@@ -63,7 +85,9 @@ impl BridgeManager {
|
||||
|
||||
pub fn interrupt_claude(&mut self, conversation_id: &str) -> Result<(), String> {
|
||||
if let Some(bridge) = self.bridges.get_mut(conversation_id) {
|
||||
let app = self.app_handle.as_ref()
|
||||
let app = self
|
||||
.app_handle
|
||||
.as_ref()
|
||||
.ok_or_else(|| "App handle not set".to_string())?;
|
||||
bridge.interrupt(app)
|
||||
} else {
|
||||
@@ -79,20 +103,36 @@ impl BridgeManager {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn send_tool_result(
|
||||
&mut self,
|
||||
conversation_id: &str,
|
||||
tool_use_id: &str,
|
||||
result: serde_json::Value,
|
||||
) -> Result<(), String> {
|
||||
if let Some(bridge) = self.bridges.get_mut(conversation_id) {
|
||||
bridge.send_tool_result(tool_use_id, result)
|
||||
} else {
|
||||
Err("No Claude instance found for this conversation".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_claude_running(&self, conversation_id: &str) -> bool {
|
||||
self.bridges.get(conversation_id)
|
||||
self.bridges
|
||||
.get(conversation_id)
|
||||
.map(|b| b.is_running())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
pub fn get_working_directory(&self, conversation_id: &str) -> Result<String, String> {
|
||||
self.bridges.get(conversation_id)
|
||||
self.bridges
|
||||
.get(conversation_id)
|
||||
.map(|b| b.get_working_directory().to_string())
|
||||
.ok_or_else(|| "No Claude instance found for this conversation".to_string())
|
||||
}
|
||||
|
||||
pub fn get_usage_stats(&self, conversation_id: &str) -> Result<UsageStats, String> {
|
||||
self.bridges.get(conversation_id)
|
||||
self.bridges
|
||||
.get(conversation_id)
|
||||
.map(|b| b.get_stats())
|
||||
.ok_or_else(|| "No Claude instance found for this conversation".to_string())
|
||||
}
|
||||
@@ -115,8 +155,14 @@ impl BridgeManager {
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn get_active_conversations(&self) -> Vec<String> {
|
||||
self.bridges.keys()
|
||||
.filter(|id| self.bridges.get(*id).map(|b| b.is_running()).unwrap_or(false))
|
||||
self.bridges
|
||||
.keys()
|
||||
.filter(|id| {
|
||||
self.bridges
|
||||
.get(*id)
|
||||
.map(|b| b.is_running())
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
@@ -132,4 +178,4 @@ pub type SharedBridgeManager = Arc<Mutex<BridgeManager>>;
|
||||
|
||||
pub fn create_shared_bridge_manager() -> SharedBridgeManager {
|
||||
Arc::new(Mutex::new(BridgeManager::new()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,724 @@
|
||||
// Clipboard history module for tracking and managing copied code snippets
|
||||
// Implements issue #25 - Clipboard History feature
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Mutex;
|
||||
use tauri_plugin_store::StoreExt;
|
||||
use uuid::Uuid;
|
||||
|
||||
const STORE_FILE: &str = "hikari-clipboard.json";
|
||||
const HISTORY_KEY: &str = "clipboard_history";
|
||||
const MAX_HISTORY_SIZE: usize = 100;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ClipboardEntry {
|
||||
pub id: String,
|
||||
pub content: String,
|
||||
pub language: Option<String>,
|
||||
pub source: Option<String>,
|
||||
pub timestamp: String,
|
||||
pub is_pinned: bool,
|
||||
}
|
||||
|
||||
impl ClipboardEntry {
|
||||
pub fn new(content: String, language: Option<String>, source: Option<String>) -> Self {
|
||||
Self {
|
||||
id: Uuid::new_v4().to_string(),
|
||||
content,
|
||||
language,
|
||||
source,
|
||||
timestamp: chrono::Utc::now().to_rfc3339(),
|
||||
is_pinned: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
struct ClipboardHistory {
|
||||
entries: Vec<ClipboardEntry>,
|
||||
}
|
||||
|
||||
// Track last clipboard content to avoid duplicates
|
||||
#[derive(Default)]
|
||||
struct ClipboardState {
|
||||
last_content: Option<String>,
|
||||
}
|
||||
|
||||
static CLIPBOARD_STATE: Mutex<ClipboardState> = Mutex::new(ClipboardState { last_content: None });
|
||||
|
||||
fn load_history(app: &tauri::AppHandle) -> ClipboardHistory {
|
||||
let store = app.store(STORE_FILE).ok();
|
||||
store
|
||||
.and_then(|s| s.get(HISTORY_KEY))
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok())
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn save_history(app: &tauri::AppHandle, history: &ClipboardHistory) -> Result<(), String> {
|
||||
let store = app.store(STORE_FILE).map_err(|e| e.to_string())?;
|
||||
store.set(
|
||||
HISTORY_KEY,
|
||||
serde_json::to_value(history).map_err(|e| e.to_string())?,
|
||||
);
|
||||
store.save().map_err(|e| e.to_string())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// List all clipboard entries, optionally filtered by language
|
||||
#[tauri::command]
|
||||
pub fn list_clipboard_entries(
|
||||
app: tauri::AppHandle,
|
||||
language: Option<String>,
|
||||
) -> Result<Vec<ClipboardEntry>, String> {
|
||||
let history = load_history(&app);
|
||||
let entries = if let Some(lang) = language {
|
||||
history
|
||||
.entries
|
||||
.into_iter()
|
||||
.filter(|e| e.language.as_ref() == Some(&lang))
|
||||
.collect()
|
||||
} else {
|
||||
history.entries
|
||||
};
|
||||
Ok(entries)
|
||||
}
|
||||
|
||||
/// Capture current clipboard content and add to history
|
||||
#[tauri::command]
|
||||
pub fn capture_clipboard(
|
||||
app: tauri::AppHandle,
|
||||
content: String,
|
||||
language: Option<String>,
|
||||
source: Option<String>,
|
||||
) -> Result<ClipboardEntry, String> {
|
||||
// Check for duplicate (same content as last capture)
|
||||
{
|
||||
let mut state = CLIPBOARD_STATE.lock().map_err(|e| e.to_string())?;
|
||||
if state.last_content.as_ref() == Some(&content) {
|
||||
// Return existing entry if content is the same
|
||||
let history = load_history(&app);
|
||||
if let Some(entry) = history.entries.first() {
|
||||
if entry.content == content {
|
||||
return Ok(entry.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
state.last_content = Some(content.clone());
|
||||
}
|
||||
|
||||
let entry = ClipboardEntry::new(content, language, source);
|
||||
let mut history = load_history(&app);
|
||||
|
||||
// Add to front of history
|
||||
history.entries.insert(0, entry.clone());
|
||||
|
||||
// Enforce max size (keep pinned entries)
|
||||
let mut pinned: Vec<ClipboardEntry> = history
|
||||
.entries
|
||||
.iter()
|
||||
.filter(|e| e.is_pinned)
|
||||
.cloned()
|
||||
.collect();
|
||||
let mut unpinned: Vec<ClipboardEntry> = history
|
||||
.entries
|
||||
.into_iter()
|
||||
.filter(|e| !e.is_pinned)
|
||||
.collect();
|
||||
|
||||
// Trim unpinned entries if over max size
|
||||
if unpinned.len() + pinned.len() > MAX_HISTORY_SIZE {
|
||||
let max_unpinned = MAX_HISTORY_SIZE.saturating_sub(pinned.len());
|
||||
unpinned.truncate(max_unpinned);
|
||||
}
|
||||
|
||||
// Merge back, pinned first then unpinned
|
||||
pinned.extend(unpinned);
|
||||
history.entries = pinned;
|
||||
|
||||
// Sort by timestamp descending (newest first), pinned entries stay at top
|
||||
history.entries.sort_by(|a, b| {
|
||||
if a.is_pinned && !b.is_pinned {
|
||||
std::cmp::Ordering::Less
|
||||
} else if !a.is_pinned && b.is_pinned {
|
||||
std::cmp::Ordering::Greater
|
||||
} else {
|
||||
b.timestamp.cmp(&a.timestamp)
|
||||
}
|
||||
});
|
||||
|
||||
save_history(&app, &history)?;
|
||||
Ok(entry)
|
||||
}
|
||||
|
||||
/// Delete a clipboard entry by ID
|
||||
#[tauri::command]
|
||||
pub fn delete_clipboard_entry(app: tauri::AppHandle, id: String) -> Result<(), String> {
|
||||
let mut history = load_history(&app);
|
||||
history.entries.retain(|e| e.id != id);
|
||||
save_history(&app, &history)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Toggle pin status of an entry
|
||||
#[tauri::command]
|
||||
pub fn toggle_pin_clipboard_entry(
|
||||
app: tauri::AppHandle,
|
||||
id: String,
|
||||
) -> Result<ClipboardEntry, String> {
|
||||
let mut history = load_history(&app);
|
||||
let entry = history
|
||||
.entries
|
||||
.iter_mut()
|
||||
.find(|e| e.id == id)
|
||||
.ok_or("Entry not found")?;
|
||||
|
||||
entry.is_pinned = !entry.is_pinned;
|
||||
let updated_entry = entry.clone();
|
||||
|
||||
// Re-sort to move pinned entries to top
|
||||
history.entries.sort_by(|a, b| {
|
||||
if a.is_pinned && !b.is_pinned {
|
||||
std::cmp::Ordering::Less
|
||||
} else if !a.is_pinned && b.is_pinned {
|
||||
std::cmp::Ordering::Greater
|
||||
} else {
|
||||
b.timestamp.cmp(&a.timestamp)
|
||||
}
|
||||
});
|
||||
|
||||
save_history(&app, &history)?;
|
||||
Ok(updated_entry)
|
||||
}
|
||||
|
||||
/// Clear all non-pinned entries
|
||||
#[tauri::command]
|
||||
pub fn clear_clipboard_history(app: tauri::AppHandle) -> Result<(), String> {
|
||||
let mut history = load_history(&app);
|
||||
history.entries.retain(|e| e.is_pinned);
|
||||
save_history(&app, &history)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Search clipboard entries by content
|
||||
#[tauri::command]
|
||||
pub fn search_clipboard_entries(
|
||||
app: tauri::AppHandle,
|
||||
query: String,
|
||||
) -> Result<Vec<ClipboardEntry>, String> {
|
||||
let history = load_history(&app);
|
||||
let query_lower = query.to_lowercase();
|
||||
let entries = history
|
||||
.entries
|
||||
.into_iter()
|
||||
.filter(|e| {
|
||||
e.content.to_lowercase().contains(&query_lower)
|
||||
|| e.language
|
||||
.as_ref()
|
||||
.is_some_and(|l| l.to_lowercase().contains(&query_lower))
|
||||
|| e.source
|
||||
.as_ref()
|
||||
.is_some_and(|s| s.to_lowercase().contains(&query_lower))
|
||||
})
|
||||
.collect();
|
||||
Ok(entries)
|
||||
}
|
||||
|
||||
/// Get all unique languages from history
|
||||
#[tauri::command]
|
||||
pub fn get_clipboard_languages(app: tauri::AppHandle) -> Result<Vec<String>, String> {
|
||||
let history = load_history(&app);
|
||||
let mut languages: Vec<String> = history
|
||||
.entries
|
||||
.iter()
|
||||
.filter_map(|e| e.language.clone())
|
||||
.collect();
|
||||
languages.sort();
|
||||
languages.dedup();
|
||||
Ok(languages)
|
||||
}
|
||||
|
||||
/// Update the language of an entry
|
||||
#[tauri::command]
|
||||
pub fn update_clipboard_language(
|
||||
app: tauri::AppHandle,
|
||||
id: String,
|
||||
language: Option<String>,
|
||||
) -> Result<ClipboardEntry, String> {
|
||||
let mut history = load_history(&app);
|
||||
let entry = history
|
||||
.entries
|
||||
.iter_mut()
|
||||
.find(|e| e.id == id)
|
||||
.ok_or("Entry not found")?;
|
||||
|
||||
entry.language = language;
|
||||
let updated_entry = entry.clone();
|
||||
|
||||
save_history(&app, &history)?;
|
||||
Ok(updated_entry)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
// ==================== ClipboardEntry tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_new() {
|
||||
let entry = ClipboardEntry::new(
|
||||
"let x = 42;".to_string(),
|
||||
Some("rust".to_string()),
|
||||
Some("main.rs".to_string()),
|
||||
);
|
||||
|
||||
assert_eq!(entry.content, "let x = 42;");
|
||||
assert_eq!(entry.language, Some("rust".to_string()));
|
||||
assert_eq!(entry.source, Some("main.rs".to_string()));
|
||||
assert!(!entry.is_pinned);
|
||||
assert!(!entry.id.is_empty());
|
||||
assert!(!entry.timestamp.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_new_without_optional_fields() {
|
||||
let entry = ClipboardEntry::new("some content".to_string(), None, None);
|
||||
|
||||
assert_eq!(entry.content, "some content");
|
||||
assert!(entry.language.is_none());
|
||||
assert!(entry.source.is_none());
|
||||
assert!(!entry.is_pinned);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_unique_ids() {
|
||||
let entry1 = ClipboardEntry::new("content1".to_string(), None, None);
|
||||
let entry2 = ClipboardEntry::new("content2".to_string(), None, None);
|
||||
|
||||
assert_ne!(entry1.id, entry2.id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_serialization() {
|
||||
let entry = ClipboardEntry::new(
|
||||
"fn main() {}".to_string(),
|
||||
Some("rust".to_string()),
|
||||
Some("lib.rs".to_string()),
|
||||
);
|
||||
|
||||
let json = serde_json::to_string(&entry).unwrap();
|
||||
assert!(json.contains("fn main() {}"));
|
||||
assert!(json.contains("rust"));
|
||||
assert!(json.contains("lib.rs"));
|
||||
assert!(json.contains("is_pinned"));
|
||||
|
||||
let deserialized: ClipboardEntry = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(deserialized.content, entry.content);
|
||||
assert_eq!(deserialized.language, entry.language);
|
||||
assert_eq!(deserialized.source, entry.source);
|
||||
assert_eq!(deserialized.id, entry.id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_clone() {
|
||||
let entry = ClipboardEntry::new(
|
||||
"original".to_string(),
|
||||
Some("python".to_string()),
|
||||
None,
|
||||
);
|
||||
|
||||
let cloned = entry.clone();
|
||||
assert_eq!(cloned.content, entry.content);
|
||||
assert_eq!(cloned.id, entry.id);
|
||||
assert_eq!(cloned.language, entry.language);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_entry_timestamp_is_rfc3339() {
|
||||
let entry = ClipboardEntry::new("test".to_string(), None, None);
|
||||
|
||||
// RFC3339 timestamp should parse successfully
|
||||
let parsed = chrono::DateTime::parse_from_rfc3339(&entry.timestamp);
|
||||
assert!(parsed.is_ok());
|
||||
}
|
||||
|
||||
// ==================== ClipboardHistory tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_history_default() {
|
||||
let history = ClipboardHistory::default();
|
||||
assert!(history.entries.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_history_serialization() {
|
||||
let mut history = ClipboardHistory::default();
|
||||
history.entries.push(ClipboardEntry::new(
|
||||
"entry1".to_string(),
|
||||
Some("js".to_string()),
|
||||
None,
|
||||
));
|
||||
history.entries.push(ClipboardEntry::new(
|
||||
"entry2".to_string(),
|
||||
None,
|
||||
Some("file.txt".to_string()),
|
||||
));
|
||||
|
||||
let json = serde_json::to_string(&history).unwrap();
|
||||
assert!(json.contains("entry1"));
|
||||
assert!(json.contains("entry2"));
|
||||
assert!(json.contains("js"));
|
||||
assert!(json.contains("file.txt"));
|
||||
|
||||
let deserialized: ClipboardHistory = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(deserialized.entries.len(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_history_entries_order() {
|
||||
let mut history = ClipboardHistory::default();
|
||||
|
||||
history.entries.push(ClipboardEntry::new("first".to_string(), None, None));
|
||||
history.entries.push(ClipboardEntry::new("second".to_string(), None, None));
|
||||
history.entries.push(ClipboardEntry::new("third".to_string(), None, None));
|
||||
|
||||
assert_eq!(history.entries[0].content, "first");
|
||||
assert_eq!(history.entries[1].content, "second");
|
||||
assert_eq!(history.entries[2].content, "third");
|
||||
}
|
||||
|
||||
// ==================== ClipboardState tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_state_default() {
|
||||
let state = ClipboardState::default();
|
||||
assert!(state.last_content.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_clipboard_state_with_content() {
|
||||
let state = ClipboardState {
|
||||
last_content: Some("cached content".to_string()),
|
||||
};
|
||||
assert_eq!(state.last_content, Some("cached content".to_string()));
|
||||
}
|
||||
|
||||
// ==================== MAX_HISTORY_SIZE constant test ====================
|
||||
|
||||
#[test]
|
||||
fn test_max_history_size_is_reasonable() {
|
||||
assert_eq!(MAX_HISTORY_SIZE, 100);
|
||||
// Compile-time assertions for constant bounds
|
||||
const _: () = assert!(MAX_HISTORY_SIZE > 0);
|
||||
const _: () = assert!(MAX_HISTORY_SIZE <= 1000); // Sanity check
|
||||
}
|
||||
|
||||
// ==================== Pinned entry sorting tests ====================
|
||||
|
||||
#[test]
#[allow(clippy::useless_vec)]
fn test_pinned_entries_sorting() {
    // Small factory so each fixture entry fits on one line.
    let make = |id: &str, content: &str, timestamp: &str, is_pinned: bool| ClipboardEntry {
        id: id.to_string(),
        content: content.to_string(),
        language: None,
        source: None,
        timestamp: timestamp.to_string(),
        is_pinned,
    };

    // One pinned entry sandwiched between two unpinned ones.
    let mut entries = vec![
        make("1", "unpinned older", "2024-01-01T00:00:00Z", false),
        make("2", "pinned", "2024-01-02T00:00:00Z", true),
        make("3", "unpinned newer", "2024-01-03T00:00:00Z", false),
    ];

    // Same ordering rule the module applies: pinned first, then newest first.
    entries.sort_by_key(|e| (!e.is_pinned, std::cmp::Reverse(e.timestamp.clone())));

    // Pinned should be first
    assert!(entries[0].is_pinned);
    assert_eq!(entries[0].id, "2");

    // Then unpinned sorted by timestamp descending (newest first)
    assert_eq!(entries[1].id, "3"); // newer unpinned
    assert_eq!(entries[2].id, "1"); // older unpinned
}
|
||||
|
||||
#[test]
#[allow(clippy::useless_vec)]
fn test_multiple_pinned_entries_sorting() {
    let make = |id: &str, content: &str, timestamp: &str, is_pinned: bool| ClipboardEntry {
        id: id.to_string(),
        content: content.to_string(),
        language: None,
        source: None,
        timestamp: timestamp.to_string(),
        is_pinned,
    };

    let mut entries = vec![
        make("1", "pinned older", "2024-01-01T00:00:00Z", true),
        make("2", "unpinned", "2024-01-02T00:00:00Z", false),
        make("3", "pinned newer", "2024-01-03T00:00:00Z", true),
    ];

    // Pinned entries float to the top; ties break on newest timestamp first.
    entries.sort_by_key(|e| (!e.is_pinned, std::cmp::Reverse(e.timestamp.clone())));

    // Both pinned first, sorted by timestamp
    assert!(entries[0].is_pinned);
    assert_eq!(entries[0].id, "3"); // pinned newer
    assert!(entries[1].is_pinned);
    assert_eq!(entries[1].id, "1"); // pinned older
    // Then unpinned
    assert!(!entries[2].is_pinned);
    assert_eq!(entries[2].id, "2");
}
|
||||
|
||||
// ==================== Entry filtering tests ====================
|
||||
|
||||
#[test]
fn test_filter_entries_by_language() {
    let entry = |id: &str, content: &str, lang: &str, ts: &str| ClipboardEntry {
        id: id.to_string(),
        content: content.to_string(),
        language: Some(lang.to_string()),
        source: None,
        timestamp: ts.to_string(),
        is_pinned: false,
    };

    let history = ClipboardHistory {
        entries: vec![
            entry("1", "rust code", "rust", "2024-01-01T00:00:00Z"),
            entry("2", "js code", "javascript", "2024-01-02T00:00:00Z"),
            entry("3", "more rust", "rust", "2024-01-03T00:00:00Z"),
        ],
    };

    // Keep only the entries tagged as Rust.
    let rust_only: Vec<_> = history
        .entries
        .iter()
        .filter(|e| e.language.as_deref() == Some("rust"))
        .collect();

    assert_eq!(rust_only.len(), 2);
    assert!(rust_only
        .iter()
        .all(|e| e.language.as_deref() == Some("rust")));
}
|
||||
|
||||
#[test]
fn test_search_entries_by_content() {
    let entry = |id: &str, content: &str, lang: &str, ts: &str| ClipboardEntry {
        id: id.to_string(),
        content: content.to_string(),
        language: Some(lang.to_string()),
        source: None,
        timestamp: ts.to_string(),
        is_pinned: false,
    };

    let history = ClipboardHistory {
        entries: vec![
            entry("1", "fn hello_world()", "rust", "2024-01-01T00:00:00Z"),
            entry("2", "function hello()", "javascript", "2024-01-02T00:00:00Z"),
            entry("3", "def goodbye()", "python", "2024-01-03T00:00:00Z"),
        ],
    };

    // Case-insensitive substring search over entry content.
    let needle = "hello".to_lowercase();
    let matches: Vec<_> = history
        .entries
        .iter()
        .filter(|e| e.content.to_lowercase().contains(&needle))
        .collect();

    assert_eq!(matches.len(), 2);
    assert!(matches.iter().all(|e| e.content.contains("hello")));
}
|
||||
|
||||
#[test]
fn test_search_entries_case_insensitive() {
    // Upper-case content must still match a lower-case query.
    let history = ClipboardHistory {
        entries: vec![ClipboardEntry {
            id: "1".to_string(),
            content: "HELLO WORLD".to_string(),
            language: None,
            source: None,
            timestamp: "2024-01-01T00:00:00Z".to_string(),
            is_pinned: false,
        }],
    };

    let needle = "hello".to_lowercase();
    let hits = history
        .entries
        .iter()
        .filter(|e| e.content.to_lowercase().contains(&needle))
        .count();

    assert_eq!(hits, 1);
}
|
||||
|
||||
// ==================== Unique languages extraction test ====================
|
||||
|
||||
#[test]
fn test_extract_unique_languages() {
    let entry = |id: &str, lang: Option<&str>| ClipboardEntry {
        id: id.to_string(),
        content: "".to_string(),
        language: lang.map(str::to_string),
        source: None,
        timestamp: "".to_string(),
        is_pinned: false,
    };

    let history = ClipboardHistory {
        entries: vec![
            entry("1", Some("rust")),
            entry("2", Some("javascript")),
            entry("3", Some("rust")), // Duplicate
            entry("4", None),         // No language
        ],
    };

    // Deduplicate the language tags, ignoring entries with no language.
    let mut languages: Vec<String> = history
        .entries
        .iter()
        .filter_map(|e| e.language.clone())
        .collect();
    languages.sort();
    languages.dedup();

    assert_eq!(languages.len(), 2);
    assert!(languages.contains(&"rust".to_string()));
    assert!(languages.contains(&"javascript".to_string()));
}
|
||||
|
||||
// ==================== Retain pinned entries test ====================
|
||||
|
||||
#[test]
fn test_retain_pinned_on_clear() {
    let entry = |id: &str, content: &str, pinned: bool| ClipboardEntry {
        id: id.to_string(),
        content: content.to_string(),
        language: None,
        source: None,
        timestamp: "".to_string(),
        is_pinned: pinned,
    };

    let mut history = ClipboardHistory {
        entries: vec![
            entry("1", "pinned", true),
            entry("2", "unpinned", false),
            entry("3", "another pinned", true),
        ],
    };

    // Simulate clear (keep only pinned)
    history.entries.retain(|e| e.is_pinned);

    assert_eq!(history.entries.len(), 2);
    assert!(history.entries.iter().all(|e| e.is_pinned));
}
|
||||
}
|
||||
@@ -1,10 +1,13 @@
|
||||
use std::path::PathBuf;
|
||||
use tauri::{AppHandle, State};
|
||||
use tauri_plugin_http::reqwest;
|
||||
use tauri_plugin_store::StoreExt;
|
||||
|
||||
use crate::achievements::{get_achievement_info, load_achievements, AchievementUnlockedEvent};
|
||||
use crate::bridge_manager::SharedBridgeManager;
|
||||
use crate::config::{ClaudeStartOptions, HikariConfig};
|
||||
use crate::stats::UsageStats;
|
||||
use crate::bridge_manager::SharedBridgeManager;
|
||||
use crate::achievements::{load_achievements, get_achievement_info, AchievementUnlockedEvent};
|
||||
use crate::temp_manager::SharedTempFileManager;
|
||||
|
||||
const CONFIG_STORE_KEY: &str = "config";
|
||||
|
||||
@@ -71,23 +74,17 @@ pub async fn select_wsl_directory() -> Result<String, String> {
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_config(app: AppHandle) -> Result<HikariConfig, String> {
|
||||
let store = app
|
||||
.store("hikari-config.json")
|
||||
.map_err(|e| e.to_string())?;
|
||||
let store = app.store("hikari-config.json").map_err(|e| e.to_string())?;
|
||||
|
||||
match store.get(CONFIG_STORE_KEY) {
|
||||
Some(value) => {
|
||||
serde_json::from_value(value.clone()).map_err(|e| e.to_string())
|
||||
}
|
||||
Some(value) => serde_json::from_value(value.clone()).map_err(|e| e.to_string()),
|
||||
None => Ok(HikariConfig::default()),
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_config(app: AppHandle, config: HikariConfig) -> Result<(), String> {
|
||||
let store = app
|
||||
.store("hikari-config.json")
|
||||
.map_err(|e| e.to_string())?;
|
||||
let store = app.store("hikari-config.json").map_err(|e| e.to_string())?;
|
||||
|
||||
let value = serde_json::to_value(&config).map_err(|e| e.to_string())?;
|
||||
store.set(CONFIG_STORE_KEY, value);
|
||||
@@ -105,8 +102,77 @@ pub async fn get_usage_stats(
|
||||
manager.get_usage_stats(&conversation_id)
|
||||
}
|
||||
|
||||
/// Load persisted lifetime stats from store (no bridge required)
|
||||
#[tauri::command]
|
||||
pub async fn load_saved_achievements(app: AppHandle) -> Result<Vec<AchievementUnlockedEvent>, String> {
|
||||
pub async fn get_persisted_stats(app: AppHandle) -> Result<UsageStats, String> {
|
||||
let mut stats = UsageStats::new();
|
||||
|
||||
// Load persisted stats if available
|
||||
if let Some(persisted) = crate::stats::load_stats(&app).await {
|
||||
stats.apply_persisted(persisted);
|
||||
}
|
||||
|
||||
Ok(stats)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn validate_directory(
|
||||
path: String,
|
||||
current_dir: Option<String>,
|
||||
) -> Result<String, String> {
|
||||
use std::path::Path;
|
||||
|
||||
let path = Path::new(&path);
|
||||
|
||||
// Expand ~ to home directory
|
||||
let expanded_path = if path.starts_with("~") {
|
||||
if let Some(home) = std::env::var_os("HOME") {
|
||||
let home_path = Path::new(&home);
|
||||
if path == Path::new("~") {
|
||||
home_path.to_path_buf()
|
||||
} else {
|
||||
home_path.join(path.strip_prefix("~").unwrap())
|
||||
}
|
||||
} else {
|
||||
return Err("Could not determine home directory".to_string());
|
||||
}
|
||||
} else if path.is_relative() {
|
||||
// Handle relative paths (., .., or any relative path) by resolving against current_dir
|
||||
if let Some(ref cwd) = current_dir {
|
||||
Path::new(cwd).join(path)
|
||||
} else {
|
||||
path.to_path_buf()
|
||||
}
|
||||
} else {
|
||||
path.to_path_buf()
|
||||
};
|
||||
|
||||
// Check if the path exists and is a directory
|
||||
if !expanded_path.exists() {
|
||||
return Err(format!(
|
||||
"Directory does not exist: {}",
|
||||
expanded_path.display()
|
||||
));
|
||||
}
|
||||
|
||||
if !expanded_path.is_dir() {
|
||||
return Err(format!(
|
||||
"Path is not a directory: {}",
|
||||
expanded_path.display()
|
||||
));
|
||||
}
|
||||
|
||||
// Return the canonicalized (absolute) path
|
||||
expanded_path
|
||||
.canonicalize()
|
||||
.map(|p| p.to_string_lossy().to_string())
|
||||
.map_err(|e| format!("Failed to resolve path: {}", e))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn load_saved_achievements(
|
||||
app: AppHandle,
|
||||
) -> Result<Vec<AchievementUnlockedEvent>, String> {
|
||||
use chrono::Utc;
|
||||
|
||||
// Load achievements from persistent store
|
||||
@@ -117,10 +183,635 @@ pub async fn load_saved_achievements(app: AppHandle) -> Result<Vec<AchievementUn
|
||||
for achievement_id in &progress.unlocked {
|
||||
let mut info = get_achievement_info(achievement_id);
|
||||
info.unlocked_at = Some(Utc::now()); // We don't store timestamps, so just use now
|
||||
events.push(AchievementUnlockedEvent {
|
||||
achievement: info,
|
||||
});
|
||||
events.push(AchievementUnlockedEvent { achievement: info });
|
||||
}
|
||||
|
||||
Ok(events)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn answer_question(
|
||||
bridge_manager: State<'_, SharedBridgeManager>,
|
||||
conversation_id: String,
|
||||
tool_use_id: String,
|
||||
answers: serde_json::Value,
|
||||
) -> Result<(), String> {
|
||||
let mut manager = bridge_manager.lock();
|
||||
manager.send_tool_result(&conversation_id, &tool_use_id, answers)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn list_skills() -> Result<Vec<String>, String> {
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
// Get the home directory
|
||||
let home =
|
||||
std::env::var_os("HOME").ok_or_else(|| "Could not determine home directory".to_string())?;
|
||||
|
||||
let skills_dir = Path::new(&home).join(".claude").join("skills");
|
||||
|
||||
// If the skills directory doesn't exist, return empty list
|
||||
if !skills_dir.exists() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
// Read the directory and collect skill names
|
||||
let mut skills = Vec::new();
|
||||
let entries =
|
||||
fs::read_dir(&skills_dir).map_err(|e| format!("Failed to read skills directory: {}", e))?;
|
||||
|
||||
for entry in entries {
|
||||
let entry = entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
|
||||
let path = entry.path();
|
||||
|
||||
// Only include directories that contain a SKILL.md file
|
||||
if path.is_dir() {
|
||||
let skill_file = path.join("SKILL.md");
|
||||
if skill_file.exists() {
|
||||
if let Some(name) = path.file_name() {
|
||||
skills.push(name.to_string_lossy().to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort alphabetically
|
||||
skills.sort();
|
||||
|
||||
Ok(skills)
|
||||
}
|
||||
|
||||
/// Result of an update check, returned to the frontend.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct UpdateInfo {
    // Version compiled into this binary (CARGO_PKG_VERSION).
    pub current_version: String,
    // Tag name of the newest release reported by the releases API.
    pub latest_version: String,
    // True when the latest release is semver-newer than the current version.
    pub has_update: bool,
    // Link to the release page for the latest release.
    pub release_url: String,
    // Release body/notes, when the release provides one.
    pub release_notes: Option<String>,
}
|
||||
|
||||
/// Subset of the Gitea release API response consumed by the update check.
#[derive(Debug, serde::Deserialize)]
struct GiteaRelease {
    // Release tag, e.g. "v1.2.3"; a leading 'v' is stripped before semver parsing.
    tag_name: String,
    html_url: String,
    body: Option<String>,
    prerelease: bool,
}
|
||||
|
||||
/// Query the project's Gitea releases API and compare the newest release
/// against the compiled-in version.
///
/// Prefers the latest non-prerelease, falling back to the newest
/// prerelease when no stable release exists. All failure modes (network,
/// non-2xx status, unparseable JSON or version tags, empty release list)
/// are reported as human-readable error strings.
#[tauri::command]
pub async fn check_for_updates() -> Result<UpdateInfo, String> {
    const CURRENT_VERSION: &str = env!("CARGO_PKG_VERSION");
    const RELEASES_API: &str =
        "https://git.nhcarrigan.com/api/v1/repos/nhcarrigan/hikari-desktop/releases";

    // Fetch releases from Gitea API
    let client = reqwest::Client::new();
    let response = client
        .get(RELEASES_API)
        .header("Accept", "application/json")
        .send()
        .await
        .map_err(|e| format!("Failed to fetch releases: {}", e))?;

    if !response.status().is_success() {
        return Err(format!("API returned status: {}", response.status()));
    }

    // Read the body as text first so parse errors can be reported separately
    // from transport errors.
    let text = response
        .text()
        .await
        .map_err(|e| format!("Failed to read response: {}", e))?;

    let releases: Vec<GiteaRelease> =
        serde_json::from_str(&text).map_err(|e| format!("Failed to parse releases: {}", e))?;

    // Find the latest non-prerelease, or fall back to latest prerelease
    let latest = releases
        .iter()
        .find(|r| !r.prerelease)
        .or_else(|| releases.first());

    let latest = match latest {
        Some(r) => r,
        None => return Err("No releases found".to_string()),
    };

    // Parse version strings (remove 'v' prefix if present)
    let current = semver::Version::parse(CURRENT_VERSION)
        .map_err(|e| format!("Failed to parse current version: {}", e))?;

    let latest_tag = latest.tag_name.trim_start_matches('v');
    let latest_ver = semver::Version::parse(latest_tag)
        .map_err(|e| format!("Failed to parse latest version: {}", e))?;

    Ok(UpdateInfo {
        current_version: CURRENT_VERSION.to_string(),
        latest_version: latest.tag_name.clone(),
        has_update: latest_ver > current,
        release_url: latest.html_url.clone(),
        release_notes: latest.body.clone(),
    })
}
|
||||
|
||||
/// Location of a temp file written on behalf of a conversation.
#[derive(Debug, Clone, serde::Serialize)]
pub struct SavedFileInfo {
    // Full path to the saved file, as a string.
    pub path: String,
    // Final file-name component of `path` ("unknown" when it has none).
    pub filename: String,
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_temp_file(
|
||||
temp_manager: State<'_, SharedTempFileManager>,
|
||||
conversation_id: String,
|
||||
data: Vec<u8>,
|
||||
filename: Option<String>,
|
||||
) -> Result<SavedFileInfo, String> {
|
||||
let mut manager = temp_manager.lock();
|
||||
let path = manager.save_file(&conversation_id, &data, filename.as_deref())?;
|
||||
|
||||
let filename = path
|
||||
.file_name()
|
||||
.map(|n| n.to_string_lossy().to_string())
|
||||
.unwrap_or_else(|| "unknown".to_string());
|
||||
|
||||
Ok(SavedFileInfo {
|
||||
path: path.to_string_lossy().to_string(),
|
||||
filename,
|
||||
})
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn register_temp_file(
|
||||
temp_manager: State<'_, SharedTempFileManager>,
|
||||
conversation_id: String,
|
||||
file_path: String,
|
||||
) -> Result<(), String> {
|
||||
let mut manager = temp_manager.lock();
|
||||
manager.register_file(&conversation_id, PathBuf::from(file_path));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_temp_files(
|
||||
temp_manager: State<'_, SharedTempFileManager>,
|
||||
conversation_id: String,
|
||||
) -> Result<Vec<String>, String> {
|
||||
let manager = temp_manager.lock();
|
||||
let files = manager.get_files_for_conversation(&conversation_id);
|
||||
Ok(files.iter().map(|p| p.to_string_lossy().to_string()).collect())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn cleanup_temp_files(
|
||||
temp_manager: State<'_, SharedTempFileManager>,
|
||||
conversation_id: String,
|
||||
) -> Result<(), String> {
|
||||
let mut manager = temp_manager.lock();
|
||||
manager.cleanup_conversation(&conversation_id)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn cleanup_all_temp_files(
|
||||
temp_manager: State<'_, SharedTempFileManager>,
|
||||
) -> Result<(), String> {
|
||||
let mut manager = temp_manager.lock();
|
||||
manager.cleanup_all()
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn cleanup_orphaned_temp_files(
|
||||
temp_manager: State<'_, SharedTempFileManager>,
|
||||
) -> Result<usize, String> {
|
||||
let mut manager = temp_manager.lock();
|
||||
manager.cleanup_orphaned_files()
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_file_size(file_path: String) -> Result<u64, String> {
|
||||
let metadata = std::fs::metadata(&file_path)
|
||||
.map_err(|e| format!("Failed to get file metadata: {}", e))?;
|
||||
Ok(metadata.len())
|
||||
}
|
||||
|
||||
// ==================== Editor File Operations ====================
|
||||
|
||||
/// One entry in a directory listing sent to the editor frontend.
#[derive(Debug, Clone, serde::Serialize)]
pub struct FileEntry {
    // Base name of the file or directory.
    pub name: String,
    // Full path, as a string.
    pub path: String,
    // Serialized camelCase to match the frontend's field name.
    #[serde(rename = "isDirectory")]
    pub is_directory: bool,
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn list_directory(path: String) -> Result<Vec<FileEntry>, String> {
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
let dir_path = Path::new(&path);
|
||||
|
||||
if !dir_path.exists() {
|
||||
return Err(format!("Directory does not exist: {}", path));
|
||||
}
|
||||
|
||||
if !dir_path.is_dir() {
|
||||
return Err(format!("Path is not a directory: {}", path));
|
||||
}
|
||||
|
||||
let entries = fs::read_dir(dir_path)
|
||||
.map_err(|e| format!("Failed to read directory: {}", e))?;
|
||||
|
||||
let mut file_entries = Vec::new();
|
||||
|
||||
for entry in entries {
|
||||
let entry = entry.map_err(|e| format!("Failed to read entry: {}", e))?;
|
||||
let path = entry.path();
|
||||
let name = entry
|
||||
.file_name()
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
|
||||
// Skip hidden files by default (can be made configurable later)
|
||||
if name.starts_with('.') {
|
||||
continue;
|
||||
}
|
||||
|
||||
file_entries.push(FileEntry {
|
||||
name,
|
||||
path: path.to_string_lossy().to_string(),
|
||||
is_directory: path.is_dir(),
|
||||
});
|
||||
}
|
||||
|
||||
Ok(file_entries)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn read_file_content(path: String) -> Result<String, String> {
|
||||
use std::fs;
|
||||
|
||||
fs::read_to_string(&path)
|
||||
.map_err(|e| format!("Failed to read file: {}", e))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn write_file_content(path: String, content: String) -> Result<(), String> {
|
||||
use std::fs;
|
||||
|
||||
fs::write(&path, content)
|
||||
.map_err(|e| format!("Failed to write file: {}", e))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn create_file(path: String) -> Result<(), String> {
|
||||
use std::fs::File;
|
||||
use std::path::Path;
|
||||
|
||||
let file_path = Path::new(&path);
|
||||
|
||||
if file_path.exists() {
|
||||
return Err("File already exists".to_string());
|
||||
}
|
||||
|
||||
File::create(file_path).map_err(|e| format!("Failed to create file: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn create_directory(path: String) -> Result<(), String> {
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
let dir_path = Path::new(&path);
|
||||
|
||||
if dir_path.exists() {
|
||||
return Err("Directory already exists".to_string());
|
||||
}
|
||||
|
||||
fs::create_dir_all(dir_path).map_err(|e| format!("Failed to create directory: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_file(path: String) -> Result<(), String> {
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
let file_path = Path::new(&path);
|
||||
|
||||
if !file_path.exists() {
|
||||
return Err("File does not exist".to_string());
|
||||
}
|
||||
|
||||
if file_path.is_dir() {
|
||||
return Err("Path is a directory, use delete_directory instead".to_string());
|
||||
}
|
||||
|
||||
fs::remove_file(file_path).map_err(|e| format!("Failed to delete file: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_directory(path: String) -> Result<(), String> {
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
let dir_path = Path::new(&path);
|
||||
|
||||
if !dir_path.exists() {
|
||||
return Err("Directory does not exist".to_string());
|
||||
}
|
||||
|
||||
if !dir_path.is_dir() {
|
||||
return Err("Path is not a directory".to_string());
|
||||
}
|
||||
|
||||
fs::remove_dir_all(dir_path).map_err(|e| format!("Failed to delete directory: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn rename_path(old_path: String, new_path: String) -> Result<(), String> {
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
let old = Path::new(&old_path);
|
||||
let new = Path::new(&new_path);
|
||||
|
||||
if !old.exists() {
|
||||
return Err("Path does not exist".to_string());
|
||||
}
|
||||
|
||||
if new.exists() {
|
||||
return Err("Destination already exists".to_string());
|
||||
}
|
||||
|
||||
fs::rename(old, new).map_err(|e| format!("Failed to rename: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::fs::{self, File};
|
||||
use std::io::Write;
|
||||
use tempfile::TempDir;
|
||||
|
||||
// Helper to run async tests
|
||||
fn run_async<F: std::future::Future>(f: F) -> F::Output {
|
||||
tokio::runtime::Runtime::new().unwrap().block_on(f)
|
||||
}
|
||||
|
||||
// ==================== validate_directory tests ====================
|
||||
|
||||
#[test]
fn test_validate_directory_absolute_path_exists() {
    let temp_dir = TempDir::new().unwrap();
    let dir_str = temp_dir.path().to_string_lossy().to_string();

    let result = run_async(validate_directory(dir_str, None));
    assert!(result.is_ok());
    // Canonicalized path should still contain the directory's base name.
    let base = temp_dir
        .path()
        .file_name()
        .unwrap()
        .to_string_lossy()
        .to_string();
    assert!(result.unwrap().contains(&base));
}
|
||||
|
||||
#[test]
fn test_validate_directory_path_not_exists() {
    // A path that cannot exist should be rejected with a clear message.
    let missing = "/nonexistent/path/that/does/not/exist".to_string();
    let outcome = run_async(validate_directory(missing, None));
    assert!(outcome.is_err());
    assert!(outcome.unwrap_err().contains("does not exist"));
}
|
||||
|
||||
#[test]
fn test_validate_directory_path_is_file() {
    // A regular file must be rejected: only directories validate.
    let temp_dir = TempDir::new().unwrap();
    let file_path = temp_dir.path().join("test_file.txt");
    File::create(&file_path).unwrap();

    let outcome = run_async(validate_directory(
        file_path.to_string_lossy().to_string(),
        None,
    ));
    assert!(outcome.is_err());
    assert!(outcome.unwrap_err().contains("not a directory"));
}
|
||||
|
||||
#[test]
fn test_validate_directory_home_expansion() {
    // Only meaningful when HOME is set (true on virtually all CI/dev boxes).
    if std::env::var_os("HOME").is_none() {
        return;
    }
    let result = run_async(validate_directory("~".to_string(), None));
    assert!(result.is_ok());
    // Should not contain ~ after expansion
    assert!(!result.unwrap().contains("~"));
}
|
||||
|
||||
#[test]
fn test_validate_directory_home_subpath_expansion() {
    // Needs HOME set and containing at least one subdirectory; exercise the
    // first one found, then stop.
    if let Some(home) = std::env::var_os("HOME") {
        let home_path = std::path::Path::new(&home);
        if let Ok(entries) = fs::read_dir(home_path) {
            for entry in entries.flatten() {
                if !entry.path().is_dir() {
                    continue;
                }
                let name = entry.file_name().to_string_lossy().to_string();
                let result = run_async(validate_directory(format!("~/{}", name), None));
                assert!(result.is_ok());
                assert!(!result.unwrap().contains("~"));
                break;
            }
        }
    }
}
|
||||
|
||||
#[test]
fn test_validate_directory_relative_path_with_current_dir() {
    // A bare relative name should resolve against the supplied current_dir.
    let base = TempDir::new().unwrap();
    fs::create_dir(base.path().join("subdir")).unwrap();

    let result = run_async(validate_directory(
        "subdir".to_string(),
        Some(base.path().to_string_lossy().to_string()),
    ));
    assert!(result.is_ok());
    assert!(result.unwrap().contains("subdir"));
}
|
||||
|
||||
#[test]
fn test_validate_directory_dot_path() {
    // "." resolved against current_dir should validate successfully.
    let base = TempDir::new().unwrap();
    let outcome = run_async(validate_directory(
        ".".to_string(),
        Some(base.path().to_string_lossy().to_string()),
    ));
    assert!(outcome.is_ok());
}
|
||||
|
||||
#[test]
fn test_validate_directory_dotdot_path() {
    let base = TempDir::new().unwrap();
    let subdir = base.path().join("subdir");
    fs::create_dir(&subdir).unwrap();

    let outcome = run_async(validate_directory(
        "..".to_string(),
        Some(subdir.to_string_lossy().to_string()),
    ));
    assert!(outcome.is_ok());
    // Should resolve to parent
    let parent_name = base
        .path()
        .file_name()
        .unwrap()
        .to_string_lossy()
        .to_string();
    assert!(outcome.unwrap().contains(&parent_name));
}
|
||||
|
||||
#[test]
fn test_validate_directory_relative_without_current_dir() {
    // With no current_dir, a made-up relative path should fail validation.
    let outcome = run_async(validate_directory(
        "some_random_nonexistent_relative_path".to_string(),
        None,
    ));
    assert!(outcome.is_err());
}
|
||||
|
||||
// ==================== get_file_size tests ====================
|
||||
|
||||
#[test]
fn test_get_file_size_empty_file() {
    // A just-created file reports zero bytes.
    let temp_dir = TempDir::new().unwrap();
    let target = temp_dir.path().join("empty.txt");
    File::create(&target).unwrap();

    let outcome = run_async(get_file_size(target.to_string_lossy().to_string()));
    assert!(outcome.is_ok());
    assert_eq!(outcome.unwrap(), 0);
}
|
||||
|
||||
#[test]
fn test_get_file_size_with_content() {
    let temp_dir = TempDir::new().unwrap();
    let target = temp_dir.path().join("content.txt");
    let mut f = File::create(&target).unwrap();
    f.write_all(b"Hello, Hikari!").unwrap();

    let outcome = run_async(get_file_size(target.to_string_lossy().to_string()));
    assert!(outcome.is_ok());
    assert_eq!(outcome.unwrap(), 14); // "Hello, Hikari!" is 14 bytes
}
|
||||
|
||||
#[test]
fn test_get_file_size_larger_file() {
    let temp_dir = TempDir::new().unwrap();
    let target = temp_dir.path().join("large.txt");
    // Write 1000 bytes of filler.
    let mut f = File::create(&target).unwrap();
    f.write_all(&vec![b'x'; 1000]).unwrap();

    let outcome = run_async(get_file_size(target.to_string_lossy().to_string()));
    assert!(outcome.is_ok());
    assert_eq!(outcome.unwrap(), 1000);
}
|
||||
|
||||
#[test]
fn test_get_file_size_nonexistent_file() {
    let outcome = run_async(get_file_size("/nonexistent/path/file.txt".to_string()));
    assert!(outcome.is_err());
    assert!(outcome.unwrap_err().contains("Failed to get file metadata"));
}
|
||||
|
||||
#[test]
fn test_get_file_size_directory() {
    // Directories have metadata too, so querying one succeeds; the exact
    // size is platform-dependent and deliberately not asserted.
    let temp_dir = TempDir::new().unwrap();
    let outcome = run_async(get_file_size(
        temp_dir.path().to_string_lossy().to_string(),
    ));
    assert!(outcome.is_ok());
}
|
||||
|
||||
// ==================== list_skills tests ====================
|
||||
|
||||
#[test]
fn test_list_skills_no_skills_dir() {
    // Depends on the real HOME, so only assert that the command neither
    // panics nor errors — it returns a (possibly empty) list either way.
    assert!(run_async(list_skills()).is_ok());
}
|
||||
|
||||
// ==================== select_wsl_directory tests ====================
|
||||
|
||||
#[test]
|
||||
fn test_select_wsl_directory_returns_home() {
|
||||
let result = run_async(select_wsl_directory());
|
||||
assert!(result.is_ok());
|
||||
assert_eq!(result.unwrap(), "/home");
|
||||
}
|
||||
|
||||
// ==================== UpdateInfo struct tests ====================
|
||||
|
||||
// UpdateInfo with release notes serializes all fields into the JSON payload.
// NOTE(review): fixture has latest_version (0.4.0) older than current (1.0.0)
// while has_update is true — fine for a pure serialization test, but confirm
// this wasn't meant to exercise version-comparison logic.
#[test]
fn test_update_info_serialization() {
    let info = UpdateInfo {
        current_version: "1.0.0".to_string(),
        latest_version: "0.4.0".to_string(),
        has_update: true,
        release_url: "https://example.com/release".to_string(),
        release_notes: Some("New features!".to_string()),
    };

    let json = serde_json::to_string(&info).unwrap();
    assert!(json.contains("1.0.0"));
    assert!(json.contains("0.4.0"));
    assert!(json.contains("true"));
    assert!(json.contains("New features!"));
}

// release_notes: None serializes without panicking; depending on serde
// attributes the field is either emitted as null or skipped entirely,
// hence the permissive assertion.
#[test]
fn test_update_info_without_notes() {
    let info = UpdateInfo {
        current_version: "1.0.0".to_string(),
        latest_version: "1.0.0".to_string(),
        has_update: false,
        release_url: "https://example.com/release".to_string(),
        release_notes: None,
    };

    let json = serde_json::to_string(&info).unwrap();
    assert!(json.contains("null") || json.contains("release_notes"));
}

// ==================== SavedFileInfo struct tests ====================

// SavedFileInfo round-trips its path and filename into JSON.
#[test]
fn test_saved_file_info_serialization() {
    let info = SavedFileInfo {
        path: "/tmp/test.txt".to_string(),
        filename: "test.txt".to_string(),
    };

    let json = serde_json::to_string(&info).unwrap();
    assert!(json.contains("/tmp/test.txt"));
    assert!(json.contains("test.txt"));
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
use crate::providers::ProviderType;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct ClaudeStartOptions {
|
||||
#[serde(default)]
|
||||
pub provider_type: ProviderType,
|
||||
|
||||
#[serde(default)]
|
||||
pub working_dir: String,
|
||||
|
||||
@@ -22,10 +26,50 @@ pub struct ClaudeStartOptions {
|
||||
|
||||
#[serde(default)]
|
||||
pub skip_greeting: bool,
|
||||
|
||||
#[serde(default)]
|
||||
pub resume_session_id: Option<String>,
|
||||
|
||||
// Ollama-specific options
|
||||
#[serde(default = "default_ollama_base_url")]
|
||||
pub ollama_base_url: String,
|
||||
|
||||
#[serde(default)]
|
||||
pub ollama_model: Option<String>,
|
||||
|
||||
// OpenAI-specific options
|
||||
#[serde(default)]
|
||||
pub openai_api_key: Option<String>,
|
||||
|
||||
#[serde(default = "default_openai_base_url")]
|
||||
pub openai_base_url: String,
|
||||
|
||||
#[serde(default)]
|
||||
pub openai_model: Option<String>,
|
||||
|
||||
// Anthropic-specific options
|
||||
#[serde(default)]
|
||||
pub anthropic_api_key: Option<String>,
|
||||
|
||||
#[serde(default = "default_anthropic_base_url")]
|
||||
pub anthropic_base_url: String,
|
||||
|
||||
#[serde(default)]
|
||||
pub anthropic_model: Option<String>,
|
||||
|
||||
// Gemini-specific options
|
||||
#[serde(default)]
|
||||
pub gemini_api_key: Option<String>,
|
||||
|
||||
#[serde(default)]
|
||||
pub gemini_model: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct HikariConfig {
|
||||
#[serde(default)]
|
||||
pub provider_type: ProviderType,
|
||||
|
||||
#[serde(default)]
|
||||
pub model: Option<String>,
|
||||
|
||||
@@ -41,6 +85,40 @@ pub struct HikariConfig {
|
||||
#[serde(default)]
|
||||
pub auto_granted_tools: Vec<String>,
|
||||
|
||||
// Ollama-specific settings
|
||||
#[serde(default = "default_ollama_base_url")]
|
||||
pub ollama_base_url: String,
|
||||
|
||||
#[serde(default)]
|
||||
pub ollama_model: Option<String>,
|
||||
|
||||
// OpenAI-specific settings
|
||||
#[serde(default)]
|
||||
pub openai_api_key: Option<String>,
|
||||
|
||||
#[serde(default = "default_openai_base_url")]
|
||||
pub openai_base_url: String,
|
||||
|
||||
#[serde(default)]
|
||||
pub openai_model: Option<String>,
|
||||
|
||||
// Anthropic-specific settings
|
||||
#[serde(default)]
|
||||
pub anthropic_api_key: Option<String>,
|
||||
|
||||
#[serde(default = "default_anthropic_base_url")]
|
||||
pub anthropic_base_url: String,
|
||||
|
||||
#[serde(default)]
|
||||
pub anthropic_model: Option<String>,
|
||||
|
||||
// Gemini-specific settings
|
||||
#[serde(default)]
|
||||
pub gemini_api_key: Option<String>,
|
||||
|
||||
#[serde(default)]
|
||||
pub gemini_model: Option<String>,
|
||||
|
||||
#[serde(default)]
|
||||
pub theme: Theme,
|
||||
|
||||
@@ -55,25 +133,90 @@ pub struct HikariConfig {
|
||||
|
||||
#[serde(default = "default_notification_volume")]
|
||||
pub notification_volume: f32,
|
||||
|
||||
#[serde(default)]
|
||||
pub always_on_top: bool,
|
||||
|
||||
#[serde(default = "default_update_checks_enabled")]
|
||||
pub update_checks_enabled: bool,
|
||||
|
||||
#[serde(default)]
|
||||
pub character_panel_width: Option<u32>,
|
||||
|
||||
#[serde(default = "default_font_size")]
|
||||
pub font_size: u32,
|
||||
|
||||
#[serde(default)]
|
||||
pub minimize_to_tray: bool,
|
||||
|
||||
#[serde(default)]
|
||||
pub streamer_mode: bool,
|
||||
|
||||
#[serde(default)]
|
||||
pub streamer_hide_paths: bool,
|
||||
|
||||
#[serde(default)]
|
||||
pub compact_mode: bool,
|
||||
|
||||
// Profile fields
|
||||
#[serde(default)]
|
||||
pub profile_name: Option<String>,
|
||||
|
||||
#[serde(default)]
|
||||
pub profile_avatar_path: Option<String>,
|
||||
|
||||
#[serde(default)]
|
||||
pub profile_bio: Option<String>,
|
||||
|
||||
// Custom theme colors
|
||||
#[serde(default)]
|
||||
pub custom_theme_colors: CustomThemeColors,
|
||||
}
|
||||
|
||||
impl Default for HikariConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
provider_type: ProviderType::default(),
|
||||
model: None,
|
||||
api_key: None,
|
||||
custom_instructions: None,
|
||||
mcp_servers_json: None,
|
||||
auto_granted_tools: Vec::new(),
|
||||
ollama_base_url: default_ollama_base_url(),
|
||||
ollama_model: None,
|
||||
openai_api_key: None,
|
||||
openai_base_url: default_openai_base_url(),
|
||||
openai_model: None,
|
||||
anthropic_api_key: None,
|
||||
anthropic_base_url: default_anthropic_base_url(),
|
||||
anthropic_model: None,
|
||||
gemini_api_key: None,
|
||||
gemini_model: None,
|
||||
theme: Theme::default(),
|
||||
greeting_enabled: true,
|
||||
greeting_custom_prompt: None,
|
||||
notifications_enabled: true,
|
||||
notification_volume: 0.7,
|
||||
always_on_top: false,
|
||||
update_checks_enabled: true,
|
||||
character_panel_width: None,
|
||||
font_size: 14,
|
||||
minimize_to_tray: false,
|
||||
streamer_mode: false,
|
||||
streamer_hide_paths: false,
|
||||
compact_mode: false,
|
||||
profile_name: None,
|
||||
profile_avatar_path: None,
|
||||
profile_bio: None,
|
||||
custom_theme_colors: CustomThemeColors::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// serde default for `update_checks_enabled`: update checks are opt-out.
fn default_update_checks_enabled() -> bool {
    true
}
|
||||
|
||||
/// serde default for `greeting_enabled`: greeting is on unless disabled.
fn default_greeting_enabled() -> bool {
    true
}
|
||||
@@ -86,12 +229,51 @@ fn default_notification_volume() -> f32 {
|
||||
0.7
|
||||
}
|
||||
|
||||
/// serde default for `font_size`, in px (14 matches the UI's base size).
fn default_font_size() -> u32 {
    14
}
|
||||
|
||||
/// serde default for `ollama_base_url`: the standard local Ollama endpoint.
fn default_ollama_base_url() -> String {
    String::from("http://localhost:11434")
}
|
||||
|
||||
/// serde default for `openai_base_url`: the hosted OpenAI v1 API root.
fn default_openai_base_url() -> String {
    String::from("https://api.openai.com/v1")
}
|
||||
|
||||
/// serde default for `anthropic_base_url`: the hosted Anthropic API root.
fn default_anthropic_base_url() -> String {
    String::from("https://api.anthropic.com")
}
|
||||
|
||||
/// UI color theme, persisted in config as a lowercase string
/// ("dark", "light", "high-contrast", "custom").
#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Theme {
    /// Default theme for new installs.
    #[default]
    Dark,
    Light,
    /// Explicit rename: `rename_all = "lowercase"` alone would serialize
    /// this as "highcontrast" rather than the hyphenated form.
    #[serde(rename = "high-contrast")]
    HighContrast,
    /// User-defined palette; actual colors live in `CustomThemeColors`.
    Custom,
}
|
||||
|
||||
/// Color overrides used when `Theme::Custom` is selected.
///
/// Every field is optional so a partial palette can be stored; `None`
/// means "use the built-in default for this slot". Values are presumably
/// CSS color strings consumed by the frontend — TODO confirm format.
#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq)]
pub struct CustomThemeColors {
    // Background layers
    #[serde(default)]
    pub bg_primary: Option<String>,
    #[serde(default)]
    pub bg_secondary: Option<String>,
    #[serde(default)]
    pub bg_terminal: Option<String>,
    // Accent colors
    #[serde(default)]
    pub accent_primary: Option<String>,
    #[serde(default)]
    pub accent_secondary: Option<String>,
    // Text colors
    #[serde(default)]
    pub text_primary: Option<String>,
    #[serde(default)]
    pub text_secondary: Option<String>,
    // Borders
    #[serde(default)]
    pub border_color: Option<String>,
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -101,29 +283,79 @@ mod tests {
|
||||
// Exhaustively pins every field of HikariConfig::default() so any change
// to the defaults (or to the serde default helpers) is caught here.
#[test]
fn test_default_config() {
    let config = HikariConfig::default();
    assert_eq!(config.provider_type, ProviderType::ClaudeCli);
    assert!(config.model.is_none());
    assert!(config.api_key.is_none());
    assert!(config.custom_instructions.is_none());
    assert!(config.mcp_servers_json.is_none());
    assert!(config.auto_granted_tools.is_empty());
    assert_eq!(config.ollama_base_url, "http://localhost:11434");
    assert!(config.ollama_model.is_none());
    // OpenAI defaults
    assert!(config.openai_api_key.is_none());
    assert_eq!(config.openai_base_url, "https://api.openai.com/v1");
    assert!(config.openai_model.is_none());
    // Anthropic defaults
    assert!(config.anthropic_api_key.is_none());
    assert_eq!(config.anthropic_base_url, "https://api.anthropic.com");
    assert!(config.anthropic_model.is_none());
    // Gemini defaults
    assert!(config.gemini_api_key.is_none());
    assert!(config.gemini_model.is_none());
    // Other settings
    assert_eq!(config.theme, Theme::Dark);
    assert!(config.greeting_enabled);
    assert!(config.greeting_custom_prompt.is_none());
    assert!(!config.always_on_top);
    assert!(config.update_checks_enabled);
    assert!(config.character_panel_width.is_none());
    assert_eq!(config.font_size, 14);
    assert!(!config.minimize_to_tray);
    assert!(!config.streamer_mode);
    assert!(!config.streamer_hide_paths);
    assert!(!config.compact_mode);
    assert!(config.profile_name.is_none());
    assert!(config.profile_avatar_path.is_none());
    assert!(config.profile_bio.is_none());
    assert_eq!(config.custom_theme_colors, CustomThemeColors::default());
}
|
||||
|
||||
#[test]
|
||||
fn test_config_serialization() {
|
||||
let config = HikariConfig {
|
||||
provider_type: ProviderType::ClaudeCli,
|
||||
model: Some("claude-sonnet-4-20250514".to_string()),
|
||||
api_key: None,
|
||||
custom_instructions: Some("Be helpful".to_string()),
|
||||
mcp_servers_json: None,
|
||||
auto_granted_tools: vec!["Read".to_string(), "Glob".to_string()],
|
||||
ollama_base_url: "http://localhost:11434".to_string(),
|
||||
ollama_model: None,
|
||||
openai_api_key: None,
|
||||
openai_base_url: "https://api.openai.com/v1".to_string(),
|
||||
openai_model: None,
|
||||
anthropic_api_key: None,
|
||||
anthropic_base_url: "https://api.anthropic.com".to_string(),
|
||||
anthropic_model: None,
|
||||
gemini_api_key: None,
|
||||
gemini_model: None,
|
||||
theme: Theme::Light,
|
||||
greeting_enabled: true,
|
||||
greeting_custom_prompt: Some("Hello!".to_string()),
|
||||
notifications_enabled: true,
|
||||
notification_volume: 0.7,
|
||||
always_on_top: true,
|
||||
update_checks_enabled: true,
|
||||
character_panel_width: Some(400),
|
||||
font_size: 16,
|
||||
minimize_to_tray: true,
|
||||
streamer_mode: false,
|
||||
streamer_hide_paths: false,
|
||||
compact_mode: false,
|
||||
profile_name: Some("Test User".to_string()),
|
||||
profile_avatar_path: None,
|
||||
profile_bio: Some("A test bio".to_string()),
|
||||
custom_theme_colors: CustomThemeColors::default(),
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&config).unwrap();
|
||||
@@ -134,15 +366,26 @@ mod tests {
|
||||
assert_eq!(deserialized.auto_granted_tools, config.auto_granted_tools);
|
||||
assert_eq!(deserialized.theme, Theme::Light);
|
||||
assert!(deserialized.greeting_enabled);
|
||||
assert_eq!(deserialized.greeting_custom_prompt, Some("Hello!".to_string()));
|
||||
assert_eq!(
|
||||
deserialized.greeting_custom_prompt,
|
||||
Some("Hello!".to_string())
|
||||
);
|
||||
}
|
||||
|
||||
// Each Theme variant must serialize to its exact lowercase wire string,
// including the hyphenated "high-contrast" special case.
#[test]
fn test_theme_serialization() {
    let dark = Theme::Dark;
    let light = Theme::Light;
    let high_contrast = Theme::HighContrast;

    assert_eq!(serde_json::to_string(&dark).unwrap(), "\"dark\"");
    assert_eq!(serde_json::to_string(&light).unwrap(), "\"light\"");
    assert_eq!(
        serde_json::to_string(&high_contrast).unwrap(),
        "\"high-contrast\""
    );

    let custom = Theme::Custom;
    assert_eq!(serde_json::to_string(&custom).unwrap(), "\"custom\"");
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,878 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::process::Command;
|
||||
|
||||
/// Snapshot of a repository's state, built from `git status` and related
/// commands and serialized to the frontend.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitStatus {
    /// False when the directory is not a git repo; all other fields are
    /// then empty / None / zero.
    pub is_repo: bool,
    /// Current branch from `rev-parse --abbrev-ref HEAD` (presumably the
    /// literal "HEAD" when detached — confirm against git behavior).
    pub branch: Option<String>,
    /// Upstream tracking branch (e.g. "origin/main"), if configured.
    pub upstream: Option<String>,
    /// Commits on HEAD not yet on the upstream.
    pub ahead: u32,
    /// Commits on the upstream not yet on HEAD.
    pub behind: u32,
    /// Changes staged in the index.
    pub staged: Vec<GitFileChange>,
    /// Changes in the worktree not yet staged.
    pub unstaged: Vec<GitFileChange>,
    /// Paths git does not track yet.
    pub untracked: Vec<String>,
}

/// One changed file plus a human-readable status label
/// ("modified", "added", "deleted", "renamed", "copied", "unknown").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitFileChange {
    pub path: String,
    pub status: String,
}

/// A local or remote-tracking branch as shown in the branch picker.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitBranch {
    pub name: String,
    /// True for the currently checked-out branch.
    pub is_current: bool,
    /// True for remote-tracking refs.
    pub is_remote: bool,
}

/// One commit in the `git log` view.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitLogEntry {
    /// Full SHA (%H).
    pub hash: String,
    /// Abbreviated SHA (%h).
    pub short_hash: String,
    /// Author name (%an).
    pub author: String,
    /// Relative author date, e.g. "2 hours ago" (%ar).
    pub date: String,
    /// Commit subject line (%s).
    pub message: String,
}
|
||||
|
||||
/// Run `git <args>` inside `working_dir`, capturing its output.
///
/// On success returns stdout (lossily decoded as UTF-8); on a non-zero
/// exit status returns stderr; if git cannot be spawned at all (missing
/// binary or bad directory) returns a formatted spawn-error message.
fn run_git_command(working_dir: &str, args: &[&str]) -> Result<String, String> {
    let spawned = Command::new("git")
        .args(args)
        .current_dir(working_dir)
        .output();

    match spawned {
        Err(spawn_err) => Err(format!("Failed to execute git: {}", spawn_err)),
        Ok(out) if out.status.success() => {
            Ok(String::from_utf8_lossy(&out.stdout).to_string())
        }
        Ok(out) => Err(String::from_utf8_lossy(&out.stderr).to_string()),
    }
}
|
||||
|
||||
/// Build a full `GitStatus` snapshot for `working_dir`.
///
/// Never fails for non-repos: returns `is_repo: false` with empty fields
/// instead, so the frontend can render a "not a repository" state.
/// Individual sub-commands that fail (no upstream, empty repo, ...) are
/// degraded to None / zero rather than propagated as errors.
#[tauri::command]
pub fn git_status(working_dir: String) -> Result<GitStatus, String> {
    // Check if it's a git repo
    let is_repo = run_git_command(&working_dir, &["rev-parse", "--git-dir"]).is_ok();

    if !is_repo {
        return Ok(GitStatus {
            is_repo: false,
            branch: None,
            upstream: None,
            ahead: 0,
            behind: 0,
            staged: vec![],
            unstaged: vec![],
            untracked: vec![],
        });
    }

    // Get current branch
    let branch = run_git_command(&working_dir, &["rev-parse", "--abbrev-ref", "HEAD"])
        .ok()
        .map(|s| s.trim().to_string());

    // Get upstream branch
    let upstream = run_git_command(
        &working_dir,
        &["rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{u}"],
    )
    .ok()
    .map(|s| s.trim().to_string());

    // Get ahead/behind counts. `--left-right --count @{u}...HEAD` prints
    // "<only-in-upstream>\t<only-in-HEAD>", so parts[0] is behind and
    // parts[1] is ahead.
    let (ahead, behind) = if upstream.is_some() {
        let rev_list =
            run_git_command(&working_dir, &["rev-list", "--left-right", "--count", "@{u}...HEAD"])
                .unwrap_or_default();
        let parts: Vec<&str> = rev_list.trim().split('\t').collect();
        if parts.len() == 2 {
            (
                parts[1].parse().unwrap_or(0),
                parts[0].parse().unwrap_or(0),
            )
        } else {
            (0, 0)
        }
    } else {
        (0, 0)
    };

    // Get status with porcelain format: each line is "XY <path>" where
    // X is the index (staged) status and Y the worktree status.
    let status_output =
        run_git_command(&working_dir, &["status", "--porcelain=v1"]).unwrap_or_default();

    let mut staged = vec![];
    let mut unstaged = vec![];
    let mut untracked = vec![];

    for line in status_output.lines() {
        if line.len() < 3 {
            continue;
        }

        let index_status = line.chars().next().unwrap_or(' ');
        let worktree_status = line.chars().nth(1).unwrap_or(' ');
        // Byte slice is safe here: the first three bytes of a porcelain v1
        // line are the two ASCII status chars plus a space.
        // NOTE(review): for renames ("R  old -> new") this keeps the
        // combined "old -> new" string as the path — confirm the frontend
        // expects that.
        let path = line[3..].to_string();

        // Untracked files
        if index_status == '?' && worktree_status == '?' {
            untracked.push(path);
            continue;
        }

        // Staged changes (index status)
        if index_status != ' ' && index_status != '?' {
            staged.push(GitFileChange {
                path: path.clone(),
                status: match index_status {
                    'M' => "modified".to_string(),
                    'A' => "added".to_string(),
                    'D' => "deleted".to_string(),
                    'R' => "renamed".to_string(),
                    'C' => "copied".to_string(),
                    _ => "unknown".to_string(),
                },
            });
        }

        // Unstaged changes (worktree status)
        if worktree_status != ' ' && worktree_status != '?' {
            unstaged.push(GitFileChange {
                path,
                status: match worktree_status {
                    'M' => "modified".to_string(),
                    'D' => "deleted".to_string(),
                    _ => "unknown".to_string(),
                },
            });
        }
    }

    Ok(GitStatus {
        is_repo: true,
        branch,
        upstream,
        ahead,
        behind,
        staged,
        unstaged,
        untracked,
    })
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn git_diff(working_dir: String, file_path: Option<String>, staged: bool) -> Result<String, String> {
|
||||
let mut args = vec!["diff"];
|
||||
|
||||
if staged {
|
||||
args.push("--cached");
|
||||
}
|
||||
|
||||
if let Some(ref path) = file_path {
|
||||
args.push("--");
|
||||
args.push(path);
|
||||
}
|
||||
|
||||
run_git_command(&working_dir, &args)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn git_branches(working_dir: String) -> Result<Vec<GitBranch>, String> {
|
||||
let output = run_git_command(&working_dir, &["branch", "-a", "--format=%(refname:short)\t%(HEAD)"])?;
|
||||
|
||||
let branches: Vec<GitBranch> = output
|
||||
.lines()
|
||||
.filter_map(|line| {
|
||||
let parts: Vec<&str> = line.split('\t').collect();
|
||||
if parts.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let name = parts[0].to_string();
|
||||
let is_current = parts.get(1).map(|s| *s == "*").unwrap_or(false);
|
||||
let is_remote = name.starts_with("remotes/") || name.starts_with("origin/");
|
||||
|
||||
Some(GitBranch {
|
||||
name,
|
||||
is_current,
|
||||
is_remote,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(branches)
|
||||
}
|
||||
|
||||
/// Check out an existing branch (`git checkout <branch>`).
#[tauri::command]
pub fn git_checkout(working_dir: String, branch: String) -> Result<String, String> {
    run_git_command(&working_dir, &["checkout", &branch])
}

/// Stage a single file (`git add <file>`).
#[tauri::command]
pub fn git_stage(working_dir: String, file_path: String) -> Result<String, String> {
    run_git_command(&working_dir, &["add", &file_path])
}

/// Unstage a single file, leaving worktree changes intact
/// (`git restore --staged <file>`).
#[tauri::command]
pub fn git_unstage(working_dir: String, file_path: String) -> Result<String, String> {
    run_git_command(&working_dir, &["restore", "--staged", &file_path])
}

/// Stage all changes including deletions (`git add -A`).
#[tauri::command]
pub fn git_stage_all(working_dir: String) -> Result<String, String> {
    run_git_command(&working_dir, &["add", "-A"])
}

/// Commit the index with the given message (`git commit -m <message>`).
#[tauri::command]
pub fn git_commit(working_dir: String, message: String) -> Result<String, String> {
    run_git_command(&working_dir, &["commit", "-m", &message])
}

/// Push the current branch to its configured upstream (`git push`).
#[tauri::command]
pub fn git_push(working_dir: String) -> Result<String, String> {
    run_git_command(&working_dir, &["push"])
}

/// Pull from the configured upstream (`git pull`).
#[tauri::command]
pub fn git_pull(working_dir: String) -> Result<String, String> {
    run_git_command(&working_dir, &["pull"])
}

/// Fetch from every configured remote (`git fetch --all`).
#[tauri::command]
pub fn git_fetch(working_dir: String) -> Result<String, String> {
    run_git_command(&working_dir, &["fetch", "--all"])
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn git_log(working_dir: String, limit: Option<u32>) -> Result<Vec<GitLogEntry>, String> {
|
||||
let limit_str = limit.unwrap_or(10).to_string();
|
||||
let output = run_git_command(
|
||||
&working_dir,
|
||||
&[
|
||||
"log",
|
||||
&format!("-{}", limit_str),
|
||||
"--pretty=format:%H\t%h\t%an\t%ar\t%s",
|
||||
],
|
||||
)?;
|
||||
|
||||
let entries: Vec<GitLogEntry> = output
|
||||
.lines()
|
||||
.filter_map(|line| {
|
||||
let parts: Vec<&str> = line.split('\t').collect();
|
||||
if parts.len() < 5 {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(GitLogEntry {
|
||||
hash: parts[0].to_string(),
|
||||
short_hash: parts[1].to_string(),
|
||||
author: parts[2].to_string(),
|
||||
date: parts[3].to_string(),
|
||||
message: parts[4..].join("\t"),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(entries)
|
||||
}
|
||||
|
||||
/// Discard unstaged changes to one file (`git checkout -- <file>`).
/// Destructive: the worktree modifications are irrecoverably overwritten.
#[tauri::command]
pub fn git_discard(working_dir: String, file_path: String) -> Result<String, String> {
    run_git_command(&working_dir, &["checkout", "--", &file_path])
}

/// Create a new branch and switch to it (`git checkout -b <name>`).
#[tauri::command]
pub fn git_create_branch(working_dir: String, branch_name: String) -> Result<String, String> {
    run_git_command(&working_dir, &["checkout", "-b", &branch_name])
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::fs::{self, File};
|
||||
use std::io::Write;
|
||||
use tempfile::TempDir;
|
||||
|
||||
// Helper to create a git repository in a temp directory.
// The TempDir is returned so the repo lives until the caller drops it.
fn create_test_repo() -> TempDir {
    let temp_dir = TempDir::new().unwrap();
    let working_dir = temp_dir.path().to_string_lossy().to_string();

    // Initialize git repo
    run_git_command(&working_dir, &["init"]).unwrap();

    // Configure git user for commits
    run_git_command(&working_dir, &["config", "user.email", "test@example.com"]).unwrap();
    run_git_command(&working_dir, &["config", "user.name", "Test User"]).unwrap();

    // Disable GPG signing for tests (user may have it enabled globally)
    run_git_command(&working_dir, &["config", "commit.gpgsign", "false"]).unwrap();

    temp_dir
}

// Helper to create (or overwrite) a file in the test repo with the given content.
fn create_file(dir: &TempDir, name: &str, content: &str) {
    let file_path = dir.path().join(name);
    let mut file = File::create(file_path).unwrap();
    file.write_all(content.as_bytes()).unwrap();
}
|
||||
|
||||
// ==================== GitStatus struct tests ====================
|
||||
|
||||
// A populated GitStatus serializes all scalar fields with snake_case keys.
#[test]
fn test_git_status_serialization() {
    let status = GitStatus {
        is_repo: true,
        branch: Some("main".to_string()),
        upstream: Some("origin/main".to_string()),
        ahead: 2,
        behind: 1,
        staged: vec![GitFileChange {
            path: "file.txt".to_string(),
            status: "modified".to_string(),
        }],
        unstaged: vec![],
        untracked: vec!["new_file.txt".to_string()],
    };

    let json = serde_json::to_string(&status).unwrap();
    assert!(json.contains("\"is_repo\":true"));
    assert!(json.contains("\"branch\":\"main\""));
    assert!(json.contains("\"ahead\":2"));
    assert!(json.contains("\"behind\":1"));
}

// The "not a repo" shape (all empty/None) survives a JSON round trip.
#[test]
fn test_git_status_not_a_repo() {
    let status = GitStatus {
        is_repo: false,
        branch: None,
        upstream: None,
        ahead: 0,
        behind: 0,
        staged: vec![],
        unstaged: vec![],
        untracked: vec![],
    };

    let json = serde_json::to_string(&status).unwrap();
    let deserialized: GitStatus = serde_json::from_str(&json).unwrap();
    assert!(!deserialized.is_repo);
    assert!(deserialized.branch.is_none());
}

// ==================== GitFileChange struct tests ====================

// GitFileChange round-trips path and status label through JSON.
#[test]
fn test_git_file_change_serialization() {
    let change = GitFileChange {
        path: "src/main.rs".to_string(),
        status: "added".to_string(),
    };

    let json = serde_json::to_string(&change).unwrap();
    assert!(json.contains("src/main.rs"));
    assert!(json.contains("added"));

    let deserialized: GitFileChange = serde_json::from_str(&json).unwrap();
    assert_eq!(deserialized.path, "src/main.rs");
    assert_eq!(deserialized.status, "added");
}

// ==================== GitBranch struct tests ====================

// A current local branch serializes its flags as JSON booleans.
#[test]
fn test_git_branch_serialization() {
    let branch = GitBranch {
        name: "feature/new-feature".to_string(),
        is_current: true,
        is_remote: false,
    };

    let json = serde_json::to_string(&branch).unwrap();
    assert!(json.contains("feature/new-feature"));
    assert!(json.contains("\"is_current\":true"));
    assert!(json.contains("\"is_remote\":false"));
}

// A remote-tracking branch keeps its flags through a round trip.
#[test]
fn test_git_branch_remote() {
    let branch = GitBranch {
        name: "origin/main".to_string(),
        is_current: false,
        is_remote: true,
    };

    let json = serde_json::to_string(&branch).unwrap();
    let deserialized: GitBranch = serde_json::from_str(&json).unwrap();
    assert!(deserialized.is_remote);
    assert!(!deserialized.is_current);
}

// ==================== GitLogEntry struct tests ====================

// GitLogEntry serializes hash, author, and message verbatim.
#[test]
fn test_git_log_entry_serialization() {
    let entry = GitLogEntry {
        hash: "abc123def456".to_string(),
        short_hash: "abc123d".to_string(),
        author: "Hikari".to_string(),
        date: "2 hours ago".to_string(),
        message: "feat: add new feature".to_string(),
    };

    let json = serde_json::to_string(&entry).unwrap();
    assert!(json.contains("abc123def456"));
    assert!(json.contains("Hikari"));
    assert!(json.contains("feat: add new feature"));
}
|
||||
|
||||
// ==================== git_status integration tests ====================
|
||||
|
||||
// A plain temp dir (no .git) must yield the is_repo=false shape, not an Err.
#[test]
fn test_git_status_not_a_git_repo() {
    let temp_dir = TempDir::new().unwrap();
    let working_dir = temp_dir.path().to_string_lossy().to_string();

    let result = git_status(working_dir);
    assert!(result.is_ok());

    let status = result.unwrap();
    assert!(!status.is_repo);
    assert!(status.branch.is_none());
    assert!(status.staged.is_empty());
}

// A freshly initialized repo with no files reports a clean status.
#[test]
fn test_git_status_empty_repo() {
    let temp_dir = create_test_repo();
    let working_dir = temp_dir.path().to_string_lossy().to_string();

    let result = git_status(working_dir);
    assert!(result.is_ok());

    let status = result.unwrap();
    assert!(status.is_repo);
    assert!(status.staged.is_empty());
    assert!(status.unstaged.is_empty());
    assert!(status.untracked.is_empty());
}

// A never-added file appears in the untracked list.
#[test]
fn test_git_status_with_untracked_file() {
    let temp_dir = create_test_repo();
    let working_dir = temp_dir.path().to_string_lossy().to_string();

    // Create an untracked file
    create_file(&temp_dir, "untracked.txt", "hello");

    let result = git_status(working_dir);
    assert!(result.is_ok());

    let status = result.unwrap();
    assert!(status.is_repo);
    assert!(status.untracked.contains(&"untracked.txt".to_string()));
}

// A newly `git add`-ed file shows up as staged with status "added".
#[test]
fn test_git_status_with_staged_file() {
    let temp_dir = create_test_repo();
    let working_dir = temp_dir.path().to_string_lossy().to_string();

    // Create and stage a file
    create_file(&temp_dir, "staged.txt", "hello");
    run_git_command(&working_dir, &["add", "staged.txt"]).unwrap();

    let result = git_status(working_dir);
    assert!(result.is_ok());

    let status = result.unwrap();
    assert!(status.is_repo);
    assert!(!status.staged.is_empty());
    assert_eq!(status.staged[0].path, "staged.txt");
    assert_eq!(status.staged[0].status, "added");
}

// Editing a committed file without staging shows it as unstaged "modified".
#[test]
fn test_git_status_with_modified_file() {
    let temp_dir = create_test_repo();
    let working_dir = temp_dir.path().to_string_lossy().to_string();

    // Create, stage, and commit a file
    create_file(&temp_dir, "file.txt", "initial content");
    run_git_command(&working_dir, &["add", "file.txt"]).unwrap();
    run_git_command(&working_dir, &["commit", "-m", "initial commit"]).unwrap();

    // Modify the file
    create_file(&temp_dir, "file.txt", "modified content");

    let result = git_status(working_dir);
    assert!(result.is_ok());

    let status = result.unwrap();
    assert!(status.is_repo);
    assert!(!status.unstaged.is_empty());
    assert_eq!(status.unstaged[0].path, "file.txt");
    assert_eq!(status.unstaged[0].status, "modified");
}
|
||||
|
||||
// ==================== git_diff integration tests ====================
|
||||
|
||||
// A clean repo produces an empty diff.
#[test]
fn test_git_diff_no_changes() {
    let temp_dir = create_test_repo();
    let working_dir = temp_dir.path().to_string_lossy().to_string();

    let result = git_diff(working_dir, None, false);
    assert!(result.is_ok());
    assert!(result.unwrap().is_empty());
}

// An unstaged edit to a committed file appears in the worktree diff.
#[test]
fn test_git_diff_with_changes() {
    let temp_dir = create_test_repo();
    let working_dir = temp_dir.path().to_string_lossy().to_string();

    // Create and commit a file
    create_file(&temp_dir, "file.txt", "initial content");
    run_git_command(&working_dir, &["add", "file.txt"]).unwrap();
    run_git_command(&working_dir, &["commit", "-m", "initial"]).unwrap();

    // Modify the file
    create_file(&temp_dir, "file.txt", "modified content");

    let result = git_diff(working_dir, None, false);
    assert!(result.is_ok());
    let diff = result.unwrap();
    assert!(diff.contains("diff"));
    assert!(diff.contains("file.txt"));
}

// A staged edit only shows when staged=true (i.e. `git diff --cached`).
#[test]
fn test_git_diff_staged() {
    let temp_dir = create_test_repo();
    let working_dir = temp_dir.path().to_string_lossy().to_string();

    // Create and commit a file
    create_file(&temp_dir, "file.txt", "initial content");
    run_git_command(&working_dir, &["add", "file.txt"]).unwrap();
    run_git_command(&working_dir, &["commit", "-m", "initial"]).unwrap();

    // Modify and stage the file
    create_file(&temp_dir, "file.txt", "modified content");
    run_git_command(&working_dir, &["add", "file.txt"]).unwrap();

    let result = git_diff(working_dir, None, true);
    assert!(result.is_ok());
    let diff = result.unwrap();
    assert!(diff.contains("diff"));
}

// Passing file_path restricts the diff to that file only.
#[test]
fn test_git_diff_specific_file() {
    let temp_dir = create_test_repo();
    let working_dir = temp_dir.path().to_string_lossy().to_string();

    // Create and commit files
    create_file(&temp_dir, "file1.txt", "content1");
    create_file(&temp_dir, "file2.txt", "content2");
    run_git_command(&working_dir, &["add", "-A"]).unwrap();
    run_git_command(&working_dir, &["commit", "-m", "initial"]).unwrap();

    // Modify both files
    create_file(&temp_dir, "file1.txt", "modified1");
    create_file(&temp_dir, "file2.txt", "modified2");

    // Get diff for only file1.txt
    let result = git_diff(working_dir, Some("file1.txt".to_string()), false);
    assert!(result.is_ok());
    let diff = result.unwrap();
    assert!(diff.contains("file1.txt"));
    assert!(!diff.contains("file2.txt"));
}
|
||||
|
||||
// ==================== git_branches integration tests ====================
|
||||
|
||||
#[test]
fn test_git_branches_single_branch() {
    // A repo with one commit should report at least one branch (main/master).
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    // Branches only exist once there is a commit for them to point at.
    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    let branches = git_branches(dir).unwrap();
    assert!(!branches.is_empty());
}
|
||||
|
||||
#[test]
fn test_git_branches_multiple_branches() {
    // After creating a second branch, the listing must include it.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    // A commit is required before branches can be created.
    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Add a second branch alongside the default one.
    run_git_command(&dir, &["branch", "feature-branch"]).unwrap();

    let branches = git_branches(dir).unwrap();
    assert!(branches.len() >= 2);
    assert!(branches.iter().any(|b| b.name == "feature-branch"));
}
|
||||
|
||||
// ==================== git_stage and git_unstage tests ====================
|
||||
|
||||
#[test]
fn test_git_stage_file() {
    // Staging an untracked file should land it in the staged set.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    create_file(&repo, "file.txt", "content");

    git_stage(dir.clone(), "file.txt".to_string()).unwrap();

    // The file must now be listed as staged.
    let status = git_status(dir).unwrap();
    assert!(status.staged.iter().any(|f| f.path == "file.txt"));
}
|
||||
|
||||
#[test]
fn test_git_unstage_file() {
    // Unstaging a staged modification should move it back to the unstaged set.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    // Commit a baseline so HEAD exists to restore the index from.
    create_file(&repo, "file.txt", "initial content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Modify the file and stage the change.
    create_file(&repo, "file.txt", "modified content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();

    git_unstage(dir.clone(), "file.txt".to_string()).unwrap();

    // The file must now appear only among unstaged changes.
    let status = git_status(dir).unwrap();
    assert!(!status.staged.iter().any(|f| f.path == "file.txt"));
    assert!(status.unstaged.iter().any(|f| f.path == "file.txt"));
}
|
||||
|
||||
#[test]
fn test_git_stage_all() {
    // Stage-all should pick up every untracked file at once.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    create_file(&repo, "file1.txt", "content1");
    create_file(&repo, "file2.txt", "content2");

    git_stage_all(dir.clone()).unwrap();

    // Both files should be counted in the staged set.
    let status = git_status(dir).unwrap();
    assert_eq!(status.staged.len(), 2);
}
|
||||
|
||||
// ==================== git_commit tests ====================
|
||||
|
||||
#[test]
fn test_git_commit() {
    // Committing staged changes should produce a commit with the given message.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();

    git_commit(dir.clone(), "test commit message".to_string()).unwrap();

    // The newest log entry must carry our message.
    let log = git_log(dir, Some(1)).unwrap();
    assert!(!log.is_empty());
    assert!(log[0].message.contains("test commit message"));
}
|
||||
|
||||
#[test]
fn test_git_commit_nothing_to_commit() {
    // Committing with a clean index should be reported as an error.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    // Establish a HEAD so the repo is in a normal state.
    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Nothing is staged now, so this commit attempt must fail.
    assert!(git_commit(dir, "empty commit".to_string()).is_err());
}
|
||||
|
||||
// ==================== git_log tests ====================
|
||||
|
||||
#[test]
fn test_git_log_empty_repo() {
    // Before any commit exists, the log either errors or yields nothing.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    // Both outcomes are acceptable; a non-empty Ok result would be wrong.
    if let Ok(commits) = git_log(dir, Some(10)) {
        assert!(commits.is_empty());
    }
}
|
||||
|
||||
#[test]
fn test_git_log_with_commits() {
    // Three commits should come back, ordered newest-first.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    for n in 1..=3 {
        create_file(&repo, &format!("file{}.txt", n), "content");
        run_git_command(&dir, &["add", "-A"]).unwrap();
        run_git_command(&dir, &["commit", "-m", &format!("commit {}", n)]).unwrap();
    }

    let log = git_log(dir, Some(10)).unwrap();
    assert_eq!(log.len(), 3);
    // Most recent commit first, oldest last.
    assert!(log[0].message.contains("commit 3"));
    assert!(log[2].message.contains("commit 1"));
}
|
||||
|
||||
#[test]
fn test_git_log_limit() {
    // The limit argument should cap how many entries come back.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    // Produce five commits in a row.
    for n in 1..=5 {
        create_file(&repo, &format!("file{}.txt", n), "content");
        run_git_command(&dir, &["add", "-A"]).unwrap();
        run_git_command(&dir, &["commit", "-m", &format!("commit {}", n)]).unwrap();
    }

    // Ask for just the two most recent.
    let log = git_log(dir, Some(2)).unwrap();
    assert_eq!(log.len(), 2);
}
|
||||
|
||||
// ==================== git_discard tests ====================
|
||||
|
||||
#[test]
fn test_git_discard_changes() {
    // Discarding should restore the committed contents of the file.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    // Commit a baseline version.
    create_file(&repo, "file.txt", "original content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Dirty the working copy...
    create_file(&repo, "file.txt", "modified content");

    // ...then throw the change away.
    git_discard(dir.clone(), "file.txt".to_string()).unwrap();

    // The on-disk contents must match the committed version again.
    let restored = fs::read_to_string(repo.path().join("file.txt")).unwrap();
    assert_eq!(restored, "original content");
}
|
||||
|
||||
// ==================== git_create_branch tests ====================
|
||||
|
||||
#[test]
fn test_git_create_branch() {
    // Creating a branch should also switch to it.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    // A commit is required before branch creation works.
    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    git_create_branch(dir.clone(), "new-branch".to_string()).unwrap();

    // The new branch must exist AND be the checked-out one.
    let branches = git_branches(dir).unwrap();
    assert!(branches.iter().any(|b| b.name == "new-branch" && b.is_current));
}
|
||||
|
||||
// ==================== git_checkout tests ====================
|
||||
|
||||
#[test]
fn test_git_checkout() {
    // Checking out an existing branch should make it the current one.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    // A commit is required before branches can exist.
    create_file(&repo, "file.txt", "content");
    run_git_command(&dir, &["add", "file.txt"]).unwrap();
    run_git_command(&dir, &["commit", "-m", "initial"]).unwrap();

    // Create a second branch without switching to it.
    run_git_command(&dir, &["branch", "other-branch"]).unwrap();

    git_checkout(dir.clone(), "other-branch".to_string()).unwrap();

    // Exactly that branch should now be flagged as current.
    let branches = git_branches(dir).unwrap();
    let current = branches.iter().find(|b| b.is_current);
    assert!(current.is_some());
    assert_eq!(current.unwrap().name, "other-branch");
}
|
||||
|
||||
// ==================== run_git_command tests ====================
|
||||
|
||||
#[test]
fn test_run_git_command_success() {
    // `git status` in a valid repository should succeed.
    let repo = create_test_repo();
    let dir = repo.path().to_string_lossy().to_string();

    assert!(run_git_command(&dir, &["status"]).is_ok());
}
|
||||
|
||||
#[test]
fn test_run_git_command_failure() {
    // A plain temp dir was never initialized as a repo, so `git log` must fail.
    let plain_dir = TempDir::new().unwrap();
    let dir = plain_dir.path().to_string_lossy().to_string();

    assert!(run_git_command(&dir, &["log"]).is_err());
}
|
||||
|
||||
#[test]
fn test_run_git_command_invalid_dir() {
    // A working directory that does not exist must produce an error.
    assert!(run_git_command("/nonexistent/path", &["status"]).is_err());
}
|
||||
}
|
||||
@@ -1,26 +1,44 @@
|
||||
mod achievements;
|
||||
mod bridge_manager;
|
||||
mod clipboard;
|
||||
mod commands;
|
||||
mod config;
|
||||
mod git;
|
||||
mod notifications;
|
||||
mod provider_bridge;
|
||||
mod providers;
|
||||
mod quick_actions;
|
||||
mod sessions;
|
||||
mod snippets;
|
||||
mod stats;
|
||||
mod temp_manager;
|
||||
mod tray;
|
||||
mod types;
|
||||
mod wsl_bridge;
|
||||
mod wsl_notifications;
|
||||
mod vbs_notification;
|
||||
mod windows_toast;
|
||||
mod wsl_bridge;
|
||||
mod wsl_notifications;
|
||||
|
||||
use commands::*;
|
||||
use notifications::*;
|
||||
use bridge_manager::create_shared_bridge_manager;
|
||||
use clipboard::*;
|
||||
use commands::load_saved_achievements;
|
||||
use wsl_notifications::*;
|
||||
use commands::*;
|
||||
use git::*;
|
||||
use notifications::*;
|
||||
use quick_actions::*;
|
||||
use sessions::*;
|
||||
use snippets::*;
|
||||
use tauri::Manager;
|
||||
use temp_manager::create_shared_temp_manager;
|
||||
use tray::{setup_tray, should_minimize_to_tray};
|
||||
use vbs_notification::*;
|
||||
use windows_toast::*;
|
||||
use wsl_notifications::*;
|
||||
|
||||
#[cfg_attr(mobile, tauri::mobile_entry_point)]
|
||||
pub fn run() {
|
||||
let bridge_manager = create_shared_bridge_manager();
|
||||
let temp_manager = create_shared_temp_manager().expect("Failed to create temp file manager");
|
||||
|
||||
tauri::Builder::default()
|
||||
.plugin(tauri_plugin_dialog::init())
|
||||
@@ -29,10 +47,43 @@ pub fn run() {
|
||||
.plugin(tauri_plugin_store::Builder::new().build())
|
||||
.plugin(tauri_plugin_notification::init())
|
||||
.plugin(tauri_plugin_os::init())
|
||||
.plugin(tauri_plugin_http::init())
|
||||
.plugin(tauri_plugin_clipboard_manager::init())
|
||||
.plugin(tauri_plugin_fs::init())
|
||||
.manage(bridge_manager.clone())
|
||||
.manage(temp_manager.clone())
|
||||
.setup(move |app| {
|
||||
// Initialize the app handle in the bridge manager
|
||||
bridge_manager.lock().set_app_handle(app.handle().clone());
|
||||
|
||||
// Clean up any orphaned temp files from previous sessions
|
||||
if let Ok(count) = temp_manager.lock().cleanup_orphaned_files() {
|
||||
if count > 0 {
|
||||
println!("Cleaned up {} orphaned temp files", count);
|
||||
}
|
||||
}
|
||||
|
||||
// Set up system tray
|
||||
if let Err(e) = setup_tray(app.handle()) {
|
||||
eprintln!("Failed to set up system tray: {}", e);
|
||||
}
|
||||
|
||||
// Handle window close event for minimize to tray
|
||||
let main_window = app.get_webview_window("main").unwrap();
|
||||
main_window.on_window_event({
|
||||
let app_handle = app.handle().clone();
|
||||
move |event| {
|
||||
if let tauri::WindowEvent::CloseRequested { api, .. } = event {
|
||||
if should_minimize_to_tray(&app_handle) {
|
||||
api.prevent_close();
|
||||
if let Some(window) = app_handle.get_webview_window("main") {
|
||||
let _ = window.hide();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.invoke_handler(tauri::generate_handler![
|
||||
@@ -46,13 +97,70 @@ pub fn run() {
|
||||
get_config,
|
||||
save_config,
|
||||
get_usage_stats,
|
||||
get_persisted_stats,
|
||||
load_saved_achievements,
|
||||
answer_question,
|
||||
send_windows_notification,
|
||||
send_simple_notification,
|
||||
send_windows_toast,
|
||||
send_notify_send,
|
||||
send_wsl_notification,
|
||||
send_vbs_notification,
|
||||
validate_directory,
|
||||
list_skills,
|
||||
check_for_updates,
|
||||
save_temp_file,
|
||||
register_temp_file,
|
||||
get_temp_files,
|
||||
cleanup_temp_files,
|
||||
cleanup_all_temp_files,
|
||||
cleanup_orphaned_temp_files,
|
||||
get_file_size,
|
||||
list_sessions,
|
||||
save_session,
|
||||
load_session,
|
||||
delete_session,
|
||||
search_sessions,
|
||||
clear_all_sessions,
|
||||
list_snippets,
|
||||
save_snippet,
|
||||
delete_snippet,
|
||||
get_snippet_categories,
|
||||
reset_default_snippets,
|
||||
list_quick_actions,
|
||||
save_quick_action,
|
||||
delete_quick_action,
|
||||
reset_default_quick_actions,
|
||||
git_status,
|
||||
git_diff,
|
||||
git_branches,
|
||||
git_checkout,
|
||||
git_stage,
|
||||
git_unstage,
|
||||
git_stage_all,
|
||||
git_commit,
|
||||
git_push,
|
||||
git_pull,
|
||||
git_fetch,
|
||||
git_log,
|
||||
git_discard,
|
||||
git_create_branch,
|
||||
list_clipboard_entries,
|
||||
capture_clipboard,
|
||||
delete_clipboard_entry,
|
||||
toggle_pin_clipboard_entry,
|
||||
clear_clipboard_history,
|
||||
search_clipboard_entries,
|
||||
get_clipboard_languages,
|
||||
update_clipboard_language,
|
||||
list_directory,
|
||||
read_file_content,
|
||||
write_file_content,
|
||||
create_file,
|
||||
create_directory,
|
||||
delete_file,
|
||||
delete_directory,
|
||||
rename_path,
|
||||
])
|
||||
.run(tauri::generate_context!())
|
||||
.expect("error while running tauri application");
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use tauri::command;
|
||||
use std::process::Command;
|
||||
use tauri::command;
|
||||
|
||||
#[command]
|
||||
pub async fn send_notify_send(title: String, body: String) -> Result<(), String> {
|
||||
@@ -10,7 +10,12 @@ pub async fn send_notify_send(title: String, body: String) -> Result<(), String>
|
||||
.arg("--urgency=normal")
|
||||
.arg("--app-name=Hikari Desktop")
|
||||
.output()
|
||||
.map_err(|e| format!("Failed to execute notify-send: {}. Make sure libnotify-bin is installed.", e))?;
|
||||
.map_err(|e| {
|
||||
format!(
|
||||
"Failed to execute notify-send: {}. Make sure libnotify-bin is installed.",
|
||||
e
|
||||
)
|
||||
})?;
|
||||
|
||||
if !output.status.success() {
|
||||
let error = String::from_utf8_lossy(&output.stderr);
|
||||
@@ -93,4 +98,4 @@ pub async fn send_simple_notification(title: String, body: String) -> Result<(),
|
||||
.map_err(|e| format!("Failed to send message: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,773 @@
|
||||
// This provider wraps the Claude CLI subprocess.
|
||||
// It will be actively used once providers are fully integrated with BridgeManager.
|
||||
#![allow(dead_code)]
|
||||
|
||||
use async_trait::async_trait;
|
||||
use std::io::{BufRead, BufReader, Write};
|
||||
use std::process::{Child, ChildStdin, Command, Stdio};
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::Arc;
|
||||
use std::thread;
|
||||
use tempfile::NamedTempFile;
|
||||
|
||||
use crate::providers::traits::{
|
||||
LlmProvider, ModelInfo, ProviderCapabilities, ProviderConfig, ProviderStreamEvent,
|
||||
QuestionOption, StreamCallback,
|
||||
};
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::os::windows::process::CommandExt;
|
||||
|
||||
/// Heuristically determine whether this process is running inside WSL.
///
/// Checks, in order: the kernel banner in `/proc/version`, the WSL interop
/// binfmt registration, and the `WSL_DISTRO_NAME` environment variable.
/// Any single positive signal is enough.
fn detect_wsl() -> bool {
    let banner_says_wsl = std::fs::read_to_string("/proc/version")
        .map(|version| {
            let lowered = version.to_lowercase();
            lowered.contains("microsoft") || lowered.contains("wsl")
        })
        .unwrap_or(false);

    banner_says_wsl
        || std::path::Path::new("/proc/sys/fs/binfmt_misc/WSLInterop").exists()
        || std::env::var("WSL_DISTRO_NAME").is_ok()
}
|
||||
|
||||
/// Locate the `claude` executable.
///
/// Tries a set of well-known install locations under `$HOME` and the system
/// bin directories, then falls back to `which claude`. Returns `None` when
/// `HOME` is unset or no binary can be found.
fn find_claude_binary() -> Option<String> {
    let home = std::env::var("HOME").ok()?;

    let candidates = [
        format!("{}/.local/bin/claude", home),
        format!("{}/.claude/local/claude", home),
        "/usr/local/bin/claude".to_string(),
        "/usr/bin/claude".to_string(),
    ];
    if let Some(found) = candidates
        .iter()
        .find(|candidate| std::path::Path::new(candidate.as_str()).exists())
    {
        return Some(found.clone());
    }

    // None of the fixed paths matched; defer to the shell's lookup.
    let output = Command::new("which").arg("claude").output().ok()?;
    if output.status.success() {
        let path = String::from_utf8_lossy(&output.stdout).trim().to_string();
        if !path.is_empty() {
            return Some(path);
        }
    }

    None
}
|
||||
|
||||
/// Provider that drives the Claude Code CLI as a child process, speaking its
/// stream-json protocol over the child's stdin/stdout.
pub struct ClaudeCliProvider {
    // Provider settings (model, working directory, API key, MCP config, ...).
    config: ProviderConfig,
    // Running CLI child process, if started.
    process: Option<Child>,
    // Handle to the child's stdin, used to send stream-json messages.
    stdin: Option<ChildStdin>,
    // Session id of the CLI conversation; presumably set once the CLI reports
    // one — TODO confirm (only cleared, never written, in this chunk).
    session_id: Option<String>,
    // Temp file holding the MCP servers JSON; kept alive here so the path
    // passed to the CLI stays valid until `stop` drops it.
    mcp_config_file: Option<NamedTempFile>,
    // Shared flag; cleared by the stdout reader thread when the stream ends.
    is_running: Arc<AtomicBool>,
}
|
||||
|
||||
impl ClaudeCliProvider {
|
||||
pub fn new(config: ProviderConfig) -> Self {
|
||||
Self {
|
||||
config,
|
||||
process: None,
|
||||
stdin: None,
|
||||
session_id: None,
|
||||
mcp_config_file: None,
|
||||
is_running: Arc::new(AtomicBool::new(false)),
|
||||
}
|
||||
}
|
||||
|
||||
fn build_command(&mut self) -> Result<Command, String> {
|
||||
let mcp_config_path = if let Some(ref mcp_json) = self.config.mcp_servers_json {
|
||||
if !mcp_json.trim().is_empty() {
|
||||
serde_json::from_str::<serde_json::Value>(mcp_json)
|
||||
.map_err(|e| format!("Invalid MCP servers JSON: {}", e))?;
|
||||
|
||||
let mut temp_file = NamedTempFile::new()
|
||||
.map_err(|e| format!("Failed to create temp file for MCP config: {}", e))?;
|
||||
temp_file
|
||||
.write_all(mcp_json.as_bytes())
|
||||
.map_err(|e| format!("Failed to write MCP config: {}", e))?;
|
||||
temp_file
|
||||
.flush()
|
||||
.map_err(|e| format!("Failed to flush MCP config: {}", e))?;
|
||||
|
||||
let path = temp_file.path().to_string_lossy().to_string();
|
||||
self.mcp_config_file = Some(temp_file);
|
||||
Some(path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let is_wsl = detect_wsl();
|
||||
let working_dir = &self.config.working_directory;
|
||||
|
||||
let command = if is_wsl {
|
||||
let claude_path = find_claude_binary().ok_or_else(|| {
|
||||
"Could not find claude binary. Is Claude Code installed?".to_string()
|
||||
})?;
|
||||
|
||||
let mut cmd = Command::new(&claude_path);
|
||||
cmd.args([
|
||||
"--output-format",
|
||||
"stream-json",
|
||||
"--input-format",
|
||||
"stream-json",
|
||||
"--verbose",
|
||||
]);
|
||||
|
||||
if let Some(ref model) = self.config.model {
|
||||
if !model.is_empty() {
|
||||
cmd.args(["--model", model]);
|
||||
}
|
||||
}
|
||||
|
||||
for tool in &self.config.allowed_tools {
|
||||
cmd.args(["--allowedTools", tool]);
|
||||
}
|
||||
|
||||
if let Some(ref instructions) = self.config.custom_instructions {
|
||||
if !instructions.is_empty() {
|
||||
cmd.args(["--system-prompt", instructions]);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref mcp_path) = mcp_config_path {
|
||||
cmd.args(["--mcp-config", mcp_path]);
|
||||
}
|
||||
|
||||
cmd.current_dir(working_dir);
|
||||
|
||||
if let Some(ref api_key) = self.config.api_key {
|
||||
if !api_key.is_empty() {
|
||||
cmd.env("ANTHROPIC_API_KEY", api_key);
|
||||
}
|
||||
}
|
||||
|
||||
cmd
|
||||
} else {
|
||||
let mut cmd = Command::new("wsl");
|
||||
|
||||
let mut claude_cmd = format!("cd '{}' && ", working_dir);
|
||||
|
||||
if let Some(ref api_key) = self.config.api_key {
|
||||
if !api_key.is_empty() {
|
||||
claude_cmd.push_str(&format!("ANTHROPIC_API_KEY='{}' ", api_key));
|
||||
}
|
||||
}
|
||||
|
||||
claude_cmd.push_str(
|
||||
"claude --output-format stream-json --input-format stream-json --verbose",
|
||||
);
|
||||
|
||||
if let Some(ref model) = self.config.model {
|
||||
if !model.is_empty() {
|
||||
claude_cmd.push_str(&format!(" --model '{}'", model));
|
||||
}
|
||||
}
|
||||
|
||||
for tool in &self.config.allowed_tools {
|
||||
claude_cmd.push_str(&format!(" --allowedTools '{}'", tool));
|
||||
}
|
||||
|
||||
if let Some(ref instructions) = self.config.custom_instructions {
|
||||
if !instructions.is_empty() {
|
||||
let escaped = instructions.replace('\'', "'\\''");
|
||||
claude_cmd.push_str(&format!(" --system-prompt '{}'", escaped));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref mcp_path) = mcp_config_path {
|
||||
claude_cmd.push_str(&format!(" --mcp-config '{}'", mcp_path));
|
||||
}
|
||||
|
||||
cmd.args(["-e", "bash", "-lc", &claude_cmd]);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
cmd.creation_flags(0x08000000); // CREATE_NO_WINDOW
|
||||
|
||||
cmd
|
||||
};
|
||||
|
||||
Ok(command)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
impl LlmProvider for ClaudeCliProvider {
    /// Human-readable provider name.
    fn name(&self) -> &'static str {
        "Claude CLI"
    }

    /// Static capability/model catalog for this provider. Costs are USD per
    /// million tokens; context windows are fixed at 200k for all models.
    fn capabilities(&self) -> ProviderCapabilities {
        ProviderCapabilities {
            supports_streaming: true,
            supports_tools: true,
            supports_vision: true,
            supports_thinking: true,
            supports_mcp: true,
            supports_resume_session: true,
            max_context_tokens: Some(200_000),
            available_models: vec![
                ModelInfo {
                    id: "claude-sonnet-4-20250514".to_string(),
                    name: "Claude Sonnet 4".to_string(),
                    description: Some("Fast and intelligent".to_string()),
                    context_window: Some(200_000),
                    input_cost_per_mtok: Some(3.0),
                    output_cost_per_mtok: Some(15.0),
                },
                ModelInfo {
                    id: "claude-opus-4-20250514".to_string(),
                    name: "Claude Opus 4".to_string(),
                    description: Some("Most capable model".to_string()),
                    context_window: Some(200_000),
                    input_cost_per_mtok: Some(15.0),
                    output_cost_per_mtok: Some(75.0),
                },
                ModelInfo {
                    id: "claude-3-5-haiku-20241022".to_string(),
                    name: "Claude 3.5 Haiku".to_string(),
                    description: Some("Fast and efficient".to_string()),
                    context_window: Some(200_000),
                    input_cost_per_mtok: Some(1.0),
                    output_cost_per_mtok: Some(5.0),
                },
            ],
        }
    }

    /// Spawn the CLI child process and begin streaming its output.
    ///
    /// Emits `Connected { session_id: None }` immediately (the real session
    /// id arrives later via the CLI's `system/init` message), then spawns one
    /// thread reading stdout (JSON events) and one reading stderr (errors).
    /// Errors if a process is already running or the spawn fails.
    async fn start(&mut self, callback: StreamCallback) -> Result<(), String> {
        if self.process.is_some() {
            return Err("Process already running".to_string());
        }

        // Early signal so the UI can show "connecting" before the spawn.
        callback(ProviderStreamEvent::Connected { session_id: None });

        let mut command = self.build_command()?;

        // All three pipes are needed: stdin to send, stdout/stderr to read.
        command
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped());

        let mut child = command
            .spawn()
            .map_err(|e| format!("Failed to spawn process: {}", e))?;

        let stdin = child.stdin.take();
        let stdout = child.stdout.take();
        let stderr = child.stderr.take();

        self.stdin = stdin;
        self.process = Some(child);
        self.is_running.store(true, Ordering::SeqCst);

        let is_running = self.is_running.clone();
        // Wrap the callback so both reader threads can share it.
        let callback = Arc::new(callback);

        // stdout reader: parses stream-json lines and clears is_running on EOF.
        if let Some(stdout) = stdout {
            let callback_clone = callback.clone();
            let is_running_clone = is_running.clone();
            thread::spawn(move || {
                handle_stdout(stdout, callback_clone, is_running_clone);
            });
        }

        // stderr reader: forwards each line as an Error event.
        if let Some(stderr) = stderr {
            let callback_clone = callback.clone();
            thread::spawn(move || {
                handle_stderr(stderr, callback_clone);
            });
        }

        Ok(())
    }

    /// Kill the child process (if any) and reset all per-session state.
    /// Dropping `mcp_config_file` deletes the temp MCP config file.
    /// Always returns Ok; kill/wait failures are deliberately ignored.
    async fn stop(&mut self) -> Result<(), String> {
        if let Some(mut process) = self.process.take() {
            let _ = process.kill();
            let _ = process.wait();
        }
        self.stdin = None;
        self.session_id = None;
        self.mcp_config_file = None;
        self.is_running.store(false, Ordering::SeqCst);
        Ok(())
    }

    /// Whether the CLI stream is currently live (cleared by the stdout
    /// reader thread when the stream ends, and by stop/interrupt).
    fn is_running(&self) -> bool {
        self.is_running.load(Ordering::SeqCst)
    }

    /// Send one user text message to the CLI as a stream-json line on stdin.
    /// Errors if no process is running or the write/flush fails.
    async fn send_message(&mut self, message: &str) -> Result<(), String> {
        let stdin = self.stdin.as_mut().ok_or("Process not running")?;

        // stream-json "user" envelope with a single text content block.
        let input = serde_json::json!({
            "type": "user",
            "message": {
                "role": "user",
                "content": [{
                    "type": "text",
                    "text": message
                }]
            }
        });

        let json_line = serde_json::to_string(&input).map_err(|e| e.to_string())?;

        // Protocol is line-delimited JSON: one message per newline.
        stdin
            .write_all(format!("{}\n", json_line).as_bytes())
            .map_err(|e| format!("Failed to write to stdin: {}", e))?;

        stdin
            .flush()
            .map_err(|e| format!("Failed to flush stdin: {}", e))?;

        Ok(())
    }

    /// Send a tool result back to the CLI for a prior tool_use.
    ///
    /// The result value is serialized to a string and embedded as the
    /// tool_result content. NOTE(review): `_is_error` is currently ignored —
    /// errors are not flagged to the CLI; confirm whether that is intended.
    async fn send_tool_result(
        &mut self,
        tool_use_id: &str,
        result: serde_json::Value,
        _is_error: bool,
    ) -> Result<(), String> {
        let stdin = self.stdin.as_mut().ok_or("Process not running")?;

        // The CLI expects tool_result content as a string, not raw JSON.
        let content_str = serde_json::to_string(&result).map_err(|e| e.to_string())?;

        let input = serde_json::json!({
            "type": "user",
            "message": {
                "role": "user",
                "content": [{
                    "type": "tool_result",
                    "tool_use_id": tool_use_id,
                    "content": content_str
                }]
            }
        });

        let json_line = serde_json::to_string(&input).map_err(|e| e.to_string())?;

        stdin
            .write_all(format!("{}\n", json_line).as_bytes())
            .map_err(|e| format!("Failed to write to stdin: {}", e))?;

        stdin
            .flush()
            .map_err(|e| format!("Failed to flush stdin: {}", e))?;

        Ok(())
    }

    /// Configured model id, if any (None lets the CLI pick its default).
    fn get_model(&self) -> Option<String> {
        self.config.model.clone()
    }

    /// Working directory the CLI is (or will be) launched in.
    fn get_working_directory(&self) -> &str {
        &self.config.working_directory
    }

    /// Hard-interrupt the CLI by killing the child process.
    ///
    /// Unlike `stop`, this errors when nothing is running, and it keeps
    /// `session_id`/`mcp_config_file` so a resume may still be possible.
    async fn interrupt(&mut self) -> Result<(), String> {
        if let Some(mut process) = self.process.take() {
            let _ = process.kill();
            let _ = process.wait();
            self.stdin = None;
            self.is_running.store(false, Ordering::SeqCst);
            Ok(())
        } else {
            Err("No active process to interrupt".to_string())
        }
    }
}
|
||||
|
||||
fn handle_stdout(
|
||||
stdout: std::process::ChildStdout,
|
||||
callback: Arc<StreamCallback>,
|
||||
is_running: Arc<AtomicBool>,
|
||||
) {
|
||||
let reader = BufReader::new(stdout);
|
||||
|
||||
for line in reader.lines() {
|
||||
match line {
|
||||
Ok(line) if !line.is_empty() => {
|
||||
if let Err(e) = process_json_line(&line, &callback) {
|
||||
eprintln!("Error processing line: {}", e);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Error reading stdout: {}", e);
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
is_running.store(false, Ordering::SeqCst);
|
||||
callback(ProviderStreamEvent::Disconnected);
|
||||
}
|
||||
|
||||
fn handle_stderr(stderr: std::process::ChildStderr, callback: Arc<StreamCallback>) {
|
||||
let reader = BufReader::new(stderr);
|
||||
|
||||
for line in reader.lines() {
|
||||
match line {
|
||||
Ok(line) if !line.is_empty() => {
|
||||
callback(ProviderStreamEvent::Error { message: line });
|
||||
}
|
||||
Err(_) => break,
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn process_json_line(line: &str, callback: &StreamCallback) -> Result<(), String> {
|
||||
let message: serde_json::Value = serde_json::from_str(line)
|
||||
.map_err(|e| format!("Failed to parse JSON: {} - Line: {}", e, line))?;
|
||||
|
||||
let msg_type = message.get("type").and_then(|t| t.as_str()).unwrap_or("");
|
||||
|
||||
match msg_type {
|
||||
"system" => {
|
||||
let subtype = message
|
||||
.get("subtype")
|
||||
.and_then(|s| s.as_str())
|
||||
.unwrap_or("");
|
||||
if subtype == "init" {
|
||||
let session_id = message
|
||||
.get("session_id")
|
||||
.and_then(|s| s.as_str())
|
||||
.map(|s| s.to_string());
|
||||
callback(ProviderStreamEvent::Connected { session_id });
|
||||
|
||||
if let Some(cwd) = message.get("cwd").and_then(|c| c.as_str()) {
|
||||
callback(ProviderStreamEvent::WorkingDirectory {
|
||||
path: cwd.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
"assistant" => {
|
||||
if let Some(msg) = message.get("message") {
|
||||
if let Some(content) = msg.get("content").and_then(|c| c.as_array()) {
|
||||
for block in content {
|
||||
let block_type = block.get("type").and_then(|t| t.as_str()).unwrap_or("");
|
||||
match block_type {
|
||||
"text" => {
|
||||
if let Some(text) = block.get("text").and_then(|t| t.as_str()) {
|
||||
callback(ProviderStreamEvent::TextDelta {
|
||||
text: text.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
"thinking" => {
|
||||
if let Some(thinking) =
|
||||
block.get("thinking").and_then(|t| t.as_str())
|
||||
{
|
||||
callback(ProviderStreamEvent::ThinkingDelta {
|
||||
text: thinking.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
"tool_use" => {
|
||||
let id = block
|
||||
.get("id")
|
||||
.and_then(|i| i.as_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let name = block
|
||||
.get("name")
|
||||
.and_then(|n| n.as_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let input = block
|
||||
.get("input")
|
||||
.cloned()
|
||||
.unwrap_or(serde_json::Value::Null);
|
||||
|
||||
callback(ProviderStreamEvent::ToolUseStart {
|
||||
id: id.clone(),
|
||||
name,
|
||||
});
|
||||
callback(ProviderStreamEvent::ToolUseEnd { id, input });
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract usage if available
|
||||
let usage = msg.get("usage").and_then(|u| {
|
||||
let input_tokens = u.get("input_tokens").and_then(|t| t.as_u64())?;
|
||||
let output_tokens = u.get("output_tokens").and_then(|t| t.as_u64())?;
|
||||
let model = msg
|
||||
.get("model")
|
||||
.and_then(|m| m.as_str())
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
Some(crate::providers::ProviderUsage {
|
||||
input_tokens,
|
||||
output_tokens,
|
||||
model,
|
||||
})
|
||||
});
|
||||
|
||||
callback(ProviderStreamEvent::MessageComplete {
|
||||
content: vec![],
|
||||
usage,
|
||||
stop_reason: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
"result" => {
|
||||
let subtype = message
|
||||
.get("subtype")
|
||||
.and_then(|s| s.as_str())
|
||||
.unwrap_or("");
|
||||
|
||||
// Handle permission denials
|
||||
if let Some(denials) = message
|
||||
.get("permission_denials")
|
||||
.and_then(|d| d.as_array())
|
||||
{
|
||||
for denial in denials {
|
||||
let tool_name = denial
|
||||
.get("tool_name")
|
||||
.and_then(|t| t.as_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
// Check if this is an AskUserQuestion
|
||||
if tool_name == "AskUserQuestion" {
|
||||
if let Some(tool_input) = denial.get("tool_input") {
|
||||
if let Some(questions) =
|
||||
tool_input.get("questions").and_then(|q| q.as_array())
|
||||
{
|
||||
if let Some(first_question) = questions.first() {
|
||||
let question = first_question
|
||||
.get("question")
|
||||
.and_then(|q| q.as_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
let header = first_question
|
||||
.get("header")
|
||||
.and_then(|h| h.as_str())
|
||||
.map(|s| s.to_string());
|
||||
|
||||
let multi_select = first_question
|
||||
.get("multiSelect")
|
||||
.and_then(|m| m.as_bool())
|
||||
.unwrap_or(false);
|
||||
|
||||
let options: Vec<QuestionOption> = first_question
|
||||
.get("options")
|
||||
.and_then(|opts| opts.as_array())
|
||||
.map(|opts| {
|
||||
opts.iter()
|
||||
.filter_map(|opt| {
|
||||
let label = opt
|
||||
.get("label")
|
||||
.and_then(|l| l.as_str())?;
|
||||
let description = opt
|
||||
.get("description")
|
||||
.and_then(|d| d.as_str())
|
||||
.map(|s| s.to_string());
|
||||
Some(QuestionOption {
|
||||
label: label.to_string(),
|
||||
description,
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let id = denial
|
||||
.get("tool_use_id")
|
||||
.and_then(|i| i.as_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
callback(ProviderStreamEvent::Question {
|
||||
id,
|
||||
question,
|
||||
header,
|
||||
options,
|
||||
multi_select,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let id = denial
|
||||
.get("tool_use_id")
|
||||
.and_then(|i| i.as_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let tool_input = denial
|
||||
.get("tool_input")
|
||||
.cloned()
|
||||
.unwrap_or(serde_json::Value::Null);
|
||||
let description = format_tool_description(&tool_name, &tool_input);
|
||||
|
||||
callback(ProviderStreamEvent::PermissionRequest {
|
||||
id,
|
||||
tool_name,
|
||||
tool_input,
|
||||
description,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if subtype != "success" {
|
||||
if let Some(result) = message.get("result").and_then(|r| r.as_str()) {
|
||||
callback(ProviderStreamEvent::Error {
|
||||
message: result.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn format_tool_description(name: &str, input: &serde_json::Value) -> String {
|
||||
match name {
|
||||
"Read" => {
|
||||
if let Some(path) = input.get("file_path").and_then(|v| v.as_str()) {
|
||||
format!("Reading file: {}", path)
|
||||
} else {
|
||||
"Reading file...".to_string()
|
||||
}
|
||||
}
|
||||
"Glob" => {
|
||||
if let Some(pattern) = input.get("pattern").and_then(|v| v.as_str()) {
|
||||
format!("Searching for files: {}", pattern)
|
||||
} else {
|
||||
"Searching for files...".to_string()
|
||||
}
|
||||
}
|
||||
"Grep" => {
|
||||
if let Some(pattern) = input.get("pattern").and_then(|v| v.as_str()) {
|
||||
format!("Searching for: {}", pattern)
|
||||
} else {
|
||||
"Searching in files...".to_string()
|
||||
}
|
||||
}
|
||||
"Edit" | "Write" => {
|
||||
if let Some(path) = input.get("file_path").and_then(|v| v.as_str()) {
|
||||
format!("Editing: {}", path)
|
||||
} else {
|
||||
"Editing file...".to_string()
|
||||
}
|
||||
}
|
||||
"Bash" => {
|
||||
if let Some(cmd) = input.get("command").and_then(|v| v.as_str()) {
|
||||
let truncated = if cmd.len() > 50 {
|
||||
format!("{}...", &cmd[..50])
|
||||
} else {
|
||||
cmd.to_string()
|
||||
};
|
||||
format!("Running: {}", truncated)
|
||||
} else {
|
||||
"Running command...".to_string()
|
||||
}
|
||||
}
|
||||
_ => format!("Using tool: {}", name),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // Unit tests for the Claude CLI provider's non-I/O surface:
    // metadata accessors, capability flags, and tool-description formatting.
    use super::*;

    // `name()` reports the fixed provider display name.
    #[test]
    fn test_provider_name() {
        let config = ProviderConfig::default();
        let provider = ClaudeCliProvider::new(config);
        assert_eq!(provider.name(), "Claude CLI");
    }

    // All capability flags are expected to be enabled for Claude CLI.
    #[test]
    fn test_capabilities() {
        let config = ProviderConfig::default();
        let provider = ClaudeCliProvider::new(config);
        let caps = provider.capabilities();

        assert!(caps.supports_streaming);
        assert!(caps.supports_tools);
        assert!(caps.supports_vision);
        assert!(caps.supports_thinking);
        assert!(caps.supports_mcp);
        assert!(caps.supports_resume_session);
        assert_eq!(caps.max_context_tokens, Some(200_000));
        assert!(!caps.available_models.is_empty());
    }

    // A freshly constructed provider has not started a session yet.
    #[test]
    fn test_is_running_initial() {
        let config = ProviderConfig::default();
        let provider = ClaudeCliProvider::new(config);
        assert!(!provider.is_running());
    }

    // The working directory comes straight from the config.
    #[test]
    fn test_get_working_directory() {
        let config = ProviderConfig {
            working_directory: "/home/test".to_string(),
            ..Default::default()
        };
        let provider = ClaudeCliProvider::new(config);
        assert_eq!(provider.get_working_directory(), "/home/test");
    }

    // The model name is passed through unchanged from the config.
    #[test]
    fn test_get_model() {
        let config = ProviderConfig {
            model: Some("claude-sonnet-4-20250514".to_string()),
            ..Default::default()
        };
        let provider = ClaudeCliProvider::new(config);
        assert_eq!(
            provider.get_model(),
            Some("claude-sonnet-4-20250514".to_string())
        );
    }

    // Spot-checks the per-tool description strings for Read/Glob/Bash.
    #[test]
    fn test_format_tool_description() {
        let input = serde_json::json!({"file_path": "/test/file.rs"});
        assert_eq!(
            format_tool_description("Read", &input),
            "Reading file: /test/file.rs"
        );

        let input = serde_json::json!({"pattern": "*.rs"});
        assert_eq!(
            format_tool_description("Glob", &input),
            "Searching for files: *.rs"
        );

        let input = serde_json::json!({"command": "ls -la"});
        assert_eq!(format_tool_description("Bash", &input), "Running: ls -la");
    }
}
|
||||
@@ -0,0 +1,148 @@
|
||||
mod claude_cli;
|
||||
mod ollama;
|
||||
mod traits;
|
||||
|
||||
// Re-exports for when providers are fully integrated
|
||||
#[allow(unused_imports)]
|
||||
pub use claude_cli::ClaudeCliProvider;
|
||||
#[allow(unused_imports)]
|
||||
pub use ollama::OllamaProvider;
|
||||
#[allow(unused_imports)]
|
||||
pub use traits::{
|
||||
LlmProvider, ModelInfo, ProviderCapabilities, ProviderConfig, ProviderMessage,
|
||||
ProviderStreamEvent, ProviderUsage, QuestionOption, StreamCallback,
|
||||
};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Identifies which LLM backend the application should use.
///
/// Serialized as snake_case strings (e.g. `"claude_cli"`) so the value
/// round-trips through settings/JSON.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum ProviderType {
    /// Claude Code CLI subprocess — the default provider.
    #[default]
    ClaudeCli,
    /// Locally running Ollama HTTP server.
    Ollama,
    /// Direct OpenAI API.
    OpenAi,
    /// Direct Anthropic API.
    Anthropic,
    /// Direct Google Gemini API.
    Gemini,
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl ProviderType {
|
||||
pub fn display_name(&self) -> &'static str {
|
||||
match self {
|
||||
ProviderType::ClaudeCli => "Claude CLI",
|
||||
ProviderType::Ollama => "Ollama (Local)",
|
||||
ProviderType::OpenAi => "OpenAI API",
|
||||
ProviderType::Anthropic => "Anthropic API",
|
||||
ProviderType::Gemini => "Google Gemini",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn description(&self) -> &'static str {
|
||||
match self {
|
||||
ProviderType::ClaudeCli => "Use Claude Code CLI for AI assistance",
|
||||
ProviderType::Ollama => "Use locally running Ollama models",
|
||||
ProviderType::OpenAi => "Direct OpenAI API access (GPT-4, etc.)",
|
||||
ProviderType::Anthropic => "Direct Anthropic API access (Claude models)",
|
||||
ProviderType::Gemini => "Direct Google Gemini API access",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn requires_api_key(&self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
ProviderType::OpenAi | ProviderType::Anthropic | ProviderType::Gemini
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Note: The new providers (OpenAI, Anthropic, Gemini) are implemented directly
|
||||
// in provider_bridge.rs using the Bridge pattern rather than the LlmProvider trait.
|
||||
// This simplifies the architecture while still providing full functionality.
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn create_provider(
|
||||
provider_type: ProviderType,
|
||||
config: ProviderConfig,
|
||||
) -> Box<dyn LlmProvider> {
|
||||
match provider_type {
|
||||
ProviderType::ClaudeCli => Box::new(ClaudeCliProvider::new(config)),
|
||||
ProviderType::Ollama => Box::new(OllamaProvider::new(config)),
|
||||
// The new API-based providers are handled in provider_bridge.rs
|
||||
ProviderType::OpenAi | ProviderType::Anthropic | ProviderType::Gemini => {
|
||||
// These providers use the Bridge pattern in provider_bridge.rs
|
||||
// Fall back to Claude CLI for trait-based usage
|
||||
Box::new(ClaudeCliProvider::new(config))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // Tests for `ProviderType`: display names, default variant,
    // serde round-tripping, and the API-key requirement matrix.
    use super::*;

    #[test]
    fn test_provider_type_display_name() {
        assert_eq!(ProviderType::ClaudeCli.display_name(), "Claude CLI");
        assert_eq!(ProviderType::Ollama.display_name(), "Ollama (Local)");
        assert_eq!(ProviderType::OpenAi.display_name(), "OpenAI API");
        assert_eq!(ProviderType::Anthropic.display_name(), "Anthropic API");
        assert_eq!(ProviderType::Gemini.display_name(), "Google Gemini");
    }

    // `#[default]` on the enum selects ClaudeCli.
    #[test]
    fn test_provider_type_default() {
        let default: ProviderType = Default::default();
        assert_eq!(default, ProviderType::ClaudeCli);
    }

    // Variants serialize to their snake_case names (rename_all attribute).
    #[test]
    fn test_provider_type_serialization() {
        let claude = ProviderType::ClaudeCli;
        let json = serde_json::to_string(&claude).unwrap();
        assert_eq!(json, "\"claude_cli\"");

        let ollama = ProviderType::Ollama;
        let json = serde_json::to_string(&ollama).unwrap();
        assert_eq!(json, "\"ollama\"");

        let openai = ProviderType::OpenAi;
        let json = serde_json::to_string(&openai).unwrap();
        assert_eq!(json, "\"open_ai\"");

        let anthropic = ProviderType::Anthropic;
        let json = serde_json::to_string(&anthropic).unwrap();
        assert_eq!(json, "\"anthropic\"");

        let gemini = ProviderType::Gemini;
        let json = serde_json::to_string(&gemini).unwrap();
        assert_eq!(json, "\"gemini\"");
    }

    // Deserialization accepts the same snake_case strings.
    #[test]
    fn test_provider_type_deserialization() {
        let claude: ProviderType = serde_json::from_str("\"claude_cli\"").unwrap();
        assert_eq!(claude, ProviderType::ClaudeCli);

        let ollama: ProviderType = serde_json::from_str("\"ollama\"").unwrap();
        assert_eq!(ollama, ProviderType::Ollama);

        let openai: ProviderType = serde_json::from_str("\"open_ai\"").unwrap();
        assert_eq!(openai, ProviderType::OpenAi);

        let anthropic: ProviderType = serde_json::from_str("\"anthropic\"").unwrap();
        assert_eq!(anthropic, ProviderType::Anthropic);

        let gemini: ProviderType = serde_json::from_str("\"gemini\"").unwrap();
        assert_eq!(gemini, ProviderType::Gemini);
    }

    // Only the hosted API providers require an API key.
    #[test]
    fn test_provider_type_requires_api_key() {
        assert!(!ProviderType::ClaudeCli.requires_api_key());
        assert!(!ProviderType::Ollama.requires_api_key());
        assert!(ProviderType::OpenAi.requires_api_key());
        assert!(ProviderType::Anthropic.requires_api_key());
        assert!(ProviderType::Gemini.requires_api_key());
    }
}
|
||||
@@ -0,0 +1,537 @@
|
||||
// This provider connects to a local Ollama instance for LLM inference.
|
||||
// It will be actively used once providers are fully integrated with BridgeManager.
|
||||
#![allow(dead_code)]
|
||||
|
||||
use async_trait::async_trait;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
use crate::providers::traits::{
|
||||
LlmProvider, ModelInfo, ProviderCapabilities, ProviderConfig, ProviderMessage,
|
||||
ProviderStreamEvent, ProviderUsage, StreamCallback,
|
||||
};
|
||||
|
||||
const DEFAULT_OLLAMA_URL: &str = "http://localhost:11434";
|
||||
|
||||
/// Request body for Ollama's `/api/chat` endpoint.
#[derive(Debug, Serialize)]
struct OllamaChatRequest {
    /// Model identifier as known to Ollama (e.g. "llama3.2").
    model: String,
    /// Full conversation transcript; sent in full on every request.
    messages: Vec<OllamaMessage>,
    /// When true, Ollama streams newline-delimited JSON chunks.
    stream: bool,
    /// Optional system prompt; omitted from the JSON when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    system: Option<String>,
}
|
||||
|
||||
/// A single chat turn in Ollama's wire format.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct OllamaMessage {
    /// "user" or "assistant" (as used by this provider).
    role: String,
    content: String,
}
|
||||
|
||||
/// One streamed JSON chunk from `/api/chat`. All fields are optional
/// because intermediate chunks carry text while the final (`done`)
/// chunk carries the token counts.
#[derive(Debug, Deserialize)]
struct OllamaChatResponse {
    #[serde(default)]
    message: Option<OllamaResponseMessage>,
    /// True on the terminal chunk of a response.
    #[serde(default)]
    done: bool,
    /// Output token count; present on the final chunk.
    #[serde(default)]
    eval_count: Option<u64>,
    /// Input (prompt) token count; present on the final chunk.
    #[serde(default)]
    prompt_eval_count: Option<u64>,
}
|
||||
|
||||
/// Message payload inside a streamed chat chunk.
#[derive(Debug, Deserialize)]
struct OllamaResponseMessage {
    /// Incremental text for this chunk; may be empty.
    #[serde(default)]
    content: String,
}
|
||||
|
||||
/// Response shape of Ollama's `/api/tags` (installed models) endpoint.
#[derive(Debug, Deserialize)]
struct OllamaTagsResponse {
    models: Vec<OllamaModelInfo>,
}
|
||||
|
||||
/// A single installed model as reported by `/api/tags`.
#[derive(Debug, Deserialize)]
struct OllamaModelInfo {
    name: String,
    #[serde(default)]
    details: Option<OllamaModelDetails>,
}
|
||||
|
||||
/// Optional metadata about an installed model, used to build a
/// human-readable description.
#[derive(Debug, Deserialize)]
struct OllamaModelDetails {
    /// e.g. "7B" — formatted as "{size} parameters" in descriptions.
    #[serde(default)]
    parameter_size: Option<String>,
    /// Model family name (e.g. "llama").
    #[serde(default)]
    family: Option<String>,
}
|
||||
|
||||
/// LLM provider backed by a local Ollama HTTP server.
pub struct OllamaProvider {
    config: ProviderConfig,
    /// Reused HTTP client for all requests.
    client: reqwest::Client,
    /// Base URL of the Ollama server; defaults to http://localhost:11434.
    base_url: String,
    /// Set while a session is active; Arc so it can be read across tasks.
    is_running: Arc<AtomicBool>,
    /// Full transcript re-sent with every chat request (the HTTP API
    /// keeps no server-side conversation state).
    conversation_history: Vec<OllamaMessage>,
    /// Sender used to cancel an in-flight streaming request.
    cancel_tx: Option<mpsc::Sender<()>>,
}
|
||||
|
||||
impl OllamaProvider {
    /// Creates a provider; uses `config.api_base_url` when set, otherwise
    /// the standard local Ollama address.
    pub fn new(config: ProviderConfig) -> Self {
        let base_url = config
            .api_base_url
            .clone()
            .unwrap_or_else(|| DEFAULT_OLLAMA_URL.to_string());

        Self {
            config,
            client: reqwest::Client::new(),
            base_url,
            is_running: Arc::new(AtomicBool::new(false)),
            conversation_history: Vec::new(),
            cancel_tx: None,
        }
    }

    /// Queries `/api/tags` for installed models; on any network or parse
    /// failure, falls back to the hard-coded `default_ollama_models()` list.
    async fn fetch_available_models(&self) -> Vec<ModelInfo> {
        let url = format!("{}/api/tags", self.base_url);

        match self.client.get(&url).send().await {
            Ok(response) => {
                if let Ok(tags) = response.json::<OllamaTagsResponse>().await {
                    tags.models
                        .into_iter()
                        .map(|m| {
                            // Build "family - N parameters" when details exist,
                            // or the generic "Local model" placeholder.
                            let description = m.details.as_ref().map(|d| {
                                let mut desc_parts = Vec::new();
                                if let Some(ref family) = d.family {
                                    desc_parts.push(family.clone());
                                }
                                if let Some(ref size) = d.parameter_size {
                                    desc_parts.push(format!("{} parameters", size));
                                }
                                if desc_parts.is_empty() {
                                    "Local model".to_string()
                                } else {
                                    desc_parts.join(" - ")
                                }
                            });

                            ModelInfo {
                                id: m.name.clone(),
                                name: m.name,
                                description,
                                context_window: None,
                                input_cost_per_mtok: Some(0.0), // Local = free!
                                output_cost_per_mtok: Some(0.0),
                            }
                        })
                        .collect()
                } else {
                    default_ollama_models()
                }
            }
            Err(_) => default_ollama_models(),
        }
    }

    /// Streams one chat completion from `/api/chat`, emitting `TextDelta`
    /// events per chunk and a final `MessageComplete` with token usage.
    /// A message on `cancel_rx` aborts the stream with `Disconnected`.
    /// Assumes the caller has already pushed the user's message onto
    /// `conversation_history`.
    async fn stream_chat(
        &mut self,
        callback: Arc<StreamCallback>,
        cancel_rx: mpsc::Receiver<()>,
    ) -> Result<(), String> {
        let model = self
            .config
            .model
            .clone()
            .unwrap_or_else(|| "llama3.2".to_string());

        let request = OllamaChatRequest {
            model: model.clone(),
            messages: self.conversation_history.clone(),
            stream: true,
            system: self.config.custom_instructions.clone(),
        };

        let url = format!("{}/api/chat", self.base_url);

        let response = self
            .client
            .post(&url)
            .json(&request)
            .send()
            .await
            .map_err(|e| format!("Failed to connect to Ollama: {}", e))?;

        if !response.status().is_success() {
            let status = response.status();
            let error_text = response
                .text()
                .await
                .unwrap_or_else(|_| "Unknown error".to_string());
            return Err(format!("Ollama error ({}): {}", status, error_text));
        }

        let mut full_response = String::new();
        let mut total_input_tokens: u64 = 0;
        let mut total_output_tokens: u64 = 0;

        let mut stream = response.bytes_stream();
        let mut cancel_rx = cancel_rx;

        use futures_util::StreamExt;

        loop {
            tokio::select! {
                // Cancellation wins over pending stream data.
                _ = cancel_rx.recv() => {
                    callback(ProviderStreamEvent::Disconnected);
                    return Ok(());
                }
                chunk = stream.next() => {
                    match chunk {
                        Some(Ok(bytes)) => {
                            let text = String::from_utf8_lossy(&bytes);

                            // Ollama streams newline-delimited JSON objects.
                            // NOTE(review): lines are parsed per network chunk;
                            // a JSON object split across two chunks fails to
                            // parse and is silently skipped below — consider
                            // buffering partial lines across chunks.
                            for line in text.lines() {
                                if line.trim().is_empty() {
                                    continue;
                                }

                                if let Ok(response) = serde_json::from_str::<OllamaChatResponse>(line) {
                                    if let Some(msg) = &response.message {
                                        if !msg.content.is_empty() {
                                            full_response.push_str(&msg.content);
                                            callback(ProviderStreamEvent::TextDelta {
                                                text: msg.content.clone(),
                                            });
                                        }
                                    }

                                    // Token counts appear on the final chunk;
                                    // keep whatever we saw last.
                                    if let Some(tokens) = response.prompt_eval_count {
                                        total_input_tokens = tokens;
                                    }
                                    if let Some(tokens) = response.eval_count {
                                        total_output_tokens = tokens;
                                    }

                                    if response.done {
                                        // Record the assistant turn so the next
                                        // request carries the full transcript.
                                        self.conversation_history.push(OllamaMessage {
                                            role: "assistant".to_string(),
                                            content: full_response.clone(),
                                        });

                                        callback(ProviderStreamEvent::MessageComplete {
                                            content: vec![ProviderMessage::Text {
                                                content: full_response,
                                            }],
                                            usage: Some(ProviderUsage {
                                                input_tokens: total_input_tokens,
                                                output_tokens: total_output_tokens,
                                                model,
                                            }),
                                            stop_reason: Some("end_turn".to_string()),
                                        });

                                        return Ok(());
                                    }
                                }
                            }
                        }
                        Some(Err(e)) => {
                            return Err(format!("Stream error: {}", e));
                        }
                        None => {
                            // Stream ended without a `done` chunk.
                            break;
                        }
                    }
                }
            }
        }

        Ok(())
    }
}
|
||||
|
||||
#[async_trait]
impl LlmProvider for OllamaProvider {
    fn name(&self) -> &'static str {
        "Ollama"
    }

    /// Ollama supports plain streamed text only: no tools, MCP, thinking,
    /// or session resume. Context window varies per model, so it is None.
    fn capabilities(&self) -> ProviderCapabilities {
        ProviderCapabilities {
            supports_streaming: true,
            supports_tools: false, // Ollama doesn't support tools natively yet
            supports_vision: true, // Some models support vision
            supports_thinking: false,
            supports_mcp: false,
            supports_resume_session: false,
            max_context_tokens: None, // Varies by model
            available_models: default_ollama_models(),
        }
    }

    /// Verifies the server is reachable, resets conversation state, and
    /// emits `Connected`. Fails if already running or unreachable.
    async fn start(&mut self, callback: StreamCallback) -> Result<(), String> {
        if self.is_running.load(Ordering::SeqCst) {
            return Err("Provider already running".to_string());
        }

        // Check if Ollama is reachable
        let url = format!("{}/api/tags", self.base_url);
        self.client.get(&url).send().await.map_err(|e| {
            format!(
                "Cannot connect to Ollama at {}. Is it running? Error: {}",
                self.base_url, e
            )
        })?;

        self.is_running.store(true, Ordering::SeqCst);
        self.conversation_history.clear();

        // Create cancellation channel
        // NOTE(review): the receiver is dropped immediately here, so any
        // later send on `cancel_tx` will fail and cancellation is
        // effectively a no-op until a receiver is wired into stream_chat.
        let (tx, _rx) = mpsc::channel(1);
        self.cancel_tx = Some(tx);

        callback(ProviderStreamEvent::Connected { session_id: None });

        // Fetch and report available models
        let models = self.fetch_available_models().await;
        if !models.is_empty() {
            eprintln!(
                "[Ollama] Available models: {:?}",
                models.iter().map(|m| &m.id).collect::<Vec<_>>()
            );
        }

        Ok(())
    }

    /// Stops the session: clears state and signals any pending cancel.
    async fn stop(&mut self) -> Result<(), String> {
        self.is_running.store(false, Ordering::SeqCst);
        self.conversation_history.clear();

        if let Some(tx) = self.cancel_tx.take() {
            let _ = tx.send(()).await;
        }

        Ok(())
    }

    fn is_running(&self) -> bool {
        self.is_running.load(Ordering::SeqCst)
    }

    /// Records the user message but cannot yet stream a reply: the trait
    /// provides no callback here, so this intentionally returns Err.
    async fn send_message(&mut self, message: &str) -> Result<(), String> {
        if !self.is_running.load(Ordering::SeqCst) {
            return Err("Provider not running".to_string());
        }

        // Add user message to history
        self.conversation_history.push(OllamaMessage {
            role: "user".to_string(),
            content: message.to_string(),
        });

        // Create a new cancel channel for this request
        // NOTE(review): as in start(), the receiver is dropped immediately.
        let (tx, _rx) = mpsc::channel(1);
        self.cancel_tx = Some(tx);

        // We need a callback here, but we don't have access to it in send_message
        // This is a limitation of the current trait design
        // For now, we'll need to refactor to handle streaming properly
        // The callback should be stored from the start() call

        // For the MVP, we'll emit events directly
        // In a real implementation, we'd need to restructure this

        Err("send_message needs refactoring to work with stored callback".to_string())
    }

    /// Always fails: Ollama has no native tool-use protocol.
    async fn send_tool_result(
        &mut self,
        _tool_use_id: &str,
        _result: serde_json::Value,
        _is_error: bool,
    ) -> Result<(), String> {
        // Ollama doesn't support tool use natively
        Err("Ollama does not support tool use".to_string())
    }

    fn get_model(&self) -> Option<String> {
        self.config.model.clone()
    }

    fn get_working_directory(&self) -> &str {
        &self.config.working_directory
    }

    /// Signals cancellation (best-effort) and marks the provider stopped.
    async fn interrupt(&mut self) -> Result<(), String> {
        if let Some(tx) = self.cancel_tx.take() {
            let _ = tx.send(()).await;
        }
        self.is_running.store(false, Ordering::SeqCst);
        Ok(())
    }
}
|
||||
|
||||
fn default_ollama_models() -> Vec<ModelInfo> {
|
||||
vec![
|
||||
ModelInfo {
|
||||
id: "llama3.2".to_string(),
|
||||
name: "Llama 3.2".to_string(),
|
||||
description: Some("Meta's latest compact model".to_string()),
|
||||
context_window: Some(128_000),
|
||||
input_cost_per_mtok: Some(0.0),
|
||||
output_cost_per_mtok: Some(0.0),
|
||||
},
|
||||
ModelInfo {
|
||||
id: "llama3.2:1b".to_string(),
|
||||
name: "Llama 3.2 1B".to_string(),
|
||||
description: Some("Smallest Llama 3.2 variant".to_string()),
|
||||
context_window: Some(128_000),
|
||||
input_cost_per_mtok: Some(0.0),
|
||||
output_cost_per_mtok: Some(0.0),
|
||||
},
|
||||
ModelInfo {
|
||||
id: "qwen2.5-coder".to_string(),
|
||||
name: "Qwen 2.5 Coder".to_string(),
|
||||
description: Some("Alibaba's coding-focused model".to_string()),
|
||||
context_window: Some(32_000),
|
||||
input_cost_per_mtok: Some(0.0),
|
||||
output_cost_per_mtok: Some(0.0),
|
||||
},
|
||||
ModelInfo {
|
||||
id: "deepseek-coder-v2".to_string(),
|
||||
name: "DeepSeek Coder V2".to_string(),
|
||||
description: Some("DeepSeek's coding model".to_string()),
|
||||
context_window: Some(128_000),
|
||||
input_cost_per_mtok: Some(0.0),
|
||||
output_cost_per_mtok: Some(0.0),
|
||||
},
|
||||
ModelInfo {
|
||||
id: "mistral".to_string(),
|
||||
name: "Mistral 7B".to_string(),
|
||||
description: Some("Fast and capable".to_string()),
|
||||
context_window: Some(32_000),
|
||||
input_cost_per_mtok: Some(0.0),
|
||||
output_cost_per_mtok: Some(0.0),
|
||||
},
|
||||
ModelInfo {
|
||||
id: "gemma2".to_string(),
|
||||
name: "Gemma 2".to_string(),
|
||||
description: Some("Google's open model".to_string()),
|
||||
context_window: Some(8_000),
|
||||
input_cost_per_mtok: Some(0.0),
|
||||
output_cost_per_mtok: Some(0.0),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // Tests for the Ollama provider: construction, capabilities,
    // the default model list, and serde of the wire-format structs.
    use super::*;

    #[test]
    fn test_provider_name() {
        let config = ProviderConfig::default();
        let provider = OllamaProvider::new(config);
        assert_eq!(provider.name(), "Ollama");
    }

    // Ollama streams text but has no tools/MCP/session-resume support.
    #[test]
    fn test_capabilities() {
        let config = ProviderConfig::default();
        let provider = OllamaProvider::new(config);
        let caps = provider.capabilities();

        assert!(caps.supports_streaming);
        assert!(!caps.supports_tools);
        assert!(!caps.supports_mcp);
        assert!(!caps.supports_resume_session);
        assert!(!caps.available_models.is_empty());
    }

    // Without an api_base_url the provider targets localhost:11434.
    #[test]
    fn test_default_url() {
        let config = ProviderConfig::default();
        let provider = OllamaProvider::new(config);
        assert_eq!(provider.base_url, DEFAULT_OLLAMA_URL);
    }

    // An explicit api_base_url overrides the default.
    #[test]
    fn test_custom_url() {
        let config = ProviderConfig {
            api_base_url: Some("http://custom:8080".to_string()),
            ..Default::default()
        };
        let provider = OllamaProvider::new(config);
        assert_eq!(provider.base_url, "http://custom:8080");
    }

    #[test]
    fn test_is_running_initial() {
        let config = ProviderConfig::default();
        let provider = OllamaProvider::new(config);
        assert!(!provider.is_running());
    }

    #[test]
    fn test_default_ollama_models() {
        let models = default_ollama_models();
        assert!(!models.is_empty());

        // All models should be free (local)
        for model in &models {
            assert_eq!(model.input_cost_per_mtok, Some(0.0));
            assert_eq!(model.output_cost_per_mtok, Some(0.0));
        }

        // Should include llama
        assert!(models.iter().any(|m| m.id.contains("llama")));
    }

    #[test]
    fn test_ollama_message_serialization() {
        let msg = OllamaMessage {
            role: "user".to_string(),
            content: "Hello!".to_string(),
        };

        let json = serde_json::to_string(&msg).unwrap();
        assert!(json.contains("\"role\":\"user\""));
        assert!(json.contains("\"content\":\"Hello!\""));
    }

    #[test]
    fn test_chat_request_serialization() {
        let request = OllamaChatRequest {
            model: "llama3.2".to_string(),
            messages: vec![OllamaMessage {
                role: "user".to_string(),
                content: "Test".to_string(),
            }],
            stream: true,
            system: Some("You are helpful".to_string()),
        };

        let json = serde_json::to_string(&request).unwrap();
        assert!(json.contains("\"model\":\"llama3.2\""));
        assert!(json.contains("\"stream\":true"));
        assert!(json.contains("\"system\":\"You are helpful\""));
    }

    // skip_serializing_if should drop the system field entirely.
    #[test]
    fn test_chat_request_without_system() {
        let request = OllamaChatRequest {
            model: "llama3.2".to_string(),
            messages: vec![],
            stream: true,
            system: None,
        };

        let json = serde_json::to_string(&request).unwrap();
        // system should be omitted when None
        assert!(!json.contains("\"system\""));
    }
}
|
||||
@@ -0,0 +1,236 @@
|
||||
// These types are all used by the provider abstraction layer.
|
||||
// They will be actively used once providers are fully integrated with BridgeManager.
|
||||
#![allow(dead_code)]
|
||||
|
||||
use async_trait::async_trait;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Provider-agnostic configuration handed to every `LlmProvider`.
/// Individual providers read only the fields they understand.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderConfig {
    /// API key for hosted providers; None for Claude CLI / local Ollama.
    pub api_key: Option<String>,
    /// Override for the provider's endpoint (e.g. a custom Ollama URL).
    pub api_base_url: Option<String>,
    /// Model identifier; each provider applies its own default when None.
    pub model: Option<String>,
    /// System prompt / custom instructions, when supported.
    pub custom_instructions: Option<String>,
    pub working_directory: String,
    /// MCP server configuration as a JSON string — presumably passed
    /// through to the Claude CLI; confirm the expected format with callers.
    pub mcp_servers_json: Option<String>,
    pub allowed_tools: Vec<String>,
    /// Free-form provider-specific options.
    #[serde(default)]
    pub extra_options: HashMap<String, serde_json::Value>,
}
|
||||
|
||||
// Manual impl (rather than #[derive(Default)]) keeps the field-by-field
// defaults explicit; every field is its type's own default.
impl Default for ProviderConfig {
    fn default() -> Self {
        Self {
            api_key: None,
            api_base_url: None,
            model: None,
            custom_instructions: None,
            working_directory: String::new(),
            mcp_servers_json: None,
            allowed_tools: Vec::new(),
            extra_options: HashMap::new(),
        }
    }
}
|
||||
|
||||
/// Feature flags a provider advertises so the UI can enable/disable
/// functionality per backend.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderCapabilities {
    pub supports_streaming: bool,
    /// Whether the provider can execute tool calls.
    pub supports_tools: bool,
    pub supports_vision: bool,
    /// Whether the provider emits thinking/reasoning deltas.
    pub supports_thinking: bool,
    /// Whether MCP servers can be attached.
    pub supports_mcp: bool,
    /// Whether a previous session can be resumed by id.
    pub supports_resume_session: bool,
    /// Context window size; None when it varies by model.
    pub max_context_tokens: Option<u64>,
    pub available_models: Vec<ModelInfo>,
}
|
||||
|
||||
/// Describes one selectable model offered by a provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelInfo {
    /// Identifier sent to the provider's API.
    pub id: String,
    /// Human-readable display name.
    pub name: String,
    pub description: Option<String>,
    pub context_window: Option<u64>,
    /// Cost per million input tokens; Some(0.0) for local models.
    pub input_cost_per_mtok: Option<f64>,
    /// Cost per million output tokens; Some(0.0) for local models.
    pub output_cost_per_mtok: Option<f64>,
}
|
||||
|
||||
/// A content block within a completed message. Serialized as a tagged
/// union with a "type" discriminator.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ProviderMessage {
    /// Plain assistant text.
    #[serde(rename = "text")]
    Text { content: String },
    /// A tool invocation requested by the model.
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
    },
    /// The result of a tool invocation, keyed back by `tool_use_id`.
    #[serde(rename = "tool_result")]
    ToolResult {
        tool_use_id: String,
        content: String,
        is_error: bool,
    },
    /// Model reasoning/thinking text.
    #[serde(rename = "thinking")]
    Thinking { content: String },
    /// Inline image; `data` is presumably base64 — confirm with producers.
    #[serde(rename = "image")]
    Image { media_type: String, data: String },
}
|
||||
|
||||
/// Token accounting for one completed message.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderUsage {
    pub input_tokens: u64,
    pub output_tokens: u64,
    /// Model that produced the message.
    pub model: String,
}
|
||||
|
||||
/// Events emitted by a provider during a streaming session, delivered
/// through `StreamCallback`. Serialized as a tagged union ("type").
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ProviderStreamEvent {
    /// Session established; `session_id` is set when the backend supports
    /// resumable sessions.
    #[serde(rename = "connected")]
    Connected { session_id: Option<String> },
    /// Incremental assistant text.
    #[serde(rename = "text_delta")]
    TextDelta { text: String },
    /// Incremental reasoning/thinking text.
    #[serde(rename = "thinking_delta")]
    ThinkingDelta { text: String },
    /// A tool invocation has begun.
    #[serde(rename = "tool_use_start")]
    ToolUseStart { id: String, name: String },
    /// Incremental JSON for a tool's input.
    #[serde(rename = "tool_use_delta")]
    ToolUseDelta { id: String, input_delta: String },
    /// Tool invocation finished; `input` is the complete argument object.
    #[serde(rename = "tool_use_end")]
    ToolUseEnd { id: String, input: serde_json::Value },
    /// The assistant's turn is complete.
    #[serde(rename = "message_complete")]
    MessageComplete {
        content: Vec<ProviderMessage>,
        usage: Option<ProviderUsage>,
        stop_reason: Option<String>,
    },
    /// The provider needs user approval to run a tool.
    #[serde(rename = "permission_request")]
    PermissionRequest {
        id: String,
        tool_name: String,
        tool_input: serde_json::Value,
        description: String,
    },
    /// The model is asking the user a multiple-choice question
    /// (e.g. Claude's AskUserQuestion tool).
    #[serde(rename = "question")]
    Question {
        id: String,
        question: String,
        header: Option<String>,
        options: Vec<QuestionOption>,
        multi_select: bool,
    },
    /// The session's working directory was reported or changed.
    #[serde(rename = "working_directory")]
    WorkingDirectory { path: String },
    /// A provider-level error message.
    #[serde(rename = "error")]
    Error { message: String },
    /// The session ended or was cancelled.
    #[serde(rename = "disconnected")]
    Disconnected,
}
|
||||
|
||||
/// One selectable answer in a `Question` event.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QuestionOption {
    pub label: String,
    pub description: Option<String>,
}
|
||||
|
||||
/// Callback invoked for each `ProviderStreamEvent` during a session.
pub type StreamCallback = Box<dyn Fn(ProviderStreamEvent) + Send + Sync>;
|
||||
|
||||
/// Common interface every chat backend (Claude CLI, Ollama, ...) implements.
#[async_trait]
pub trait LlmProvider: Send + Sync {
    /// Short human-readable provider name (e.g. "Ollama").
    fn name(&self) -> &'static str;

    /// Static feature flags and model catalog for this backend.
    fn capabilities(&self) -> ProviderCapabilities;

    /// Starts a session; `callback` receives all subsequent stream events.
    async fn start(&mut self, callback: StreamCallback) -> Result<(), String>;

    /// Ends the session and releases any resources.
    async fn stop(&mut self) -> Result<(), String>;

    /// True between a successful start() and stop()/interrupt().
    fn is_running(&self) -> bool;

    /// Sends a user message into the active session.
    async fn send_message(&mut self, message: &str) -> Result<(), String>;

    /// Returns a tool's output to the model, keyed by `tool_use_id`.
    /// Providers without tool support return Err.
    async fn send_tool_result(
        &mut self,
        tool_use_id: &str,
        result: serde_json::Value,
        is_error: bool,
    ) -> Result<(), String>;

    /// Currently configured model, if any.
    fn get_model(&self) -> Option<String>;

    fn get_working_directory(&self) -> &str;

    /// Cancels any in-flight request without necessarily ending the session.
    async fn interrupt(&mut self) -> Result<(), String>;
}
|
||||
|
||||
// Serialization/shape tests for the provider data types.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_provider_config_default() {
        // Default config carries no credentials, model, or tool allowances.
        let config = ProviderConfig::default();
        assert!(config.api_key.is_none());
        assert!(config.api_base_url.is_none());
        assert!(config.model.is_none());
        assert!(config.working_directory.is_empty());
        assert!(config.allowed_tools.is_empty());
    }

    #[test]
    fn test_provider_message_serialization() {
        // Messages serialize with an external "type" tag.
        let msg = ProviderMessage::Text {
            content: "Hello!".to_string(),
        };
        let json = serde_json::to_string(&msg).unwrap();
        assert!(json.contains("\"type\":\"text\""));
        assert!(json.contains("\"content\":\"Hello!\""));
    }

    #[test]
    fn test_provider_stream_event_serialization() {
        let event = ProviderStreamEvent::TextDelta {
            text: "chunk".to_string(),
        };
        let json = serde_json::to_string(&event).unwrap();
        assert!(json.contains("\"type\":\"text_delta\""));

        let event = ProviderStreamEvent::Connected {
            session_id: Some("test-123".to_string()),
        };
        let json = serde_json::to_string(&event).unwrap();
        assert!(json.contains("\"session_id\":\"test-123\""));
    }

    #[test]
    fn test_model_info() {
        let model = ModelInfo {
            id: "claude-sonnet-4-20250514".to_string(),
            name: "Claude Sonnet 4".to_string(),
            description: Some("Fast and intelligent".to_string()),
            context_window: Some(200000),
            input_cost_per_mtok: Some(3.0),
            output_cost_per_mtok: Some(15.0),
        };

        let json = serde_json::to_string(&model).unwrap();
        assert!(json.contains("claude-sonnet-4"));
        assert!(json.contains("200000"));
    }

    #[test]
    fn test_provider_usage() {
        let usage = ProviderUsage {
            input_tokens: 100,
            output_tokens: 50,
            model: "claude-opus-4-20250514".to_string(),
        };

        let json = serde_json::to_string(&usage).unwrap();
        assert!(json.contains("\"input_tokens\":100"));
        assert!(json.contains("\"output_tokens\":50"));
    }
}
|
||||
@@ -0,0 +1,373 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::AppHandle;
|
||||
use tauri_plugin_store::StoreExt;
|
||||
|
||||
// Key under which the quick-action list is stored in the Tauri store file.
const QUICK_ACTIONS_STORE_KEY: &str = "quick_actions";

/// A reusable one-click prompt shown in the app's quick-action UI.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QuickAction {
    pub id: String,
    /// Display name shown in the UI.
    pub name: String,
    /// Prompt text sent to the assistant when triggered.
    pub prompt: String,
    /// Icon identifier (lucide-style name, e.g. "play") — see defaults below.
    pub icon: String,
    // Built-in actions are protected: they cannot be deleted and their flag
    // is preserved on update (see delete_quick_action / save_quick_action).
    pub is_default: bool,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
|
||||
|
||||
/// Built-in quick actions shipped with the app.
///
/// All entries share one timestamp, have ids prefixed "default-", and are
/// flagged `is_default` so they are protected from deletion. Missing defaults
/// are re-injected on load (see `load_all_quick_actions`).
fn get_default_quick_actions() -> Vec<QuickAction> {
    let now = Utc::now();
    vec![
        QuickAction {
            id: "default-review-pr".to_string(),
            name: "Review PR".to_string(),
            prompt: "Please review this pull request and provide feedback on code quality, potential issues, and suggestions for improvement.".to_string(),
            icon: "git-pull-request".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        QuickAction {
            id: "default-run-tests".to_string(),
            name: "Run Tests".to_string(),
            prompt: "Please run the test suite for this project and report any failures or issues.".to_string(),
            icon: "play".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        QuickAction {
            id: "default-explain-file".to_string(),
            name: "Explain File".to_string(),
            prompt: "Please explain what this file does, its purpose, and how it fits into the overall project structure.".to_string(),
            icon: "file-text".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        QuickAction {
            id: "default-fix-error".to_string(),
            name: "Fix Error".to_string(),
            prompt: "I'm getting an error. Can you help me identify the cause and fix it?".to_string(),
            icon: "alert-circle".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        QuickAction {
            id: "default-write-tests".to_string(),
            name: "Write Tests".to_string(),
            prompt: "Please write comprehensive unit tests for the current code with good coverage.".to_string(),
            icon: "check-square".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        QuickAction {
            id: "default-refactor".to_string(),
            name: "Refactor".to_string(),
            prompt: "Please refactor this code to improve readability, maintainability, and performance.".to_string(),
            icon: "refresh-cw".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
    ]
}
|
||||
|
||||
fn load_all_quick_actions(app: &AppHandle) -> Result<Vec<QuickAction>, String> {
|
||||
let store = app
|
||||
.store("hikari-quick-actions.json")
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
match store.get(QUICK_ACTIONS_STORE_KEY) {
|
||||
Some(value) => {
|
||||
let mut actions: Vec<QuickAction> =
|
||||
serde_json::from_value(value.clone()).map_err(|e| e.to_string())?;
|
||||
|
||||
let defaults = get_default_quick_actions();
|
||||
for default in defaults {
|
||||
if !actions.iter().any(|a| a.id == default.id) {
|
||||
actions.push(default);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(actions)
|
||||
}
|
||||
None => Ok(get_default_quick_actions()),
|
||||
}
|
||||
}
|
||||
|
||||
fn save_all_quick_actions(app: &AppHandle, actions: &[QuickAction]) -> Result<(), String> {
|
||||
let store = app
|
||||
.store("hikari-quick-actions.json")
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let value = serde_json::to_value(actions).map_err(|e| e.to_string())?;
|
||||
store.set(QUICK_ACTIONS_STORE_KEY, value);
|
||||
store.save().map_err(|e| e.to_string())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn list_quick_actions(app: AppHandle) -> Result<Vec<QuickAction>, String> {
|
||||
let mut actions = load_all_quick_actions(&app)?;
|
||||
|
||||
actions.sort_by(|a, b| {
|
||||
let default_cmp = b.is_default.cmp(&a.is_default);
|
||||
if default_cmp == std::cmp::Ordering::Equal {
|
||||
a.name.cmp(&b.name)
|
||||
} else {
|
||||
default_cmp
|
||||
}
|
||||
});
|
||||
|
||||
Ok(actions)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_quick_action(app: AppHandle, action: QuickAction) -> Result<(), String> {
|
||||
let mut actions = load_all_quick_actions(&app)?;
|
||||
|
||||
if let Some(existing) = actions.iter_mut().find(|a| a.id == action.id) {
|
||||
let mut updated = action;
|
||||
updated.is_default = existing.is_default;
|
||||
*existing = updated;
|
||||
} else {
|
||||
actions.push(action);
|
||||
}
|
||||
|
||||
save_all_quick_actions(&app, &actions)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_quick_action(app: AppHandle, action_id: String) -> Result<(), String> {
|
||||
let mut actions = load_all_quick_actions(&app)?;
|
||||
|
||||
if actions
|
||||
.iter()
|
||||
.any(|a| a.id == action_id && a.is_default)
|
||||
{
|
||||
return Err("Cannot delete default quick actions".to_string());
|
||||
}
|
||||
|
||||
actions.retain(|a| a.id != action_id);
|
||||
save_all_quick_actions(&app, &actions)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn reset_default_quick_actions(app: AppHandle) -> Result<(), String> {
|
||||
let mut actions = load_all_quick_actions(&app)?;
|
||||
|
||||
actions.retain(|a| !a.is_default);
|
||||
actions.extend(get_default_quick_actions());
|
||||
|
||||
save_all_quick_actions(&app, &actions)
|
||||
}
|
||||
|
||||
// Unit tests for quick-action defaults, serialization, and the sorting /
// retention logic mirrored from the command handlers.
#[cfg(test)]
mod tests {
    use super::*;

    // Builds a minimal action; timestamps are taken at call time.
    fn create_test_action(id: &str, name: &str, is_default: bool) -> QuickAction {
        QuickAction {
            id: id.to_string(),
            name: name.to_string(),
            prompt: "Test prompt".to_string(),
            icon: "star".to_string(),
            is_default,
            created_at: Utc::now(),
            updated_at: Utc::now(),
        }
    }

    #[test]
    fn test_default_quick_actions_exist() {
        let defaults = get_default_quick_actions();
        assert!(!defaults.is_empty());
        assert!(defaults.iter().all(|a| a.is_default));
    }

    #[test]
    fn test_default_quick_actions_have_required_fields() {
        let defaults = get_default_quick_actions();
        for action in defaults {
            assert!(!action.id.is_empty());
            assert!(!action.name.is_empty());
            assert!(!action.prompt.is_empty());
            assert!(!action.icon.is_empty());
        }
    }

    #[test]
    fn test_default_quick_actions_count() {
        let defaults = get_default_quick_actions();
        // Should have 6 default actions
        assert_eq!(defaults.len(), 6);
    }

    #[test]
    fn test_default_quick_actions_have_unique_ids() {
        // sort + dedup leaves fewer elements iff there was a duplicate id.
        let defaults = get_default_quick_actions();
        let mut ids: Vec<&String> = defaults.iter().map(|a| &a.id).collect();
        ids.sort();
        ids.dedup();
        assert_eq!(ids.len(), defaults.len());
    }

    #[test]
    fn test_default_quick_actions_ids_start_with_default() {
        let defaults = get_default_quick_actions();
        assert!(defaults.iter().all(|a| a.id.starts_with("default-")));
    }

    #[test]
    fn test_quick_action_serialization() {
        // Round-trip through JSON must preserve every field.
        let action = create_test_action("test-1", "Test Action", false);
        let json = serde_json::to_string(&action).expect("Failed to serialize");
        let parsed: QuickAction = serde_json::from_str(&json).expect("Failed to deserialize");

        assert_eq!(parsed.id, action.id);
        assert_eq!(parsed.name, action.name);
        assert_eq!(parsed.prompt, action.prompt);
        assert_eq!(parsed.icon, action.icon);
        assert_eq!(parsed.is_default, action.is_default);
    }

    #[test]
    fn test_quick_action_clone() {
        let original = create_test_action("clone-test", "Clone Test", true);
        let cloned = original.clone();

        assert_eq!(original.id, cloned.id);
        assert_eq!(original.name, cloned.name);
        assert_eq!(original.is_default, cloned.is_default);
    }

    #[test]
    #[allow(clippy::useless_vec)]
    fn test_quick_action_sorting_defaults_first() {
        // Mirrors the comparator used by list_quick_actions.
        let mut actions = vec![
            create_test_action("custom-z", "Zebra", false),
            create_test_action("default-a", "Apple", true),
            create_test_action("custom-a", "Alpha", false),
            create_test_action("default-z", "Zulu", true),
        ];

        // Sort by: defaults first, then alphabetically by name
        actions.sort_by(|a, b| {
            let default_cmp = b.is_default.cmp(&a.is_default);
            if default_cmp == std::cmp::Ordering::Equal {
                a.name.cmp(&b.name)
            } else {
                default_cmp
            }
        });

        // Defaults should come first
        assert!(actions[0].is_default);
        assert!(actions[1].is_default);
        assert!(!actions[2].is_default);
        assert!(!actions[3].is_default);

        // Within defaults, alphabetically sorted
        assert_eq!(actions[0].name, "Apple");
        assert_eq!(actions[1].name, "Zulu");

        // Within non-defaults, alphabetically sorted
        assert_eq!(actions[2].name, "Alpha");
        assert_eq!(actions[3].name, "Zebra");
    }

    #[test]
    fn test_known_default_actions() {
        let defaults = get_default_quick_actions();
        let ids: Vec<&str> = defaults.iter().map(|a| a.id.as_str()).collect();

        assert!(ids.contains(&"default-review-pr"));
        assert!(ids.contains(&"default-run-tests"));
        assert!(ids.contains(&"default-explain-file"));
        assert!(ids.contains(&"default-fix-error"));
        assert!(ids.contains(&"default-write-tests"));
        assert!(ids.contains(&"default-refactor"));
    }

    #[test]
    fn test_default_action_icons() {
        let defaults = get_default_quick_actions();
        let icons: Vec<&str> = defaults.iter().map(|a| a.icon.as_str()).collect();

        assert!(icons.contains(&"git-pull-request"));
        assert!(icons.contains(&"play"));
        assert!(icons.contains(&"file-text"));
        assert!(icons.contains(&"alert-circle"));
        assert!(icons.contains(&"check-square"));
        assert!(icons.contains(&"refresh-cw"));
    }

    #[test]
    fn test_quick_action_prompts_not_empty() {
        let defaults = get_default_quick_actions();
        for action in defaults {
            assert!(
                action.prompt.len() > 10,
                "Prompt should be meaningful: {}",
                action.name
            );
        }
    }

    #[test]
    fn test_quick_action_timestamps() {
        // created_at is taken before updated_at in the helper, so <= holds.
        let action = create_test_action("time-test", "Time Test", false);
        assert!(action.created_at <= action.updated_at);
    }

    #[test]
    fn test_default_actions_have_same_timestamps() {
        let defaults = get_default_quick_actions();
        // All defaults are created at the same instant
        let first_created = defaults[0].created_at;
        let first_updated = defaults[0].updated_at;

        for action in &defaults {
            assert_eq!(action.created_at, first_created);
            assert_eq!(action.updated_at, first_updated);
        }
    }

    #[test]
    fn test_action_retain_non_default() {
        let mut actions = vec![
            create_test_action("default-1", "Default 1", true),
            create_test_action("custom-1", "Custom 1", false),
            create_test_action("default-2", "Default 2", true),
            create_test_action("custom-2", "Custom 2", false),
        ];

        // Mimics reset_default_quick_actions behavior (retain non-defaults)
        actions.retain(|a| !a.is_default);

        assert_eq!(actions.len(), 2);
        assert!(actions.iter().all(|a| !a.is_default));
    }

    #[test]
    #[allow(clippy::useless_vec)]
    fn test_action_find_by_id() {
        let actions = vec![
            create_test_action("action-1", "First", false),
            create_test_action("action-2", "Second", false),
            create_test_action("action-3", "Third", false),
        ];

        let found = actions.iter().find(|a| a.id == "action-2");
        assert!(found.is_some());
        assert_eq!(found.unwrap().name, "Second");

        let not_found = actions.iter().find(|a| a.id == "action-999");
        assert!(not_found.is_none());
    }
}
|
||||
@@ -0,0 +1,374 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::AppHandle;
|
||||
use tauri_plugin_store::StoreExt;
|
||||
|
||||
// Key under which the session list is stored in the Tauri store file.
const SESSIONS_STORE_KEY: &str = "sessions";

/// A persisted chat session, including its full message transcript.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SavedSession {
    pub id: String,
    pub name: String,
    pub created_at: DateTime<Utc>,
    /// Drives most-recent-first ordering in session lists.
    pub last_activity_at: DateTime<Utc>,
    pub working_directory: String,
    // NOTE(review): maintained by callers; not derived from messages.len()
    // here — verify producers keep it in sync.
    pub message_count: usize,
    pub preview: String, // First ~100 chars of conversation for preview
    pub messages: Vec<SavedMessage>,
}
|
||||
|
||||
/// A single persisted message within a [`SavedSession`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SavedMessage {
    pub id: String,
    // Serialized as "type"; values seen in tests: "user", "assistant", "tool".
    #[serde(rename = "type")]
    pub message_type: String,
    pub content: String,
    pub timestamp: DateTime<Utc>,
    /// Set only for tool messages (e.g. Some("Read")).
    pub tool_name: Option<String>,
}
|
||||
|
||||
/// Lightweight session summary for list views — identical to [`SavedSession`]
/// minus the message transcript, so listings stay cheap to serialize.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionListItem {
    pub id: String,
    pub name: String,
    pub created_at: DateTime<Utc>,
    pub last_activity_at: DateTime<Utc>,
    pub working_directory: String,
    pub message_count: usize,
    pub preview: String,
}
|
||||
|
||||
/// Projects a full session down to its list-view summary (drops the
/// message bodies; clones the remaining owned fields).
impl From<&SavedSession> for SessionListItem {
    fn from(session: &SavedSession) -> Self {
        SessionListItem {
            id: session.id.clone(),
            name: session.name.clone(),
            created_at: session.created_at,
            last_activity_at: session.last_activity_at,
            working_directory: session.working_directory.clone(),
            message_count: session.message_count,
            preview: session.preview.clone(),
        }
    }
}
|
||||
|
||||
fn load_all_sessions(app: &AppHandle) -> Result<Vec<SavedSession>, String> {
|
||||
let store = app
|
||||
.store("hikari-sessions.json")
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
match store.get(SESSIONS_STORE_KEY) {
|
||||
Some(value) => serde_json::from_value(value.clone()).map_err(|e| e.to_string()),
|
||||
None => Ok(Vec::new()),
|
||||
}
|
||||
}
|
||||
|
||||
fn save_all_sessions(app: &AppHandle, sessions: &[SavedSession]) -> Result<(), String> {
|
||||
let store = app
|
||||
.store("hikari-sessions.json")
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let value = serde_json::to_value(sessions).map_err(|e| e.to_string())?;
|
||||
store.set(SESSIONS_STORE_KEY, value);
|
||||
store.save().map_err(|e| e.to_string())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn list_sessions(app: AppHandle) -> Result<Vec<SessionListItem>, String> {
|
||||
let sessions = load_all_sessions(&app)?;
|
||||
let mut items: Vec<SessionListItem> = sessions.iter().map(SessionListItem::from).collect();
|
||||
|
||||
// Sort by last activity, most recent first
|
||||
items.sort_by(|a, b| b.last_activity_at.cmp(&a.last_activity_at));
|
||||
|
||||
Ok(items)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_session(app: AppHandle, session: SavedSession) -> Result<(), String> {
|
||||
let mut sessions = load_all_sessions(&app)?;
|
||||
|
||||
// Update existing or add new
|
||||
if let Some(existing) = sessions.iter_mut().find(|s| s.id == session.id) {
|
||||
*existing = session;
|
||||
} else {
|
||||
sessions.push(session);
|
||||
}
|
||||
|
||||
save_all_sessions(&app, &sessions)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn load_session(app: AppHandle, session_id: String) -> Result<Option<SavedSession>, String> {
|
||||
let sessions = load_all_sessions(&app)?;
|
||||
Ok(sessions.into_iter().find(|s| s.id == session_id))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_session(app: AppHandle, session_id: String) -> Result<(), String> {
|
||||
let mut sessions = load_all_sessions(&app)?;
|
||||
sessions.retain(|s| s.id != session_id);
|
||||
save_all_sessions(&app, &sessions)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn search_sessions(app: AppHandle, query: String) -> Result<Vec<SessionListItem>, String> {
|
||||
let sessions = load_all_sessions(&app)?;
|
||||
let query_lower = query.to_lowercase();
|
||||
|
||||
let mut matching: Vec<SessionListItem> = sessions
|
||||
.iter()
|
||||
.filter(|s| {
|
||||
s.name.to_lowercase().contains(&query_lower)
|
||||
|| s.preview.to_lowercase().contains(&query_lower)
|
||||
|| s.working_directory.to_lowercase().contains(&query_lower)
|
||||
|| s.messages
|
||||
.iter()
|
||||
.any(|m| m.content.to_lowercase().contains(&query_lower))
|
||||
})
|
||||
.map(SessionListItem::from)
|
||||
.collect();
|
||||
|
||||
// Sort by last activity, most recent first
|
||||
matching.sort_by(|a, b| b.last_activity_at.cmp(&a.last_activity_at));
|
||||
|
||||
Ok(matching)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn clear_all_sessions(app: AppHandle) -> Result<(), String> {
|
||||
save_all_sessions(&app, &[])
|
||||
}
|
||||
|
||||
// Unit tests for session persistence types: From-projection, JSON
// round-trips, the "type" field rename, and list sorting.
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::TimeZone;

    // Builds a session with fixed metadata and no messages.
    fn create_test_session(id: &str, name: &str) -> SavedSession {
        SavedSession {
            id: id.to_string(),
            name: name.to_string(),
            created_at: Utc::now(),
            last_activity_at: Utc::now(),
            working_directory: "/home/test".to_string(),
            message_count: 5,
            preview: "Hello world".to_string(),
            messages: vec![],
        }
    }

    // Builds a message with the given type tag and no tool name.
    fn create_test_message(id: &str, content: &str, msg_type: &str) -> SavedMessage {
        SavedMessage {
            id: id.to_string(),
            message_type: msg_type.to_string(),
            content: content.to_string(),
            timestamp: Utc::now(),
            tool_name: None,
        }
    }

    #[test]
    fn test_session_list_item_from_saved_session() {
        let session = SavedSession {
            id: "test-id".to_string(),
            name: "Test Session".to_string(),
            created_at: Utc::now(),
            last_activity_at: Utc::now(),
            working_directory: "/home/test".to_string(),
            message_count: 5,
            preview: "Hello world".to_string(),
            messages: vec![],
        };

        let item = SessionListItem::from(&session);
        assert_eq!(item.id, "test-id");
        assert_eq!(item.name, "Test Session");
        assert_eq!(item.message_count, 5);
    }

    #[test]
    fn test_session_list_item_preserves_all_fields() {
        let created = Utc.with_ymd_and_hms(2024, 1, 15, 10, 30, 0).unwrap();
        let last_activity = Utc.with_ymd_and_hms(2024, 1, 15, 14, 45, 0).unwrap();

        let session = SavedSession {
            id: "sess-123".to_string(),
            name: "My Chat".to_string(),
            created_at: created,
            last_activity_at: last_activity,
            working_directory: "/home/naomi/project".to_string(),
            message_count: 42,
            preview: "What is the meaning of life?".to_string(),
            messages: vec![],
        };

        let item = SessionListItem::from(&session);

        assert_eq!(item.id, "sess-123");
        assert_eq!(item.name, "My Chat");
        assert_eq!(item.created_at, created);
        assert_eq!(item.last_activity_at, last_activity);
        assert_eq!(item.working_directory, "/home/naomi/project");
        assert_eq!(item.message_count, 42);
        assert_eq!(item.preview, "What is the meaning of life?");
    }

    #[test]
    fn test_saved_session_serialization() {
        // JSON round-trip must preserve identifying fields.
        let session = create_test_session("test-1", "Test Session");
        let json = serde_json::to_string(&session).expect("Failed to serialize");
        let parsed: SavedSession = serde_json::from_str(&json).expect("Failed to deserialize");

        assert_eq!(parsed.id, session.id);
        assert_eq!(parsed.name, session.name);
        assert_eq!(parsed.working_directory, session.working_directory);
    }

    #[test]
    fn test_saved_message_serialization() {
        let message = create_test_message("msg-1", "Hello!", "user");
        let json = serde_json::to_string(&message).expect("Failed to serialize");
        let parsed: SavedMessage = serde_json::from_str(&json).expect("Failed to deserialize");

        assert_eq!(parsed.id, message.id);
        assert_eq!(parsed.content, message.content);
        assert_eq!(parsed.message_type, "user");
    }

    #[test]
    fn test_saved_message_with_tool_name() {
        let message = SavedMessage {
            id: "msg-tool-1".to_string(),
            message_type: "tool".to_string(),
            content: "File read successfully".to_string(),
            timestamp: Utc::now(),
            tool_name: Some("Read".to_string()),
        };

        let json = serde_json::to_string(&message).expect("Failed to serialize");
        let parsed: SavedMessage = serde_json::from_str(&json).expect("Failed to deserialize");

        assert_eq!(parsed.tool_name, Some("Read".to_string()));
    }

    #[test]
    fn test_session_with_messages_serialization() {
        let mut session = create_test_session("sess-full", "Full Session");
        session.messages = vec![
            create_test_message("msg-1", "Hello!", "user"),
            create_test_message("msg-2", "Hi there!", "assistant"),
            create_test_message("msg-3", "Read file", "tool"),
        ];
        session.message_count = 3;

        let json = serde_json::to_string(&session).expect("Failed to serialize");
        let parsed: SavedSession = serde_json::from_str(&json).expect("Failed to deserialize");

        assert_eq!(parsed.messages.len(), 3);
        assert_eq!(parsed.messages[0].content, "Hello!");
        assert_eq!(parsed.messages[1].message_type, "assistant");
        assert_eq!(parsed.messages[2].message_type, "tool");
    }

    #[test]
    fn test_session_list_item_serialization() {
        let item = SessionListItem {
            id: "list-item-1".to_string(),
            name: "Quick Chat".to_string(),
            created_at: Utc::now(),
            last_activity_at: Utc::now(),
            working_directory: "/tmp".to_string(),
            message_count: 10,
            preview: "Short preview...".to_string(),
        };

        let json = serde_json::to_string(&item).expect("Failed to serialize");
        let parsed: SessionListItem = serde_json::from_str(&json).expect("Failed to deserialize");

        assert_eq!(parsed.id, item.id);
        assert_eq!(parsed.name, item.name);
        assert_eq!(parsed.preview, item.preview);
    }

    #[test]
    fn test_message_type_field_rename() {
        // The message_type field is renamed to "type" in JSON
        let message = create_test_message("msg-1", "Test", "assistant");
        let json = serde_json::to_string(&message).expect("Failed to serialize");

        assert!(json.contains("\"type\":"));
        assert!(!json.contains("\"message_type\":"));
    }

    #[test]
    fn test_session_default_empty_messages() {
        let session = SavedSession {
            id: "empty".to_string(),
            name: "Empty".to_string(),
            created_at: Utc::now(),
            last_activity_at: Utc::now(),
            working_directory: "/".to_string(),
            message_count: 0,
            preview: "".to_string(),
            messages: vec![],
        };

        assert!(session.messages.is_empty());
        assert_eq!(session.message_count, 0);
    }

    #[test]
    #[allow(clippy::useless_vec)]
    fn test_session_sorting_by_activity() {
        let old_time = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
        let new_time = Utc.with_ymd_and_hms(2024, 6, 15, 12, 0, 0).unwrap();

        let mut sessions = vec![
            SessionListItem {
                id: "old".to_string(),
                name: "Old Session".to_string(),
                created_at: old_time,
                last_activity_at: old_time,
                working_directory: "/old".to_string(),
                message_count: 1,
                preview: "Old".to_string(),
            },
            SessionListItem {
                id: "new".to_string(),
                name: "New Session".to_string(),
                created_at: new_time,
                last_activity_at: new_time,
                working_directory: "/new".to_string(),
                message_count: 1,
                preview: "New".to_string(),
            },
        ];

        // Sort by last activity, most recent first (mimics list_sessions behavior)
        sessions.sort_by(|a, b| b.last_activity_at.cmp(&a.last_activity_at));

        assert_eq!(sessions[0].id, "new");
        assert_eq!(sessions[1].id, "old");
    }

    #[test]
    fn test_session_clone() {
        let original = create_test_session("clone-test", "Clone Test");
        let cloned = original.clone();

        assert_eq!(original.id, cloned.id);
        assert_eq!(original.name, cloned.name);
    }

    #[test]
    fn test_message_clone() {
        let original = create_test_message("msg-clone", "Content", "user");
        let cloned = original.clone();

        assert_eq!(original.id, cloned.id);
        assert_eq!(original.content, cloned.content);
    }
}
|
||||
@@ -0,0 +1,439 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::AppHandle;
|
||||
use tauri_plugin_store::StoreExt;
|
||||
|
||||
// Key under which the snippet list is stored in the Tauri store file.
const SNIPPETS_STORE_KEY: &str = "snippets";

/// A reusable prompt snippet, grouped by category in the UI.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Snippet {
    pub id: String,
    /// Display name shown in the snippet picker.
    pub name: String,
    /// Prompt text inserted when the snippet is used.
    pub content: String,
    /// Grouping label (e.g. "Code Review", "Debugging").
    pub category: String,
    // Built-in snippets keep this flag on update (see save_snippet).
    pub is_default: bool,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
|
||||
|
||||
/// Built-in snippets shipped with the app.
///
/// All entries share one timestamp, have ids prefixed "default-", and are
/// flagged `is_default`. Missing defaults are re-injected on load
/// (see `load_all_snippets`).
fn get_default_snippets() -> Vec<Snippet> {
    let now = Utc::now();
    vec![
        Snippet {
            id: "default-explain-code".to_string(),
            name: "Explain this code".to_string(),
            content: "Please explain what this code does, step by step:".to_string(),
            category: "Code Review".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        Snippet {
            id: "default-fix-error".to_string(),
            name: "Fix this error".to_string(),
            content: "I'm getting the following error. Can you help me fix it?".to_string(),
            category: "Debugging".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        Snippet {
            id: "default-write-tests".to_string(),
            name: "Write tests".to_string(),
            content: "Please write unit tests for this code with good coverage:".to_string(),
            category: "Testing".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        Snippet {
            id: "default-refactor".to_string(),
            name: "Refactor for clarity".to_string(),
            content: "Please refactor this code to improve readability and maintainability:".to_string(),
            category: "Code Review".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        Snippet {
            id: "default-optimize".to_string(),
            name: "Optimize performance".to_string(),
            content: "Please analyze this code for performance issues and suggest optimizations:".to_string(),
            category: "Performance".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        Snippet {
            id: "default-review-pr".to_string(),
            name: "Review PR".to_string(),
            content: "Please review this pull request and provide feedback on code quality, potential issues, and suggestions for improvement.".to_string(),
            category: "Code Review".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        Snippet {
            id: "default-add-comments".to_string(),
            name: "Add documentation".to_string(),
            content: "Please add clear documentation comments to this code explaining what it does:".to_string(),
            category: "Documentation".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
        Snippet {
            id: "default-security-review".to_string(),
            name: "Security review".to_string(),
            content: "Please review this code for security vulnerabilities and suggest fixes:".to_string(),
            category: "Security".to_string(),
            is_default: true,
            created_at: now,
            updated_at: now,
        },
    ]
}
|
||||
|
||||
fn load_all_snippets(app: &AppHandle) -> Result<Vec<Snippet>, String> {
|
||||
let store = app
|
||||
.store("hikari-snippets.json")
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
match store.get(SNIPPETS_STORE_KEY) {
|
||||
Some(value) => {
|
||||
let mut snippets: Vec<Snippet> =
|
||||
serde_json::from_value(value.clone()).map_err(|e| e.to_string())?;
|
||||
|
||||
// Ensure default snippets exist (in case new ones were added in an update)
|
||||
let defaults = get_default_snippets();
|
||||
for default in defaults {
|
||||
if !snippets.iter().any(|s| s.id == default.id) {
|
||||
snippets.push(default);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(snippets)
|
||||
}
|
||||
None => Ok(get_default_snippets()),
|
||||
}
|
||||
}
|
||||
|
||||
fn save_all_snippets(app: &AppHandle, snippets: &[Snippet]) -> Result<(), String> {
|
||||
let store = app
|
||||
.store("hikari-snippets.json")
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let value = serde_json::to_value(snippets).map_err(|e| e.to_string())?;
|
||||
store.set(SNIPPETS_STORE_KEY, value);
|
||||
store.save().map_err(|e| e.to_string())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn list_snippets(app: AppHandle) -> Result<Vec<Snippet>, String> {
|
||||
let mut snippets = load_all_snippets(&app)?;
|
||||
|
||||
// Sort by category, then by name
|
||||
snippets.sort_by(|a, b| {
|
||||
let cat_cmp = a.category.cmp(&b.category);
|
||||
if cat_cmp == std::cmp::Ordering::Equal {
|
||||
a.name.cmp(&b.name)
|
||||
} else {
|
||||
cat_cmp
|
||||
}
|
||||
});
|
||||
|
||||
Ok(snippets)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_snippet(app: AppHandle, snippet: Snippet) -> Result<(), String> {
|
||||
let mut snippets = load_all_snippets(&app)?;
|
||||
|
||||
// Update existing or add new
|
||||
if let Some(existing) = snippets.iter_mut().find(|s| s.id == snippet.id) {
|
||||
// Don't allow editing default snippets' is_default flag
|
||||
let mut updated = snippet;
|
||||
updated.is_default = existing.is_default;
|
||||
*existing = updated;
|
||||
} else {
|
||||
snippets.push(snippet);
|
||||
}
|
||||
|
||||
save_all_snippets(&app, &snippets)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_snippet(app: AppHandle, snippet_id: String) -> Result<(), String> {
|
||||
let mut snippets = load_all_snippets(&app)?;
|
||||
|
||||
// Don't allow deleting default snippets
|
||||
if snippets
|
||||
.iter()
|
||||
.any(|s| s.id == snippet_id && s.is_default)
|
||||
{
|
||||
return Err("Cannot delete default snippets".to_string());
|
||||
}
|
||||
|
||||
snippets.retain(|s| s.id != snippet_id);
|
||||
save_all_snippets(&app, &snippets)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_snippet_categories(app: AppHandle) -> Result<Vec<String>, String> {
|
||||
let snippets = load_all_snippets(&app)?;
|
||||
let mut categories: Vec<String> = snippets.iter().map(|s| s.category.clone()).collect();
|
||||
categories.sort();
|
||||
categories.dedup();
|
||||
Ok(categories)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn reset_default_snippets(app: AppHandle) -> Result<(), String> {
|
||||
let mut snippets = load_all_snippets(&app)?;
|
||||
|
||||
// Remove all default snippets
|
||||
snippets.retain(|s| !s.is_default);
|
||||
|
||||
// Add fresh default snippets
|
||||
snippets.extend(get_default_snippets());
|
||||
|
||||
save_all_snippets(&app, &snippets)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Unit tests for the snippet store: default-snippet invariants,
    //! serialization round-trips, and in-memory replicas of the sorting /
    //! filtering logic used by the Tauri commands above (the commands
    //! themselves need an AppHandle, so the pure logic is tested directly).
    use super::*;
    use std::collections::HashSet;

    // Build a minimal snippet with fixed content for use across tests.
    fn create_test_snippet(id: &str, name: &str, category: &str, is_default: bool) -> Snippet {
        Snippet {
            id: id.to_string(),
            name: name.to_string(),
            content: "Test content".to_string(),
            category: category.to_string(),
            is_default,
            created_at: Utc::now(),
            updated_at: Utc::now(),
        }
    }

    #[test]
    fn test_default_snippets_exist() {
        let defaults = get_default_snippets();
        assert!(!defaults.is_empty());
        // Every built-in snippet must carry the is_default marker.
        assert!(defaults.iter().all(|s| s.is_default));
    }

    #[test]
    fn test_default_snippets_have_required_fields() {
        let defaults = get_default_snippets();
        for snippet in defaults {
            assert!(!snippet.id.is_empty());
            assert!(!snippet.name.is_empty());
            assert!(!snippet.content.is_empty());
            assert!(!snippet.category.is_empty());
        }
    }

    #[test]
    fn test_default_snippets_count() {
        let defaults = get_default_snippets();
        // Should have 8 default snippets
        assert_eq!(defaults.len(), 8);
    }

    #[test]
    fn test_default_snippets_have_unique_ids() {
        let defaults = get_default_snippets();
        // Collecting into a set drops duplicates; equal sizes mean no dupes.
        let ids: HashSet<&String> = defaults.iter().map(|s| &s.id).collect();
        assert_eq!(ids.len(), defaults.len());
    }

    #[test]
    fn test_default_snippets_ids_start_with_default() {
        let defaults = get_default_snippets();
        assert!(defaults.iter().all(|s| s.id.starts_with("default-")));
    }

    #[test]
    fn test_snippet_serialization() {
        // Round-trip through serde_json and compare field by field.
        let snippet = create_test_snippet("test-1", "Test Snippet", "Testing", false);
        let json = serde_json::to_string(&snippet).expect("Failed to serialize");
        let parsed: Snippet = serde_json::from_str(&json).expect("Failed to deserialize");

        assert_eq!(parsed.id, snippet.id);
        assert_eq!(parsed.name, snippet.name);
        assert_eq!(parsed.content, snippet.content);
        assert_eq!(parsed.category, snippet.category);
        assert_eq!(parsed.is_default, snippet.is_default);
    }

    #[test]
    fn test_snippet_clone() {
        let original = create_test_snippet("clone-test", "Clone Test", "Category", true);
        let cloned = original.clone();

        assert_eq!(original.id, cloned.id);
        assert_eq!(original.name, cloned.name);
        assert_eq!(original.is_default, cloned.is_default);
    }

    #[test]
    #[allow(clippy::useless_vec)]
    fn test_snippet_sorting_by_category_then_name() {
        let mut snippets = vec![
            create_test_snippet("s1", "Zebra", "B-Category", false),
            create_test_snippet("s2", "Apple", "A-Category", false),
            create_test_snippet("s3", "Banana", "B-Category", false),
            create_test_snippet("s4", "Alpha", "A-Category", false),
        ];

        // Sort by category, then by name (mimics list_snippets behavior)
        snippets.sort_by(|a, b| {
            let cat_cmp = a.category.cmp(&b.category);
            if cat_cmp == std::cmp::Ordering::Equal {
                a.name.cmp(&b.name)
            } else {
                cat_cmp
            }
        });

        // A-Category should come first
        assert_eq!(snippets[0].category, "A-Category");
        assert_eq!(snippets[1].category, "A-Category");
        assert_eq!(snippets[2].category, "B-Category");
        assert_eq!(snippets[3].category, "B-Category");

        // Within categories, alphabetically by name
        assert_eq!(snippets[0].name, "Alpha");
        assert_eq!(snippets[1].name, "Apple");
        assert_eq!(snippets[2].name, "Banana");
        assert_eq!(snippets[3].name, "Zebra");
    }

    #[test]
    fn test_known_default_snippets() {
        // Pin the exact id set so renames/removals are caught in review.
        let defaults = get_default_snippets();
        let ids: Vec<&str> = defaults.iter().map(|s| s.id.as_str()).collect();

        assert!(ids.contains(&"default-explain-code"));
        assert!(ids.contains(&"default-fix-error"));
        assert!(ids.contains(&"default-write-tests"));
        assert!(ids.contains(&"default-refactor"));
        assert!(ids.contains(&"default-optimize"));
        assert!(ids.contains(&"default-review-pr"));
        assert!(ids.contains(&"default-add-comments"));
        assert!(ids.contains(&"default-security-review"));
    }

    #[test]
    fn test_default_snippet_categories() {
        let defaults = get_default_snippets();
        let categories: HashSet<&String> = defaults.iter().map(|s| &s.category).collect();

        assert!(categories.contains(&"Code Review".to_string()));
        assert!(categories.contains(&"Debugging".to_string()));
        assert!(categories.contains(&"Testing".to_string()));
        assert!(categories.contains(&"Performance".to_string()));
        assert!(categories.contains(&"Documentation".to_string()));
        assert!(categories.contains(&"Security".to_string()));
    }

    #[test]
    fn test_snippet_content_not_empty() {
        let defaults = get_default_snippets();
        for snippet in defaults {
            assert!(
                snippet.content.len() > 10,
                "Content should be meaningful: {}",
                snippet.name
            );
        }
    }

    #[test]
    fn test_snippet_timestamps() {
        let snippet = create_test_snippet("time-test", "Time Test", "Cat", false);
        // created_at is captured before updated_at in create_test_snippet.
        assert!(snippet.created_at <= snippet.updated_at);
    }

    #[test]
    fn test_default_snippets_have_same_timestamps() {
        let defaults = get_default_snippets();
        // All defaults are created at the same instant
        let first_created = defaults[0].created_at;
        let first_updated = defaults[0].updated_at;

        for snippet in &defaults {
            assert_eq!(snippet.created_at, first_created);
            assert_eq!(snippet.updated_at, first_updated);
        }
    }

    #[test]
    fn test_snippet_retain_non_default() {
        let mut snippets = vec![
            create_test_snippet("default-1", "Default 1", "Cat", true),
            create_test_snippet("custom-1", "Custom 1", "Cat", false),
            create_test_snippet("default-2", "Default 2", "Cat", true),
            create_test_snippet("custom-2", "Custom 2", "Cat", false),
        ];

        // Mimics reset_default_snippets behavior (retain non-defaults)
        snippets.retain(|s| !s.is_default);

        assert_eq!(snippets.len(), 2);
        assert!(snippets.iter().all(|s| !s.is_default));
    }

    #[test]
    #[allow(clippy::useless_vec)]
    fn test_snippet_find_by_id() {
        let snippets = vec![
            create_test_snippet("snippet-1", "First", "Cat", false),
            create_test_snippet("snippet-2", "Second", "Cat", false),
            create_test_snippet("snippet-3", "Third", "Cat", false),
        ];

        let found = snippets.iter().find(|s| s.id == "snippet-2");
        assert!(found.is_some());
        assert_eq!(found.unwrap().name, "Second");

        let not_found = snippets.iter().find(|s| s.id == "snippet-999");
        assert!(not_found.is_none());
    }

    #[test]
    #[allow(clippy::useless_vec)]
    fn test_extract_categories_sorted_and_deduped() {
        // Mimics get_snippet_categories: sort then dedup the category list.
        let snippets = vec![
            create_test_snippet("s1", "S1", "Zebra", false),
            create_test_snippet("s2", "S2", "Alpha", false),
            create_test_snippet("s3", "S3", "Beta", false),
            create_test_snippet("s4", "S4", "Alpha", false), // Duplicate
        ];

        let mut categories: Vec<String> = snippets.iter().map(|s| s.category.clone()).collect();
        categories.sort();
        categories.dedup();

        assert_eq!(categories.len(), 3);
        assert_eq!(categories[0], "Alpha");
        assert_eq!(categories[1], "Beta");
        assert_eq!(categories[2], "Zebra");
    }

    #[test]
    fn test_snippet_category_code_review_count() {
        let defaults = get_default_snippets();
        let code_review_count = defaults
            .iter()
            .filter(|s| s.category == "Code Review")
            .count();

        // There should be multiple code review snippets
        assert!(code_review_count >= 2);
    }
}
|
||||
@@ -1,7 +1,9 @@
|
||||
use crate::achievements::{check_achievements, AchievementProgress};
|
||||
use chrono::{Local, Timelike};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::time::Instant;
|
||||
use crate::achievements::{AchievementProgress, check_achievements};
|
||||
use tauri_plugin_store::StoreExt;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct UsageStats {
|
||||
@@ -28,6 +30,14 @@ pub struct UsageStats {
|
||||
#[serde(skip)]
|
||||
pub session_start: Option<Instant>,
|
||||
|
||||
// Extended tracking for achievements
|
||||
pub sessions_started: u64,
|
||||
pub consecutive_days: u64,
|
||||
pub total_days_used: u64,
|
||||
pub morning_sessions: u64, // Sessions started before 9 AM
|
||||
pub night_sessions: u64, // Sessions started after 10 PM
|
||||
pub last_session_date: Option<String>, // ISO date string for streak tracking
|
||||
|
||||
// Achievement tracking
|
||||
#[serde(skip)]
|
||||
pub achievements: AchievementProgress,
|
||||
@@ -65,6 +75,47 @@ impl UsageStats {
|
||||
self.session_duration_seconds = 0;
|
||||
self.session_start = Some(Instant::now());
|
||||
self.achievements.start_session();
|
||||
|
||||
// Track session start for achievements
|
||||
self.track_session_start();
|
||||
}
|
||||
|
||||
pub fn track_session_start(&mut self) {
|
||||
let now = Local::now();
|
||||
let today = now.format("%Y-%m-%d").to_string();
|
||||
let hour = now.hour();
|
||||
|
||||
// Increment session count
|
||||
self.sessions_started += 1;
|
||||
|
||||
// Track morning/night sessions
|
||||
if hour < 9 {
|
||||
self.morning_sessions += 1;
|
||||
}
|
||||
if hour >= 22 {
|
||||
self.night_sessions += 1;
|
||||
}
|
||||
|
||||
// Track consecutive days and total days
|
||||
if let Some(last_date) = &self.last_session_date {
|
||||
if last_date != &today {
|
||||
// Check if it's the next day (consecutive)
|
||||
if is_consecutive_day(last_date, &today) {
|
||||
self.consecutive_days += 1;
|
||||
} else {
|
||||
// Streak broken
|
||||
self.consecutive_days = 1;
|
||||
}
|
||||
self.total_days_used += 1;
|
||||
self.last_session_date = Some(today);
|
||||
}
|
||||
// Same day - don't increment anything
|
||||
} else {
|
||||
// First session ever
|
||||
self.consecutive_days = 1;
|
||||
self.total_days_used = 1;
|
||||
self.last_session_date = Some(today);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn increment_messages(&mut self) {
|
||||
@@ -89,7 +140,10 @@ impl UsageStats {
|
||||
|
||||
pub fn increment_tool_usage(&mut self, tool_name: &str) {
|
||||
*self.tools_usage.entry(tool_name.to_string()).or_insert(0) += 1;
|
||||
*self.session_tools_usage.entry(tool_name.to_string()).or_insert(0) += 1;
|
||||
*self
|
||||
.session_tools_usage
|
||||
.entry(tool_name.to_string())
|
||||
.or_insert(0) += 1;
|
||||
}
|
||||
|
||||
pub fn get_session_duration(&mut self) -> u64 {
|
||||
@@ -124,12 +178,34 @@ impl UsageStats {
|
||||
session_tools_usage: self.session_tools_usage.clone(),
|
||||
session_duration_seconds: self.session_duration_seconds,
|
||||
session_start: self.session_start,
|
||||
sessions_started: self.sessions_started,
|
||||
consecutive_days: self.consecutive_days,
|
||||
total_days_used: self.total_days_used,
|
||||
morning_sessions: self.morning_sessions,
|
||||
night_sessions: self.night_sessions,
|
||||
last_session_date: self.last_session_date.clone(),
|
||||
achievements: AchievementProgress::new(), // Dummy for copy
|
||||
};
|
||||
check_achievements(&stats_copy, &mut self.achievements)
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to check if two dates are consecutive
|
||||
fn is_consecutive_day(prev_date: &str, current_date: &str) -> bool {
|
||||
use chrono::NaiveDate;
|
||||
|
||||
let prev = NaiveDate::parse_from_str(prev_date, "%Y-%m-%d").ok();
|
||||
let current = NaiveDate::parse_from_str(current_date, "%Y-%m-%d").ok();
|
||||
|
||||
match (prev, current) {
|
||||
(Some(p), Some(c)) => {
|
||||
let diff = c.signed_duration_since(p).num_days();
|
||||
diff == 1
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
// Pricing as of January 2025
|
||||
// https://www.anthropic.com/pricing
|
||||
fn calculate_cost(input_tokens: u64, output_tokens: u64, model: &str) -> f64 {
|
||||
@@ -166,6 +242,111 @@ pub struct StatsUpdateEvent {
|
||||
pub stats: UsageStats,
|
||||
}
|
||||
|
||||
/// Serializable struct for persisting only lifetime (total) stats.
/// Session-scoped fields of `UsageStats` are deliberately excluded — they
/// reset on every launch and must not survive a restart.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PersistedStats {
    // Lifetime token totals across all sessions.
    pub total_input_tokens: u64,
    pub total_output_tokens: u64,
    // Accumulated estimated spend in USD.
    pub total_cost_usd: f64,
    pub messages_exchanged: u64,
    pub code_blocks_generated: u64,
    pub files_edited: u64,
    pub files_created: u64,
    // Lifetime per-tool invocation counts, keyed by tool name.
    pub tools_usage: HashMap<String, u64>,
    // Achievement-related counters.
    pub sessions_started: u64,
    pub consecutive_days: u64,
    pub total_days_used: u64,
    pub morning_sessions: u64,
    pub night_sessions: u64,
    // ISO "YYYY-MM-DD" of the most recent session, used for streak tracking.
    pub last_session_date: Option<String>,
}
|
||||
|
||||
impl From<&UsageStats> for PersistedStats {
    /// Snapshot the lifetime fields of `UsageStats` for persistence.
    /// Session counters, `session_start`, and achievement state are
    /// intentionally not copied — only lifetime totals survive a restart.
    fn from(stats: &UsageStats) -> Self {
        PersistedStats {
            total_input_tokens: stats.total_input_tokens,
            total_output_tokens: stats.total_output_tokens,
            total_cost_usd: stats.total_cost_usd,
            messages_exchanged: stats.messages_exchanged,
            code_blocks_generated: stats.code_blocks_generated,
            files_edited: stats.files_edited,
            files_created: stats.files_created,
            tools_usage: stats.tools_usage.clone(),
            sessions_started: stats.sessions_started,
            consecutive_days: stats.consecutive_days,
            total_days_used: stats.total_days_used,
            morning_sessions: stats.morning_sessions,
            night_sessions: stats.night_sessions,
            last_session_date: stats.last_session_date.clone(),
        }
    }
}
|
||||
|
||||
impl UsageStats {
|
||||
/// Apply persisted stats to restore lifetime totals
|
||||
pub fn apply_persisted(&mut self, persisted: PersistedStats) {
|
||||
self.total_input_tokens = persisted.total_input_tokens;
|
||||
self.total_output_tokens = persisted.total_output_tokens;
|
||||
self.total_cost_usd = persisted.total_cost_usd;
|
||||
self.messages_exchanged = persisted.messages_exchanged;
|
||||
self.code_blocks_generated = persisted.code_blocks_generated;
|
||||
self.files_edited = persisted.files_edited;
|
||||
self.files_created = persisted.files_created;
|
||||
self.tools_usage = persisted.tools_usage;
|
||||
self.sessions_started = persisted.sessions_started;
|
||||
self.consecutive_days = persisted.consecutive_days;
|
||||
self.total_days_used = persisted.total_days_used;
|
||||
self.morning_sessions = persisted.morning_sessions;
|
||||
self.night_sessions = persisted.night_sessions;
|
||||
self.last_session_date = persisted.last_session_date;
|
||||
}
|
||||
}
|
||||
|
||||
/// Save lifetime stats to persistent store
|
||||
pub async fn save_stats(app: &tauri::AppHandle, stats: &UsageStats) -> Result<(), String> {
|
||||
let store = app.store("stats.json").map_err(|e| e.to_string())?;
|
||||
|
||||
let persisted = PersistedStats::from(stats);
|
||||
|
||||
println!("Saving stats: {:?}", persisted);
|
||||
|
||||
store.set(
|
||||
"lifetime_stats",
|
||||
serde_json::to_value(persisted).map_err(|e| e.to_string())?,
|
||||
);
|
||||
store.save().map_err(|e| e.to_string())?;
|
||||
|
||||
println!("Stats saved successfully");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Load lifetime stats from persistent store
|
||||
pub async fn load_stats(app: &tauri::AppHandle) -> Option<PersistedStats> {
|
||||
println!("Loading stats from store...");
|
||||
|
||||
let store = match app.store("stats.json") {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
println!("Failed to open stats store: {}", e);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(stats_value) = store.get("lifetime_stats") {
|
||||
println!("Found lifetime stats in store: {:?}", stats_value);
|
||||
if let Ok(persisted) = serde_json::from_value::<PersistedStats>(stats_value.clone()) {
|
||||
println!("Loaded lifetime stats successfully");
|
||||
return Some(persisted);
|
||||
} else {
|
||||
println!("Failed to parse lifetime stats");
|
||||
}
|
||||
} else {
|
||||
println!("No lifetime stats found in store");
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -188,6 +369,36 @@ mod tests {
|
||||
assert!((cost - 0.165).abs() < 0.0001);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cost_calculation_opus_45() {
|
||||
let cost = calculate_cost(1000, 2000, "claude-opus-4-5-20251101");
|
||||
// Same pricing as Opus 4
|
||||
assert!((cost - 0.165).abs() < 0.0001);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cost_calculation_haiku() {
|
||||
let cost = calculate_cost(1000, 2000, "claude-3-5-haiku-20241022");
|
||||
// 1000 input * $1/M = $0.001
|
||||
// 2000 output * $5/M = $0.010
|
||||
// Total = $0.011
|
||||
assert!((cost - 0.011).abs() < 0.0001);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cost_calculation_unknown_defaults_to_sonnet() {
|
||||
let cost = calculate_cost(1000, 2000, "some-unknown-model");
|
||||
// Should default to Sonnet pricing
|
||||
assert!((cost - 0.033).abs() < 0.0001);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cost_calculation_legacy_sonnet() {
|
||||
let cost = calculate_cost(1000, 2000, "claude-3-5-sonnet-20241022");
|
||||
// Same as Sonnet 4 pricing
|
||||
assert!((cost - 0.033).abs() < 0.0001);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_usage_stats_accumulation() {
|
||||
let mut stats = UsageStats::new();
|
||||
@@ -200,6 +411,28 @@ mod tests {
|
||||
assert!((stats.total_cost_usd - 0.033).abs() < 0.0001);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_usage_stats_multiple_accumulations() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.add_usage(1000, 1000, "claude-sonnet-4-20250514");
|
||||
stats.add_usage(500, 500, "claude-sonnet-4-20250514");
|
||||
|
||||
assert_eq!(stats.total_input_tokens, 1500);
|
||||
assert_eq!(stats.total_output_tokens, 1500);
|
||||
assert_eq!(stats.session_input_tokens, 1500);
|
||||
assert_eq!(stats.session_output_tokens, 1500);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_usage_stats_model_updated() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.add_usage(1000, 1000, "claude-sonnet-4-20250514");
|
||||
assert_eq!(stats.model, Some("claude-sonnet-4-20250514".to_string()));
|
||||
|
||||
stats.add_usage(500, 500, "claude-opus-4-20250514");
|
||||
assert_eq!(stats.model, Some("claude-opus-4-20250514".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_session_reset() {
|
||||
let mut stats = UsageStats::new();
|
||||
@@ -213,4 +446,230 @@ mod tests {
|
||||
assert_eq!(stats.session_cost_usd, 0.0);
|
||||
assert!(stats.total_cost_usd > 0.0);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_session_reset_clears_session_stats() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.increment_messages();
|
||||
stats.increment_messages();
|
||||
stats.increment_code_blocks();
|
||||
stats.increment_files_edited();
|
||||
stats.increment_files_created();
|
||||
stats.increment_tool_usage("Read");
|
||||
|
||||
stats.reset_session();
|
||||
|
||||
assert_eq!(stats.session_messages_exchanged, 0);
|
||||
assert_eq!(stats.session_code_blocks_generated, 0);
|
||||
assert_eq!(stats.session_files_edited, 0);
|
||||
assert_eq!(stats.session_files_created, 0);
|
||||
assert!(stats.session_tools_usage.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_increment_messages() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.increment_messages();
|
||||
stats.increment_messages();
|
||||
stats.increment_messages();
|
||||
|
||||
assert_eq!(stats.messages_exchanged, 3);
|
||||
assert_eq!(stats.session_messages_exchanged, 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_increment_code_blocks() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.increment_code_blocks();
|
||||
stats.increment_code_blocks();
|
||||
|
||||
assert_eq!(stats.code_blocks_generated, 2);
|
||||
assert_eq!(stats.session_code_blocks_generated, 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_increment_files_edited() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.increment_files_edited();
|
||||
|
||||
assert_eq!(stats.files_edited, 1);
|
||||
assert_eq!(stats.session_files_edited, 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_increment_files_created() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.increment_files_created();
|
||||
|
||||
assert_eq!(stats.files_created, 1);
|
||||
assert_eq!(stats.session_files_created, 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_increment_tool_usage() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.increment_tool_usage("Read");
|
||||
stats.increment_tool_usage("Read");
|
||||
stats.increment_tool_usage("Write");
|
||||
|
||||
assert_eq!(stats.tools_usage.get("Read"), Some(&2));
|
||||
assert_eq!(stats.tools_usage.get("Write"), Some(&1));
|
||||
assert_eq!(stats.session_tools_usage.get("Read"), Some(&2));
|
||||
assert_eq!(stats.session_tools_usage.get("Write"), Some(&1));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_session_duration_tracking() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.session_start = Some(Instant::now());
|
||||
|
||||
// Verify duration is returned (u64 is always non-negative)
|
||||
let _duration = stats.get_session_duration();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_session_duration_without_start() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.session_start = None;
|
||||
stats.session_duration_seconds = 100;
|
||||
|
||||
// Should return the stored value when no start time
|
||||
let duration = stats.get_session_duration();
|
||||
assert_eq!(duration, 100);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_consecutive_day_true() {
|
||||
assert!(is_consecutive_day("2024-01-15", "2024-01-16"));
|
||||
assert!(is_consecutive_day("2024-12-31", "2025-01-01"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_consecutive_day_false() {
|
||||
assert!(!is_consecutive_day("2024-01-15", "2024-01-15")); // Same day
|
||||
assert!(!is_consecutive_day("2024-01-15", "2024-01-17")); // Gap
|
||||
assert!(!is_consecutive_day("2024-01-15", "2024-01-14")); // Backwards
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_consecutive_day_invalid_dates() {
|
||||
assert!(!is_consecutive_day("invalid", "2024-01-01"));
|
||||
assert!(!is_consecutive_day("2024-01-01", "invalid"));
|
||||
assert!(!is_consecutive_day("invalid", "also-invalid"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_persisted_stats_from_usage_stats() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.total_input_tokens = 5000;
|
||||
stats.total_output_tokens = 10000;
|
||||
stats.total_cost_usd = 1.23;
|
||||
stats.messages_exchanged = 50;
|
||||
stats.sessions_started = 5;
|
||||
stats.consecutive_days = 3;
|
||||
|
||||
let persisted = PersistedStats::from(&stats);
|
||||
|
||||
assert_eq!(persisted.total_input_tokens, 5000);
|
||||
assert_eq!(persisted.total_output_tokens, 10000);
|
||||
assert_eq!(persisted.total_cost_usd, 1.23);
|
||||
assert_eq!(persisted.messages_exchanged, 50);
|
||||
assert_eq!(persisted.sessions_started, 5);
|
||||
assert_eq!(persisted.consecutive_days, 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_persisted_stats() {
|
||||
let persisted = PersistedStats {
|
||||
total_input_tokens: 10000,
|
||||
total_output_tokens: 20000,
|
||||
total_cost_usd: 5.50,
|
||||
messages_exchanged: 100,
|
||||
code_blocks_generated: 25,
|
||||
files_edited: 10,
|
||||
files_created: 5,
|
||||
tools_usage: {
|
||||
let mut map = HashMap::new();
|
||||
map.insert("Read".to_string(), 50);
|
||||
map
|
||||
},
|
||||
sessions_started: 10,
|
||||
consecutive_days: 7,
|
||||
total_days_used: 14,
|
||||
morning_sessions: 3,
|
||||
night_sessions: 2,
|
||||
last_session_date: Some("2024-06-15".to_string()),
|
||||
};
|
||||
|
||||
let mut stats = UsageStats::new();
|
||||
stats.apply_persisted(persisted);
|
||||
|
||||
assert_eq!(stats.total_input_tokens, 10000);
|
||||
assert_eq!(stats.total_output_tokens, 20000);
|
||||
assert_eq!(stats.total_cost_usd, 5.50);
|
||||
assert_eq!(stats.messages_exchanged, 100);
|
||||
assert_eq!(stats.tools_usage.get("Read"), Some(&50));
|
||||
assert_eq!(stats.consecutive_days, 7);
|
||||
assert_eq!(stats.morning_sessions, 3);
|
||||
assert_eq!(stats.last_session_date, Some("2024-06-15".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_usage_stats_default() {
|
||||
let stats = UsageStats::default();
|
||||
|
||||
assert_eq!(stats.total_input_tokens, 0);
|
||||
assert_eq!(stats.total_output_tokens, 0);
|
||||
assert_eq!(stats.total_cost_usd, 0.0);
|
||||
assert!(stats.model.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_persisted_stats_default() {
|
||||
let persisted = PersistedStats::default();
|
||||
|
||||
assert_eq!(persisted.total_input_tokens, 0);
|
||||
assert!(persisted.last_session_date.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_usage_stats_serialization() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.add_usage(1000, 2000, "claude-sonnet-4-20250514");
|
||||
stats.increment_messages();
|
||||
|
||||
// UsageStats should be serializable (for events)
|
||||
let json = serde_json::to_string(&stats).expect("Failed to serialize");
|
||||
assert!(json.contains("total_input_tokens"));
|
||||
assert!(json.contains("1000"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_persisted_stats_serialization() {
|
||||
let persisted = PersistedStats {
|
||||
total_input_tokens: 1234,
|
||||
total_output_tokens: 5678,
|
||||
total_cost_usd: 0.99,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let json = serde_json::to_string(&persisted).expect("Failed to serialize");
|
||||
let parsed: PersistedStats = serde_json::from_str(&json).expect("Failed to deserialize");
|
||||
|
||||
assert_eq!(parsed.total_input_tokens, 1234);
|
||||
assert_eq!(parsed.total_output_tokens, 5678);
|
||||
assert!((parsed.total_cost_usd - 0.99).abs() < 0.0001);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_stats_update_event_serialization() {
|
||||
let mut stats = UsageStats::new();
|
||||
stats.add_usage(100, 200, "claude-sonnet-4-20250514");
|
||||
|
||||
let event = StatsUpdateEvent { stats };
|
||||
let json = serde_json::to_string(&event).expect("Failed to serialize");
|
||||
|
||||
assert!(json.contains("stats"));
|
||||
assert!(json.contains("total_input_tokens"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,426 @@
|
||||
use parking_lot::Mutex;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
use uuid::Uuid;
|
||||
|
||||
const TEMP_DIR_NAME: &str = "hikari-uploads";
|
||||
|
||||
/// Tracks temporary upload files on disk, grouped by conversation id, so
/// they can be cleaned up when a conversation ends or the app shuts down.
pub struct TempFileManager {
    // Root directory (under the OS temp dir) where upload files are written.
    base_dir: PathBuf,
    // conversation id -> paths of temp files created for that conversation.
    files: HashMap<String, Vec<PathBuf>>,
}
|
||||
|
||||
impl TempFileManager {
|
||||
pub fn new() -> Result<Self, String> {
|
||||
let base_dir = std::env::temp_dir().join(TEMP_DIR_NAME);
|
||||
|
||||
if !base_dir.exists() {
|
||||
fs::create_dir_all(&base_dir)
|
||||
.map_err(|e| format!("Failed to create temp directory: {}", e))?;
|
||||
}
|
||||
|
||||
Ok(TempFileManager {
|
||||
base_dir,
|
||||
files: HashMap::new(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the root directory used for temporary upload files.
// dead_code allowed: no caller in this file — presumably kept for tests or
// future use; TODO confirm before removing.
#[allow(dead_code)]
pub fn get_base_dir(&self) -> &Path {
    &self.base_dir
}
|
||||
|
||||
pub fn save_file(
|
||||
&mut self,
|
||||
conversation_id: &str,
|
||||
data: &[u8],
|
||||
original_filename: Option<&str>,
|
||||
) -> Result<PathBuf, String> {
|
||||
let unique_id = Uuid::new_v4();
|
||||
let extension = original_filename
|
||||
.and_then(|name| Path::new(name).extension())
|
||||
.and_then(|ext| ext.to_str())
|
||||
.unwrap_or("bin");
|
||||
|
||||
let filename = format!("{}_{}.{}", conversation_id, unique_id, extension);
|
||||
let file_path = self.base_dir.join(&filename);
|
||||
|
||||
fs::write(&file_path, data)
|
||||
.map_err(|e| format!("Failed to write temp file: {}", e))?;
|
||||
|
||||
self.files
|
||||
.entry(conversation_id.to_string())
|
||||
.or_default()
|
||||
.push(file_path.clone());
|
||||
|
||||
Ok(file_path)
|
||||
}
|
||||
|
||||
pub fn register_file(&mut self, conversation_id: &str, file_path: PathBuf) {
|
||||
self.files
|
||||
.entry(conversation_id.to_string())
|
||||
.or_default()
|
||||
.push(file_path);
|
||||
}
|
||||
|
||||
pub fn get_files_for_conversation(&self, conversation_id: &str) -> Vec<PathBuf> {
|
||||
self.files
|
||||
.get(conversation_id)
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub fn cleanup_conversation(&mut self, conversation_id: &str) -> Result<(), String> {
|
||||
if let Some(files) = self.files.remove(conversation_id) {
|
||||
for file_path in files {
|
||||
if file_path.exists() {
|
||||
if let Err(e) = fs::remove_file(&file_path) {
|
||||
eprintln!(
|
||||
"Warning: Failed to remove temp file {:?}: {}",
|
||||
file_path, e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cleanup_all(&mut self) -> Result<(), String> {
|
||||
let conversation_ids: Vec<String> = self.files.keys().cloned().collect();
|
||||
|
||||
for conversation_id in conversation_ids {
|
||||
self.cleanup_conversation(&conversation_id)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn cleanup_orphaned_files(&mut self) -> Result<usize, String> {
|
||||
let mut cleaned_count = 0;
|
||||
|
||||
if !self.base_dir.exists() {
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
let tracked_files: std::collections::HashSet<PathBuf> =
|
||||
self.files.values().flatten().cloned().collect();
|
||||
|
||||
let entries = fs::read_dir(&self.base_dir)
|
||||
.map_err(|e| format!("Failed to read temp directory: {}", e))?;
|
||||
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if path.is_file() && !tracked_files.contains(&path) {
|
||||
if let Err(e) = fs::remove_file(&path) {
|
||||
eprintln!("Warning: Failed to remove orphaned file {:?}: {}", path, e);
|
||||
} else {
|
||||
cleaned_count += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(cleaned_count)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for TempFileManager {
    // NOTE: panics if the temp directory cannot be created; callers that need
    // to handle that failure should use `TempFileManager::new()` instead.
    fn default() -> Self {
        Self::new().expect("Failed to create TempFileManager")
    }
}
|
||||
|
||||
pub type SharedTempFileManager = Arc<Mutex<TempFileManager>>;
|
||||
|
||||
pub fn create_shared_temp_manager() -> Result<SharedTempFileManager, String> {
|
||||
Ok(Arc::new(Mutex::new(TempFileManager::new()?)))
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    // Unit tests for TempFileManager. Every test except the `new`/`default`/
    // `shared` ones redirects the base directory into a tempfile::TempDir so
    // tests do not touch (or race on) the real shared hikari-uploads folder.
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    // Helper to create a TempFileManager with a custom base directory for testing
    fn create_test_manager(base_dir: PathBuf) -> TempFileManager {
        if !base_dir.exists() {
            fs::create_dir_all(&base_dir).expect("Failed to create test temp dir");
        }
        TempFileManager {
            base_dir,
            files: HashMap::new(),
        }
    }

    // NOTE: uses the real OS temp dir (shared state across test runs).
    #[test]
    fn test_new_creates_base_directory() {
        let manager = TempFileManager::new().expect("Failed to create TempFileManager");
        assert!(manager.base_dir.exists());
    }

    #[test]
    fn test_get_base_dir_returns_correct_path() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let manager = create_test_manager(base_path.clone());

        assert_eq!(manager.get_base_dir(), base_path.as_path());
    }

    #[test]
    fn test_save_file_creates_file_with_content() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        let data = b"Hello, world!";
        let result = manager.save_file("conv-1", data, Some("test.txt"));

        assert!(result.is_ok());
        let file_path = result.unwrap();
        assert!(file_path.exists());

        // Round-trip: the bytes on disk must match what was passed in.
        let content = fs::read(&file_path).expect("Failed to read file");
        assert_eq!(content, data);
    }

    #[test]
    fn test_save_file_uses_correct_extension() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        let data = b"test data";
        let result = manager.save_file("conv-1", data, Some("document.pdf"));

        assert!(result.is_ok());
        let file_path = result.unwrap();
        assert_eq!(file_path.extension().unwrap(), "pdf");
    }

    #[test]
    fn test_save_file_uses_bin_extension_when_no_filename() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        let data = b"binary data";
        let result = manager.save_file("conv-1", data, None);

        assert!(result.is_ok());
        let file_path = result.unwrap();
        // No original filename -> fallback "bin" extension.
        assert_eq!(file_path.extension().unwrap(), "bin");
    }

    #[test]
    fn test_register_file_tracks_file_path() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        // register_file tracks the path without requiring it to exist.
        let file_path = PathBuf::from("/some/path/file.txt");
        manager.register_file("conv-1", file_path.clone());

        let files = manager.get_files_for_conversation("conv-1");
        assert_eq!(files.len(), 1);
        assert_eq!(files[0], file_path);
    }

    #[test]
    fn test_get_files_for_conversation_returns_empty_for_unknown() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let manager = create_test_manager(base_path);

        let files = manager.get_files_for_conversation("unknown-conv");
        assert!(files.is_empty());
    }

    #[test]
    fn test_get_files_for_conversation_returns_all_files() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        let data = b"test";
        manager.save_file("conv-1", data, Some("file1.txt")).unwrap();
        manager.save_file("conv-1", data, Some("file2.txt")).unwrap();
        manager.save_file("conv-2", data, Some("file3.txt")).unwrap();

        // Files must be grouped per conversation, not pooled.
        let files_conv1 = manager.get_files_for_conversation("conv-1");
        let files_conv2 = manager.get_files_for_conversation("conv-2");

        assert_eq!(files_conv1.len(), 2);
        assert_eq!(files_conv2.len(), 1);
    }

    #[test]
    fn test_cleanup_conversation_removes_files() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        let data = b"test";
        let file_path = manager.save_file("conv-1", data, Some("test.txt")).unwrap();
        assert!(file_path.exists());

        // Cleanup removes both the on-disk file and the tracking entry.
        let result = manager.cleanup_conversation("conv-1");
        assert!(result.is_ok());
        assert!(!file_path.exists());
        assert!(manager.get_files_for_conversation("conv-1").is_empty());
    }

    #[test]
    fn test_cleanup_conversation_handles_missing_files() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        // Register a file that doesn't exist
        manager.register_file("conv-1", PathBuf::from("/nonexistent/file.txt"));

        // Should not error, just skip missing files
        let result = manager.cleanup_conversation("conv-1");
        assert!(result.is_ok());
    }

    #[test]
    fn test_cleanup_conversation_for_unknown_returns_ok() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        // Unknown conversation id is a no-op, not an error.
        let result = manager.cleanup_conversation("unknown-conv");
        assert!(result.is_ok());
    }

    #[test]
    fn test_cleanup_all_removes_all_files() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        let data = b"test";
        let file1 = manager.save_file("conv-1", data, Some("f1.txt")).unwrap();
        let file2 = manager.save_file("conv-2", data, Some("f2.txt")).unwrap();

        assert!(file1.exists());
        assert!(file2.exists());

        let result = manager.cleanup_all();
        assert!(result.is_ok());

        // Every conversation's files and tracking entries are gone.
        assert!(!file1.exists());
        assert!(!file2.exists());
        assert!(manager.files.is_empty());
    }

    #[test]
    fn test_cleanup_orphaned_files_removes_untracked() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path.clone());

        // Create a tracked file
        let data = b"tracked";
        let tracked_path = manager.save_file("conv-1", data, Some("tracked.txt")).unwrap();

        // Create an untracked (orphaned) file directly in the temp directory
        let orphan_path = base_path.join("orphan.txt");
        fs::write(&orphan_path, b"orphan").expect("Failed to create orphan file");

        assert!(tracked_path.exists());
        assert!(orphan_path.exists());

        let result = manager.cleanup_orphaned_files();
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), 1); // One orphan removed

        assert!(tracked_path.exists()); // Tracked file still exists
        assert!(!orphan_path.exists()); // Orphan removed
    }

    #[test]
    fn test_cleanup_orphaned_returns_zero_when_none() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        let data = b"test";
        manager.save_file("conv-1", data, Some("test.txt")).unwrap();

        let result = manager.cleanup_orphaned_files();
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), 0);
    }

    #[test]
    fn test_cleanup_orphaned_returns_zero_when_dir_missing() {
        // A nonexistent base dir is treated as "nothing to clean".
        let mut manager = TempFileManager {
            base_dir: PathBuf::from("/nonexistent/dir"),
            files: HashMap::new(),
        };

        let result = manager.cleanup_orphaned_files();
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), 0);
    }

    #[test]
    fn test_default_creates_manager() {
        // Default should work as long as we can create temp directories
        let manager = TempFileManager::default();
        assert!(manager.base_dir.exists());
    }

    #[test]
    fn test_create_shared_temp_manager() {
        let result = create_shared_temp_manager();
        assert!(result.is_ok());

        let shared = result.unwrap();
        let manager = shared.lock();
        assert!(manager.base_dir.exists());
    }

    #[test]
    fn test_multiple_files_same_conversation() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        // Save multiple files to same conversation
        for i in 0..5 {
            let data = format!("content {}", i);
            manager
                .save_file("conv-1", data.as_bytes(), Some(&format!("file{}.txt", i)))
                .unwrap();
        }

        let files = manager.get_files_for_conversation("conv-1");
        assert_eq!(files.len(), 5);

        // Each file should have unique content
        // (also checks insertion order is preserved in tracking).
        for (i, file_path) in files.iter().enumerate() {
            let content = fs::read_to_string(file_path).expect("Failed to read");
            assert_eq!(content, format!("content {}", i));
        }
    }

    #[test]
    fn test_file_paths_contain_conversation_id() {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let base_path = temp_dir.path().join("hikari-test");
        let mut manager = create_test_manager(base_path);

        let file_path = manager
            .save_file("my-conversation-id", b"test", Some("test.txt"))
            .unwrap();

        // File names follow the "<conversation_id>_<uuid>.<ext>" scheme.
        let filename = file_path.file_name().unwrap().to_str().unwrap();
        assert!(filename.starts_with("my-conversation-id_"));
    }
}
|
||||
@@ -0,0 +1,68 @@
|
||||
use tauri::{
|
||||
menu::{Menu, MenuItem},
|
||||
tray::{MouseButton, MouseButtonState, TrayIconBuilder, TrayIconEvent},
|
||||
AppHandle, Manager,
|
||||
};
|
||||
|
||||
use crate::config::HikariConfig;
|
||||
|
||||
pub fn setup_tray(app: &AppHandle) -> tauri::Result<()> {
|
||||
let show_item = MenuItem::with_id(app, "show", "Show Hikari", true, None::<&str>)?;
|
||||
let quit_item = MenuItem::with_id(app, "quit", "Quit", true, None::<&str>)?;
|
||||
|
||||
let menu = Menu::with_items(app, &[&show_item, &quit_item])?;
|
||||
|
||||
let _tray = TrayIconBuilder::with_id("main")
|
||||
.icon(app.default_window_icon().unwrap().clone())
|
||||
.menu(&menu)
|
||||
.tooltip("Hikari - Claude Code Assistant")
|
||||
.on_menu_event(|app, event| match event.id.as_ref() {
|
||||
"show" => {
|
||||
if let Some(window) = app.get_webview_window("main") {
|
||||
let _ = window.show();
|
||||
let _ = window.unminimize();
|
||||
let _ = window.set_focus();
|
||||
}
|
||||
}
|
||||
"quit" => {
|
||||
app.exit(0);
|
||||
}
|
||||
_ => {}
|
||||
})
|
||||
.on_tray_icon_event(|tray, event| {
|
||||
if let TrayIconEvent::Click {
|
||||
button: MouseButton::Left,
|
||||
button_state: MouseButtonState::Up,
|
||||
..
|
||||
} = event
|
||||
{
|
||||
let app = tray.app_handle();
|
||||
if let Some(window) = app.get_webview_window("main") {
|
||||
let _ = window.show();
|
||||
let _ = window.unminimize();
|
||||
let _ = window.set_focus();
|
||||
}
|
||||
}
|
||||
})
|
||||
.build(app)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn should_minimize_to_tray(app: &AppHandle) -> bool {
|
||||
let config_path = app
|
||||
.path()
|
||||
.app_config_dir()
|
||||
.ok()
|
||||
.map(|p| p.join("hikari-config.json"));
|
||||
|
||||
if let Some(path) = config_path {
|
||||
if let Ok(content) = std::fs::read_to_string(&path) {
|
||||
if let Ok(config) = serde_json::from_str::<HikariConfig>(&content) {
|
||||
return config.minimize_to_tray;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
@@ -216,6 +216,24 @@ pub struct WorkingDirectoryEvent {
|
||||
pub conversation_id: Option<String>,
|
||||
}
|
||||
|
||||
/// One selectable answer belonging to a `UserQuestionEvent`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QuestionOption {
    // Text shown for this option.
    pub label: String,
    // Optional longer explanation; omitted from the JSON output when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
|
||||
|
||||
/// Event payload for a question posed to the user with a fixed set of
/// options to choose from.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserQuestionEvent {
    // Identifier for this question — presumably used to correlate the
    // user's answer back to it; confirm against the emitting side.
    pub id: String,
    // The question text itself.
    pub question: String,
    // Optional header displayed with the question (serialized even when None).
    pub header: Option<String>,
    // The selectable answers.
    pub options: Vec<QuestionOption>,
    // Whether the user may select more than one option.
    pub multi_select: bool,
    // Conversation this question belongs to; omitted from JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub conversation_id: Option<String>,
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::process::Command;
|
||||
use std::io::Write;
|
||||
use tempfile::NamedTempFile;
|
||||
use std::process::Command;
|
||||
use tauri::command;
|
||||
use tempfile::NamedTempFile;
|
||||
|
||||
#[command]
|
||||
pub async fn send_vbs_notification(title: String, body: String) -> Result<(), String> {
|
||||
@@ -17,8 +17,8 @@ objShell.Popup "{}" & vbCrLf & vbCrLf & "{}", 5, "{}", 64
|
||||
);
|
||||
|
||||
// Create a temporary VBS file
|
||||
let mut temp_file = NamedTempFile::new()
|
||||
.map_err(|e| format!("Failed to create temp file: {}", e))?;
|
||||
let mut temp_file =
|
||||
NamedTempFile::new().map_err(|e| format!("Failed to create temp file: {}", e))?;
|
||||
|
||||
temp_file
|
||||
.write_all(vbs_content.as_bytes())
|
||||
@@ -40,10 +40,7 @@ objShell.Popup "{}" & vbCrLf & vbCrLf & "{}", 5, "{}", 64
|
||||
} else if temp_path.starts_with("/tmp/") {
|
||||
// WSL temp files might be in a different location
|
||||
// Try to use wslpath to convert
|
||||
let output = Command::new("wslpath")
|
||||
.arg("-w")
|
||||
.arg(&temp_path)
|
||||
.output();
|
||||
let output = Command::new("wslpath").arg("-w").arg(&temp_path).output();
|
||||
|
||||
if let Ok(result) = output {
|
||||
if result.status.success() {
|
||||
@@ -71,4 +68,4 @@ objShell.Popup "{}" & vbCrLf & vbCrLf & "{}", 5, "{}", 64
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ use tauri::command;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
use windows::{
|
||||
core::{HSTRING, Result as WindowsResult},
|
||||
core::{Result as WindowsResult, HSTRING},
|
||||
Data::Xml::Dom::*,
|
||||
UI::Notifications::*,
|
||||
};
|
||||
@@ -38,7 +38,8 @@ fn show_toast_notification(title: &str, body: &str) -> WindowsResult<()> {
|
||||
let toast = ToastNotification::CreateToastNotification(&xml_doc)?;
|
||||
|
||||
// Create a toast notifier with an application ID
|
||||
let notifier = ToastNotificationManager::CreateToastNotifierWithId(&HSTRING::from("Hikari Desktop"))?;
|
||||
let notifier =
|
||||
ToastNotificationManager::CreateToastNotifierWithId(&HSTRING::from("Hikari Desktop"))?;
|
||||
|
||||
// Show the notification
|
||||
notifier.Show(&toast)?;
|
||||
@@ -60,4 +61,4 @@ fn escape_xml(text: &str) -> String {
|
||||
#[command]
|
||||
pub async fn send_windows_toast(_title: String, _body: String) -> Result<(), String> {
|
||||
Err("Windows toast notifications are only available on Windows".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,11 +8,15 @@ use tempfile::NamedTempFile;
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::os::windows::process::CommandExt;
|
||||
|
||||
use crate::config::ClaudeStartOptions;
|
||||
use crate::stats::{UsageStats, StatsUpdateEvent};
|
||||
use parking_lot::RwLock;
|
||||
use crate::types::{CharacterState, ClaudeMessage, ConnectionStatus, ContentBlock, StateChangeEvent, OutputEvent, PermissionPromptEvent, ConnectionEvent, SessionEvent, WorkingDirectoryEvent};
|
||||
use crate::achievements::{get_achievement_info, AchievementUnlockedEvent};
|
||||
use crate::config::ClaudeStartOptions;
|
||||
use crate::stats::{StatsUpdateEvent, UsageStats};
|
||||
use crate::types::{
|
||||
CharacterState, ClaudeMessage, ConnectionEvent, ConnectionStatus, ContentBlock, OutputEvent,
|
||||
PermissionPromptEvent, QuestionOption, SessionEvent, StateChangeEvent, UserQuestionEvent,
|
||||
WorkingDirectoryEvent,
|
||||
};
|
||||
use parking_lot::RwLock;
|
||||
|
||||
const SEARCH_TOOLS: [&str; 5] = ["Read", "Glob", "Grep", "WebSearch", "WebFetch"];
|
||||
const CODING_TOOLS: [&str; 3] = ["Edit", "Write", "NotebookEdit"];
|
||||
@@ -103,26 +107,42 @@ impl WslBridge {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn start(&mut self, app: AppHandle, options: ClaudeStartOptions) -> Result<(), String> {
|
||||
if self.process.is_some() {
|
||||
return Err("Process already running".to_string());
|
||||
}
|
||||
|
||||
// Load saved achievements when starting a new session
|
||||
// Load saved achievements and stats when starting a new session
|
||||
let app_clone = app.clone();
|
||||
let stats = self.stats.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
println!("Loading saved achievements...");
|
||||
let achievements = crate::achievements::load_achievements(&app_clone).await;
|
||||
println!("Loaded {} unlocked achievements", achievements.unlocked.len());
|
||||
stats.write().achievements = achievements;
|
||||
println!(
|
||||
"Loaded {} unlocked achievements",
|
||||
achievements.unlocked.len()
|
||||
);
|
||||
|
||||
println!("Loading saved stats...");
|
||||
let persisted_stats = crate::stats::load_stats(&app_clone).await;
|
||||
|
||||
let mut stats_guard = stats.write();
|
||||
stats_guard.achievements = achievements;
|
||||
|
||||
if let Some(persisted) = persisted_stats {
|
||||
println!("Applying persisted lifetime stats");
|
||||
stats_guard.apply_persisted(persisted);
|
||||
}
|
||||
});
|
||||
|
||||
let working_dir = &options.working_dir;
|
||||
self.working_directory = working_dir.clone();
|
||||
|
||||
emit_connection_status(&app, ConnectionStatus::Connecting, self.conversation_id.clone());
|
||||
emit_connection_status(
|
||||
&app,
|
||||
ConnectionStatus::Connecting,
|
||||
self.conversation_id.clone(),
|
||||
);
|
||||
|
||||
// Create temp file for MCP config if provided
|
||||
let mcp_config_path = if let Some(ref mcp_json) = options.mcp_servers_json {
|
||||
@@ -158,16 +178,19 @@ impl WslBridge {
|
||||
let mut command = if is_wsl {
|
||||
// Running inside WSL - call claude directly
|
||||
// Try to find claude in common locations since GUI apps may not inherit shell PATH
|
||||
let claude_path = find_claude_binary()
|
||||
.ok_or_else(|| "Could not find claude binary. Is Claude Code installed?".to_string())?;
|
||||
let claude_path = find_claude_binary().ok_or_else(|| {
|
||||
"Could not find claude binary. Is Claude Code installed?".to_string()
|
||||
})?;
|
||||
|
||||
eprintln!("[DEBUG] Found claude at: {}", claude_path);
|
||||
eprintln!("[DEBUG] Working dir: {}", working_dir);
|
||||
|
||||
let mut cmd = Command::new(&claude_path);
|
||||
cmd.args([
|
||||
"--output-format", "stream-json",
|
||||
"--input-format", "stream-json",
|
||||
"--output-format",
|
||||
"stream-json",
|
||||
"--input-format",
|
||||
"stream-json",
|
||||
"--verbose",
|
||||
]);
|
||||
|
||||
@@ -195,6 +218,13 @@ impl WslBridge {
|
||||
cmd.args(["--mcp-config", mcp_path]);
|
||||
}
|
||||
|
||||
// Add resume flag if session ID provided
|
||||
if let Some(ref session_id) = options.resume_session_id {
|
||||
if !session_id.is_empty() {
|
||||
cmd.args(["--resume", session_id]);
|
||||
}
|
||||
}
|
||||
|
||||
cmd.current_dir(working_dir);
|
||||
|
||||
// Set API key as environment variable if specified
|
||||
@@ -211,10 +241,7 @@ impl WslBridge {
|
||||
let mut cmd = Command::new("wsl");
|
||||
|
||||
// Build the claude command with all arguments
|
||||
let mut claude_cmd = format!(
|
||||
"cd '{}' && ",
|
||||
working_dir
|
||||
);
|
||||
let mut claude_cmd = format!("cd '{}' && ", working_dir);
|
||||
|
||||
// Set API key as environment variable if specified
|
||||
if let Some(ref api_key) = options.api_key {
|
||||
@@ -223,7 +250,9 @@ impl WslBridge {
|
||||
}
|
||||
}
|
||||
|
||||
claude_cmd.push_str("claude --output-format stream-json --input-format stream-json --verbose");
|
||||
claude_cmd.push_str(
|
||||
"claude --output-format stream-json --input-format stream-json --verbose",
|
||||
);
|
||||
|
||||
// Add model if specified
|
||||
if let Some(ref model) = options.model {
|
||||
@@ -251,6 +280,13 @@ impl WslBridge {
|
||||
claude_cmd.push_str(&format!(" --mcp-config '{}'", mcp_path));
|
||||
}
|
||||
|
||||
// Add resume flag if session ID provided
|
||||
if let Some(ref session_id) = options.resume_session_id {
|
||||
if !session_id.is_empty() {
|
||||
claude_cmd.push_str(&format!(" --resume '{}'", session_id));
|
||||
}
|
||||
}
|
||||
|
||||
// Use bash -lc to load login profile (ensures PATH includes claude)
|
||||
cmd.args(["-e", "bash", "-lc", &claude_cmd]);
|
||||
|
||||
@@ -278,8 +314,8 @@ impl WslBridge {
|
||||
self.stdin = stdin;
|
||||
self.process = Some(child);
|
||||
|
||||
// Reset session stats when starting new session
|
||||
self.stats.write().reset_session();
|
||||
// Note: We no longer reset stats here - stats persist across reconnects
|
||||
// Stats are only reset when explicitly disconnecting via stop()
|
||||
|
||||
// Load saved achievements
|
||||
let app_handle = app.clone();
|
||||
@@ -306,7 +342,11 @@ impl WslBridge {
|
||||
});
|
||||
}
|
||||
|
||||
emit_connection_status(&app, ConnectionStatus::Connected, self.conversation_id.clone());
|
||||
emit_connection_status(
|
||||
&app,
|
||||
ConnectionStatus::Connected,
|
||||
self.conversation_id.clone(),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -331,7 +371,44 @@ impl WslBridge {
|
||||
.write_all(format!("{}\n", json_line).as_bytes())
|
||||
.map_err(|e| format!("Failed to write to stdin: {}", e))?;
|
||||
|
||||
stdin.flush().map_err(|e| format!("Failed to flush stdin: {}", e))?;
|
||||
stdin
|
||||
.flush()
|
||||
.map_err(|e| format!("Failed to flush stdin: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn send_tool_result(
|
||||
&mut self,
|
||||
tool_use_id: &str,
|
||||
result: serde_json::Value,
|
||||
) -> Result<(), String> {
|
||||
let stdin = self.stdin.as_mut().ok_or("Process not running")?;
|
||||
|
||||
// The content should be a JSON string representation of the result
|
||||
let content_str = serde_json::to_string(&result).map_err(|e| e.to_string())?;
|
||||
|
||||
let input = serde_json::json!({
|
||||
"type": "user",
|
||||
"message": {
|
||||
"role": "user",
|
||||
"content": [{
|
||||
"type": "tool_result",
|
||||
"tool_use_id": tool_use_id,
|
||||
"content": content_str
|
||||
}]
|
||||
}
|
||||
});
|
||||
|
||||
let json_line = serde_json::to_string(&input).map_err(|e| e.to_string())?;
|
||||
|
||||
stdin
|
||||
.write_all(format!("{}\n", json_line).as_bytes())
|
||||
.map_err(|e| format!("Failed to write to stdin: {}", e))?;
|
||||
|
||||
stdin
|
||||
.flush()
|
||||
.map_err(|e| format!("Failed to flush stdin: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -352,7 +429,11 @@ impl WslBridge {
|
||||
// The user will see what session was interrupted
|
||||
|
||||
// Emit disconnected status
|
||||
emit_connection_status(app, ConnectionStatus::Disconnected, self.conversation_id.clone());
|
||||
emit_connection_status(
|
||||
app,
|
||||
ConnectionStatus::Disconnected,
|
||||
self.conversation_id.clone(),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
} else {
|
||||
@@ -368,7 +449,27 @@ impl WslBridge {
|
||||
self.stdin = None;
|
||||
self.session_id = None;
|
||||
self.mcp_config_file = None; // Temp file is automatically deleted when dropped
|
||||
emit_connection_status(app, ConnectionStatus::Disconnected, self.conversation_id.clone());
|
||||
|
||||
// Save lifetime stats before resetting session
|
||||
let stats_snapshot = self.stats.read().clone();
|
||||
let app_clone = app.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
println!("Saving stats on session stop...");
|
||||
if let Err(e) = crate::stats::save_stats(&app_clone, &stats_snapshot).await {
|
||||
eprintln!("Failed to save stats: {}", e);
|
||||
} else {
|
||||
println!("Stats saved successfully on session stop");
|
||||
}
|
||||
});
|
||||
|
||||
// Reset session stats on explicit disconnect
|
||||
self.stats.write().reset_session();
|
||||
|
||||
emit_connection_status(
|
||||
app,
|
||||
ConnectionStatus::Disconnected,
|
||||
self.conversation_id.clone(),
|
||||
);
|
||||
}
|
||||
|
||||
pub fn is_running(&self) -> bool {
|
||||
@@ -382,7 +483,6 @@ impl WslBridge {
|
||||
pub fn get_stats(&self) -> UsageStats {
|
||||
self.stats.read().clone()
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Default for WslBridge {
|
||||
@@ -391,7 +491,12 @@ impl Default for WslBridge {
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_stdout(stdout: std::process::ChildStdout, app: AppHandle, stats: Arc<RwLock<UsageStats>>, conversation_id: Option<String>) {
|
||||
fn handle_stdout(
|
||||
stdout: std::process::ChildStdout,
|
||||
app: AppHandle,
|
||||
stats: Arc<RwLock<UsageStats>>,
|
||||
conversation_id: Option<String>,
|
||||
) {
|
||||
let reader = BufReader::new(stdout);
|
||||
|
||||
for line in reader.lines() {
|
||||
@@ -412,18 +517,25 @@ fn handle_stdout(stdout: std::process::ChildStdout, app: AppHandle, stats: Arc<R
|
||||
emit_connection_status(&app, ConnectionStatus::Disconnected, conversation_id);
|
||||
}
|
||||
|
||||
fn handle_stderr(stderr: std::process::ChildStderr, app: AppHandle, conversation_id: Option<String>) {
|
||||
fn handle_stderr(
|
||||
stderr: std::process::ChildStderr,
|
||||
app: AppHandle,
|
||||
conversation_id: Option<String>,
|
||||
) {
|
||||
let reader = BufReader::new(stderr);
|
||||
|
||||
for line in reader.lines() {
|
||||
match line {
|
||||
Ok(line) if !line.is_empty() => {
|
||||
let _ = app.emit("claude:output", OutputEvent {
|
||||
line_type: "error".to_string(),
|
||||
content: line,
|
||||
tool_name: None,
|
||||
conversation_id: conversation_id.clone(),
|
||||
});
|
||||
let _ = app.emit(
|
||||
"claude:output",
|
||||
OutputEvent {
|
||||
line_type: "error".to_string(),
|
||||
content: line,
|
||||
tool_name: None,
|
||||
conversation_id: conversation_id.clone(),
|
||||
},
|
||||
);
|
||||
}
|
||||
Err(_) => break,
|
||||
_ => {}
|
||||
@@ -431,24 +543,40 @@ fn handle_stderr(stderr: std::process::ChildStderr, app: AppHandle, conversation
|
||||
}
|
||||
}
|
||||
|
||||
fn process_json_line(line: &str, app: &AppHandle, stats: &Arc<RwLock<UsageStats>>, conversation_id: &Option<String>) -> Result<(), String> {
|
||||
fn process_json_line(
|
||||
line: &str,
|
||||
app: &AppHandle,
|
||||
stats: &Arc<RwLock<UsageStats>>,
|
||||
conversation_id: &Option<String>,
|
||||
) -> Result<(), String> {
|
||||
let message: ClaudeMessage = serde_json::from_str(line)
|
||||
.map_err(|e| format!("Failed to parse JSON: {} - Line: {}", e, line))?;
|
||||
|
||||
match &message {
|
||||
ClaudeMessage::System { subtype, session_id, cwd, .. } => {
|
||||
ClaudeMessage::System {
|
||||
subtype,
|
||||
session_id,
|
||||
cwd,
|
||||
..
|
||||
} => {
|
||||
if subtype == "init" {
|
||||
if let Some(id) = session_id {
|
||||
let _ = app.emit("claude:session", SessionEvent {
|
||||
session_id: id.clone(),
|
||||
conversation_id: conversation_id.clone(),
|
||||
});
|
||||
let _ = app.emit(
|
||||
"claude:session",
|
||||
SessionEvent {
|
||||
session_id: id.clone(),
|
||||
conversation_id: conversation_id.clone(),
|
||||
},
|
||||
);
|
||||
}
|
||||
if let Some(dir) = cwd {
|
||||
let _ = app.emit("claude:cwd", WorkingDirectoryEvent {
|
||||
directory: dir.clone(),
|
||||
conversation_id: conversation_id.clone(),
|
||||
});
|
||||
let _ = app.emit(
|
||||
"claude:cwd",
|
||||
WorkingDirectoryEvent {
|
||||
directory: dir.clone(),
|
||||
conversation_id: conversation_id.clone(),
|
||||
},
|
||||
);
|
||||
}
|
||||
emit_state_change(app, CharacterState::Idle, None, conversation_id.clone());
|
||||
}
|
||||
@@ -500,12 +628,15 @@ fn process_json_line(line: &str, app: &AppHandle, stats: &Arc<RwLock<UsageStats>
|
||||
}
|
||||
|
||||
let desc = format_tool_description(name, input);
|
||||
let _ = app.emit("claude:output", OutputEvent {
|
||||
line_type: "tool".to_string(),
|
||||
content: desc,
|
||||
tool_name: Some(name.clone()),
|
||||
conversation_id: conversation_id.clone(),
|
||||
});
|
||||
let _ = app.emit(
|
||||
"claude:output",
|
||||
OutputEvent {
|
||||
line_type: "tool".to_string(),
|
||||
content: desc,
|
||||
tool_name: Some(name.clone()),
|
||||
conversation_id: conversation_id.clone(),
|
||||
},
|
||||
);
|
||||
}
|
||||
ContentBlock::Text { text } => {
|
||||
// Count code blocks in the text
|
||||
@@ -514,21 +645,27 @@ fn process_json_line(line: &str, app: &AppHandle, stats: &Arc<RwLock<UsageStats>
|
||||
stats.write().increment_code_blocks();
|
||||
}
|
||||
|
||||
let _ = app.emit("claude:output", OutputEvent {
|
||||
line_type: "assistant".to_string(),
|
||||
content: text.clone(),
|
||||
tool_name: None,
|
||||
conversation_id: conversation_id.clone(),
|
||||
});
|
||||
let _ = app.emit(
|
||||
"claude:output",
|
||||
OutputEvent {
|
||||
line_type: "assistant".to_string(),
|
||||
content: text.clone(),
|
||||
tool_name: None,
|
||||
conversation_id: conversation_id.clone(),
|
||||
},
|
||||
);
|
||||
}
|
||||
ContentBlock::Thinking { thinking } => {
|
||||
state = CharacterState::Thinking;
|
||||
let _ = app.emit("claude:output", OutputEvent {
|
||||
line_type: "system".to_string(),
|
||||
content: format!("[Thinking] {}", thinking),
|
||||
tool_name: None,
|
||||
conversation_id: conversation_id.clone(),
|
||||
});
|
||||
let _ = app.emit(
|
||||
"claude:output",
|
||||
OutputEvent {
|
||||
line_type: "system".to_string(),
|
||||
content: format!("[Thinking] {}", thinking),
|
||||
tool_name: None,
|
||||
conversation_id: conversation_id.clone(),
|
||||
},
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@@ -563,13 +700,34 @@ fn process_json_line(line: &str, app: &AppHandle, stats: &Arc<RwLock<UsageStats>
|
||||
}
|
||||
}
|
||||
|
||||
ClaudeMessage::Result { subtype, result, permission_denials, usage: _, .. } => {
|
||||
ClaudeMessage::Result {
|
||||
subtype,
|
||||
result,
|
||||
permission_denials,
|
||||
usage,
|
||||
..
|
||||
} => {
|
||||
let state = if subtype == "success" {
|
||||
CharacterState::Success
|
||||
} else {
|
||||
CharacterState::Error
|
||||
};
|
||||
|
||||
// Track token usage from Result messages if available
|
||||
// This captures tokens from tool outputs and other operations
|
||||
if let Some(usage_info) = usage {
|
||||
// We need the model info to calculate cost properly
|
||||
// For now, use the last known model from stats
|
||||
let model = {
|
||||
let stats_guard = stats.read();
|
||||
stats_guard.model.clone().unwrap_or_else(|| "claude-opus-4-20250514".to_string())
|
||||
};
|
||||
|
||||
let mut stats_guard = stats.write();
|
||||
stats_guard.add_usage(usage_info.input_tokens, usage_info.output_tokens, &model);
|
||||
println!("Result message tokens - input: {}, output: {}", usage_info.input_tokens, usage_info.output_tokens);
|
||||
}
|
||||
|
||||
// Always emit updated stats on result message (less frequent)
|
||||
// This includes the latest session duration
|
||||
let newly_unlocked = {
|
||||
@@ -584,9 +742,10 @@ fn process_json_line(line: &str, app: &AppHandle, stats: &Arc<RwLock<UsageStats>
|
||||
// Emit achievement events for any newly unlocked achievements
|
||||
for achievement_id in &newly_unlocked {
|
||||
let info = get_achievement_info(achievement_id);
|
||||
let _ = app.emit("achievement:unlocked", AchievementUnlockedEvent {
|
||||
achievement: info,
|
||||
});
|
||||
let _ = app.emit(
|
||||
"achievement:unlocked",
|
||||
AchievementUnlockedEvent { achievement: info },
|
||||
);
|
||||
}
|
||||
|
||||
// Save achievements after unlocking new ones
|
||||
@@ -598,7 +757,10 @@ fn process_json_line(line: &str, app: &AppHandle, stats: &Arc<RwLock<UsageStats>
|
||||
// Use Tauri's async runtime instead of tokio::spawn
|
||||
tauri::async_runtime::spawn(async move {
|
||||
println!("Spawned save task for achievements");
|
||||
if let Err(e) = crate::achievements::save_achievements(&app_handle, &achievements_progress).await {
|
||||
if let Err(e) =
|
||||
crate::achievements::save_achievements(&app_handle, &achievements_progress)
|
||||
.await
|
||||
{
|
||||
eprintln!("Failed to save achievements: {}", e);
|
||||
} else {
|
||||
println!("Achievement save task completed successfully");
|
||||
@@ -608,38 +770,127 @@ fn process_json_line(line: &str, app: &AppHandle, stats: &Arc<RwLock<UsageStats>
|
||||
|
||||
let current_stats = stats.read().clone();
|
||||
let stats_event = StatsUpdateEvent {
|
||||
stats: current_stats,
|
||||
stats: current_stats.clone(),
|
||||
};
|
||||
let _ = app.emit("claude:stats", stats_event);
|
||||
|
||||
// Save stats periodically (every 10 messages to avoid excessive disk writes)
|
||||
if current_stats.session_messages_exchanged.is_multiple_of(10)
|
||||
&& current_stats.session_messages_exchanged > 0
|
||||
{
|
||||
let app_handle = app.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
println!("Periodic stats save (every 10 messages)...");
|
||||
if let Err(e) = crate::stats::save_stats(&app_handle, ¤t_stats).await {
|
||||
eprintln!("Failed to save stats: {}", e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Only emit error results - success content is already sent via Assistant message
|
||||
if subtype != "success" {
|
||||
if let Some(text) = result {
|
||||
let _ = app.emit("claude:output", OutputEvent {
|
||||
line_type: "error".to_string(),
|
||||
content: text.clone(),
|
||||
tool_name: None,
|
||||
conversation_id: conversation_id.clone(),
|
||||
});
|
||||
let _ = app.emit(
|
||||
"claude:output",
|
||||
OutputEvent {
|
||||
line_type: "error".to_string(),
|
||||
content: text.clone(),
|
||||
tool_name: None,
|
||||
conversation_id: conversation_id.clone(),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check for permission denials and emit prompts for each
|
||||
if let Some(denials) = permission_denials {
|
||||
let mut has_regular_denials = false;
|
||||
|
||||
for denial in denials {
|
||||
let description = format_tool_description(&denial.tool_name, &denial.tool_input);
|
||||
let _ = app.emit("claude:permission", PermissionPromptEvent {
|
||||
id: denial.tool_use_id.clone(),
|
||||
tool_name: denial.tool_name.clone(),
|
||||
tool_input: denial.tool_input.clone(),
|
||||
description,
|
||||
conversation_id: conversation_id.clone(),
|
||||
});
|
||||
// Special handling for AskUserQuestion tool
|
||||
if denial.tool_name == "AskUserQuestion" {
|
||||
if let Some(questions) = denial
|
||||
.tool_input
|
||||
.get("questions")
|
||||
.and_then(|q| q.as_array())
|
||||
{
|
||||
// For now, handle the first question (most common case)
|
||||
if let Some(first_question) = questions.first() {
|
||||
let question_text = first_question
|
||||
.get("question")
|
||||
.and_then(|q| q.as_str())
|
||||
.unwrap_or("Claude has a question for you")
|
||||
.to_string();
|
||||
|
||||
let header = first_question
|
||||
.get("header")
|
||||
.and_then(|h| h.as_str())
|
||||
.map(|s| s.to_string());
|
||||
|
||||
let multi_select = first_question
|
||||
.get("multiSelect")
|
||||
.and_then(|m| m.as_bool())
|
||||
.unwrap_or(false);
|
||||
|
||||
let options: Vec<QuestionOption> = first_question
|
||||
.get("options")
|
||||
.and_then(|opts| opts.as_array())
|
||||
.map(|opts| {
|
||||
opts.iter()
|
||||
.filter_map(|opt| {
|
||||
let label =
|
||||
opt.get("label").and_then(|l| l.as_str())?;
|
||||
let description = opt
|
||||
.get("description")
|
||||
.and_then(|d| d.as_str())
|
||||
.map(|s| s.to_string());
|
||||
Some(QuestionOption {
|
||||
label: label.to_string(),
|
||||
description,
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let _ = app.emit(
|
||||
"claude:question",
|
||||
UserQuestionEvent {
|
||||
id: denial.tool_use_id.clone(),
|
||||
question: question_text,
|
||||
header,
|
||||
options,
|
||||
multi_select,
|
||||
conversation_id: conversation_id.clone(),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
has_regular_denials = true;
|
||||
let description =
|
||||
format_tool_description(&denial.tool_name, &denial.tool_input);
|
||||
let _ = app.emit(
|
||||
"claude:permission",
|
||||
PermissionPromptEvent {
|
||||
id: denial.tool_use_id.clone(),
|
||||
tool_name: denial.tool_name.clone(),
|
||||
tool_input: denial.tool_input.clone(),
|
||||
description,
|
||||
conversation_id: conversation_id.clone(),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Show permission state if there were denials
|
||||
if !denials.is_empty() {
|
||||
emit_state_change(app, CharacterState::Permission, None, conversation_id.clone());
|
||||
// Show permission state if there were any denials (questions or regular)
|
||||
if has_regular_denials || !denials.is_empty() {
|
||||
emit_state_change(
|
||||
app,
|
||||
CharacterState::Permission,
|
||||
None,
|
||||
conversation_id.clone(),
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
@@ -652,7 +903,9 @@ fn process_json_line(line: &str, app: &AppHandle, stats: &Arc<RwLock<UsageStats>
|
||||
stats.write().increment_messages();
|
||||
|
||||
// Extract text content from the message
|
||||
let message_text = message.content.iter()
|
||||
let message_text = message
|
||||
.content
|
||||
.iter()
|
||||
.filter_map(|block| match block {
|
||||
crate::types::ContentBlock::Text { text } => Some(text.clone()),
|
||||
_ => None,
|
||||
@@ -682,9 +935,10 @@ fn process_json_line(line: &str, app: &AppHandle, stats: &Arc<RwLock<UsageStats>
|
||||
for achievement_id in &newly_unlocked {
|
||||
println!("User message unlocked achievement: {:?}", achievement_id);
|
||||
let info = get_achievement_info(achievement_id);
|
||||
let _ = app.emit("achievement:unlocked", AchievementUnlockedEvent {
|
||||
achievement: info,
|
||||
});
|
||||
let _ = app.emit(
|
||||
"achievement:unlocked",
|
||||
AchievementUnlockedEvent { achievement: info },
|
||||
);
|
||||
}
|
||||
|
||||
// Save achievements after unlocking new ones
|
||||
@@ -693,7 +947,10 @@ fn process_json_line(line: &str, app: &AppHandle, stats: &Arc<RwLock<UsageStats>
|
||||
let app_handle = app.clone();
|
||||
let achievements_progress = stats.read().achievements.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
if let Err(e) = crate::achievements::save_achievements(&app_handle, &achievements_progress).await {
|
||||
if let Err(e) =
|
||||
crate::achievements::save_achievements(&app_handle, &achievements_progress)
|
||||
.await
|
||||
{
|
||||
eprintln!("Failed to save achievements: {}", e);
|
||||
} else {
|
||||
println!("Achievements saved after user message");
|
||||
@@ -768,15 +1025,36 @@ fn format_tool_description(name: &str, input: &serde_json::Value) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
fn emit_state_change(app: &AppHandle, state: CharacterState, tool_name: Option<String>, conversation_id: Option<String>) {
|
||||
let _ = app.emit("claude:state", StateChangeEvent { state, tool_name, conversation_id });
|
||||
fn emit_state_change(
|
||||
app: &AppHandle,
|
||||
state: CharacterState,
|
||||
tool_name: Option<String>,
|
||||
conversation_id: Option<String>,
|
||||
) {
|
||||
let _ = app.emit(
|
||||
"claude:state",
|
||||
StateChangeEvent {
|
||||
state,
|
||||
tool_name,
|
||||
conversation_id,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
fn emit_connection_status(app: &AppHandle, status: ConnectionStatus, conversation_id: Option<String>) {
|
||||
let _ = app.emit("claude:connection", ConnectionEvent { status, conversation_id });
|
||||
fn emit_connection_status(
|
||||
app: &AppHandle,
|
||||
status: ConnectionStatus,
|
||||
conversation_id: Option<String>,
|
||||
) {
|
||||
let _ = app.emit(
|
||||
"claude:connection",
|
||||
ConnectionEvent {
|
||||
status,
|
||||
conversation_id,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -786,21 +1064,36 @@ mod tests {
|
||||
assert!(matches!(get_tool_state("Read"), CharacterState::Searching));
|
||||
assert!(matches!(get_tool_state("Glob"), CharacterState::Searching));
|
||||
assert!(matches!(get_tool_state("Grep"), CharacterState::Searching));
|
||||
assert!(matches!(get_tool_state("WebSearch"), CharacterState::Searching));
|
||||
assert!(matches!(get_tool_state("WebFetch"), CharacterState::Searching));
|
||||
assert!(matches!(
|
||||
get_tool_state("WebSearch"),
|
||||
CharacterState::Searching
|
||||
));
|
||||
assert!(matches!(
|
||||
get_tool_state("WebFetch"),
|
||||
CharacterState::Searching
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_tool_state_coding_tools() {
|
||||
assert!(matches!(get_tool_state("Edit"), CharacterState::Coding));
|
||||
assert!(matches!(get_tool_state("Write"), CharacterState::Coding));
|
||||
assert!(matches!(get_tool_state("NotebookEdit"), CharacterState::Coding));
|
||||
assert!(matches!(
|
||||
get_tool_state("NotebookEdit"),
|
||||
CharacterState::Coding
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_tool_state_mcp_tools() {
|
||||
assert!(matches!(get_tool_state("mcp__github__create_issue"), CharacterState::Mcp));
|
||||
assert!(matches!(get_tool_state("mcp__notion__search"), CharacterState::Mcp));
|
||||
assert!(matches!(
|
||||
get_tool_state("mcp__github__create_issue"),
|
||||
CharacterState::Mcp
|
||||
));
|
||||
assert!(matches!(
|
||||
get_tool_state("mcp__notion__search"),
|
||||
CharacterState::Mcp
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -810,7 +1103,10 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_get_tool_state_unknown() {
|
||||
assert!(matches!(get_tool_state("SomeUnknownTool"), CharacterState::Typing));
|
||||
assert!(matches!(
|
||||
get_tool_state("SomeUnknownTool"),
|
||||
CharacterState::Typing
|
||||
));
|
||||
assert!(matches!(get_tool_state("Bash"), CharacterState::Typing));
|
||||
}
|
||||
|
||||
|
||||
@@ -81,4 +81,4 @@ $notifier.Show($toast)
|
||||
|
||||
// If all methods fail, return an error
|
||||
Err("All WSL notification methods failed".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://schema.tauri.app/config/2",
|
||||
"productName": "hikari-desktop",
|
||||
"version": "0.2.0",
|
||||
"version": "1.1.1",
|
||||
"identifier": "com.naomi.hikari-desktop",
|
||||
"build": {
|
||||
"beforeDevCommand": "pnpm dev",
|
||||
@@ -22,6 +22,12 @@
|
||||
],
|
||||
"security": {
|
||||
"csp": null
|
||||
},
|
||||
"trayIcon": {
|
||||
"id": "main",
|
||||
"iconPath": "icons/32x32.png",
|
||||
"iconAsTemplate": false,
|
||||
"tooltip": "Hikari - Claude Code Assistant"
|
||||
}
|
||||
},
|
||||
"bundle": {
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
--bg-secondary: #16213e;
|
||||
--bg-terminal: #0f0f1a;
|
||||
--bg-hover: #2a2a4a;
|
||||
--bg-code: #1e1e2e;
|
||||
--accent-primary: #e94560;
|
||||
--accent-secondary: #ff6b9d;
|
||||
--text-primary: #ffffff;
|
||||
@@ -13,11 +14,40 @@
|
||||
--text-tertiary: #6b7280;
|
||||
--border-color: #2a2a4a;
|
||||
|
||||
/* Trans pride colors */
|
||||
--trans-blue: #5bcefa;
|
||||
--trans-pink: #f5a9b8;
|
||||
--trans-white: #ffffff;
|
||||
--trans-gradient: linear-gradient(
|
||||
135deg,
|
||||
var(--trans-blue) 0%,
|
||||
var(--trans-pink) 50%,
|
||||
var(--trans-white) 100%
|
||||
);
|
||||
--trans-gradient-vibrant: linear-gradient(
|
||||
135deg,
|
||||
var(--trans-blue) 0%,
|
||||
var(--trans-pink) 35%,
|
||||
var(--trans-white) 50%,
|
||||
var(--trans-pink) 65%,
|
||||
var(--trans-blue) 100%
|
||||
);
|
||||
|
||||
/* Terminal specific colors */
|
||||
--terminal-user: #22d3ee;
|
||||
--terminal-tool: #c084fc;
|
||||
--terminal-tool-name: #ddd6fe;
|
||||
--terminal-error: #f87171;
|
||||
|
||||
/* Syntax highlighting colors (dark) */
|
||||
--hljs-keyword: #f472b6;
|
||||
--hljs-string: #a3e635;
|
||||
--hljs-number: #fbbf24;
|
||||
--hljs-comment: #6b7280;
|
||||
--hljs-function: #c084fc;
|
||||
--hljs-type: #22d3ee;
|
||||
--hljs-variable: #fb923c;
|
||||
--hljs-meta: #94a3b8;
|
||||
}
|
||||
|
||||
[data-theme="light"] {
|
||||
@@ -25,6 +55,7 @@
|
||||
--bg-secondary: #ffffff;
|
||||
--bg-terminal: #f1f3f4;
|
||||
--bg-hover: #e8e8e8;
|
||||
--bg-code: #f5f5f5;
|
||||
--accent-primary: #e94560;
|
||||
--accent-secondary: #ff6b9d;
|
||||
--text-primary: #1a1a2e;
|
||||
@@ -32,11 +63,89 @@
|
||||
--text-tertiary: #9ca3af;
|
||||
--border-color: #d0d0e0;
|
||||
|
||||
/* Trans pride colors */
|
||||
--trans-blue: #5bcefa;
|
||||
--trans-pink: #f5a9b8;
|
||||
--trans-white: #ffffff;
|
||||
--trans-gradient: linear-gradient(
|
||||
135deg,
|
||||
var(--trans-blue) 0%,
|
||||
var(--trans-pink) 50%,
|
||||
var(--trans-white) 100%
|
||||
);
|
||||
--trans-gradient-vibrant: linear-gradient(
|
||||
135deg,
|
||||
var(--trans-blue) 0%,
|
||||
var(--trans-pink) 35%,
|
||||
var(--trans-white) 50%,
|
||||
var(--trans-pink) 65%,
|
||||
var(--trans-blue) 100%
|
||||
);
|
||||
|
||||
/* Terminal specific colors */
|
||||
--terminal-user: #0891b2;
|
||||
--terminal-tool: #7c3aed;
|
||||
--terminal-tool-name: #8b5cf6;
|
||||
--terminal-error: #dc2626;
|
||||
|
||||
/* Syntax highlighting colors (light) */
|
||||
--hljs-keyword: #d946ef;
|
||||
--hljs-string: #16a34a;
|
||||
--hljs-number: #d97706;
|
||||
--hljs-comment: #9ca3af;
|
||||
--hljs-function: #7c3aed;
|
||||
--hljs-type: #0891b2;
|
||||
--hljs-variable: #ea580c;
|
||||
--hljs-meta: #64748b;
|
||||
}
|
||||
|
||||
[data-theme="high-contrast"] {
|
||||
--bg-primary: #000000;
|
||||
--bg-secondary: #0a0a0a;
|
||||
--bg-terminal: #000000;
|
||||
--bg-hover: #1a1a1a;
|
||||
--bg-code: #0a0a0a;
|
||||
--accent-primary: #ff4d6d;
|
||||
--accent-secondary: #ff85a1;
|
||||
--text-primary: #ffffff;
|
||||
--text-secondary: #e0e0e0;
|
||||
--text-tertiary: #b0b0b0;
|
||||
--border-color: #ffffff;
|
||||
|
||||
/* Trans pride colors (high contrast) */
|
||||
--trans-blue: #00d4ff;
|
||||
--trans-pink: #ff99cc;
|
||||
--trans-white: #ffffff;
|
||||
--trans-gradient: linear-gradient(
|
||||
135deg,
|
||||
var(--trans-blue) 0%,
|
||||
var(--trans-pink) 50%,
|
||||
var(--trans-white) 100%
|
||||
);
|
||||
--trans-gradient-vibrant: linear-gradient(
|
||||
135deg,
|
||||
var(--trans-blue) 0%,
|
||||
var(--trans-pink) 35%,
|
||||
var(--trans-white) 50%,
|
||||
var(--trans-pink) 65%,
|
||||
var(--trans-blue) 100%
|
||||
);
|
||||
|
||||
/* Terminal specific colors - bright and saturated */
|
||||
--terminal-user: #00ffff;
|
||||
--terminal-tool: #ff00ff;
|
||||
--terminal-tool-name: #ffaaff;
|
||||
--terminal-error: #ff5555;
|
||||
|
||||
/* Syntax highlighting colors (high contrast) */
|
||||
--hljs-keyword: #ff66ff;
|
||||
--hljs-string: #66ff66;
|
||||
--hljs-number: #ffff00;
|
||||
--hljs-comment: #aaaaaa;
|
||||
--hljs-function: #ff99ff;
|
||||
--hljs-type: #00ffff;
|
||||
--hljs-variable: #ffaa00;
|
||||
--hljs-meta: #cccccc;
|
||||
}
|
||||
|
||||
html,
|
||||
@@ -79,3 +188,52 @@ body {
|
||||
background: var(--accent-primary);
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
/* Trans gradient button - primary action buttons */
|
||||
.btn-trans-gradient {
|
||||
background: var(--trans-gradient-vibrant) !important;
|
||||
border: none !important;
|
||||
color: #1a1a2e !important;
|
||||
font-weight: 600;
|
||||
text-shadow: 0 0 2px rgba(255, 255, 255, 0.5);
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.btn-trans-gradient:hover:not(:disabled) {
|
||||
filter: brightness(1.1);
|
||||
box-shadow:
|
||||
0 0 20px rgba(91, 206, 250, 0.4),
|
||||
0 0 30px rgba(245, 169, 184, 0.3);
|
||||
}
|
||||
|
||||
.btn-trans-gradient:disabled {
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
filter: grayscale(0.3);
|
||||
}
|
||||
|
||||
/* Trans gradient focus border for inputs */
|
||||
.input-trans-focus {
|
||||
position: relative;
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.input-trans-focus:focus {
|
||||
border-color: var(--trans-pink) !important;
|
||||
box-shadow:
|
||||
0 0 0 1px var(--trans-blue),
|
||||
0 0 12px rgba(91, 206, 250, 0.3),
|
||||
0 0 20px rgba(245, 169, 184, 0.2) !important;
|
||||
outline: none !important;
|
||||
}
|
||||
|
||||
/* Trans gradient hover for icon buttons */
|
||||
.icon-trans-hover {
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.icon-trans-hover:hover {
|
||||
color: var(--trans-pink) !important;
|
||||
filter: drop-shadow(0 0 6px rgba(91, 206, 250, 0.5))
|
||||
drop-shadow(0 0 10px rgba(245, 169, 184, 0.4));
|
||||
}
|
||||
|
||||
@@ -0,0 +1,414 @@
|
||||
import { describe, it, expect, vi } from "vitest";
|
||||
import {
|
||||
slashCommands,
|
||||
parseSlashCommand,
|
||||
getMatchingCommands,
|
||||
isSlashCommand,
|
||||
type SlashCommand,
|
||||
} from "./slashCommands";
|
||||
|
||||
// Mock all external dependencies
|
||||
vi.mock("svelte/store", () => ({
|
||||
get: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("@tauri-apps/api/core", () => ({
|
||||
invoke: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("$lib/stores/claude", () => ({
|
||||
claudeStore: {
|
||||
addLine: vi.fn(),
|
||||
clearTerminal: vi.fn(),
|
||||
activeConversationId: { subscribe: vi.fn() },
|
||||
currentWorkingDirectory: { subscribe: vi.fn() },
|
||||
setWorkingDirectory: vi.fn(),
|
||||
getConversationHistory: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock("$lib/stores/character", () => ({
|
||||
characterState: {
|
||||
setState: vi.fn(),
|
||||
setTemporaryState: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock("$lib/tauri", () => ({
|
||||
setSkipNextGreeting: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("$lib/stores/search", () => ({
|
||||
searchState: {
|
||||
setQuery: vi.fn(),
|
||||
clear: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
describe("slashCommands", () => {
|
||||
describe("slashCommands array", () => {
|
||||
it("contains expected commands", () => {
|
||||
const commandNames = slashCommands.map((cmd) => cmd.name);
|
||||
expect(commandNames).toContain("cd");
|
||||
expect(commandNames).toContain("clear");
|
||||
expect(commandNames).toContain("new");
|
||||
expect(commandNames).toContain("help");
|
||||
expect(commandNames).toContain("search");
|
||||
expect(commandNames).toContain("summarise");
|
||||
expect(commandNames).toContain("skill");
|
||||
});
|
||||
|
||||
it("has 7 commands total", () => {
|
||||
expect(slashCommands.length).toBe(7);
|
||||
});
|
||||
|
||||
it("each command has required properties", () => {
|
||||
slashCommands.forEach((cmd) => {
|
||||
expect(cmd.name).toBeDefined();
|
||||
expect(typeof cmd.name).toBe("string");
|
||||
expect(cmd.name.length).toBeGreaterThan(0);
|
||||
|
||||
expect(cmd.description).toBeDefined();
|
||||
expect(typeof cmd.description).toBe("string");
|
||||
expect(cmd.description.length).toBeGreaterThan(0);
|
||||
|
||||
expect(cmd.usage).toBeDefined();
|
||||
expect(typeof cmd.usage).toBe("string");
|
||||
expect(cmd.usage.startsWith("/")).toBe(true);
|
||||
|
||||
expect(cmd.execute).toBeDefined();
|
||||
expect(typeof cmd.execute).toBe("function");
|
||||
});
|
||||
});
|
||||
|
||||
it("cd command has correct metadata", () => {
|
||||
const cdCmd = slashCommands.find((cmd) => cmd.name === "cd");
|
||||
expect(cdCmd).toBeDefined();
|
||||
expect(cdCmd!.description).toBe("Change the working directory");
|
||||
expect(cdCmd!.usage).toBe("/cd <path>");
|
||||
});
|
||||
|
||||
it("clear command has correct metadata", () => {
|
||||
const clearCmd = slashCommands.find((cmd) => cmd.name === "clear");
|
||||
expect(clearCmd).toBeDefined();
|
||||
expect(clearCmd!.description).toBe("Clear the terminal display (keeps conversation context)");
|
||||
expect(clearCmd!.usage).toBe("/clear");
|
||||
});
|
||||
|
||||
it("new command has correct metadata", () => {
|
||||
const newCmd = slashCommands.find((cmd) => cmd.name === "new");
|
||||
expect(newCmd).toBeDefined();
|
||||
expect(newCmd!.description).toBe("Start a fresh conversation (resets context)");
|
||||
expect(newCmd!.usage).toBe("/new");
|
||||
});
|
||||
|
||||
it("help command has correct metadata", () => {
|
||||
const helpCmd = slashCommands.find((cmd) => cmd.name === "help");
|
||||
expect(helpCmd).toBeDefined();
|
||||
expect(helpCmd!.description).toBe("Show available slash commands");
|
||||
expect(helpCmd!.usage).toBe("/help");
|
||||
});
|
||||
|
||||
it("search command has correct metadata", () => {
|
||||
const searchCmd = slashCommands.find((cmd) => cmd.name === "search");
|
||||
expect(searchCmd).toBeDefined();
|
||||
expect(searchCmd!.description).toBe("Search within the conversation (use /search to clear)");
|
||||
expect(searchCmd!.usage).toBe("/search [query]");
|
||||
});
|
||||
|
||||
it("summarise command has correct metadata", () => {
|
||||
const summariseCmd = slashCommands.find((cmd) => cmd.name === "summarise");
|
||||
expect(summariseCmd).toBeDefined();
|
||||
expect(summariseCmd!.description).toBe("Get a summary of the entire conversation");
|
||||
expect(summariseCmd!.usage).toBe("/summarise");
|
||||
});
|
||||
|
||||
it("skill command has correct metadata", () => {
|
||||
const skillCmd = slashCommands.find((cmd) => cmd.name === "skill");
|
||||
expect(skillCmd).toBeDefined();
|
||||
expect(skillCmd!.description).toBe("Invoke a Claude Code skill from ~/.claude/skills/");
|
||||
expect(skillCmd!.usage).toBe("/skill [name] [data]");
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseSlashCommand", () => {
|
||||
it("returns null for non-slash input", () => {
|
||||
const result = parseSlashCommand("hello world");
|
||||
expect(result.command).toBeNull();
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("returns null for empty string", () => {
|
||||
const result = parseSlashCommand("");
|
||||
expect(result.command).toBeNull();
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("returns null for whitespace only", () => {
|
||||
const result = parseSlashCommand(" ");
|
||||
expect(result.command).toBeNull();
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /cd command without args", () => {
|
||||
const result = parseSlashCommand("/cd");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("cd");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /cd command with path argument", () => {
|
||||
const result = parseSlashCommand("/cd /home/naomi/code");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("cd");
|
||||
expect(result.args).toBe("/home/naomi/code");
|
||||
});
|
||||
|
||||
it("parses /clear command", () => {
|
||||
const result = parseSlashCommand("/clear");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("clear");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /new command", () => {
|
||||
const result = parseSlashCommand("/new");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("new");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /help command", () => {
|
||||
const result = parseSlashCommand("/help");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("help");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /search command with query", () => {
|
||||
const result = parseSlashCommand("/search hello world");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("search");
|
||||
expect(result.args).toBe("hello world");
|
||||
});
|
||||
|
||||
it("parses /search command without query", () => {
|
||||
const result = parseSlashCommand("/search");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("search");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /summarise command", () => {
|
||||
const result = parseSlashCommand("/summarise");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("summarise");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("parses /skill command with name and data", () => {
|
||||
const result = parseSlashCommand("/skill onboard-mentee john@example.com");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("skill");
|
||||
expect(result.args).toBe("onboard-mentee john@example.com");
|
||||
});
|
||||
|
||||
it("parses /skill command with name only", () => {
|
||||
const result = parseSlashCommand("/skill onboard-mentee");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("skill");
|
||||
expect(result.args).toBe("onboard-mentee");
|
||||
});
|
||||
|
||||
it("parses /skill command without arguments", () => {
|
||||
const result = parseSlashCommand("/skill");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("skill");
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("returns null for unknown command", () => {
|
||||
const result = parseSlashCommand("/unknown");
|
||||
expect(result.command).toBeNull();
|
||||
expect(result.args).toBe("");
|
||||
});
|
||||
|
||||
it("is case insensitive for command names", () => {
|
||||
const result1 = parseSlashCommand("/CD /path");
|
||||
expect(result1.command).not.toBeNull();
|
||||
expect(result1.command!.name).toBe("cd");
|
||||
|
||||
const result2 = parseSlashCommand("/CLEAR");
|
||||
expect(result2.command).not.toBeNull();
|
||||
expect(result2.command!.name).toBe("clear");
|
||||
|
||||
const result3 = parseSlashCommand("/Help");
|
||||
expect(result3.command).not.toBeNull();
|
||||
expect(result3.command!.name).toBe("help");
|
||||
});
|
||||
|
||||
it("handles leading whitespace", () => {
|
||||
const result = parseSlashCommand(" /cd /path");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("cd");
|
||||
expect(result.args).toBe("/path");
|
||||
});
|
||||
|
||||
it("handles trailing whitespace", () => {
|
||||
const result = parseSlashCommand("/cd /path ");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("cd");
|
||||
expect(result.args).toBe("/path");
|
||||
});
|
||||
|
||||
it("handles multiple spaces between args", () => {
|
||||
const result = parseSlashCommand("/search hello world");
|
||||
expect(result.command).not.toBeNull();
|
||||
expect(result.command!.name).toBe("search");
|
||||
expect(result.args).toBe("hello world");
|
||||
});
|
||||
});
|
||||
|
||||
// Covers prefix matching in getMatchingCommands: slash detection, empty and
// partial prefixes, exact names, case-insensitivity, and leading whitespace.
describe("getMatchingCommands", () => {
  it("returns empty array for non-slash input", () => {
    const result = getMatchingCommands("hello");
    expect(result).toEqual([]);
  });

  it("returns empty array for empty string", () => {
    const result = getMatchingCommands("");
    expect(result).toEqual([]);
  });

  // A bare "/" matches every registered command.
  it("returns all commands for just slash", () => {
    const result = getMatchingCommands("/");
    expect(result.length).toBe(slashCommands.length);
  });

  it("returns matching commands for partial input", () => {
    const result = getMatchingCommands("/c");
    const names = result.map((cmd) => cmd.name);
    expect(names).toContain("cd");
    expect(names).toContain("clear");
    expect(names).not.toContain("help");
  });

  it("returns single command for exact match", () => {
    const result = getMatchingCommands("/cd");
    expect(result.length).toBe(1);
    expect(result[0].name).toBe("cd");
  });

  it("returns single command for partial unique match", () => {
    const result = getMatchingCommands("/cl");
    expect(result.length).toBe(1);
    expect(result[0].name).toBe("clear");
  });

  it("returns matching commands for /s prefix", () => {
    const result = getMatchingCommands("/s");
    const names = result.map((cmd) => cmd.name);
    expect(names).toContain("search");
    expect(names).toContain("summarise");
    expect(names).toContain("skill");
  });

  it("is case insensitive", () => {
    const result1 = getMatchingCommands("/C");
    const result2 = getMatchingCommands("/c");
    expect(result1.length).toBe(result2.length);
  });

  it("returns empty array for no matches", () => {
    const result = getMatchingCommands("/xyz");
    expect(result).toEqual([]);
  });

  // Leading whitespace before the slash is trimmed by the matcher.
  it("handles whitespace correctly", () => {
    const result = getMatchingCommands(" /c");
    const names = result.map((cmd) => cmd.name);
    expect(names).toContain("cd");
    expect(names).toContain("clear");
  });

  it("returns command for full command name", () => {
    const result = getMatchingCommands("/help");
    expect(result.length).toBe(1);
    expect(result[0].name).toBe("help");
  });

  it("returns command for /new", () => {
    const result = getMatchingCommands("/n");
    expect(result.length).toBe(1);
    expect(result[0].name).toBe("new");
  });
});
|
||||
|
||||
// isSlashCommand is a simple predicate: trimmed input must start with "/".
describe("isSlashCommand", () => {
  // Any leading slash counts, even for unregistered command names.
  it("returns true for input starting with slash", () => {
    expect(isSlashCommand("/cd")).toBe(true);
    expect(isSlashCommand("/")).toBe(true);
    expect(isSlashCommand("/help")).toBe(true);
    expect(isSlashCommand("/unknown")).toBe(true);
  });

  it("returns false for non-slash input", () => {
    expect(isSlashCommand("hello")).toBe(false);
    expect(isSlashCommand("")).toBe(false);
    expect(isSlashCommand("cd")).toBe(false);
  });

  it("handles whitespace correctly", () => {
    expect(isSlashCommand(" /cd")).toBe(true);
    expect(isSlashCommand(" hello")).toBe(false);
  });

  it("returns false for slash in middle of string", () => {
    expect(isSlashCommand("hello/world")).toBe(false);
  });
});
|
||||
|
||||
// Structural checks on the SlashCommand shape: all four fields are present
// and execute may be either sync (returns undefined) or async (returns Promise).
describe("SlashCommand interface", () => {
  it("can create a valid slash command object", () => {
    const testCommand: SlashCommand = {
      name: "test",
      description: "A test command",
      usage: "/test [arg]",
      execute: vi.fn(),
    };

    expect(testCommand.name).toBe("test");
    expect(testCommand.description).toBe("A test command");
    expect(testCommand.usage).toBe("/test [arg]");
    expect(typeof testCommand.execute).toBe("function");
  });

  it("execute can be async function", () => {
    const asyncCommand: SlashCommand = {
      name: "async",
      description: "An async command",
      usage: "/async",
      execute: async () => {
        await Promise.resolve();
      },
    };

    // Async execute hands back a Promise the caller can await.
    expect(asyncCommand.execute("")).toBeInstanceOf(Promise);
  });

  it("execute can be sync function", () => {
    const syncCommand: SlashCommand = {
      name: "sync",
      description: "A sync command",
      usage: "/sync",
      execute: () => {
        // Synchronous execution
      },
    };

    const result = syncCommand.execute("");
    // Sync function returns undefined, not a Promise
    expect(result).toBeUndefined();
  });
});
|
||||
});
|
||||
@@ -0,0 +1,280 @@
|
||||
import { get } from "svelte/store";
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { claudeStore } from "$lib/stores/claude";
|
||||
import { characterState } from "$lib/stores/character";
|
||||
import { setSkipNextGreeting } from "$lib/tauri";
|
||||
import { searchState } from "$lib/stores/search";
|
||||
|
||||
/**
 * A slash command that can be typed into the terminal input (e.g. "/cd").
 */
export interface SlashCommand {
  /** Command name without the leading slash (e.g. "cd"). */
  name: string;
  /** Short human-readable description, shown by /help. */
  description: string;
  /** Usage string shown in help output (e.g. "/cd <path>"). */
  usage: string;
  /**
   * Runs the command. Receives the raw argument string (everything after the
   * command name). May be synchronous or return a Promise.
   */
  execute: (args: string) => Promise<void> | void;
}
|
||||
|
||||
/**
 * Handler for /cd: switches the active conversation's working directory.
 *
 * With no argument it just prints the current directory. Otherwise it
 * validates the target path via the backend, restarts the Claude process in
 * the new directory, and re-sends the captured conversation history so
 * context survives the restart.
 *
 * @param path Raw argument string from the /cd command (may be empty).
 */
async function changeDirectory(path: string): Promise<void> {
  const conversationId = get(claudeStore.activeConversationId);
  if (!conversationId) {
    claudeStore.addLine("error", "No active conversation");
    return;
  }

  // No argument: report the current directory and do nothing else.
  if (!path.trim()) {
    const currentDir = get(claudeStore.currentWorkingDirectory);
    claudeStore.addLine("system", `Current directory: ${currentDir}`);
    return;
  }

  try {
    characterState.setState("thinking");
    claudeStore.addLine("system", `Changing directory to: ${path}`);

    // Backend resolves/validates the path relative to the current directory.
    const currentDir = get(claudeStore.currentWorkingDirectory);
    const validatedPath = await invoke<string>("validate_directory", { path, currentDir });

    // Capture conversation history before disconnecting
    const conversationHistory = claudeStore.getConversationHistory();

    await invoke("stop_claude", { conversationId });

    // Wait for clean shutdown
    // NOTE(review): fixed 500ms delay — assumes shutdown completes in time;
    // confirm against the backend's stop semantics.
    await new Promise((resolve) => setTimeout(resolve, 500));

    claudeStore.setWorkingDirectory(validatedPath);

    // Suppress the greeting the restarted process would normally emit.
    setSkipNextGreeting(true);

    await invoke("start_claude", {
      conversationId,
      options: {
        working_dir: validatedPath,
      },
    });

    // Wait for connection to establish
    await new Promise((resolve) => setTimeout(resolve, 1000));

    // Restore context if there was conversation history
    if (conversationHistory) {
      const contextMessage = `[CONTEXT RESTORATION]
I just changed the working directory from ${currentDir} to ${validatedPath}. Here's our conversation so far:

${conversationHistory}

Please continue where we left off. You are now operating in the new directory.`;

      await invoke("send_prompt", {
        conversationId,
        message: contextMessage,
      });
    }

    claudeStore.addLine("system", `Changed directory to: ${validatedPath}`);
    characterState.setState("idle");
  } catch (error) {
    // Any failure (validation, stop/start, prompt) surfaces as an error line
    // and a brief error pose on the character.
    claudeStore.addLine("error", `Failed to change directory: ${error}`);
    characterState.setTemporaryState("error", 3000);
  }
}
|
||||
|
||||
/**
 * Handler for /new: restarts the active conversation with a fresh context.
 *
 * Keeps the same working directory (fetched from the backend), interrupts the
 * running Claude process, clears the terminal, and starts a new session with
 * the greeting suppressed.
 */
async function startNewConversation(): Promise<void> {
  const conversationId = get(claudeStore.activeConversationId);
  if (!conversationId) {
    claudeStore.addLine("error", "No active conversation");
    return;
  }

  try {
    // Preserve the current working directory across the restart.
    const workingDir = await invoke<string>("get_working_directory", {
      conversationId,
    });

    claudeStore.addLine("system", "Starting new conversation...");
    characterState.setState("thinking");

    await invoke("interrupt_claude", { conversationId });

    // Wipe the visible terminal; the context reset comes from the restart.
    claudeStore.clearTerminal();

    setSkipNextGreeting(true);

    await invoke("start_claude", {
      conversationId,
      options: {
        working_dir: workingDir,
      },
    });

    claudeStore.addLine("system", "New conversation started!");
    characterState.setState("idle");
  } catch (error) {
    claudeStore.addLine("error", `Failed to start new conversation: ${error}`);
    characterState.setTemporaryState("error", 3000);
  }
}
|
||||
|
||||
/**
 * Registry of all built-in slash commands. This order is the order reported
 * by /help and returned by getMatchingCommands for a bare "/".
 */
export const slashCommands: SlashCommand[] = [
  {
    name: "cd",
    description: "Change the working directory",
    usage: "/cd <path>",
    execute: changeDirectory,
  },
  {
    // Visual-only clear: terminal lines are wiped but the Claude session
    // and its context keep running.
    name: "clear",
    description: "Clear the terminal display (keeps conversation context)",
    usage: "/clear",
    execute: () => {
      claudeStore.clearTerminal();
      claudeStore.addLine("system", "Terminal cleared");
    },
  },
  {
    name: "new",
    description: "Start a fresh conversation (resets context)",
    usage: "/new",
    execute: startNewConversation,
  },
  {
    name: "help",
    description: "Show available slash commands",
    usage: "/help",
    execute: () => {
      // One aligned "usage - description" line per registered command.
      const helpText = slashCommands
        .map((cmd) => `  ${cmd.usage.padEnd(12)} - ${cmd.description}`)
        .join("\n");
      claudeStore.addLine("system", `Available commands:\n${helpText}`);
    },
  },
  {
    name: "search",
    description: "Search within the conversation (use /search to clear)",
    usage: "/search [query]",
    execute: (args: string) => {
      // Empty query acts as a toggle-off: clear any active search.
      if (!args.trim()) {
        searchState.clear();
        claudeStore.addLine("system", "Search cleared");
        return;
      }
      searchState.setQuery(args.trim());
      claudeStore.addLine("system", `Searching for: "${args.trim()}"`);
    },
  },
  {
    name: "summarise",
    description: "Get a summary of the entire conversation",
    usage: "/summarise",
    execute: async () => {
      const conversationId = get(claudeStore.activeConversationId);
      if (!conversationId) {
        claudeStore.addLine("error", "No active conversation");
        return;
      }

      try {
        claudeStore.addLine("system", "Requesting conversation summary...");
        // The summary is produced by Claude itself via a canned prompt.
        await invoke("send_prompt", {
          conversationId,
          message:
            "Please provide a comprehensive summary of our entire conversation so far, including the key topics we've discussed, decisions made, and any important context.",
        });
      } catch (error) {
        claudeStore.addLine("error", `Failed to request summary: ${error}`);
      }
    },
  },
  {
    name: "skill",
    description: "Invoke a Claude Code skill from ~/.claude/skills/",
    usage: "/skill [name] [data]",
    execute: async (args: string) => {
      const conversationId = get(claudeStore.activeConversationId);
      if (!conversationId) {
        claudeStore.addLine("error", "No active conversation");
        return;
      }

      // First token is the skill name; the remainder is free-form data.
      const parts = args.trim().split(/\s+/);
      const skillName = parts[0];
      const skillData = parts.slice(1).join(" ");

      // If no skill name provided, list available skills
      if (!skillName) {
        try {
          const skills = await invoke<string[]>("list_skills");
          if (skills.length === 0) {
            claudeStore.addLine(
              "system",
              "No skills found in ~/.claude/skills/\nCreate a skill by adding a folder with a SKILL.md file."
            );
          } else {
            const skillList = skills.map((s) => `  • ${s}`).join("\n");
            claudeStore.addLine(
              "system",
              `Available skills:\n${skillList}\n\nUsage: /skill <skill-name> [data]`
            );
          }
        } catch (error) {
          claudeStore.addLine("error", `Failed to list skills: ${error}`);
        }
        return;
      }

      try {
        claudeStore.addLine("system", `Invoking skill: ${skillName}`);
        characterState.setState("thinking");

        // Skills are invoked by asking Claude to run them, optionally with data.
        const message = skillData
          ? `Please run the /${skillName} skill with the following data:\n\n${skillData}`
          : `Please run the /${skillName} skill.`;

        await invoke("send_prompt", {
          conversationId,
          message,
        });
      } catch (error) {
        claudeStore.addLine("error", `Failed to invoke skill: ${error}`);
        characterState.setTemporaryState("error", 3000);
      }
    },
  },
];
|
||||
|
||||
export function parseSlashCommand(input: string): {
|
||||
command: SlashCommand | null;
|
||||
args: string;
|
||||
} {
|
||||
const trimmed = input.trim();
|
||||
|
||||
if (!trimmed.startsWith("/")) {
|
||||
return { command: null, args: "" };
|
||||
}
|
||||
|
||||
const parts = trimmed.slice(1).split(/\s+/);
|
||||
const commandName = parts[0]?.toLowerCase();
|
||||
const args = parts.slice(1).join(" ");
|
||||
|
||||
const command = slashCommands.find((cmd) => cmd.name.toLowerCase() === commandName);
|
||||
|
||||
return { command: command || null, args };
|
||||
}
|
||||
|
||||
export function getMatchingCommands(input: string): SlashCommand[] {
|
||||
const trimmed = input.trim();
|
||||
|
||||
if (!trimmed.startsWith("/")) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const partial = trimmed.slice(1).toLowerCase();
|
||||
|
||||
if (partial === "") {
|
||||
return slashCommands;
|
||||
}
|
||||
|
||||
return slashCommands.filter((cmd) => cmd.name.toLowerCase().startsWith(partial));
|
||||
}
|
||||
|
||||
export function isSlashCommand(input: string): boolean {
|
||||
return input.trim().startsWith("/");
|
||||
}
|
||||
@@ -40,10 +40,12 @@
|
||||
tabindex="-1"
|
||||
>
|
||||
<div class="flex items-center justify-between mb-4">
|
||||
<h2 id="about-title" class="text-xl font-semibold text-gray-100">About Hikari Desktop</h2>
|
||||
<h2 id="about-title" class="text-xl font-semibold text-[var(--text-primary)]">
|
||||
About Hikari Desktop
|
||||
</h2>
|
||||
<button
|
||||
onclick={onClose}
|
||||
class="p-1 text-gray-500 hover:text-gray-300 transition-colors"
|
||||
class="p-1 text-[var(--text-secondary)] hover:text-[var(--text-primary)] transition-colors"
|
||||
aria-label="Close"
|
||||
>
|
||||
<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
@@ -59,16 +61,16 @@
|
||||
|
||||
<div class="space-y-4 text-sm">
|
||||
<div>
|
||||
<h3 class="font-medium text-gray-200 mb-2">What is Hikari Desktop?</h3>
|
||||
<p class="text-gray-400">
|
||||
<h3 class="font-medium text-[var(--text-primary)] mb-2">What is Hikari Desktop?</h3>
|
||||
<p class="text-[var(--text-secondary)]">
|
||||
Hikari Desktop is an AI-powered desktop assistant that brings Claude directly to your
|
||||
desktop. Built with love using Tauri, Svelte, and Rust for a fast, native experience.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h3 class="font-medium text-gray-200 mb-2">Version</h3>
|
||||
<p class="text-gray-400 mb-1">
|
||||
<h3 class="font-medium text-[var(--text-primary)] mb-2">Version</h3>
|
||||
<p class="text-[var(--text-secondary)] mb-1">
|
||||
{appVersion || "Loading..."}
|
||||
</p>
|
||||
<button
|
||||
@@ -80,7 +82,7 @@
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h3 class="font-medium text-gray-200 mb-2">Source Code</h3>
|
||||
<h3 class="font-medium text-[var(--text-primary)] mb-2">Source Code</h3>
|
||||
<button
|
||||
onclick={() => openUrl(links.source)}
|
||||
class="text-[var(--accent-primary)] hover:text-[var(--accent-primary-hover)] transition-colors underline"
|
||||
@@ -90,8 +92,8 @@
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h3 class="font-medium text-gray-200 mb-2">Support & Community</h3>
|
||||
<p class="text-gray-400 mb-1">Found a bug or have a suggestion?</p>
|
||||
<h3 class="font-medium text-[var(--text-primary)] mb-2">Support & Community</h3>
|
||||
<p class="text-[var(--text-secondary)] mb-1">Found a bug or have a suggestion?</p>
|
||||
<button
|
||||
onclick={() => openUrl(links.discord)}
|
||||
class="text-[var(--accent-primary)] hover:text-[var(--accent-primary-hover)] transition-colors underline"
|
||||
@@ -101,7 +103,7 @@
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h3 class="font-medium text-gray-200 mb-2">Built with đź’• by</h3>
|
||||
<h3 class="font-medium text-[var(--text-primary)] mb-2">Built with đź’• by</h3>
|
||||
<button
|
||||
onclick={() => openUrl(links.website)}
|
||||
class="text-[var(--accent-primary)] hover:text-[var(--accent-primary-hover)] transition-colors underline"
|
||||
@@ -111,8 +113,8 @@
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h3 class="font-medium text-gray-200 mb-2">License</h3>
|
||||
<p class="text-gray-400 mb-1">
|
||||
<h3 class="font-medium text-[var(--text-primary)] mb-2">License</h3>
|
||||
<p class="text-[var(--text-secondary)] mb-1">
|
||||
This project is open source and available under our license terms.
|
||||
</p>
|
||||
<button
|
||||
@@ -124,7 +126,7 @@
|
||||
</div>
|
||||
|
||||
<div class="pt-4 mt-4 border-t border-[var(--border-color)]">
|
||||
<p class="text-xs text-gray-500 text-center">
|
||||
<p class="text-xs text-[var(--text-tertiary)] text-center">
|
||||
Copyright © {new Date().getFullYear()} Naomi Carrigan. All rights reserved.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
@@ -34,53 +34,34 @@
|
||||
return "animate-idle";
|
||||
}
|
||||
}
|
||||
|
||||
function getBackgroundGlow(): string {
|
||||
switch (currentState) {
|
||||
case "thinking":
|
||||
return "shadow-thinking";
|
||||
case "typing":
|
||||
return "shadow-typing";
|
||||
case "searching":
|
||||
return "shadow-searching";
|
||||
case "coding":
|
||||
return "shadow-coding";
|
||||
case "mcp":
|
||||
return "shadow-mcp";
|
||||
case "success":
|
||||
return "shadow-success";
|
||||
case "error":
|
||||
return "shadow-error";
|
||||
default:
|
||||
return "";
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<div class="anime-girl-container flex flex-col items-center justify-end h-full p-4">
|
||||
<div class="character-frame relative {getBackgroundGlow()} w-full max-w-md">
|
||||
<div class="sprite-container {getAnimationClass()}">
|
||||
<div
|
||||
class="anime-girl-container flex flex-col items-center justify-between h-full p-4 overflow-hidden"
|
||||
>
|
||||
<div class="character-frame relative flex-1 flex items-center justify-center min-h-0">
|
||||
<div class="sprite-container {getAnimationClass()} h-full flex items-center justify-center">
|
||||
<img
|
||||
src="/sprites/{info.spriteFile}"
|
||||
alt="Hikari - {info.label}"
|
||||
class="character-sprite w-full h-auto object-contain"
|
||||
class="character-sprite h-full w-auto max-w-full object-contain"
|
||||
onerror={(e) => {
|
||||
const target = e.currentTarget as HTMLImageElement;
|
||||
target.src = "/sprites/placeholder.svg";
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="state-indicator absolute -bottom-2 left-1/2 transform -translate-x-1/2">
|
||||
<div
|
||||
class="px-3 py-1 rounded-full text-xs font-medium bg-[var(--bg-secondary)] border border-[var(--border-color)] text-[var(--accent-primary)]"
|
||||
>
|
||||
{info.label}
|
||||
</div>
|
||||
<div class="state-indicator mt-2">
|
||||
<div
|
||||
class="px-3 py-1 rounded-full text-xs font-medium bg-[var(--bg-secondary)] border border-[var(--border-color)] text-[var(--accent-primary)]"
|
||||
>
|
||||
{info.label}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="speech-bubble mt-4 max-w-xs">
|
||||
<div class="speech-bubble mt-2 max-w-xs flex-shrink-0">
|
||||
<div
|
||||
class="relative bg-[var(--bg-secondary)] rounded-lg px-4 py-2 border border-[var(--border-color)]"
|
||||
>
|
||||
@@ -93,37 +74,12 @@
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.anime-girl-container {
|
||||
transition: all 0.3s ease;
|
||||
}
|
||||
|
||||
.character-frame {
|
||||
border-radius: 50%;
|
||||
transition: box-shadow 0.3s ease;
|
||||
}
|
||||
|
||||
.shadow-thinking {
|
||||
box-shadow: 0 0 30px rgba(147, 51, 234, 0.5);
|
||||
}
|
||||
|
||||
.shadow-typing {
|
||||
box-shadow: 0 0 30px rgba(59, 130, 246, 0.5);
|
||||
}
|
||||
|
||||
.shadow-searching {
|
||||
box-shadow: 0 0 30px rgba(234, 179, 8, 0.5);
|
||||
}
|
||||
|
||||
.shadow-coding {
|
||||
box-shadow: 0 0 30px rgba(34, 197, 94, 0.5);
|
||||
}
|
||||
|
||||
.shadow-mcp {
|
||||
box-shadow: 0 0 30px rgba(236, 72, 153, 0.5);
|
||||
}
|
||||
|
||||
.shadow-success {
|
||||
box-shadow: 0 0 30px rgba(16, 185, 129, 0.5);
|
||||
}
|
||||
|
||||
.shadow-error {
|
||||
box-shadow: 0 0 30px rgba(239, 68, 68, 0.5);
|
||||
transition: all 0.3s ease;
|
||||
}
|
||||
|
||||
@keyframes idle-bob {
|
||||
|
||||
@@ -0,0 +1,209 @@
|
||||
<script lang="ts">
|
||||
import type { Attachment } from "$lib/types/messages";
|
||||
|
||||
/** Props for the attachment preview strip rendered above the input. */
interface Props {
  /** Attachments currently queued for the next message. */
  attachments: Attachment[];
  /** Invoked with the attachment id when its remove button is clicked. */
  onRemove: (id: string) => void;
}

let { attachments, onRemove }: Props = $props();
|
||||
|
||||
function formatFileSize(bytes: number): string {
|
||||
if (bytes < 1024) return `${bytes} B`;
|
||||
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
|
||||
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
|
||||
}
|
||||
|
||||
function getFileIcon(type: Attachment["type"]): string {
|
||||
switch (type) {
|
||||
case "image":
|
||||
return "🖼️";
|
||||
case "document":
|
||||
return "đź“„";
|
||||
default:
|
||||
return "📎";
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
{#if attachments.length > 0}
|
||||
<div class="attachment-preview-container">
|
||||
<div class="attachment-header">
|
||||
<span class="attachment-count"
|
||||
>{attachments.length} attachment{attachments.length !== 1 ? "s" : ""}</span
|
||||
>
|
||||
</div>
|
||||
<div class="attachment-list">
|
||||
{#each attachments as attachment (attachment.id)}
|
||||
<div class="attachment-item" class:is-image={attachment.type === "image"}>
|
||||
{#if attachment.type === "image" && attachment.previewUrl}
|
||||
<div class="image-preview">
|
||||
<img src={attachment.previewUrl} alt={attachment.filename} />
|
||||
</div>
|
||||
{:else}
|
||||
<div class="file-icon">
|
||||
{getFileIcon(attachment.type)}
|
||||
</div>
|
||||
{/if}
|
||||
<div class="attachment-info">
|
||||
<span class="attachment-filename" title={attachment.filename}>
|
||||
{attachment.filename}
|
||||
</span>
|
||||
<span class="attachment-size">
|
||||
{formatFileSize(attachment.size)}
|
||||
</span>
|
||||
</div>
|
||||
<button
|
||||
type="button"
|
||||
class="remove-button"
|
||||
onclick={() => onRemove(attachment.id)}
|
||||
title="Remove attachment"
|
||||
>
|
||||
<svg
|
||||
width="14"
|
||||
height="14"
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
stroke-width="2"
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
>
|
||||
<line x1="18" y1="6" x2="6" y2="18"></line>
|
||||
<line x1="6" y1="6" x2="18" y2="18"></line>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<style>
|
||||
.attachment-preview-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
padding: 8px;
|
||||
background: var(--bg-secondary);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 8px;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.attachment-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.attachment-count {
|
||||
font-size: 12px;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.attachment-list {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.attachment-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
padding: 6px 8px;
|
||||
background: var(--bg-tertiary);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 6px;
|
||||
max-width: 200px;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.attachment-item.is-image {
|
||||
flex-direction: column;
|
||||
padding: 4px;
|
||||
max-width: 120px;
|
||||
}
|
||||
|
||||
.image-preview {
|
||||
width: 100%;
|
||||
max-width: 110px;
|
||||
max-height: 80px;
|
||||
border-radius: 4px;
|
||||
overflow: hidden;
|
||||
background: var(--bg-primary);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.image-preview img {
|
||||
max-width: 100%;
|
||||
max-height: 80px;
|
||||
object-fit: contain;
|
||||
}
|
||||
|
||||
.file-icon {
|
||||
font-size: 24px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.attachment-info {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 2px;
|
||||
overflow: hidden;
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.is-image .attachment-info {
|
||||
width: 100%;
|
||||
padding: 0 4px;
|
||||
}
|
||||
|
||||
.attachment-filename {
|
||||
font-size: 12px;
|
||||
color: var(--text-primary);
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
.attachment-size {
|
||||
font-size: 10px;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.remove-button {
|
||||
position: absolute;
|
||||
top: -6px;
|
||||
right: -6px;
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
padding: 0;
|
||||
background: var(--bg-primary);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 50%;
|
||||
color: var(--text-secondary);
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
opacity: 0;
|
||||
transition:
|
||||
opacity 0.2s,
|
||||
background 0.2s,
|
||||
color 0.2s;
|
||||
}
|
||||
|
||||
.attachment-item:hover .remove-button {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.remove-button:hover {
|
||||
background: var(--error-color, #ef4444);
|
||||
border-color: var(--error-color, #ef4444);
|
||||
color: white;
|
||||
}
|
||||
</style>
|
||||
@@ -0,0 +1,497 @@
|
||||
<script lang="ts">
|
||||
import { onMount } from "svelte";
|
||||
import { clipboardStore, type ClipboardEntry } from "$lib/stores/clipboard";
|
||||
|
||||
// Panel visibility and callbacks supplied by the parent component.
export let isOpen = false;
export let onClose: () => void;
export let onInsert: (content: string) => void = () => {};

// Local UI state: search text, active language filter, the entry awaiting
// delete confirmation, and the entry most recently copied (for âś“ feedback).
let searchQuery = "";
let selectedLanguage: string | null = null;
let confirmingDeleteId: string | null = null;
let copiedId: string | null = null;

// Subscribe to derived stores
const filteredEntries = clipboardStore.filteredEntries;
const languagesStore = clipboardStore.languages;
const isLoadingStore = clipboardStore.isLoading;

// Unwrap the stores into plain reactive values for the template.
$: entries = $filteredEntries;
$: languages = $languagesStore;
$: isLoading = $isLoadingStore;

onMount(() => {
  if (isOpen) {
    clipboardStore.loadEntries();
  }
});

// Reload every time the panel is (re)opened.
// NOTE(review): if the component mounts while already open, both onMount and
// this reactive statement call loadEntries — confirm the double load is
// harmless/intended.
$: if (isOpen) {
  clipboardStore.loadEntries();
}
|
||||
|
||||
// Pushes the current search box text into the store's filter.
function handleSearch() {
  clipboardStore.setSearchQuery(searchQuery);
}

// Applies a language filter; null means "show all languages".
function handleLanguageFilter(lang: string | null) {
  selectedLanguage = lang;
  clipboardStore.setLanguageFilter(lang);
}

// Copies an entry back to the system clipboard; on success shows a ✓ badge
// on that entry for 2 seconds.
async function handleCopy(entry: ClipboardEntry) {
  const success = await clipboardStore.copyToClipboard(entry.content);
  if (success) {
    copiedId = entry.id;
    setTimeout(() => {
      copiedId = null;
    }, 2000);
  }
}

// Inserts the entry's content into the parent input and closes the panel.
function handleInsert(entry: ClipboardEntry) {
  onInsert(entry.content);
  onClose();
}

// Deletes an entry and resets the inline confirmation state.
async function handleDelete(id: string) {
  await clipboardStore.deleteEntry(id);
  confirmingDeleteId = null;
}

// Toggles an entry's pinned flag (pinned entries survive "Clear").
async function handleTogglePin(id: string) {
  await clipboardStore.togglePin(id);
}

// Clears all non-pinned history after a native confirm() prompt.
async function handleClearHistory() {
  if (confirm("Clear all non-pinned clipboard entries?")) {
    await clipboardStore.clearHistory();
  }
}
|
||||
|
||||
function truncateContent(content: string, maxLength: number = 200): string {
|
||||
if (content.length <= maxLength) return content;
|
||||
return content.substring(0, maxLength) + "...";
|
||||
}
|
||||
|
||||
function getLanguageIcon(language: string | null): string {
|
||||
const icons: Record<string, string> = {
|
||||
typescript: "TS",
|
||||
javascript: "JS",
|
||||
python: "PY",
|
||||
rust: "RS",
|
||||
go: "GO",
|
||||
java: "JV",
|
||||
c: "C",
|
||||
cpp: "C++",
|
||||
csharp: "C#",
|
||||
php: "PHP",
|
||||
ruby: "RB",
|
||||
swift: "SW",
|
||||
kotlin: "KT",
|
||||
sql: "SQL",
|
||||
html: "HTML",
|
||||
css: "CSS",
|
||||
json: "JSON",
|
||||
yaml: "YAML",
|
||||
bash: "SH",
|
||||
shell: "SH",
|
||||
};
|
||||
return language ? icons[language.toLowerCase()] || language.toUpperCase().slice(0, 3) : "TXT";
|
||||
}
|
||||
</script>
|
||||
|
||||
{#if isOpen}
|
||||
<!-- svelte-ignore a11y_no_static_element_interactions -->
|
||||
<!-- svelte-ignore a11y_click_events_have_key_events -->
|
||||
<div class="clipboard-overlay" on:click={onClose}>
|
||||
<div class="clipboard-panel" on:click|stopPropagation>
|
||||
<div class="clipboard-header">
|
||||
<h2>đź“‹ Clipboard History</h2>
|
||||
<div class="header-actions">
|
||||
{#if entries.length > 0}
|
||||
<button
|
||||
class="clear-btn"
|
||||
on:click={handleClearHistory}
|
||||
title="Clear non-pinned entries"
|
||||
>
|
||||
🗑️ Clear
|
||||
</button>
|
||||
{/if}
|
||||
<button class="close-btn" on:click={onClose}>âś•</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="clipboard-controls">
|
||||
<div class="search-box">
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Search clipboard..."
|
||||
bind:value={searchQuery}
|
||||
on:input={handleSearch}
|
||||
/>
|
||||
</div>
|
||||
<div class="language-filter">
|
||||
<button
|
||||
class="filter-btn"
|
||||
class:active={selectedLanguage === null}
|
||||
on:click={() => handleLanguageFilter(null)}
|
||||
>
|
||||
All
|
||||
</button>
|
||||
{#each languages as lang (lang)}
|
||||
<button
|
||||
class="filter-btn"
|
||||
class:active={selectedLanguage === lang}
|
||||
on:click={() => handleLanguageFilter(lang)}
|
||||
>
|
||||
{getLanguageIcon(lang)}
|
||||
</button>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="clipboard-content">
|
||||
{#if isLoading}
|
||||
<div class="loading">Loading...</div>
|
||||
{:else if entries.length === 0}
|
||||
<div class="empty-state">
|
||||
<p>đź“ No clipboard entries yet</p>
|
||||
<p class="hint">
|
||||
Copy code from Claude's responses or use the copy button on code blocks to save them
|
||||
here.
|
||||
</p>
|
||||
</div>
|
||||
{:else}
|
||||
<div class="entries-list">
|
||||
{#each entries as entry (entry.id)}
|
||||
<div class="entry" class:pinned={entry.is_pinned}>
|
||||
<div class="entry-header">
|
||||
<div class="entry-meta">
|
||||
<span class="language-badge">{getLanguageIcon(entry.language)}</span>
|
||||
<span class="timestamp">{clipboardStore.formatTimestamp(entry.timestamp)}</span>
|
||||
{#if entry.is_pinned}
|
||||
<span class="pin-badge">📌</span>
|
||||
{/if}
|
||||
</div>
|
||||
<div class="entry-actions">
|
||||
<button
|
||||
class="action-btn"
|
||||
title={entry.is_pinned ? "Unpin" : "Pin"}
|
||||
on:click={() => handleTogglePin(entry.id)}
|
||||
>
|
||||
{entry.is_pinned ? "📌" : "📍"}
|
||||
</button>
|
||||
<button
|
||||
class="action-btn"
|
||||
title="Copy to clipboard"
|
||||
on:click={() => handleCopy(entry)}
|
||||
>
|
||||
{copiedId === entry.id ? "âś“" : "đź“‹"}
|
||||
</button>
|
||||
<button
|
||||
class="action-btn insert-btn"
|
||||
title="Insert"
|
||||
on:click={() => handleInsert(entry)}
|
||||
>
|
||||
➡️
|
||||
</button>
|
||||
{#if confirmingDeleteId === entry.id}
|
||||
<button
|
||||
class="action-btn confirm-delete"
|
||||
on:click={() => handleDelete(entry.id)}
|
||||
>
|
||||
âś“
|
||||
</button>
|
||||
<button class="action-btn" on:click={() => (confirmingDeleteId = null)}>
|
||||
âś•
|
||||
</button>
|
||||
{:else}
|
||||
<button
|
||||
class="action-btn delete-btn"
|
||||
title="Delete"
|
||||
on:click={() => (confirmingDeleteId = entry.id)}
|
||||
>
|
||||
🗑️
|
||||
</button>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
<pre class="entry-content"><code>{truncateContent(entry.content)}</code></pre>
|
||||
{#if entry.source}
|
||||
<div class="entry-source">From: {entry.source}</div>
|
||||
{/if}
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<style>
|
||||
.clipboard-overlay {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background: rgba(0, 0, 0, 0.5);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
z-index: 1000;
|
||||
}
|
||||
|
||||
.clipboard-panel {
|
||||
background: var(--bg-primary);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 12px;
|
||||
width: 90%;
|
||||
max-width: 700px;
|
||||
max-height: 80vh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.clipboard-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
padding: 16px 20px;
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
.clipboard-header h2 {
|
||||
margin: 0;
|
||||
font-size: 18px;
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.header-actions {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.clear-btn {
|
||||
background: transparent;
|
||||
border: 1px solid var(--border-color);
|
||||
color: var(--text-secondary);
|
||||
padding: 6px 12px;
|
||||
border-radius: 6px;
|
||||
cursor: pointer;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.clear-btn:hover {
|
||||
background: var(--bg-secondary);
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.close-btn {
|
||||
background: transparent;
|
||||
border: none;
|
||||
color: var(--text-secondary);
|
||||
font-size: 20px;
|
||||
cursor: pointer;
|
||||
padding: 4px 8px;
|
||||
}
|
||||
|
||||
.close-btn:hover {
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.clipboard-controls {
|
||||
padding: 12px 20px;
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.search-box input {
|
||||
width: 100%;
|
||||
padding: 10px 14px;
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 8px;
|
||||
background: var(--bg-secondary);
|
||||
color: var(--text-primary);
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.search-box input:focus {
|
||||
outline: none;
|
||||
border-color: var(--trans-pink);
|
||||
box-shadow: 0 0 0 2px rgba(245, 169, 184, 0.2);
|
||||
}
|
||||
|
||||
.language-filter {
|
||||
display: flex;
|
||||
gap: 6px;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.filter-btn {
|
||||
background: var(--bg-secondary);
|
||||
border: 1px solid var(--border-color);
|
||||
color: var(--text-secondary);
|
||||
padding: 4px 10px;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
font-size: 11px;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.filter-btn:hover {
|
||||
background: var(--bg-tertiary);
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.filter-btn.active {
|
||||
background: var(--trans-gradient-vibrant);
|
||||
color: #1a1a2e;
|
||||
border-color: transparent;
|
||||
}
|
||||
|
||||
.clipboard-content {
|
||||
flex: 1;
|
||||
overflow-y: auto;
|
||||
padding: 16px 20px;
|
||||
}
|
||||
|
||||
.loading,
|
||||
.empty-state {
|
||||
text-align: center;
|
||||
padding: 40px 20px;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.empty-state p {
|
||||
margin: 8px 0;
|
||||
}
|
||||
|
||||
.empty-state .hint {
|
||||
font-size: 13px;
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
.entries-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.entry {
|
||||
background: var(--bg-secondary);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 8px;
|
||||
padding: 12px;
|
||||
transition: border-color 0.2s;
|
||||
}
|
||||
|
||||
.entry:hover {
|
||||
border-color: var(--trans-pink);
|
||||
}
|
||||
|
||||
.entry.pinned {
|
||||
border-color: var(--trans-blue);
|
||||
background: linear-gradient(
|
||||
135deg,
|
||||
rgba(91, 206, 250, 0.05) 0%,
|
||||
rgba(245, 169, 184, 0.05) 100%
|
||||
);
|
||||
}
|
||||
|
||||
.entry-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.entry-meta {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.language-badge {
|
||||
background: var(--bg-tertiary);
|
||||
color: var(--text-secondary);
|
||||
padding: 2px 6px;
|
||||
border-radius: 4px;
|
||||
font-size: 10px;
|
||||
font-weight: 600;
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
.timestamp {
|
||||
color: var(--text-tertiary);
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
.pin-badge {
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.entry-actions {
|
||||
display: flex;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.action-btn {
|
||||
background: transparent;
|
||||
border: none;
|
||||
padding: 4px 6px;
|
||||
cursor: pointer;
|
||||
font-size: 14px;
|
||||
opacity: 0.6;
|
||||
transition: opacity 0.2s;
|
||||
}
|
||||
|
||||
.action-btn:hover {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.insert-btn {
|
||||
background: var(--trans-gradient-vibrant);
|
||||
border-radius: 4px;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.delete-btn:hover {
|
||||
color: #ff6b6b;
|
||||
}
|
||||
|
||||
.confirm-delete {
|
||||
color: #ff6b6b;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.entry-content {
|
||||
margin: 0;
|
||||
padding: 10px;
|
||||
background: var(--bg-primary);
|
||||
border-radius: 6px;
|
||||
font-size: 12px;
|
||||
line-height: 1.5;
|
||||
overflow-x: auto;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.entry-content code {
|
||||
font-family: "JetBrains Mono", "Fira Code", monospace;
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.entry-source {
|
||||
margin-top: 8px;
|
||||
font-size: 11px;
|
||||
color: var(--text-tertiary);
|
||||
font-style: italic;
|
||||
}
|
||||
</style>
|
||||
@@ -0,0 +1,107 @@
|
||||
<script lang="ts">
|
||||
interface Props {
|
||||
isOpen: boolean;
|
||||
tabName: string;
|
||||
onConfirm: () => void;
|
||||
onCancel: () => void;
|
||||
}
|
||||
|
||||
const { isOpen, tabName, onConfirm, onCancel }: Props = $props();
|
||||
|
||||
function handleKeydown(event: KeyboardEvent) {
|
||||
if (!isOpen) return;
|
||||
|
||||
if (event.key === "Enter") {
|
||||
event.preventDefault();
|
||||
onConfirm();
|
||||
} else if (event.key === "Escape") {
|
||||
event.preventDefault();
|
||||
onCancel();
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<svelte:window onkeydown={handleKeydown} />
|
||||
|
||||
{#if isOpen}
|
||||
<div
|
||||
class="fixed inset-0 bg-black/50 backdrop-blur-sm z-50 flex items-center justify-center p-4"
|
||||
onclick={onCancel}
|
||||
role="button"
|
||||
tabindex="0"
|
||||
onkeydown={(e) => e.key === " " && onCancel()}
|
||||
>
|
||||
<div
|
||||
class="bg-[var(--bg-primary)] border border-[var(--border-color)] rounded-lg shadow-xl max-w-md w-full"
|
||||
onclick={(e) => e.stopPropagation()}
|
||||
onkeydown={(e) => e.stopPropagation()}
|
||||
role="dialog"
|
||||
aria-labelledby="confirm-title"
|
||||
aria-describedby="confirm-message"
|
||||
tabindex="-1"
|
||||
>
|
||||
<div class="p-6">
|
||||
<div class="flex items-start gap-4">
|
||||
<div
|
||||
class="w-10 h-10 rounded-lg bg-yellow-500/20 flex items-center justify-center flex-shrink-0"
|
||||
>
|
||||
<svg
|
||||
class="w-6 h-6 text-yellow-500"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
>
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
stroke-width="2"
|
||||
d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
<div class="flex-1">
|
||||
<h3 id="confirm-title" class="text-lg font-semibold text-[var(--text-primary)] mb-1">
|
||||
Close Connected Tab?
|
||||
</h3>
|
||||
<p id="confirm-message" class="text-sm text-[var(--text-secondary)]">
|
||||
The tab "{tabName}" is currently connected to Claude. Are you sure you want to close
|
||||
it? This will disconnect the session.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex gap-3 mt-6 justify-end">
|
||||
<button
|
||||
onclick={onCancel}
|
||||
class="px-4 py-2 text-sm font-medium text-gray-300 bg-[var(--bg-secondary)] hover:bg-[var(--bg-tertiary)] border border-[var(--border-color)] rounded-lg transition-colors"
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
<button
|
||||
onclick={onConfirm}
|
||||
class="px-4 py-2 text-sm font-medium text-white bg-red-600 hover:bg-red-700 rounded-lg transition-colors"
|
||||
>
|
||||
Close Tab
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<style>
|
||||
[role="dialog"] {
|
||||
animation: slideIn 0.2s ease-out;
|
||||
}
|
||||
|
||||
@keyframes slideIn {
|
||||
from {
|
||||
opacity: 0;
|
||||
transform: scale(0.95) translateY(10px);
|
||||
}
|
||||
to {
|
||||
opacity: 1;
|
||||
transform: scale(1) translateY(0);
|
||||
}
|
||||
}
|
||||
</style>
|
||||
@@ -0,0 +1,563 @@
|
||||
<script lang="ts">
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { get } from "svelte/store";
|
||||
import { claudeStore, isClaudeProcessing } from "$lib/stores/claude";
|
||||
import { characterState, characterInfo } from "$lib/stores/character";
|
||||
import { isStreamerMode } from "$lib/stores/config";
|
||||
import { handleNewUserMessage } from "$lib/notifications/rules";
|
||||
import type { CharacterState, CharacterStateInfo } from "$lib/types/states";
|
||||
|
||||
interface Props {
|
||||
onExpand: () => void;
|
||||
}
|
||||
|
||||
let { onExpand }: Props = $props();
|
||||
|
||||
let inputValue = $state("");
|
||||
let isSubmitting = $state(false);
|
||||
let isConnected = $state(false);
|
||||
let isProcessing = $state(false);
|
||||
let streamerModeActive = $state(false);
|
||||
let currentState: CharacterState = $state("idle");
|
||||
let info: CharacterStateInfo = $state({
|
||||
state: "idle",
|
||||
label: "Ready",
|
||||
description: "Waiting for your command~",
|
||||
spriteFile: "idle.png",
|
||||
});
|
||||
|
||||
// Recent messages for compact display
|
||||
let recentMessages = $state<Array<{ type: string; content: string }>>([]);
|
||||
const MAX_RECENT_MESSAGES = 3;
|
||||
|
||||
claudeStore.connectionStatus.subscribe((status) => {
|
||||
isConnected = status === "connected";
|
||||
});
|
||||
|
||||
isClaudeProcessing.subscribe((processing) => {
|
||||
isProcessing = processing;
|
||||
});
|
||||
|
||||
isStreamerMode.subscribe((value) => {
|
||||
streamerModeActive = value;
|
||||
});
|
||||
|
||||
characterState.subscribe((state) => {
|
||||
currentState = state;
|
||||
});
|
||||
|
||||
characterInfo.subscribe((i) => {
|
||||
info = i;
|
||||
});
|
||||
|
||||
// Track recent terminal output
|
||||
claudeStore.terminalLines.subscribe((lines) => {
|
||||
const recent = lines.slice(-MAX_RECENT_MESSAGES).map((line) => ({
|
||||
type: line.type,
|
||||
content: line.content.substring(0, 100) + (line.content.length > 100 ? "..." : ""),
|
||||
}));
|
||||
recentMessages = recent;
|
||||
});
|
||||
|
||||
function getAnimationClass(): string {
|
||||
switch (currentState) {
|
||||
case "thinking":
|
||||
return "animate-thinking";
|
||||
case "typing":
|
||||
return "animate-typing";
|
||||
case "searching":
|
||||
return "animate-searching";
|
||||
case "success":
|
||||
return "animate-celebrate";
|
||||
case "error":
|
||||
return "animate-shake";
|
||||
default:
|
||||
return "animate-idle";
|
||||
}
|
||||
}
|
||||
|
||||
function getStateGlow(): string {
|
||||
switch (currentState) {
|
||||
case "thinking":
|
||||
return "glow-thinking";
|
||||
case "typing":
|
||||
return "glow-typing";
|
||||
case "success":
|
||||
return "glow-success";
|
||||
case "error":
|
||||
return "glow-error";
|
||||
default:
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSubmit(event: Event) {
|
||||
event.preventDefault();
|
||||
|
||||
const message = inputValue.trim();
|
||||
if (!message || isSubmitting || !isConnected) return;
|
||||
|
||||
isSubmitting = true;
|
||||
inputValue = "";
|
||||
|
||||
handleNewUserMessage();
|
||||
claudeStore.addLine("user", message);
|
||||
characterState.setState("thinking");
|
||||
|
||||
try {
|
||||
const conversationId = get(claudeStore.activeConversationId);
|
||||
if (!conversationId) {
|
||||
throw new Error("No active conversation");
|
||||
}
|
||||
await invoke("send_prompt", {
|
||||
conversationId,
|
||||
message,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Failed to send prompt:", error);
|
||||
claudeStore.addLine("error", `Failed to send: ${error}`);
|
||||
characterState.setTemporaryState("error", 3000);
|
||||
} finally {
|
||||
isSubmitting = false;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleInterrupt() {
|
||||
try {
|
||||
const conversationId = get(claudeStore.activeConversationId);
|
||||
if (!conversationId) return;
|
||||
|
||||
await invoke("interrupt_claude", { conversationId });
|
||||
claudeStore.addLine("system", "Interrupted");
|
||||
characterState.setState("idle");
|
||||
} catch (error) {
|
||||
console.error("Failed to interrupt:", error);
|
||||
}
|
||||
}
|
||||
|
||||
function handleKeyDown(event: KeyboardEvent) {
|
||||
if (event.key === "Enter" && !event.shiftKey) {
|
||||
handleSubmit(event);
|
||||
}
|
||||
// Escape expands to full mode
|
||||
if (event.key === "Escape") {
|
||||
onExpand();
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<div class="compact-container {getStateGlow()}">
|
||||
<!-- Character sprite (smaller) -->
|
||||
<div class="compact-character">
|
||||
<div class="sprite-wrapper {getAnimationClass()}">
|
||||
<img
|
||||
src="/sprites/{info.spriteFile}"
|
||||
alt="Hikari - {info.label}"
|
||||
class="compact-sprite"
|
||||
onerror={(e) => {
|
||||
const target = e.currentTarget as HTMLImageElement;
|
||||
target.src = "/sprites/placeholder.svg";
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<div class="state-badge">{info.label}</div>
|
||||
</div>
|
||||
|
||||
<!-- Recent message preview -->
|
||||
<div class="message-preview">
|
||||
{#if recentMessages.length > 0}
|
||||
{#each recentMessages.slice(-1) as msg (msg.content)}
|
||||
<div class="preview-message {msg.type}">
|
||||
{msg.content}
|
||||
</div>
|
||||
{/each}
|
||||
{:else}
|
||||
<div class="preview-message system">Ask me anything~</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<!-- Compact input -->
|
||||
<form onsubmit={handleSubmit} class="compact-input-form">
|
||||
<input
|
||||
type="text"
|
||||
bind:value={inputValue}
|
||||
onkeydown={handleKeyDown}
|
||||
placeholder={isConnected ? "Quick message..." : "Not connected"}
|
||||
disabled={isSubmitting || !isConnected}
|
||||
class="compact-input"
|
||||
/>
|
||||
|
||||
<div class="compact-buttons">
|
||||
{#if isProcessing}
|
||||
<button type="button" onclick={handleInterrupt} class="compact-btn stop-btn" title="Stop">
|
||||
â–
|
||||
</button>
|
||||
{:else}
|
||||
<button
|
||||
type="submit"
|
||||
disabled={!isConnected || isSubmitting || !inputValue.trim()}
|
||||
class="compact-btn send-btn"
|
||||
title="Send"
|
||||
>
|
||||
→
|
||||
</button>
|
||||
{/if}
|
||||
|
||||
<button type="button" onclick={onExpand} class="compact-btn expand-btn" title="Expand (Esc)">
|
||||
<svg
|
||||
width="14"
|
||||
height="14"
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
stroke-width="2"
|
||||
>
|
||||
<polyline points="15 3 21 3 21 9"></polyline>
|
||||
<polyline points="9 21 3 21 3 15"></polyline>
|
||||
<line x1="21" y1="3" x2="14" y2="10"></line>
|
||||
<line x1="3" y1="21" x2="10" y2="14"></line>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<!-- Streamer mode indicator -->
|
||||
{#if streamerModeActive}
|
||||
<div class="compact-live-indicator" title="Streamer mode active"></div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.compact-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
background: linear-gradient(180deg, var(--bg-secondary) 0%, var(--bg-primary) 100%);
|
||||
padding: 12px;
|
||||
gap: 8px;
|
||||
position: relative;
|
||||
transition: all 0.3s ease;
|
||||
}
|
||||
|
||||
.compact-container::before {
|
||||
content: "";
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
padding: 2px;
|
||||
background: transparent;
|
||||
-webkit-mask:
|
||||
linear-gradient(#fff 0 0) content-box,
|
||||
linear-gradient(#fff 0 0);
|
||||
mask:
|
||||
linear-gradient(#fff 0 0) content-box,
|
||||
linear-gradient(#fff 0 0);
|
||||
-webkit-mask-composite: xor;
|
||||
mask-composite: exclude;
|
||||
opacity: 0;
|
||||
transition: opacity 0.5s ease;
|
||||
pointer-events: none;
|
||||
border-radius: 8px;
|
||||
}
|
||||
|
||||
.glow-thinking {
|
||||
box-shadow: inset 0 0 30px rgba(147, 51, 234, 0.15);
|
||||
}
|
||||
|
||||
.glow-thinking::before {
|
||||
background: linear-gradient(180deg, #9333ea, var(--trans-blue));
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.glow-typing {
|
||||
box-shadow: inset 0 0 30px rgba(59, 130, 246, 0.15);
|
||||
}
|
||||
|
||||
.glow-typing::before {
|
||||
background: linear-gradient(180deg, #3b82f6, var(--trans-pink));
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.glow-success {
|
||||
box-shadow: inset 0 0 30px rgba(16, 185, 129, 0.15);
|
||||
}
|
||||
|
||||
.glow-success::before {
|
||||
background: linear-gradient(180deg, #10b981, var(--trans-blue));
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.glow-error {
|
||||
box-shadow: inset 0 0 30px rgba(239, 68, 68, 0.15);
|
||||
}
|
||||
|
||||
.glow-error::before {
|
||||
background: linear-gradient(180deg, #ef4444, var(--trans-pink));
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.compact-character {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
flex: 1;
|
||||
min-height: 0;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.sprite-wrapper {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
.compact-sprite {
|
||||
max-height: 100%;
|
||||
max-width: 100%;
|
||||
object-fit: contain;
|
||||
}
|
||||
|
||||
.state-badge {
|
||||
padding: 2px 8px;
|
||||
border-radius: 999px;
|
||||
font-size: 10px;
|
||||
font-weight: 600;
|
||||
background: var(--bg-tertiary);
|
||||
border: 1px solid var(--border-color);
|
||||
color: var(--accent-primary);
|
||||
}
|
||||
|
||||
.message-preview {
|
||||
min-height: 24px;
|
||||
max-height: 48px;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.preview-message {
|
||||
font-size: 11px;
|
||||
line-height: 1.3;
|
||||
color: var(--text-secondary);
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
display: -webkit-box;
|
||||
-webkit-line-clamp: 2;
|
||||
line-clamp: 2;
|
||||
-webkit-box-orient: vertical;
|
||||
padding: 4px 8px;
|
||||
background: var(--bg-tertiary);
|
||||
border-radius: 6px;
|
||||
}
|
||||
|
||||
.preview-message.user {
|
||||
color: var(--trans-pink);
|
||||
}
|
||||
|
||||
.preview-message.assistant {
|
||||
color: var(--trans-blue);
|
||||
}
|
||||
|
||||
.preview-message.error {
|
||||
color: #ef4444;
|
||||
}
|
||||
|
||||
.compact-input-form {
|
||||
display: flex;
|
||||
gap: 6px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.compact-input {
|
||||
flex: 1;
|
||||
padding: 8px 12px;
|
||||
font-size: 12px;
|
||||
background: var(--bg-secondary);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 6px;
|
||||
color: var(--text-primary);
|
||||
outline: none;
|
||||
transition: all 0.2s;
|
||||
}
|
||||
|
||||
.compact-input:focus {
|
||||
border-color: var(--trans-blue);
|
||||
box-shadow: 0 0 0 2px rgba(91, 206, 250, 0.2);
|
||||
}
|
||||
|
||||
.compact-input:disabled {
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.compact-input::placeholder {
|
||||
color: var(--text-tertiary);
|
||||
}
|
||||
|
||||
.compact-buttons {
|
||||
display: flex;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.compact-btn {
|
||||
width: 28px;
|
||||
height: 28px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
border-radius: 6px;
|
||||
border: 1px solid var(--border-color);
|
||||
background: var(--bg-secondary);
|
||||
color: var(--text-secondary);
|
||||
cursor: pointer;
|
||||
transition: all 0.2s;
|
||||
font-size: 14px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.compact-btn:hover:not(:disabled) {
|
||||
background: var(--bg-tertiary);
|
||||
border-color: var(--accent-primary);
|
||||
color: var(--accent-primary);
|
||||
}
|
||||
|
||||
.compact-btn:disabled {
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.send-btn:not(:disabled) {
|
||||
background: var(--trans-gradient-vibrant);
|
||||
border-color: transparent;
|
||||
color: #1a1a2e;
|
||||
}
|
||||
|
||||
.stop-btn {
|
||||
background: rgba(239, 68, 68, 0.2);
|
||||
border-color: rgb(239, 68, 68);
|
||||
color: rgb(248, 113, 113);
|
||||
}
|
||||
|
||||
.stop-btn:hover {
|
||||
background: rgba(239, 68, 68, 0.3);
|
||||
}
|
||||
|
||||
.expand-btn {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
.compact-live-indicator {
|
||||
position: absolute;
|
||||
top: 8px;
|
||||
right: 8px;
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
border-radius: 50%;
|
||||
background: rgb(239, 68, 68);
|
||||
animation: pulse-live 1.5s ease-in-out infinite;
|
||||
}
|
||||
|
||||
@keyframes pulse-live {
|
||||
0%,
|
||||
100% {
|
||||
opacity: 1;
|
||||
box-shadow: 0 0 0 0 rgba(239, 68, 68, 0.4);
|
||||
}
|
||||
50% {
|
||||
opacity: 0.7;
|
||||
box-shadow: 0 0 0 4px rgba(239, 68, 68, 0);
|
||||
}
|
||||
}
|
||||
|
||||
/* Character animations (smaller scale for compact) */
|
||||
@keyframes idle-bob {
|
||||
0%,
|
||||
100% {
|
||||
transform: translateY(0);
|
||||
}
|
||||
50% {
|
||||
transform: translateY(-3px);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes thinking-sway {
|
||||
0%,
|
||||
100% {
|
||||
transform: rotate(-1deg);
|
||||
}
|
||||
50% {
|
||||
transform: rotate(1deg);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes typing-bounce {
|
||||
0%,
|
||||
100% {
|
||||
transform: translateY(0) scale(1);
|
||||
}
|
||||
50% {
|
||||
transform: translateY(-2px) scale(1.01);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes searching-look {
|
||||
0%,
|
||||
100% {
|
||||
transform: translateX(0);
|
||||
}
|
||||
25% {
|
||||
transform: translateX(-3px);
|
||||
}
|
||||
75% {
|
||||
transform: translateX(3px);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes celebrate {
|
||||
0%,
|
||||
100% {
|
||||
transform: scale(1) rotate(0deg);
|
||||
}
|
||||
50% {
|
||||
transform: scale(1.05) rotate(3deg);
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes shake {
|
||||
0%,
|
||||
100% {
|
||||
transform: translateX(0);
|
||||
}
|
||||
25%,
|
||||
75% {
|
||||
transform: translateX(-3px);
|
||||
}
|
||||
50% {
|
||||
transform: translateX(3px);
|
||||
}
|
||||
}
|
||||
|
||||
.animate-idle {
|
||||
animation: idle-bob 3s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.animate-thinking {
|
||||
animation: thinking-sway 2s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.animate-typing {
|
||||
animation: typing-bounce 0.5s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.animate-searching {
|
||||
animation: searching-look 1.5s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.animate-celebrate {
|
||||
animation: celebrate 0.8s ease-in-out;
|
||||
}
|
||||
|
||||
.animate-shake {
|
||||
animation: shake 0.5s ease-in-out;
|
||||
}
|
||||
</style>
|
||||
@@ -3,49 +3,35 @@
|
||||
import { onMount } from "svelte";
|
||||
import type { Conversation } from "$lib/stores/conversations";
|
||||
import { SvelteMap } from "svelte/reactivity";
|
||||
import CloseTabConfirmModal from "./CloseTabConfirmModal.svelte";
|
||||
|
||||
let conversations: Map<string, Conversation> = new Map();
|
||||
let activeConversationId: string | null = null;
|
||||
let editingTabId: string | null = null;
|
||||
let editingName = "";
|
||||
// Use store subscriptions with $ syntax
|
||||
const conversations = $derived(claudeStore.conversations);
|
||||
const activeConversationId = $derived(claudeStore.activeConversationId);
|
||||
|
||||
// Track which conversation actually has the Claude connection
|
||||
let connectedConversationId: string | null = null;
|
||||
let editingTabId = $state<string | null>(null);
|
||||
let editingName = $state("");
|
||||
|
||||
// Track last seen message count for each conversation
|
||||
let lastSeenMessageCount = new SvelteMap<string, number>();
|
||||
|
||||
claudeStore.conversations.subscribe((convs) => {
|
||||
conversations = convs;
|
||||
// Confirmation modal state
|
||||
let showConfirmModal = $state(false);
|
||||
let tabToClose = $state<string | null>(null);
|
||||
let tabToCloseName = $state("");
|
||||
|
||||
// Update the last seen count for the active conversation
|
||||
if (activeConversationId) {
|
||||
const activeConv = convs.get(activeConversationId);
|
||||
// Update last seen count when active conversation changes
|
||||
$effect(() => {
|
||||
if ($activeConversationId) {
|
||||
const activeConv = $conversations.get($activeConversationId);
|
||||
if (activeConv) {
|
||||
lastSeenMessageCount.set(activeConversationId, activeConv.terminalLines.length);
|
||||
lastSeenMessageCount.set($activeConversationId, activeConv.terminalLines.length);
|
||||
// Trigger reactivity
|
||||
lastSeenMessageCount = lastSeenMessageCount;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
claudeStore.activeConversationId.subscribe((id) => {
|
||||
activeConversationId = id;
|
||||
});
|
||||
|
||||
// Find the connected conversation
|
||||
$: {
|
||||
let foundConnected = false;
|
||||
for (const [id, conv] of conversations) {
|
||||
if (conv.connectionStatus === "connected" || conv.connectionStatus === "connecting") {
|
||||
connectedConversationId = id;
|
||||
foundConnected = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!foundConnected) {
|
||||
connectedConversationId = null;
|
||||
}
|
||||
}
|
||||
|
||||
function createNewTab() {
|
||||
claudeStore.createConversation();
|
||||
}
|
||||
@@ -57,7 +43,7 @@
|
||||
await claudeStore.switchConversation(id);
|
||||
|
||||
// Mark messages as seen when switching to this tab
|
||||
const conv = conversations.get(id);
|
||||
const conv = $conversations.get(id);
|
||||
if (conv) {
|
||||
lastSeenMessageCount.set(id, conv.terminalLines.length);
|
||||
// Trigger reactivity
|
||||
@@ -67,11 +53,35 @@
|
||||
|
||||
function deleteTab(id: string, event: MouseEvent) {
|
||||
event.stopPropagation();
|
||||
if (conversations.size > 1) {
|
||||
claudeStore.deleteConversation(id);
|
||||
if ($conversations.size > 1) {
|
||||
const conversation = $conversations.get(id);
|
||||
if (conversation && conversation.connectionStatus === "connected") {
|
||||
// Show confirmation modal for connected tabs
|
||||
tabToClose = id;
|
||||
tabToCloseName = conversation.name;
|
||||
showConfirmModal = true;
|
||||
} else {
|
||||
// Close disconnected tabs immediately
|
||||
claudeStore.deleteConversation(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function confirmCloseTab() {
|
||||
if (tabToClose) {
|
||||
claudeStore.deleteConversation(tabToClose);
|
||||
}
|
||||
showConfirmModal = false;
|
||||
tabToClose = null;
|
||||
tabToCloseName = "";
|
||||
}
|
||||
|
||||
function cancelCloseTab() {
|
||||
showConfirmModal = false;
|
||||
tabToClose = null;
|
||||
tabToCloseName = "";
|
||||
}
|
||||
|
||||
function startEditing(id: string, name: string, event: MouseEvent) {
|
||||
event.stopPropagation();
|
||||
editingTabId = id;
|
||||
@@ -105,7 +115,7 @@
|
||||
}
|
||||
|
||||
function hasUnreadMessages(id: string, conversation: Conversation): boolean {
|
||||
if (id === activeConversationId) return false; // Active tab never has unread
|
||||
if (id === $activeConversationId) return false; // Active tab never has unread
|
||||
const lastSeen = lastSeenMessageCount.get(id) || 0;
|
||||
return conversation.terminalLines.length > lastSeen;
|
||||
}
|
||||
@@ -116,6 +126,8 @@
|
||||
} else if (event.key === "Escape") {
|
||||
editingTabId = null;
|
||||
editingName = "";
|
||||
} else if (event.key === " ") {
|
||||
event.stopPropagation();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -137,15 +149,24 @@
|
||||
// Ctrl/Cmd + W: Close current tab
|
||||
else if ((event.ctrlKey || event.metaKey) && event.key === "w") {
|
||||
event.preventDefault();
|
||||
if (activeConversationId && conversations.size > 1) {
|
||||
claudeStore.deleteConversation(activeConversationId);
|
||||
if ($activeConversationId && $conversations.size > 1) {
|
||||
const conversation = $conversations.get($activeConversationId);
|
||||
if (conversation && conversation.connectionStatus === "connected") {
|
||||
// Show confirmation modal for connected tabs
|
||||
tabToClose = $activeConversationId;
|
||||
tabToCloseName = conversation.name;
|
||||
showConfirmModal = true;
|
||||
} else {
|
||||
// Close disconnected tabs immediately
|
||||
claudeStore.deleteConversation($activeConversationId);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Ctrl/Cmd + Tab: Next tab
|
||||
else if ((event.ctrlKey || event.metaKey) && event.key === "Tab" && !event.shiftKey) {
|
||||
event.preventDefault();
|
||||
const tabs = Array.from(conversations.keys());
|
||||
const currentIndex = tabs.findIndex((id) => id === activeConversationId);
|
||||
const tabs = Array.from($conversations.keys());
|
||||
const currentIndex = tabs.findIndex((id) => id === $activeConversationId);
|
||||
if (currentIndex !== -1) {
|
||||
const nextIndex = (currentIndex + 1) % tabs.length;
|
||||
claudeStore.switchConversation(tabs[nextIndex]);
|
||||
@@ -154,8 +175,8 @@
|
||||
// Ctrl/Cmd + Shift + Tab: Previous tab
|
||||
else if ((event.ctrlKey || event.metaKey) && event.key === "Tab" && event.shiftKey) {
|
||||
event.preventDefault();
|
||||
const tabs = Array.from(conversations.keys());
|
||||
const currentIndex = tabs.findIndex((id) => id === activeConversationId);
|
||||
const tabs = Array.from($conversations.keys());
|
||||
const currentIndex = tabs.findIndex((id) => id === $activeConversationId);
|
||||
if (currentIndex !== -1) {
|
||||
const prevIndex = (currentIndex - 1 + tabs.length) % tabs.length;
|
||||
claudeStore.switchConversation(tabs[prevIndex]);
|
||||
@@ -171,17 +192,17 @@
|
||||
<div
|
||||
class="terminal-tabs flex items-center gap-1 px-2 py-1 bg-[var(--bg-secondary)] border-b border-[var(--border-color)]"
|
||||
>
|
||||
{#each Array.from(conversations.entries()) as [id, conversation] (id)}
|
||||
{#each Array.from($conversations.entries()) as [id, conversation] (id)}
|
||||
<div
|
||||
class="tab-item group relative flex items-center px-3 py-1.5 rounded-t cursor-pointer transition-all
|
||||
{id === activeConversationId
|
||||
{id === $activeConversationId
|
||||
? 'bg-[var(--bg-terminal)] text-[var(--text-primary)] border-t border-l border-r border-[var(--border-color)]'
|
||||
: 'bg-[var(--bg-tertiary)] text-[var(--text-secondary)] hover:bg-[var(--bg-terminal)]/50'}"
|
||||
onclick={() => switchTab(id)}
|
||||
onkeydown={(e) => handleTabKeydown(id, e)}
|
||||
role="tab"
|
||||
tabindex={0}
|
||||
aria-selected={id === activeConversationId}
|
||||
aria-selected={id === $activeConversationId}
|
||||
>
|
||||
{#if editingTabId === id}
|
||||
<input
|
||||
@@ -196,58 +217,51 @@
|
||||
<div class="flex items-center gap-2">
|
||||
<div
|
||||
class="w-2 h-2 rounded-full {getConnectionStatusColor(conversation.connectionStatus)}"
|
||||
title="Connection: {conversation.connectionStatus}{id !== connectedConversationId &&
|
||||
connectedConversationId
|
||||
? ' (Another tab is connected)'
|
||||
: ''}"
|
||||
title="Connection: {conversation.connectionStatus}"
|
||||
></div>
|
||||
<span
|
||||
class="text-sm pr-6 max-w-[150px] truncate"
|
||||
class="text-sm pr-2 max-w-[150px] truncate"
|
||||
ondblclick={(e) => startEditing(id, conversation.name, e)}
|
||||
role="button"
|
||||
tabindex={-1}
|
||||
>
|
||||
{conversation.name}
|
||||
</span>
|
||||
{#if id !== activeConversationId && id === connectedConversationId}
|
||||
<span
|
||||
class="text-xs text-[var(--text-tertiary)]"
|
||||
title="This tab has the Claude connection"
|
||||
>
|
||||
(connected)
|
||||
</span>
|
||||
{/if}
|
||||
{#if hasUnreadMessages(id, conversation)}
|
||||
<div
|
||||
class="absolute -top-1 -right-1 w-2 h-2 rounded-full bg-blue-500 animate-pulse"
|
||||
class="absolute -top-1 -right-1 w-2 h-2 rounded-full bg-blue-500 animate-pulse pointer-events-none"
|
||||
title="New messages"
|
||||
></div>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
{#if conversations.size > 1}
|
||||
<button
|
||||
onclick={(e) => deleteTab(id, e)}
|
||||
class="absolute right-1 top-1/2 -translate-y-1/2 w-4 h-4 flex items-center justify-center rounded hover:bg-[var(--bg-secondary)] opacity-0 group-hover:opacity-100 transition-opacity"
|
||||
title="Close tab"
|
||||
>
|
||||
<svg
|
||||
class="w-3 h-3"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
<div
|
||||
class="absolute right-1 top-1/2 -translate-y-1/2 flex items-center gap-0.5 opacity-0 group-hover:opacity-100 transition-opacity"
|
||||
>
|
||||
{#if $conversations.size > 1}
|
||||
<button
|
||||
onclick={(e) => deleteTab(id, e)}
|
||||
class="w-4 h-4 flex items-center justify-center rounded hover:bg-[var(--bg-secondary)]"
|
||||
title="Close tab"
|
||||
>
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
stroke-width="2"
|
||||
d="M6 18L18 6M6 6l12 12"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
{/if}
|
||||
<svg
|
||||
class="w-3 h-3"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
stroke-width="2"
|
||||
d="M6 18L18 6M6 6l12 12"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
{/each}
|
||||
|
||||
@@ -268,6 +282,13 @@
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<CloseTabConfirmModal
|
||||
isOpen={showConfirmModal}
|
||||
tabName={tabToCloseName}
|
||||
onConfirm={confirmCloseTab}
|
||||
onCancel={cancelCloseTab}
|
||||
/>
|
||||
|
||||
<style>
|
||||
.terminal-tabs {
|
||||
min-height: 36px;
|
||||
|
||||
@@ -43,14 +43,7 @@
|
||||
"đź”’ Grant tool permissions as needed for security",
|
||||
"📌 Pin important conversations for quick access",
|
||||
"🎨 Customize your theme and preferences in Settings",
|
||||
],
|
||||
},
|
||||
{
|
||||
title: "Keyboard Shortcuts",
|
||||
items: [
|
||||
"Ctrl/Cmd + Enter: Send message",
|
||||
"Ctrl/Cmd + K: Clear chat (when supported)",
|
||||
"Escape: Close modals and panels",
|
||||
"⌨️ Check the keyboard icon for available shortcuts",
|
||||
],
|
||||
},
|
||||
];
|
||||
@@ -72,10 +65,12 @@
|
||||
tabindex="-1"
|
||||
>
|
||||
<div class="flex items-center justify-between p-6 pb-4 border-b border-[var(--border-color)]">
|
||||
<h2 id="help-title" class="text-xl font-semibold text-gray-100">How to Use Hikari Desktop</h2>
|
||||
<h2 id="help-title" class="text-xl font-semibold text-[var(--text-primary)]">
|
||||
How to Use Hikari Desktop
|
||||
</h2>
|
||||
<button
|
||||
onclick={onClose}
|
||||
class="p-1 text-gray-500 hover:text-gray-300 transition-colors"
|
||||
class="p-1 text-[var(--text-secondary)] hover:text-[var(--text-primary)] transition-colors"
|
||||
aria-label="Close"
|
||||
>
|
||||
<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
@@ -92,8 +87,8 @@
|
||||
<div class="overflow-y-auto flex-1 p-6 space-y-6">
|
||||
{#each sections as section (section.title)}
|
||||
<div>
|
||||
<h3 class="font-medium text-gray-200 mb-3">{section.title}</h3>
|
||||
<ul class="space-y-2 text-sm text-gray-400">
|
||||
<h3 class="font-medium text-[var(--text-primary)] mb-3">{section.title}</h3>
|
||||
<ul class="space-y-2 text-sm text-[var(--text-secondary)]">
|
||||
{#each section.items as item (item)}
|
||||
<li class="flex items-start">
|
||||
<span class="text-[var(--accent-primary)] mr-2 mt-0.5">•</span>
|
||||
@@ -105,7 +100,7 @@
|
||||
{/each}
|
||||
|
||||
<div class="pt-4 border-t border-[var(--border-color)]">
|
||||
<p class="text-sm text-gray-500">
|
||||
<p class="text-sm text-[var(--text-tertiary)]">
|
||||
<strong>Need more help?</strong> Join our Discord community for support and updates!
|
||||
</p>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,61 @@
|
||||
<script lang="ts">
|
||||
export let content: string;
|
||||
export let searchQuery: string;
|
||||
|
||||
interface TextPart {
|
||||
text: string;
|
||||
isMatch: boolean;
|
||||
}
|
||||
|
||||
function getHighlightedParts(text: string, query: string): TextPart[] {
|
||||
if (!query) {
|
||||
return [{ text, isMatch: false }];
|
||||
}
|
||||
|
||||
const escapedQuery = query.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
const regex = new RegExp(`(${escapedQuery})`, "gi");
|
||||
const parts: TextPart[] = [];
|
||||
let lastIndex = 0;
|
||||
let match: RegExpExecArray | null;
|
||||
|
||||
while ((match = regex.exec(text)) !== null) {
|
||||
// Add non-matching text before the match
|
||||
if (match.index > lastIndex) {
|
||||
parts.push({
|
||||
text: text.slice(lastIndex, match.index),
|
||||
isMatch: false,
|
||||
});
|
||||
}
|
||||
|
||||
// Add the matching text
|
||||
parts.push({
|
||||
text: match[1],
|
||||
isMatch: true,
|
||||
});
|
||||
|
||||
lastIndex = regex.lastIndex;
|
||||
}
|
||||
|
||||
// Add any remaining text after the last match
|
||||
if (lastIndex < text.length) {
|
||||
parts.push({
|
||||
text: text.slice(lastIndex),
|
||||
isMatch: false,
|
||||
});
|
||||
}
|
||||
|
||||
return parts;
|
||||
}
|
||||
|
||||
$: parts = getHighlightedParts(content, searchQuery);
|
||||
</script>
|
||||
|
||||
<span class="whitespace-pre-wrap">
|
||||
{#each parts as part, index (index)}
|
||||
{#if part.isMatch}
|
||||
<mark class="search-highlight">{part.text}</mark>
|
||||
{:else}
|
||||
{part.text}
|
||||
{/if}
|
||||
{/each}
|
||||
</span>
|
||||