feat: add meeting transcription app scaffolding
Security Scan and Upload / Security & DefectDojo Upload (pull_request) Successful in 48s
CI / Lint & Test (pull_request) Successful in 14m18s
CI / Build Linux (pull_request) Successful in 14m19s
CI / Build Windows (cross-compile) (pull_request) Failing after 19m39s

- Add Python backend structure with FastAPI for transcription/summarization
- Add React UI with audio recording, transcript, and summary views
- Configure Tauri to manage Python backend lifecycle
- Set up Windows cross-compilation with cargo-xwin
- Add Gitea CI workflow for lint, test, and multi-platform builds
- Configure ESLint, Prettier, and Vitest for code quality

Note: App scaffolding only - Python env and models not yet set up
This commit is contained in:
2026-01-21 20:18:03 -08:00
parent 96494a9997
commit 3c8a46e5a6
41 changed files with 2679 additions and 1797 deletions
+128
View File
@@ -0,0 +1,128 @@
"""Build script to create a Windows executable with bundled Python environment.
This script should be run ON WINDOWS, not cross-compiled from Linux.
"""
import shutil
import subprocess
import sys
from pathlib import Path
def main():
    """Create a Windows executable bundle for Chronara.

    Runs two steps on a Windows host:
      1. Bundle the Python backend into a standalone executable with
         PyInstaller (installing PyInstaller first if it is missing).
      2. Build the Tauri app, which produces the NSIS installer.

    Exits with status 1 when not run on Windows. Build failures surface as
    subprocess.CalledProcessError.
    """
    # Local import: only this function needs it.
    import importlib.util

    project_root = Path(__file__).parent.parent
    dist_dir = project_root / "dist-bundle"
    print("🔨 Chronara Windows Build Script")
    print("=" * 50)
    print("\n⚠️ NOTE: This script must be run on Windows!")
    print(" Cross-compilation from Linux is not supported.\n")
    if sys.platform != "win32":
        print("❌ This script must be run on Windows.")
        print(" Please run this on a Windows machine or in a Windows VM.")
        sys.exit(1)

    # Clean previous builds
    if dist_dir.exists():
        print("Cleaning previous build...")
        shutil.rmtree(dist_dir)
    dist_dir.mkdir(exist_ok=True)

    # Step 1: Create Python bundle using PyInstaller
    print("\n📦 Creating Python bundle...")
    # Temporary PyInstaller spec describing the backend bundle.
    spec_content = f"""
# -*- mode: python ; coding: utf-8 -*-
a = Analysis(
    ['{project_root / "src" / "backend" / "main.py"}'],
    pathex=['{project_root / "src"}'],
    binaries=[],
    datas=[
        ('{project_root / "models" / "*.gguf"}', 'models'),
    ],
    hiddenimports=['uvicorn', 'fastapi', 'whisperx', 'llama_cpp'],
    hookspath=[],
    hooksconfig={{}},
    runtime_hooks=[],
    excludes=[],
    noarchive=False,
)
pyz = PYZ(a.pure)
exe = EXE(
    pyz,
    a.scripts,
    a.binaries,
    a.datas,
    [],
    name='chronara-backend',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=False,
    upx_exclude=[],
    runtime_tmpdir=None,
    console=True,
    disable_windowed_traceback=False,
    argv_emulation=False,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None,
)
"""
    spec_path = project_root / "chronara-backend.spec"
    spec_path.write_text(spec_content)

    # Install PyInstaller up front if it is missing. The previous approach
    # inferred "not installed" from a CalledProcessError, but that exception
    # also covers genuine build failures, which would then be hidden behind a
    # pointless reinstall-and-retry.
    if importlib.util.find_spec("PyInstaller") is None:
        print("⚠️ PyInstaller not found. Installing...")
        subprocess.run(
            [sys.executable, "-m", "pip", "install", "pyinstaller"],
            check=True,
        )

    try:
        subprocess.run([
            sys.executable,
            "-m",
            "PyInstaller",
            str(spec_path),
            "--clean",
            "--noconfirm",
            "--distpath", str(dist_dir / "backend")
        ], check=True)
    finally:
        # Always remove the temporary spec file, even when the build fails.
        spec_path.unlink(missing_ok=True)

    # Step 2: Build Tauri app (creates NSIS installer on Windows)
    print("\n🦀 Building Tauri app with NSIS installer...")
    # shell=True so the pnpm shim (pnpm.cmd) resolves through the shell on
    # Windows.
    subprocess.run(
        "pnpm tauri build",
        cwd=project_root,
        check=True,
        shell=True
    )
    print("\n✅ Build complete!")
    print("\nWindows installer (.exe) location:")
    print(" src-tauri/target/release/bundle/nsis/Chronara_0.1.0_x64_en-US.exe")
    print("\nThis installer includes:")
    print(" - Tauri app with React frontend")
    print(" - Python backend (bundled)")
    print(" - Llama model files")
    print(" - All dependencies")
# Run the build only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
+90
View File
@@ -0,0 +1,90 @@
"""Download required models for Chronara."""
import os
import sys
from pathlib import Path
from urllib.request import urlretrieve
# Model download URLs
# Each entry maps a short model id to:
#   url      - direct-download link to the quantized GGUF weights on Hugging Face
#   filename - name the file is saved under in the local models/ directory
#   size     - approximate download size, shown to the user before choosing
MODELS = {
    "llama-3.2-1B": {
        "url": "https://huggingface.co/bartowski/Llama-3.2-1B-Instruct-GGUF/resolve/main/Llama-3.2-1B-Instruct-Q4_K_M.gguf",
        "filename": "llama-3.2-1B-instruct-Q4_K_M.gguf",
        "size": "1.2GB",
    },
    "llama-3.2-3B": {
        "url": "https://huggingface.co/bartowski/Llama-3.2-3B-Instruct-GGUF/resolve/main/Llama-3.2-3B-Instruct-Q4_K_M.gguf",
        "filename": "llama-3.2-3B-instruct-Q4_K_M.gguf",
        "size": "2.5GB",
    },
}
def download_with_progress(url: str, filepath: Path) -> None:
    """Download *url* to *filepath*, rendering a progress bar on stdout.

    Raises whatever urllib raises on network/HTTP errors (the caller handles
    cleanup of partial files).
    """
    def _progress(block_num: int, block_size: int, total_size: int) -> None:
        # urlretrieve reports total_size == -1 when the server sends no
        # Content-Length header; the original code divided by it, raising
        # ZeroDivisionError for 0 and printing negative percentages for -1.
        if total_size <= 0:
            sys.stdout.write(f"\r{block_num * block_size} bytes downloaded")
        else:
            downloaded = block_num * block_size
            percent = min(downloaded * 100 / total_size, 100)
            progress = int(50 * percent / 100)
            sys.stdout.write(
                f"\r[{'=' * progress}{' ' * (50 - progress)}] {percent:.1f}%"
            )
        sys.stdout.flush()

    print(f"Downloading {filepath.name}...")
    urlretrieve(url, filepath, reporthook=_progress)
    print()  # New line after progress bar
def main():
    """Interactively download the selected Llama model(s) into ./models.

    Prompts the user to pick one or both models from MODELS, skips files that
    already exist, deletes partial files on failure, and reports an honest
    summary (the previous version claimed success even when downloads failed,
    and left partial files that the existence check would later skip).
    """
    # Get project root
    project_root = Path(__file__).parent.parent
    models_dir = project_root / "models"
    models_dir.mkdir(exist_ok=True)

    print("🤖 Chronara Model Downloader")
    print("=" * 50)

    # Ask which model to download
    print("\nWhich Llama model would you like to use?")
    print("1. Llama 3.2 1B (1.2GB) - Faster, good for basic summaries")
    print("2. Llama 3.2 3B (2.5GB) - Better quality summaries")
    print("3. Both models")
    choice = input("\nEnter your choice (1/2/3): ").strip()

    # Map the menu choice to the MODELS keys to fetch.
    selections = {
        "1": ["llama-3.2-1B"],
        "2": ["llama-3.2-3B"],
        "3": ["llama-3.2-1B", "llama-3.2-3B"],
    }
    models_to_download = selections.get(choice)
    if models_to_download is None:
        print("Invalid choice!")
        return

    # Download selected models
    failures = 0
    for model_name in models_to_download:
        model_info = MODELS[model_name]
        filepath = models_dir / model_info["filename"]
        if filepath.exists():
            print(f"\n{model_name} already downloaded")
            continue
        print(f"\n📥 Downloading {model_name} ({model_info['size']})...")
        try:
            download_with_progress(model_info["url"], filepath)
            print(f"✓ Downloaded {model_name} successfully!")
        except Exception as e:
            # Remove the partial file so a re-run does not mistake it for a
            # completed download (the existence check above would skip it).
            filepath.unlink(missing_ok=True)
            failures += 1
            print(f"✗ Failed to download {model_name}: {e}")

    if failures:
        print(f"\n⚠️ {failures} download(s) failed. Re-run this script to retry.")
    else:
        print("\n✨ Model download complete!")
    print("\nNote: WhisperX models will be downloaded automatically on first run.")
# Run the downloader only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
+24
View File
@@ -0,0 +1,24 @@
"""Start the Chronara backend server."""
import os
import sys
import subprocess
from pathlib import Path
# Add src directory to Python path
src_path = Path(__file__).parent.parent / "src"
sys.path.insert(0, str(src_path))
# Set environment for bundled deployment
os.environ["CHRONARA_BUNDLED"] = "1"
# Start the FastAPI server
subprocess.run([
sys.executable,
"-m",
"uvicorn",
"backend.main:app",
"--host", "127.0.0.1",
"--port", "8000",
"--reload" if os.environ.get("CHRONARA_DEV") else ""
])