feat: display progress for finalising audio and summarising

This commit is contained in:
2026-01-29 12:34:25 -08:00
parent a6843cb3f1
commit 43a544a886
7 changed files with 540 additions and 63 deletions
+210
View File
@@ -585,4 +585,214 @@ body {
background-color: #451a03;
color: #fbbf24;
}
.recording-item {
background-color: var(--surface-color);
border-color: var(--border-color);
}
.recording-item:hover {
background-color: var(--bg-color);
}
}
/* New layout styles */

/* Two-pane shell: fixed-width recordings sidebar + flexible main column.
   min-height: 0 lets flex children shrink below content size so the
   sidebar and main panes can scroll independently. */
.app-layout {
display: flex;
flex: 1;
gap: 2rem;
min-height: 0;
}

/* Left sidebar listing past recordings; scrolls on overflow. */
.recordings-list {
width: 300px;
background-color: var(--surface-color);
border-radius: 0.5rem;
padding: 1.5rem;
overflow-y: auto;
}

.recordings-list h3 {
margin-bottom: 1rem;
font-size: 1.125rem;
}

/* Placeholder shown when the history is empty. */
.no-recordings {
color: var(--text-secondary);
font-style: italic;
}

.recordings-items {
list-style: none;
display: flex;
flex-direction: column;
gap: 0.5rem;
}

/* One clickable entry in the recordings sidebar. */
.recording-item {
padding: 0.75rem;
border-radius: 0.375rem;
cursor: pointer;
transition: all 0.2s;
background-color: white;
border: 1px solid var(--border-color);
}

.recording-item:hover {
background-color: var(--surface-color);
}

/* Selected entry inverts to the primary color; nested text colors are
   overridden below so they stay legible on the dark background. */
.recording-item.selected {
background-color: var(--primary-color);
color: white;
}

.recording-item.selected .recording-time {
color: white;
}

.recording-item.selected .recording-status {
color: rgba(255, 255, 255, 0.8);
}

.recording-time {
font-weight: 500;
font-size: 0.875rem;
}

/* Secondary line under the timestamp (e.g. "✓ Summary"). */
.recording-status {
font-size: 0.75rem;
color: var(--text-secondary);
margin-top: 0.25rem;
}

/* Right-hand column holding controls and the selected recording.
   min-width: 0 allows it to shrink instead of overflowing the layout. */
.main-content {
flex: 1;
display: flex;
flex-direction: column;
gap: 2rem;
min-width: 0;
}

.recording-details {
flex: 1;
display: flex;
flex-direction: column;
gap: 1.5rem;
}

/* Title row with the action buttons; wraps on narrow widths. */
.recording-header {
display: flex;
justify-content: space-between;
align-items: center;
flex-wrap: wrap;
gap: 1rem;
}

.recording-header h2 {
font-size: 1.25rem;
margin: 0;
}

.recording-actions {
display: flex;
gap: 0.75rem;
flex-wrap: wrap;
}

/* Centered prompt shown before any recording exists. */
.empty-state {
flex: 1;
display: flex;
align-items: center;
justify-content: center;
padding: 4rem 2rem;
text-align: center;
color: var(--text-secondary);
font-size: 1.125rem;
}

/* Summary display improvements */

/* pre-wrap preserves the line breaks emitted by the summarizer. */
.summary-text {
white-space: pre-wrap;
line-height: 1.6;
}

/* Bold section headings inside the summary get their own line. */
.summary-text strong {
display: block;
margin-top: 1rem;
margin-bottom: 0.5rem;
}

/* Progress Bar */

.progress-container {
width: 100%;
max-width: 300px;
margin: 1rem auto;
}

.progress-label {
font-size: 0.875rem;
color: var(--text-secondary);
margin-bottom: 0.5rem;
text-align: center;
}

/* Track: overflow hidden clips the animated fill's shimmer overlay. */
.progress-bar {
width: 100%;
height: 8px;
background-color: var(--border-color);
border-radius: 4px;
overflow: hidden;
position: relative;
}

/* Fill width is set inline from JS; the transition smooths updates. */
.progress-fill {
height: 100%;
background-color: var(--primary-color);
border-radius: 4px;
transition: width 0.3s ease;
position: relative;
overflow: hidden;
}

/* Moving highlight swept across the fill to signal activity. */
.progress-fill::after {
content: "";
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
background: linear-gradient(
90deg,
transparent 0%,
rgba(255, 255, 255, 0.2) 50%,
transparent 100%
);
animation: shimmer 1.5s infinite;
}

/* Slide the gradient fully across the bar, left to right. */
@keyframes shimmer {
0% {
transform: translateX(-100%);
}
100% {
transform: translateX(100%);
}
}

/* Numeric "NN%" readout; tabular-nums keeps digit widths stable so the
   text does not jitter as the percentage changes. */
.progress-text {
font-size: 0.75rem;
color: var(--text-secondary);
text-align: center;
margin-top: 0.25rem;
font-variant-numeric: tabular-nums;
}

/* Transcribing indicator update */

/* Stacks the spinner above the progress bar while finalizing audio. */
.transcribing-indicator {
display: flex;
flex-direction: column;
align-items: center;
gap: 0.5rem;
padding: 1rem;
}
+230 -50
View File
@@ -1,9 +1,11 @@
import { useState, useEffect, useRef } from "react";
import { invoke } from "@tauri-apps/api/core";
import { listen, type UnlistenFn } from "@tauri-apps/api/event";
import "./App.css";
import { TranscriptDisplay } from "./components/TranscriptDisplay";
import { SummaryDisplay } from "./components/SummaryDisplay";
import { BackendLogs } from "./components/BackendLogs";
import { ProgressBar } from "./components/ProgressBar";
interface TranscriptSegment {
start: number;
@@ -12,22 +14,35 @@ interface TranscriptSegment {
speaker: string;
}
interface Recording {
id: string;
timestamp: Date;
duration: number;
transcriptSegments: TranscriptSegment[];
summary: string | null;
isGeneratingSummary: boolean;
summaryProgress?: number;
}
type AppState = "checking" | "downloading_models" | "initializing" | "ready" | "recording" | "transcribing" | "error";
function App() {
const [transcriptSegments, setTranscriptSegments] = useState<TranscriptSegment[]>([]);
const [summary, setSummary] = useState<string | null>(null);
const [isGeneratingSummary, setIsGeneratingSummary] = useState(false);
const [recordings, setRecordings] = useState<Recording[]>([]);
const [activeRecording, setActiveRecording] = useState<Recording | null>(null);
const [selectedRecordingId, setSelectedRecordingId] = useState<string | null>(null);
const [appState, setAppState] = useState<AppState>("checking");
const [statusMessage, setStatusMessage] = useState("Checking setup...");
const [showLogs, setShowLogs] = useState(false);
const [errorMessage, setErrorMessage] = useState<string | null>(null);
const [recordingDuration, setRecordingDuration] = useState(0);
const [transcriptionProgress, setTranscriptionProgress] = useState(0);
const initStarted = useRef(false);
const recordingTimer = useRef<number | null>(null);
const transcriptionTimer = useRef<number | null>(null);
const audioOffset = useRef(0);
const totalProcessedSamples = useRef(0);
const transcriptionProgressUnlisten = useRef<UnlistenFn | null>(null);
const summaryProgressUnlisten = useRef<UnlistenFn | null>(null);
useEffect(() => {
if (initStarted.current) return;
@@ -36,7 +51,7 @@ function App() {
initializeApp();
}, []);
// Cleanup timers on unmount
// Cleanup timers and listeners on unmount
useEffect(() => {
return () => {
if (recordingTimer.current) {
@@ -45,9 +60,37 @@ function App() {
if (transcriptionTimer.current) {
clearInterval(transcriptionTimer.current);
}
if (transcriptionProgressUnlisten.current) {
transcriptionProgressUnlisten.current();
}
if (summaryProgressUnlisten.current) {
summaryProgressUnlisten.current();
}
};
}, []);
// Set up event listeners for progress updates.
// This effect re-runs whenever the selected recording changes, so it must
// tear down the listeners from the previous run before registering new
// ones; otherwise each dependency change leaks a listener and the
// handlers fire multiple times per backend event. The refs are still
// updated so the unmount cleanup can release the final pair.
useEffect(() => {
  const setupListeners = async () => {
    // Listen for transcription progress (payload is 0-1; UI wants 0-100).
    if (transcriptionProgressUnlisten.current) {
      transcriptionProgressUnlisten.current();
    }
    transcriptionProgressUnlisten.current = await listen<number>('transcription-progress', (event) => {
      setTranscriptionProgress(event.payload * 100); // Convert 0-1 to 0-100
    });

    // Listen for summary progress and attach it to the recording being
    // summarized (the selected one, falling back to the most recent).
    if (summaryProgressUnlisten.current) {
      summaryProgressUnlisten.current();
    }
    summaryProgressUnlisten.current = await listen<number>('summary-progress', (event) => {
      const recordingId = selectedRecordingId || recordings[0]?.id;
      if (recordingId) {
        setRecordings(prev => prev.map(r =>
          r.id === recordingId ? { ...r, summaryProgress: event.payload * 100 } : r
        ));
      }
    });
  };

  setupListeners();
}, [selectedRecordingId, recordings]);
const initializeApp = async () => {
try {
setAppState("checking");
@@ -122,7 +165,10 @@ function App() {
end: seg.end + baseTime,
}));
setTranscriptSegments(prev => [...prev, ...adjustedSegments]);
setActiveRecording(prev => prev ? {
...prev,
transcriptSegments: [...prev.transcriptSegments, ...adjustedSegments]
} : null);
}
// Track total processed samples
@@ -140,11 +186,20 @@ function App() {
try {
setAppState("recording");
setRecordingDuration(0);
setTranscriptSegments([]);
setSummary(null);
audioOffset.current = 0;
totalProcessedSamples.current = 0;
// Create a new active recording
const newRecording: Recording = {
id: Date.now().toString(),
timestamp: new Date(),
duration: 0,
transcriptSegments: [],
summary: null,
isGeneratingSummary: false,
};
setActiveRecording(newRecording);
await invoke("start_recording");
// Start timer to show recording duration
@@ -177,6 +232,7 @@ function App() {
setAppState("transcribing");
setStatusMessage("Processing final audio...");
setTranscriptionProgress(0);
// First, process any audio that hasn't been processed yet
try {
@@ -186,6 +242,7 @@ function App() {
if (finalChunk.length > 0) {
console.log(`Processing final chunk of ${finalChunk.length} samples`);
// The progress will be updated via events from the backend
const finalSegments = await invoke<TranscriptSegment[]>("transcribe_chunk", {
audioData: finalChunk
});
@@ -198,7 +255,10 @@ function App() {
end: seg.end + baseTime,
}));
setTranscriptSegments(prev => [...prev, ...adjustedSegments]);
setActiveRecording(prev => prev ? {
...prev,
transcriptSegments: [...prev.transcriptSegments, ...adjustedSegments]
} : null);
}
}
} catch (chunkError) {
@@ -209,8 +269,23 @@ function App() {
// Now stop the recording
await invoke<string>("stop_recording");
// Save the recording to history
if (activeRecording) {
const finalRecording = {
...activeRecording,
duration: recordingDuration
};
setRecordings(prev => [finalRecording, ...prev]);
setSelectedRecordingId(finalRecording.id);
setActiveRecording(null);
}
// Brief delay to show completion
await new Promise(resolve => setTimeout(resolve, 500));
setAppState("ready");
setStatusMessage("");
setTranscriptionProgress(0);
} catch (error) {
console.error("Failed to stop recording:", error);
setAppState("ready");
@@ -219,28 +294,61 @@ function App() {
}
};
const generateSummary = async () => {
if (transcriptSegments.length === 0) return;
const generateSummary = async (recordingId: string) => {
const recording = recordings.find(r => r.id === recordingId);
if (!recording || recording.transcriptSegments.length === 0) return;
setIsGeneratingSummary(true);
// Update the recording to show it's generating
setRecordings(prev => prev.map(r =>
r.id === recordingId ? { ...r, isGeneratingSummary: true, summaryProgress: 0 } : r
));
const fullTranscript = transcriptSegments
const fullTranscript = recording.transcriptSegments
.map((seg) => `${seg.speaker}: ${seg.text}`)
.join("\n");
try {
// Progress will be updated via events from the backend
const summaryResult = await invoke<string>("summarize", { transcript: fullTranscript });
setSummary(summaryResult);
// Update the recording with the summary
setRecordings(prev => prev.map(r =>
r.id === recordingId
? { ...r, summary: summaryResult, isGeneratingSummary: false, summaryProgress: 100 }
: r
));
// Clear progress after a brief delay
setTimeout(() => {
setRecordings(prev => prev.map(r =>
r.id === recordingId ? { ...r, summaryProgress: undefined } : r
));
}, 1000);
} catch (error) {
console.error("Failed to generate summary:", error);
setErrorMessage(String(error));
} finally {
setIsGeneratingSummary(false);
// Reset generating state on error
setRecordings(prev => prev.map(r =>
r.id === recordingId ? { ...r, isGeneratingSummary: false, summaryProgress: undefined } : r
));
}
};
const downloadTranscript = () => {
const content = transcriptSegments
// Best-effort copy of `text` to the system clipboard. A rejected write
// (missing permission, no user activation) is logged and otherwise
// ignored so the UI never breaks over a failed copy.
const copyToClipboard = async (text: string) => {
  await navigator.clipboard
    .writeText(text)
    .catch((error) => console.error("Failed to copy to clipboard:", error));
  // A toast notification could be shown here on success.
};
const downloadTranscript = (recordingId: string) => {
const recording = recordings.find(r => r.id === recordingId);
if (!recording) return;
const content = recording.transcriptSegments
.map((seg) => `[${formatTime(seg.start)}] ${seg.speaker}: ${seg.text}`)
.join("\n");
@@ -248,19 +356,20 @@ function App() {
const url = URL.createObjectURL(blob);
const a = document.createElement("a");
a.href = url;
a.download = `meeting-transcript-${new Date().toISOString().split("T")[0]}.txt`;
a.download = `meeting-transcript-${recording.timestamp.toISOString().split("T")[0]}.txt`;
a.click();
URL.revokeObjectURL(url);
};
const downloadSummary = () => {
if (!summary) return;
const downloadSummary = (recordingId: string) => {
const recording = recordings.find(r => r.id === recordingId);
if (!recording || !recording.summary) return;
const blob = new Blob([summary], { type: "text/plain" });
const blob = new Blob([recording.summary], { type: "text/plain" });
const url = URL.createObjectURL(blob);
const a = document.createElement("a");
a.href = url;
a.download = `meeting-summary-${new Date().toISOString().split("T")[0]}.txt`;
a.download = `meeting-summary-${recording.timestamp.toISOString().split("T")[0]}.txt`;
a.click();
URL.revokeObjectURL(url);
};
@@ -277,6 +386,10 @@ function App() {
return `${mins}:${secs.toString().padStart(2, "0")}`;
};
// Get the currently selected recording or active recording
const displayedRecording = activeRecording ||
(selectedRecordingId ? recordings.find(r => r.id === selectedRecordingId) : null);
const renderSetupScreen = () => (
<div className="setup-screen">
<div className="setup-content">
@@ -347,7 +460,7 @@ function App() {
<div className="recording-indicator">
<span className="recording-dot" />
Recording: {formatDuration(recordingDuration)}
{transcriptSegments.length > 0 && (
{activeRecording && activeRecording.transcriptSegments.length > 0 && (
<span className="real-time-indicator"> (Real-time transcription active)</span>
)}
</div>
@@ -359,28 +472,42 @@ function App() {
{appState === "transcribing" && (
<div className="transcribing-indicator">
<div className="loading-spinner small" />
<span>Transcribing audio...</span>
<ProgressBar
progress={transcriptionProgress}
label="Processing final audio..."
/>
</div>
)}
{appState === "ready" && transcriptSegments.length > 0 && (
<div className="action-buttons">
<button className="secondary-button" onClick={downloadTranscript}>
📄 Download Transcript
</button>
<button
className="primary-button"
onClick={generateSummary}
disabled={isGeneratingSummary}
>
Generate Summary
</button>
</div>
)}
</section>
);
const renderRecordingsList = () => (
<aside className="recordings-list">
<h3>Recording History</h3>
{recordings.length === 0 ? (
<p className="no-recordings">No recordings yet</p>
) : (
<ul className="recordings-items">
{recordings.map(recording => (
<li
key={recording.id}
className={`recording-item ${selectedRecordingId === recording.id ? 'selected' : ''}`}
onClick={() => setSelectedRecordingId(recording.id)}
>
<div className="recording-time">
{recording.timestamp.toLocaleTimeString()} - {formatDuration(recording.duration)}
</div>
<div className="recording-status">
{recording.summary ? '✓ Summary' : recording.isGeneratingSummary ? '⏳ Summarizing...' : ''}
</div>
</li>
))}
</ul>
)}
</aside>
);
// Show setup screen for non-ready states
if (appState === "checking" || appState === "downloading_models" || appState === "initializing" || appState === "error") {
return (
@@ -401,20 +528,73 @@ function App() {
<p>Local Meeting Transcription & Summarization</p>
</header>
<div className="app-content">
{renderRecordingControls()}
<div className="app-layout">
{/* Left sidebar with recordings list */}
{renderRecordingsList()}
<div className="content-grid">
<TranscriptDisplay segments={transcriptSegments} />
<SummaryDisplay
summary={summary}
isLoading={isGeneratingSummary}
onDownload={downloadSummary}
/>
{/* Main content area */}
<div className="main-content">
{renderRecordingControls()}
{/* Display selected recording or active recording */}
{displayedRecording && (
<div className="recording-details">
<div className="recording-header">
<h2>Recording from {displayedRecording.timestamp.toLocaleString()}</h2>
<div className="recording-actions">
<button
className="secondary-button"
onClick={() => downloadTranscript(displayedRecording.id)}
>
📄 Download Transcript
</button>
{!displayedRecording.summary && !displayedRecording.isGeneratingSummary && (
<button
className="primary-button"
onClick={() => generateSummary(displayedRecording.id)}
>
Generate Summary
</button>
)}
{displayedRecording.summary && (
<>
<button
className="secondary-button"
onClick={() => copyToClipboard(displayedRecording.summary!)}
>
📋 Copy Summary
</button>
<button
className="secondary-button"
onClick={() => downloadSummary(displayedRecording.id)}
>
💾 Download Summary
</button>
</>
)}
</div>
</div>
<div className="content-grid">
<TranscriptDisplay segments={displayedRecording.transcriptSegments} />
<SummaryDisplay
summary={displayedRecording.summary}
isLoading={displayedRecording.isGeneratingSummary}
progress={displayedRecording.summaryProgress}
/>
</div>
</div>
)}
{!displayedRecording && recordings.length === 0 && appState === "ready" && (
<div className="empty-state">
<p>Click "Start Recording" to begin your first meeting transcription!</p>
</div>
)}
</div>
<BackendLogs isVisible={showLogs} onToggle={() => setShowLogs(!showLogs)} />
</div>
<BackendLogs isVisible={showLogs} onToggle={() => setShowLogs(!showLogs)} />
</main>
);
}
+19
View File
@@ -0,0 +1,19 @@
interface ProgressBarProps {
  progress: number; // 0-100
  label?: string;
}

/**
 * Horizontal determinate progress bar with an optional caption and a
 * numeric percentage readout.
 *
 * @param progress Completion percentage. Values outside [0, 100] are
 *                 clamped so the fill width and the "NN%" text always
 *                 agree (previously only the width was clamped, letting
 *                 the text show values like "150%").
 * @param label    Optional caption rendered above the bar.
 */
export function ProgressBar({ progress, label }: ProgressBarProps) {
  // Clamp once and reuse for both the fill width and the readout.
  const clamped = Math.min(100, Math.max(0, progress));
  return (
    <div className="progress-container">
      {label && <div className="progress-label">{label}</div>}
      <div className="progress-bar">
        <div
          className="progress-fill"
          style={{ width: `${clamped}%` }}
        />
      </div>
      <div className="progress-text">{Math.round(clamped)}%</div>
    </div>
  );
}
+15 -9
View File
@@ -1,25 +1,31 @@
interface SummaryDisplayProps {
summary: string | null;
isLoading: boolean;
onDownload: () => void;
progress?: number;
}
export function SummaryDisplay({ summary, isLoading, onDownload }: SummaryDisplayProps) {
import { ProgressBar } from "./ProgressBar";
export function SummaryDisplay({ summary, isLoading, progress }: SummaryDisplayProps) {
return (
<div className="summary-display">
<div className="summary-header">
<h2>Meeting Summary</h2>
{summary && (
<button className="download-button" onClick={onDownload}>
📥 Download
</button>
)}
</div>
<div className="summary-content">
{isLoading ? (
<div className="loading">
<div className="spinner"></div>
<p>Generating summary...</p>
{progress !== undefined ? (
<ProgressBar
progress={progress}
label="Generating summary..."
/>
) : (
<>
<div className="spinner"></div>
<p>Generating summary...</p>
</>
)}
</div>
) : summary ? (
<div className="summary-text">{summary}</div>