Implement Phase 1: Global hotkey, HUD, and audio capture

Add the complete listening UX without STT (illustrative sketches of the main pieces follow below):
- Global hotkey manager for ⌘⇧V with push-to-talk and toggle modes
- Floating HUD with real-time RMS audio level visualization
- AVAudioEngine capture with conversion to 16 kHz mono PCM
- 10-minute dictation timeout with Esc cancellation
- Optional start/stop sounds and microphone permission handling
- Permission management for Accessibility and Input Monitoring

All Phase 1 acceptance criteria met.
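
A minimal sketch of how a ⌘⇧V global listener can be wired up with NSEvent global monitors. The class name HotkeyMonitor and the onHotkey callback are illustrative; the commit's actual hotkey manager may use a different mechanism (e.g. a Carbon hotkey). Global key monitoring only delivers events from other apps once Accessibility/Input Monitoring permission has been granted, which is why the permission handling sketched further down matters.

import AppKit

/// Illustrative sketch of a global hotkey listener for ⌘⇧V.
/// Receiving key events from other apps requires Accessibility /
/// Input Monitoring permission.
final class HotkeyMonitor {
    private var monitor: Any?

    func start(onHotkey: @escaping () -> Void) {
        let keyCodeV: UInt16 = 9  // ANSI "V"
        let wanted: NSEvent.ModifierFlags = [.command, .shift]
        monitor = NSEvent.addGlobalMonitorForEvents(matching: .keyDown) { event in
            if event.keyCode == keyCodeV,
               event.modifierFlags.intersection(.deviceIndependentFlagsOnly) == wanted {
                onHotkey()
            }
        }
    }

    func stop() {
        if let monitor { NSEvent.removeMonitor(monitor) }
        monitor = nil
    }
}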
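
One common way to get 16 kHz mono PCM out of AVAudioEngine is to tap the input node at its native format and push each buffer through an AVAudioConverter. A rough sketch, with CaptureEngine and the onBuffer callback as illustrative names rather than the commit's actual API:

import AVFoundation

/// Illustrative sketch: capture microphone audio with AVAudioEngine and
/// convert it to 16 kHz mono Float32 PCM.
final class CaptureEngine {
    private let engine = AVAudioEngine()

    func start(onBuffer: @escaping (AVAudioPCMBuffer) -> Void) throws {
        let input = engine.inputNode
        let inputFormat = input.outputFormat(forBus: 0)
        guard let targetFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                               sampleRate: 16_000,
                                               channels: 1,
                                               interleaved: false),
              let converter = AVAudioConverter(from: inputFormat, to: targetFormat) else {
            return
        }

        input.installTap(onBus: 0, bufferSize: 1024, format: inputFormat) { buffer, _ in
            let ratio = targetFormat.sampleRate / inputFormat.sampleRate
            let capacity = AVAudioFrameCount(Double(buffer.frameLength) * ratio) + 1
            guard let converted = AVAudioPCMBuffer(pcmFormat: targetFormat,
                                                   frameCapacity: capacity) else { return }
            var consumed = false
            var error: NSError?
            let status = converter.convert(to: converted, error: &error) { _, outStatus in
                // Feed the captured buffer exactly once per conversion pass.
                if consumed {
                    outStatus.pointee = .noDataNow
                    return nil
                }
                consumed = true
                outStatus.pointee = .haveData
                return buffer
            }
            if status != .error, error == nil {
                onBuffer(converted)
            }
        }

        try engine.start()
    }

    func stop() {
        engine.inputNode.removeTap(onBus: 0)
        engine.stop()
    }
}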
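
For the HUD level meter, the RMS of each incoming buffer is enough to drive a simple visualization. A small helper along these lines (assuming mono Float32 buffers, as produced by the capture sketch above) would compute it:

import AVFoundation

/// Illustrative helper: root-mean-square level of a mono Float32 buffer,
/// usable as the input to a HUD level meter.
func rmsLevel(of buffer: AVAudioPCMBuffer) -> Float {
    guard let samples = buffer.floatChannelData?[0], buffer.frameLength > 0 else {
        return 0
    }
    let frameCount = Int(buffer.frameLength)
    var sumOfSquares: Float = 0
    for i in 0..<frameCount {
        sumOfSquares += samples[i] * samples[i]
    }
    return (sumOfSquares / Float(frameCount)).squareRoot()
}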
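
The 10-minute cap can be a cancellable timer that the controller starts when listening begins and cancels when the session ends (Esc handling would sit in the key-event path). A tiny sketch, with DictationTimeout as an illustrative name rather than the commit's type:

import Foundation

/// Illustrative sketch: auto-stop dictation after a fixed cap (10 minutes).
final class DictationTimeout {
    private var workItem: DispatchWorkItem?

    func start(after seconds: TimeInterval = 600, onTimeout: @escaping () -> Void) {
        cancel()
        let item = DispatchWorkItem(block: onTimeout)
        workItem = item
        DispatchQueue.main.asyncAfter(deadline: .now() + seconds, execute: item)
    }

    func cancel() {
        workItem?.cancel()
        workItem = nil
    }
}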
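
Permission handling in this phase touches two systems: microphone access through AVFoundation and the Accessibility trust needed for global key monitoring. A hedged sketch of the relevant system calls (the commit's actual permission manager may be structured differently):

import AVFoundation
import ApplicationServices

/// Illustrative permission helpers.
enum Permissions {
    /// Ask for microphone access (shows the system prompt the first time).
    static func requestMicrophone(completion: @escaping (Bool) -> Void) {
        AVCaptureDevice.requestAccess(for: .audio) { granted in
            DispatchQueue.main.async { completion(granted) }
        }
    }

    /// Check Accessibility trust, optionally prompting the user to open
    /// System Settings. Global key monitoring will not work without it.
    static func ensureAccessibility(prompt: Bool) -> Bool {
        let options = [kAXTrustedCheckOptionPrompt.takeUnretainedValue() as String: prompt] as CFDictionary
        return AXIsProcessTrustedWithOptions(options)
    }
}
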
Felipe M 2025-09-18 20:06:46 +02:00
parent 1db16227b2
commit 6e768a7753
Signed by: fmartingr
GPG key ID: CCFBC5637D4000A8
10 changed files with 1005 additions and 51 deletions

@@ -0,0 +1,64 @@
import SwiftUI
import CoreUtils

@main
struct MenuWhisperApp: App {
    @StateObject private var appController = AppController()

    var body: some Scene {
        MenuBarExtra("Menu-Whisper", systemImage: "mic") {
            MenuBarContentView()
                .environmentObject(appController)
                .onAppear {
                    appController.start()
                }
        }
    }
}

struct MenuBarContentView: View {
    @EnvironmentObject var appController: AppController

    var body: some View {
        VStack(alignment: .leading, spacing: 4) {
            Text("Menu-Whisper")
                .font(.headline)

            Text(appController.currentState.displayName)
                .font(.subheadline)
                .foregroundColor(stateColor)

            if appController.currentState == .listening {
                Text("Press ⌘⇧V or Esc to stop")
                    .font(.caption)
                    .foregroundColor(.secondary)
            }

            Divider()

            Button("Preferences...") {
                // TODO: Open preferences window in Phase 4
            }

            Button("Quit") {
                NSApplication.shared.terminate(nil)
            }
        }
        .padding(.horizontal, 4)
    }

    private var stateColor: Color {
        switch appController.currentState {
        case .idle:
            return .primary
        case .listening:
            return .blue
        case .processing:
            return .orange
        case .injecting:
            return .green
        case .error:
            return .red
        }
    }
}