
Commit 646bcdb

refactor: organize constants and remove comments
1 parent e7bc547 commit 646bcdb

File tree

7 files changed: +89 −108 lines changed

Hippo/Features/Vision/Runtime/VoiceControlManager+Executor.swift

Lines changed: 13 additions & 0 deletions

@@ -2,11 +2,24 @@
 // VoiceControlManager+Executor.swift
 // Hippo
 //
+// VoiceCommandExecutor implementation for VoiceControlManager
+//
 
 import Foundation
 
+// MARK: - VoiceCommandExecutor
+
 extension VoiceControlManager: VoiceCommandExecutor {
 
+    /// Execute voice command intent
+    ///
+    /// Maps voice command intents to concrete actions:
+    /// - Menu control (open/close)
+    /// - Video control (show/hide endoscope)
+    /// - Entity manipulation (rotate)
+    ///
+    /// - Parameter intent: The command intent to execute
+    /// - Throws: VoiceControlError if execution fails
     public func execute(_ intent: VoiceCommandIntent) async throws {
         switch intent {
         case .closeMenu:

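For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of what an executor conformance like the one above looks like: a protocol whose execute(_:) switches over an intent enum. DemoIntent and DemoCommandExecutor are hypothetical stand-ins, not the app's actual types.

```swift
import Foundation

// Hypothetical stand-ins for VoiceCommandIntent / VoiceCommandExecutor.
enum DemoIntent {
    case openMenu
    case closeMenu
    case rotateEntity(degrees: Double)
}

protocol DemoCommandExecutor {
    func execute(_ intent: DemoIntent) async throws
}

struct LoggingExecutor: DemoCommandExecutor {
    // Map each parsed intent onto a concrete action (here, a log line).
    func execute(_ intent: DemoIntent) async throws {
        switch intent {
        case .openMenu: print("open menu")
        case .closeMenu: print("close menu")
        case .rotateEntity(let degrees): print("rotate by \(degrees)°")
        }
    }
}
```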
Hippo/Features/Vision/Runtime/VoiceControlManager.swift

Lines changed: 23 additions & 16 deletions

@@ -13,50 +13,57 @@ final class VoiceControlManager {
     let opacityManager: OpacityManager
     let dataViewModel: OperationViewModel
     let windowController: WindowController
-
-    init(runtime: ImmersiveSceneRuntime, immersiveViewModel: ImmersiveViewModel, opacityManager: OpacityManager, dataViewModel: OperationViewModel, windowController: WindowController) {
+
+    init(
+        runtime: ImmersiveSceneRuntime,
+        immersiveViewModel: ImmersiveViewModel,
+        opacityManager: OpacityManager,
+        dataViewModel: OperationViewModel,
+        windowController: WindowController
+    ) {
         self.runtime = runtime
         self.immersiveViewModel = immersiveViewModel
         self.opacityManager = opacityManager
         self.dataViewModel = dataViewModel
         self.windowController = windowController
     }
-
+
     func openHeadController() {
         immersiveViewModel.openMenuSetting(windowController: windowController)
     }
-
+
     func closeHeadController() {
         immersiveViewModel.closeMenuSetting(windowController: windowController)
     }
-
+
     func openEndoscopicView() {
-        //if immersiveViewModel.isEndoscopicActive {
+        if !immersiveViewModel.isEndoscopicActive {
             immersiveViewModel.toggleEndoscope()
-        //}
+        }
     }
-
+
     func closeEndoscopicView() {
-        //if !immersiveViewModel.isEndoscopicActive {
+        if immersiveViewModel.isEndoscopicActive {
             immersiveViewModel.toggleEndoscope()
-        //}
+        }
     }
-
+
     // Creates the i-th entity among the registered entities
     func addEntity(index: Int) {
         var fileURLs: [URL] {
             dataViewModel.state.operation?.assets.map { $0.fileURL } ?? []
         }
-
-        guard let selectedURL = fileURLs.indices.contains(index) ? fileURLs[index] : nil else {
+
+        guard
+            let selectedURL = fileURLs.indices.contains(index)
+                ? fileURLs[index] : nil
+        else {
             return
         }
-
+
         Task {
             await runtime.placeEntity(url: selectedURL)
         }
     }
-
-
 
 }

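The substantive fix in this file is that the endoscope guards, previously commented out, are now live: open/close only call toggleEndoscope() when the state actually needs to change, making repeated voice commands idempotent. A minimal sketch of that shape, using a hypothetical FakeViewModel in place of the app's ImmersiveViewModel:

```swift
// FakeViewModel is a hypothetical stand-in for ImmersiveViewModel.
final class FakeViewModel {
    private(set) var isEndoscopicActive = false
    func toggleEndoscope() { isEndoscopicActive.toggle() }
}

func openEndoscopicView(on viewModel: FakeViewModel) {
    // Toggle only when not already visible, so a repeated
    // "open" command can no longer accidentally close the view.
    if !viewModel.isEndoscopicActive {
        viewModel.toggleEndoscope()
    }
}

let viewModel = FakeViewModel()
openEndoscopicView(on: viewModel)
openEndoscopicView(on: viewModel) // second call is a no-op
print(viewModel.isEndoscopicActive) // true
```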
Hippo/Features/Vision/UI/VoiceControl/Helpers/WakeWordListener.swift

Lines changed: 8 additions & 46 deletions

@@ -1,44 +1,15 @@
-//
-// WakeWordListener.swift
-// Hippo
-//
-// Presentation helper for continuous wake word detection
-//
-// Responsibilities:
-// - Continuously listen for wake words in Standby mode
-// - Notify when wake word is detected
-// - Manage listening lifecycle (start/stop)
-//
-// This is a presentation-layer helper, not a domain service.
-// Wake word detection is specific to this app's UX flow.
-//
-
 import Foundation
 import Dependencies
 
-/// Wake Word Listener
-///
-/// A lightweight helper that continuously listens for wake words
-/// (e.g., "Hippo", "히포") and notifies when detected.
-///
-/// **Usage:**
-/// ```swift
-/// let listener = WakeWordListener()
-/// listener.start(wakeWords: ["hippo", "히포"]) {
-///     print("Wake word detected!")
-/// }
-/// // Later...
-/// listener.stop()
-/// ```
-///
-/// **Design rationale:**
-/// - Lives in Presentation layer (not Domain) because wake word detection
-///   is specific to this app's UX, not a reusable domain concept
-/// - Uses SpeechRecognitionService (Domain) for actual STT
-/// - Implements the continuous listening loop and wake word matching
 @MainActor
 final class WakeWordListener {
 
+    // MARK: - Constants
+
+    private enum Constants {
+        static let errorRetryDelay: UInt64 = 500_000_000 // 0.5 seconds
+    }
+
     // MARK: - Dependencies
 
     @Dependency(\.speechRecognitionService) private var speechRecognition

@@ -47,13 +18,9 @@ final class WakeWordListener {
 
     private var listeningTask: Task<Void, Never>?
 
-    // MARK: - Public API
+    // MARK: - Public Methods
 
     /// Start listening for wake words
-    ///
-    /// This method continuously listens for the specified wake words.
-    /// When detected, it calls the `onDetected` callback and stops listening.
-    ///
     /// - Parameters:
     ///   - wakeWords: Array of wake words to detect (case-insensitive)
    ///   - onDetected: Callback called when wake word is detected

@@ -82,19 +49,14 @@ final class WakeWordListener {
 
                 } catch {
                     print("⚠️ [WakeWordListener] Error: \(error)")
-                    // Continue listening even on error, with small delay
-                    // to avoid tight loop on repeated errors
-                    try? await Task.sleep(nanoseconds: 500_000_000) // 0.5 seconds
+                    try? await Task.sleep(nanoseconds: Constants.errorRetryDelay)
                 }
             }
 
             print("🎧 [WakeWordListener] Stopped listening")
         }
     }
 
-    /// Stop listening for wake words
-    ///
-    /// This cancels the ongoing wake word detection task.
     func stop() {
         listeningTask?.cancel()
         listeningTask = nil

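The surviving catch branch keeps the listener's retry behavior but moves the magic 500 ms into Constants. The loop shape it protects looks roughly like the following sketch; recognizeOnce() is a hypothetical stand-in for the real SpeechRecognitionService call, and ListenerConstants mirrors the enum added above.

```swift
import Foundation

enum ListenerConstants {
    static let errorRetryDelay: UInt64 = 500_000_000 // 0.5 seconds
}

// Hypothetical one-shot recognition call standing in for the real service.
func recognizeOnce() async throws -> String { "hippo" }

func listen(for wakeWords: Set<String>, onDetected: () -> Void) async {
    while !Task.isCancelled {
        do {
            let transcript = try await recognizeOnce()
            // Case-insensitive match, as the doc comment above describes.
            if wakeWords.contains(transcript.lowercased()) {
                onDetected()
                return
            }
        } catch {
            // Back off briefly so repeated failures don't spin a tight loop.
            try? await Task.sleep(nanoseconds: ListenerConstants.errorRetryDelay)
        }
    }
}
```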
Hippo/Features/Vision/UI/VoiceControl/ViewModels/VoiceControlUIState.swift

Lines changed: 12 additions & 6 deletions

@@ -18,14 +18,24 @@ import Foundation
 /// All UI-related logic (error messages, feedback) is encapsulated here.
 public struct VoiceControlUIState: Equatable {
 
-    // MARK: - Properties
+    // MARK: - Properties - State
 
     /// Core domain state
     public var state: VoiceControlState
 
+    /// Type of current feedback message (for styling)
+    public var feedbackType: FeedbackType = .info
+
+    // MARK: - Properties - Messages
+
     /// Current feedback message to display to user
     public var feedbackMessage: String?
 
+    /// Last error message (for user feedback)
+    public var lastErrorMessage: String?
+
+    // MARK: - Properties - Transcriptions
+
     /// Real-time partial transcription (shown while listening)
     public var partialTranscription: String?
 

@@ -35,15 +45,11 @@ public struct VoiceControlUIState: Equatable {
     /// Last parsed intent (for debugging/UI feedback)
     public var lastParsedIntent: String?
 
-    /// Last error message (for user feedback)
-    public var lastErrorMessage: String?
+    // MARK: - Properties - Processing
 
     /// Whether currently processing (recognition or parsing)
     public var isProcessing: Bool = false
 
-    /// Type of current feedback message (for styling)
-    public var feedbackType: FeedbackType = .info
-
     // MARK: - Initialization
 
     public init(

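This file only regroups properties under themed MARK headers; behavior is unchanged. For what the Equatable conformance buys in a UI-state struct like this, a small hypothetical sketch (DemoFeedbackType invents cases beyond the .info visible in the diff):

```swift
enum DemoFeedbackType { case info, success, error }

struct DemoUIState: Equatable {
    var feedbackType: DemoFeedbackType = .info
    var feedbackMessage: String?
    var isProcessing = false
}

let before = DemoUIState()
var after = before
after.feedbackMessage = "Listening…"
// Equatable lets observers cheaply detect whether anything actually changed.
print(before == after) // false
```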
Hippo/Features/Vision/UI/VoiceControl/ViewModels/VoiceControlViewModel.swift

Lines changed: 23 additions & 8 deletions

@@ -42,6 +42,22 @@ import Observation
 @Observable
 public final class VoiceControlViewModel {
 
+    // MARK: - Constants
+
+    private enum Constants {
+        /// Delay after wake word detection to clear audio buffer
+        static let wakeWordBufferDelay: UInt64 = 800_000_000 // 0.8 seconds
+
+        /// Duration to show success message
+        static let successMessageDuration: UInt64 = 1_500_000_000 // 1.5 seconds
+
+        /// Duration to show error message before retry
+        static let errorMessageDuration: UInt64 = 1_000_000_000 // 1 second
+
+        /// Retry deadline in seconds
+        static let retryDeadlineSeconds: TimeInterval = 3.0
+    }
+
     // MARK: - Dependencies
 
     @ObservationIgnored @Dependency(\.speechRecognitionService) private var speechRecognition

@@ -164,7 +180,7 @@ public final class VoiceControlViewModel {
 
         // Add small delay to clear audio buffer and show feedback
         Task { @MainActor in
-            try? await Task.sleep(nanoseconds: 800_000_000) // 0.8 seconds
+            try? await Task.sleep(nanoseconds: Constants.wakeWordBufferDelay)
             print("🎯 [VoiceControl] Audio buffer cleared, starting command listening")
             self.startListeningFlow()
         }

@@ -305,10 +321,10 @@ public final class VoiceControlViewModel {
         uiState.lastParsedIntent = nil
 
         // Show success message briefly
-        try? await Task.sleep(nanoseconds: 1_500_000_000) // 1.5 seconds
+        try? await Task.sleep(nanoseconds: Constants.successMessageDuration)
 
         // Success: Return to idle
-        print(" [VoiceControl] Flow completed → Idle")
+        print(" [VoiceControl] Flow completed → Idle")
         clearUIState()
         uiState.setState(.idle)
     }

@@ -352,10 +368,9 @@ public final class VoiceControlViewModel {
 
     /// Start retry flow with automatic timeout
     ///
-    /// Shows error message for 1 second, then automatically retries
-    /// within 3 seconds deadline.
+    /// Shows error message briefly, then automatically retries within deadline.
     private func startRetryFlow() {
-        let deadline = Date().addingTimeInterval(3.0)
+        let deadline = Date().addingTimeInterval(Constants.retryDeadlineSeconds)
         uiState.setState(.retry(attempt: 1, deadline: deadline))
 
         // Cancel any existing retry task

@@ -365,8 +380,8 @@ public final class VoiceControlViewModel {
         retryTask = Task { @MainActor [weak self] in
             guard let self else { return }
 
-            // Show error message for 1 second
-            try? await Task.sleep(nanoseconds: 1_000_000_000)
+            // Show error message briefly
+            try? await Task.sleep(nanoseconds: Constants.errorMessageDuration)
 
             // Early return if state changed
             guard case .retry = self.uiState.state else { return }

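All four constants feed Task.sleep(nanoseconds:). A possible further cleanup, assuming the deployment target supports Swift's Duration-based sleep (Swift 5.7+, which visionOS SDKs do), would drop the hand-counted zeros entirely. A sketch with a hypothetical DemoConstants enum:

```swift
import Foundation

enum DemoConstants {
    static let wakeWordBufferDelay: UInt64 = 800_000_000 // 0.8 seconds
}

func demoDelays() async {
    // As in the diff: raw nanoseconds.
    try? await Task.sleep(nanoseconds: DemoConstants.wakeWordBufferDelay)

    // Duration-based equivalent: no zero-counting,
    // and the unit is part of the expression.
    try? await Task.sleep(for: .milliseconds(800))
}
```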
Hippo/Features/Vision/UI/VoiceControl/Views/VoiceControlOverlay.swift

Lines changed: 0 additions & 30 deletions

@@ -9,36 +9,6 @@
 // - Show transcription results (for debugging/feedback)
 // - Provide visual feedback during different states
 //
-// Usage Example:
-// ```swift
-// struct ImmersiveView: View {
-//     @State private var voiceControlManager: VoiceControlManager
-//     @State private var voiceControlVM: VoiceControlViewModel
-//
-//     init(manager: VoiceControlManager) {
-//         _voiceControlManager = State(initialValue: manager)
-//         _voiceControlVM = State(initialValue: VoiceControlViewModel(
-//             commandExecutor: manager
-//         ))
-//     }
-//
-//     var body: some View {
-//         ZStack {
-//             // Main 3D content
-//             RealityView { ... }
-//
-//             // Voice control button (triggers voice control)
-//             VoiceControlMenuButton(manager: voiceControlManager) {
-//                 // Toggle menu action
-//             }
-//
-//             // Voice control feedback overlay (shows state messages)
-//             VoiceControlOverlay(viewModel: voiceControlVM)
-//         }
-//     }
-// }
-// ```
-//
 // Note: Button and Overlay should share the same VoiceControlViewModel instance
 //       for synchronized state updates.
 //

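The remaining note carries the important contract: both views must observe one shared view model. A compact, hypothetical sketch of that wiring with the @Observable macro — DemoModel stands in for VoiceControlViewModel, and the views are illustrative, not the app's actual ones:

```swift
import SwiftUI
import Observation

// Hypothetical model standing in for VoiceControlViewModel.
@Observable
final class DemoModel {
    var message: String?
}

struct DemoButton: View {
    let model: DemoModel
    var body: some View {
        Button("Speak") { model.message = "Listening…" }
    }
}

struct DemoOverlay: View {
    let model: DemoModel
    var body: some View {
        // Reads the same instance, so it updates when the button writes.
        Text(model.message ?? "")
    }
}

struct DemoScreen: View {
    @State private var model = DemoModel()
    var body: some View {
        ZStack {
            DemoButton(model: model)
            DemoOverlay(model: model)
        }
    }
}
```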
Hippo/Shared/Data/VoiceControl/SpeechRecognition/AppleSpeechRecognitionService.swift

Lines changed: 10 additions & 2 deletions

@@ -27,6 +27,14 @@ import AVFoundation
 @MainActor
 public final class AppleSpeechRecognitionService: SpeechRecognitionService {
 
+    // MARK: - Constants
+
+    private enum Constants {
+        static let audioBufferSize: AVAudioFrameCount = 1024
+        static let recognitionTimeoutNanoseconds: UInt64 = 7_000_000_000 // 7 seconds
+        static let errorRetryDelayNanoseconds: UInt64 = 500_000_000 // 0.5 seconds
+    }
+
     // MARK: - Properties
 
     private let speechRecognizer: SFSpeechRecognizer

@@ -145,7 +153,7 @@ private extension AppleSpeechRecognitionService {
 
         inputNode.installTap(
             onBus: 0,
-            bufferSize: 1024,
+            bufferSize: Constants.audioBufferSize,
             format: recordingFormat
         ) { buffer, _ in
             request.append(buffer)

@@ -251,7 +259,7 @@ private extension AppleSpeechRecognitionService {
             guard let self = self else { return }
 
             // Wait 7 seconds (increased from 5 to allow full command phrases)
-            try? await Task.sleep(nanoseconds: 7_000_000_000)
+            try? await Task.sleep(nanoseconds: Constants.recognitionTimeoutNanoseconds)
 
             guard !state.hasResumed else { return }
             state.hasResumed = true

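The last hunk shows the timeout constant feeding a watchdog that resumes recognition handling at most once (the hasResumed flag). A generic, self-contained sketch of that resume-once race follows — illustrative names only, modeling the pattern rather than the service's actual implementation:

```swift
import Foundation

// Gate that lets exactly one of the competing tasks win.
actor ResumeOnceGate {
    private var hasResumed = false
    /// Returns true for the first caller only.
    func tryResume() -> Bool {
        if hasResumed { return false }
        hasResumed = true
        return true
    }
}

func withTimeout(
    nanoseconds: UInt64,
    work: @escaping @Sendable () async -> String
) async -> String? {
    let gate = ResumeOnceGate()
    return await withCheckedContinuation { (continuation: CheckedContinuation<String?, Never>) in
        Task {
            let result = await work()
            if await gate.tryResume() { continuation.resume(returning: result) }
        }
        Task {
            // Watchdog: give up and resume with nil if work outlasts the timeout.
            try? await Task.sleep(nanoseconds: nanoseconds)
            if await gate.tryResume() { continuation.resume(returning: nil) }
        }
    }
}
```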