app/ios/Runner/AppDelegate.swift (13 additions, 0 deletions)
@@ -396,6 +396,19 @@ class SpeechRecognitionHandler: NSObject {

             let language = args["language"] as? String ?? "en-US"
             transcribe(filePath: path, language: language, result: result)
+        } else if call.method == "requestPermission" {
+            SFSpeechRecognizer.requestAuthorization { authStatus in
+                DispatchQueue.main.async {
+                    switch authStatus {
+                    case .authorized:
+                        result(true)
+                    case .denied, .restricted, .notDetermined:
+                        result(false)
+                    @unknown default:
+                        result(false)
+                    }
+                }
+            }
         } else {
             result(FlutterMethodNotImplemented)
         }
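
Review note (not part of the diff): requestAuthorization only shows the system prompt while the status is still .notDetermined; on later calls it simply reports the stored decision, so invoking this handler repeatedly is harmless. It also requires an NSSpeechRecognitionUsageDescription entry in the Runner Info.plist, which this diff does not show. A minimal sketch of an equivalent helper that short-circuits once the status is already known (ensureSpeechPermission is an illustrative name, not code from this PR):

import Speech

/// Resolves speech-recognition authorization, prompting only while undetermined.
/// Illustrative sketch; the name and placement are assumptions, not part of this PR.
func ensureSpeechPermission(_ completion: @escaping (Bool) -> Void) {
    switch SFSpeechRecognizer.authorizationStatus() {
    case .authorized:
        completion(true)                        // already granted, no dialog shown
    case .denied, .restricted:
        completion(false)                       // only changeable from Settings now
    case .notDetermined:
        SFSpeechRecognizer.requestAuthorization { status in
            DispatchQueue.main.async {          // hop back to the main thread, as above
                completion(status == .authorized)
            }
        }
    @unknown default:
        completion(false)
    }
}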
app/lib/pages/settings/transcription_settings_page.dart (17 additions, 1 deletion)
@@ -19,6 +19,7 @@ import 'package:disk_space_2/disk_space_2.dart';
 import 'package:path_provider/path_provider.dart';
 import 'package:omi/services/services.dart';
 import 'package:omi/services/sockets/transcription_service.dart';
+import 'package:omi/services/sockets/on_device_apple_provider.dart';
 import 'package:omi/services/custom_stt_log_service.dart';
 import 'package:omi/utils/l10n_extensions.dart';
 import 'package:provider/provider.dart';
@@ -70,7 +71,13 @@ class _TranscriptionSettingsPageState extends State<TranscriptionSettingsPage> {

   SttProviderConfig get _currentConfig => SttProviderConfig.get(_selectedProvider);
   CustomSttConfig? get _currentProviderConfig => _configsPerProvider[_selectedProvider];
-  String get _currentLanguage => _currentProviderConfig?.language ?? _currentConfig.defaultLanguage;
+  String get _currentLanguage {
+    final lang = _currentProviderConfig?.language ?? _currentConfig.defaultLanguage;
+    if (Platform.isIOS && lang == 'multi') {
+      return 'en';
+    }
+    return lang;
+  }
   String get _currentModel => _currentProviderConfig?.model ?? _currentConfig.defaultModel;
   String get _currentRequestJson => _requestJsonPerProvider[_selectedProvider] ?? '{}';
   String get _currentSchemaJson => _schemaJsonPerProvider[_selectedProvider] ?? '{}';
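
Review note (not part of the diff): the iOS fallback above exists because Apple's on-device recognizer is created for one concrete locale, so the provider-neutral 'multi' code has nothing it can map to; English appears to be the chosen default. A sketch of the underlying constraint (makeRecognizer is an illustrative name, not code from this PR):

import Speech

/// Illustrative sketch: SFSpeechRecognizer is bound to a single locale, which is why a
/// generic "multi" language code cannot be forwarded to the on-device engine.
func makeRecognizer(language: String) -> SFSpeechRecognizer? {
    // The failable initializer returns nil for locales the device cannot recognize
    // ("multi" is not a valid locale identifier), hence the fallback to "en".
    guard let recognizer = SFSpeechRecognizer(locale: Locale(identifier: language)),
          recognizer.isAvailable else {
        return nil
    }
    return recognizer
}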
@@ -1040,6 +1047,15 @@ class _TranscriptionSettingsPageState extends State<TranscriptionSettingsPage> {
       }
       MixpanelManager().transcriptionSourceSelected(source: isIOS ? 'custom_on_device_ios' : 'custom_on_device');
     });
+
+    if (isIOS) {
+      final granted = await OnDeviceAppleProvider.requestPermission();
+      if (!granted && mounted) {
+        ScaffoldMessenger.of(context).showSnackBar(
+          const SnackBar(content: Text('Speech recognition permission is required. Please enable it in Settings.')),
+        );
+      }
+    }
   }
 
   Widget _buildSourceSelector() {
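
Review note (not part of the diff): the snackbar asks the user to enable the permission in Settings but leaves the navigation to them. If a follow-up wants to deep-link there, the native side can open the app's Settings page directly; a minimal sketch, with openAppSettings as an illustrative name that does not exist in this PR:

import UIKit

/// Illustrative sketch: jump to this app's page in the Settings app, where the
/// speech-recognition toggle mentioned by the snackbar lives.
func openAppSettings() {
    guard let url = URL(string: UIApplication.openSettingsURLString),
          UIApplication.shared.canOpenURL(url) else { return }
    UIApplication.shared.open(url, options: [:], completionHandler: nil)
}

On the Dart side this could be exposed through the same method channel and attached to a SnackBarAction instead of plain text, but that is a design choice outside this diff.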
app/lib/services/sockets/on_device_apple_provider.dart (13 additions, 0 deletions)
@@ -15,6 +15,19 @@ class OnDeviceAppleProvider implements ISttProvider {
     this.language = 'en',
   });
 
+  static Future<bool> requestPermission() async {
+    try {
+      final bool? granted = await _channel.invokeMethod('requestPermission');
+      if (granted == false) {
+        CustomSttLogService.instance.warning('OnDeviceApple', 'Speech recognition permission not granted.');
+      }
+      return granted ?? false;
+    } catch (e) {
+      CustomSttLogService.instance.error('OnDeviceApple', 'Permission request error: $e');
+      return false;
+    }
+  }
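
Review note (not part of the diff): neither the declaration of _channel nor the iOS-side registration of its handler is visible in this hunk. For orientation, the usual wiring looks roughly like the sketch below; the channel name and the handle(_:result:) call are assumptions that have to match the real code, not facts taken from this PR:

import Flutter

// Illustrative wiring only: the channel name is a placeholder and must match whatever
// name the Dart-side _channel was created with; handle(_:result:) is likewise assumed,
// since the diff only shows the body of the method-call handler.
func registerSpeechChannel(controller: FlutterViewController, handler: SpeechRecognitionHandler) {
    let channel = FlutterMethodChannel(name: "on_device_apple_stt",
                                       binaryMessenger: controller.binaryMessenger)
    channel.setMethodCallHandler { call, result in
        handler.handle(call, result: result)   // routes "requestPermission", "transcribe", ...
    }
}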

   @override
   Future<SttTranscriptionResult?> transcribe(
     Uint8List audioData, {