diff --git a/app/ios/Runner/AppDelegate.swift b/app/ios/Runner/AppDelegate.swift index 5840594f1d..c98fcca3d6 100644 --- a/app/ios/Runner/AppDelegate.swift +++ b/app/ios/Runner/AppDelegate.swift @@ -396,6 +396,19 @@ class SpeechRecognitionHandler: NSObject { let language = args["language"] as? String ?? "en-US" transcribe(filePath: path, language: language, result: result) + } else if call.method == "requestPermission" { + SFSpeechRecognizer.requestAuthorization { authStatus in + DispatchQueue.main.async { + switch authStatus { + case .authorized: + result(true) + case .denied, .restricted, .notDetermined: + result(false) + @unknown default: + result(false) + } + } + } } else { result(FlutterMethodNotImplemented) } diff --git a/app/lib/pages/settings/transcription_settings_page.dart b/app/lib/pages/settings/transcription_settings_page.dart index b67a675fd5..8bde8f9e74 100644 --- a/app/lib/pages/settings/transcription_settings_page.dart +++ b/app/lib/pages/settings/transcription_settings_page.dart @@ -19,6 +19,7 @@ import 'package:disk_space_2/disk_space_2.dart'; import 'package:path_provider/path_provider.dart'; import 'package:omi/services/services.dart'; import 'package:omi/services/sockets/transcription_service.dart'; +import 'package:omi/services/sockets/on_device_apple_provider.dart'; import 'package:omi/services/custom_stt_log_service.dart'; import 'package:omi/utils/l10n_extensions.dart'; import 'package:provider/provider.dart'; @@ -70,7 +71,13 @@ class _TranscriptionSettingsPageState extends State { SttProviderConfig get _currentConfig => SttProviderConfig.get(_selectedProvider); CustomSttConfig? get _currentProviderConfig => _configsPerProvider[_selectedProvider]; - String get _currentLanguage => _currentProviderConfig?.language ?? _currentConfig.defaultLanguage; + String get _currentLanguage { + final lang = _currentProviderConfig?.language ?? 
_currentConfig.defaultLanguage; + if (Platform.isIOS && lang == 'multi') { + return 'en'; + } + return lang; + } String get _currentModel => _currentProviderConfig?.model ?? _currentConfig.defaultModel; String get _currentRequestJson => _requestJsonPerProvider[_selectedProvider] ?? '{}'; String get _currentSchemaJson => _schemaJsonPerProvider[_selectedProvider] ?? '{}'; @@ -1040,6 +1047,15 @@ class _TranscriptionSettingsPageState extends State { } MixpanelManager().transcriptionSourceSelected(source: isIOS ? 'custom_on_device_ios' : 'custom_on_device'); }); + + if (isIOS) { + final granted = await OnDeviceAppleProvider.requestPermission(); + if (!granted && mounted) { + ScaffoldMessenger.of(context).showSnackBar( + const SnackBar(content: Text('Speech recognition permission is required. Please enable it in Settings.')), + ); + } + } } Widget _buildSourceSelector() { diff --git a/app/lib/services/sockets/on_device_apple_provider.dart b/app/lib/services/sockets/on_device_apple_provider.dart index c43514ab7a..22edaaafb2 100644 --- a/app/lib/services/sockets/on_device_apple_provider.dart +++ b/app/lib/services/sockets/on_device_apple_provider.dart @@ -15,6 +15,19 @@ class OnDeviceAppleProvider implements ISttProvider { this.language = 'en', }); + static Future<bool> requestPermission() async { + try { + final bool? granted = await _channel.invokeMethod<bool>('requestPermission'); + if (granted == false) { + CustomSttLogService.instance.warning('OnDeviceApple', 'Speech recognition permission not granted.'); + } + return granted ?? false; + } catch (e) { + CustomSttLogService.instance.error('OnDeviceApple', 'Permission request error: $e'); + return false; + } + } + + @override Future transcribe( Uint8List audioData, {