From ded95326c7c3e5377334c6045558c869fdd2325f Mon Sep 17 00:00:00 2001 From: Tahsin Masrur Date: Tue, 2 Dec 2025 06:06:15 +0000 Subject: [PATCH 1/6] Update CameraX version to use feature group APIs with latest fixes --- gradle/libs.versions.toml | 4 ++-- settings.gradle.kts | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index bb3ef7362..2f6e19d68 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -1,6 +1,6 @@ [versions] # Used directly in build.gradle.kts files -compileSdk = "35" +compileSdk = "36" desugar_jdk_libs = "2.1.5" orchestrator = "1.5.1" minSdk = "23" @@ -21,7 +21,7 @@ androidxActivityCompose = "1.10.1" androidxAppCompat = "1.7.1" androidxAnnotation = "1.7.1" androidxBenchmark = "1.3.4" -androidxCamera = "1.5.0-SNAPSHOT" +androidxCamera = "1.6.0-SNAPSHOT" androidxConcurrentFutures = "1.3.0" androidxCoreKtx = "1.16.0" androidxDatastore = "1.1.7" diff --git a/settings.gradle.kts b/settings.gradle.kts index d20010988..d11213107 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -26,7 +26,7 @@ dependencyResolutionManagement { repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS) repositories { maven { - setUrl("https://androidx.dev/snapshots/builds/13672667/artifacts/repository") + setUrl("https://androidx.dev/snapshots/builds/14496918/artifacts/repository") } google() mavenCentral() From 070246649752a36c38170a2a8385ea2278c5a491 Mon Sep 17 00:00:00 2001 From: Tahsin Masrur Date: Tue, 2 Dec 2025 21:17:15 +0000 Subject: [PATCH 2/6] Replace UseCaseGroup with SessionConfig for single camera session --- .../jetpackcamera/core/camera/CameraExt.kt | 9 +-- .../core/camera/CameraSession.kt | 72 ++++++++++-------- .../core/camera/ConcurrentCameraSession.kt | 75 ++++++++++++++++++- .../core/camera/CoroutineCameraProvider.kt | 5 +- 4 files changed, 118 insertions(+), 43 deletions(-) diff --git 
a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraExt.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraExt.kt index d48f7fbd1..923e7d485 100644 --- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraExt.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraExt.kt @@ -29,7 +29,6 @@ import androidx.camera.core.DynamicRange as CXDynamicRange import androidx.camera.core.ImageCapture import androidx.camera.core.Preview import androidx.camera.core.UseCase -import androidx.camera.core.UseCaseGroup import androidx.camera.video.Quality import androidx.camera.video.Recorder import androidx.camera.video.VideoCapture @@ -224,9 +223,9 @@ val CameraInfo.supportedImageFormats: Set .mapNotNull(Int::toAppImageFormat) .toSet() -fun UseCaseGroup.getVideoCapture() = getUseCaseOrNull>() -fun UseCaseGroup.getImageCapture() = getUseCaseOrNull() +fun List.getVideoCapture() = getUseCaseOrNull>() +fun List.getImageCapture() = getUseCaseOrNull() -private inline fun UseCaseGroup.getUseCaseOrNull(): T? { - return useCases.filterIsInstance().singleOrNull() +private inline fun List.getUseCaseOrNull(): T? 
{ + return filterIsInstance().singleOrNull() } diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt index aaf147f53..41a1ea62b 100644 --- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt @@ -44,8 +44,9 @@ import androidx.camera.core.CameraInfo import androidx.camera.core.CameraState as CXCameraState import androidx.camera.core.ImageCapture import androidx.camera.core.Preview +import androidx.camera.core.SessionConfig import androidx.camera.core.TorchState -import androidx.camera.core.UseCaseGroup +import androidx.camera.core.UseCase import androidx.camera.core.ViewPort import androidx.camera.core.resolutionselector.AspectRatioStrategy import androidx.camera.core.resolutionselector.ResolutionSelector @@ -217,7 +218,7 @@ internal suspend fun runSingleCameraSession( ) { cameraEffect = SingleSurfaceForcingEffect(this@sessionScope) } - val useCaseGroup = createUseCaseGroup( + val sessionConfig = createSessionConfig( cameraInfo = cameraProvider.getCameraInfo(currentCameraSelector), videoCaptureUseCase = videoCaptureUseCase, initialTransientSettings = currentTransientSettings, @@ -230,12 +231,12 @@ internal suspend fun runSingleCameraSession( captureResults = captureResults ).apply { - getImageCapture()?.let(onImageCaptureCreated) + useCases.getImageCapture()?.let(onImageCaptureCreated) } cameraProvider.runWith( currentCameraSelector, - useCaseGroup + sessionConfig ) { camera -> Log.d(TAG, "Camera session started") launch { @@ -351,11 +352,14 @@ internal suspend fun runSingleCameraSession( } } - applyDeviceRotation(currentTransientSettings.deviceRotation, useCaseGroup) + applyDeviceRotation( + currentTransientSettings.deviceRotation, + sessionConfig.useCases + ) processTransientSettingEvents( camera, cameraConstraints, - useCaseGroup, 
+ sessionConfig.useCases, currentTransientSettings, transientSettings, sessionSettings @@ -370,7 +374,7 @@ context(CameraSessionContext) internal suspend fun processTransientSettingEvents( camera: Camera, cameraConstraints: CameraConstraints?, - useCaseGroup: UseCaseGroup, + useCases: List, initialTransientSettings: TransientSessionSettings, transientSettings: StateFlow, sessionSettings: PerpetualSessionSettings.SingleCamera? @@ -420,7 +424,7 @@ internal suspend fun processTransientSettingEvents( } // apply camera torch mode to image capture - useCaseGroup.getImageCapture()?.let { imageCapture -> + useCases.getImageCapture()?.let { imageCapture -> if (prevTransientSettings.flashMode != newTransientSettings.flashMode) { setFlashModeInternal( imageCapture = imageCapture, @@ -439,7 +443,7 @@ internal suspend fun processTransientSettingEvents( "${prevTransientSettings.deviceRotation} -> " + "${newTransientSettings.deviceRotation}" ) - applyDeviceRotation(newTransientSettings.deviceRotation, useCaseGroup) + applyDeviceRotation(newTransientSettings.deviceRotation, useCases) } // setzoomratio when the primary zoom value changes. @@ -560,9 +564,9 @@ private suspend fun updateCamera2RequestOptions( } } -internal fun applyDeviceRotation(deviceRotation: DeviceRotation, useCaseGroup: UseCaseGroup) { +internal fun applyDeviceRotation(deviceRotation: DeviceRotation, useCases: List) { val targetRotation = deviceRotation.toUiSurfaceRotation() - useCaseGroup.useCases.forEach { + useCases.forEach { when (it) { is Preview -> { // Preview's target rotation should not be updated with device rotation. 
@@ -584,7 +588,7 @@ internal fun applyDeviceRotation(deviceRotation: DeviceRotation, useCaseGroup: U } context(CameraSessionContext) -internal fun createUseCaseGroup( +internal fun createSessionConfig( cameraInfo: CameraInfo, initialTransientSettings: TransientSessionSettings, stabilizationMode: StabilizationMode, @@ -595,7 +599,7 @@ internal fun createUseCaseGroup( captureMode: CaptureMode, effect: CameraEffect? = null, captureResults: MutableStateFlow? = null -): UseCaseGroup { +): SessionConfig { val previewUseCase = createPreviewUseCase( cameraInfo, @@ -619,26 +623,28 @@ internal fun createUseCaseGroup( ) } - return UseCaseGroup.Builder().apply { - Log.d( - TAG, - "Setting initial device rotation to ${initialTransientSettings.deviceRotation}" - ) - setViewPort( - ViewPort.Builder( - Rational(aspectRatio.numerator, aspectRatio.denominator), - // Initialize rotation to Preview's rotation, which comes from Display rotation - previewUseCase.targetRotation - ).build() - ) - addUseCase(previewUseCase) + val useCases = buildList { + add(previewUseCase) // image and video use cases are only created if supported by the configuration - imageCaptureUseCase?.let { addUseCase(imageCaptureUseCase) } - videoCaptureUseCase?.let { addUseCase(videoCaptureUseCase) } + imageCaptureUseCase?.let { add(imageCaptureUseCase) } + videoCaptureUseCase?.let { add(videoCaptureUseCase) } + } - effect?.let { addEffect(it) } - }.build() + Log.d( + TAG, + "Setting initial device rotation to ${initialTransientSettings.deviceRotation}" + ) + + return SessionConfig( + useCases = useCases, + viewPort = ViewPort.Builder( + Rational(aspectRatio.numerator, aspectRatio.denominator), + // Initialize rotation to Preview's rotation, which comes from Display rotation + previewUseCase.targetRotation + ).build(), + effects = effect?.let { listOf(it) } ?: emptyList() + ) } private fun getVideoQualityFromResolution(resolution: Size?): VideoQuality = @@ -662,7 +668,7 @@ private fun 
getHeightFromCropRect(cropRect: Rect?): Int { return abs(cropRect.left - cropRect.right) } -private fun createImageUseCase( +internal fun createImageUseCase( cameraInfo: CameraInfo, aspectRatio: AspectRatio, dynamicRange: DynamicRange, @@ -738,7 +744,7 @@ private fun getAspectRatioForUseCase(sensorLandscapeRatio: Float, aspectRatio: A } context(CameraSessionContext) -private fun createPreviewUseCase( +internal fun createPreviewUseCase( cameraInfo: CameraInfo, aspectRatio: AspectRatio, stabilizationMode: StabilizationMode, @@ -809,7 +815,7 @@ private fun getResolutionSelector( } context(CameraSessionContext) -private fun setFlashModeInternal( +internal fun setFlashModeInternal( imageCapture: ImageCapture, flashMode: FlashMode, isFrontFacing: Boolean diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ConcurrentCameraSession.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ConcurrentCameraSession.kt index 5fdf91f1f..a03c76bfd 100644 --- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ConcurrentCameraSession.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ConcurrentCameraSession.kt @@ -16,19 +16,30 @@ package com.google.jetpackcamera.core.camera import android.annotation.SuppressLint +import android.hardware.camera2.TotalCaptureResult import android.os.Build import android.util.Log +import android.util.Rational +import androidx.camera.core.CameraEffect +import androidx.camera.core.CameraInfo import androidx.camera.core.CameraState as CXCameraState import androidx.camera.core.CompositionSettings import androidx.camera.core.TorchState +import androidx.camera.core.UseCaseGroup +import androidx.camera.core.ViewPort +import androidx.camera.video.Recorder +import androidx.camera.video.VideoCapture import androidx.lifecycle.asFlow +import com.google.jetpackcamera.model.AspectRatio import com.google.jetpackcamera.model.CaptureMode import com.google.jetpackcamera.model.DynamicRange import 
com.google.jetpackcamera.model.ImageOutputFormat +import com.google.jetpackcamera.model.LensFacing import com.google.jetpackcamera.model.StabilizationMode import com.google.jetpackcamera.model.VideoQuality import com.google.jetpackcamera.settings.model.CameraConstraints import kotlinx.coroutines.coroutineScope +import kotlinx.coroutines.flow.MutableStateFlow import kotlinx.coroutines.flow.collectLatest import kotlinx.coroutines.flow.distinctUntilChanged import kotlinx.coroutines.flow.filterNotNull @@ -119,7 +130,7 @@ internal suspend fun runConcurrentCameraSession( launch { processVideoControlEvents( - useCaseGroup.getVideoCapture(), + useCaseGroup.useCases.getVideoCapture(), captureTypeSuffix = "DualCam" ) } @@ -187,14 +198,72 @@ internal suspend fun runConcurrentCameraSession( } } - applyDeviceRotation(initialTransientSettings.deviceRotation, useCaseGroup) + applyDeviceRotation(initialTransientSettings.deviceRotation, useCaseGroup.useCases) processTransientSettingEvents( primaryCamera, cameraConstraints, - useCaseGroup, + useCaseGroup.useCases, initialTransientSettings, transientSettings, null ) } } + +context(CameraSessionContext) +internal fun createUseCaseGroup( + cameraInfo: CameraInfo, + initialTransientSettings: TransientSessionSettings, + stabilizationMode: StabilizationMode, + aspectRatio: AspectRatio, + videoCaptureUseCase: VideoCapture?, + dynamicRange: DynamicRange, + imageFormat: ImageOutputFormat, + captureMode: CaptureMode, + effect: CameraEffect? = null, + captureResults: MutableStateFlow? 
= null +): UseCaseGroup { + val previewUseCase = + createPreviewUseCase( + cameraInfo, + aspectRatio, + stabilizationMode, + captureResults + ) + + // only create image use case in image or standard + val imageCaptureUseCase = if (captureMode != CaptureMode.VIDEO_ONLY) { + createImageUseCase(cameraInfo, aspectRatio, dynamicRange, imageFormat) + } else { + null + } + + imageCaptureUseCase?.let { + setFlashModeInternal( + imageCapture = imageCaptureUseCase, + flashMode = initialTransientSettings.flashMode, + isFrontFacing = cameraInfo.appLensFacing == LensFacing.FRONT + ) + } + + return UseCaseGroup.Builder().apply { + Log.d( + TAG, + "Setting initial device rotation to ${initialTransientSettings.deviceRotation}" + ) + setViewPort( + ViewPort.Builder( + Rational(aspectRatio.numerator, aspectRatio.denominator), + // Initialize rotation to Preview's rotation, which comes from Display rotation + previewUseCase.targetRotation + ).build() + ) + addUseCase(previewUseCase) + + // image and video use cases are only created if supported by the configuration + imageCaptureUseCase?.let { addUseCase(imageCaptureUseCase) } + videoCaptureUseCase?.let { addUseCase(videoCaptureUseCase) } + + effect?.let { addEffect(it) } + }.build() +} diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CoroutineCameraProvider.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CoroutineCameraProvider.kt index a6a032f71..223368e88 100644 --- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CoroutineCameraProvider.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CoroutineCameraProvider.kt @@ -21,6 +21,7 @@ import androidx.camera.core.CameraSelector import androidx.camera.core.CompositionSettings import androidx.camera.core.ConcurrentCamera import androidx.camera.core.ConcurrentCamera.SingleCameraConfig +import androidx.camera.core.SessionConfig import androidx.camera.core.UseCaseGroup import 
androidx.camera.lifecycle.ProcessCameraProvider import androidx.lifecycle.Lifecycle @@ -40,11 +41,11 @@ import kotlinx.coroutines.coroutineScope */ suspend fun ProcessCameraProvider.runWith( cameraSelector: CameraSelector, - useCases: UseCaseGroup, + sessionConfig: SessionConfig, block: suspend CoroutineScope.(Camera) -> R ): R = coroutineScope { val scopedLifecycle = CoroutineLifecycleOwner(coroutineContext) - block(this@runWith.bindToLifecycle(scopedLifecycle, cameraSelector, useCases)) + block(this@runWith.bindToLifecycle(scopedLifecycle, cameraSelector, sessionConfig)) } @SuppressLint("RestrictedApi") From 46930d3d3ce929fb3a1d98b8b549003d93ebd27c Mon Sep 17 00:00:00 2001 From: Tahsin Masrur Date: Fri, 5 Dec 2025 10:25:49 +0000 Subject: [PATCH 3/6] Update system constraints using CameraX feature groups --- .../core/camera/FeatureGroupHandlerTest.kt | 140 ++++++ .../core/camera/CameraSession.kt | 252 ++++++---- .../core/camera/CameraXCameraSystem.kt | 377 +++++++++++---- .../core/camera/ConcurrentCameraSession.kt | 12 +- .../core/camera/FeatureGroupHandler.kt | 436 ++++++++++++++++++ .../core/camera/FeatureGroupability.kt | 175 +++++++ .../core/camera/FeatureGroupabilityTest.kt | 287 ++++++++++++ 7 files changed, 1470 insertions(+), 209 deletions(-) create mode 100644 core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/FeatureGroupHandlerTest.kt create mode 100644 core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupHandler.kt create mode 100644 core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupability.kt create mode 100644 core/camera/src/test/java/com/google/jetpackcamera/core/camera/FeatureGroupabilityTest.kt diff --git a/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/FeatureGroupHandlerTest.kt b/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/FeatureGroupHandlerTest.kt new file mode 100644 index 000000000..2e2800ca4 --- /dev/null +++ 
b/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/FeatureGroupHandlerTest.kt @@ -0,0 +1,140 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.jetpackcamera.core.camera + +import android.app.Application +import androidx.camera.lifecycle.ProcessCameraProvider +import androidx.camera.lifecycle.awaitInstance +import androidx.test.ext.junit.runners.AndroidJUnit4 +import androidx.test.platform.app.InstrumentationRegistry +import com.google.common.truth.Truth.assertThat +import com.google.jetpackcamera.core.common.FakeFilePathGenerator +import com.google.jetpackcamera.settings.SettableConstraintsRepositoryImpl +import com.google.jetpackcamera.settings.model.DEFAULT_CAMERA_APP_SETTINGS +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.runBlocking +import org.junit.Before +import org.junit.Test +import org.junit.runner.RunWith + +@RunWith(AndroidJUnit4::class) +class FeatureGroupHandlerTest { + + private lateinit var application: Application + private lateinit var cameraSystem: CameraXCameraSystem + private lateinit var featureGroupHandler: FeatureGroupHandler + private lateinit var cameraProvider: ProcessCameraProvider + private lateinit var constraintsRepository: SettableConstraintsRepositoryImpl + + @Before + fun setup() = runBlocking { + val instrumentation = InstrumentationRegistry.getInstrumentation() + application = 
instrumentation.targetContext.applicationContext as Application + + cameraProvider = ProcessCameraProvider.awaitInstance(application) + + constraintsRepository = SettableConstraintsRepositoryImpl() + cameraSystem = CameraXCameraSystem( + application = application, + defaultDispatcher = Dispatchers.Main, + iODispatcher = Dispatchers.IO, + constraintsRepository = constraintsRepository, + filePathGenerator = FakeFilePathGenerator(), + availabilityCheckers = emptyMap(), + effectProviders = emptyMap() + ) + cameraSystem.initialize(DEFAULT_CAMERA_APP_SETTINGS) {} + + featureGroupHandler = FeatureGroupHandler( + cameraSystem = cameraSystem, + cameraProvider = cameraProvider, + defaultCameraSessionContext = cameraSystem.defaultCameraSessionContext, + defaultDispatcher = Dispatchers.Main + ) + } + + @Test + fun isGroupingSupported_returnsTrue_forDefaultSettings() = runBlocking { + val currentSettings = DEFAULT_CAMERA_APP_SETTINGS + val cameraSelector = currentSettings.cameraLensFacing.toCameraSelector() + val cameraInfo = cameraProvider.getCameraInfo(cameraSelector) + + val result = featureGroupHandler.isGroupingSupported( + cameraAppSettings = currentSettings, + cameraInfo = cameraInfo, + initialSystemConstraints = constraintsRepository.systemConstraints.value!! + ) + + assertThat(result).isTrue() + } + + @Test + fun filterSystemConstraints_returnsValidConstraints() = runBlocking { + val currentSettings = DEFAULT_CAMERA_APP_SETTINGS + val initialConstraints = constraintsRepository.systemConstraints.value!! 
+ + val result = featureGroupHandler.filterSystemConstraints( + currentSettings = currentSettings, + initialSystemConstraints = initialConstraints, + currentSystemConstraints = initialConstraints + ) + + assertThat(result).isNotNull() + assertThat( + result.availableLenses + ).containsAtLeastElementsIn(initialConstraints.availableLenses) + + // Ensure per-lens constraints for current lens are present + val lensFacing = currentSettings.cameraLensFacing + assertThat(result.perLensConstraints).containsKey(lensFacing) + } + + @Test + fun filterSystemConstraints_forAllAvailableLenses() = runBlocking { + val initialConstraints = constraintsRepository.systemConstraints.value!! + for (lensFacing in initialConstraints.availableLenses) { + val currentSettings = DEFAULT_CAMERA_APP_SETTINGS.copy(cameraLensFacing = lensFacing) + val result = featureGroupHandler.filterSystemConstraints( + currentSettings = currentSettings, + initialSystemConstraints = initialConstraints, + currentSystemConstraints = initialConstraints + ) + assertThat(result.perLensConstraints).containsKey(lensFacing) + } + } + + @Test + fun isHdrSupportedWithJpegR_initiallyNull() { + // Based on atomic(null) initialization + assertThat(featureGroupHandler.isHdrSupportedWithJpegR()).isNull() + } + + @Test + fun isHdrSupportedWithJpegR_updatesAfterConstraintsUpdate() = runBlocking { + val currentSettings = DEFAULT_CAMERA_APP_SETTINGS + val initialConstraints = constraintsRepository.systemConstraints.value!! + + featureGroupHandler.filterSystemConstraints( + currentSettings = currentSettings, + initialSystemConstraints = initialConstraints, + currentSystemConstraints = initialConstraints + ) + + // After update, it should return a boolean value (true or false), confirming the cache + // logic executed. 
+ assertThat(featureGroupHandler.isHdrSupportedWithJpegR()).isNotNull() + } +} diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt index 41a1ea62b..6fe34cb31 100644 --- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt @@ -48,6 +48,7 @@ import androidx.camera.core.SessionConfig import androidx.camera.core.TorchState import androidx.camera.core.UseCase import androidx.camera.core.ViewPort +import androidx.camera.core.featuregroup.GroupableFeature import androidx.camera.core.resolutionselector.AspectRatioStrategy import androidx.camera.core.resolutionselector.ResolutionSelector import androidx.camera.video.ExperimentalPersistentRecording @@ -67,6 +68,9 @@ import androidx.core.content.ContextCompat import androidx.core.content.ContextCompat.checkSelfPermission import androidx.core.net.toFile import androidx.lifecycle.asFlow +import com.google.jetpackcamera.core.camera.FeatureGroupability.ExplicitlyGroupable +import com.google.jetpackcamera.core.camera.FeatureGroupability.ImplicitlyGroupable +import com.google.jetpackcamera.core.camera.FeatureGroupability.Ungroupable import com.google.jetpackcamera.core.camera.effects.SingleSurfaceForcingEffect import com.google.jetpackcamera.core.common.FilePathGenerator import com.google.jetpackcamera.model.AspectRatio @@ -96,6 +100,8 @@ import kotlin.math.abs import kotlin.time.Duration.Companion.milliseconds import kotlinx.atomicfu.atomic import kotlinx.coroutines.CoroutineDispatcher +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.asExecutor import kotlinx.coroutines.channels.Channel import kotlinx.coroutines.coroutineScope @@ -112,6 +118,7 @@ import kotlinx.coroutines.flow.onCompletion import kotlinx.coroutines.flow.transform import 
kotlinx.coroutines.flow.update import kotlinx.coroutines.launch +import kotlinx.coroutines.withContext private const val TAG = "CameraSession" private val QUALITY_RANGE_MAP = mapOf( @@ -134,22 +141,17 @@ internal suspend fun runSingleCameraSession( .primaryLensFacing.toCameraSelector() // only create video use case in standard or video_only - val videoCaptureUseCase = when (sessionSettings.captureMode) { - CaptureMode.STANDARD, CaptureMode.VIDEO_ONLY -> - createVideoUseCase( - cameraProvider.getCameraInfo(initialCameraSelector), - sessionSettings.aspectRatio, - sessionSettings.targetFrameRate, - sessionSettings.stabilizationMode, - sessionSettings.dynamicRange, - sessionSettings.videoQuality, - backgroundDispatcher - ) - - else -> { - null - } - } + val videoCaptureUseCase = + createVideoUseCase( + cameraProvider.getCameraInfo(initialCameraSelector), + sessionSettings.aspectRatio, + sessionSettings.captureMode, + backgroundDispatcher, + sessionSettings.targetFrameRate.takeIfFeatureGroupInvalid(sessionSettings), + sessionSettings.stabilizationMode.takeIfFeatureGroupInvalid(sessionSettings), + sessionSettings.dynamicRange.takeIfFeatureGroupInvalid(sessionSettings), + sessionSettings.videoQuality.takeIfFeatureGroupInvalid(sessionSettings) + ) launch { processVideoControlEvents( @@ -175,67 +177,18 @@ internal suspend fun runSingleCameraSession( .collectLatest { currentTransientSettings -> coroutineScope sessionScope@{ cameraProvider.unbindAll() - val currentCameraSelector = currentTransientSettings.primaryLensFacing - .toCameraSelector() - val cameraInfo = cameraProvider.getCameraInfo(currentCameraSelector) - val camera2Info = Camera2CameraInfo.from(cameraInfo) - val cameraId = camera2Info.cameraId - - var cameraEffect: CameraEffect? = null - var captureResults: MutableStateFlow? 
= null - if (currentTransientSettings.flashMode == FlashMode.LOW_LIGHT_BOOST) { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R && - cameraConstraints?.supportedIlluminants?.contains( - Illuminant.LOW_LIGHT_BOOST_CAMERA_EFFECT - ) == true && lowLightBoostEffectProvider != null - ) { - captureResults = MutableStateFlow(null) - cameraEffect = lowLightBoostEffectProvider.create( - cameraId = cameraId, - captureResults = captureResults, - coroutineScope = this@sessionScope, - onSceneBrightnessChanged = { boostStrength -> - val strength = LowLightBoostState.Active(strength = boostStrength) - currentCameraState.update { old -> - if (old.lowLightBoostState != strength) { - old.copy(lowLightBoostState = strength) - } else { - old - } - } - }, - onLowLightBoostError = { e -> - Log.w(TAG, "Emitting LLB Error", e) - currentCameraState.update { old -> - old.copy(lowLightBoostState = LowLightBoostState.Error(e)) - } - } - ) - } - } - if (cameraEffect == null && - sessionSettings.streamConfig == StreamConfig.SINGLE_STREAM - ) { - cameraEffect = SingleSurfaceForcingEffect(this@sessionScope) - } val sessionConfig = createSessionConfig( - cameraInfo = cameraProvider.getCameraInfo(currentCameraSelector), + cameraConstraints = cameraConstraints, videoCaptureUseCase = videoCaptureUseCase, initialTransientSettings = currentTransientSettings, - stabilizationMode = sessionSettings.stabilizationMode, - aspectRatio = sessionSettings.aspectRatio, - dynamicRange = sessionSettings.dynamicRange, - imageFormat = sessionSettings.imageFormat, - captureMode = sessionSettings.captureMode, - effect = cameraEffect, - captureResults = captureResults - + sessionSettings = sessionSettings, + sessionScope = this@sessionScope ).apply { useCases.getImageCapture()?.let(onImageCaptureCreated) } cameraProvider.runWith( - currentCameraSelector, + currentTransientSettings.primaryLensFacing.toCameraSelector(), sessionConfig ) { camera -> Log.d(TAG, "Camera session started") @@ -258,11 +211,13 @@ internal 
suspend fun runSingleCameraSession( val videoQuality = getVideoQualityFromResolution( videoCaptureUseCase.resolutionInfo?.resolution ) - if (videoQuality != sessionSettings.videoQuality) { + if (sessionSettings.videoQuality != VideoQuality.UNSPECIFIED && + videoQuality != sessionSettings.videoQuality + ) { Log.e( TAG, - "Failed to select video quality: $sessionSettings.videoQuality. " + - "Fallback: $videoQuality" + "Failed to select video quality:" + + " ${sessionSettings.videoQuality}. Fallback: $videoQuality" ) } launch { @@ -588,29 +543,74 @@ internal fun applyDeviceRotation(deviceRotation: DeviceRotation, useCases: List< } context(CameraSessionContext) -internal fun createSessionConfig( - cameraInfo: CameraInfo, +@OptIn(ExperimentalCamera2Interop::class) +internal suspend fun createSessionConfig( + cameraConstraints: CameraConstraints?, initialTransientSettings: TransientSessionSettings, - stabilizationMode: StabilizationMode, - aspectRatio: AspectRatio, + sessionSettings: PerpetualSessionSettings.SingleCamera, videoCaptureUseCase: VideoCapture?, - dynamicRange: DynamicRange, - imageFormat: ImageOutputFormat, - captureMode: CaptureMode, - effect: CameraEffect? = null, - captureResults: MutableStateFlow? = null + sessionScope: CoroutineScope ): SessionConfig { + val currentCameraSelector = initialTransientSettings.primaryLensFacing + .toCameraSelector() + val cameraInfo = cameraProvider.getCameraInfo(currentCameraSelector) + val camera2Info = Camera2CameraInfo.from(cameraInfo) + val cameraId = camera2Info.cameraId + + var cameraEffect: CameraEffect? = null + var captureResults: MutableStateFlow? 
= null + if (initialTransientSettings.flashMode == FlashMode.LOW_LIGHT_BOOST) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R && + cameraConstraints?.supportedIlluminants?.contains( + Illuminant.LOW_LIGHT_BOOST_CAMERA_EFFECT + ) == true && lowLightBoostEffectProvider != null + ) { + captureResults = MutableStateFlow(null) + cameraEffect = lowLightBoostEffectProvider.create( + cameraId = cameraId, + captureResults = captureResults, + coroutineScope = sessionScope, + onSceneBrightnessChanged = { boostStrength -> + val strength = LowLightBoostState.Active(strength = boostStrength) + currentCameraState.update { old -> + if (old.lowLightBoostState != strength) { + old.copy(lowLightBoostState = strength) + } else { + old + } + } + }, + onLowLightBoostError = { e -> + Log.w(TAG, "Emitting LLB Error", e) + currentCameraState.update { old -> + old.copy(lowLightBoostState = LowLightBoostState.Error(e)) + } + } + ) + } + } + if (cameraEffect == null && + sessionSettings.streamConfig == StreamConfig.SINGLE_STREAM + ) { + cameraEffect = SingleSurfaceForcingEffect(sessionScope) + } + val previewUseCase = createPreviewUseCase( cameraInfo, - aspectRatio, - stabilizationMode, + sessionSettings.aspectRatio, + sessionSettings.stabilizationMode.takeIfFeatureGroupInvalid(sessionSettings), captureResults ) // only create image use case in image or standard - val imageCaptureUseCase = if (captureMode != CaptureMode.VIDEO_ONLY) { - createImageUseCase(cameraInfo, aspectRatio, dynamicRange, imageFormat) + val imageCaptureUseCase = if (sessionSettings.captureMode != CaptureMode.VIDEO_ONLY) { + createImageUseCase( + cameraInfo, + sessionSettings.aspectRatio, + sessionSettings.dynamicRange, + sessionSettings.imageFormat.takeIfFeatureGroupInvalid(sessionSettings) + ) } else { null } @@ -636,17 +636,57 @@ internal fun createSessionConfig( "Setting initial device rotation to ${initialTransientSettings.deviceRotation}" ) + val features = sessionSettings.toGroupableFeatures() ?: emptySet() + 
+ Log.d(TAG, "createSessionConfig: sessionSettings = $sessionSettings, features = $features") + return SessionConfig( useCases = useCases, viewPort = ViewPort.Builder( - Rational(aspectRatio.numerator, aspectRatio.denominator), + Rational( + sessionSettings.aspectRatio.numerator, + sessionSettings.aspectRatio.denominator + ), // Initialize rotation to Preview's rotation, which comes from Display rotation previewUseCase.targetRotation ).build(), - effects = effect?.let { listOf(it) } ?: emptyList() + effects = cameraEffect?.let { listOf(it) } ?: emptyList(), + requiredFeatureGroup = features ) } +/** + * Creates a set of [GroupableFeature] from a [PerpetualSessionSettings.SingleCamera]. + * + * Only the [PerpetualSessionSettings.SingleCamera] values that are supported by CameraX feature + * group APIs are included in the returned set. + * + * A null value is returned if the feature groups API can't be used for some value in + * [PerpetualSessionSettings.SingleCamera], e.g. optical stabilization, or 15 FPS. + */ +internal fun PerpetualSessionSettings.SingleCamera.toGroupableFeatures(): Set? { + return buildSet { + this@toGroupableFeatures.toFeatureGroupabilities().forEach { + when (it) { + is ExplicitlyGroupable -> { + val shouldAdd = when { + it.feature == GroupableFeature.IMAGE_ULTRA_HDR -> + captureMode != CaptureMode.VIDEO_ONLY + it.feature.featureType == GroupableFeature.FEATURE_TYPE_RECORDING_QUALITY -> + captureMode != CaptureMode.IMAGE_ONLY + else -> true + } + if (shouldAdd) { + add(it.feature) + } + } + is ImplicitlyGroupable -> {} // No-op. + is Ungroupable -> return null + } + } + }.toSet() +} + private fun getVideoQualityFromResolution(resolution: Size?): VideoQuality = resolution?.let { res -> QUALITY_RANGE_MAP.firstNotNullOfOrNull { @@ -672,7 +712,7 @@ internal fun createImageUseCase( cameraInfo: CameraInfo, aspectRatio: AspectRatio, dynamicRange: DynamicRange, - imageFormat: ImageOutputFormat + imageFormat: ImageOutputFormat? 
= null ): ImageCapture { val builder = ImageCapture.Builder() builder.setResolutionSelector( @@ -688,12 +728,17 @@ internal fun createImageUseCase( internal fun createVideoUseCase( cameraInfo: CameraInfo, aspectRatio: AspectRatio, - targetFrameRate: Int, - stabilizationMode: StabilizationMode, - dynamicRange: DynamicRange, - videoQuality: VideoQuality, - backgroundDispatcher: CoroutineDispatcher -): VideoCapture { + captureMode: CaptureMode, + backgroundDispatcher: CoroutineDispatcher, + targetFrameRate: Int? = null, + stabilizationMode: StabilizationMode? = null, + dynamicRange: DynamicRange? = null, + videoQuality: VideoQuality? = null +): VideoCapture? { + if (captureMode != CaptureMode.STANDARD && captureMode != CaptureMode.VIDEO_ONLY) { + return null + } + val sensorLandscapeRatio = cameraInfo.sensorLandscapeRatio val recorder = Recorder.Builder() .setAspectRatio( @@ -701,7 +746,7 @@ internal fun createVideoUseCase( ) .setExecutor(backgroundDispatcher.asExecutor()) .apply { - videoQuality.toQuality()?.let { quality -> + videoQuality?.toQuality()?.let { quality -> // No fallback strategy is used. The app will crash if the quality is unsupported setQualitySelector( QualitySelector.from( @@ -718,11 +763,13 @@ internal fun createVideoUseCase( setVideoStabilizationEnabled(true) } // set target fps - if (targetFrameRate != TARGET_FPS_AUTO) { + if (targetFrameRate != TARGET_FPS_AUTO && targetFrameRate != null) { setTargetFrameRate(Range(targetFrameRate, targetFrameRate)) } - setDynamicRange(dynamicRange.toCXDynamicRange()) + if (dynamicRange != null) { + setDynamicRange(dynamicRange.toCXDynamicRange()) + } }.build() } @@ -744,10 +791,10 @@ private fun getAspectRatioForUseCase(sensorLandscapeRatio: Float, aspectRatio: A } context(CameraSessionContext) -internal fun createPreviewUseCase( +internal suspend fun createPreviewUseCase( cameraInfo: CameraInfo, aspectRatio: AspectRatio, - stabilizationMode: StabilizationMode, + stabilizationMode: StabilizationMode? 
= null, captureResults: MutableStateFlow? = null ): Preview = Preview.Builder().apply { updateCameraStateWithCaptureResults( @@ -761,6 +808,7 @@ internal fun createPreviewUseCase( StabilizationMode.OPTICAL -> setOpticalStabilizationModeEnabled(true) StabilizationMode.OFF -> setOpticalStabilizationModeEnabled(false) StabilizationMode.HIGH_QUALITY -> {} // No-op. Handled by VideoCapture use case. + null -> {} // No-op. Handled by feature groups API. else -> throw UnsupportedOperationException( "Unexpected stabilization mode: $stabilizationMode. Stabilization mode should always " + "an explicit mode, such as ON, OPTICAL, OFF or HIGH_QUALITY" @@ -772,8 +820,10 @@ internal fun createPreviewUseCase( ) }.build() .apply { - setSurfaceProvider { surfaceRequest -> - surfaceRequests.update { surfaceRequest } + withContext(Dispatchers.Main) { + setSurfaceProvider { surfaceRequest -> + surfaceRequests.update { surfaceRequest } + } } } diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt index 36f262c6d..338f92422 100644 --- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt @@ -81,6 +81,7 @@ import java.io.File import java.io.FileNotFoundException import javax.inject.Inject import javax.inject.Provider +import kotlin.time.measureTime import kotlinx.coroutines.CoroutineDispatcher import kotlinx.coroutines.channels.Channel import kotlinx.coroutines.coroutineScope @@ -123,9 +124,11 @@ constructor( Map> ) : CameraSystem { private lateinit var cameraProvider: ProcessCameraProvider + private lateinit var featureGroupHandler: FeatureGroupHandler private var imageCaptureUseCase: ImageCapture? 
= null + private lateinit var initialSystemConstraints: CameraSystemConstraints private lateinit var systemConstraints: CameraSystemConstraints private val screenFlashEvents: Channel = @@ -147,6 +150,8 @@ constructor( private val lowLightBoostAvailabilityChecker: LowLightBoostAvailabilityChecker? private val lowLightBoostEffectProvider: LowLightBoostEffectProvider? + internal lateinit var defaultCameraSessionContext: CameraSessionContext + init { val entry = availabilityCheckers.entries.firstOrNull() if (entry == null) { @@ -173,6 +178,27 @@ constructor( singleLensMode = debugSettings.singleLensMode ) + defaultCameraSessionContext = CameraSessionContext( + context = application, + cameraProvider = cameraProvider, + backgroundDispatcher = defaultDispatcher, + screenFlashEvents = Channel(), + filePathGenerator = filePathGenerator, + focusMeteringEvents = Channel(), + videoCaptureControlEvents = Channel(), + currentCameraState = MutableStateFlow(CameraState()), + surfaceRequests = MutableStateFlow(null), + transientSettings = MutableStateFlow(null), + lowLightBoostEffectProvider = lowLightBoostEffectProvider + ) + + featureGroupHandler = FeatureGroupHandler( + cameraSystem = this, + cameraProvider = cameraProvider, + defaultCameraSessionContext = defaultCameraSessionContext, + defaultDispatcher = defaultDispatcher + ) + // updates values for available cameras val availableCameraLenses = listOf( @@ -284,6 +310,8 @@ constructor( } ) + initialSystemConstraints = systemConstraints + constraintsRepository.updateSystemConstraints(systemConstraints) currentSettings.value = @@ -383,6 +411,39 @@ constructor( } } + internal fun CameraAppSettings.toTransientSessionSettings(): TransientSessionSettings { + return TransientSessionSettings( + isAudioEnabled = audioEnabled, + deviceRotation = deviceRotation, + flashMode = flashMode, + primaryLensFacing = cameraLensFacing, + zoomRatios = defaultZoomRatios, + testPattern = debugSettings.testPattern + ) + } + + internal suspend fun 
CameraAppSettings.toSingleCameraSessionSettings( + cameraConstraints: CameraConstraints + ): PerpetualSessionSettings.SingleCamera { + val resolvedStabilizationMode = resolveStabilizationMode( + requestedStabilizationMode = stabilizationMode, + cameraAppSettings = this, + cameraConstraints = cameraConstraints + ) + + return PerpetualSessionSettings.SingleCamera( + aspectRatio = aspectRatio, + captureMode = captureMode, + streamConfig = streamConfig, + targetFrameRate = targetFrameRate, + stabilizationMode = resolvedStabilizationMode, + dynamicRange = dynamicRange, + videoQuality = videoQuality, + imageFormat = imageFormat, + lowLightBoostPriority = lowLightBoostPriority + ) + } + @OptIn(ExperimentalCamera2Interop::class) override suspend fun runCamera() = coroutineScope { Log.d(TAG, "runCamera") @@ -395,14 +456,7 @@ constructor( currentSettings .filterNotNull() .map { currentCameraSettings -> - transientSettings.value = TransientSessionSettings( - isAudioEnabled = currentCameraSettings.audioEnabled, - deviceRotation = currentCameraSettings.deviceRotation, - flashMode = currentCameraSettings.flashMode, - primaryLensFacing = currentCameraSettings.cameraLensFacing, - zoomRatios = currentCameraSettings.defaultZoomRatios, - testPattern = currentCameraSettings.debugSettings.testPattern - ) + transientSettings.value = currentCameraSettings.toTransientSessionSettings() when (currentCameraSettings.concurrentCameraMode) { ConcurrentCameraMode.OFF -> { @@ -413,24 +467,7 @@ constructor( "${currentCameraSettings.cameraLensFacing}" } - val resolvedStabilizationMode = resolveStabilizationMode( - requestedStabilizationMode = currentCameraSettings.stabilizationMode, - targetFrameRate = currentCameraSettings.targetFrameRate, - cameraConstraints = cameraConstraints, - concurrentCameraMode = currentCameraSettings.concurrentCameraMode - ) - - PerpetualSessionSettings.SingleCamera( - aspectRatio = currentCameraSettings.aspectRatio, - captureMode = currentCameraSettings.captureMode, - 
streamConfig = currentCameraSettings.streamConfig, - targetFrameRate = currentCameraSettings.targetFrameRate, - stabilizationMode = resolvedStabilizationMode, - dynamicRange = currentCameraSettings.dynamicRange, - videoQuality = currentCameraSettings.videoQuality, - imageFormat = currentCameraSettings.imageFormat, - lowLightBoostPriority = currentCameraSettings.lowLightBoostPriority - ) + currentCameraSettings.toSingleCameraSessionSettings(cameraConstraints) } ConcurrentCameraMode.DUAL -> { @@ -479,13 +516,29 @@ constructor( ) { try { when (sessionSettings) { - is PerpetualSessionSettings.SingleCamera -> runSingleCameraSession( - sessionSettings, - systemConstraints.forCurrentLens(currentSettings.value!!), - onImageCaptureCreated = { imageCapture -> - imageCaptureUseCase = imageCapture + is PerpetualSessionSettings.SingleCamera -> { + launch(backgroundDispatcher) { + // runSingleCameraSession never completes due to + // collectLatest on a StateFlow, so this must be launched + // beforehand + + val duration = + measureTime { updateSystemConstraintsByFeatureGroups() } + Log.d( + TAG, + "runCamera: updateSystemConstraints" + + " completed in $duration" + ) } - ) + + runSingleCameraSession( + sessionSettings, + systemConstraints.forCurrentLens(currentSettings.value!!), + onImageCaptureCreated = { imageCapture -> + imageCaptureUseCase = imageCapture + } + ) + } is PerpetualSessionSettings.ConcurrentCamera -> runConcurrentCameraSession( @@ -504,37 +557,63 @@ constructor( } } - private fun resolveStabilizationMode( + private suspend fun updateSystemConstraintsByFeatureGroups() { + val cameraAppSettings = requireNotNull(currentSettings.value) + + systemConstraints = featureGroupHandler.filterSystemConstraints( + currentSettings = cameraAppSettings, + initialSystemConstraints = initialSystemConstraints, + currentSystemConstraints = systemConstraints + ) + + constraintsRepository.updateSystemConstraints(systemConstraints) + } + + internal suspend fun 
resolveStabilizationMode( requestedStabilizationMode: StabilizationMode, - targetFrameRate: Int, - cameraConstraints: CameraConstraints, - concurrentCameraMode: ConcurrentCameraMode - ): StabilizationMode = if (concurrentCameraMode == ConcurrentCameraMode.DUAL) { - StabilizationMode.OFF - } else { - with(cameraConstraints) { - // Convert AUTO stabilization mode to the first supported stabilization mode - val stabilizationMode = if (requestedStabilizationMode == StabilizationMode.AUTO) { - // Choose between ON, OPTICAL, or OFF, depending on support, in that order - sequenceOf(StabilizationMode.ON, StabilizationMode.OPTICAL, StabilizationMode.OFF) - .first { - it in supportedStabilizationModes && - targetFrameRate !in it.unsupportedFpsSet - } - } else { - requestedStabilizationMode - } + cameraAppSettings: CameraAppSettings, + cameraConstraints: CameraConstraints + ): StabilizationMode = + if (cameraAppSettings.concurrentCameraMode == ConcurrentCameraMode.DUAL) { + StabilizationMode.OFF + } else { + with(cameraConstraints) { + // Convert AUTO stabilization mode to the first supported stabilization mode + val stabilizationMode = if (requestedStabilizationMode == StabilizationMode.AUTO) { + // Choose between ON, OPTICAL, or OFF, depending on support, in that order + sequenceOf( + StabilizationMode.ON, + StabilizationMode.OPTICAL, + StabilizationMode.OFF + ) + .first { + it in supportedStabilizationModes && + cameraAppSettings.targetFrameRate !in it.unsupportedFpsSet && ( + it == StabilizationMode.OFF || + featureGroupHandler.isGroupingSupported( + cameraAppSettings.applyStabilizationMode(it), + cameraProvider.getCameraInfo( + cameraAppSettings + .cameraLensFacing.toCameraSelector() + ), + initialSystemConstraints + ) + ) + } + } else { + requestedStabilizationMode + } - // Check that the stabilization mode can be supported, otherwise return OFF - if (stabilizationMode in supportedStabilizationModes && - targetFrameRate !in stabilizationMode.unsupportedFpsSet - ) { - 
stabilizationMode - } else { - StabilizationMode.OFF + // Check that the stabilization mode can be supported, otherwise return OFF + if (stabilizationMode in supportedStabilizationModes && + cameraAppSettings.targetFrameRate !in stabilizationMode.unsupportedFpsSet + ) { + stabilizationMode + } else { + StabilizationMode.OFF + } } } - } override suspend fun takePicture(onCaptureStarted: (() -> Unit)) { if (imageCaptureUseCase == null) { @@ -694,7 +773,8 @@ constructor( * mode will be applied. If left null, it will not change the current capture mode. */ private fun CameraAppSettings.tryApplyCaptureModeConstraints( - defaultCaptureMode: CaptureMode? = null + defaultCaptureMode: CaptureMode? = null, + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints ): CameraAppSettings { Log.d(TAG, "applying capture mode constraints") return systemConstraints.perLensConstraints[cameraLensFacing]?.let { constraints -> @@ -776,13 +856,29 @@ constructor( } ?: this } - private fun CameraAppSettings.tryApplyDynamicRangeConstraints(): CameraAppSettings = + private fun CameraAppSettings.tryApplyDynamicRangeConstraints( + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings = systemConstraints.perLensConstraints[cameraLensFacing]?.let { constraints -> with(constraints.supportedDynamicRanges) { val newDynamicRange = if (contains(dynamicRange) && flashMode != FlashMode.LOW_LIGHT_BOOST ) { - dynamicRange + if (captureMode == CaptureMode.IMAGE_ONLY) { + // Reaching this point in code flow means that JPEG_R will be requested + // later, and some devices may not support HDR and JPEG_R together. So, + // we should enable HDR here only if it is supported with JPEG_R. However, + // the value of isHdrSupportedWithJpegR is updated asynchronously and may + // not be up-to-date in rare cases, so this is done on a best-effort basis. 
+ + if (featureGroupHandler.isHdrSupportedWithJpegR() == false) { + DynamicRange.SDR + } else { + dynamicRange + } + } else { + dynamicRange + } } else { DynamicRange.SDR } @@ -804,10 +900,14 @@ constructor( this.copy(aspectRatio = AspectRatio.NINE_SIXTEEN) } - private fun CameraAppSettings.tryApplyImageFormatConstraints(): CameraAppSettings = + private fun CameraAppSettings.tryApplyImageFormatConstraints( + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings = systemConstraints.perLensConstraints[cameraLensFacing]?.let { constraints -> with(constraints.supportedImageFormatsMap[streamConfig]) { - val newImageFormat = if (this != null && contains(imageFormat)) { + val newImageFormat = if (this != null && contains(imageFormat) && + captureMode != CaptureMode.VIDEO_ONLY + ) { imageFormat } else { ImageOutputFormat.JPEG @@ -819,7 +919,9 @@ constructor( } } ?: this - private fun CameraAppSettings.tryApplyFrameRateConstraints(): CameraAppSettings = + private fun CameraAppSettings.tryApplyFrameRateConstraints( + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings = systemConstraints.perLensConstraints[cameraLensFacing]?.let { constraints -> with(constraints.supportedFixedFrameRates) { val newTargetFrameRate = if (contains(targetFrameRate)) { @@ -834,7 +936,9 @@ constructor( } } ?: this - private fun CameraAppSettings.tryApplyStabilizationConstraints(): CameraAppSettings = + private fun CameraAppSettings.tryApplyStabilizationConstraints( + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings = systemConstraints.perLensConstraints[cameraLensFacing]?.let { constraints -> with(constraints) { val newStabilizationMode = if (stabilizationMode != StabilizationMode.AUTO && @@ -852,24 +956,27 @@ constructor( } } ?: this - private fun CameraAppSettings.tryApplyConcurrentCameraModeConstraints(): 
CameraAppSettings = - when (concurrentCameraMode) { - ConcurrentCameraMode.OFF -> this - else -> - if (systemConstraints.concurrentCamerasSupported && - dynamicRange == DynamicRange.SDR && - streamConfig == StreamConfig.MULTI_STREAM && - flashMode != FlashMode.LOW_LIGHT_BOOST - ) { - copy( - targetFrameRate = TARGET_FPS_AUTO - ) - } else { - copy(concurrentCameraMode = ConcurrentCameraMode.OFF) - } - } + private fun CameraAppSettings.tryApplyConcurrentCameraModeConstraints( + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings = when (concurrentCameraMode) { + ConcurrentCameraMode.OFF -> this + else -> + if (systemConstraints.concurrentCamerasSupported && + dynamicRange == DynamicRange.SDR && + streamConfig == StreamConfig.MULTI_STREAM && + flashMode != FlashMode.LOW_LIGHT_BOOST + ) { + copy( + targetFrameRate = TARGET_FPS_AUTO + ) + } else { + copy(concurrentCameraMode = ConcurrentCameraMode.OFF) + } + } - private fun CameraAppSettings.tryApplyVideoQualityConstraints(): CameraAppSettings = + private fun CameraAppSettings.tryApplyVideoQualityConstraints( + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings = systemConstraints.perLensConstraints[cameraLensFacing]?.let { constraints -> with(constraints.supportedVideoQualitiesMap) { val newVideoQuality = get(dynamicRange).let { @@ -939,11 +1046,19 @@ constructor( override suspend fun setVideoQuality(videoQuality: VideoQuality) { currentSettings.update { old -> - old?.copy(videoQuality = videoQuality) - ?.tryApplyVideoQualityConstraints() + old?.applyVideoQuality(videoQuality = videoQuality) } } + /** Returns a new [CameraAppSettings] with the provided [VideoQuality] applied. 
*/ + internal fun CameraAppSettings.applyVideoQuality( + videoQuality: VideoQuality, + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings { + return copy(videoQuality = videoQuality) + .tryApplyVideoQualityConstraints(systemConstraints) + } + override suspend fun setLowLightBoostPriority(lowLightBoostPriority: LowLightBoostPriority) { currentSettings.update { old -> old?.copy(lowLightBoostPriority = lowLightBoostPriority) @@ -952,23 +1067,39 @@ constructor( override suspend fun setStreamConfig(streamConfig: StreamConfig) { currentSettings.update { old -> - old?.copy(streamConfig = streamConfig) - ?.tryApplyImageFormatConstraints() - ?.tryApplyConcurrentCameraModeConstraints() - ?.tryApplyCaptureModeConstraints() - ?.tryApplyVideoQualityConstraints() + old?.applyStreamConfig(streamConfig = streamConfig) } } + /** Returns a new [CameraAppSettings] with the provided [StreamConfig] applied. */ + internal fun CameraAppSettings.applyStreamConfig( + streamConfig: StreamConfig, + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings { + return copy(streamConfig = streamConfig) + .tryApplyImageFormatConstraints(systemConstraints) + .tryApplyConcurrentCameraModeConstraints(systemConstraints) + .tryApplyCaptureModeConstraints(systemConstraints = systemConstraints) + .tryApplyVideoQualityConstraints(systemConstraints) + } + override suspend fun setDynamicRange(dynamicRange: DynamicRange) { currentSettings.update { old -> - old?.copy(dynamicRange = dynamicRange) - ?.tryApplyDynamicRangeConstraints() - ?.tryApplyConcurrentCameraModeConstraints() - ?.tryApplyCaptureModeConstraints(CaptureMode.STANDARD) + old?.applyDynamicRange(dynamicRange) } } + /** Returns a new [CameraAppSettings] with the provided [DynamicRange] applied. 
*/ + internal fun CameraAppSettings.applyDynamicRange( + dynamicRange: DynamicRange, + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings { + return copy(dynamicRange = dynamicRange) + .tryApplyDynamicRangeConstraints(systemConstraints) + .tryApplyConcurrentCameraModeConstraints(systemConstraints) + .tryApplyCaptureModeConstraints(CaptureMode.STANDARD, systemConstraints) + } + override fun setDeviceRotation(deviceRotation: DeviceRotation) { currentSettings.update { old -> old?.copy(deviceRotation = deviceRotation) @@ -985,12 +1116,20 @@ constructor( override suspend fun setImageFormat(imageFormat: ImageOutputFormat) { currentSettings.update { old -> - old?.copy(imageFormat = imageFormat) - ?.tryApplyImageFormatConstraints() - ?.tryApplyCaptureModeConstraints(CaptureMode.STANDARD) + old?.applyImageFormat(imageFormat = imageFormat) } } + /** Returns a new [CameraAppSettings] with the provided [ImageOutputFormat] applied. */ + internal fun CameraAppSettings.applyImageFormat( + imageFormat: ImageOutputFormat, + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings { + return copy(imageFormat = imageFormat) + .tryApplyImageFormatConstraints(systemConstraints) + .tryApplyCaptureModeConstraints(CaptureMode.STANDARD, systemConstraints) + } + override suspend fun setMaxVideoDuration(durationInMillis: Long) { currentSettings.update { old -> old?.copy( @@ -1001,17 +1140,33 @@ constructor( override suspend fun setStabilizationMode(stabilizationMode: StabilizationMode) { currentSettings.update { old -> - old?.copy(stabilizationMode = stabilizationMode) + old?.applyStabilizationMode(stabilizationMode = stabilizationMode) } } + /** Returns a new [CameraAppSettings] with the provided [StabilizationMode] applied. 
*/ + internal fun CameraAppSettings.applyStabilizationMode( + stabilizationMode: StabilizationMode + ): CameraAppSettings { + return copy(stabilizationMode = stabilizationMode) + } + override suspend fun setTargetFrameRate(targetFrameRate: Int) { currentSettings.update { old -> - old?.copy(targetFrameRate = targetFrameRate)?.tryApplyFrameRateConstraints() - ?.tryApplyConcurrentCameraModeConstraints() + old?.applyTargetFrameRate(targetFrameRate) } } + /** Returns a new [CameraAppSettings] with the provided frame rate applied. */ + internal fun CameraAppSettings.applyTargetFrameRate( + targetFrameRate: Int, + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings { + return copy(targetFrameRate = targetFrameRate) + .tryApplyFrameRateConstraints(systemConstraints) + .tryApplyConcurrentCameraModeConstraints(systemConstraints) + } + override suspend fun setAudioEnabled(isAudioEnabled: Boolean) { currentSettings.update { old -> old?.copy(audioEnabled = isAudioEnabled) @@ -1020,10 +1175,30 @@ constructor( override suspend fun setCaptureMode(captureMode: CaptureMode) { currentSettings.update { old -> - old?.copy(captureMode = captureMode) + old?.applyCaptureMode(captureMode = captureMode) } } + /** Returns a new [CameraAppSettings] with the provided [captureMode] applied. 
*/ + internal fun CameraAppSettings.applyCaptureMode( + captureMode: CaptureMode, + systemConstraints: CameraSystemConstraints = this@CameraXCameraSystem.systemConstraints + ): CameraAppSettings { + val isHdrOn = + dynamicRange == DynamicRange.HLG10 || + imageFormat == ImageOutputFormat.JPEG_ULTRA_HDR + + return copy( + captureMode = captureMode, + dynamicRange = + if (isHdrOn) DynamicRange.HLG10 else DynamicRange.SDR, + imageFormat = + if (isHdrOn) ImageOutputFormat.JPEG_ULTRA_HDR else ImageOutputFormat.JPEG + ) + .tryApplyDynamicRangeConstraints() + .tryApplyImageFormatConstraints() + } + private suspend fun handleLowLightBoostErrors() { currentCameraState.map { it.lowLightBoostState }.distinctUntilChanged().collect { state -> if (state is LowLightBoostState.Error) { diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ConcurrentCameraSession.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ConcurrentCameraSession.kt index a03c76bfd..00db20efc 100644 --- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ConcurrentCameraSession.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ConcurrentCameraSession.kt @@ -68,21 +68,19 @@ internal suspend fun runConcurrentCameraSession( .filterNotNull() .first() - val videoCapture = if (sessionSettings.captureMode != CaptureMode.IMAGE_ONLY) { + val videoCapture = createVideoUseCase( cameraProvider.getCameraInfo( initialTransientSettings.primaryLensFacing.toCameraSelector() ), sessionSettings.aspectRatio, + sessionSettings.captureMode, + backgroundDispatcher, TARGET_FPS_AUTO, StabilizationMode.OFF, DynamicRange.SDR, - VideoQuality.UNSPECIFIED, - backgroundDispatcher + VideoQuality.UNSPECIFIED ) - } else { - null - } val useCaseGroup = createUseCaseGroup( cameraInfo = sessionSettings.primaryCameraInfo, @@ -211,7 +209,7 @@ internal suspend fun runConcurrentCameraSession( } context(CameraSessionContext) -internal fun createUseCaseGroup( +internal suspend 
fun createUseCaseGroup( cameraInfo: CameraInfo, initialTransientSettings: TransientSessionSettings, stabilizationMode: StabilizationMode, diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupHandler.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupHandler.kt new file mode 100644 index 000000000..103e4d61a --- /dev/null +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupHandler.kt @@ -0,0 +1,436 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.jetpackcamera.core.camera + +import android.util.Log +import androidx.camera.core.CameraInfo +import androidx.camera.lifecycle.ProcessCameraProvider +import com.google.jetpackcamera.model.CaptureMode +import com.google.jetpackcamera.model.DynamicRange +import com.google.jetpackcamera.model.ImageOutputFormat +import com.google.jetpackcamera.model.StabilizationMode +import com.google.jetpackcamera.model.StreamConfig +import com.google.jetpackcamera.model.VideoQuality +import com.google.jetpackcamera.settings.model.CameraAppSettings +import com.google.jetpackcamera.settings.model.CameraConstraints +import com.google.jetpackcamera.settings.model.CameraSystemConstraints +import com.google.jetpackcamera.settings.model.forCurrentLens +import kotlinx.atomicfu.atomic +import kotlinx.coroutines.CoroutineDispatcher +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Job +import kotlinx.coroutines.flow.MutableStateFlow +import kotlinx.coroutines.flow.asStateFlow + +private const val TAG = "FeatureGroupHandler" + +/** + * Handles logic related to CameraX feature groups. + * + * This class encapsulates the operations required to ensure camera settings are compatible with + * device capabilities using the CameraX feature group API. + * + * Key functionalities include: + * - [filterSystemConstraints]: Validates the current camera settings against feature group requirements + * and updates [CameraSystemConstraints] to filter out incompatible options. + * - [isGroupingSupported]: Checks if a specific combination of [CameraAppSettings] is supported + * as a valid feature group on the device. + */ +internal class FeatureGroupHandler( + // TODO: Remove the CameraXCameraSystem dependency from here by refactoring out all camera + // setting applying APIs (e.g. 
applyDynamicRange) from CameraXCameraSystem + private val cameraSystem: CameraXCameraSystem, + private val cameraProvider: ProcessCameraProvider, + private val defaultCameraSessionContext: CameraSessionContext, + private val defaultDispatcher: CoroutineDispatcher +) { + private var isHdrSupportedWithJpegR = atomic(null) + + /** + * Filters the [CameraSystemConstraints] based on feature group compatibility. + * + * This function checks various combinations of settings (dynamic range, frame rate, + * stabilization, etc.) against the device's capabilities using the CameraX feature groups API. + * It filters out unsupported options from the system constraints, ensuring that the UI only + * presents valid combinations to the user. + */ + suspend fun filterSystemConstraints( + currentSettings: CameraAppSettings, + initialSystemConstraints: CameraSystemConstraints, + currentSystemConstraints: CameraSystemConstraints + ): CameraSystemConstraints { + val initialCameraConstraints = + requireNotNull(initialSystemConstraints.forCurrentLens(currentSettings)) + + Log.d( + TAG, + "filterSystemConstraints: cameraAppSettings = $currentSettings" + + ", initialCameraConstraints = $initialCameraConstraints" + ) + + // Access internal extension function from CameraXCameraSystem + val sessionSettings = with(cameraSystem) { + currentSettings.toSingleCameraSessionSettings(initialCameraConstraints) + } + val featureDataSet = sessionSettings.toFeatureGroupabilities() + + if (featureDataSet.isInvalid()) { + Log.i( + TAG, + "filterSystemConstraints: since the settings is incompatible" + + " with CameraX feature groups API, falling back to initial" + + " system constraints without using feature groups. featureDataSet = " + + " $featureDataSet." + ) + return initialSystemConstraints + } + + val cameraInfo = + cameraProvider.getCameraInfo(currentSettings.cameraLensFacing.toCameraSelector()) + + // TODO: More stabilization + FPS pairs can be supported with CameraX feature group API. 
+ // However, while the following code does provide such support, this function is called + // only when camera session is recreated. So, updating unsupportedStabilizationFpsMap now + // can cause regressions in scenarios where user tries to change both stabilization mode + // and FPS mode from settings page directly. We need to ensure this function is used + // for each setting value update to avoid that. + +// val unsupportedStabilizationFpsMap = buildMap { +// initialCameraConstraints +// .unsupportedStabilizationFpsMap +// .forEach { (stabilizationMode, fpsList) -> +// if (stabilizationMode.toFeatureGroupability() is Nongroupable) { +// put(stabilizationMode, fpsList) +// return@forEach +// } +// +// fpsList.forEach { fps -> +// if (fps.toFpsFeatureGroupability() is Nongroupable) { +// put(stabilizationMode, fpsList) +// return@forEach +// } +// +// if (!cameraAppSettings.copyStabilizationMode(stabilizationMode) +// .copyTargetFrameRate(fps).isGroupingSupported(cameraInfo) +// ) { +// put(stabilizationMode, fpsList) +// } +// } +// } +// } + + val updatedPerLensConstraints = initialSystemConstraints.perLensConstraints.toMutableMap() + + updatedPerLensConstraints[currentSettings.cameraLensFacing] = + initialCameraConstraints + .copy( + supportedDynamicRanges = filterGroupableDynamicRanges( + currentSettings, + initialSystemConstraints, + initialCameraConstraints, + cameraInfo + ), + supportedFixedFrameRates = filterGroupableFrameRates( + currentSettings, + initialSystemConstraints, + initialCameraConstraints, + cameraInfo + ), + supportedStabilizationModes = filterGroupableStabilizationModes( + currentSettings, + initialSystemConstraints, + initialCameraConstraints, + cameraInfo + ), + supportedImageFormatsMap = filterGroupableImageFormatsMap( + currentSettings, + initialSystemConstraints, + initialCameraConstraints, + cameraInfo + ), + supportedVideoQualitiesMap = filterGroupableVideoQualitiesMap( + currentSettings, + initialSystemConstraints, + 
initialCameraConstraints, + cameraInfo + ) +// unsupportedStabilizationFpsMap = unsupportedStabilizationFpsMap + ) + + val newConstraints = currentSystemConstraints.copy( + perLensConstraints = updatedPerLensConstraints + ) + + Log.d(TAG, "filterSystemConstraints: updated systemConstraints = $newConstraints") + + cacheConcurrentHdrJpegRCapability(currentSettings, newConstraints) + + return newConstraints + } + + /** + * Filters supported [DynamicRange]s by checking groupability with current settings. + */ + private suspend fun filterGroupableDynamicRanges( + cameraAppSettings: CameraAppSettings, + initialSystemConstraints: CameraSystemConstraints, + initialCameraConstraints: CameraConstraints, + cameraInfo: CameraInfo + ): Set { + Log.d(TAG, "filterGroupableDynamicRanges") + + return initialCameraConstraints.supportedDynamicRanges.filter { + val settings = with(cameraSystem) { + cameraAppSettings.applyDynamicRange(it, initialSystemConstraints) + } + isGroupingSupported(settings, cameraInfo, initialSystemConstraints) + }.toSet() + } + + /** + * Filters supported frame rates by checking groupability with current settings. + */ + private suspend fun filterGroupableFrameRates( + cameraAppSettings: CameraAppSettings, + initialSystemConstraints: CameraSystemConstraints, + initialCameraConstraints: CameraConstraints, + cameraInfo: CameraInfo + ): Set { + Log.d(TAG, "filterGroupableFrameRates") + + return initialCameraConstraints.supportedFixedFrameRates.filter { + val settings = with(cameraSystem) { + cameraAppSettings.applyTargetFrameRate(it, initialSystemConstraints) + } + isGroupingSupported(settings, cameraInfo, initialSystemConstraints) + }.toSet() + } + + /** + * Filters supported [StabilizationMode]s by checking groupability with current settings. 
+ */ + private suspend fun filterGroupableStabilizationModes( + cameraAppSettings: CameraAppSettings, + initialSystemConstraints: CameraSystemConstraints, + initialCameraConstraints: CameraConstraints, + cameraInfo: CameraInfo + ): Set { + Log.d(TAG, "filterGroupableStabilizationModes") + + return initialCameraConstraints.supportedStabilizationModes.filter { + Log.d(TAG, "filterGroupableStabilizationModes: it = $it") + + val resolvedStabilizationMode = with(cameraSystem) { + resolveStabilizationMode( + requestedStabilizationMode = it, + cameraAppSettings = cameraAppSettings, + cameraConstraints = initialCameraConstraints + ) + } + val settings = with(cameraSystem) { + cameraAppSettings.applyStabilizationMode(resolvedStabilizationMode) + } + isGroupingSupported(settings, cameraInfo, initialSystemConstraints) + }.toSet() + } + + /** + * Filters supported [ImageOutputFormat]s by checking groupability with current settings. + */ + private suspend fun filterGroupableImageFormatsMap( + cameraAppSettings: CameraAppSettings, + initialSystemConstraints: CameraSystemConstraints, + initialCameraConstraints: CameraConstraints, + cameraInfo: CameraInfo + ): Map> { + Log.d(TAG, "filterGroupableImageFormatsMap") + + return buildMap { + initialCameraConstraints + .supportedImageFormatsMap + .forEach { (streamConfig, imageFormats) -> + put( + streamConfig, + imageFormats.filter { + val settings = with(cameraSystem) { + cameraAppSettings + .applyStreamConfig(streamConfig, initialSystemConstraints) + .run { + if (it == ImageOutputFormat.JPEG_ULTRA_HDR) { + // tryApplyImageFormatConstraints changes capture mode + // to VIDEO_ONLY if both JPEG_R and HDR are supported + // with STANDARD capture mode. VIDEO_ONLY capture mode + // leads to IMAGE_ULTRA_HDR GroupableFeature not being + // set to CameraX. To workaround this issue, capture + // mode is manually set to IMAGE_ONLY to ensure the + // capability checking is correct. 
+ + applyCaptureMode( + CaptureMode.IMAGE_ONLY, + initialSystemConstraints + ) + } else { + this + } + } + .applyImageFormat(it, initialSystemConstraints) + } + isGroupingSupported(settings, cameraInfo, initialSystemConstraints) + }.toSet() + ) + } + } + } + + /** + * Filters supported [VideoQuality]s by checking groupability with current settings. + */ + private suspend fun filterGroupableVideoQualitiesMap( + cameraAppSettings: CameraAppSettings, + initialSystemConstraints: CameraSystemConstraints, + initialCameraConstraints: CameraConstraints, + cameraInfo: CameraInfo + ): Map> { + Log.d(TAG, "filterGroupableVideoQualitiesMap") + + return buildMap { + initialCameraConstraints + .supportedVideoQualitiesMap + .forEach { (dynamicRange, videoQualities) -> + put( + dynamicRange, + videoQualities.filter { + val settings = with(cameraSystem) { + cameraAppSettings + .applyDynamicRange(dynamicRange, initialSystemConstraints) + .applyVideoQuality(it, initialSystemConstraints) + } + isGroupingSupported(settings, cameraInfo, initialSystemConstraints) + } + ) + } + } + } + + /** + * Returns whether a [CameraAppSettings] is supported together as a group. + * + * High quality features sometimes can be supported individually while being unsupported + * together as a group. This API utilizes the CameraX feature group APIs to know if a + * [CameraAppSettings] is supported as a group. + * + * However, not all [CameraAppSettings] feature values are queryable through the feature group + * APIs. So, this API works in a best-effort manner by using only the queryable features + */ + internal suspend fun isGroupingSupported( + cameraAppSettings: CameraAppSettings, + cameraInfo: CameraInfo, + initialSystemConstraints: CameraSystemConstraints + ): Boolean { + // TODO: Optimize feature group queries via pre-calculation and persistence. + // Reconstructing the full SessionConfig and UseCases for every query is expensive. 
+ // Consider pre-calculating the 16 possible combinations of groupable features + // (HDR, 60FPS, etc.) and persisting the results in a database. This would make UI checks + // O(1) across app launches since hardware capabilities are generally static. + + val cameraConstraints = + requireNotNull(initialSystemConstraints.forCurrentLens(cameraAppSettings)) + + val transientSettings = + with(cameraSystem) { cameraAppSettings.toTransientSessionSettings() } + + val sessionSettings = with(cameraSystem) { + cameraAppSettings.toSingleCameraSessionSettings(cameraConstraints) + } + + if (sessionSettings.toFeatureGroupabilities().isInvalid()) { + Log.d( + TAG, + "isGroupingSupported: CameraX feature group is not compatible with this" + + " session settings, so returning unsupported early." + + " sessionSettings = $sessionSettings" + ) + return false + } + + // An explicit job is used here because simpler approach like `coroutineScope { ... }` + // seems to get stuck forever for StreamConfig.SINGLE_STREAM. The code flow for + // SINGLE_STREAM seems to be keeping the coroutine scope forever busy and thus the + // `coroutineScope` block never completes. 
+ val job = Job() + + val sessionConfig = with( + defaultCameraSessionContext.copy( + transientSettings = MutableStateFlow(transientSettings).asStateFlow() + ) + ) { + val videoCaptureUseCase = + createVideoUseCase( + cameraInfo, + cameraAppSettings.aspectRatio, + cameraAppSettings.captureMode, + backgroundDispatcher, + cameraAppSettings.targetFrameRate.takeIfFeatureGroupInvalid(sessionSettings), + cameraAppSettings.stabilizationMode.takeIfFeatureGroupInvalid(sessionSettings), + cameraAppSettings.dynamicRange.takeIfFeatureGroupInvalid(sessionSettings), + cameraAppSettings.videoQuality.takeIfFeatureGroupInvalid(sessionSettings) + ) + + createSessionConfig( + cameraConstraints = cameraConstraints, + initialTransientSettings = transientSettings, + videoCaptureUseCase = videoCaptureUseCase, + sessionSettings = sessionSettings, + sessionScope = CoroutineScope(defaultDispatcher + job) + ) + } + + return cameraInfo.isSessionConfigSupported(sessionConfig).apply { + job.cancel() + } + } + + private suspend fun cacheConcurrentHdrJpegRCapability( + cameraAppSettings: CameraAppSettings, + systemConstraints: CameraSystemConstraints + ) { + with(cameraSystem) { + val supported = isGroupingSupported( + cameraAppSettings + .copy(dynamicRange = cameraAppSettings.dynamicRange) + .applyImageFormat( + imageFormat = ImageOutputFormat.JPEG_ULTRA_HDR, + systemConstraints = systemConstraints.copy() + ), + cameraProvider + .getCameraInfo( + cameraAppSettings + .cameraLensFacing + .toCameraSelector() + ), + systemConstraints + ) + + isHdrSupportedWithJpegR.value = supported + } + } + + /** + * Returns whether HDR is supported with JPEG_R, a null value represents the result is still + * unknown. + */ + fun isHdrSupportedWithJpegR(): Boolean? 
= isHdrSupportedWithJpegR.value +} diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupability.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupability.kt new file mode 100644 index 000000000..bc80429fc --- /dev/null +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupability.kt @@ -0,0 +1,175 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.jetpackcamera.core.camera + +import androidx.camera.core.featuregroup.GroupableFeature +import androidx.camera.video.GroupableFeatures +import com.google.jetpackcamera.core.camera.FeatureGroupability.ExplicitlyGroupable +import com.google.jetpackcamera.core.camera.FeatureGroupability.ImplicitlyGroupable +import com.google.jetpackcamera.core.camera.FeatureGroupability.Ungroupable +import com.google.jetpackcamera.model.DynamicRange +import com.google.jetpackcamera.model.ImageOutputFormat +import com.google.jetpackcamera.model.ImageOutputFormat.JPEG +import com.google.jetpackcamera.model.ImageOutputFormat.JPEG_ULTRA_HDR +import com.google.jetpackcamera.model.StabilizationMode +import com.google.jetpackcamera.model.StabilizationMode.AUTO +import com.google.jetpackcamera.model.StabilizationMode.HIGH_QUALITY +import com.google.jetpackcamera.model.StabilizationMode.OFF +import com.google.jetpackcamera.model.StabilizationMode.ON +import com.google.jetpackcamera.model.StabilizationMode.OPTICAL +import com.google.jetpackcamera.model.VideoQuality +import com.google.jetpackcamera.model.VideoQuality.FHD +import com.google.jetpackcamera.model.VideoQuality.HD +import com.google.jetpackcamera.model.VideoQuality.SD +import com.google.jetpackcamera.model.VideoQuality.UHD +import com.google.jetpackcamera.model.VideoQuality.UNSPECIFIED + +/** + * Categorizes how a camera feature can be used with CameraX's feature group query API. + * + * This allows internal JCA models like [DynamicRange] and [VideoQuality] to be mapped to CameraX + * [GroupableFeature]s for compatibility checking. + */ +sealed interface FeatureGroupability { + /** Corresponds to a specific [GroupableFeature] object. */ + data class ExplicitlyGroupable(val feature: GroupableFeature) : FeatureGroupability + + /** + * Does not correspond to a specific [GroupableFeature] object, but implicitly groupable as + * it is equivalent to the base value CameraX feature groups API will use. 
+ */ + object ImplicitlyGroupable : FeatureGroupability + + /** Feature value that is not usable with CameraX feature groups API. */ + data class Ungroupable(val featureValue: T) : FeatureGroupability +} + +/** + * Returns whether a collection of [FeatureGroupability] represents that CameraX feature group + * query API should be used. + * + * This is based on the fact that feature group query API should be used whenever there's at least + * two [GroupableFeature]. + */ +fun Collection>.requiresFeatureGroupQuery(): Boolean { + return filter { it is ExplicitlyGroupable }.size >= 2 +} + +/** + * Returns whether a collection of [FeatureGroupability] is invalid. + * + * The collection should not be used for camera session i.e. invalid whenever it has a + * [Ungroupable] element and [requiresFeatureGroupQuery] is true for the collection. + */ +internal fun Collection>.isInvalid(): Boolean { + return requiresFeatureGroupQuery() && any { it is Ungroupable } +} + +/** + * Converts this [DynamicRange] to a [FeatureGroupability]. + * + * This allows the dynamic range to be used in CameraX feature group compatibility checks. + */ +fun DynamicRange.toFeatureGroupability(): FeatureGroupability { + return when (this) { + DynamicRange.SDR -> ImplicitlyGroupable + DynamicRange.HLG10 -> ExplicitlyGroupable(GroupableFeature.HDR_HLG10) + } +} + +/** + * Converts this [VideoQuality] to a [FeatureGroupability]. + * + * This allows the video quality to be used in CameraX feature group compatibility checks. + */ +fun VideoQuality.toFeatureGroupability(): FeatureGroupability { + return when (this) { + SD -> ExplicitlyGroupable(GroupableFeatures.SD_RECORDING) + HD -> ExplicitlyGroupable(GroupableFeatures.HD_RECORDING) + FHD -> ExplicitlyGroupable(GroupableFeatures.FHD_RECORDING) + UHD -> ExplicitlyGroupable(GroupableFeatures.UHD_RECORDING) + UNSPECIFIED -> ImplicitlyGroupable + } +} + +/** + * Converts this [ImageOutputFormat] to a [FeatureGroupability]. 
+ * + * This allows the image output format to be used in CameraX feature group compatibility checks. + */ +fun ImageOutputFormat.toFeatureGroupability(): FeatureGroupability { + return when (this) { + JPEG -> ImplicitlyGroupable + JPEG_ULTRA_HDR -> ExplicitlyGroupable(GroupableFeature.IMAGE_ULTRA_HDR) + } +} + +/** + * Converts this [StabilizationMode] to a [FeatureGroupability]. + * + * This allows the stabilization mode to be used in CameraX feature group compatibility checks. + * + * @throws IllegalStateException When the value of this `StabilizationMode` is [AUTO]. + */ +fun StabilizationMode.toFeatureGroupability(): FeatureGroupability { + return when (this) { + OFF -> ImplicitlyGroupable + ON -> ExplicitlyGroupable(GroupableFeature.PREVIEW_STABILIZATION) + HIGH_QUALITY -> ExplicitlyGroupable(GroupableFeatures.VIDEO_STABILIZATION) + AUTO -> + throw IllegalStateException( + "AUTO should be resolved to concrete value before this API is called!" + ) + OPTICAL -> Ungroupable(this) + } +} + +/** + * Converts this integer FPS value to a [FeatureGroupability]. + * + * This allows the frame rate to be used in CameraX feature group compatibility checks. + */ +fun Int.toFpsFeatureGroupability(): FeatureGroupability { + return when (this) { + 60 -> ExplicitlyGroupable(GroupableFeature.FPS_60) + 30, 0 -> ImplicitlyGroupable + else -> Ungroupable(this) + } +} + +/** + * Creates a set of [FeatureGroupability] from a [PerpetualSessionSettings.SingleCamera]. + */ +internal fun PerpetualSessionSettings.SingleCamera.toFeatureGroupabilities(): + Set> { + return setOf( + dynamicRange.toFeatureGroupability(), + targetFrameRate.toFpsFeatureGroupability(), + imageFormat.toFeatureGroupability(), + stabilizationMode.toFeatureGroupability(), + videoQuality.toFeatureGroupability() + ) +} + +/** + * Returns `this` value if the provided [sessionSettings] is not compatible with CameraX feature + * groups. 
+ */ +internal fun T.takeIfFeatureGroupInvalid( + sessionSettings: PerpetualSessionSettings.SingleCamera +): T? { + return if (sessionSettings.toFeatureGroupabilities().isInvalid()) this else null +} diff --git a/core/camera/src/test/java/com/google/jetpackcamera/core/camera/FeatureGroupabilityTest.kt b/core/camera/src/test/java/com/google/jetpackcamera/core/camera/FeatureGroupabilityTest.kt new file mode 100644 index 000000000..04ebf6814 --- /dev/null +++ b/core/camera/src/test/java/com/google/jetpackcamera/core/camera/FeatureGroupabilityTest.kt @@ -0,0 +1,287 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.jetpackcamera.core.camera + +import androidx.camera.core.featuregroup.GroupableFeature +import androidx.camera.video.GroupableFeatures +import com.google.common.truth.Truth.assertThat +import com.google.jetpackcamera.core.camera.FeatureGroupability.ExplicitlyGroupable +import com.google.jetpackcamera.core.camera.FeatureGroupability.ImplicitlyGroupable +import com.google.jetpackcamera.core.camera.FeatureGroupability.Ungroupable +import com.google.jetpackcamera.model.AspectRatio +import com.google.jetpackcamera.model.CaptureMode +import com.google.jetpackcamera.model.DynamicRange +import com.google.jetpackcamera.model.ImageOutputFormat +import com.google.jetpackcamera.model.LowLightBoostPriority +import com.google.jetpackcamera.model.StabilizationMode +import com.google.jetpackcamera.model.StreamConfig +import com.google.jetpackcamera.model.VideoQuality +import org.junit.Assert.assertThrows +import org.junit.Test +import org.junit.runner.RunWith +import org.robolectric.RobolectricTestRunner + +@RunWith(RobolectricTestRunner::class) +class FeatureGroupabilityTest { + @Test + fun requiresFeatureGroupQuery_returnsTrue_whenTwoOrMoreExplicit() { + val collection = listOf( + ExplicitlyGroupable(GroupableFeatures.SD_RECORDING), + ExplicitlyGroupable(GroupableFeature.IMAGE_ULTRA_HDR), + ImplicitlyGroupable + ) + assertThat(collection.requiresFeatureGroupQuery()).isTrue() + } + + @Test + fun requiresFeatureGroupQuery_returnsFalse_whenLessThanTwoExplicit() { + val collection1 = listOf( + ExplicitlyGroupable(GroupableFeatures.SD_RECORDING), + ImplicitlyGroupable + ) + val collection2 = listOf( + ImplicitlyGroupable, + ImplicitlyGroupable + ) + assertThat(collection1.requiresFeatureGroupQuery()).isFalse() + assertThat(collection2.requiresFeatureGroupQuery()).isFalse() + } + + @Test + fun isInvalid_returnsTrue_whenRequiresQueryAndHasUngroupable() { + val collection = listOf( + ExplicitlyGroupable(GroupableFeatures.SD_RECORDING), + 
ExplicitlyGroupable(GroupableFeature.IMAGE_ULTRA_HDR), + Ungroupable("someValue") + ) + assertThat(collection.isInvalid()).isTrue() + } + + @Test + fun isInvalid_returnsFalse_whenRequiresQueryAndNoUngroupable() { + val collection = listOf( + ExplicitlyGroupable(GroupableFeatures.SD_RECORDING), + ExplicitlyGroupable(GroupableFeature.IMAGE_ULTRA_HDR), + ImplicitlyGroupable + ) + assertThat(collection.isInvalid()).isFalse() + } + + @Test + fun isInvalid_returnsFalse_whenNotRequiresQueryAndHasUngroupable() { + val collection = listOf( + ExplicitlyGroupable(GroupableFeatures.SD_RECORDING), + Ungroupable("someValue") + ) + assertThat(collection.isInvalid()).isFalse() + } + + @Test + fun dynamicRange_toFeatureGroupability_mapsCorrectly() { + assertThat(DynamicRange.SDR.toFeatureGroupability()).isEqualTo(ImplicitlyGroupable) + assertThat( + DynamicRange.HLG10.toFeatureGroupability() + ).isEqualTo(ExplicitlyGroupable(GroupableFeature.HDR_HLG10)) + } + + @Test + fun videoQuality_toFeatureGroupability_mapsCorrectly() { + assertThat( + VideoQuality.SD.toFeatureGroupability() + ).isEqualTo(ExplicitlyGroupable(GroupableFeatures.SD_RECORDING)) + assertThat( + VideoQuality.HD.toFeatureGroupability() + ).isEqualTo(ExplicitlyGroupable(GroupableFeatures.HD_RECORDING)) + assertThat( + VideoQuality.FHD.toFeatureGroupability() + ).isEqualTo(ExplicitlyGroupable(GroupableFeatures.FHD_RECORDING)) + assertThat( + VideoQuality.UHD.toFeatureGroupability() + ).isEqualTo(ExplicitlyGroupable(GroupableFeatures.UHD_RECORDING)) + } + + @Test + fun imageOutputFormat_toFeatureGroupability_mapsCorrectly() { + assertThat(ImageOutputFormat.JPEG.toFeatureGroupability()).isEqualTo(ImplicitlyGroupable) + assertThat( + ImageOutputFormat.JPEG_ULTRA_HDR.toFeatureGroupability() + ).isEqualTo(ExplicitlyGroupable(GroupableFeature.IMAGE_ULTRA_HDR)) + } + + @Test + fun stabilizationMode_toFeatureGroupability_mapsCorrectly() { + 
assertThat(StabilizationMode.OFF.toFeatureGroupability()).isEqualTo(ImplicitlyGroupable) + assertThat( + StabilizationMode.ON.toFeatureGroupability() + ).isEqualTo(ExplicitlyGroupable(GroupableFeature.PREVIEW_STABILIZATION)) + assertThat( + StabilizationMode.HIGH_QUALITY.toFeatureGroupability() + ).isEqualTo(ExplicitlyGroupable(GroupableFeatures.VIDEO_STABILIZATION)) + assertThrows(IllegalStateException::class.java) { + StabilizationMode.AUTO.toFeatureGroupability() + } + assertThat( + StabilizationMode.OPTICAL.toFeatureGroupability() + ).isInstanceOf(Ungroupable::class.java) + } + + @Test + fun int_toFpsFeatureGroupability_mapsCorrectly() { + assertThat( + 60.toFpsFeatureGroupability() + ).isEqualTo(ExplicitlyGroupable(GroupableFeature.FPS_60)) + assertThat(30.toFpsFeatureGroupability()).isEqualTo(ImplicitlyGroupable) + assertThat(0.toFpsFeatureGroupability()).isEqualTo(ImplicitlyGroupable) + assertThat(15.toFpsFeatureGroupability()).isInstanceOf(Ungroupable::class.java) + } + + @Test + fun singleCamera_toFeatureGroupabilities_containsCorrectElements() { + val settings = PerpetualSessionSettings.SingleCamera( + aspectRatio = AspectRatio.THREE_FOUR, + captureMode = CaptureMode.STANDARD, + streamConfig = StreamConfig.MULTI_STREAM, + targetFrameRate = 60, + stabilizationMode = StabilizationMode.ON, + dynamicRange = DynamicRange.HLG10, + videoQuality = VideoQuality.UHD, + imageFormat = ImageOutputFormat.JPEG_ULTRA_HDR, + lowLightBoostPriority = LowLightBoostPriority.PRIORITIZE_AE_MODE + ) + + val dataSet = settings.toFeatureGroupabilities() + + assertThat(dataSet).contains(ExplicitlyGroupable(GroupableFeature.FPS_60)) + assertThat(dataSet).contains(ExplicitlyGroupable(GroupableFeature.PREVIEW_STABILIZATION)) + assertThat(dataSet).contains(ExplicitlyGroupable(GroupableFeature.HDR_HLG10)) + assertThat(dataSet).contains(ExplicitlyGroupable(GroupableFeatures.UHD_RECORDING)) + assertThat(dataSet).contains(ExplicitlyGroupable(GroupableFeature.IMAGE_ULTRA_HDR)) + } + + @Test 
+ fun takeIfFeatureGroupInvalid_returnsObject_whenInvalid() {
+ // Construct an invalid combination: feature group queries are required (at least two
+ // explicitly groupable features) while an ungroupable feature value is also present.
+
+ // The combination below has three explicits and one ungroupable:
+ // Explicit: 60 FPS, HLG10, SD recording
+ // Implicit: JPEG
+ // Ungroupable: OPTICAL stabilization
+
+ val settings = PerpetualSessionSettings.SingleCamera(
+ aspectRatio = AspectRatio.THREE_FOUR,
+ captureMode = CaptureMode.STANDARD,
+ streamConfig = StreamConfig.MULTI_STREAM,
+ targetFrameRate = 60, // Explicit
+ stabilizationMode = StabilizationMode.OPTICAL, // Ungroupable
+ dynamicRange = DynamicRange.HLG10, // Explicit
+ videoQuality = VideoQuality.SD, // Explicit
+ imageFormat = ImageOutputFormat.JPEG, // Implicit
+ lowLightBoostPriority = LowLightBoostPriority.PRIORITIZE_AE_MODE
+ )
+
+ val obj = "TestObject"
+ assertThat(obj.takeIfFeatureGroupInvalid(settings)).isEqualTo(obj)
+ }
+
+ @Test
+ fun takeIfFeatureGroupInvalid_returnsNull_whenValid() {
+ // Valid combination: multiple explicitly groupable features, no ungroupables
+ val settings = PerpetualSessionSettings.SingleCamera(
+ aspectRatio = AspectRatio.THREE_FOUR,
+ captureMode = CaptureMode.STANDARD,
+ streamConfig = StreamConfig.MULTI_STREAM,
+ targetFrameRate = 60, // Explicit
+ stabilizationMode = StabilizationMode.ON, // Explicit
+ dynamicRange = DynamicRange.HLG10, // Explicit
+ videoQuality = VideoQuality.SD, // Explicit
+ imageFormat = ImageOutputFormat.JPEG, // Implicit
+ lowLightBoostPriority = LowLightBoostPriority.PRIORITIZE_AE_MODE
+ )
+
+ val obj = "TestObject"
+ assertThat(obj.takeIfFeatureGroupInvalid(settings)).isNull()
+ }
+
+ @Test
+ fun singleCamera_toGroupableFeatures_returnsCorrectSet() {
+ val settings = PerpetualSessionSettings.SingleCamera(
+ aspectRatio = AspectRatio.THREE_FOUR,
+ captureMode = CaptureMode.STANDARD,
+ streamConfig = 
StreamConfig.MULTI_STREAM, + targetFrameRate = 60, + stabilizationMode = StabilizationMode.ON, + dynamicRange = DynamicRange.HLG10, + videoQuality = VideoQuality.UHD, + imageFormat = ImageOutputFormat.JPEG_ULTRA_HDR, + lowLightBoostPriority = LowLightBoostPriority.PRIORITIZE_AE_MODE + ) + + val features = settings.toGroupableFeatures() + + assertThat(features!!).contains(GroupableFeature.FPS_60) + assertThat(features).contains(GroupableFeature.PREVIEW_STABILIZATION) + assertThat(features).contains(GroupableFeature.HDR_HLG10) + assertThat(features).contains(GroupableFeatures.UHD_RECORDING) + assertThat(features).contains(GroupableFeature.IMAGE_ULTRA_HDR) + } + + @Test + fun singleCamera_toGroupableFeatures_filtersUltraHdrWhenVideoOnly() { + val settings = PerpetualSessionSettings.SingleCamera( + aspectRatio = AspectRatio.THREE_FOUR, + captureMode = CaptureMode.VIDEO_ONLY, + streamConfig = StreamConfig.MULTI_STREAM, + targetFrameRate = 60, + stabilizationMode = StabilizationMode.ON, + dynamicRange = DynamicRange.HLG10, + videoQuality = VideoQuality.UHD, + imageFormat = ImageOutputFormat.JPEG_ULTRA_HDR, + lowLightBoostPriority = LowLightBoostPriority.PRIORITIZE_AE_MODE + ) + + val features = settings.toGroupableFeatures() + + assertThat(features!!).contains(GroupableFeature.FPS_60) + assertThat(features).contains(GroupableFeature.PREVIEW_STABILIZATION) + assertThat(features).contains(GroupableFeature.HDR_HLG10) + assertThat(features).contains(GroupableFeatures.UHD_RECORDING) + assertThat(features).doesNotContain(GroupableFeature.IMAGE_ULTRA_HDR) + } + + @Test + fun singleCamera_toGroupableFeatures_filtersVideoQualityWhenImageOnly() { + val settings = PerpetualSessionSettings.SingleCamera( + aspectRatio = AspectRatio.THREE_FOUR, + captureMode = CaptureMode.IMAGE_ONLY, + streamConfig = StreamConfig.MULTI_STREAM, + targetFrameRate = 60, + stabilizationMode = StabilizationMode.ON, + dynamicRange = DynamicRange.HLG10, + videoQuality = VideoQuality.UHD, + imageFormat = 
ImageOutputFormat.JPEG_ULTRA_HDR, + lowLightBoostPriority = LowLightBoostPriority.PRIORITIZE_AE_MODE + ) + + val features = settings.toGroupableFeatures() + + assertThat(features!!).contains(GroupableFeature.FPS_60) + assertThat(features).contains(GroupableFeature.PREVIEW_STABILIZATION) + assertThat(features).contains(GroupableFeature.HDR_HLG10) + assertThat(features).doesNotContain(GroupableFeatures.UHD_RECORDING) + assertThat(features).contains(GroupableFeature.IMAGE_ULTRA_HDR) + } +} From 8af73791e5095b1ada961b17d5d4ea84f6ff56ba Mon Sep 17 00:00:00 2001 From: Tahsin Masrur Date: Fri, 5 Dec 2025 11:34:38 +0000 Subject: [PATCH 4/6] Add KDoc for Feature Group APIs and helper functions --- .../jetpackcamera/core/camera/CameraSession.kt | 15 +++++++++++++++ .../core/camera/CameraXCameraSystem.kt | 11 +++++++++++ 2 files changed, 26 insertions(+) diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt index 6fe34cb31..6772bf89e 100644 --- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt @@ -542,6 +542,21 @@ internal fun applyDeviceRotation(deviceRotation: DeviceRotation, useCases: List< } } +/** + * Creates a [SessionConfig] for a single camera session. + * + * This function constructs the session configuration, including binding use cases, setting up + * viewports, and applying necessary effects. It also determines the required feature group + * from the [sessionSettings] to ensure that the combination of features is supported by the + * device. + * + * @param cameraConstraints The constraints applicable to the current camera. + * @param initialTransientSettings The initial transient settings (e.g. flash, zoom). + * @param sessionSettings The persistent settings for the single camera session. 
+ * @param videoCaptureUseCase The video capture use case, if video recording is enabled. + * @param sessionScope The coroutine scope for the session. + * @return A [SessionConfig] ready to be bound to the camera lifecycle. + */ context(CameraSessionContext) @OptIn(ExperimentalCamera2Interop::class) internal suspend fun createSessionConfig( diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt index 338f92422..27b0407f7 100644 --- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt @@ -557,6 +557,17 @@ constructor( } } + /** + * Updates the [CameraSystemConstraints] based on feature group compatibility. + * + * This function checks various combinations of settings (dynamic range, frame rate, + * stabilization, etc.) against the device's capabilities using the CameraX feature groups API. + * It filters out unsupported options from the system constraints, ensuring that the UI only + * presents valid combinations to the user. + * + * This update happens asynchronously after the initial camera session is started to avoid + * blocking the UI thread. 
+ */ private suspend fun updateSystemConstraintsByFeatureGroups() { val cameraAppSettings = requireNotNull(currentSettings.value) From 77f6ef8ed4c3b74893ea28d7bf70fc1b612abd57 Mon Sep 17 00:00:00 2001 From: Tahsin Masrur Date: Fri, 5 Dec 2025 18:29:46 +0000 Subject: [PATCH 5/6] Add tests --- .../core/camera/CameraXCameraSystemTest.kt | 105 ++++++++++++++++++ .../core/camera/FeatureGroupHandlerTest.kt | 3 +- feature/postcapture/build.gradle.kts | 2 +- 3 files changed, 108 insertions(+), 2 deletions(-) diff --git a/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/CameraXCameraSystemTest.kt b/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/CameraXCameraSystemTest.kt index a4f34d8a4..a6228e901 100644 --- a/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/CameraXCameraSystemTest.kt +++ b/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/CameraXCameraSystemTest.kt @@ -20,6 +20,7 @@ import android.content.ContentResolver import android.graphics.SurfaceTexture import android.net.Uri import android.view.Surface +import androidx.annotation.GuardedBy import androidx.concurrent.futures.DirectExecutor import androidx.test.ext.junit.runners.AndroidJUnit4 import androidx.test.filters.LargeTest @@ -35,27 +36,35 @@ import com.google.jetpackcamera.core.camera.postprocess.ImagePostProcessorFeatur import com.google.jetpackcamera.core.camera.postprocess.PostProcessModule.Companion.provideImagePostProcessorMap import com.google.jetpackcamera.core.camera.utils.APP_REQUIRED_PERMISSIONS import com.google.jetpackcamera.core.common.FakeFilePathGenerator +import com.google.jetpackcamera.model.DynamicRange import com.google.jetpackcamera.model.FlashMode import com.google.jetpackcamera.model.Illuminant import com.google.jetpackcamera.model.LensFacing import com.google.jetpackcamera.model.SaveLocation +import com.google.jetpackcamera.model.VideoQuality import 
com.google.jetpackcamera.settings.ConstraintsRepository import com.google.jetpackcamera.settings.SettableConstraintsRepository import com.google.jetpackcamera.settings.SettableConstraintsRepositoryImpl import com.google.jetpackcamera.settings.model.CameraAppSettings +import com.google.jetpackcamera.settings.model.CameraSystemConstraints import com.google.jetpackcamera.settings.model.DEFAULT_CAMERA_APP_SETTINGS import java.io.File import java.util.AbstractMap import javax.inject.Provider +import kotlin.time.Duration +import kotlin.time.Duration.Companion.seconds import kotlin.time.DurationUnit import kotlin.time.toDuration import kotlinx.coroutines.CompletableDeferred import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Deferred import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Job import kotlinx.coroutines.async import kotlinx.coroutines.cancelAndJoin import kotlinx.coroutines.channels.ReceiveChannel +import kotlinx.coroutines.flow.MutableStateFlow +import kotlinx.coroutines.flow.StateFlow import kotlinx.coroutines.flow.filterNotNull import kotlinx.coroutines.flow.first import kotlinx.coroutines.flow.map @@ -271,6 +280,74 @@ class CameraXCameraSystemTest { torchEnabled.cancel() } + @Test + fun setMultipleFeatures_systemConstraintsUpdatedAndFeaturesSettableIfSupported() = runBlocking { + // Arrange. + val constraintsRepository = ObservableConstraintsRepository() + val cameraSystem = createAndInitCameraXCameraSystem( + constraintsRepository = constraintsRepository + ) + + // Each camera run/update should lead to a new systemConstraints update + var systemConstraints = constraintsRepository.observeNextUpdate() + cameraSystem.startCameraAndWaitUntilRunning() + + val lensFacing = cameraSystem.getCurrentSettings().value?.cameraLensFacing + + // Act: For each of the features — HDR, 60 FPS, UHD recording, await previous constraints + // update and set the feature if the constraints supports it. 
+ + if ( + systemConstraints + .awaitUntil() + .perLensConstraints[lensFacing] + ?.supportedDynamicRanges + ?.contains(DynamicRange.HLG10) == true + ) { + systemConstraints = constraintsRepository.observeNextUpdate() + cameraSystem.setDynamicRange(DynamicRange.HLG10) + } + + if ( + systemConstraints + .awaitUntil() + .perLensConstraints[lensFacing] + ?.supportedFixedFrameRates + ?.contains(60) == true + ) { + systemConstraints = constraintsRepository.observeNextUpdate() + cameraSystem.setTargetFrameRate(60) + } + + if ( + systemConstraints + .awaitUntil() + .perLensConstraints[lensFacing] + ?.supportedVideoQualitiesMap + ?.get(cameraSystem.getCurrentSettings().value?.dynamicRange) + ?.contains(VideoQuality.UHD) == true + ) { + systemConstraints = constraintsRepository.observeNextUpdate() + cameraSystem.setVideoQuality(VideoQuality.UHD) + } + + // Wait to ensure the async updateSystemConstraintsByFeatureGroups has time to run + // and potentially crash if there's an issue. + systemConstraints.awaitUntil() + + // Assert. + // If the test reaches here without crashing, it passes. + // This ensures that the feature group logic doesn't cause runtime exceptions + // even when high-end features are requested. 
+ return@runBlocking + } + + suspend fun Deferred.awaitUntil(timeout: Duration = 2.seconds): T { + return withTimeout(timeout) { + await() + } + } + private suspend fun createAndInitCameraXCameraSystem( appSettings: CameraAppSettings = DEFAULT_CAMERA_APP_SETTINGS, constraintsRepository: SettableConstraintsRepository = SettableConstraintsRepositoryImpl(), @@ -411,3 +488,31 @@ class FakeImagePostProcessor(val shouldError: Boolean = false) : ImagePostProces if (shouldError) throw RuntimeException("Post process failed") } } + +class ObservableConstraintsRepository : SettableConstraintsRepository { + private val lock = Object() + + override val systemConstraints: StateFlow = + MutableStateFlow(null) + + @GuardedBy("lock") + private var updateDeferredList = + mutableListOf>() + + override fun updateSystemConstraints(systemConstraints: CameraSystemConstraints) { + synchronized(lock) { + updateDeferredList.forEach { + it.complete(systemConstraints) + } + updateDeferredList.clear() + } + } + + fun observeNextUpdate(): Deferred { + return synchronized(lock) { + val deferred = CompletableDeferred() + updateDeferredList.add(deferred) + deferred + } + } +} diff --git a/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/FeatureGroupHandlerTest.kt b/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/FeatureGroupHandlerTest.kt index 2e2800ca4..9c9982696 100644 --- a/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/FeatureGroupHandlerTest.kt +++ b/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/FeatureGroupHandlerTest.kt @@ -54,7 +54,8 @@ class FeatureGroupHandlerTest { constraintsRepository = constraintsRepository, filePathGenerator = FakeFilePathGenerator(), availabilityCheckers = emptyMap(), - effectProviders = emptyMap() + effectProviders = emptyMap(), + imagePostProcessors = emptyMap() ) cameraSystem.initialize(DEFAULT_CAMERA_APP_SETTINGS) {} diff --git a/feature/postcapture/build.gradle.kts 
b/feature/postcapture/build.gradle.kts index 89d0b665e..d422f8d7e 100644 --- a/feature/postcapture/build.gradle.kts +++ b/feature/postcapture/build.gradle.kts @@ -23,7 +23,7 @@ plugins { android { namespace = "com.google.jetpackcamera.feature.postcapture" - compileSdk = 35 + compileSdk = libs.versions.compileSdk.get().toInt() defaultConfig { minSdk = libs.versions.minSdk.get().toInt() From abaabebc57ee43ba656f0f519d2b8bfd4f8c10cd Mon Sep 17 00:00:00 2001 From: Tahsin Masrur Date: Mon, 8 Dec 2025 11:10:03 +0000 Subject: [PATCH 6/6] Add StreamConfig support checking and address review comments --- .../core/camera/CameraXCameraSystemTest.kt | 244 ++++++++++++++---- .../core/camera/CameraSession.kt | 20 +- .../core/camera/CameraXCameraSystem.kt | 10 +- .../core/camera/FeatureGroupHandler.kt | 23 ++ .../settings/model/Constraints.kt | 13 +- .../capture/StreamConfigsUiStateAdapter.kt | 12 +- .../capture/FlashModeUiStateAdapterTest.kt | 3 +- 7 files changed, 247 insertions(+), 78 deletions(-) diff --git a/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/CameraXCameraSystemTest.kt b/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/CameraXCameraSystemTest.kt index a6228e901..b89ed99fb 100644 --- a/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/CameraXCameraSystemTest.kt +++ b/core/camera/src/androidTest/java/com/google/jetpackcamera/core/camera/CameraXCameraSystemTest.kt @@ -19,6 +19,7 @@ import android.app.Application import android.content.ContentResolver import android.graphics.SurfaceTexture import android.net.Uri +import android.util.Log import android.view.Surface import androidx.annotation.GuardedBy import androidx.concurrent.futures.DirectExecutor @@ -29,6 +30,12 @@ import androidx.test.rule.GrantPermissionRule import com.google.common.truth.Truth.assertThat import com.google.common.truth.Truth.assertWithMessage import com.google.common.truth.TruthJUnit.assume +import 
com.google.jetpackcamera.core.camera.CameraXCameraSystemTest.Feature.DYNAMIC_RANGE_HLG10 +import com.google.jetpackcamera.core.camera.CameraXCameraSystemTest.Feature.FPS_60 +import com.google.jetpackcamera.core.camera.CameraXCameraSystemTest.Feature.IMAGE_FORMAT_JPEG_ULTRA_HDR +import com.google.jetpackcamera.core.camera.CameraXCameraSystemTest.Feature.STABILIZATION_MODE_ON +import com.google.jetpackcamera.core.camera.CameraXCameraSystemTest.Feature.STREAM_CONFIG_SINGLE +import com.google.jetpackcamera.core.camera.CameraXCameraSystemTest.Feature.VIDEO_QUALITY_UHD import com.google.jetpackcamera.core.camera.OnVideoRecordEvent.OnVideoRecordError import com.google.jetpackcamera.core.camera.OnVideoRecordEvent.OnVideoRecorded import com.google.jetpackcamera.core.camera.postprocess.ImagePostProcessor @@ -39,13 +46,17 @@ import com.google.jetpackcamera.core.common.FakeFilePathGenerator import com.google.jetpackcamera.model.DynamicRange import com.google.jetpackcamera.model.FlashMode import com.google.jetpackcamera.model.Illuminant +import com.google.jetpackcamera.model.ImageOutputFormat import com.google.jetpackcamera.model.LensFacing import com.google.jetpackcamera.model.SaveLocation +import com.google.jetpackcamera.model.StabilizationMode +import com.google.jetpackcamera.model.StreamConfig import com.google.jetpackcamera.model.VideoQuality import com.google.jetpackcamera.settings.ConstraintsRepository import com.google.jetpackcamera.settings.SettableConstraintsRepository import com.google.jetpackcamera.settings.SettableConstraintsRepositoryImpl import com.google.jetpackcamera.settings.model.CameraAppSettings +import com.google.jetpackcamera.settings.model.CameraConstraints import com.google.jetpackcamera.settings.model.CameraSystemConstraints import com.google.jetpackcamera.settings.model.DEFAULT_CAMERA_APP_SETTINGS import java.io.File @@ -89,6 +100,7 @@ class CameraXCameraSystemTest { private const val GENERAL_TIMEOUT_MS = 3_000L private const val RECORDING_TIMEOUT_MS 
= 10_000L private const val RECORDING_START_DURATION_MS = 500L + private const val TAG = "CameraXCameraSystemTest" } @get:Rule @@ -281,65 +293,129 @@ class CameraXCameraSystemTest { } @Test - fun setMultipleFeatures_systemConstraintsUpdatedAndFeaturesSettableIfSupported() = runBlocking { - // Arrange. - val constraintsRepository = ObservableConstraintsRepository() - val cameraSystem = createAndInitCameraXCameraSystem( - constraintsRepository = constraintsRepository + fun setMultipleFeatures_systemConstraintsUpdatedAndFeaturesSetIfSupported() = runBlocking { + // TODO: Add STREAM_CONFIG_SINGLE to the featuresToTest list. This currently leads to flaky + // crashes due to some camera effect related surface not being cleaned up properly somehow. + // This doesn't seem to be related to the primary purpose of this test, so simply excluding + // it for now. + val featuresToTest = listOf( + DYNAMIC_RANGE_HLG10, + FPS_60, + VIDEO_QUALITY_UHD ) - // Each camera run/update should lead to a new systemConstraints update - var systemConstraints = constraintsRepository.observeNextUpdate() - cameraSystem.startCameraAndWaitUntilRunning() + featuresToTest.permutations().forEach { orderedFeatures -> + Log.d(TAG, "Testing $orderedFeatures") - val lensFacing = cameraSystem.getCurrentSettings().value?.cameraLensFacing + // Setup + val constraintsRepository = ObservableConstraintsRepository() + val cameraSystem = createAndInitCameraXCameraSystem( + constraintsRepository = constraintsRepository + ) - // Act: For each of the features — HDR, 60 FPS, UHD recording, await previous constraints - // update and set the feature if the constraints supports it. 
+ // Initial run: each camera run/update should lead to a new systemConstraints update + var currentConstraints = constraintsRepository.observeNextUpdate().let { + cameraSystem.startCameraAndWaitUntilRunning() + it.awaitUntil() + } - if ( - systemConstraints - .awaitUntil() - .perLensConstraints[lensFacing] - ?.supportedDynamicRanges - ?.contains(DynamicRange.HLG10) == true - ) { - systemConstraints = constraintsRepository.observeNextUpdate() - cameraSystem.setDynamicRange(DynamicRange.HLG10) - } + val lensFacing = + requireNotNull(cameraSystem.getCurrentSettings().value?.cameraLensFacing) + + orderedFeatures.forEach { feature -> + currentConstraints = when (feature) { + DYNAMIC_RANGE_HLG10 -> feature.tryApplyFeature( + expectedValue = DynamicRange.HLG10, + lensFacing = lensFacing, + cameraSystemConstraints = currentConstraints, + constraintsRepository = constraintsRepository, + cameraSystem = cameraSystem, + setFeature = { cameraSystem.setDynamicRange(DynamicRange.HLG10) }, + getNewFeatureValue = { it?.dynamicRange } + ) { constraints -> + constraints + ?.supportedDynamicRanges + ?.contains(DynamicRange.HLG10) == true + } - if ( - systemConstraints - .awaitUntil() - .perLensConstraints[lensFacing] - ?.supportedFixedFrameRates - ?.contains(60) == true - ) { - systemConstraints = constraintsRepository.observeNextUpdate() - cameraSystem.setTargetFrameRate(60) - } + FPS_60 -> feature.tryApplyFeature( + expectedValue = 60, + lensFacing = lensFacing, + cameraSystemConstraints = currentConstraints, + constraintsRepository = constraintsRepository, + cameraSystem = cameraSystem, + setFeature = { cameraSystem.setTargetFrameRate(60) }, + getNewFeatureValue = { it?.targetFrameRate } + ) { constraints -> + constraints + ?.supportedFixedFrameRates + ?.contains(60) == true + } - if ( - systemConstraints - .awaitUntil() - .perLensConstraints[lensFacing] - ?.supportedVideoQualitiesMap - ?.get(cameraSystem.getCurrentSettings().value?.dynamicRange) - ?.contains(VideoQuality.UHD) == 
true - ) { - systemConstraints = constraintsRepository.observeNextUpdate() - cameraSystem.setVideoQuality(VideoQuality.UHD) - } + VIDEO_QUALITY_UHD -> feature.tryApplyFeature( + expectedValue = VideoQuality.UHD, + lensFacing = lensFacing, + cameraSystemConstraints = currentConstraints, + constraintsRepository = constraintsRepository, + cameraSystem = cameraSystem, + setFeature = { cameraSystem.setVideoQuality(VideoQuality.UHD) }, + getNewFeatureValue = { it?.videoQuality } + ) { constraints -> + constraints + ?.supportedVideoQualitiesMap + ?.get(cameraSystem.getCurrentSettings().value?.dynamicRange) + ?.contains(VideoQuality.UHD) == true + } - // Wait to ensure the async updateSystemConstraintsByFeatureGroups has time to run - // and potentially crash if there's an issue. - systemConstraints.awaitUntil() + STABILIZATION_MODE_ON -> feature.tryApplyFeature( + expectedValue = StabilizationMode.ON, + lensFacing = lensFacing, + cameraSystemConstraints = currentConstraints, + constraintsRepository = constraintsRepository, + cameraSystem = cameraSystem, + setFeature = { cameraSystem.setStabilizationMode(StabilizationMode.ON) }, + getNewFeatureValue = { it?.stabilizationMode } + ) { constraints -> + constraints + ?.supportedStabilizationModes + ?.contains(StabilizationMode.ON) == true + } - // Assert. - // If the test reaches here without crashing, it passes. - // This ensures that the feature group logic doesn't cause runtime exceptions - // even when high-end features are requested. 
- return@runBlocking + IMAGE_FORMAT_JPEG_ULTRA_HDR -> feature.tryApplyFeature( + expectedValue = ImageOutputFormat.JPEG_ULTRA_HDR, + lensFacing = lensFacing, + cameraSystemConstraints = currentConstraints, + constraintsRepository = constraintsRepository, + cameraSystem = cameraSystem, + setFeature = { + cameraSystem.setImageFormat( + ImageOutputFormat.JPEG_ULTRA_HDR + ) + }, + getNewFeatureValue = { it?.imageFormat } + ) { constraints -> + constraints + ?.supportedImageFormatsMap + ?.get(cameraSystem.getCurrentSettings().value?.streamConfig) + ?.contains(ImageOutputFormat.JPEG_ULTRA_HDR) == true + } + + STREAM_CONFIG_SINGLE -> feature.tryApplyFeature( + expectedValue = StreamConfig.SINGLE_STREAM, + lensFacing = lensFacing, + cameraSystemConstraints = currentConstraints, + constraintsRepository = constraintsRepository, + cameraSystem = cameraSystem, + setFeature = { cameraSystem.setStreamConfig(StreamConfig.SINGLE_STREAM) }, + getNewFeatureValue = { it?.streamConfig } + ) { constraints -> + constraints + ?.supportedStreamConfigs + ?.contains(StreamConfig.SINGLE_STREAM) == true + } + } + } + } } suspend fun Deferred.awaitUntil(timeout: Duration = 2.seconds): T { @@ -401,7 +477,8 @@ class CameraXCameraSystemTest { getCurrentCameraState().transform { cameraState -> (cameraState.videoRecordingState as? VideoRecordingState.Active)?.let { emit( - it.elapsedTimeNanos.toDuration(DurationUnit.NANOSECONDS).inWholeMilliseconds + it.elapsedTimeNanos + .toDuration(DurationUnit.NANOSECONDS).inWholeMilliseconds ) } }.first { elapsedTimeMs -> @@ -475,6 +552,71 @@ class CameraXCameraSystemTest { ) ) } + + suspend fun Feature.tryApplyFeature( + expectedValue: T, + lensFacing: LensFacing, + cameraSystemConstraints: CameraSystemConstraints, + constraintsRepository: ObservableConstraintsRepository, + cameraSystem: CameraSystem, + setFeature: suspend () -> Unit, + getNewFeatureValue: (CameraAppSettings?) -> T?, + isSupported: (CameraConstraints?) 
-> Boolean + ): CameraSystemConstraints { + // Check support + if (!isSupported(cameraSystemConstraints.perLensConstraints[lensFacing])) { + Log.d(TAG, "Skipping $this: Not supported by current constraints.") + return cameraSystemConstraints + } + + Log.d(TAG, "Applying $this...") + + // Prepare observer + val nextUpdate = constraintsRepository.observeNextUpdate() + + setFeature() + + // Wait to verify constraints is updated + val newConstraints = nextUpdate.awaitUntil() + + // Verify feature is set according to current settings + assertThat(getNewFeatureValue(cameraSystem.getCurrentSettings().value)).isEqualTo( + expectedValue + ) + + return newConstraints + } + + fun List.permutations(): List> { + if (isEmpty()) { + // Base case: an empty list has one permutation (the empty list itself) + return listOf(emptyList()) + } + + val result = mutableListOf>() + val head = first() // Take the first element + val tail = drop(1) // Get the rest of the list + + // Recursively get permutations of the tail + tail.permutations().forEach { permOfTail -> + // Insert the head element at all possible positions in each permutation of the tail + for (i in 0..permOfTail.size) { + val newPerm = permOfTail.toMutableList() + newPerm.add(i, head) + result.add(newPerm) + } + } + return result + } + + enum class Feature { + DYNAMIC_RANGE_HLG10, + FPS_60, + VIDEO_QUALITY_UHD, + STABILIZATION_MODE_ON, + IMAGE_FORMAT_JPEG_ULTRA_HDR, + STREAM_CONFIG_SINGLE + } } object FakeImagePostProcessorFeatureKey : ImagePostProcessorFeatureKey diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt index 6772bf89e..5f134c005 100644 --- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraSession.kt @@ -69,8 +69,6 @@ import androidx.core.content.ContextCompat.checkSelfPermission import 
androidx.core.net.toFile import androidx.lifecycle.asFlow import com.google.jetpackcamera.core.camera.FeatureGroupability.ExplicitlyGroupable -import com.google.jetpackcamera.core.camera.FeatureGroupability.ImplicitlyGroupable -import com.google.jetpackcamera.core.camera.FeatureGroupability.Ungroupable import com.google.jetpackcamera.core.camera.effects.SingleSurfaceForcingEffect import com.google.jetpackcamera.core.common.FilePathGenerator import com.google.jetpackcamera.model.AspectRatio @@ -651,7 +649,11 @@ internal suspend fun createSessionConfig( "Setting initial device rotation to ${initialTransientSettings.deviceRotation}" ) - val features = sessionSettings.toGroupableFeatures() ?: emptySet() + val features = if (sessionSettings.toFeatureGroupabilities().isInvalid()) { + emptySet() + } else { + sessionSettings.toGroupableFeatures() + } Log.d(TAG, "createSessionConfig: sessionSettings = $sessionSettings, features = $features") @@ -673,13 +675,10 @@ internal suspend fun createSessionConfig( /** * Creates a set of [GroupableFeature] from a [PerpetualSessionSettings.SingleCamera]. * - * Only the [PerpetualSessionSettings.SingleCamera] values that are supported by CameraX feature - * group APIs are included in the returned set. - * - * A null value is returned if the feature groups API can't be used for some value in - * [PerpetualSessionSettings.SingleCamera], e.g. optical stabilization, or 15 FPS. + * Only the [PerpetualSessionSettings.SingleCamera] values that are compatible with CameraX feature + * group APIs (i.e. [ExplicitlyGroupable] features) are included in the returned set. */ -internal fun PerpetualSessionSettings.SingleCamera.toGroupableFeatures(): Set? { +internal fun PerpetualSessionSettings.SingleCamera.toGroupableFeatures(): Set { return buildSet { this@toGroupableFeatures.toFeatureGroupabilities().forEach { when (it) { @@ -695,8 +694,7 @@ internal fun PerpetualSessionSettings.SingleCamera.toGroupableFeatures(): Set {} // No-op. 
- is Ungroupable -> return null + else -> {} // No-op. } } }.toSet() diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt index 27b0407f7..1ea9c6e40 100644 --- a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/CameraXCameraSystem.kt @@ -290,6 +290,7 @@ constructor( supportedStabilizationModes = supportedStabilizationModes, supportedFixedFrameRates = supportedFixedFrameRates, supportedDynamicRanges = supportedDynamicRanges, + supportedVideoQualitiesMap = supportedVideoQualitiesMap, supportedImageFormatsMap = mapOf( // Only JPEG is supported in single-stream mode, since // single-stream mode uses CameraEffect, which does not support @@ -297,12 +298,15 @@ constructor( Pair(StreamConfig.SINGLE_STREAM, setOf(ImageOutputFormat.JPEG)), Pair(StreamConfig.MULTI_STREAM, supportedImageFormats) ), - supportedVideoQualitiesMap = supportedVideoQualitiesMap, supportedIlluminants = supportedIlluminants, supportedFlashModes = supportedFlashModes, supportedZoomRange = supportedZoomRange, unsupportedStabilizationFpsMap = unsupportedStabilizationFpsMap, - supportedTestPatterns = supportedTestPatterns + supportedTestPatterns = supportedTestPatterns, + supportedStreamConfigs = setOf( + StreamConfig.SINGLE_STREAM, + StreamConfig.MULTI_STREAM + ) ) ) } @@ -312,6 +316,8 @@ constructor( initialSystemConstraints = systemConstraints + Log.d(TAG, "initialize: initialSystemConstraints = $initialSystemConstraints") + constraintsRepository.updateSystemConstraints(systemConstraints) currentSettings.value = diff --git a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupHandler.kt b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupHandler.kt index 103e4d61a..8290e869e 100644 --- 
a/core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupHandler.kt +++ b/core/camera/src/main/java/com/google/jetpackcamera/core/camera/FeatureGroupHandler.kt @@ -166,6 +166,12 @@ internal class FeatureGroupHandler( initialSystemConstraints, initialCameraConstraints, cameraInfo + ), + supportedStreamConfigs = filterStreamConfig( + currentSettings, + initialSystemConstraints, + initialCameraConstraints, + cameraInfo ) // unsupportedStabilizationFpsMap = unsupportedStabilizationFpsMap ) @@ -325,6 +331,23 @@ internal class FeatureGroupHandler( } } + /** + * Filters supported [StreamConfig]s by checking groupability with current settings. + */ + private suspend fun filterStreamConfig( + cameraAppSettings: CameraAppSettings, + initialSystemConstraints: CameraSystemConstraints, + initialCameraConstraints: CameraConstraints, + cameraInfo: CameraInfo + ): Set { + return initialCameraConstraints.supportedStreamConfigs.filter { + val settings = with(cameraSystem) { + cameraAppSettings.applyStreamConfig(it) + } + isGroupingSupported(settings, cameraInfo, initialSystemConstraints) + }.toSet() + } + /** * Returns whether a [CameraAppSettings] is supported together as a group. 
* diff --git a/data/settings/src/main/java/com/google/jetpackcamera/settings/model/Constraints.kt b/data/settings/src/main/java/com/google/jetpackcamera/settings/model/Constraints.kt index 0dc3275c4..77256d508 100644 --- a/data/settings/src/main/java/com/google/jetpackcamera/settings/model/Constraints.kt +++ b/data/settings/src/main/java/com/google/jetpackcamera/settings/model/Constraints.kt @@ -96,7 +96,8 @@ data class CameraConstraints( val supportedFlashModes: Set, val supportedZoomRange: Range?, val unsupportedStabilizationFpsMap: Map>, - val supportedTestPatterns: Set + val supportedTestPatterns: Set, + val supportedStreamConfigs: Set ) { val StabilizationMode.unsupportedFpsSet get() = unsupportedStabilizationFpsMap[this] ?: emptySet() @@ -121,19 +122,23 @@ val TYPICAL_SYSTEM_CONSTRAINTS = put( lensFacing, CameraConstraints( - supportedFixedFrameRates = setOf(15, 30), supportedStabilizationModes = setOf(StabilizationMode.OFF), + supportedFixedFrameRates = setOf(15, 30), supportedDynamicRanges = setOf(DynamicRange.SDR), + supportedVideoQualitiesMap = emptyMap(), supportedImageFormatsMap = mapOf( Pair(StreamConfig.SINGLE_STREAM, setOf(ImageOutputFormat.JPEG)), Pair(StreamConfig.MULTI_STREAM, setOf(ImageOutputFormat.JPEG)) ), - supportedVideoQualitiesMap = emptyMap(), supportedIlluminants = setOf(Illuminant.FLASH_UNIT), supportedFlashModes = setOf(FlashMode.OFF, FlashMode.ON, FlashMode.AUTO), supportedZoomRange = Range(.5f, 10f), unsupportedStabilizationFpsMap = emptyMap(), - supportedTestPatterns = setOf(TestPattern.Off) + supportedTestPatterns = setOf(TestPattern.Off), + supportedStreamConfigs = setOf( + StreamConfig.SINGLE_STREAM, + StreamConfig.MULTI_STREAM + ) ) ) } diff --git a/ui/uistateadapter/capture/src/main/java/com/google/jetpackcamera/ui/uistateadapter/capture/StreamConfigsUiStateAdapter.kt b/ui/uistateadapter/capture/src/main/java/com/google/jetpackcamera/ui/uistateadapter/capture/StreamConfigsUiStateAdapter.kt index 661a443de..bbc902948 100644 --- 
a/ui/uistateadapter/capture/src/main/java/com/google/jetpackcamera/ui/uistateadapter/capture/StreamConfigsUiStateAdapter.kt +++ b/ui/uistateadapter/capture/src/main/java/com/google/jetpackcamera/ui/uistateadapter/capture/StreamConfigsUiStateAdapter.kt @@ -17,7 +17,6 @@ package com.google.jetpackcamera.ui.uistateadapter.capture import com.example.uistateadapter.Utils import com.google.jetpackcamera.model.ConcurrentCameraMode -import com.google.jetpackcamera.model.ImageOutputFormat import com.google.jetpackcamera.model.StreamConfig import com.google.jetpackcamera.settings.model.CameraAppSettings import com.google.jetpackcamera.ui.uistate.capture.StreamConfigUiState @@ -31,16 +30,14 @@ fun StreamConfigUiState.Companion.from(cameraAppSettings: CameraAppSettings): St return createFrom( cameraAppSettings.streamConfig, ORDERED_UI_SUPPORTED_STREAM_CONFIGS.toSet(), - cameraAppSettings.concurrentCameraMode, - cameraAppSettings.imageFormat + cameraAppSettings.concurrentCameraMode ) } private fun createFrom( selectedStreamConfig: StreamConfig, supportedStreamConfigs: Set, - concurrentCameraMode: ConcurrentCameraMode, - imageOutputFormat: ImageOutputFormat + concurrentCameraMode: ConcurrentCameraMode ): StreamConfigUiState { // Ensure we at least support one flash mode check(supportedStreamConfigs.isNotEmpty()) { @@ -60,10 +57,7 @@ private fun createFrom( StreamConfigUiState.Available( selectedStreamConfig = selectedStreamConfig, availableStreamConfigs = availableStreamConfigs, - isActive = !( - concurrentCameraMode == ConcurrentCameraMode.DUAL || - imageOutputFormat == ImageOutputFormat.JPEG_ULTRA_HDR - ) + isActive = concurrentCameraMode != ConcurrentCameraMode.DUAL ) } } diff --git a/ui/uistateadapter/capture/src/test/java/com/google/jetpackcamera/ui/uistateadapter/capture/FlashModeUiStateAdapterTest.kt b/ui/uistateadapter/capture/src/test/java/com/google/jetpackcamera/ui/uistateadapter/capture/FlashModeUiStateAdapterTest.kt index acc70425f..cc6af7de3 100644 --- 
a/ui/uistateadapter/capture/src/test/java/com/google/jetpackcamera/ui/uistateadapter/capture/FlashModeUiStateAdapterTest.kt +++ b/ui/uistateadapter/capture/src/test/java/com/google/jetpackcamera/ui/uistateadapter/capture/FlashModeUiStateAdapterTest.kt @@ -41,7 +41,8 @@ class FlashModeUiStateAdapterTest { supportedFlashModes = emptySet(), supportedZoomRange = null, unsupportedStabilizationFpsMap = emptyMap(), - supportedTestPatterns = emptySet() + supportedTestPatterns = emptySet(), + supportedStreamConfigs = emptySet() ) private val defaultCameraAppSettings = CameraAppSettings()