Improve camera mixed mode handling and clean up dead code.

This commit is contained in:
Greyson Parrelli
2026-03-31 11:17:33 -04:00
committed by Alex Hart
parent 3f067654d9
commit 36f7c60a99
37 changed files with 396 additions and 1081 deletions

View File

@@ -159,7 +159,7 @@ import org.thoughtcrime.securesms.main.navigateToDetailLocation
import org.thoughtcrime.securesms.main.rememberDetailNavHostController
import org.thoughtcrime.securesms.main.rememberFocusRequester
import org.thoughtcrime.securesms.main.storiesNavGraphBuilder
import org.thoughtcrime.securesms.mediasend.camerax.CameraXUtil
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig
import org.thoughtcrime.securesms.mediasend.v2.MediaSelectionActivity
import org.thoughtcrime.securesms.megaphone.Megaphone
import org.thoughtcrime.securesms.megaphone.MegaphoneActionController
@@ -1125,7 +1125,7 @@ class MainActivity :
}
}
if (CameraXUtil.isSupported()) {
if (CameraXRemoteConfig.isSupported()) {
onGranted()
} else {
Permissions.with(this@MainActivity)

View File

@@ -30,7 +30,7 @@ import org.thoughtcrime.securesms.avatar.vector.VectorAvatarCreationFragment
import org.thoughtcrime.securesms.components.ButtonStripItemView
import org.thoughtcrime.securesms.components.recyclerview.GridDividerDecoration
import org.thoughtcrime.securesms.mediasend.AvatarSelectionActivity
import org.thoughtcrime.securesms.mediasend.camerax.CameraXUtil
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig
import org.thoughtcrime.securesms.util.ViewUtil
import org.thoughtcrime.securesms.util.adapter.mapping.MappingAdapter
import org.thoughtcrime.securesms.util.navigation.safeNavigate
@@ -223,7 +223,7 @@ class AvatarPickerFragment : Fragment(R.layout.avatar_picker_fragment) {
@Suppress("DEPRECATION")
private fun openCameraCapture() {
if (CameraXUtil.isSupported()) {
if (CameraXRemoteConfig.isSupported()) {
val intent = AvatarSelectionActivity.getIntentForCameraCapture(requireContext())
startActivityForResult(intent, REQUEST_CODE_SELECT_IMAGE)
} else {

View File

@@ -26,7 +26,7 @@ import org.signal.core.ui.compose.Dialogs
import org.signal.core.ui.compose.theme.SignalTheme
import org.signal.qr.QrScannerView
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.mediasend.camerax.CameraXModelBlocklist
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig
import org.thoughtcrime.securesms.qr.QrScanScreens
import org.thoughtcrime.securesms.recipients.Recipient
import java.util.concurrent.TimeUnit
@@ -98,7 +98,7 @@ fun UsernameQrScanScreen(
view
},
update = { view ->
view.start(lifecycleOwner = lifecycleOwner, forceLegacy = CameraXModelBlocklist.isBlocklisted())
view.start(lifecycleOwner = lifecycleOwner, forceLegacy = CameraXRemoteConfig.isBlocklisted())
},
hasPermission = hasCameraPermission,
onRequestPermissions = onOpenCameraClicked,

View File

@@ -96,7 +96,7 @@ import org.thoughtcrime.securesms.main.MainNavigationDetailLocation
import org.thoughtcrime.securesms.main.MainNavigationRouter
import org.thoughtcrime.securesms.mediaoverview.MediaOverviewActivity
import org.thoughtcrime.securesms.mediapreview.MediaIntentFactory
import org.thoughtcrime.securesms.mediasend.camerax.CameraXUtil
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig
import org.thoughtcrime.securesms.messagerequests.MessageRequestRepository
import org.thoughtcrime.securesms.nicknames.NicknameActivity
import org.thoughtcrime.securesms.profiles.edit.CreateProfileActivity
@@ -486,7 +486,7 @@ class ConversationSettingsFragment :
.setMessage(R.string.ConversationSettingsFragment__only_admins_of_this_group_can_add_to_its_story)
.setPositiveButton(android.R.string.ok) { d, _ -> d.dismiss() }
.show()
} else if (CameraXUtil.isSupported()) {
} else if (CameraXRemoteConfig.isSupported()) {
addToGroupStoryDelegate.addToStory(state.recipient.id)
} else {
Permissions.with(this@ConversationSettingsFragment)

View File

@@ -29,7 +29,7 @@ import org.thoughtcrime.securesms.conversation.v2.ConversationActivityResultCont
import org.thoughtcrime.securesms.giph.ui.GiphyActivity
import org.thoughtcrime.securesms.maps.PlacePickerActivity
import org.thoughtcrime.securesms.mediasend.MediaSendActivityResult
import org.thoughtcrime.securesms.mediasend.camerax.CameraXUtil
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig
import org.thoughtcrime.securesms.mediasend.v2.MediaSelectionActivity
import org.thoughtcrime.securesms.recipients.RecipientId
import org.signal.core.ui.R as CoreUiR
@@ -76,7 +76,7 @@ class ConversationActivityResultContracts(private val fragment: Fragment, privat
}
fun launchCamera(recipientId: RecipientId, isReply: Boolean) {
if (CameraXUtil.isSupported()) {
if (CameraXRemoteConfig.isSupported()) {
cameraLauncher.launch(MediaSelectionInput(emptyList(), recipientId, null, isReply))
fragment.requireActivity().overridePendingTransition(R.anim.camera_slide_from_bottom, R.anim.stationary)
} else {

View File

@@ -37,7 +37,7 @@ import org.thoughtcrime.securesms.transport.UndeliverableMessageException;
import org.thoughtcrime.securesms.util.BitmapDecodingException;
import org.thoughtcrime.securesms.util.ImageCompressionUtil;
import org.thoughtcrime.securesms.util.MediaUtil;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor.MemoryFileException;
import org.signal.core.util.MemoryFileDescriptor.MemoryFileException;
import org.thoughtcrime.securesms.util.RemoteConfig;
import org.thoughtcrime.securesms.video.StreamingTranscoder;
import org.thoughtcrime.securesms.video.TranscoderOptions;

View File

@@ -17,7 +17,7 @@ import org.signal.core.ui.compose.Dialogs
import org.signal.qr.QrScannerView
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.linkdevice.LinkDeviceRepository.LinkDeviceResult
import org.thoughtcrime.securesms.mediasend.camerax.CameraXModelBlocklist
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig
import org.thoughtcrime.securesms.qr.QrScanScreens
import org.thoughtcrime.securesms.util.navigation.safeNavigate
import java.util.concurrent.TimeUnit
@@ -104,7 +104,7 @@ fun LinkDeviceQrScanScreen(
view
},
update = { view: QrScannerView ->
view.start(lifecycleOwner = lifecycleOwner, forceLegacy = CameraXModelBlocklist.isBlocklisted())
view.start(lifecycleOwner = lifecycleOwner, forceLegacy = CameraXRemoteConfig.isBlocklisted())
if (showFrontCamera != null) {
view.toggleCamera()
}

View File

@@ -46,7 +46,7 @@ import org.signal.core.models.media.Media;
import org.signal.core.ui.logging.LoggingFragment;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.animation.AnimationCompleteListener;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXModelBlocklist;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig;
import org.thoughtcrime.securesms.mediasend.v2.MediaAnimations;
import org.thoughtcrime.securesms.mediasend.v2.MediaCountIndicatorButton;
import org.signal.glide.decryptableuri.DecryptableUri;
@@ -60,7 +60,7 @@ import io.reactivex.rxjava3.android.schedulers.AndroidSchedulers;
import io.reactivex.rxjava3.disposables.Disposable;
/**
* Camera capture implemented with the legacy camera API's. Should only be used if a device is on the {@link CameraXModelBlocklist}.
* Camera capture implemented with the legacy camera APIs. Should only be used if a device is blocklisted via {@link CameraXRemoteConfig}.
*/
public class Camera1Fragment extends LoggingFragment implements CameraFragment,
TextureView.SurfaceTextureListener,

View File

@@ -32,8 +32,6 @@ public class CameraButtonView extends View {
private static final int PROGRESS_ARC_STROKE_WIDTH = 4;
private static final int HALF_PROGRESS_ARC_STROKE_WIDTH = PROGRESS_ARC_STROKE_WIDTH / 2;
private static final float DEADZONE_REDUCTION_PERCENT = 0.35f;
private static final int DRAG_DISTANCE_MULTIPLIER = 3;
private static final Interpolator ZOOM_INTERPOLATOR = new DecelerateInterpolator();
private final @NonNull Paint outlinePaint = outlinePaint();
private final @NonNull Paint backgroundPaint = backgroundPaint();
@@ -49,21 +47,12 @@ public class CameraButtonView extends View {
private float progressPercent = 0f;
private @NonNull CameraButtonMode cameraButtonMode = CameraButtonMode.IMAGE;
private @Nullable VideoCaptureListener videoCaptureListener;
private final float imageCaptureSize;
private final float recordSize;
private final RectF progressRect = new RectF();
private final Rect deadzoneRect = new Rect();
private final @NonNull OnLongClickListener internalLongClickListener = v -> {
notifyVideoCaptureStarted();
shrinkAnimation.cancel();
setScaleX(1f);
setScaleY(1f);
isRecordingVideo = true;
return true;
};
public CameraButtonView(@NonNull Context context) {
this(context, null);
@@ -186,20 +175,6 @@ public class CameraButtonView extends View {
canvas.drawArc(progressRect, 270f, 360f * progressPercent, false, progressPaint);
}
public void setVideoCaptureListener(@Nullable VideoCaptureListener videoCaptureListener) {
if (isRecordingVideo) throw new IllegalStateException("Cannot set video capture listener while recording");
if (videoCaptureListener != null) {
this.cameraButtonMode = CameraButtonMode.MIXED;
this.videoCaptureListener = videoCaptureListener;
super.setOnLongClickListener(internalLongClickListener);
} else {
this.cameraButtonMode = CameraButtonMode.IMAGE;
this.videoCaptureListener = null;
super.setOnLongClickListener(null);
}
}
public void setProgress(float percentage) {
progressPercent = Util.clamp(percentage, 0f, 1f);
invalidate();
@@ -257,63 +232,15 @@ public class CameraButtonView extends View {
startAnimation(shrinkAnimation);
}
case MotionEvent.ACTION_MOVE:
if (isRecordingVideo) {
float maxRange = getHeight() * DRAG_DISTANCE_MULTIPLIER;
if (eventIsAboveDeadzone(event)) {
float deltaY = Math.abs(event.getY() - deadzoneRect.top);
float increment = Math.min(1f, deltaY / maxRange);
notifyZoomPercent(ZOOM_INTERPOLATOR.getInterpolation(increment));
invalidate();
} else if (eventIsBelowDeadzone(event)) {
float deltaY = Math.abs(event.getY() - deadzoneRect.bottom);
float increment = Math.min(1f, deltaY / maxRange);
notifyZoomPercent(-ZOOM_INTERPOLATOR.getInterpolation(increment));
invalidate();
}
}
break;
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP:
if (!isRecordingVideo) {
startAnimation(growAnimation);
}
notifyVideoCaptureEnded();
break;
}
return super.onTouchEvent(event);
}
private boolean eventIsAboveDeadzone(MotionEvent event) {
return Math.round(event.getY()) < deadzoneRect.top;
}
private boolean eventIsBelowDeadzone(MotionEvent event) {
return Math.round(event.getY()) > deadzoneRect.bottom;
}
private void notifyVideoCaptureStarted() {
if (!isRecordingVideo && videoCaptureListener != null) {
videoCaptureListener.onVideoCaptureStarted();
}
}
private void notifyVideoCaptureEnded() {
if (isRecordingVideo && videoCaptureListener != null) {
videoCaptureListener.onVideoCaptureComplete();
}
}
private void notifyZoomPercent(float percent) {
if (isRecordingVideo && videoCaptureListener != null) {
videoCaptureListener.onZoomIncremented(percent);
}
}
interface VideoCaptureListener {
void onVideoCaptureStarted();
void onVideoCaptureComplete();
void onZoomIncremented(float percent);
}
}

View File

@@ -10,7 +10,7 @@ import androidx.fragment.app.Fragment;
import org.signal.core.models.media.Media;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXUtil;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig;
import org.thoughtcrime.securesms.mms.MediaConstraints;
import java.io.FileDescriptor;
@@ -24,7 +24,7 @@ public interface CameraFragment {
@SuppressLint({ "RestrictedApi", "UnsafeOptInUsageError" })
static Fragment newInstance(boolean qrScanEnabled) {
if (CameraXUtil.isSupported()) {
if (CameraXRemoteConfig.isSupported()) {
return CameraXFragment.newInstance(qrScanEnabled);
} else {
return Camera1Fragment.newInstance();
@@ -32,7 +32,7 @@ public interface CameraFragment {
}
static Class<? extends Fragment> getFragmentClass() {
if (CameraXUtil.isSupported()) {
if (CameraXRemoteConfig.isSupported()) {
return CameraXFragment.class;
} else {
return Camera1Fragment.class;
@@ -41,7 +41,7 @@ public interface CameraFragment {
@SuppressLint({ "RestrictedApi", "UnsafeOptInUsageError" })
static Fragment newInstanceForAvatarCapture() {
if (CameraXUtil.isSupported()) {
if (CameraXRemoteConfig.isSupported()) {
return CameraXFragment.newInstanceForAvatarCapture();
} else {
return Camera1Fragment.newInstance();

View File

@@ -42,9 +42,11 @@ import androidx.compose.ui.text.style.TextAlign
import androidx.compose.ui.unit.Dp
import androidx.compose.ui.unit.dp
import androidx.lifecycle.viewmodel.compose.viewModel
import org.signal.camera.CameraCaptureMode
import org.signal.camera.CameraScreen
import org.signal.camera.CameraScreenEvents
import org.signal.camera.CameraScreenViewModel
import org.signal.camera.CameraXUtil
import org.signal.camera.VideoCaptureResult
import org.signal.camera.VideoOutput
import org.signal.camera.hud.GalleryThumbnailButton
@@ -55,12 +57,14 @@ import org.signal.core.ui.BottomSheetUtil
import org.signal.core.ui.compose.ComposeFragment
import org.signal.core.ui.permissions.PermissionDeniedBottomSheet.Companion.showPermissionFragment
import org.signal.core.ui.permissions.Permissions
import org.signal.core.util.MemoryFileDescriptor
import org.signal.core.util.asListContains
import org.signal.core.util.logging.Log
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.keyvalue.SignalStore
import org.thoughtcrime.securesms.mediasend.camerax.CameraXModePolicy
import org.thoughtcrime.securesms.mms.MediaConstraints
import org.thoughtcrime.securesms.stories.Stories
import org.thoughtcrime.securesms.util.MemoryFileDescriptor
import org.thoughtcrime.securesms.util.RemoteConfig
import org.thoughtcrime.securesms.video.VideoUtil
import java.io.ByteArrayOutputStream
import java.io.IOException
@@ -99,7 +103,7 @@ class CameraXFragment : ComposeFragment(), CameraFragment {
private var controller: CameraFragment.Controller? = null
private var videoFileDescriptor: MemoryFileDescriptor? = null
private var cameraXModePolicy: CameraXModePolicy? = null
private var captureMode: CameraCaptureMode = CameraCaptureMode.ImageOnly
private val isVideoEnabled: Boolean
get() = requireArguments().getBoolean(IS_VIDEO_ENABLED, true)
@@ -121,24 +125,17 @@ class CameraXFragment : ComposeFragment(), CameraFragment {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
cameraXModePolicy = CameraXModePolicy.acquire(
requireContext(),
controller!!.mediaConstraints,
isVideoEnabled,
isQrScanEnabled
)
Log.d(TAG, "Starting CameraX with mode policy ${cameraXModePolicy?.javaClass?.simpleName}")
captureMode = resolveCaptureMode()
Log.d(TAG, "Starting CameraX with capture mode $captureMode")
}
@Composable
override fun FragmentContent() {
CameraXScreen(
controller = controller,
isVideoEnabled = isVideoEnabled && Build.VERSION.SDK_INT >= 26,
isVideoEnabled = captureMode != CameraCaptureMode.ImageOnly,
isQrScanEnabled = isQrScanEnabled,
isVideoCaptureBindingEnabled = cameraXModePolicy is CameraXModePolicy.Mixed,
captureMode = captureMode,
controlsVisible = controlsVisible.value,
selectedMediaCount = selectedMediaCount.intValue,
onCheckPermissions = { checkPermissions(isVideoEnabled) },
@@ -265,8 +262,8 @@ class CameraXFragment : ComposeFragment(), CameraFragment {
return try {
closeVideoFileDescriptor()
videoFileDescriptor = CameraXVideoCaptureHelper.createFileDescriptor(requireContext())
videoFileDescriptor?.parcelFileDescriptor
videoFileDescriptor = CameraXUtil.createVideoFileDescriptor(requireContext())
videoFileDescriptor?.parcelFd
} catch (e: IOException) {
Log.w(TAG, "Failed to create video file descriptor", e)
null
@@ -292,6 +289,22 @@ class CameraXFragment : ComposeFragment(), CameraFragment {
}
return maxDuration
}
/**
 * Chooses the capture mode for this device/session.
 *
 * Video requires API 26+, the fragment's video-enabled argument, and an available
 * video transcoder. Mixed mode (image and video use cases bound simultaneously)
 * additionally requires device support and that the device model is not on the
 * remote-config blocklist.
 */
private fun resolveCaptureMode(): CameraCaptureMode {
// Baseline requirements for recording video at all.
val isVideoSupported = Build.VERSION.SDK_INT >= 26 &&
isVideoEnabled &&
MediaConstraints.isVideoTranscodeAvailable()
// Simultaneous image+video binding: device capability check plus a kill switch
// for models known to misbehave (remote config list matched against Build.MODEL).
val isMixedModeSupported = isVideoSupported &&
CameraXUtil.isMixedModeSupported(requireContext()) &&
!RemoteConfig.cameraXMixedModelBlocklist.asListContains(Build.MODEL)
return when {
isMixedModeSupported -> CameraCaptureMode.ImageAndVideoSimultaneous
isVideoSupported -> CameraCaptureMode.ImageAndVideoExclusive
else -> CameraCaptureMode.ImageOnly
}
}
}
@Composable
@@ -299,7 +312,7 @@ private fun CameraXScreen(
controller: CameraFragment.Controller?,
isVideoEnabled: Boolean,
isQrScanEnabled: Boolean,
isVideoCaptureBindingEnabled: Boolean,
captureMode: CameraCaptureMode,
controlsVisible: Boolean,
selectedMediaCount: Int,
onCheckPermissions: () -> Unit,
@@ -406,7 +419,7 @@ private fun CameraXScreen(
emitter = { event -> cameraViewModel.onEvent(event) },
roundCorners = cameraDisplay.roundViewFinderCorners,
contentAlignment = cameraAlignment,
enableVideoCapture = isVideoCaptureBindingEnabled,
captureMode = captureMode,
enableQrScanning = isQrScanEnabled,
modifier = Modifier.padding(bottom = viewportBottomMargin)
) {
@@ -615,7 +628,7 @@ private fun CameraXScreenPreview_20_9() {
controller = null,
isVideoEnabled = true,
isQrScanEnabled = false,
isVideoCaptureBindingEnabled = true,
captureMode = CameraCaptureMode.ImageAndVideoSimultaneous,
controlsVisible = true,
selectedMediaCount = 0,
onCheckPermissions = {},
@@ -643,7 +656,7 @@ private fun CameraXScreenPreview_19_9() {
controller = null,
isVideoEnabled = true,
isQrScanEnabled = false,
isVideoCaptureBindingEnabled = true,
captureMode = CameraCaptureMode.ImageAndVideoSimultaneous,
controlsVisible = true,
selectedMediaCount = 0,
onCheckPermissions = {},
@@ -671,7 +684,7 @@ private fun CameraXScreenPreview_18_9() {
controller = null,
isVideoEnabled = true,
isQrScanEnabled = false,
isVideoCaptureBindingEnabled = true,
captureMode = CameraCaptureMode.ImageAndVideoSimultaneous,
controlsVisible = true,
selectedMediaCount = 0,
onCheckPermissions = {},
@@ -699,7 +712,7 @@ private fun CameraXScreenPreview_16_9() {
controller = null,
isVideoEnabled = true,
isQrScanEnabled = false,
isVideoCaptureBindingEnabled = true,
captureMode = CameraCaptureMode.ImageAndVideoSimultaneous,
controlsVisible = true,
selectedMediaCount = 0,
onCheckPermissions = {},
@@ -727,7 +740,7 @@ private fun CameraXScreenPreview_6_5() {
controller = null,
isVideoEnabled = true,
isQrScanEnabled = false,
isVideoCaptureBindingEnabled = true,
captureMode = CameraCaptureMode.ImageAndVideoSimultaneous,
controlsVisible = true,
selectedMediaCount = 0,
onCheckPermissions = {},

View File

@@ -1,76 +0,0 @@
package org.thoughtcrime.securesms.mediasend;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import androidx.annotation.NonNull;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageCapture;
import androidx.camera.view.CameraController;
/**
 * Simulates a flash for front-facing ("selfie") captures by maxing out the screen
 * brightness and showing a bright overlay view, since front cameras typically have
 * no hardware flash. While the view-based flash is active, the hardware flash mode
 * is temporarily forced OFF and restored afterwards.
 */
final class CameraXSelfieFlashHelper {

  private static final float MAX_SCREEN_BRIGHTNESS  = 1f;
  private static final float MAX_SELFIE_FLASH_ALPHA = 0.9f;

  private final Window           window;
  private final CameraController camera;
  private final View             selfieFlash;

  private float   brightnessBeforeFlash;
  private boolean inFlash;
  private int     flashMode = -1; // flash mode saved in onWillTakePicture(); -1 means "nothing saved"

  CameraXSelfieFlashHelper(@NonNull Window window,
                           @NonNull CameraController camera,
                           @NonNull View selfieFlash)
  {
    this.window      = window;
    this.camera      = camera;
    this.selfieFlash = selfieFlash;
  }

  /**
   * Call before taking a picture: when the view-based flash will be used, remembers
   * the current hardware flash mode and disables it so it doesn't fire.
   */
  void onWillTakePicture() {
    if (!inFlash && shouldUseViewBasedFlash()) {
      flashMode = camera.getImageCaptureFlashMode();
      camera.setImageCaptureFlashMode(ImageCapture.FLASH_MODE_OFF);
    }
  }

  /** Brightens the screen and shows the overlay. No-op if already flashing or not applicable. */
  void startFlash() {
    if (inFlash || !shouldUseViewBasedFlash()) return;

    inFlash = true;

    WindowManager.LayoutParams params = window.getAttributes();
    brightnessBeforeFlash   = params.screenBrightness;
    params.screenBrightness = MAX_SCREEN_BRIGHTNESS;
    window.setAttributes(params);

    selfieFlash.setAlpha(MAX_SELFIE_FLASH_ALPHA);
  }

  /** Restores the previous screen brightness and flash mode, and hides the overlay. */
  void endFlash() {
    if (!inFlash) return;

    WindowManager.LayoutParams params = window.getAttributes();
    params.screenBrightness = brightnessBeforeFlash;
    window.setAttributes(params);

    camera.setImageCaptureFlashMode(flashMode);
    flashMode = -1;

    // Bug fix: previously set alpha back to MAX_SELFIE_FLASH_ALPHA, which left the
    // white overlay fully visible after the capture. The overlay must be hidden here.
    selfieFlash.setAlpha(0f);
    inFlash = false;
  }

  /**
   * The view-based flash applies only when flash is requested (either currently ON,
   * or ON before onWillTakePicture() forced it OFF) and the front camera is selected.
   */
  private boolean shouldUseViewBasedFlash() {
    CameraSelector cameraSelector = camera.getCameraSelector();

    return (camera.getImageCaptureFlashMode() == ImageCapture.FLASH_MODE_ON || flashMode == ImageCapture.FLASH_MODE_ON) &&
           cameraSelector == CameraSelector.DEFAULT_FRONT_CAMERA;
  }
}

View File

@@ -1,285 +0,0 @@
package org.thoughtcrime.securesms.mediasend;
import android.Manifest;
import android.animation.ValueAnimator;
import android.annotation.SuppressLint;
import android.content.Context;
import android.util.DisplayMetrics;
import android.util.Size;
import android.view.ViewGroup;
import android.view.animation.LinearInterpolator;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ZoomState;
import androidx.camera.video.FileDescriptorOutputOptions;
import androidx.camera.video.Recording;
import androidx.camera.video.VideoRecordEvent;
import androidx.camera.view.CameraController;
import androidx.camera.view.PreviewView;
import androidx.camera.view.video.AudioConfig;
import androidx.core.content.ContextCompat;
import androidx.core.util.Consumer;
import androidx.fragment.app.Fragment;
import org.signal.core.util.logging.Log;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXModePolicy;
import org.signal.core.ui.permissions.Permissions;
import org.thoughtcrime.securesms.util.ContextUtil;
import org.thoughtcrime.securesms.util.Debouncer;
import org.thoughtcrime.securesms.util.RemoteConfig;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.FileDescriptor;
import java.io.IOException;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
/**
 * Drives video recording for the camera capture screen. Implements the capture
 * button's {@link CameraButtonView.VideoCaptureListener}: starts and stops a
 * CameraX recording into an in-memory file descriptor, animates the button's
 * progress ring, resizes the preview toward the recording aspect ratio, and maps
 * zoom increments from the button's drag gesture onto the camera's zoom range.
 * The max duration is enforced by a {@link Debouncer} that auto-completes the
 * recording.
 */
@RequiresApi(26)
class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener {
private static final String TAG = Log.tag(CameraXVideoCaptureHelper.class);
private static final String VIDEO_DEBUG_LABEL = "video-capture";
// Size of the in-memory file backing the recording (10 MiB).
private static final long VIDEO_SIZE = 10 * 1024 * 1024;
private final @NonNull Fragment fragment;
private final @NonNull PreviewView previewView;
private final @NonNull CameraController cameraController;
private final @NonNull Callback callback;
private final @NonNull MemoryFileDescriptor memoryFileDescriptor;
private final @NonNull ValueAnimator updateProgressAnimator;
private final @NonNull Debouncer debouncer;
private final @NonNull CameraXModePolicy cameraXModePolicy;
// Animates the preview view's width/height while recording; may be null if never started.
private ValueAnimator cameraMetricsAnimator;
private @Nullable Recording activeRecording = null;
// Invoked by CameraX for recording lifecycle events; on Finalize, either reports the
// error or rewinds the memory file and hands its descriptor to the callback.
private final Consumer<VideoRecordEvent> videoSavedListener = new Consumer<>() {
@Override
public void accept(VideoRecordEvent videoRecordEvent) {
Log.d(TAG, "Received recording event: " + videoRecordEvent.getClass().getSimpleName());
if (videoRecordEvent instanceof VideoRecordEvent.Finalize) {
VideoRecordEvent.Finalize event = (VideoRecordEvent.Finalize) videoRecordEvent;
if (event.hasError()) {
Log.w(TAG, "Hit an error while recording! Error code: " + event.getError(), event.getCause());
debouncer.clear();
callback.onVideoError(event.getCause());
} else {
try {
debouncer.clear();
// Reset zoom for the next recording, then rewind so the consumer reads from the start.
cameraController.setZoomRatio(getDefaultVideoZoomRatio());
memoryFileDescriptor.seek(0);
callback.onVideoSaved(memoryFileDescriptor.getFileDescriptor());
} catch (IOException e) {
callback.onVideoError(e);
}
}
}
}
};
CameraXVideoCaptureHelper(@NonNull Fragment fragment,
@NonNull CameraButtonView captureButton,
@NonNull CameraController cameraController,
@NonNull PreviewView previewView,
@NonNull MemoryFileDescriptor memoryFileDescriptor,
@NonNull CameraXModePolicy cameraXModePolicy,
int maxVideoDurationSec,
@NonNull Callback callback)
{
this.fragment = fragment;
this.cameraController = cameraController;
this.previewView = previewView;
this.memoryFileDescriptor = memoryFileDescriptor;
this.callback = callback;
// Scale the progress animation by the system animator scale so the visual ring
// still completes in sync with the real max duration when animations are slowed;
// a 0 scale (animations off) falls back to the unscaled duration.
float animationScale = ContextUtil.getAnimationScale(fragment.requireContext());
long baseDuration = TimeUnit.SECONDS.toMillis(maxVideoDurationSec);
long scaledDuration = Math.round(animationScale > 0f ? (baseDuration * (1f / animationScale)) : baseDuration);
this.updateProgressAnimator = ValueAnimator.ofFloat(0f, 1f).setDuration(scaledDuration);
// The debouncer fires after the max duration and auto-stops the recording.
this.debouncer = new Debouncer(TimeUnit.SECONDS.toMillis(maxVideoDurationSec));
this.cameraXModePolicy = cameraXModePolicy;
updateProgressAnimator.setInterpolator(new LinearInterpolator());
updateProgressAnimator.addUpdateListener(anim -> {
captureButton.setProgress(anim.getAnimatedFraction());
});
}
// Entry point from the capture button's long-press: only records when both camera
// and microphone permissions are held; prompts for the microphone otherwise.
@Override
public void onVideoCaptureStarted() {
Log.d(TAG, "onVideoCaptureStarted");
if (canUseCamera() && canRecordAudio()) {
beginCameraRecording();
} else if (!canRecordAudio()) {
displayAudioRecordingPermissionsDialog();
}
}
private boolean canUseCamera() {
return Permissions.hasAll(fragment.requireContext(), Manifest.permission.CAMERA);
}
private boolean canRecordAudio() {
return Permissions.hasAll(fragment.requireContext(), Manifest.permission.RECORD_AUDIO);
}
// Requests RECORD_AUDIO with rationale/denial UI; a denial surfaces as a toast.
private void displayAudioRecordingPermissionsDialog() {
Permissions.with(fragment)
.request(Manifest.permission.RECORD_AUDIO)
.ifNecessary()
.withRationaleDialog(fragment.getString(R.string.CameraXFragment_allow_access_microphone), fragment.getString(R.string.CameraXFragment_to_capture_videos_with_sound), R.drawable.ic_mic_24)
.withPermanentDenialDialog(fragment.getString(R.string.ConversationActivity_signal_needs_the_recording_permissions_to_capture_video), null, R.string.CameraXFragment_allow_access_microphone, R.string.CameraXFragment_to_capture_videos, fragment.getParentFragmentManager())
.onAnyDenied(() -> Toast.makeText(fragment.requireContext(), R.string.CameraXFragment_signal_needs_microphone_access_video, Toast.LENGTH_LONG).show())
.execute();
}
// Switches the controller to video use cases, starts the CameraX recording into the
// memory file, and kicks off the progress animation plus the max-duration debouncer.
@SuppressLint({"RestrictedApi", "MissingPermission"})
private void beginCameraRecording() {
cameraXModePolicy.setToVideo(cameraController);
this.cameraController.setZoomRatio(getDefaultVideoZoomRatio());
callback.onVideoRecordStarted();
shrinkCaptureArea();
FileDescriptorOutputOptions outputOptions = new FileDescriptorOutputOptions.Builder(memoryFileDescriptor.getParcelFileDescriptor()).build();
AudioConfig audioConfig = AudioConfig.create(true);
// NOTE(review): assumes CameraX constant ordering where modes <= FLASH_MODE_ON
// mean "auto" or "on", so the torch is enabled for those modes — confirm against
// the ImageCapture flash-mode constants.
cameraController.enableTorch(cameraController.getImageCaptureFlashMode() <= ImageCapture.FLASH_MODE_ON);
activeRecording = cameraController.startRecording(outputOptions, audioConfig, ContextCompat.getMainExecutor(fragment.requireContext()), videoSavedListener);
updateProgressAnimator.start();
debouncer.publish(this::onVideoCaptureComplete);
}
// Animates the preview's width (or height, if width already matches) toward the
// video recording size so the on-screen framing matches what is captured.
private void shrinkCaptureArea() {
Size screenSize = getScreenSize();
Size videoRecordingSize = VideoUtil.getVideoRecordingSize();
float scale = getSurfaceScaleForRecording();
float targetWidthForAnimation = videoRecordingSize.getWidth() * scale;
float scaleX = targetWidthForAnimation / screenSize.getWidth();
if (scaleX == 1f) {
// Width already matches; animate height instead (nothing to do if it matches too).
float targetHeightForAnimation = videoRecordingSize.getHeight() * scale;
if (screenSize.getHeight() == targetHeightForAnimation) {
return;
}
cameraMetricsAnimator = ValueAnimator.ofFloat(screenSize.getHeight(), targetHeightForAnimation);
} else {
if (screenSize.getWidth() == targetWidthForAnimation) {
return;
}
cameraMetricsAnimator = ValueAnimator.ofFloat(screenSize.getWidth(), targetWidthForAnimation);
}
ViewGroup.LayoutParams params = previewView.getLayoutParams();
cameraMetricsAnimator.setInterpolator(new LinearInterpolator());
cameraMetricsAnimator.setDuration(200);
cameraMetricsAnimator.addUpdateListener(animation -> {
if (scaleX == 1f) {
params.height = Math.round((float) animation.getAnimatedValue());
} else {
params.width = Math.round((float) animation.getAnimatedValue());
}
previewView.setLayoutParams(params);
});
cameraMetricsAnimator.start();
}
private Size getScreenSize() {
DisplayMetrics metrics = previewView.getResources().getDisplayMetrics();
return new Size(metrics.widthPixels, metrics.heightPixels);
}
// Ratio of the screen's short edge to the recording's short edge, used to scale the
// recording size up to screen coordinates for the shrink animation.
private float getSurfaceScaleForRecording() {
Size videoRecordingSize = VideoUtil.getVideoRecordingSize();
Size screenSize = getScreenSize();
return Math.min(screenSize.getHeight(), screenSize.getWidth()) / (float) Math.min(videoRecordingSize.getHeight(), videoRecordingSize.getWidth());
}
// Called when the long-press ends or the debouncer fires: closes the recording
// (which triggers videoSavedListener's Finalize handling), unwinds the preview
// animation, and restores image-only use cases per the mode policy.
@Override
public void onVideoCaptureComplete() {
if (!canRecordAudio()) {
Log.w(TAG, "Can't record audio!");
return;
}
if (activeRecording == null) {
Log.w(TAG, "No active recording!");
return;
}
Log.d(TAG, "onVideoCaptureComplete");
activeRecording.close();
activeRecording = null;
if (cameraMetricsAnimator != null && cameraMetricsAnimator.isRunning()) {
cameraMetricsAnimator.reverse();
}
updateProgressAnimator.cancel();
debouncer.clear();
cameraXModePolicy.setToImage(cameraController);
}
// Maps a signed increment in [-1, 1] from the drag gesture onto the zoom range:
// positive values interpolate from the default ratio up to max zoom, negative
// values down to min zoom.
@Override
public void onZoomIncremented(float increment) {
ZoomState zoomState = Objects.requireNonNull(cameraController.getZoomState().getValue());
float base = getDefaultVideoZoomRatio();
if (increment >= 0f) {
float range = zoomState.getMaxZoomRatio() - base;
cameraController.setZoomRatio(base + range * increment);
} else {
float range = base - zoomState.getMinZoomRatio();
cameraController.setZoomRatio(base + range * increment);
}
}
// Safety net: a recording left open here indicates a missed onVideoCaptureComplete().
@Override
protected void finalize() throws Throwable {
if (activeRecording != null) {
Log.w(TAG, "Dangling recording left open in finalize()! Attempting to close.");
activeRecording.close();
}
super.finalize();
}
/** Creates the in-memory file descriptor that recordings are written into. */
static MemoryFileDescriptor createFileDescriptor(@NonNull Context context) throws MemoryFileDescriptor.MemoryFileException {
return MemoryFileDescriptor.newMemoryFileDescriptor(
context,
VIDEO_DEBUG_LABEL,
VIDEO_SIZE
);
}
// Default zoom when recording starts: 1x behind a remote-config flag, otherwise the
// camera's minimum zoom ratio (which may be ultra-wide on some devices).
public float getDefaultVideoZoomRatio() {
if (RemoteConfig.startVideoRecordAt1x()) {
return 1f;
} else {
return Objects.requireNonNull(cameraController.getZoomState().getValue()).getMinZoomRatio();
}
}
/** Receives recording lifecycle results; the saved descriptor is rewound to position 0. */
interface Callback {
void onVideoRecordStarted();
void onVideoSaved(@NonNull FileDescriptor fd);
void onVideoError(@Nullable Throwable cause);
}
}

View File

@@ -1,98 +0,0 @@
package org.thoughtcrime.securesms.mediasend.camerax
import android.content.Context
import android.os.Build
import androidx.camera.view.CameraController
import org.signal.core.util.asListContains
import org.thoughtcrime.securesms.mms.MediaConstraints
import org.thoughtcrime.securesms.util.RemoteConfig
import org.thoughtcrime.securesms.video.VideoUtil
/**
 * Decides which CameraX use cases (image capture, video capture, QR image analysis)
 * this device can have bound simultaneously, and applies that decision to a
 * [CameraController]. Acquire an instance via [acquire].
 */
sealed class CameraXModePolicy {
// Whether this policy permits video recording at all.
abstract val isVideoSupported: Boolean
// Whether the IMAGE_ANALYSIS use case should also be bound for QR scanning.
abstract val isQrScanEnabled: Boolean
/** Binds this policy's initial set of use cases to [cameraController]. */
abstract fun initialize(cameraController: CameraController)
/** Applies [useCaseFlags], OR-ing in IMAGE_ANALYSIS when QR scanning is enabled. */
open fun initialize(cameraController: CameraController, useCaseFlags: Int) {
if (isQrScanEnabled) {
cameraController.setEnabledUseCases(useCaseFlags or CameraController.IMAGE_ANALYSIS)
} else {
cameraController.setEnabledUseCases(useCaseFlags)
}
}
// Mode-switch hooks; only meaningful for Single, which rebinds use cases per mode.
open fun setToImage(cameraController: CameraController) = Unit
open fun setToVideo(cameraController: CameraController) = Unit
/**
 * The device supports having Image and Video enabled at the same time
 */
data class Mixed(override val isQrScanEnabled: Boolean) : CameraXModePolicy() {
override val isVideoSupported: Boolean = true
override fun initialize(cameraController: CameraController) {
super.initialize(cameraController, CameraController.IMAGE_CAPTURE or CameraController.VIDEO_CAPTURE)
}
}
/**
 * The device supports image and video, but only one mode at a time.
 */
data class Single(override val isQrScanEnabled: Boolean) : CameraXModePolicy() {
override val isVideoSupported: Boolean = true
// Starts in image mode; callers switch via setToImage/setToVideo around recordings.
override fun initialize(cameraController: CameraController) {
setToImage(cameraController)
}
override fun setToImage(cameraController: CameraController) {
super.initialize(cameraController, CameraController.IMAGE_CAPTURE)
}
override fun setToVideo(cameraController: CameraController) {
super.initialize(cameraController, CameraController.VIDEO_CAPTURE)
}
}
/**
 * The device supports taking images only.
 */
data class ImageOnly(override val isQrScanEnabled: Boolean) : CameraXModePolicy() {
override val isVideoSupported: Boolean = false
override fun initialize(cameraController: CameraController) {
super.initialize(cameraController, CameraController.IMAGE_CAPTURE)
}
}
companion object {
/**
 * Picks the most capable policy the device supports: [Mixed] when simultaneous
 * image+video binding works and the model isn't remotely blocklisted, [Single]
 * when video works but only one use case at a time, otherwise [ImageOnly].
 */
@JvmStatic
fun acquire(context: Context, mediaConstraints: MediaConstraints, isVideoEnabled: Boolean, isQrScanEnabled: Boolean): CameraXModePolicy {
val isVideoSupported = Build.VERSION.SDK_INT >= 26 &&
isVideoEnabled &&
MediaConstraints.isVideoTranscodeAvailable() &&
VideoUtil.getMaxVideoRecordDurationInSeconds(context, mediaConstraints) > 0
// NOTE(review): isVideoSupported already requires SDK_INT >= 26, so the SDK
// check on the next line is redundant.
val isMixedModeSupported = isVideoSupported &&
Build.VERSION.SDK_INT >= 26 &&
CameraXUtil.isMixedModeSupported(context) &&
!RemoteConfig.cameraXMixedModelBlocklist.asListContains(Build.MODEL)
return when {
isMixedModeSupported -> Mixed(isQrScanEnabled)
isVideoSupported -> Single(isQrScanEnabled)
else -> ImageOnly(isQrScanEnabled)
}
}
}
}

View File

@@ -8,7 +8,12 @@ import org.thoughtcrime.securesms.util.RemoteConfig
* Some phones don't work well with CameraX. This class uses a remote config to decide
* which phones should fall back to the legacy camera.
*/
object CameraXModelBlocklist {
object CameraXRemoteConfig {
@JvmStatic
fun isSupported(): Boolean {
return !isBlocklisted()
}
@JvmStatic
fun isBlocklisted(): Boolean {

View File

@@ -1,262 +0,0 @@
package org.thoughtcrime.securesms.mediasend.camerax;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.BitmapRegionDecoder;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.os.Build;
import kotlin.Pair;
import android.util.Size;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.camera.camera2.internal.compat.CameraManagerCompat;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageProxy;
import org.signal.core.util.Stopwatch;
import org.signal.core.util.logging.Log;
import org.thoughtcrime.securesms.util.BitmapDecodingException;
import org.thoughtcrime.securesms.util.BitmapUtil;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Locale;
/**
 * Legacy CameraX helpers: converts {@link ImageProxy} captures to JPEG (applying crop,
 * rotation, and mirroring), maps between camera-direction representations, and probes
 * Camera2 hardware capability levels to decide whether mixed image+video mode is viable.
 */
public class CameraXUtil {

  private static final String TAG = Log.tag(CameraXUtil.class);

  // Hardware levels ordered least-capable-first; used to pick the weaker of two levels.
  private static final int[] CAMERA_HARDWARE_LEVEL_ORDERING = new int[]{CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
      CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
      CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL};

  // Same ordering, including LEVEL_3 (added in API 24).
  @RequiresApi(24)
  private static final int[] CAMERA_HARDWARE_LEVEL_ORDERING_24 = new int[]{CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
      CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
      CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
      CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_3};

  // Same ordering, including EXTERNAL (added in API 28).
  @RequiresApi(28)
  private static final int[] CAMERA_HARDWARE_LEVEL_ORDERING_28 = new int[]{CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
      CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
      CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL,
      CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
      CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_3};

  /**
   * Converts a captured {@link ImageProxy} (JPEG data expected in plane 0) into final JPEG
   * bytes, applying the crop rect, the stated rotation, and an optional horizontal flip
   * (used for front-camera mirroring).
   *
   * @param image the capture output
   * @param flip  whether to mirror horizontally
   * @return transformed JPEG bytes plus the resulting width/height
   */
  @SuppressWarnings("SuspiciousNameCombination")
  public static ImageResult toJpeg(@NonNull ImageProxy image, boolean flip) throws IOException {
    ImageProxy.PlaneProxy[] planes   = image.getPlanes();
    ByteBuffer              buffer   = planes[0].getBuffer();
    Rect                    cropRect = shouldCropImage(image) ? image.getCropRect() : null;
    byte[]                  data     = new byte[buffer.capacity()];
    int                     rotation = image.getImageInfo().getRotationDegrees();

    buffer.get(data);

    try {
      Pair<Integer, Integer> dimens = BitmapUtil.getDimensions(new ByteArrayInputStream(data));
      // NOTE(review): uses && — this only fires when BOTH dimensions differ (i.e. the
      // decoded image appears width/height-swapped); verify single-axis mismatches are
      // intentionally ignored.
      if (dimens.getFirst() != image.getWidth() && dimens.getSecond() != image.getHeight()) {
        Log.w(TAG, String.format(Locale.ENGLISH, "Decoded image dimensions differed from stated dimensions! Stated: %d x %d, Decoded: %d x %d",
            image.getWidth(), image.getHeight(), dimens.getFirst(), dimens.getSecond()));
        Log.w(TAG, "Ignoring the stated rotation and rotating the crop rect 90 degrees (stated rotation is " + rotation + " degrees).");
        // The data is already rotated relative to the stated metadata: drop the rotation
        // and swap the crop rect's axes instead.
        rotation = 0;
        if (cropRect != null) {
          cropRect = new Rect(cropRect.top, cropRect.left, cropRect.bottom, cropRect.right);
        }
      }
    } catch (BitmapDecodingException e) {
      // Best-effort sanity check only; fall through and use the stated metadata.
      Log.w(TAG, "Failed to decode!", e);
    }

    // Only re-encode when some transform is actually needed.
    if (cropRect != null || rotation != 0 || flip) {
      data = transformByteArray(data, cropRect, rotation, flip);
    }

    int width  = cropRect != null ? (cropRect.right - cropRect.left) : image.getWidth();
    int height = cropRect != null ? (cropRect.bottom - cropRect.top) : image.getHeight();

    // A 90/270-degree rotation swaps the output dimensions.
    if (rotation == 90 || rotation == 270) {
      int swap = width;
      width  = height;
      height = swap;
    }

    return new ImageResult(data, width, height);
  }

  /** Whether CameraX should be used at all on this device (remote-config blocklist). */
  public static boolean isSupported() {
    return !CameraXModelBlocklist.isBlocklisted();
  }

  /** Maps a CameraX {@link CameraSelector} to the legacy {@link Camera.CameraInfo} facing int. */
  public static int toCameraDirectionInt(CameraSelector cameraSelector) {
    if (cameraSelector == CameraSelector.DEFAULT_FRONT_CAMERA) {
      return Camera.CameraInfo.CAMERA_FACING_FRONT;
    } else {
      return Camera.CameraInfo.CAMERA_FACING_BACK;
    }
  }

  /** Maps a legacy {@link Camera.CameraInfo} facing int to a CameraX {@link CameraSelector}. */
  public static CameraSelector toCameraSelector(@CameraSelector.LensFacing int cameraDirectionInt) {
    if (cameraDirectionInt == Camera.CameraInfo.CAMERA_FACING_FRONT) {
      return CameraSelector.DEFAULT_FRONT_CAMERA;
    } else {
      return CameraSelector.DEFAULT_BACK_CAMERA;
    }
  }

  /**
   * Capture mode to use on this device: models known to be fast can afford to maximize
   * quality; everything else minimizes latency.
   */
  public static @ImageCapture.CaptureMode int getOptimalCaptureMode() {
    return FastCameraModels.contains(Build.MODEL) ? ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY
                                                  : ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY;
  }

  /**
   * Decodes the JPEG bytes, applies crop/rotation/flip via a bitmap matrix, and re-encodes
   * to JPEG. Timed with a {@link Stopwatch} for diagnostics.
   */
  private static byte[] transformByteArray(@NonNull byte[] data, @Nullable Rect cropRect, int rotation, boolean flip) throws IOException {
    Stopwatch stopwatch = new Stopwatch("transform");

    Bitmap in;

    if (cropRect != null) {
      // Region decode so only the cropped area is ever held in memory.
      BitmapRegionDecoder decoder = BitmapRegionDecoder.newInstance(data, 0, data.length, false);
      in = decoder.decodeRegion(cropRect, new BitmapFactory.Options());
      decoder.recycle();
      stopwatch.split("crop");
    } else {
      in = BitmapFactory.decodeByteArray(data, 0, data.length);
    }

    Bitmap out = in;

    if (rotation != 0 || flip) {
      Matrix matrix = new Matrix();
      matrix.postRotate(rotation);

      if (flip) {
        // Mirror horizontally, then translate back into positive coordinates.
        matrix.postScale(-1, 1);
        matrix.postTranslate(in.getWidth(), 0);
      }

      out = Bitmap.createBitmap(in, 0, 0, in.getWidth(), in.getHeight(), matrix, true);
    }

    byte[] transformedData = toJpegBytes(out);
    stopwatch.split("transcode");

    in.recycle();
    out.recycle();

    stopwatch.stop(TAG);

    return transformedData;
  }

  /** True when the crop rect is smaller than the full source image. */
  private static boolean shouldCropImage(@NonNull ImageProxy image) {
    Size sourceSize = new Size(image.getWidth(), image.getHeight());
    Size targetSize = new Size(image.getCropRect().width(), image.getCropRect().height());

    return !targetSize.equals(sourceSize);
  }

  /** Compresses a bitmap to JPEG at quality 80. */
  private static byte[] toJpegBytes(@NonNull Bitmap bitmap) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();

    if (!bitmap.compress(Bitmap.CompressFormat.JPEG, 80, out)) {
      throw new IOException("Failed to compress bitmap.");
    }

    return out.toByteArray();
  }

  /** Mixed (simultaneous image+video) mode requires every camera to be better than LEGACY. */
  public static boolean isMixedModeSupported(@NonNull Context context) {
    return getLowestSupportedHardwareLevel(context) != CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
  }

  /**
   * Returns the least capable supported hardware level across all cameras, falling back to
   * LEGACY if enumeration fails or any camera reports LEGACY (or nothing at all).
   */
  public static int getLowestSupportedHardwareLevel(@NonNull Context context) {
    @SuppressLint("RestrictedApi") CameraManager cameraManager = CameraManagerCompat.from(context.getApplicationContext()).unwrap();

    try {
      int supported = maxHardwareLevel();

      for (String cameraId : cameraManager.getCameraIdList()) {
        Integer hwLevel = null;
        try {
          hwLevel = cameraManager.getCameraCharacteristics(cameraId).get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
        } catch (NullPointerException e) {
          // redmi device crash, assume lowest
        }

        if (hwLevel == null || hwLevel == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
          return CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
        }

        supported = smallerHardwareLevel(supported, hwLevel);
      }

      return supported;
    } catch (CameraAccessException e) {
      Log.w(TAG, "Failed to enumerate cameras", e);

      return CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
    }
  }

  /** The most capable level expressible on this API level. */
  private static int maxHardwareLevel() {
    if (Build.VERSION.SDK_INT >= 24) return CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_3;
    else                             return CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
  }

  /** Returns whichever of the two levels appears first in the capability ordering. */
  private static int smallerHardwareLevel(int levelA, int levelB) {
    int[] hardwareInfoOrdering = getHardwareInfoOrdering();
    for (int hwInfo : hardwareInfoOrdering) {
      if (levelA == hwInfo || levelB == hwInfo) return hwInfo;
    }

    return CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
  }

  /** Picks the capability ordering appropriate for this OS version. */
  private static int[] getHardwareInfoOrdering() {
    if      (Build.VERSION.SDK_INT >= 28) return CAMERA_HARDWARE_LEVEL_ORDERING_28;
    else if (Build.VERSION.SDK_INT >= 24) return CAMERA_HARDWARE_LEVEL_ORDERING_24;
    else                                  return CAMERA_HARDWARE_LEVEL_ORDERING;
  }

  /** Value holder for the output of {@link #toJpeg(ImageProxy, boolean)}. */
  public static class ImageResult {
    private final byte[] data;
    private final int    width;
    private final int    height;

    public ImageResult(@NonNull byte[] data, int width, int height) {
      this.data   = data;
      this.width  = width;
      this.height = height;
    }

    /** The encoded JPEG bytes. */
    public byte[] getData() {
      return data;
    }

    public int getWidth() {
      return width;
    }

    public int getHeight() {
      return height;
    }
  }
}

View File

@@ -6,7 +6,7 @@ import androidx.fragment.app.Fragment
import androidx.navigation.NavController
import org.signal.core.ui.permissions.Permissions
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.mediasend.camerax.CameraXUtil
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig
import org.thoughtcrime.securesms.util.navigation.safeNavigate
class MediaSelectionNavigator(
@@ -37,7 +37,7 @@ class MediaSelectionNavigator(
fun Fragment.requestPermissionsForCamera(
onGranted: () -> Unit
) {
if (CameraXUtil.isSupported()) {
if (CameraXRemoteConfig.isSupported()) {
onGranted()
} else {
Permissions.with(this)

View File

@@ -25,7 +25,7 @@ import org.thoughtcrime.securesms.components.recyclerview.GridDividerDecoration
import org.thoughtcrime.securesms.conversation.ManageContextMenu
import org.thoughtcrime.securesms.databinding.V2MediaGalleryFragmentBinding
import org.thoughtcrime.securesms.mediasend.MediaRepository
import org.thoughtcrime.securesms.mediasend.camerax.CameraXUtil
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig
import org.thoughtcrime.securesms.mediasend.v2.review.MediaGalleryGridItemTouchListener
import org.thoughtcrime.securesms.util.Material3OnScrollHelper
import org.thoughtcrime.securesms.util.SystemWindowInsetsSetter
@@ -101,7 +101,7 @@ class MediaGalleryFragment : Fragment(R.layout.v2_media_gallery_fragment) {
if (callbacks.isCameraEnabled()) {
binding.mediaGalleryToolbar.setOnMenuItemClickListener { item ->
if (item.itemId == R.id.action_camera) {
if (CameraXUtil.isSupported()) {
if (CameraXRemoteConfig.isSupported()) {
callbacks.onNavigateToCamera()
} else {
Permissions.with(this)

View File

@@ -16,7 +16,7 @@ import org.thoughtcrime.securesms.util.BitmapDecodingException;
import org.thoughtcrime.securesms.util.BitmapUtil;
import org.thoughtcrime.securesms.util.RemoteConfig;
import org.thoughtcrime.securesms.util.MediaUtil;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor;
import org.signal.core.util.MemoryFileDescriptor;
import org.thoughtcrime.securesms.video.TranscodingPreset;
import java.io.IOException;

View File

@@ -15,7 +15,7 @@ import org.signal.core.util.logging.Log;
import org.signal.qr.QrScannerView;
import org.signal.core.ui.logging.LoggingFragment;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXModelBlocklist;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig;
import org.thoughtcrime.securesms.payments.MobileCoinPublicAddress;
import org.signal.core.util.concurrent.LifecycleDisposable;
import org.thoughtcrime.securesms.util.navigation.SafeNavigation;
@@ -53,7 +53,7 @@ public final class PaymentsTransferQrScanFragment extends LoggingFragment {
Toolbar toolbar = view.findViewById(R.id.payments_transfer_scan_qr);
toolbar.setNavigationOnClickListener(v -> Navigation.findNavController(v).popBackStack());
scannerView.start(getViewLifecycleOwner(), CameraXModelBlocklist.isBlocklisted());
scannerView.start(getViewLifecycleOwner(), CameraXRemoteConfig.isBlocklisted());
lifecycleDisposable.bindTo(getViewLifecycleOwner());

View File

@@ -1,196 +0,0 @@
package org.thoughtcrime.securesms.util;
import android.app.ActivityManager;
import android.content.Context;
import android.os.ParcelFileDescriptor;
import androidx.annotation.NonNull;
import org.signal.core.util.logging.Log;
import java.io.Closeable;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.text.NumberFormat;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicLong;
/**
 * A RAM-backed file exposed through a {@link ParcelFileDescriptor}, created via a native
 * memfd (see {@code FileUtils.createMemoryFileDescriptor}). Tracks a global estimate of
 * all outstanding allocations, since memfd usage is not reflected in available RAM, and
 * zeroes its contents on {@link #close()}.
 */
public final class MemoryFileDescriptor implements Closeable {

  private static final String TAG = Log.tag(MemoryFileDescriptor.class);

  // Lazily-computed probe result; null until the first supported() call completes.
  private static Boolean supported;

  private final ParcelFileDescriptor parcelFileDescriptor;
  private final AtomicLong sizeEstimate;

  /**
   * Does this device support memory file descriptor.
   * Result is computed once by creating (and immediately closing) a probe descriptor.
   */
  public synchronized static boolean supported() {
    if (supported == null) {
      try {
        int fileDescriptor = FileUtils.createMemoryFileDescriptor("CHECK");

        if (fileDescriptor < 0) {
          supported = false;
          Log.w(TAG, "MemoryFileDescriptor is not available.");
        } else {
          supported = true;
          ParcelFileDescriptor.adoptFd(fileDescriptor).close();
        }
      } catch (IOException e) {
        Log.w(TAG, e);
      }
    }
    // NOTE(review): if createMemoryFileDescriptor throws IOException, 'supported' stays
    // null and this unboxing return throws NPE — verify that path is acceptable.
    return supported;
  }

  /**
   * memfd files do not show on the available RAM, so we must track our allocations in addition.
   * Guarded by synchronizing on MemoryFileDescriptor.class.
   */
  private static long sizeOfAllMemoryFileDescriptors;

  private MemoryFileDescriptor(@NonNull ParcelFileDescriptor parcelFileDescriptor, long sizeEstimate) {
    this.parcelFileDescriptor = parcelFileDescriptor;
    this.sizeEstimate = new AtomicLong(sizeEstimate);
  }

  /**
   * @param debugName The name supplied in name is used as a filename and will be displayed
   *                  as the target of the corresponding symbolic link in the directory
   *                  /proc/self/fd/. The displayed name is always prefixed with memfd:
   *                  and serves only for debugging purposes. Names do not affect the
   *                  behavior of the file descriptor, and as such multiple files can have
   *                  the same name without any side effects.
   * @param sizeEstimate An estimated upper bound on this file. This is used to check there will be
   *                     enough RAM available and to register with a global counter of reservations.
   *                     Use zero to avoid RAM check.
   * @return MemoryFileDescriptor
   * @throws MemoryLimitException If there is not enough available RAM to comfortably fit this file.
   * @throws MemoryFileCreationException If fails to create a memory file descriptor.
   */
  public static MemoryFileDescriptor newMemoryFileDescriptor(@NonNull Context context,
                                                             @NonNull String debugName,
                                                             long sizeEstimate)
      throws MemoryFileException
  {
    if (sizeEstimate < 0) throw new IllegalArgumentException();

    if (sizeEstimate > 0) {
      ActivityManager activityManager = ServiceUtil.getActivityManager(context);

      ActivityManager.MemoryInfo memoryInfo = new ActivityManager.MemoryInfo();

      synchronized (MemoryFileDescriptor.class) {
        activityManager.getMemoryInfo(memoryInfo);

        // Refuse the allocation if it would push the device below its low-memory threshold,
        // counting all other outstanding MemoryFileDescriptor reservations.
        long remainingRam = memoryInfo.availMem - memoryInfo.threshold - sizeEstimate - sizeOfAllMemoryFileDescriptors;

        if (remainingRam <= 0) {
          NumberFormat numberFormat = NumberFormat.getInstance(Locale.US);
          Log.w(TAG, String.format("Not enough RAM available without taking the system into a low memory state.%n" +
                  "Available: %s%n" +
                  "Low memory threshold: %s%n" +
                  "Requested: %s%n" +
                  "Total MemoryFileDescriptor limit: %s%n" +
                  "Shortfall: %s",
              numberFormat.format(memoryInfo.availMem),
              numberFormat.format(memoryInfo.threshold),
              numberFormat.format(sizeEstimate),
              numberFormat.format(sizeOfAllMemoryFileDescriptors),
              numberFormat.format(remainingRam)
          ));
          throw new MemoryLimitException();
        }

        sizeOfAllMemoryFileDescriptors += sizeEstimate;
      }
    }

    int fileDescriptor = FileUtils.createMemoryFileDescriptor(debugName);

    if (fileDescriptor < 0) {
      Log.w(TAG, "Failed to create file descriptor: " + fileDescriptor);
      throw new MemoryFileCreationException();
    }

    return new MemoryFileDescriptor(ParcelFileDescriptor.adoptFd(fileDescriptor), sizeEstimate);
  }

  /**
   * Zeroes the file contents and releases this descriptor's share of the global size
   * accounting, then closes the underlying descriptor regardless of clear success.
   */
  @Override
  public void close() throws IOException {
    try {
      clearAndRemoveAllocation();
    } catch (Exception e) {
      Log.w(TAG, "Failed to clear data in MemoryFileDescriptor", e);
    } finally {
      parcelFileDescriptor.close();
    }
  }

  private void clearAndRemoveAllocation() throws IOException {
    clear();

    // Return our reservation to the global pool exactly once.
    long oldEstimate = sizeEstimate.getAndSet(0);

    synchronized (MemoryFileDescriptor.class) {
      sizeOfAllMemoryFileDescriptors -= oldEstimate;
    }
  }

  /** Rewinds and clears all bytes. */
  private void clear() throws IOException {
    long size;

    try (FileInputStream fileInputStream = new FileInputStream(getFileDescriptor())) {
      FileChannel channel = fileInputStream.getChannel();

      size = channel.size();

      if (size == 0) return;

      channel.position(0);
    }

    // Overwrite the whole file with zeros in 16 KiB chunks.
    byte[] zeros = new byte[16 * 1024];

    try (FileOutputStream output = new FileOutputStream(getFileDescriptor())) {
      while (size > 0) {
        int limit = (int) Math.min(size, zeros.length);

        output.write(zeros, 0, limit);

        size -= limit;
      }
    }
  }

  /** The raw {@link FileDescriptor} for reading/writing this memory file. */
  public FileDescriptor getFileDescriptor() {
    return parcelFileDescriptor.getFileDescriptor();
  }

  public ParcelFileDescriptor getParcelFileDescriptor() {
    return parcelFileDescriptor;
  }

  /** Moves the file position to {@code position}. */
  public void seek(long position) throws IOException {
    try (FileInputStream fileInputStream = new FileInputStream(getFileDescriptor())) {
      fileInputStream.getChannel().position(position);
    }
  }

  /** Current size of the file in bytes. */
  public long size() throws IOException {
    try (FileInputStream fileInputStream = new FileInputStream(getFileDescriptor())) {
      return fileInputStream.getChannel().size();
    }
  }

  /** Base type for all failures creating a memory file descriptor. */
  public static class MemoryFileException extends IOException {
  }

  private static final class MemoryLimitException extends MemoryFileException {
  }

  private static final class MemoryFileCreationException extends MemoryFileException {
  }
}

View File

@@ -817,7 +817,7 @@ object RemoteConfig {
/** A comma-separated list of manufacturers that should *not* use CameraX mixed mode. */
val cameraXMixedModelBlocklist: String by remoteString(
key = "android.cameraXMixedModelBlockList.2",
key = "android.cameraXMixedModelBlockList.3",
defaultValue = "",
hotSwappable = false
)

View File

@@ -13,7 +13,7 @@ import org.signal.qr.QrScannerView
import org.signal.qr.kitkat.ScanListener
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.components.ShapeScrim
import org.thoughtcrime.securesms.mediasend.camerax.CameraXModelBlocklist
import org.thoughtcrime.securesms.mediasend.camerax.CameraXRemoteConfig
import org.thoughtcrime.securesms.util.ViewUtil
import org.thoughtcrime.securesms.util.fragments.findListener
@@ -41,7 +41,7 @@ class VerifyScanFragment : Fragment() {
ViewUtil.updateLayoutParams(cameraMarks, width, height)
}
cameraView.start(viewLifecycleOwner, CameraXModelBlocklist.isBlocklisted())
cameraView.start(viewLifecycleOwner, CameraXRemoteConfig.isBlocklisted())
lifecycleDisposable.bindTo(viewLifecycleOwner)

View File

@@ -14,25 +14,10 @@ public final class VideoUtil {
private VideoUtil() { }
public static Size getVideoRecordingSize() {
return isPortrait(screenSize())
? new Size(VideoConstants.VIDEO_SHORT_EDGE_HD, VideoConstants.VIDEO_LONG_EDGE_HD)
: new Size(VideoConstants.VIDEO_LONG_EDGE_HD, VideoConstants.VIDEO_SHORT_EDGE_HD);
}
public static int getMaxVideoRecordDurationInSeconds(@NonNull Context context, @NonNull MediaConstraints mediaConstraints) {
long allowedSize = mediaConstraints.getCompressedVideoMaxSize(context);
int duration = (int) Math.floor((float) allowedSize / VideoConstants.MAX_ALLOWED_BYTES_PER_SECOND);
return Math.min(duration, VideoConstants.VIDEO_MAX_RECORD_LENGTH_S);
}
private static Size screenSize() {
DisplayMetrics metrics = Resources.getSystem().getDisplayMetrics();
return new Size(metrics.widthPixels, metrics.heightPixels);
}
private static boolean isPortrait(Size size) {
return size.getWidth() < size.getHeight();
}
}

View File

@@ -0,0 +1,162 @@
package org.signal.core.util
import android.app.ActivityManager
import android.content.Context
import android.os.ParcelFileDescriptor
import org.signal.core.util.logging.Log
import org.thoughtcrime.securesms.util.FileUtils
import java.io.Closeable
import java.io.FileDescriptor
import java.io.FileInputStream
import java.io.FileOutputStream
import java.io.IOException
import java.text.NumberFormat
import java.util.Locale
import java.util.concurrent.atomic.AtomicLong
/**
 * A RAM-backed file exposed through a [ParcelFileDescriptor], created via a native memfd
 * (see [FileUtils.createMemoryFileDescriptor]). Tracks a global estimate of all outstanding
 * allocations, since memfd usage is not reflected in available RAM, and zeroes its contents
 * on [close].
 */
class MemoryFileDescriptor private constructor(
  private val parcelFileDescriptor: ParcelFileDescriptor,
  private val sizeEstimate: AtomicLong
) : Closeable {

  /** The raw [FileDescriptor] for reading/writing this memory file. */
  val fileDescriptor: FileDescriptor
    get() = parcelFileDescriptor.fileDescriptor

  /** The underlying [ParcelFileDescriptor], e.g. for handing to other APIs. */
  val parcelFd: ParcelFileDescriptor
    get() = parcelFileDescriptor

  /** Moves the file position to [position]. */
  @Throws(IOException::class)
  fun seek(position: Long) {
    FileInputStream(fileDescriptor).use { stream ->
      stream.channel.position(position)
    }
  }

  /** Current size of the file in bytes. */
  @Throws(IOException::class)
  fun size(): Long {
    FileInputStream(fileDescriptor).use { stream ->
      return stream.channel.size()
    }
  }

  /**
   * Zeroes the file contents and releases this descriptor's share of the global size
   * accounting, then closes the underlying descriptor regardless of clear success.
   */
  @Throws(IOException::class)
  override fun close() {
    try {
      clearAndRemoveAllocation()
    } catch (e: Exception) {
      Log.w(TAG, "Failed to clear data in MemoryFileDescriptor", e)
    } finally {
      parcelFileDescriptor.close()
    }
  }

  @Throws(IOException::class)
  private fun clearAndRemoveAllocation() {
    clear()

    // Return our reservation to the global pool exactly once.
    val oldEstimate = sizeEstimate.getAndSet(0)
    synchronized(MemoryFileDescriptor::class.java) {
      sizeOfAllMemoryFileDescriptors -= oldEstimate
    }
  }

  /** Rewinds and overwrites all bytes with zeros, in 16 KiB chunks. */
  @Throws(IOException::class)
  private fun clear() {
    val size: Long
    FileInputStream(fileDescriptor).use { stream ->
      val channel = stream.channel
      size = channel.size()
      if (size == 0L) return
      channel.position(0)
    }

    val zeros = ByteArray(16 * 1024)
    var remaining = size
    FileOutputStream(fileDescriptor).use { output ->
      while (remaining > 0) {
        val limit = remaining.coerceAtMost(zeros.size.toLong()).toInt()
        output.write(zeros, 0, limit)
        remaining -= limit
      }
    }
  }

  /** Base type for all failures creating a memory file descriptor. */
  open class MemoryFileException : IOException()

  private class MemoryLimitException : MemoryFileException()

  private class MemoryFileCreationException : MemoryFileException()

  companion object {
    private val TAG = Log.tag(MemoryFileDescriptor::class.java)

    /**
     * memfd files do not show on the available RAM, so we must track our allocations in
     * addition. Guarded by synchronizing on MemoryFileDescriptor::class.java.
     */
    private var sizeOfAllMemoryFileDescriptors: Long = 0

    /** Cached result of [supported] — probing allocates a real fd, so only do it once. */
    private var supportedResult: Boolean? = null

    /**
     * Does this device support memory file descriptors? The probe creates (and immediately
     * closes) a descriptor; the result is computed once and cached.
     */
    @JvmStatic
    @Synchronized
    fun supported(): Boolean {
      supportedResult?.let { return it }

      val result = try {
        val fd = FileUtils.createMemoryFileDescriptor("CHECK")
        if (fd < 0) {
          Log.w(TAG, "MemoryFileDescriptor is not available.")
          false
        } else {
          ParcelFileDescriptor.adoptFd(fd).close()
          true
        }
      } catch (e: IOException) {
        Log.w(TAG, e)
        false
      }

      supportedResult = result
      return result
    }

    /**
     * @param debugName Used as the memfd filename, displayed (prefixed with `memfd:`) as the
     *                  target of the symbolic link in /proc/self/fd/. Debugging only; names
     *                  need not be unique and do not affect behavior.
     * @param sizeEstimate Estimated upper bound for this file, used for the RAM availability
     *                     check and registered with the global reservation counter. Use zero
     *                     to skip the RAM check.
     * @throws MemoryFileException if there is not enough RAM to comfortably fit this file, or
     *                             if the native descriptor cannot be created.
     */
    @JvmStatic
    @Throws(MemoryFileException::class)
    fun newMemoryFileDescriptor(context: Context, debugName: String, sizeEstimate: Long): MemoryFileDescriptor {
      require(sizeEstimate >= 0)

      if (sizeEstimate > 0) {
        val activityManager = context.getSystemService(Context.ACTIVITY_SERVICE) as ActivityManager
        val memoryInfo = ActivityManager.MemoryInfo()

        synchronized(MemoryFileDescriptor::class.java) {
          activityManager.getMemoryInfo(memoryInfo)

          // Refuse the allocation if it would push the device below its low-memory
          // threshold, counting all other outstanding reservations.
          val remainingRam = memoryInfo.availMem - memoryInfo.threshold - sizeEstimate - sizeOfAllMemoryFileDescriptors
          if (remainingRam <= 0) {
            val numberFormat = NumberFormat.getInstance(Locale.US)
            Log.w(
              TAG,
              String.format(
                "Not enough RAM available without taking the system into a low memory state.%n" +
                  "Available: %s%n" +
                  "Low memory threshold: %s%n" +
                  "Requested: %s%n" +
                  "Total MemoryFileDescriptor limit: %s%n" +
                  "Shortfall: %s",
                numberFormat.format(memoryInfo.availMem),
                numberFormat.format(memoryInfo.threshold),
                numberFormat.format(sizeEstimate),
                numberFormat.format(sizeOfAllMemoryFileDescriptors),
                numberFormat.format(remainingRam)
              )
            )
            throw MemoryLimitException()
          }

          sizeOfAllMemoryFileDescriptors += sizeEstimate
        }
      }

      val fd = FileUtils.createMemoryFileDescriptor(debugName)
      if (fd < 0) {
        Log.w(TAG, "Failed to create file descriptor: $fd")
        throw MemoryFileCreationException()
      }

      return MemoryFileDescriptor(ParcelFileDescriptor.adoptFd(fd), AtomicLong(sizeEstimate))
    }
  }
}

View File

@@ -17,7 +17,7 @@ public final class FileUtils {
public static native int getFileDescriptorOwner(FileDescriptor fileDescriptor);
static native int createMemoryFileDescriptor(String name);
public static native int createMemoryFileDescriptor(String name);
public static byte[] getFileDigest(FileInputStream fin) throws IOException {
try {

View File

@@ -21,7 +21,7 @@ dependencies {
lintChecks(project(":lintchecks"))
// Signal Core
implementation(project(":core:util-jvm"))
implementation(project(":core:util"))
implementation(project(":core:ui"))
implementation(project(":lib:glide"))

View File

@@ -0,0 +1,12 @@
/*
* Copyright 2026 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.signal.camera
/**
 * Which capture use cases the camera screen should attempt to bind.
 */
enum class CameraCaptureMode {
  /** Image and video capture are bound at the same time. */
  ImageAndVideoSimultaneous,

  /**
   * Both image and video are available, but only one can be bound at a time — video capture
   * is rebound on demand.
   */
  ImageAndVideoExclusive,

  /** Only still image capture is bound. */
  ImageOnly
}

View File

@@ -76,7 +76,7 @@ fun CameraScreen(
modifier: Modifier = Modifier,
roundCorners: Boolean = true,
contentAlignment: Alignment = Alignment.Center,
enableVideoCapture: Boolean = true,
captureMode: CameraCaptureMode = CameraCaptureMode.ImageAndVideoSimultaneous,
enableQrScanning: Boolean = false,
content: @Composable BoxScope.() -> Unit = {}
) {
@@ -106,7 +106,7 @@ fun CameraScreen(
cameraProvider = cameraProvider,
surfaceProvider = surfaceProvider,
context = context,
enableVideoCapture = enableVideoCapture,
captureMode = captureMode,
enableQrScanning = enableQrScanning
)
)

View File

@@ -14,7 +14,7 @@ sealed interface CameraScreenEvents {
val cameraProvider: ProcessCameraProvider,
val surfaceProvider: Preview.SurfaceProvider,
val context: Context,
val enableVideoCapture: Boolean = true,
val captureMode: CameraCaptureMode = CameraCaptureMode.ImageAndVideoSimultaneous,
val enableQrScanning: Boolean = false
) : CameraScreenEvents

View File

@@ -87,7 +87,7 @@ class CameraScreenViewModel : ViewModel() {
private var imageCapture: ImageCapture? = null
private var videoCapture: VideoCapture<Recorder>? = null
private var recording: Recording? = null
private var isLimitedBinding: Boolean = false
private var captureMode: CameraCaptureMode = CameraCaptureMode.ImageOnly
private var brightnessBeforeFlash: Float = WindowManager.LayoutParams.BRIGHTNESS_OVERRIDE_NONE
private var brightnessWindow: WeakReference<Window>? = null
private var orientationListener: OrientationEventListener? = null
@@ -109,6 +109,8 @@ class CameraScreenViewModel : ViewModel() {
}
fun onEvent(event: CameraScreenEvents) {
logEvent(event)
val currentState = _state.value
when (event) {
is CameraScreenEvents.BindCamera -> {
@@ -138,6 +140,19 @@ class CameraScreenViewModel : ViewModel() {
}
}
private fun logEvent(event: CameraScreenEvents) {
when (event) {
is CameraScreenEvents.BindCamera -> Log.d(TAG, "[Event] BindCamera(captureMode=${event.captureMode}, enableQrScanning=${event.enableQrScanning})")
is CameraScreenEvents.TapToFocus -> Log.d(TAG, "[Event] TapToFocus(view=${event.viewX},${event.viewY}, surface=${event.surfaceX},${event.surfaceY})")
is CameraScreenEvents.PinchZoom -> Log.d(TAG, "[Event] PinchZoom(factor=${event.zoomFactor})")
is CameraScreenEvents.LinearZoom -> Log.d(TAG, "[Event] LinearZoom(${event.linearZoom})")
is CameraScreenEvents.SwitchCamera -> Log.d(TAG, "[Event] SwitchCamera")
is CameraScreenEvents.SetFlashMode -> Log.d(TAG, "[Event] SetFlashMode(${event.flashMode})")
is CameraScreenEvents.NextFlashMode -> Log.d(TAG, "[Event] NextFlashMode")
is CameraScreenEvents.ClearCaptureError -> Log.d(TAG, "[Event] ClearCaptureError")
}
}
/**
* Capture a photo.
* If using front camera with flash enabled but no hardware flash available,
@@ -240,7 +255,7 @@ class CameraScreenViewModel : ViewModel() {
output: VideoOutput,
onVideoCaptured: (VideoCaptureResult) -> Unit
) {
val capture = if (isLimitedBinding) rebindForVideoCapture() ?: return else videoCapture ?: return
val capture = videoCapture ?: rebindForVideoCapture() ?: return
recordingStartZoomRatio = _state.value.zoomRatio
@@ -314,7 +329,7 @@ class CameraScreenViewModel : ViewModel() {
// Clear recording
recording = null
if (isLimitedBinding) {
if (captureMode == CameraCaptureMode.ImageAndVideoExclusive) {
rebindToLastSuccessfulAttempt()
}
}
@@ -452,7 +467,7 @@ class CameraScreenViewModel : ViewModel() {
lastSuccessfulAttempt = attempt
imageCapture = attempt.imageCapture
videoCapture = attempt.videoCapture
isLimitedBinding = event.enableVideoCapture && attempt.videoCapture == null
captureMode = event.captureMode
} catch (e: Exception) {
Log.e(TAG, "Use case binding failed (attempt ${index + 1} of ${bindingAttempts.size})", e)
continue
@@ -499,7 +514,7 @@ class CameraScreenViewModel : ViewModel() {
.setResolutionSelector(resolutionSelector)
.build()
val videoCapture: VideoCapture<Recorder>? = if (event.enableVideoCapture && !FORCE_LIMITED_BINDING) {
val videoCapture: VideoCapture<Recorder>? = if (event.captureMode == CameraCaptureMode.ImageAndVideoSimultaneous && !FORCE_LIMITED_BINDING) {
buildVideoCapture()
} else {
null

View File

@@ -0,0 +1,113 @@
/*
* Copyright 2026 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.signal.camera
import android.annotation.SuppressLint
import android.content.Context
import android.hardware.camera2.CameraAccessException
import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraMetadata
import android.os.Build
import androidx.annotation.RequiresApi
import androidx.camera.camera2.internal.compat.CameraManagerCompat
import org.signal.core.util.MemoryFileDescriptor
import org.signal.core.util.logging.Log
object CameraXUtil {
private val TAG = Log.tag(CameraXUtil::class.java)
private const val VIDEO_DEBUG_LABEL = "video-capture"
private const val VIDEO_SIZE = 10L * 1024 * 1024
@Throws(MemoryFileDescriptor.MemoryFileException::class)
fun createVideoFileDescriptor(context: Context): MemoryFileDescriptor {
return MemoryFileDescriptor.newMemoryFileDescriptor(context, VIDEO_DEBUG_LABEL, VIDEO_SIZE)
}
private val CAMERA_HARDWARE_LEVEL_ORDERING = intArrayOf(
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL
)
@RequiresApi(24)
private val CAMERA_HARDWARE_LEVEL_ORDERING_24 = intArrayOf(
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_3
)
@RequiresApi(28)
private val CAMERA_HARDWARE_LEVEL_ORDERING_28 = intArrayOf(
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL,
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_3
)
fun isMixedModeSupported(context: Context): Boolean {
return getLowestSupportedHardwareLevel(context) != CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
}
fun getLowestSupportedHardwareLevel(context: Context): Int {
@SuppressLint("RestrictedApi")
val cameraManager = CameraManagerCompat.from(context.applicationContext).unwrap()
try {
var supported = maxHardwareLevel()
for (cameraId in cameraManager.cameraIdList) {
var hwLevel: Int? = null
try {
hwLevel = cameraManager.getCameraCharacteristics(cameraId).get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
} catch (_: NullPointerException) {
// redmi device crash, assume lowest
}
if (hwLevel == null || hwLevel == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
}
supported = smallerHardwareLevel(supported, hwLevel)
}
return supported
} catch (e: CameraAccessException) {
Log.w(TAG, "Failed to enumerate cameras", e)
return CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
}
}
/**
 * The most capable hardware level this OS version can report:
 * LEVEL_3 on API 24+, FULL before that.
 */
private fun maxHardwareLevel(): Int =
  if (Build.VERSION.SDK_INT >= 24) {
    CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_3
  } else {
    CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL
  }
/**
 * Returns whichever of the two hardware levels appears first in the
 * capability ordering (i.e. the less capable one). Unrecognized levels
 * collapse to LEGACY.
 */
private fun smallerHardwareLevel(levelA: Int, levelB: Int): Int {
  val ordering = getHardwareInfoOrdering()
  return ordering.firstOrNull { it == levelA || it == levelB }
    ?: CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
}
/** Picks the hardware-level ordering that matches the levels this OS version can report. */
private fun getHardwareInfoOrdering(): IntArray {
  val sdk = Build.VERSION.SDK_INT
  return if (sdk >= 28) {
    CAMERA_HARDWARE_LEVEL_ORDERING_28
  } else if (sdk >= 24) {
    CAMERA_HARDWARE_LEVEL_ORDERING_24
  } else {
    CAMERA_HARDWARE_LEVEL_ORDERING
  }
}
}

View File

@@ -86,7 +86,7 @@ class CameraScreenViewModelTest {
// ===========================================================================
private fun bindCamera(
enableVideoCapture: Boolean = true,
captureMode: CameraCaptureMode = CameraCaptureMode.ImageAndVideoSimultaneous,
enableQrScanning: Boolean = false
) = viewModel.onEvent(
CameraScreenEvents.BindCamera(
@@ -94,7 +94,7 @@ class CameraScreenViewModelTest {
cameraProvider = mockCameraProvider,
surfaceProvider = mockSurfaceProvider,
context = RuntimeEnvironment.getApplication(),
enableVideoCapture = enableVideoCapture,
captureMode = captureMode,
enableQrScanning = enableQrScanning
)
)
@@ -145,7 +145,7 @@ class CameraScreenViewModelTest {
fun `binding with all use cases binds video and QR on the first attempt`() {
val attempts = captureBindingAttempts()
bindCamera(enableVideoCapture = true, enableQrScanning = true)
bindCamera(captureMode = CameraCaptureMode.ImageAndVideoSimultaneous, enableQrScanning = true)
assertThat(attempts.size).isEqualTo(1)
assertThat(attempts[0].hasVideoCapture()).isTrue()
@@ -156,7 +156,7 @@ class CameraScreenViewModelTest {
fun `binding with no optional use cases binds only preview and image capture`() {
val attempts = captureBindingAttempts(failCount = 0)
bindCamera(enableVideoCapture = false, enableQrScanning = false)
bindCamera(captureMode = CameraCaptureMode.ImageOnly, enableQrScanning = false)
assertThat(attempts.size).isEqualTo(1)
assertThat(attempts[0].hasVideoCapture()).isFalse()
@@ -171,7 +171,7 @@ class CameraScreenViewModelTest {
fun `when first attempt fails with video and QR, second attempt drops video but keeps QR`() {
val attempts = captureBindingAttempts(failCount = 1)
bindCamera(enableVideoCapture = true, enableQrScanning = true)
bindCamera(captureMode = CameraCaptureMode.ImageAndVideoSimultaneous, enableQrScanning = true)
assertThat(attempts.size).isEqualTo(2)
assertThat(attempts[0].hasVideoCapture()).isTrue()
@@ -184,7 +184,7 @@ class CameraScreenViewModelTest {
fun `when first two attempts fail with video and QR, third attempt drops both`() {
val attempts = captureBindingAttempts(failCount = 2)
bindCamera(enableVideoCapture = true, enableQrScanning = true)
bindCamera(captureMode = CameraCaptureMode.ImageAndVideoSimultaneous, enableQrScanning = true)
assertThat(attempts.size).isEqualTo(3)
assertThat(attempts[2].hasVideoCapture()).isFalse()
@@ -195,7 +195,7 @@ class CameraScreenViewModelTest {
fun `when all attempts fail, all three use case combinations are tried`() {
val attempts = captureBindingAttempts(failCount = Int.MAX_VALUE)
bindCamera(enableVideoCapture = true, enableQrScanning = true)
bindCamera(captureMode = CameraCaptureMode.ImageAndVideoSimultaneous, enableQrScanning = true)
assertThat(attempts.size).isEqualTo(3)
}
@@ -204,7 +204,7 @@ class CameraScreenViewModelTest {
fun `with only video requested, fallback drops video and nothing else`() {
val attempts = captureBindingAttempts(failCount = Int.MAX_VALUE)
bindCamera(enableVideoCapture = true, enableQrScanning = false)
bindCamera(captureMode = CameraCaptureMode.ImageAndVideoSimultaneous, enableQrScanning = false)
assertThat(attempts.size).isEqualTo(2)
assertThat(attempts[0].hasVideoCapture()).isTrue()
@@ -216,7 +216,7 @@ class CameraScreenViewModelTest {
fun `with only QR requested, fallback drops QR and nothing else`() {
val attempts = captureBindingAttempts(failCount = Int.MAX_VALUE)
bindCamera(enableVideoCapture = false, enableQrScanning = true)
bindCamera(captureMode = CameraCaptureMode.ImageOnly, enableQrScanning = true)
assertThat(attempts.size).isEqualTo(2)
assertThat(attempts[0].hasImageAnalysis()).isTrue()
@@ -228,7 +228,7 @@ class CameraScreenViewModelTest {
fun `each failed binding attempt calls unbindAll before retrying`() {
captureBindingAttempts(failCount = 2)
bindCamera(enableVideoCapture = true, enableQrScanning = true)
bindCamera(captureMode = CameraCaptureMode.ImageAndVideoSimultaneous, enableQrScanning = true)
// unbindAll called once before each of the 3 attempts
verify(exactly = 3) { mockCameraProvider.unbindAll() }
@@ -242,7 +242,7 @@ class CameraScreenViewModelTest {
fun `when video was dropped during initial binding, startRecording rebinds with video`() {
// Initial bind: first attempt (with video) fails, second (without) succeeds → limited mode
captureBindingAttempts(failCount = 1)
bindCamera(enableVideoCapture = true, enableQrScanning = false)
bindCamera(captureMode = CameraCaptureMode.ImageAndVideoSimultaneous, enableQrScanning = false)
val postInitAttempts = captureBindingAttempts()
@@ -259,7 +259,7 @@ class CameraScreenViewModelTest {
@Test
fun `in normal binding mode, startRecording does not rebind`() {
captureBindingAttempts()
bindCamera(enableVideoCapture = true, enableQrScanning = false)
bindCamera(captureMode = CameraCaptureMode.ImageAndVideoSimultaneous, enableQrScanning = false)
val postInitAttempts = captureBindingAttempts()
@@ -275,7 +275,7 @@ class CameraScreenViewModelTest {
@Test
fun `when the video rebind fails, restores the last successful use case set`() {
captureBindingAttempts(failCount = 1)
bindCamera(enableVideoCapture = true, enableQrScanning = false)
bindCamera(captureMode = CameraCaptureMode.ImageAndVideoSimultaneous, enableQrScanning = false)
// Both the failed video rebind and the restore attempt are captured here
val postInitAttempts = captureBindingAttempts(failCount = Int.MAX_VALUE)