Rebuild CameraXFragment to use a brand new camera.

This commit is contained in:
Greyson Parrelli
2026-01-28 16:02:51 -05:00
parent 0c102b061c
commit f53ae66fc9
71 changed files with 5232 additions and 678 deletions

View File

@@ -87,22 +87,25 @@ enum class CameraDisplay(
toggleBottomMargin = 54
);
@JvmOverloads
@Px
fun getCameraCaptureMarginBottom(resources: Resources): Int {
val positionInfo = if (Stories.isFeatureEnabled()) withTogglePositionInfo else withoutTogglePositionInfo
fun getCameraCaptureMarginBottom(resources: Resources, storiesEnabled: Boolean = Stories.isFeatureEnabled()): Int {
val positionInfo = if (storiesEnabled) withTogglePositionInfo else withoutTogglePositionInfo
return positionInfo.cameraCaptureMarginBottomDp.dp - getCameraButtonSizeOffset(resources)
}
@JvmOverloads
@Px
fun getCameraViewportMarginBottom(): Int {
val positionInfo = if (Stories.isFeatureEnabled()) withTogglePositionInfo else withoutTogglePositionInfo
fun getCameraViewportMarginBottom(storiesEnabled: Boolean = Stories.isFeatureEnabled()): Int {
val positionInfo = if (storiesEnabled) withTogglePositionInfo else withoutTogglePositionInfo
return positionInfo.cameraViewportMarginBottomDp.dp
}
fun getCameraViewportGravity(): CameraViewportGravity {
val positionInfo = if (Stories.isFeatureEnabled()) withTogglePositionInfo else withoutTogglePositionInfo
@JvmOverloads
fun getCameraViewportGravity(storiesEnabled: Boolean = Stories.isFeatureEnabled()): CameraViewportGravity {
val positionInfo = if (storiesEnabled) withTogglePositionInfo else withoutTogglePositionInfo
return positionInfo.cameraViewportGravity
}

View File

@@ -1,669 +0,0 @@
package org.thoughtcrime.securesms.mediasend;
import android.Manifest;
import android.animation.Animator;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Color;
import android.os.Build;
import android.os.Bundle;
import android.view.GestureDetector;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.RotateAnimation;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.AspectRatio;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureException;
import androidx.camera.core.ImageProxy;
import androidx.camera.video.FallbackStrategy;
import androidx.camera.video.Quality;
import androidx.camera.video.QualitySelector;
import androidx.camera.view.CameraController;
import androidx.camera.view.LifecycleCameraController;
import androidx.camera.view.PreviewView;
import androidx.constraintlayout.widget.ConstraintLayout;
import androidx.constraintlayout.widget.ConstraintSet;
import androidx.core.content.ContextCompat;
import com.bumptech.glide.Glide;
import com.google.android.material.button.MaterialButton;
import com.google.android.material.card.MaterialCardView;
import com.google.common.util.concurrent.ListenableFuture;
import org.signal.core.util.Stopwatch;
import org.signal.core.util.concurrent.SimpleTask;
import org.signal.core.util.logging.Log;
import org.signal.core.models.media.Media;
import org.signal.qr.QrProcessor;
import org.thoughtcrime.securesms.LoggingFragment;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.animation.AnimationCompleteListener;
import org.thoughtcrime.securesms.components.TooltipPopup;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXFlashToggleView;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXModePolicy;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXUtil;
import org.thoughtcrime.securesms.mediasend.v2.MediaAnimations;
import org.thoughtcrime.securesms.mediasend.v2.MediaCountIndicatorButton;
import org.thoughtcrime.securesms.mms.DecryptableUri;
import org.thoughtcrime.securesms.mms.MediaConstraints;
import org.thoughtcrime.securesms.permissions.Permissions;
import org.thoughtcrime.securesms.util.BottomSheetUtil;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor;
import org.thoughtcrime.securesms.util.TextSecurePreferences;
import org.thoughtcrime.securesms.util.ViewUtil;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.FileDescriptor;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import io.reactivex.rxjava3.android.schedulers.AndroidSchedulers;
import io.reactivex.rxjava3.disposables.Disposable;
import static org.thoughtcrime.securesms.permissions.PermissionDeniedBottomSheet.showPermissionFragment;
/**
* Camera capture implemented using the CameraX SDK, which uses Camera2 under the hood. Should be
* preferred whenever possible.
*/
public class CameraXFragment extends LoggingFragment implements CameraFragment {
private static final String TAG = Log.tag(CameraXFragment.class);
// Fragment argument keys (see newInstance factories).
private static final String IS_VIDEO_ENABLED = "is_video_enabled";
private static final String IS_QR_SCAN_ENABLED = "is_qr_scan_enabled";
// Single scale type applied everywhere the preview is (re)configured.
private static final PreviewView.ScaleType PREVIEW_SCALE_TYPE = PreviewView.ScaleType.FILL_CENTER;
// Views bound in onViewCreated().
private PreviewView previewView;
private MaterialCardView cameraParent;
private ViewGroup controlsContainer;
// Host callback interface, resolved in onAttach() from activity or parent fragment.
private Controller controller;
private View selfieFlash;
// In-memory destination for video recordings; closed in onDestroyView().
private MemoryFileDescriptor videoFileDescriptor;
private LifecycleCameraController cameraController;
// Starts as a disposed placeholder so dispose() is always safe to call.
private Disposable mostRecentItemDisposable = Disposable.disposed();
private CameraXModePolicy cameraXModePolicy;
private CameraScreenBrightnessController cameraScreenBrightnessController;
private boolean isMediaSelected;
// Permission-rationale UI shown when camera access is missing.
private View missingPermissionsContainer;
private TextView missingPermissionsText;
private MaterialButton allowAccessButton;
// QR frames are analyzed off the main thread on this dedicated executor.
private final Executor qrAnalysisExecutor = Executors.newSingleThreadExecutor();
private final QrProcessor qrProcessor = new QrProcessor();
// Factory for avatar capture: stills only — video and QR scanning are both disabled.
public static CameraXFragment newInstanceForAvatarCapture() {
  Bundle arguments = new Bundle();
  arguments.putBoolean(IS_VIDEO_ENABLED, false);
  arguments.putBoolean(IS_QR_SCAN_ENABLED, false);
  CameraXFragment instance = new CameraXFragment();
  instance.setArguments(arguments);
  return instance;
}
// General-purpose factory; video stays enabled via the argument's default (true).
public static CameraXFragment newInstance(boolean qrScanEnabled) {
  Bundle arguments = new Bundle();
  arguments.putBoolean(IS_QR_SCAN_ENABLED, qrScanEnabled);
  CameraXFragment instance = new CameraXFragment();
  instance.setArguments(arguments);
  return instance;
}
@Override
public void onAttach(@NonNull Context context) {
  super.onAttach(context);
  // Prefer the hosting activity as the controller, falling back to the parent fragment.
  Object host = getActivity() instanceof Controller ? getActivity() : getParentFragment();
  if (host instanceof Controller) {
    this.controller = (Controller) host;
  } else {
    throw new IllegalStateException("Parent must implement controller interface.");
  }
}
@Override
public @Nullable View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
  // The layout is static; all dynamic wiring happens in onViewCreated().
  final View root = inflater.inflate(R.layout.camerax_fragment, container, false);
  return root;
}
@SuppressLint("MissingPermission")
@Override
public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
// Entry point for all camera wiring: binds views, constructs and configures the
// LifecycleCameraController, positions controls, and optionally attaches a QR analyzer.
this.cameraParent = view.findViewById(R.id.camerax_camera_parent);
this.previewView = view.findViewById(R.id.camerax_camera);
this.controlsContainer = view.findViewById(R.id.camerax_controls_container);
// Mode policy decides which use-cases (image/video) get bound, based on constraints + args.
this.cameraXModePolicy = CameraXModePolicy.acquire(requireContext(),
controller.getMediaConstraints(),
requireArguments().getBoolean(IS_VIDEO_ENABLED, true),
requireArguments().getBoolean(IS_QR_SCAN_ENABLED, false));
this.missingPermissionsContainer = view.findViewById(R.id.missing_permissions_container);
this.missingPermissionsText = view.findViewById(R.id.missing_permissions_text);
this.allowAccessButton = view.findViewById(R.id.allow_access_button);
checkPermissions(requireArguments().getBoolean(IS_VIDEO_ENABLED, true));
Log.d(TAG, "Starting CameraX with mode policy " + cameraXModePolicy.getClass().getSimpleName());
previewView.setScaleType(PREVIEW_SCALE_TYPE);
final LifecycleCameraController lifecycleCameraController = new LifecycleCameraController(requireContext());
cameraController = lifecycleCameraController;
lifecycleCameraController.bindToLifecycle(getViewLifecycleOwner());
// Restore whichever camera direction (front/back) the user last used.
lifecycleCameraController.setCameraSelector(CameraXUtil.toCameraSelector(TextSecurePreferences.getDirectCaptureCameraId(requireContext())));
lifecycleCameraController.setTapToFocusEnabled(true);
lifecycleCameraController.setImageCaptureMode(CameraXUtil.getOptimalCaptureMode());
// Prefer HD video, falling back to lower qualities on constrained hardware.
lifecycleCameraController.setVideoCaptureQualitySelector(QualitySelector.from(Quality.HD, FallbackStrategy.lowerQualityThan(Quality.HD)));
previewView.setController(lifecycleCameraController);
cameraXModePolicy.initialize(lifecycleCameraController);
cameraScreenBrightnessController = new CameraScreenBrightnessController(
requireActivity().getWindow(),
new CameraStateProvider(lifecycleCameraController)
);
// NOTE(review): scale type was already applied above — this second call is redundant but harmless.
previewView.setScaleType(PREVIEW_SCALE_TYPE);
lifecycleCameraController.setImageCaptureTargetSize(new CameraController.OutputSize(AspectRatio.RATIO_16_9));
controlsContainer.removeAllViews();
controlsContainer.addView(LayoutInflater.from(getContext()).inflate(R.layout.camera_controls_portrait, controlsContainer, false));
initControls(lifecycleCameraController);
if (requireArguments().getBoolean(IS_QR_SCAN_ENABLED, false)) {
// QR frames are analyzed on a background executor; try-with-resources closes each frame proxy.
lifecycleCameraController.setImageAnalysisAnalyzer(qrAnalysisExecutor, imageProxy -> {
try (imageProxy) {
String data = qrProcessor.getScannedData(imageProxy);
if (data != null) {
controller.onQrCodeFound(data);
}
}
});
}
}
@Override
public void onResume() {
  super.onResume();
  // Re-bind in case the controller was unbound while the fragment was paused.
  cameraController.bindToLifecycle(getViewLifecycleOwner());
  Log.d(TAG, "Camera init complete from onResume");
  // Camera UI is portrait-only while in the foreground; restored in onDestroyView().
  requireActivity().setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
  boolean cameraGranted = hasCameraPermission();
  if (cameraGranted) {
    missingPermissionsContainer.setVisibility(View.GONE);
  }
}
@Override
public void onDestroyView() {
super.onDestroyView();
// Release the recent-item subscription and the in-memory video sink, then allow rotation again.
mostRecentItemDisposable.dispose();
closeVideoFileDescriptor();
requireActivity().setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
}
@Override
public void fadeOutControls(@NonNull Runnable onEndAction) {
  // Block input for the duration of the fade, then restore it and notify the caller.
  controlsContainer.setEnabled(false);
  controlsContainer.animate()
                   .alpha(0f)
                   .setDuration(250)
                   .setInterpolator(MediaAnimations.getInterpolator())
                   .setListener(new AnimationCompleteListener() {
                     @Override
                     public void onAnimationEnd(Animator animation) {
                       controlsContainer.setEnabled(true);
                       onEndAction.run();
                     }
                   });
}
@Override
public void fadeInControls() {
  // Mirror of fadeOutControls(): input is blocked while fading back in.
  controlsContainer.setEnabled(false);
  controlsContainer.animate()
                   .alpha(1f)
                   .setDuration(250)
                   .setInterpolator(MediaAnimations.getInterpolator())
                   .setListener(new AnimationCompleteListener() {
                     @Override
                     public void onAnimationEnd(Animator animation) {
                       controlsContainer.setEnabled(true);
                     }
                   });
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
// Delegates to the shared Permissions helper, which dispatches to the callbacks registered in requestPermissions().
Permissions.onRequestPermissionsResult(this, requestCode, permissions, grantResults);
}
// Shows or hides the missing-permissions overlay, choosing wording based on
// whether microphone access is also needed for video capture.
private void checkPermissions(boolean includeAudio) {
  if (hasCameraPermission()) {
    missingPermissionsContainer.setVisibility(View.GONE);
    return;
  }
  missingPermissionsContainer.setVisibility(View.VISIBLE);
  boolean needsAudio = includeAudio && !Permissions.hasAll(requireContext(), Manifest.permission.RECORD_AUDIO);
  int textResId = needsAudio ? R.string.CameraXFragment_to_capture_photos_and_video_allow_camera_microphone
                             : R.string.CameraXFragment_to_capture_photos_and_video_allow_camera;
  missingPermissionsText.setText(textResId);
  allowAccessButton.setOnClickListener(v -> requestPermissions(includeAudio));
}
// Launches the runtime permission flow. With audio, partial grants are tolerated
// (camera alone is enough to hide the overlay); without audio, only CAMERA is requested.
private void requestPermissions(boolean includeAudio) {
if (includeAudio) {
Permissions.with(this)
.request(Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO)
.ifNecessary()
.onSomeGranted(permissions -> {
// Camera alone is sufficient to start previewing; audio only matters for recording.
if (permissions.contains(Manifest.permission.CAMERA)) {
missingPermissionsContainer.setVisibility(View.GONE);
}
})
.onSomePermanentlyDenied(deniedPermissions -> {
// Permanently denied: route the user to settings via the bottom sheet, with wording
// matched to which permissions are blocked.
if (deniedPermissions.containsAll(List.of(Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO))) {
showPermissionFragment(R.string.CameraXFragment_allow_access_camera_microphone, R.string.CameraXFragment_to_capture_photos_videos, false).show(getParentFragmentManager(), BottomSheetUtil.STANDARD_BOTTOM_SHEET_FRAGMENT_TAG);
} else if (deniedPermissions.contains(Manifest.permission.CAMERA)) {
showPermissionFragment(R.string.CameraXFragment_allow_access_camera, R.string.CameraXFragment_to_capture_photos_videos, false).show(getParentFragmentManager(), BottomSheetUtil.STANDARD_BOTTOM_SHEET_FRAGMENT_TAG);
}
})
.onSomeDenied(deniedPermissions -> {
// Soft denial of camera: a toast is enough; the overlay stays up.
if (deniedPermissions.contains(Manifest.permission.CAMERA)) {
Toast.makeText(requireContext(), R.string.CameraXFragment_signal_needs_camera_access_capture_photos, Toast.LENGTH_LONG).show();
}
})
.execute();
} else {
Permissions.with(this)
.request(Manifest.permission.CAMERA)
.ifNecessary()
.onAllGranted (() -> missingPermissionsContainer.setVisibility(View.GONE))
.onAnyDenied(() -> Toast.makeText(requireContext(), R.string.CameraXFragment_signal_needs_camera_access_capture_photos, Toast.LENGTH_LONG).show())
.withPermanentDenialDialog(getString(R.string.CameraXFragment_signal_needs_camera_access_capture_photos), null, R.string.CameraXFragment_allow_access_camera, R.string.CameraXFragment_to_capture_photos, getParentFragmentManager())
.execute();
}
}
// True when the CAMERA runtime permission is currently granted.
private boolean hasCameraPermission() {
return Permissions.hasAll(requireContext(), Manifest.permission.CAMERA);
}
// Shows the newest gallery item inside the gallery button, or a generic album glyph
// (white, center-inside) when there is no recent media.
private void presentRecentItemThumbnail(@Nullable Media media) {
  View thumbBackground = controlsContainer.findViewById(R.id.camera_gallery_button_background);
  ImageView thumbnail = controlsContainer.findViewById(R.id.camera_gallery_button);
  if (media == null) {
    thumbBackground.setBackgroundResource(R.drawable.media_selection_camera_switch_background);
    thumbnail.setImageResource(R.drawable.symbol_album_tilt_24);
    thumbnail.setColorFilter(Color.WHITE);
    thumbnail.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
    return;
  }
  thumbBackground.setBackgroundResource(R.drawable.circle_tintable);
  thumbnail.clearColorFilter();
  thumbnail.setScaleType(ImageView.ScaleType.FIT_CENTER);
  Glide.with(this)
       .load(new DecryptableUri(media.getUri()))
       .centerCrop()
       .into(thumbnail);
}
@Override
public void presentHud(int selectedMediaCount) {
  // Toggle the media-count badge and (via updateGalleryVisibility) hide the
  // gallery shortcut once anything has been selected.
  MediaCountIndicatorButton countButton = controlsContainer.findViewById(R.id.camera_review_button);
  boolean hasSelection = selectedMediaCount > 0;
  if (hasSelection) {
    countButton.setCount(selectedMediaCount);
    countButton.setVisibility(View.VISIBLE);
  } else {
    countButton.setVisibility(View.GONE);
  }
  isMediaSelected = hasSelection;
  updateGalleryVisibility();
}
// The gallery shortcut is only shown while nothing has been selected yet.
private void updateGalleryVisibility() {
  View cameraGalleryContainer = controlsContainer.findViewById(R.id.camera_gallery_button_background);
  cameraGalleryContainer.setVisibility(isMediaSelected ? View.GONE : View.VISIBLE);
}
// Positions the viewfinder card and the controls strip based on the device's
// CameraDisplay profile (corner rounding, margins, and viewport gravity).
private void initializeViewFinderAndControlsPositioning() {
MaterialCardView cameraCard = requireView().findViewById(R.id.camerax_camera_parent);
View controls = requireView().findViewById(R.id.camerax_controls_container);
CameraDisplay cameraDisplay = CameraDisplay.getDisplay(requireActivity());
if (!cameraDisplay.getRoundViewFinderCorners()) {
cameraCard.setRadius(0f);
}
ViewUtil.setBottomMargin(controls, cameraDisplay.getCameraCaptureMarginBottom(getResources()));
if (cameraDisplay.getCameraViewportGravity() == CameraDisplay.CameraViewportGravity.CENTER) {
// Centered viewport: pin the card to the top of the parent and inset it by the system bars.
ConstraintSet constraintSet = new ConstraintSet();
constraintSet.clone((ConstraintLayout) requireView());
constraintSet.connect(R.id.camerax_camera_parent, ConstraintSet.TOP, ConstraintSet.PARENT_ID, ConstraintSet.TOP);
constraintSet.applyTo((ConstraintLayout) requireView());
ViewUtil.setTopMargin(cameraCard, ViewUtil.getStatusBarHeight(requireView()));
ViewUtil.setBottomMargin(cameraCard, ViewUtil.getNavigationBarHeight(requireView()));
} else {
// Bottom-anchored viewport: only the profile-provided bottom margin applies.
ViewUtil.setBottomMargin(cameraCard, cameraDisplay.getCameraViewportMarginBottom());
}
}
@SuppressLint({ "ClickableViewAccessibility", "MissingPermission" })
// Wires every on-screen control: capture, flip, flash, gallery, and count buttons,
// plus (on API 26+ when the mode policy allows it) press-and-hold video recording.
private void initControls(LifecycleCameraController lifecycleCameraController) {
View flipButton = requireView().findViewById(R.id.camera_flip_button);
CameraButtonView captureButton = requireView().findViewById(R.id.camera_capture_button);
View galleryButton = requireView().findViewById(R.id.camera_gallery_button);
View countButton = requireView().findViewById(R.id.camera_review_button);
CameraXFlashToggleView flashButton = requireView().findViewById(R.id.camera_flash_button);
initializeViewFinderAndControlsPositioning();
// Replace any previous subscription before observing the most recent gallery item.
mostRecentItemDisposable.dispose();
mostRecentItemDisposable = controller.getMostRecentMediaItem()
.observeOn(AndroidSchedulers.mainThread())
.subscribe(item -> presentRecentItemThumbnail(item.orElse(null)));
selfieFlash = requireView().findViewById(R.id.camera_selfie_flash);
final ListenableFuture<Void> cameraInitFuture = lifecycleCameraController.getInitializationFuture();
captureButton.setOnClickListener(v -> {
// Guard: ignore taps until the camera controller has finished initializing.
// Controls are re-enabled via the capture / video callbacks.
if (hasCameraPermission() && cameraInitFuture.isDone()) {
captureButton.setEnabled(false);
flipButton.setEnabled(false);
flashButton.setEnabled(false);
onCaptureClicked();
} else {
Log.i(TAG, "Camera capture button clicked but the camera controller is not yet initialized.");
}
});
previewView.setScaleType(PREVIEW_SCALE_TYPE);
// The flip button depends on querying available cameras, so defer until init completes.
cameraInitFuture.addListener(() -> initializeFlipButton(flipButton, flashButton), ContextCompat.getMainExecutor(requireContext()));
flashButton.setAutoFlashEnabled(lifecycleCameraController.getImageCaptureFlashMode() >= ImageCapture.FLASH_MODE_AUTO);
flashButton.setFlash(lifecycleCameraController.getImageCaptureFlashMode());
flashButton.setOnFlashModeChangedListener(mode -> {
cameraController.setImageCaptureFlashMode(mode);
cameraScreenBrightnessController.onCameraFlashChanged(mode == ImageCapture.FLASH_MODE_ON);
});
galleryButton.setOnClickListener(v -> controller.onGalleryClicked());
countButton.setOnClickListener(v -> controller.onCameraCountButtonClicked());
if (Build.VERSION.SDK_INT >= 26 && cameraXModePolicy.isVideoSupported()) {
try {
// Record into a fresh in-memory file descriptor; any previous one is released first.
closeVideoFileDescriptor();
videoFileDescriptor = CameraXVideoCaptureHelper.createFileDescriptor(requireContext());
Animation inAnimation = AnimationUtils.loadAnimation(requireContext(), R.anim.fade_in);
Animation outAnimation = AnimationUtils.loadAnimation(requireContext(), R.anim.fade_out);
// Duration limit comes from transcoding constraints unless the controller overrides it.
int maxDuration = VideoUtil.getMaxVideoRecordDurationInSeconds(requireContext(), controller.getMediaConstraints());
if (controller.getMaxVideoDuration() > 0) {
maxDuration = controller.getMaxVideoDuration();
}
Log.d(TAG, "Max duration: " + maxDuration + " sec");
captureButton.setVideoCaptureListener(new CameraXVideoCaptureHelper(
this,
captureButton,
lifecycleCameraController,
previewView,
videoFileDescriptor,
cameraXModePolicy,
maxDuration,
new CameraXVideoCaptureHelper.Callback() {
@Override
public void onVideoRecordStarted() {
hideAndDisableControlsForVideoRecording(captureButton, flashButton, flipButton, outAnimation);
}
@Override
public void onVideoSaved(@NonNull FileDescriptor fd) {
showAndEnableControlsAfterVideoRecording(captureButton, flashButton, flipButton, inAnimation);
controller.onVideoCaptured(fd);
}
@Override
public void onVideoError(@Nullable Throwable cause) {
showAndEnableControlsAfterVideoRecording(captureButton, flashButton, flipButton, inAnimation);
controller.onVideoCaptureError();
}
}
));
displayVideoRecordingTooltipIfNecessary(captureButton);
} catch (IOException e) {
Log.w(TAG, "Video capture is not supported on this device.", e);
}
} else {
// No video support: long-press explains why instead of recording.
captureButton.setOnLongClickListener(unused -> {
CameraFragment.toastVideoRecordingNotAvailable(requireContext());
return true;
});
Log.i(TAG, "Video capture not supported. " +
"API: " + Build.VERSION.SDK_INT + ", " +
"MFD: " + MemoryFileDescriptor.supported() + ", " +
"Camera: " + CameraXUtil.getLowestSupportedHardwareLevel(requireContext()) + ", " +
"MaxDuration: " + VideoUtil.getMaxVideoRecordDurationInSeconds(requireContext(), controller.getMediaConstraints()) + " sec");
}
}
// One-time tooltip teaching tap-for-photo / hold-for-video; placement depends on rotation.
private void displayVideoRecordingTooltipIfNecessary(CameraButtonView captureButton) {
if (shouldDisplayVideoRecordingTooltip()) {
int displayRotation = requireActivity().getWindowManager().getDefaultDisplay().getRotation();
TooltipPopup.forTarget(captureButton)
.setOnDismissListener(this::neverDisplayVideoRecordingTooltipAgain)
.setBackgroundTint(ContextCompat.getColor(requireContext(), R.color.core_ultramarine))
.setTextColor(ContextCompat.getColor(requireContext(), R.color.signal_text_toolbar_title))
.setText(R.string.CameraXFragment_tap_for_photo_hold_for_video)
// Portrait orientations place the tooltip above the button; landscape places it to the side.
.show(displayRotation == Surface.ROTATION_0 || displayRotation == Surface.ROTATION_180 ? TooltipPopup.POSITION_ABOVE : TooltipPopup.POSITION_START);
}
}
// Show the tap-vs-hold hint only once, and only where video transcoding is actually available.
private boolean shouldDisplayVideoRecordingTooltip() {
  if (TextSecurePreferences.hasSeenVideoRecordingTooltip(requireContext())) {
    return false;
  }
  return MediaConstraints.isVideoTranscodeAvailable();
}
// Persists the "tooltip seen" flag. Fix: the original null-checked getContext() but then
// called requireContext(), which can still throw if the fragment detaches between the
// check and the call — use the context reference we already validated instead.
private void neverDisplayVideoRecordingTooltipAgain() {
  Context context = getContext();
  if (context != null) {
    TextSecurePreferences.setHasSeenVideoRecordingTooltip(context, true);
  }
}
// While recording: lock the shutter and fade out the secondary controls.
private void hideAndDisableControlsForVideoRecording(@NonNull View captureButton,
                                                     @NonNull View flashButton,
                                                     @NonNull View flipButton,
                                                     @NonNull Animation outAnimation)
{
  captureButton.setEnabled(false);
  for (View secondary : new View[] { flashButton, flipButton }) {
    secondary.startAnimation(outAnimation);
    secondary.setVisibility(View.INVISIBLE);
  }
}
// After recording: restore the shutter and fade the secondary controls back in.
// Recording callbacks may arrive off the main thread, so hop to the UI thread first.
private void showAndEnableControlsAfterVideoRecording(@NonNull View captureButton,
                                                      @NonNull View flashButton,
                                                      @NonNull View flipButton,
                                                      @NonNull Animation inAnimation)
{
  Activity activity = getActivity();
  if (activity == null) {
    return;
  }
  activity.runOnUiThread(() -> {
    captureButton.setEnabled(true);
    for (View secondary : new View[] { flashButton, flipButton }) {
      secondary.startAnimation(inAnimation);
      secondary.setVisibility(View.VISIBLE);
    }
  });
}
// Takes a still photo: optionally brightens the screen for a "selfie flash", captures on the
// main executor, JPEG-encodes (mirrored for the front camera) on a background task, then
// reports the result — or an error — to the controller. Timing is logged via Stopwatch.
private void onCaptureClicked() {
Stopwatch stopwatch = new Stopwatch("Capture");
CameraXSelfieFlashHelper flashHelper = new CameraXSelfieFlashHelper(
requireActivity().getWindow(),
cameraController,
selfieFlash
);
flashHelper.onWillTakePicture();
cameraController.takePicture(ContextCompat.getMainExecutor(requireContext()), new ImageCapture.OnImageCapturedCallback() {
@Override
public void onCaptureSuccess(@NonNull ImageProxy image) {
flashHelper.endFlash();
// Front-camera shots are mirrored so the saved photo matches the preview.
final boolean flip = cameraController.getCameraSelector() == CameraSelector.DEFAULT_FRONT_CAMERA;
SimpleTask.run(CameraXFragment.this.getViewLifecycleOwner().getLifecycle(), () -> {
stopwatch.split("captured");
try {
return CameraXUtil.toJpeg(image, flip);
} catch (IOException e) {
Log.w(TAG, "Failed to encode captured image.", e);
return null;
} finally {
// Always release the frame, even when encoding fails.
image.close();
}
}, result -> {
stopwatch.split("transformed");
stopwatch.stop(TAG);
if (result != null) {
controller.onImageCaptured(result.getData(), result.getWidth(), result.getHeight());
} else {
controller.onCameraError();
}
});
}
@Override
public void onError(@NonNull ImageCaptureException exception) {
Log.w(TAG, "Failed to capture image due to error " + exception.getImageCaptureError(), exception.getCause());
flashHelper.endFlash();
controller.onCameraError();
}
});
flashHelper.startFlash();
}
// Closes and clears the in-memory video sink. Fix: the original only nulled the field
// when close() succeeded, leaving a stale reference to a dead descriptor when close()
// threw; the reference is now cleared in a finally block either way.
private void closeVideoFileDescriptor() {
  if (videoFileDescriptor != null) {
    try {
      videoFileDescriptor.close();
    } catch (IOException e) {
      Log.w(TAG, "Failed to close video file descriptor", e);
    } finally {
      videoFileDescriptor = null;
    }
  }
}
@SuppressLint({ "MissingPermission" })
// Runs after camera initialization completes: shows the flip button only when both front
// and back cameras exist, wires the flip action (with rotation animation, persisted
// direction, and flash/brightness resync), and maps double-tap on the preview to a flip.
private void initializeFlipButton(@NonNull View flipButton, @NonNull CameraXFlashToggleView flashButton) {
if (getContext() == null) {
Log.w(TAG, "initializeFlipButton called either before or after fragment was attached.");
return;
}
// The init future can fire after the fragment has stopped; bail out in that case.
if (!getLifecycle().getCurrentState().isAtLeast(androidx.lifecycle.Lifecycle.State.STARTED)) {
return;
}
getViewLifecycleOwner().getLifecycle().addObserver(cameraScreenBrightnessController);
if (cameraController.hasCamera(CameraSelector.DEFAULT_FRONT_CAMERA) && cameraController.hasCamera(CameraSelector.DEFAULT_BACK_CAMERA)) {
flipButton.setVisibility(View.VISIBLE);
flipButton.setOnClickListener(v -> {
CameraSelector cameraSelector = cameraController.getCameraSelector() == CameraSelector.DEFAULT_FRONT_CAMERA
? CameraSelector.DEFAULT_BACK_CAMERA
: CameraSelector.DEFAULT_FRONT_CAMERA;
cameraController.setCameraSelector(cameraSelector);
// Remember the chosen direction for the next camera session.
TextSecurePreferences.setDirectCaptureCameraId(getContext(), CameraXUtil.toCameraDirectionInt(cameraController.getCameraSelector()));
Animation animation = new RotateAnimation(0, -180, RotateAnimation.RELATIVE_TO_SELF, 0.5f, RotateAnimation.RELATIVE_TO_SELF, 0.5f);
animation.setDuration(200);
animation.setInterpolator(new DecelerateInterpolator());
flipButton.startAnimation(animation);
// Flash capabilities can differ per lens, so refresh the toggle after switching.
flashButton.setAutoFlashEnabled(cameraController.getImageCaptureFlashMode() >= ImageCapture.FLASH_MODE_AUTO);
flashButton.setFlash(cameraController.getImageCaptureFlashMode());
cameraScreenBrightnessController.onCameraDirectionChanged(cameraSelector == CameraSelector.DEFAULT_FRONT_CAMERA);
});
// Double-tapping the preview is a shortcut for the flip button.
GestureDetector gestureDetector = new GestureDetector(requireContext(), new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onDoubleTap(@NonNull MotionEvent e) {
if (flipButton.isEnabled()) {
flipButton.performClick();
}
return true;
}
});
previewView.setOnTouchListener((v, event) -> gestureDetector.onTouchEvent(event));
} else {
flipButton.setVisibility(View.GONE);
}
}
// Adapter exposing the camera controller's direction and flash state to the
// screen-brightness controller (used for the selfie-flash screen boost).
private static class CameraStateProvider implements CameraScreenBrightnessController.CameraStateProvider {
private final CameraController cameraController;
private CameraStateProvider(CameraController cameraController) {
this.cameraController = cameraController;
}
@Override
public boolean isFrontFacingCameraSelected() {
return cameraController.getCameraSelector() == CameraSelector.DEFAULT_FRONT_CAMERA;
}
@Override
public boolean isFlashEnabled() {
// Only FLASH_MODE_ON counts; AUTO is not treated as "enabled" here.
return cameraController.getImageCaptureFlashMode() == ImageCapture.FLASH_MODE_ON;
}
}
}

View File

@@ -0,0 +1,670 @@
package org.thoughtcrime.securesms.mediasend
import android.Manifest
import android.content.Context
import android.content.pm.ActivityInfo
import android.graphics.Bitmap
import android.os.Build
import android.os.Bundle
import android.os.ParcelFileDescriptor
import android.widget.Toast
import androidx.compose.animation.AnimatedVisibility
import androidx.compose.animation.core.tween
import androidx.compose.animation.fadeIn
import androidx.compose.animation.fadeOut
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.Box
import androidx.compose.foundation.layout.BoxWithConstraints
import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.Spacer
import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.foundation.layout.height
import androidx.compose.foundation.layout.padding
import androidx.compose.material3.Button
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
import androidx.compose.runtime.LaunchedEffect
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableIntStateOf
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.remember
import androidx.compose.runtime.setValue
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.platform.LocalDensity
import androidx.compose.ui.res.stringResource
import androidx.compose.ui.text.style.TextAlign
import androidx.compose.ui.unit.dp
import androidx.lifecycle.viewmodel.compose.viewModel
import org.signal.camera.CameraScreen
import org.signal.camera.CameraScreenEvents
import org.signal.camera.CameraScreenViewModel
import org.signal.camera.VideoCaptureResult
import org.signal.camera.VideoOutput
import org.signal.camera.hud.StringResources
import org.signal.camera.hud.StandardCameraHud
import org.signal.camera.hud.StandardCameraHudEvents
import org.signal.core.util.logging.Log
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.compose.ComposeFragment
import org.thoughtcrime.securesms.mediasend.camerax.CameraXModePolicy
import org.thoughtcrime.securesms.permissions.PermissionDeniedBottomSheet.Companion.showPermissionFragment
import org.thoughtcrime.securesms.permissions.Permissions
import org.thoughtcrime.securesms.stories.Stories
import org.thoughtcrime.securesms.util.BottomSheetUtil
import org.thoughtcrime.securesms.util.MemoryFileDescriptor
import org.thoughtcrime.securesms.video.VideoUtil
import java.io.ByteArrayOutputStream
import java.io.IOException
private val TAG = Log.tag(CameraXFragment::class.java)
/**
* Camera capture implemented using a Compose-based CameraScreen with CameraX SDK under the hood.
* This is the preferred camera implementation when supported.
*/
class CameraXFragment : ComposeFragment(), CameraFragment {
companion object {
  private const val IS_VIDEO_ENABLED = "is_video_enabled"
  private const val IS_QR_SCAN_ENABLED = "is_qr_scan_enabled"
  private const val CONTROLS_ANIMATION_DURATION = 250L

  /** Creates an instance configured for still-photo avatar capture: no video, no QR scanning. */
  @JvmStatic
  fun newInstanceForAvatarCapture(): CameraXFragment {
    val fragment = CameraXFragment()
    fragment.arguments = Bundle().apply {
      putBoolean(IS_VIDEO_ENABLED, false)
      putBoolean(IS_QR_SCAN_ENABLED, false)
    }
    return fragment
  }

  /** Creates a general-purpose instance; video stays enabled via the argument's default. */
  @JvmStatic
  fun newInstance(qrScanEnabled: Boolean): CameraXFragment {
    val fragment = CameraXFragment()
    fragment.arguments = Bundle().apply {
      putBoolean(IS_QR_SCAN_ENABLED, qrScanEnabled)
    }
    return fragment
  }
}
// Host callback interface, resolved in onAttach(); null only before attach.
private var controller: CameraFragment.Controller? = null
// In-memory destination for video recordings; closed in onDestroyView().
private var videoFileDescriptor: MemoryFileDescriptor? = null
// Decides which camera use-cases are bound; acquired in onCreate().
private var cameraXModePolicy: CameraXModePolicy? = null
// Argument-backed flags; video defaults to enabled when the arg is absent.
private val isVideoEnabled: Boolean
get() = requireArguments().getBoolean(IS_VIDEO_ENABLED, true)
private val isQrScanEnabled: Boolean
get() = requireArguments().getBoolean(IS_QR_SCAN_ENABLED, false)
// Compose state holders for HUD visibility
private var controlsVisible = mutableStateOf(true)
private var selectedMediaCount = mutableIntStateOf(0)
override fun onAttach(context: Context) {
  super.onAttach(context)
  // Prefer the hosting activity as the controller; otherwise fall back to the parent fragment.
  val host = activity as? CameraFragment.Controller
    ?: parentFragment as? CameraFragment.Controller
  controller = host ?: throw IllegalStateException("Parent must implement Controller interface.")
}
override fun onCreate(savedInstanceState: Bundle?) {
  super.onCreate(savedInstanceState)
  // onAttach() always runs before onCreate(), so the controller must be set by now.
  // Fix: replace the bare `!!` (NPE with no message) with checkNotNull, and capture the
  // policy in a local so the log line doesn't need a `?.` on the freshly assigned field.
  val attachedController = checkNotNull(controller) { "Controller must be attached before onCreate()." }
  val policy = CameraXModePolicy.acquire(
    requireContext(),
    attachedController.mediaConstraints,
    isVideoEnabled,
    isQrScanEnabled
  )
  cameraXModePolicy = policy
  Log.d(TAG, "Starting CameraX with mode policy ${policy.javaClass.simpleName}")
}
@Composable
override fun FragmentContent() {
CameraXScreen(
controller = controller,
isVideoEnabled = isVideoEnabled && Build.VERSION.SDK_INT >= 26,
isQrScanEnabled = isQrScanEnabled,
controlsVisible = controlsVisible.value,
selectedMediaCount = selectedMediaCount.intValue,
onCheckPermissions = { checkPermissions(isVideoEnabled) },
hasCameraPermission = { hasCameraPermission() },
createVideoFileDescriptor = { createVideoFileDescriptor() },
getMaxVideoDurationInSeconds = { getMaxVideoDurationInSeconds() },
cameraDisplay = CameraDisplay.getDisplay(requireActivity())
)
}
override fun onResume() {
super.onResume()
requireActivity().requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_PORTRAIT
}
override fun onDestroyView() {
super.onDestroyView()
closeVideoFileDescriptor()
requireActivity().requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED
}
override fun onRequestPermissionsResult(
requestCode: Int,
permissions: Array<String>,
grantResults: IntArray
) {
Permissions.onRequestPermissionsResult(this, requestCode, permissions, grantResults)
}
override fun presentHud(selectedMediaCount: Int) {
this.selectedMediaCount.intValue = selectedMediaCount
}
override fun fadeOutControls(onEndAction: Runnable) {
controlsVisible.value = false
// Post the end action after a short delay to allow animation to complete
view?.postDelayed({ onEndAction.run() }, CONTROLS_ANIMATION_DURATION)
}
override fun fadeInControls() {
controlsVisible.value = true
}
private fun checkPermissions(includeAudio: Boolean) {
if (hasCameraPermission()) {
return
}
if (includeAudio) {
Permissions.with(this)
.request(Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO)
.ifNecessary()
.onSomeGranted { permissions ->
// Will trigger recomposition via hasCameraPermission check
}
.onSomePermanentlyDenied { deniedPermissions ->
if (deniedPermissions.containsAll(listOf(Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO))) {
showPermissionFragment(
R.string.CameraXFragment_allow_access_camera_microphone,
R.string.CameraXFragment_to_capture_photos_videos,
false
).show(parentFragmentManager, BottomSheetUtil.STANDARD_BOTTOM_SHEET_FRAGMENT_TAG)
} else if (deniedPermissions.contains(Manifest.permission.CAMERA)) {
showPermissionFragment(
R.string.CameraXFragment_allow_access_camera,
R.string.CameraXFragment_to_capture_photos_videos,
false
).show(parentFragmentManager, BottomSheetUtil.STANDARD_BOTTOM_SHEET_FRAGMENT_TAG)
}
}
.onSomeDenied { deniedPermissions ->
if (deniedPermissions.contains(Manifest.permission.CAMERA)) {
Toast.makeText(
requireContext(),
R.string.CameraXFragment_signal_needs_camera_access_capture_photos,
Toast.LENGTH_LONG
).show()
}
}
.execute()
} else {
Permissions.with(this)
.request(Manifest.permission.CAMERA)
.ifNecessary()
.onAllGranted { /* Will trigger recomposition */ }
.onAnyDenied {
Toast.makeText(
requireContext(),
R.string.CameraXFragment_signal_needs_camera_access_capture_photos,
Toast.LENGTH_LONG
).show()
}
.withPermanentDenialDialog(
getString(R.string.CameraXFragment_signal_needs_camera_access_capture_photos),
null,
R.string.CameraXFragment_allow_access_camera,
R.string.CameraXFragment_to_capture_photos,
parentFragmentManager
)
.execute()
}
}
private fun hasCameraPermission(): Boolean {
return Permissions.hasAll(requireContext(), Manifest.permission.CAMERA)
}
private fun createVideoFileDescriptor(): ParcelFileDescriptor? {
if (Build.VERSION.SDK_INT < 26) {
throw IllegalStateException("Video capture requires API 26 or higher")
}
return try {
closeVideoFileDescriptor()
videoFileDescriptor = CameraXVideoCaptureHelper.createFileDescriptor(requireContext())
videoFileDescriptor?.parcelFileDescriptor
} catch (e: IOException) {
Log.w(TAG, "Failed to create video file descriptor", e)
null
}
}
private fun closeVideoFileDescriptor() {
videoFileDescriptor?.let {
try {
it.close()
} catch (e: IOException) {
Log.w(TAG, "Failed to close video file descriptor", e)
}
videoFileDescriptor = null
}
}
private fun getMaxVideoDurationInSeconds(): Int {
var maxDuration = VideoUtil.getMaxVideoRecordDurationInSeconds(requireContext(), controller!!.mediaConstraints)
val controllerMaxDuration = controller?.maxVideoDuration ?: 0
if (controllerMaxDuration > 0) {
maxDuration = controllerMaxDuration
}
return maxDuration
}
}
/**
 * Top-level composable for the CameraX capture experience: wires the [CameraScreenViewModel]
 * into the [CameraScreen] viewfinder plus the [StandardCameraHud] controls, all gated behind
 * the runtime camera permission.
 *
 * @param controller Host callback surface; null only in previews.
 * @param storiesEnabled Drives viewport/HUD placement via [CameraDisplay]; defaults to the
 *   live feature flag, overridable so previews render deterministically.
 */
@Composable
private fun CameraXScreen(
  controller: CameraFragment.Controller?,
  isVideoEnabled: Boolean,
  isQrScanEnabled: Boolean,
  controlsVisible: Boolean,
  selectedMediaCount: Int,
  onCheckPermissions: () -> Unit,
  hasCameraPermission: () -> Boolean,
  createVideoFileDescriptor: () -> ParcelFileDescriptor?,
  getMaxVideoDurationInSeconds: () -> Int,
  cameraDisplay: CameraDisplay,
  storiesEnabled: Boolean = Stories.isFeatureEnabled()
) {
  val context = LocalContext.current
  val cameraViewModel: CameraScreenViewModel = viewModel()
  val cameraState by cameraViewModel.state
  var hasPermission by remember { mutableStateOf(hasCameraPermission()) }
  // Kick off the permission request flow once, on first composition.
  LaunchedEffect(Unit) {
    if (!hasPermission) {
      onCheckPermissions()
    }
  }
  // Forward QR detections to the host for as long as scanning is enabled.
  LaunchedEffect(cameraViewModel, isQrScanEnabled) {
    if (isQrScanEnabled) {
      cameraViewModel.qrCodeDetected.collect { qrCode ->
        controller?.onQrCodeFound(qrCode)
      }
    }
  }
  // NOTE(review): permission state is polled every 500ms for the life of the composition;
  // presumably because there is no callback-based signal here. A lifecycle-aware check
  // (e.g. on resume) would be preferable — confirm before changing.
  LaunchedEffect(Unit) {
    while (true) {
      kotlinx.coroutines.delay(500)
      val newHasPermission = hasCameraPermission()
      if (newHasPermission != hasPermission) {
        hasPermission = newHasPermission
      }
    }
  }
  val resources = LocalContext.current.resources
  // Requested distance from the bottom of the screen to the capture controls, in dp.
  val hudBottomMargin = with(LocalDensity.current) {
    cameraDisplay.getCameraCaptureMarginBottom(resources, storiesEnabled).toDp()
  }
  val viewportGravity = cameraDisplay.getCameraViewportGravity(storiesEnabled)
  val cameraAlignment = when (viewportGravity) {
    CameraDisplay.CameraViewportGravity.CENTER -> Alignment.Center
    CameraDisplay.CameraViewportGravity.BOTTOM -> Alignment.BottomCenter
  }
  // Bottom-aligned viewports carry an explicit bottom margin; centered ones do not.
  val viewportBottomMargin = if (viewportGravity == CameraDisplay.CameraViewportGravity.BOTTOM) {
    with(LocalDensity.current) { cameraDisplay.getCameraViewportMarginBottom(storiesEnabled).toDp() }
  } else {
    0.dp
  }
  BoxWithConstraints(
    modifier = Modifier.fillMaxSize()
  ) {
    // We have to do a bunch of math to figure out how to place the camera buttons because
    // the logic relies on positioning things from the edge of the screen, which doesn't jibe
    // with how the composables are arranged. When this screen is re-written, we should simplify
    // this whole setup. For now, I'm just doing my best to match current behavior.
    val cameraAspectRatio = 9f / 16f
    val availableHeight = maxHeight - viewportBottomMargin
    val availableAspectRatio = maxWidth / availableHeight
    val matchHeightFirst = availableAspectRatio > cameraAspectRatio
    // The viewport renders at a fixed 9:16: fill the available height when the area is wider
    // than 9:16, otherwise derive the height from the full width.
    val viewportHeight = if (matchHeightFirst) {
      availableHeight
    } else {
      maxWidth / cameraAspectRatio
    }
    // Extra gap below the viewport introduced by center alignment (zero when bottom-aligned).
    val bottomGapFromAlignment = when (viewportGravity) {
      CameraDisplay.CameraViewportGravity.CENTER -> (availableHeight - viewportHeight) / 2
      CameraDisplay.CameraViewportGravity.BOTTOM -> 0.dp
    }
    val totalBottomOffset = viewportBottomMargin + bottomGapFromAlignment
    // Translate the screen-edge-relative HUD margin into padding measured inside the viewport.
    val hudBottomPaddingInsideViewport = maxOf(0.dp, hudBottomMargin - totalBottomOffset)
    if (hasPermission) {
      CameraScreen(
        state = cameraState,
        emitter = { event -> cameraViewModel.onEvent(event) },
        roundCorners = cameraDisplay.roundViewFinderCorners,
        contentAlignment = cameraAlignment,
        modifier = Modifier.padding(bottom = viewportBottomMargin)
      ) {
        AnimatedVisibility(
          visible = controlsVisible,
          enter = fadeIn(animationSpec = tween(durationMillis = 150)),
          exit = fadeOut(animationSpec = tween(durationMillis = 150))
        ) {
          Box(modifier = Modifier.fillMaxSize()) {
            StandardCameraHud(
              state = cameraState,
              modifier = Modifier.padding(bottom = hudBottomPaddingInsideViewport),
              maxRecordingDurationMs = getMaxVideoDurationInSeconds() * 1000L,
              mediaSelectionCount = selectedMediaCount,
              emitter = { event ->
                handleHudEvent(
                  event = event,
                  context = context,
                  cameraViewModel = cameraViewModel,
                  controller = controller,
                  isVideoEnabled = isVideoEnabled,
                  createVideoFileDescriptor = createVideoFileDescriptor
                )
              },
              stringResources = StringResources(
                photoCaptureFailed = R.string.CameraXFragment_photo_capture_failed,
                photoProcessingFailed = R.string.CameraXFragment_photo_processing_failed
              )
            )
          }
        }
      }
    } else {
      PermissionMissingContent(
        isVideoEnabled = isVideoEnabled,
        onRequestPermissions = onCheckPermissions
      )
    }
  }
}
/**
 * Full-screen black placeholder shown while camera access has not been granted, with a
 * button that re-triggers the permission request flow.
 */
@Composable
private fun PermissionMissingContent(
  isVideoEnabled: Boolean,
  onRequestPermissions: () -> Unit
) {
  val context = LocalContext.current
  val audioAlreadyGranted = remember { Permissions.hasAll(context, Manifest.permission.RECORD_AUDIO) }
  // Only mention the microphone when video is possible and audio access is still missing.
  val messageRes = when {
    !isVideoEnabled || audioAlreadyGranted -> R.string.CameraXFragment_to_capture_photos_and_video_allow_camera
    else -> R.string.CameraXFragment_to_capture_photos_and_video_allow_camera_microphone
  }
  Box(
    contentAlignment = Alignment.Center,
    modifier = Modifier
      .fillMaxSize()
      .background(Color.Black)
  ) {
    Column(
      modifier = Modifier.padding(16.dp),
      horizontalAlignment = Alignment.CenterHorizontally
    ) {
      Text(
        text = stringResource(messageRes),
        textAlign = TextAlign.Center,
        color = Color.White
      )
      Spacer(modifier = Modifier.height(16.dp))
      Button(onClick = onRequestPermissions) {
        Text(text = stringResource(R.string.CameraXFragment_allow_access))
      }
    }
  }
}
/**
 * Dispatches a [StandardCameraHudEvents] from the HUD to the view model or the host controller.
 * Video capture requests are rejected with a toast when video is disabled, the API level is
 * too low, or no recording file descriptor could be created.
 */
private fun handleHudEvent(
  event: StandardCameraHudEvents,
  context: Context,
  cameraViewModel: CameraScreenViewModel,
  controller: CameraFragment.Controller?,
  isVideoEnabled: Boolean,
  createVideoFileDescriptor: () -> ParcelFileDescriptor?
) {
  when (event) {
    is StandardCameraHudEvents.PhotoCaptureTriggered -> {
      cameraViewModel.capturePhoto(
        context = context,
        onPhotoCaptured = { bitmap -> handlePhotoCaptured(bitmap, controller) }
      )
    }
    is StandardCameraHudEvents.VideoCaptureStarted -> {
      // Recording requires API 26+, video to be enabled, and a usable file descriptor;
      // any missing piece falls through to the "not available" toast.
      val recordingSupported = Build.VERSION.SDK_INT >= 26 && isVideoEnabled
      val fileDescriptor = if (recordingSupported) createVideoFileDescriptor() else null
      if (fileDescriptor == null) {
        CameraFragment.toastVideoRecordingNotAvailable(context)
      } else {
        cameraViewModel.startRecording(
          context = context,
          output = VideoOutput.FileDescriptorOutput(fileDescriptor),
          onVideoCaptured = { result -> handleVideoCaptured(result, controller) }
        )
      }
    }
    is StandardCameraHudEvents.VideoCaptureStopped -> cameraViewModel.stopRecording()
    is StandardCameraHudEvents.GalleryClick -> controller?.onGalleryClicked()
    is StandardCameraHudEvents.MediaSelectionClick -> controller?.onCameraCountButtonClicked()
    is StandardCameraHudEvents.ToggleFlash -> cameraViewModel.onEvent(CameraScreenEvents.NextFlashMode)
    is StandardCameraHudEvents.ClearCaptureError -> cameraViewModel.onEvent(CameraScreenEvents.ClearCaptureError)
    is StandardCameraHudEvents.SwitchCamera -> cameraViewModel.onEvent(CameraScreenEvents.SwitchCamera(context))
    is StandardCameraHudEvents.SetZoomLevel -> cameraViewModel.onEvent(CameraScreenEvents.LinearZoom(event.zoomLevel))
  }
}
/**
 * Encodes the captured [bitmap] as JPEG (quality 90) and hands the bytes, along with the
 * bitmap's dimensions, to the controller.
 */
private fun handlePhotoCaptured(bitmap: Bitmap, controller: CameraFragment.Controller?) {
  val jpegBytes = ByteArrayOutputStream().use { stream ->
    bitmap.compress(Bitmap.CompressFormat.JPEG, 90, stream)
    stream.toByteArray()
  }
  controller?.onImageCaptured(jpegBytes, bitmap.width, bitmap.height)
}
/**
 * Routes a [VideoCaptureResult] to the controller: successful recordings are rewound and
 * delivered; a missing descriptor, seek failure, or capture error reports an error instead.
 */
private fun handleVideoCaptured(result: VideoCaptureResult, controller: CameraFragment.Controller?) {
  when (result) {
    is VideoCaptureResult.Success -> {
      val parcelFd = result.fileDescriptor
      if (parcelFd == null) {
        controller?.onVideoCaptureError()
        return
      }
      try {
        // Rewind to the start of the recording before handing the descriptor to the consumer.
        android.system.Os.lseek(parcelFd.fileDescriptor, 0, android.system.OsConstants.SEEK_SET)
        controller?.onVideoCaptured(parcelFd.fileDescriptor)
      } catch (e: Exception) {
        Log.w(TAG, "Failed to seek video file descriptor", e)
        controller?.onVideoCaptureError()
      }
    }
    is VideoCaptureResult.Error -> {
      Log.w(TAG, "Video capture failed: ${result.message}", result.throwable)
      controller?.onVideoCaptureError()
    }
  }
}
/** Design-time preview of [CameraXScreen] in a 20:9 phone viewport (stories toggle shown). */
@androidx.compose.ui.tooling.preview.Preview(
  name = "20:9 Display",
  showBackground = true,
  widthDp = 360,
  heightDp = 800
)
@Composable
private fun CameraXScreenPreview_20_9() {
  org.signal.core.ui.compose.Previews.Preview {
    CameraXScreen(
      controller = null,
      isVideoEnabled = true,
      isQrScanEnabled = false,
      controlsVisible = true,
      selectedMediaCount = 0,
      onCheckPermissions = {},
      hasCameraPermission = { true },
      createVideoFileDescriptor = { null },
      getMaxVideoDurationInSeconds = { 60 },
      cameraDisplay = CameraDisplay.DISPLAY_20_9,
      storiesEnabled = true
    )
  }
}
/** Design-time preview of [CameraXScreen] in a 19:9 phone viewport (stories toggle shown). */
@androidx.compose.ui.tooling.preview.Preview(
  name = "19:9 Display",
  showBackground = true,
  widthDp = 360,
  heightDp = 760
)
@Composable
private fun CameraXScreenPreview_19_9() {
  org.signal.core.ui.compose.Previews.Preview {
    CameraXScreen(
      controller = null,
      isVideoEnabled = true,
      isQrScanEnabled = false,
      controlsVisible = true,
      selectedMediaCount = 0,
      onCheckPermissions = {},
      hasCameraPermission = { true },
      createVideoFileDescriptor = { null },
      getMaxVideoDurationInSeconds = { 60 },
      cameraDisplay = CameraDisplay.DISPLAY_19_9,
      storiesEnabled = true
    )
  }
}
/** Design-time preview of [CameraXScreen] in an 18:9 phone viewport (stories toggle shown). */
@androidx.compose.ui.tooling.preview.Preview(
  name = "18:9 Display",
  showBackground = true,
  widthDp = 360,
  heightDp = 720
)
@Composable
private fun CameraXScreenPreview_18_9() {
  org.signal.core.ui.compose.Previews.Preview {
    CameraXScreen(
      controller = null,
      isVideoEnabled = true,
      isQrScanEnabled = false,
      controlsVisible = true,
      selectedMediaCount = 0,
      onCheckPermissions = {},
      hasCameraPermission = { true },
      createVideoFileDescriptor = { null },
      getMaxVideoDurationInSeconds = { 60 },
      cameraDisplay = CameraDisplay.DISPLAY_18_9,
      storiesEnabled = true
    )
  }
}
/** Design-time preview of [CameraXScreen] in a 16:9 phone viewport (stories toggle shown). */
@androidx.compose.ui.tooling.preview.Preview(
  name = "16:9 Display",
  showBackground = true,
  widthDp = 360,
  heightDp = 640
)
@Composable
private fun CameraXScreenPreview_16_9() {
  org.signal.core.ui.compose.Previews.Preview {
    CameraXScreen(
      controller = null,
      isVideoEnabled = true,
      isQrScanEnabled = false,
      controlsVisible = true,
      selectedMediaCount = 0,
      onCheckPermissions = {},
      hasCameraPermission = { true },
      createVideoFileDescriptor = { null },
      getMaxVideoDurationInSeconds = { 60 },
      cameraDisplay = CameraDisplay.DISPLAY_16_9,
      storiesEnabled = true
    )
  }
}
/** Design-time preview of [CameraXScreen] in a 6:5 tablet viewport (stories toggle shown). */
@androidx.compose.ui.tooling.preview.Preview(
  name = "6:5 Display (Tablet)",
  showBackground = true,
  widthDp = 480,
  heightDp = 576
)
@Composable
private fun CameraXScreenPreview_6_5() {
  org.signal.core.ui.compose.Previews.Preview {
    CameraXScreen(
      controller = null,
      isVideoEnabled = true,
      isQrScanEnabled = false,
      controlsVisible = true,
      selectedMediaCount = 0,
      onCheckPermissions = {},
      hasCameraPermission = { true },
      createVideoFileDescriptor = { null },
      getMaxVideoDurationInSeconds = { 60 },
      cameraDisplay = CameraDisplay.DISPLAY_6_5,
      storiesEnabled = true
    )
  }
}

View File

@@ -158,7 +158,7 @@ class ReviewCardViewHolder extends RecyclerView.ViewHolder {
private void presentSignalConnection(@NonNull TextView line, @NonNull ImageView icon, @NonNull Context context, @NonNull ReviewCard reviewCard) {
Preconditions.checkArgument(reviewCard.getReviewRecipient().isProfileSharing());
Drawable chevron = ContextCompat.getDrawable(context, R.drawable.symbol_chevron_right_24);
Drawable chevron = ContextCompat.getDrawable(context, org.signal.core.ui.R.drawable.symbol_chevron_right_24);
Preconditions.checkNotNull(chevron);
chevron.setTint(ContextCompat.getColor(context, R.color.core_grey_45));