mirror of
https://github.com/signalapp/Signal-Android.git
synced 2026-05-01 14:13:22 +01:00
Rebuild CameraXFragment to use a brand new camera.
This commit is contained in:
17
feature/camera/src/main/AndroidManifest.xml
Normal file
17
feature/camera/src/main/AndroidManifest.xml
Normal file
@@ -0,0 +1,17 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
|
||||
|
||||
<uses-feature android:name="android.hardware.camera" android:required="true" />
|
||||
<uses-permission android:name="android.permission.CAMERA" />
|
||||
<uses-permission android:name="android.permission.RECORD_AUDIO" />
|
||||
<uses-permission android:name="android.permission.VIBRATE" />
|
||||
|
||||
<!-- Permissions for reading media files to show gallery thumbnail -->
|
||||
<!-- For Android 13 (API 33) and above -->
|
||||
<uses-permission android:name="android.permission.READ_MEDIA_IMAGES" />
|
||||
<uses-permission android:name="android.permission.READ_MEDIA_VIDEO" />
|
||||
<!-- For Android 12 (API 32) and below -->
|
||||
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"
|
||||
android:maxSdkVersion="32" />
|
||||
|
||||
</manifest>
|
||||
320
feature/camera/src/main/java/org/signal/camera/CameraScreen.kt
Normal file
320
feature/camera/src/main/java/org/signal/camera/CameraScreen.kt
Normal file
@@ -0,0 +1,320 @@
|
||||
package org.signal.camera
|
||||
|
||||
import android.content.res.Configuration
|
||||
import androidx.camera.compose.CameraXViewfinder
|
||||
import androidx.camera.core.SurfaceRequest
|
||||
import androidx.camera.lifecycle.ProcessCameraProvider
|
||||
import androidx.camera.core.Preview as CameraPreview
|
||||
import androidx.compose.animation.AnimatedVisibility
|
||||
import androidx.compose.animation.core.Animatable
|
||||
import androidx.compose.animation.core.Spring
|
||||
import androidx.compose.animation.core.spring
|
||||
import androidx.compose.animation.core.tween
|
||||
import androidx.compose.animation.fadeIn
|
||||
import androidx.compose.animation.fadeOut
|
||||
import androidx.compose.foundation.background
|
||||
import androidx.compose.foundation.gestures.detectTapGestures
|
||||
import androidx.compose.foundation.gestures.detectTransformGestures
|
||||
import androidx.compose.foundation.layout.Box
|
||||
import androidx.compose.foundation.layout.BoxScope
|
||||
import androidx.compose.foundation.layout.BoxWithConstraints
|
||||
import androidx.compose.foundation.layout.aspectRatio
|
||||
import androidx.compose.foundation.layout.fillMaxSize
|
||||
import androidx.compose.foundation.shape.RoundedCornerShape
|
||||
import androidx.compose.runtime.Composable
|
||||
import androidx.compose.runtime.LaunchedEffect
|
||||
import androidx.compose.runtime.getValue
|
||||
import androidx.compose.runtime.mutableStateOf
|
||||
import androidx.compose.runtime.remember
|
||||
import androidx.compose.runtime.setValue
|
||||
import androidx.compose.ui.Alignment
|
||||
import androidx.compose.ui.Modifier
|
||||
import androidx.compose.ui.draw.clip
|
||||
import androidx.compose.ui.draw.drawBehind
|
||||
import androidx.compose.ui.geometry.Offset
|
||||
import androidx.compose.ui.graphics.Color
|
||||
import androidx.compose.ui.graphics.drawscope.Stroke
|
||||
import androidx.compose.ui.input.pointer.pointerInput
|
||||
import androidx.compose.ui.platform.LocalConfiguration
|
||||
import androidx.compose.ui.platform.LocalContext
|
||||
import androidx.compose.ui.platform.LocalInspectionMode
|
||||
import androidx.compose.ui.platform.LocalLifecycleOwner
|
||||
import androidx.compose.ui.tooling.preview.Preview
|
||||
import androidx.compose.ui.unit.dp
|
||||
import kotlinx.coroutines.delay
|
||||
import kotlinx.coroutines.launch
|
||||
import org.signal.core.ui.compose.Previews
|
||||
|
||||
/**
 * A camera screen that handles core camera functionality, such as:
 * - Tap to focus
 * - Pinch to zoom
 * - Camera switching
 *
 * among other things.
 *
 * This composable is state-driven and emits events through [emitter]. The parent
 * composable is responsible for handling these events, typically by forwarding them
 * to a [CameraScreenViewModel].
 *
 * Use the [content] parameter to overlay custom HUD elements on top of the camera.
 * For a ready-to-use HUD, see [org.signal.camera.hud.StandardCameraHud].
 *
 * @param state The camera screen state, typically from a [CameraScreenViewModel].
 * @param emitter Callback for events that need to be handled by the parent, likely via [CameraScreenViewModel].
 * @param modifier Modifier to apply to the camera container.
 * @param roundCorners Whether to apply rounded corners to the camera viewfinder. Defaults to true.
 * @param contentAlignment The alignment of the camera viewfinder within the available space. Defaults to center.
 * @param content Composable content to overlay on top of the camera surface. The content is placed in a Box
 * with the same size and position as the camera surface.
 */
@Composable
fun CameraScreen(
  state: CameraScreenState,
  emitter: (CameraScreenEvents) -> Unit,
  modifier: Modifier = Modifier,
  roundCorners: Boolean = true,
  contentAlignment: Alignment = Alignment.Center,
  content: @Composable BoxScope.() -> Unit = {}
) {
  val context = LocalContext.current
  val lifecycleOwner = LocalLifecycleOwner.current
  val configuration = LocalConfiguration.current
  val isInPreview = LocalInspectionMode.current

  // State to hold the surface request from CameraX Preview
  var surfaceRequest by remember { mutableStateOf<SurfaceRequest?>(null) }

  // Determine aspect ratio based on orientation
  val isLandscape = configuration.orientation == Configuration.ORIENTATION_LANDSCAPE
  val aspectRatio = if (isLandscape) 16f / 9f else 9f / 16f

  // Bind camera and setup surface provider. Re-runs when the lens flips so the
  // parent can rebind the use cases to the newly selected camera.
  LaunchedEffect(lifecycleOwner, state.lensFacing) {
    // NOTE(review): ListenableFuture.get() blocks the calling thread, and LaunchedEffect
    // runs on the main dispatcher — consider resolving the provider off-main. Left as-is
    // here because the non-blocking alternative requires an additional dependency.
    val cameraProvider = ProcessCameraProvider.getInstance(context).get()

    val surfaceProvider = CameraPreview.SurfaceProvider { request ->
      surfaceRequest = request
    }

    emitter(
      CameraScreenEvents.BindCamera(
        lifecycleOwner = lifecycleOwner,
        cameraProvider = cameraProvider,
        surfaceProvider = surfaceProvider,
        context = context
      )
    )
  }

  BoxWithConstraints(
    contentAlignment = contentAlignment,
    modifier = modifier.fillMaxSize()
  ) {
    // Determine whether to match height constraints first based on available space.
    val availableAspectRatio = maxWidth / maxHeight
    val matchHeightFirst = availableAspectRatio > aspectRatio

    Box(
      modifier = Modifier
        .aspectRatio(aspectRatio, matchHeightConstraintsFirst = matchHeightFirst)
    ) {
      val cornerShape = if (roundCorners) RoundedCornerShape(16.dp) else RoundedCornerShape(0.dp)

      // Capture the delegated property once so the compiler can smart-cast it below,
      // removing the unchecked !! dereference the original used.
      val currentSurfaceRequest = surfaceRequest

      if (isInPreview) {
        // Preview placeholder - shows a dark box to represent the camera viewfinder
        Box(
          modifier = Modifier
            .fillMaxSize()
            .clip(cornerShape)
            .drawBehind {
              drawRect(Color(0xFF1A1A1A))
            }
        )
      } else if (currentSurfaceRequest != null) {
        CameraXViewfinder(
          surfaceRequest = currentSurfaceRequest,
          modifier = Modifier
            .fillMaxSize()
            .clip(cornerShape)
            .pointerInput(Unit) {
              detectTapGestures { offset ->
                emitter(
                  CameraScreenEvents.TapToFocus(
                    x = offset.x,
                    y = offset.y,
                    width = size.width.toFloat(),
                    height = size.height.toFloat()
                  )
                )
              }
            }
            .pointerInput(Unit) {
              detectTransformGestures { _, _, zoom, _ ->
                emitter(CameraScreenEvents.PinchZoom(zoom))
              }
            }
        )
      }

      if (state.showFocusIndicator && state.focusPoint != null) {
        FocusIndicator(
          focusPoint = state.focusPoint,
          modifier = Modifier.fillMaxSize()
        )
      }

      // Selfie flash overlay (white screen for front camera)
      SelfieFlashOverlay(visible = state.showSelfieFlash)

      // Content overlay (HUD elements, buttons, etc. from parent)
      content()
    }
  }
}
|
||||
|
||||
/**
 * Animated ring drawn at [focusPoint] after a tap-to-focus gesture.
 * Springs from an oversized ring down to a resting size, then fades out after a short delay.
 */
@Composable
private fun FocusIndicator(
  focusPoint: Offset,
  modifier: Modifier = Modifier
) {
  val ringScale = remember { Animatable(1.5f) }
  val ringAlpha = remember { Animatable(1f) }

  LaunchedEffect(focusPoint) {
    // Restart both animations whenever the focus point moves.
    ringScale.snapTo(1.5f)
    ringAlpha.snapTo(1f)

    // Bouncy shrink toward the resting size.
    launch {
      ringScale.animateTo(
        targetValue = 0.8f,
        animationSpec = spring(
          dampingRatio = Spring.DampingRatioMediumBouncy,
          stiffness = Spring.StiffnessLow
        )
      )
    }

    // Fade the ring away once it has been visible for a moment.
    launch {
      delay(400L)
      ringAlpha.animateTo(
        targetValue = 0f,
        animationSpec = tween(durationMillis = 400)
      )
    }
  }

  Box(
    modifier = modifier.drawBehind {
      drawCircle(
        color = Color.White.copy(alpha = ringAlpha.value),
        radius = 40.dp.toPx() * ringScale.value,
        center = focusPoint,
        style = Stroke(width = 2.dp.toPx())
      )
    }
  )
}
|
||||
|
||||
/**
 * White overlay used as a selfie flash for front camera photos.
 * Fades in quickly when shown, fades out when hidden.
 */
@Composable
private fun SelfieFlashOverlay(visible: Boolean) {
  AnimatedVisibility(
    visible = visible,
    enter = fadeIn(animationSpec = tween(durationMillis = 100)),
    exit = fadeOut(animationSpec = tween(durationMillis = 200))
  ) {
    val flashModifier = Modifier
      .fillMaxSize()
      .background(Color.White.copy(alpha = 0.95f))

    Box(modifier = flashModifier)
  }
}
|
||||
|
||||
// Design-time previews of [CameraScreen] across representative form factors.

@Preview(name = "Phone", showBackground = true, backgroundColor = 0xFF000000)
@Composable
private fun CameraScreenPreview() {
  Previews.Preview {
    CameraScreen(state = CameraScreenState(), emitter = {})
  }
}

@Preview(name = "Phone - Small", showBackground = true, backgroundColor = 0xFF000000, widthDp = 320, heightDp = 568)
@Composable
private fun CameraScreenPreviewSmallPhone() {
  Previews.Preview {
    CameraScreen(state = CameraScreenState(), emitter = {})
  }
}

@Preview(name = "Tablet", showBackground = true, backgroundColor = 0xFF000000, widthDp = 600, heightDp = 960)
@Composable
private fun CameraScreenPreviewTablet() {
  Previews.Preview {
    CameraScreen(state = CameraScreenState(), emitter = {})
  }
}

@Preview(name = "Landscape", showBackground = true, backgroundColor = 0xFF000000, widthDp = 840, heightDp = 400)
@Composable
private fun CameraScreenPreviewLandscape() {
  Previews.Preview {
    CameraScreen(state = CameraScreenState(), emitter = {})
  }
}

@Preview(name = "Foldable", showBackground = true, backgroundColor = 0xFF000000, widthDp = 673, heightDp = 841)
@Composable
private fun CameraScreenPreviewFoldable() {
  Previews.Preview {
    CameraScreen(state = CameraScreenState(), emitter = {})
  }
}
|
||||
@@ -0,0 +1,44 @@
|
||||
package org.signal.camera
|
||||
|
||||
import android.content.Context
|
||||
import androidx.annotation.FloatRange
|
||||
import androidx.camera.core.Preview
|
||||
import androidx.camera.lifecycle.ProcessCameraProvider
|
||||
import androidx.lifecycle.LifecycleOwner
|
||||
|
||||
/**
 * Events emitted by [CameraScreen] for the parent — typically a
 * [CameraScreenViewModel] — to handle.
 */
sealed interface CameraScreenEvents {

  /** Binds a camera to a surface provider. */
  data class BindCamera(
    val lifecycleOwner: LifecycleOwner,
    val cameraProvider: ProcessCameraProvider,
    val surfaceProvider: Preview.SurfaceProvider,
    val context: Context
  ) : CameraScreenEvents

  /** Focuses the camera on a point. [x]/[y] are tap coordinates within a viewport of [width] x [height]. */
  data class TapToFocus(
    val x: Float,
    val y: Float,
    val width: Float,
    val height: Float
  ) : CameraScreenEvents

  /** Zoom that happens when you pinch your fingers. */
  data class PinchZoom(val zoomFactor: Float) : CameraScreenEvents

  /** Zoom that happens when you move your finger up and down during recording. */
  data class LinearZoom(@param:FloatRange(from = 0.0, to = 1.0) val linearZoom: Float) : CameraScreenEvents

  /** Switches between available cameras (i.e. front and rear cameras). */
  data class SwitchCamera(val context: Context) : CameraScreenEvents

  /** Sets the flash to a specific mode. */
  data class SetFlashMode(val flashMode: FlashMode) : CameraScreenEvents

  /** Moves the flash to the next available mode. */
  data object NextFlashMode : CameraScreenEvents

  /** Indicates the capture error has been handled and can be cleared. */
  data object ClearCaptureError : CameraScreenEvents
}
|
||||
@@ -0,0 +1,48 @@
|
||||
package org.signal.camera
|
||||
|
||||
import androidx.camera.core.CameraSelector
|
||||
import androidx.camera.core.ImageCapture
|
||||
import androidx.compose.ui.geometry.Offset
|
||||
|
||||
/**
 * State for CameraScreen.
 * Contains UI-related state for camera functionality.
 */
data class CameraScreenState(
  // Last tap-to-focus location, in viewfinder coordinates; null until the user taps.
  val focusPoint: Offset? = null,
  // Whether the focus ring is currently shown at [focusPoint].
  val showFocusIndicator: Boolean = false,
  // Which camera is active; a CameraSelector.LENS_FACING_* constant.
  val lensFacing: Int = CameraSelector.LENS_FACING_BACK,
  // Current zoom ratio applied to the camera (1f = no zoom).
  val zoomRatio: Float = 1f,
  // Current flash mode for photo capture.
  val flashMode: FlashMode = FlashMode.Off,
  // True while a video recording is in progress.
  val isRecording: Boolean = false,
  // Elapsed recording time, in milliseconds.
  val recordingDuration: Long = 0L,
  // Whether the shutter animation is currently visible.
  val showShutter: Boolean = false,
  // Whether the white selfie-flash overlay is currently visible.
  val showSelfieFlash: Boolean = false,
  // Most recent capture failure, or null when there is none.
  val captureError: CaptureError? = null
)
|
||||
|
||||
/** Failure reasons surfaced through [CameraScreenState.captureError]. */
sealed interface CaptureError {
  /** The camera failed to take the picture. */
  data class PhotoCaptureFailed(val message: String?) : CaptureError
  /** Processing of a captured photo failed. */
  data class PhotoProcessingFailed(val message: String?) : CaptureError
}
|
||||
|
||||
/**
 * Flash mode for the camera. Each value carries the matching
 * CameraX [ImageCapture] flash-mode constant in [cameraxMode].
 */
enum class FlashMode(val cameraxMode: Int) {
  Off(ImageCapture.FLASH_MODE_OFF),
  On(ImageCapture.FLASH_MODE_ON),
  Auto(ImageCapture.FLASH_MODE_AUTO);

  /**
   * Returns the next flash mode in the cycle: OFF -> ON -> AUTO -> OFF
   */
  fun next(): FlashMode = entries[(ordinal + 1) % entries.size]
}
|
||||
|
||||
@@ -0,0 +1,576 @@
|
||||
package org.signal.camera
|
||||
|
||||
import android.content.Context
|
||||
import android.graphics.Bitmap
|
||||
import android.graphics.Matrix
|
||||
import android.os.Build
|
||||
import android.os.VibrationEffect
|
||||
import android.os.Vibrator
|
||||
import androidx.camera.core.AspectRatio
|
||||
import androidx.camera.core.Camera
|
||||
import androidx.camera.core.CameraSelector
|
||||
import androidx.camera.core.FocusMeteringAction
|
||||
import androidx.camera.core.ImageAnalysis
|
||||
import androidx.camera.core.ImageCapture
|
||||
import androidx.camera.core.ImageCaptureException
|
||||
import androidx.camera.core.ImageProxy
|
||||
import androidx.camera.core.Preview
|
||||
import androidx.camera.core.SurfaceOrientedMeteringPointFactory
|
||||
import androidx.camera.core.resolutionselector.AspectRatioStrategy
|
||||
import androidx.camera.core.resolutionselector.ResolutionSelector
|
||||
import androidx.camera.lifecycle.ProcessCameraProvider
|
||||
import androidx.camera.video.Recorder
|
||||
import androidx.camera.video.Recording
|
||||
import androidx.camera.video.VideoCapture
|
||||
import androidx.camera.video.VideoRecordEvent
|
||||
import androidx.compose.runtime.MutableState
|
||||
import androidx.compose.runtime.State
|
||||
import androidx.compose.runtime.mutableStateOf
|
||||
import androidx.compose.ui.geometry.Offset
|
||||
import androidx.core.content.ContextCompat
|
||||
import androidx.lifecycle.LifecycleOwner
|
||||
import androidx.lifecycle.ViewModel
|
||||
import androidx.lifecycle.viewModelScope
|
||||
import com.google.zxing.BarcodeFormat
|
||||
import com.google.zxing.BinaryBitmap
|
||||
import com.google.zxing.DecodeHintType
|
||||
import com.google.zxing.MultiFormatReader
|
||||
import com.google.zxing.NotFoundException
|
||||
import com.google.zxing.PlanarYUVLuminanceSource
|
||||
import com.google.zxing.common.HybridBinarizer
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.delay
|
||||
import kotlinx.coroutines.flow.Flow
|
||||
import kotlinx.coroutines.flow.MutableSharedFlow
|
||||
import kotlinx.coroutines.launch
|
||||
import kotlinx.coroutines.withContext
|
||||
import org.signal.core.util.logging.Log
|
||||
import org.signal.core.util.throttleLatest
|
||||
import java.util.EnumMap
|
||||
import java.util.concurrent.Executors
|
||||
import kotlin.time.Duration.Companion.seconds
|
||||
|
||||
private const val TAG = "CameraScreenViewModel"
|
||||
|
||||
class CameraScreenViewModel : ViewModel() {
|
||||
  companion object {
    // Dedicated single-thread executor for the QR-code ImageAnalysis use case.
    private val imageAnalysisExecutor = Executors.newSingleThreadExecutor()
  }

  // Backing state for the screen; exposed read-only through [state].
  private val _state: MutableState<CameraScreenState> = mutableStateOf(CameraScreenState())
  val state: State<CameraScreenState>
    get() = _state

  // References cached by handleBindCameraEvent after a successful bind.
  private var camera: Camera? = null
  private var lifecycleOwner: LifecycleOwner? = null
  private var cameraProvider: ProcessCameraProvider? = null
  private var imageCapture: ImageCapture? = null
  private var videoCapture: VideoCapture<Recorder>? = null
  // Active video recording, if any; cleared on finalize or stop.
  private var recording: Recording? = null

  private val _qrCodeDetected = MutableSharedFlow<String>(extraBufferCapacity = 1)

  /**
   * Flow of detected QR codes. Observers can collect from this flow to receive QR code detections.
   * The flow is throttled (2s) to avoid rapid-fire detections of the same code.
   */
  val qrCodeDetected: Flow<String> = _qrCodeDetected.throttleLatest(2.seconds)

  // ZXing reader configured to look only for QR codes, with TRY_HARDER enabled.
  private val qrCodeReader = MultiFormatReader().apply {
    val hints = EnumMap<DecodeHintType, Any>(DecodeHintType::class.java)
    hints[DecodeHintType.POSSIBLE_FORMATS] = listOf(BarcodeFormat.QR_CODE)
    hints[DecodeHintType.TRY_HARDER] = true
    setHints(hints)
  }
|
||||
|
||||
fun onEvent(event: CameraScreenEvents) {
|
||||
val currentState = _state.value
|
||||
when (event) {
|
||||
is CameraScreenEvents.BindCamera -> {
|
||||
handleBindCameraEvent(currentState, event)
|
||||
}
|
||||
is CameraScreenEvents.TapToFocus -> {
|
||||
handleTapToFocusEvent(currentState, event)
|
||||
}
|
||||
is CameraScreenEvents.PinchZoom -> {
|
||||
handlePinchZoomEvent(currentState, event)
|
||||
}
|
||||
is CameraScreenEvents.LinearZoom -> {
|
||||
handleSetLinearZoomEvent(currentState, event.linearZoom)
|
||||
}
|
||||
is CameraScreenEvents.SwitchCamera -> {
|
||||
handleSwitchCameraEvent(currentState)
|
||||
}
|
||||
is CameraScreenEvents.SetFlashMode -> {
|
||||
handleSetFlashModeEvent(currentState, event.flashMode)
|
||||
}
|
||||
is CameraScreenEvents.NextFlashMode -> {
|
||||
handleSetFlashModeEvent(currentState, currentState.flashMode.next())
|
||||
}
|
||||
is CameraScreenEvents.ClearCaptureError -> {
|
||||
handleClearCaptureErrorEvent(currentState)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Capture a photo.
|
||||
* If using front camera with flash enabled but no hardware flash available,
|
||||
* uses a selfie flash (white screen overlay) to illuminate the subject.
|
||||
*/
|
||||
@androidx.annotation.OptIn(markerClass = [androidx.camera.core.ExperimentalGetImage::class])
|
||||
fun capturePhoto(
|
||||
context: Context,
|
||||
onPhotoCaptured: (Bitmap) -> Unit,
|
||||
) {
|
||||
val state = _state.value
|
||||
val capture = imageCapture ?: run {
|
||||
_state.value = state.copy(captureError = CaptureError.PhotoCaptureFailed("Camera not ready"))
|
||||
return
|
||||
}
|
||||
|
||||
val needsSelfieFlash = state.lensFacing == CameraSelector.LENS_FACING_FRONT &&
|
||||
state.flashMode == FlashMode.On
|
||||
|
||||
if (needsSelfieFlash) {
|
||||
captureWithSelfieFlash(context, capture, state, onPhotoCaptured)
|
||||
} else {
|
||||
capturePhotoInternal(context, capture, state, onPhotoCaptured)
|
||||
}
|
||||
}
|
||||
|
||||
private fun captureWithSelfieFlash(
|
||||
context: Context,
|
||||
capture: ImageCapture,
|
||||
state: CameraScreenState,
|
||||
onPhotoCaptured: (Bitmap) -> Unit
|
||||
) {
|
||||
// Show selfie flash
|
||||
_state.value = state.copy(showSelfieFlash = true)
|
||||
|
||||
// Wait for screen to brighten, then capture
|
||||
viewModelScope.launch {
|
||||
delay(150L) // Give screen time to brighten
|
||||
capturePhotoInternal(context, capture, _state.value, onPhotoCaptured)
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Takes the picture via [ImageCapture.takePicture] and delivers the processed [Bitmap]
   * to [onPhotoCaptured]. Front-camera captures are mirrored so the result matches the
   * preview. Failures are surfaced through [CameraScreenState.captureError], and the
   * selfie-flash overlay (if showing) is dismissed on both success and failure.
   */
  @androidx.annotation.OptIn(markerClass = [androidx.camera.core.ExperimentalGetImage::class])
  private fun capturePhotoInternal(
    context: Context,
    capture: ImageCapture,
    state: CameraScreenState,
    onPhotoCaptured: (Bitmap) -> Unit
  ) {
    // Vibrate for haptic feedback
    vibrate(context)

    capture.takePicture(
      ContextCompat.getMainExecutor(context),
      object : ImageCapture.OnImageCapturedCallback() {
        override fun onCaptureSuccess(imageProxy: ImageProxy) {
          // Heavy bitmap work happens in a coroutine; the callback itself runs on main.
          viewModelScope.launch {
            try {
              // Convert ImageProxy to Bitmap, mirroring for front camera to match preview
              val mirrorImage = state.lensFacing == CameraSelector.LENS_FACING_FRONT
              val bitmap = imageProxy.toBitmapWithTransforms(mirrorHorizontally = mirrorImage)
              // Pass bitmap to callback
              triggerShutter(state)
              onPhotoCaptured(bitmap)
            } catch (e: Exception) {
              Log.e(TAG, "Failed to process image: ${e.message}", e)
              _state.value = state.copy(captureError = CaptureError.PhotoCaptureFailed(e.message))
            } finally {
              // Always release the image buffer and dismiss the selfie flash, even on failure.
              imageProxy.close()
              hideSelfieFlash()
            }
          }
        }

        override fun onError(e: ImageCaptureException) {
          Log.e(TAG, "Photo capture failed: ${e.message}", e)
          _state.value = state.copy(captureError = CaptureError.PhotoCaptureFailed(e.message))
          hideSelfieFlash()
        }
      }
    )
  }
|
||||
|
||||
private fun hideSelfieFlash() {
|
||||
if (_state.value.showSelfieFlash) {
|
||||
_state.value = _state.value.copy(showSelfieFlash = false)
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Start video recording.
   *
   * Prepares a recording against the previously bound [videoCapture] use case — writing
   * to either a file or a file descriptor depending on [output] — and reports the final
   * result through [onVideoCaptured]. No-ops if the camera has not been bound yet.
   */
  @androidx.annotation.OptIn(markerClass = [androidx.camera.core.ExperimentalGetImage::class])
  @android.annotation.SuppressLint("MissingPermission", "RestrictedApi", "NewApi")
  fun startRecording(
    context: Context,
    output: VideoOutput,
    onVideoCaptured: (VideoCaptureResult) -> Unit
  ) {
    val capture = videoCapture ?: return

    // Prepare recording based on configuration
    val pendingRecording = when (output) {
      is VideoOutput.FileOutput -> {
        val fileOutputOptions = androidx.camera.video.FileOutputOptions.Builder(output.file).build()
        capture.output.prepareRecording(context, fileOutputOptions)
      }
      is VideoOutput.FileDescriptorOutput -> {
        val fileDescriptorOutputOptions = androidx.camera.video.FileDescriptorOutputOptions.Builder(
          output.fileDescriptor
        ).build()
        capture.output.prepareRecording(context, fileDescriptorOutputOptions)
      }
    }

    val activeRecording = pendingRecording
      .withAudioEnabled()
      .start(ContextCompat.getMainExecutor(context)) { recordEvent ->
        // Only Start and Finalize are interesting here; other events are ignored.
        when (recordEvent) {
          is VideoRecordEvent.Start -> {
            Log.d(TAG, "Video recording started")
            startRecordingTimer()
            vibrate(context)
          }
          is VideoRecordEvent.Finalize -> {
            val result = if (!recordEvent.hasError()) {
              Log.d(TAG, "Video recording succeeded")
              when (output) {
                is VideoOutput.FileOutput -> {
                  VideoCaptureResult.Success(outputFile = output.file)
                }
                is VideoOutput.FileDescriptorOutput -> {
                  VideoCaptureResult.Success(fileDescriptor = output.fileDescriptor)
                }
              }
            } else {
              Log.e(TAG, "Video recording failed: ${recordEvent.error}")
              // On failure the descriptor (if any) is handed back so the caller can clean up.
              val fileDescriptor = (output as? VideoOutput.FileDescriptorOutput)?.fileDescriptor
              VideoCaptureResult.Error(
                message = "Video recording failed",
                throwable = recordEvent.cause,
                fileDescriptor = fileDescriptor
              )
            }

            // Call the callback
            onVideoCaptured(result)
            stopRecordingTimer()

            // Clear recording
            recording = null
          }
        }
      }

    recording = activeRecording
  }
|
||||
|
||||
/**
|
||||
* Stop video recording.
|
||||
*/
|
||||
fun stopRecording() {
|
||||
recording?.stop()
|
||||
recording = null
|
||||
}
|
||||
|
||||
override fun onCleared() {
|
||||
super.onCleared()
|
||||
stopRecording()
|
||||
}
|
||||
|
||||
  /**
   * Builds the preview, image-capture, video-capture, and QR-analysis use cases and binds
   * them all to [CameraScreenEvents.BindCamera.lifecycleOwner] for the lens selected in
   * [state]. Any previously bound use cases are unbound first. On success the bound
   * [camera] and use-case references are cached for later event handlers.
   */
  private fun handleBindCameraEvent(
    state: CameraScreenState,
    event: CameraScreenEvents.BindCamera
  ) {
    val resolutionSelector = ResolutionSelector.Builder()
      .setAspectRatioStrategy(AspectRatioStrategy.RATIO_16_9_FALLBACK_AUTO_STRATEGY)
      .build()

    // Preview with 16:9 aspect ratio - uses Compose Viewfinder
    val preview = Preview.Builder()
      .setResolutionSelector(resolutionSelector)
      .build()
      .also { it.surfaceProvider = event.surfaceProvider }

    // Image capture with 16:9 aspect ratio (optimized for speed)
    val imageCaptureUseCase = ImageCapture.Builder()
      .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
      .setResolutionSelector(resolutionSelector)
      .build()

    // Video capture (16:9 is default for video)
    val recorder = Recorder.Builder()
      .setAspectRatio(AspectRatio.RATIO_16_9)
      .setQualitySelector(
        androidx.camera.video.QualitySelector.from(
          androidx.camera.video.Quality.HIGHEST,
          androidx.camera.video.FallbackStrategy.higherQualityOrLowerThan(androidx.camera.video.Quality.HD)
        )
      )
      .build()
    val videoCaptureUseCase = VideoCapture.withOutput(recorder)

    // Image analysis for QR code detection; frames are analyzed on a dedicated executor.
    val imageAnalysisUseCase = ImageAnalysis.Builder()
      .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
      .build()
      .also {
        it.setAnalyzer(imageAnalysisExecutor) { imageProxy ->
          processImageForQrCode(imageProxy)
        }
      }

    // Select camera based on lensFacing
    val cameraSelector = CameraSelector.Builder()
      .requireLensFacing(state.lensFacing)
      .build()

    try {
      // Unbind use cases before rebinding
      event.cameraProvider.unbindAll()

      // Bind use cases to camera
      camera = event.cameraProvider.bindToLifecycle(
        event.lifecycleOwner,
        cameraSelector,
        preview,
        imageCaptureUseCase,
        videoCaptureUseCase,
        imageAnalysisUseCase
      )

      lifecycleOwner = event.lifecycleOwner
      cameraProvider = event.cameraProvider
      imageCapture = imageCaptureUseCase
      videoCapture = videoCaptureUseCase
    } catch (e: Exception) {
      // Binding can fail if the device lacks the requested lens or use-case combination.
      Log.e(TAG, "Use case binding failed", e)
    }
  }
|
||||
|
||||
private fun handleTapToFocusEvent(
|
||||
state: CameraScreenState,
|
||||
event: CameraScreenEvents.TapToFocus
|
||||
) {
|
||||
val currentCamera = camera ?: return
|
||||
|
||||
val factory = SurfaceOrientedMeteringPointFactory(event.width, event.height)
|
||||
val point = factory.createPoint(event.x, event.y)
|
||||
val action = FocusMeteringAction.Builder(point).build()
|
||||
|
||||
currentCamera.cameraControl.startFocusAndMetering(action)
|
||||
|
||||
_state.value = state.copy(
|
||||
focusPoint = Offset(event.x, event.y),
|
||||
showFocusIndicator = true
|
||||
)
|
||||
|
||||
// Hide indicator after animation
|
||||
viewModelScope.launch {
|
||||
delay(800L) // Duration for spring animation + fade out
|
||||
_state.value = _state.value.copy(showFocusIndicator = false)
|
||||
}
|
||||
}
|
||||
|
||||
private fun handlePinchZoomEvent(
|
||||
state: CameraScreenState,
|
||||
event: CameraScreenEvents.PinchZoom
|
||||
) {
|
||||
val currentCamera = camera ?: return
|
||||
|
||||
// Get current zoom ratio and calculate new zoom
|
||||
val currentZoom = state.zoomRatio
|
||||
val newZoom = (currentZoom * event.zoomFactor).coerceIn(
|
||||
currentCamera.cameraInfo.zoomState.value?.minZoomRatio ?: 1f,
|
||||
currentCamera.cameraInfo.zoomState.value?.maxZoomRatio ?: 1f
|
||||
)
|
||||
|
||||
// Apply zoom to camera
|
||||
currentCamera.cameraControl.setZoomRatio(newZoom)
|
||||
|
||||
// Update state
|
||||
_state.value = state.copy(zoomRatio = newZoom)
|
||||
}
|
||||
|
||||
private fun handleSwitchCameraEvent(state: CameraScreenState) {
|
||||
// Toggle between front and back camera
|
||||
val newLensFacing = if (state.lensFacing == CameraSelector.LENS_FACING_BACK) {
|
||||
CameraSelector.LENS_FACING_FRONT
|
||||
} else {
|
||||
CameraSelector.LENS_FACING_BACK
|
||||
}
|
||||
|
||||
_state.value = state.copy(lensFacing = newLensFacing)
|
||||
}
|
||||
|
||||
private fun handleSetFlashModeEvent(
|
||||
state: CameraScreenState,
|
||||
flashMode: FlashMode
|
||||
) {
|
||||
_state.value = state.copy(flashMode = flashMode)
|
||||
|
||||
imageCapture?.flashMode = flashMode.cameraxMode
|
||||
}
|
||||
|
||||
private fun handleSetLinearZoomEvent(
|
||||
state: CameraScreenState,
|
||||
linearZoom: Float
|
||||
) {
|
||||
val currentCamera = camera ?: return
|
||||
|
||||
// Clamp linear zoom to valid range
|
||||
val clampedLinearZoom = linearZoom.coerceIn(0f, 1f)
|
||||
|
||||
// CameraX setLinearZoom takes 0.0-1.0 and maps to min-max zoom ratio
|
||||
currentCamera.cameraControl.setLinearZoom(clampedLinearZoom)
|
||||
|
||||
// Calculate the actual zoom ratio for state tracking
|
||||
val minZoom = currentCamera.cameraInfo.zoomState.value?.minZoomRatio ?: 1f
|
||||
val maxZoom = currentCamera.cameraInfo.zoomState.value?.maxZoomRatio ?: 1f
|
||||
val newZoomRatio = minZoom + (maxZoom - minZoom) * clampedLinearZoom
|
||||
|
||||
_state.value = state.copy(zoomRatio = newZoomRatio)
|
||||
}
|
||||
|
||||
private fun triggerShutter(state: CameraScreenState) {
|
||||
_state.value = state.copy(showShutter = true)
|
||||
|
||||
// Hide flash after animation
|
||||
viewModelScope.launch {
|
||||
delay(200L)
|
||||
_state.value = _state.value.copy(showShutter = false)
|
||||
}
|
||||
}
|
||||
|
||||
private fun handleClearCaptureErrorEvent(state: CameraScreenState) {
|
||||
_state.value = state.copy(captureError = null)
|
||||
}
|
||||
|
||||
private fun startRecordingTimer() {
|
||||
_state.value = _state.value.copy(isRecording = true, recordingDuration = 0L)
|
||||
|
||||
viewModelScope.launch {
|
||||
while (_state.value.isRecording) {
|
||||
delay(100L)
|
||||
_state.value = _state.value.copy(recordingDuration = _state.value.recordingDuration + 100L)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Resets the recording flag and duration; the timer loop started by the recording
 * coroutine observes the flag and exits on its next tick.
 */
private fun stopRecordingTimer() {
  _state.value = _state.value.copy(isRecording = false, recordingDuration = 0L)
}
|
||||
|
||||
/**
 * Converts this [ImageProxy] to a [Bitmap], applying the capture rotation reported by
 * [ImageProxy.getImageInfo] and, optionally, a horizontal mirror (presumably for
 * front-camera captures — confirm at call sites). Runs on [Dispatchers.Default].
 */
@androidx.annotation.OptIn(markerClass = [androidx.camera.core.ExperimentalGetImage::class])
private suspend fun ImageProxy.toBitmapWithTransforms(mirrorHorizontally: Boolean = false): Bitmap = withContext(Dispatchers.Default) {
  val decoded = this@toBitmapWithTransforms.toBitmap()
  val rotationDegrees = this@toBitmapWithTransforms.imageInfo.rotationDegrees

  if (rotationDegrees == 0 && !mirrorHorizontally) {
    decoded
  } else {
    val transform = Matrix().apply {
      if (rotationDegrees != 0) {
        postRotate(rotationDegrees.toFloat())
      }
      if (mirrorHorizontally) {
        // Flip around the vertical axis through the bitmap center.
        postScale(-1f, 1f, decoded.width / 2f, decoded.height / 2f)
      }
    }
    Bitmap.createBitmap(decoded, 0, 0, decoded.width, decoded.height, transform, true)
  }
}
|
||||
|
||||
/**
 * Scans a single camera frame for a QR code using only the luminance (Y) plane.
 *
 * Emits the decoded text to [_qrCodeDetected] when a code is found. The [imageProxy]
 * is always closed before returning (now in a `finally`, so the frame is released and
 * the analyzer can deliver the next one even if an unexpected throwable escapes).
 */
@androidx.annotation.OptIn(markerClass = [androidx.camera.core.ExperimentalGetImage::class])
private fun processImageForQrCode(imageProxy: ImageProxy) {
  val mediaImage = imageProxy.image
  if (mediaImage == null) {
    imageProxy.close()
    return
  }

  try {
    // Extract the Y (luminance) plane; ZXing only needs grayscale data.
    val yPlane = mediaImage.planes[0]
    val yBuffer = yPlane.buffer
    val yData = ByteArray(yBuffer.remaining())
    yBuffer.get(yData)

    val width = mediaImage.width
    val height = mediaImage.height
    val rowStride = yPlane.rowStride
    val pixelStride = yPlane.pixelStride

    // When the plane is tightly packed we can hand the buffer to ZXing directly;
    // otherwise compact it row by row to strip padding between rows/pixels.
    val source = if (rowStride == width && pixelStride == 1) {
      PlanarYUVLuminanceSource(yData, width, height, 0, 0, width, height, false)
    } else {
      val packed = ByteArray(width * height)
      var out = 0
      for (row in 0 until height) {
        val rowStart = row * rowStride
        for (col in 0 until width) {
          packed[out++] = yData[rowStart + col * pixelStride]
        }
      }
      PlanarYUVLuminanceSource(packed, width, height, 0, 0, width, height, false)
    }

    val binaryBitmap = BinaryBitmap(HybridBinarizer(source))

    try {
      val result = qrCodeReader.decodeWithState(binaryBitmap)
      _qrCodeDetected.tryEmit(result.text)
    } catch (_: NotFoundException) {
      // No QR code in this frame — expected for most frames.
    } finally {
      // Reset the reader's internal state whether or not a code was found
      // (replaces the two duplicated reset() calls in the original).
      qrCodeReader.reset()
    }
  } catch (e: Exception) {
    Log.e(TAG, "Error processing image for QR code: ${e.message}", e)
  } finally {
    imageProxy.close()
  }
}
|
||||
|
||||
/**
 * Emits a short (50 ms) haptic pulse, using [VibrationEffect] on API 26+ and the
 * deprecated duration-only API on older devices.
 */
private fun vibrate(context: Context) {
  val vibrator = context.getSystemService(Context.VIBRATOR_SERVICE) as? Vibrator ?: return
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
    vibrator.vibrate(VibrationEffect.createOneShot(50, 75))
  } else {
    @Suppress("DEPRECATION")
    vibrator.vibrate(50)
  }
}
|
||||
}
|
||||
@@ -0,0 +1,48 @@
|
||||
package org.signal.camera
|
||||
|
||||
import android.os.ParcelFileDescriptor
|
||||
import java.io.File
|
||||
|
||||
/**
 * Destination for captured video.
 *
 * The consumer chooses where recordings are written; this library never picks a
 * location on its own.
 */
sealed class VideoOutput {
  /**
   * Write the recording into [file].
   *
   * The caller creates the file beforehand and owns its lifecycle (including deletion).
   */
  data class FileOutput(val file: File) : VideoOutput()

  /**
   * Write the recording through [fileDescriptor].
   *
   * Useful for pipes, sockets, or any other descriptor-backed sink. The caller is
   * responsible for closing the descriptor when recording finishes.
   */
  data class FileDescriptorOutput(val fileDescriptor: ParcelFileDescriptor) : VideoOutput()
}
|
||||
|
||||
/**
 * Outcome of a video capture attempt.
 */
sealed class VideoCaptureResult {
  /**
   * The video was captured and saved.
   *
   * @param outputFile The destination file when a [VideoOutput.FileOutput] was used
   * @param fileDescriptor The descriptor used when a [VideoOutput.FileDescriptorOutput] was used
   */
  data class Success(
    val outputFile: File? = null,
    val fileDescriptor: ParcelFileDescriptor? = null
  ) : VideoCaptureResult()

  /**
   * The capture failed.
   *
   * @param message Human-readable failure description, if any
   * @param throwable Underlying cause, if any
   * @param fileDescriptor The descriptor that was in use, surfaced so the caller can clean it up
   */
  data class Error(
    val message: String?,
    val throwable: Throwable?,
    val fileDescriptor: ParcelFileDescriptor? = null
  ) : VideoCaptureResult()
}
|
||||
@@ -0,0 +1,387 @@
|
||||
/*
|
||||
* Copyright 2026 Signal Messenger, LLC
|
||||
* SPDX-License-Identifier: AGPL-3.0-only
|
||||
*/
|
||||
|
||||
package org.signal.camera.hud
|
||||
|
||||
import androidx.compose.animation.core.Animatable
|
||||
import androidx.compose.animation.core.Spring
|
||||
import androidx.compose.animation.core.spring
|
||||
import androidx.compose.foundation.Canvas
|
||||
import androidx.compose.foundation.gestures.awaitEachGesture
|
||||
import androidx.compose.foundation.gestures.awaitFirstDown
|
||||
import androidx.compose.foundation.layout.Box
|
||||
import androidx.compose.foundation.layout.size
|
||||
import androidx.compose.runtime.Composable
|
||||
import androidx.compose.runtime.LaunchedEffect
|
||||
import androidx.compose.runtime.getValue
|
||||
import androidx.compose.runtime.mutableStateOf
|
||||
import androidx.compose.runtime.remember
|
||||
import androidx.compose.runtime.setValue
|
||||
import androidx.compose.ui.Alignment
|
||||
import androidx.compose.ui.Modifier
|
||||
import androidx.compose.ui.geometry.Offset
|
||||
import androidx.compose.ui.graphics.graphicsLayer
|
||||
import androidx.compose.ui.geometry.Size
|
||||
import androidx.compose.ui.graphics.Color
|
||||
import androidx.compose.ui.graphics.StrokeCap
|
||||
import androidx.compose.ui.graphics.drawscope.DrawScope
|
||||
import androidx.compose.ui.graphics.drawscope.Stroke
|
||||
import androidx.compose.ui.input.pointer.pointerInput
|
||||
import androidx.compose.ui.platform.LocalDensity
|
||||
import androidx.compose.ui.tooling.preview.Preview
|
||||
import androidx.compose.ui.unit.Dp
|
||||
import androidx.compose.ui.unit.dp
|
||||
import kotlin.math.min
|
||||
|
||||
/**
 * Color palette for the capture button, mirroring the values in the legacy
 * CameraButtonView.java implementation.
 */
private object CaptureButtonColors {
  /** Translucent white (30% alpha) backdrop behind the button. */
  val Background = Color(0x4CFFFFFF)

  /** Solid white ring drawn around the button in photo mode. */
  val Arc = Color.White

  /** Solid white inner disc in photo mode. */
  val CaptureFill = Color.White

  /** Faint black (15% alpha) outline shown while recording. */
  val Outline = Color(0x26000000)

  /** Material red dot shown while recording. */
  val Record = Color(0xFFF44336)

  /** White arc tracking recording progress. */
  val Progress = Color.White
}
|
||||
|
||||
/**
 * Stroke widths and sizes for the capture button, mirroring CameraButtonView.java.
 */
private object CaptureButtonDimensions {
  /** Ring stroke width in photo mode: 3.5dp. */
  val CaptureArcStrokeWidth = 3.5.dp

  /** Outline stroke width in video mode: 4dp. */
  val OutlineStrokeWidth = 4.dp

  /** Progress arc stroke width: 4dp. */
  val ProgressArcStrokeWidth = 4.dp

  /** Inset between the outer ring and the inner fill disc: 10dp. */
  val CaptureFillProtection = 10.dp

  /** Default overall button diameter. */
  val DefaultButtonSize = 80.dp

  /** Default inner capture disc diameter (photo mode). */
  val DefaultImageCaptureSize = 60.dp

  /** Default red recording indicator diameter. */
  val DefaultRecordSize = 24.dp
}
|
||||
|
||||
/**
 * Multiplier applied to the button height to derive the full drag range used when
 * mapping an upward drag to a zoom level. Mirrors DRAG_DISTANCE_MULTIPLIER = 3
 * from the legacy CameraButtonView.
 */
private const val DRAG_DISTANCE_MULTIPLIER = 3

/**
 * Fraction of the button height treated as a no-op deadzone before zoom dragging
 * takes effect. Mirrors DEADZONE_REDUCTION_PERCENT = 0.35f from the legacy
 * CameraButtonView.
 */
private const val DEADZONE_REDUCTION_PERCENT = 0.35f
|
||||
|
||||
/**
 * A capture button supporting photo capture (tap) and video recording (long press),
 * mimicking the behavior and appearance of the legacy CameraButtonView:
 * - Idle: a white-filled circle with a white arc outline
 * - Recording: an enlarged circle with a red indicator and a progress arc
 *
 * While recording, dragging the pointer upward past a deadzone adjusts zoom.
 *
 * Fix over the previous revision: the `startY` local was written on long-press start
 * but never read (zoom is computed from the deadzone boundary, not the press origin),
 * so it has been removed as dead code.
 *
 * @param modifier Modifier to be applied to the button
 * @param isRecording Whether video recording is currently active
 * @param recordingProgress Recording progress from 0f to 1f (for the progress arc)
 * @param imageCaptureSize Diameter of the inner capture circle in photo mode
 * @param recordSize Diameter of the red recording indicator circle
 * @param onTap Callback for tap gesture (photo capture)
 * @param onLongPressStart Callback when long press begins (video recording start)
 * @param onLongPressEnd Callback when long press ends (video recording stop)
 * @param onZoomChange Callback for zoom level changes during recording (0f to 1f)
 */
@Composable
fun CaptureButton(
  modifier: Modifier = Modifier,
  isRecording: Boolean,
  recordingProgress: Float = 0f,
  imageCaptureSize: Dp = CaptureButtonDimensions.DefaultImageCaptureSize,
  recordSize: Dp = CaptureButtonDimensions.DefaultRecordSize,
  onTap: () -> Unit,
  onLongPressStart: () -> Unit,
  onLongPressEnd: () -> Unit,
  onZoomChange: (Float) -> Unit
) {
  var isPressed by remember { mutableStateOf(false) }
  val scale = remember { Animatable(1f) }

  // Press/recording feedback: grow while recording, shrink slightly while pressed.
  LaunchedEffect(isPressed, isRecording) {
    val targetScale = when {
      isRecording -> 1.3f
      isPressed -> 0.9f
      else -> 1f
    }

    scale.animateTo(
      targetValue = targetScale,
      animationSpec = spring(
        dampingRatio = Spring.DampingRatioMediumBouncy,
        stiffness = if (isRecording) Spring.StiffnessLow else Spring.StiffnessMedium
      )
    )
  }

  val density = LocalDensity.current
  val imageCaptureRadius = with(density) { imageCaptureSize.toPx() / 2f }
  val recordRadius = with(density) { recordSize.toPx() / 2f }
  val captureArcStroke = with(density) { CaptureButtonDimensions.CaptureArcStrokeWidth.toPx() }
  val outlineStroke = with(density) { CaptureButtonDimensions.OutlineStrokeWidth.toPx() }
  val progressStroke = with(density) { CaptureButtonDimensions.ProgressArcStrokeWidth.toPx() }
  val fillProtection = with(density) { CaptureButtonDimensions.CaptureFillProtection.toPx() }

  Box(
    modifier = modifier
      .size(CaptureButtonDimensions.DefaultButtonSize)
      .graphicsLayer {
        scaleX = scale.value
        scaleY = scale.value
      }
      .pointerInput(Unit) {
        awaitEachGesture {
          val down = awaitFirstDown(requireUnconsumed = false)
          isPressed = true

          var longPressTriggered = false
          val pressStartTime = System.currentTimeMillis()
          val longPressTimeoutMs = viewConfiguration.longPressTimeoutMillis

          // Zoom geometry: positions above the deadzone map onto the drag range.
          val deadzoneTop = size.height * DEADZONE_REDUCTION_PERCENT / 2f
          val maxRange = size.height * DRAG_DISTANCE_MULTIPLIER

          try {
            while (true) {
              // Poll with a timeout so long-press can fire even when the pointer is still.
              val event = withTimeoutOrNull(50) { awaitPointerEvent() }

              if (event != null) {
                val currentPointer = event.changes.firstOrNull { it.id == down.id }

                if (currentPointer == null || !currentPointer.pressed) {
                  // Finger lifted: a short press is a photo, a long press ends recording.
                  if (!longPressTriggered) {
                    onTap()
                  } else {
                    onLongPressEnd()
                  }
                  break
                }

                // Promote to long press once the timeout elapses.
                val elapsed = System.currentTimeMillis() - pressStartTime
                if (!longPressTriggered && elapsed >= longPressTimeoutMs) {
                  longPressTriggered = true
                  onLongPressStart()
                }

                // Dragging above the deadzone while recording adjusts zoom.
                if (longPressTriggered) {
                  val isAboveDeadzone = currentPointer.position.y < deadzoneTop
                  if (isAboveDeadzone) {
                    val deltaY = (deadzoneTop - currentPointer.position.y).coerceAtLeast(0f)
                    val zoomPercent = (deltaY / maxRange).coerceIn(0f, 1f)
                    // Decelerate interpolation matches the legacy CameraButtonView feel.
                    onZoomChange(decelerateInterpolation(zoomPercent))
                  }
                }

                currentPointer.consume()
              } else {
                // No event within the poll window — still check for long-press timeout.
                val elapsed = System.currentTimeMillis() - pressStartTime
                if (!longPressTriggered && elapsed >= longPressTimeoutMs) {
                  longPressTriggered = true
                  onLongPressStart()
                }
              }
            }
          } finally {
            isPressed = false
          }
        }
      },
    contentAlignment = Alignment.Center
  ) {
    Canvas(modifier = Modifier.matchParentSize()) {
      if (isRecording) {
        drawForVideoCapture(
          recordRadius = recordRadius,
          outlineStroke = outlineStroke,
          progressStroke = progressStroke,
          progressPercent = recordingProgress
        )
      } else {
        drawForImageCapture(
          captureRadius = imageCaptureRadius,
          arcStroke = captureArcStroke,
          fillProtection = fillProtection
        )
      }
    }
  }
}
|
||||
|
||||
/**
 * Decelerate interpolation matching Android's DecelerateInterpolator (factor 1):
 * `1 - (1 - input)^2`. Fast at the start, slowing toward 1.
 */
private fun decelerateInterpolation(input: Float): Float {
  val remaining = 1f - input
  return 1f - remaining * remaining
}
|
||||
|
||||
/**
 * Renders the photo-mode button: translucent backdrop, white ring, and a white inner
 * disc inset by [fillProtection] to create the ring gap.
 *
 * NOTE(review): [captureRadius] is currently unused — the geometry is derived from the
 * canvas size instead; confirm whether the inner disc was meant to use it.
 */
private fun DrawScope.drawForImageCapture(
  captureRadius: Float,
  arcStroke: Float,
  fillProtection: Float
) {
  val center = Offset(size.width / 2f, size.height / 2f)
  val outerRadius = min(center.x, center.y)

  // Translucent backdrop.
  drawCircle(
    color = CaptureButtonColors.Background,
    radius = outerRadius,
    center = center
  )

  // White ring outline.
  drawCircle(
    color = CaptureButtonColors.Arc,
    radius = outerRadius,
    center = center,
    style = Stroke(width = arcStroke)
  )

  // Inner disc, inset to leave a visible gap under the ring.
  drawCircle(
    color = CaptureButtonColors.CaptureFill,
    radius = outerRadius - fillProtection,
    center = center
  )
}
|
||||
|
||||
/**
 * Renders the recording-mode button: translucent backdrop, faint outline, red record
 * dot, and — when [progressPercent] > 0 — a white progress arc starting at 12 o'clock.
 */
private fun DrawScope.drawForVideoCapture(
  recordRadius: Float,
  outlineStroke: Float,
  progressStroke: Float,
  progressPercent: Float
) {
  val center = Offset(size.width / 2f, size.height / 2f)
  val outerRadius = min(center.x, center.y)

  // Translucent backdrop.
  drawCircle(
    color = CaptureButtonColors.Background,
    radius = outerRadius,
    center = center
  )

  // Faint outline ring.
  drawCircle(
    color = CaptureButtonColors.Outline,
    radius = outerRadius,
    center = center,
    style = Stroke(width = outlineStroke)
  )

  // Red record indicator dot.
  drawCircle(
    color = CaptureButtonColors.Record,
    radius = recordRadius,
    center = center
  )

  if (progressPercent > 0f) {
    // Inset the arc bounds by half the stroke so the stroke stays inside the canvas.
    val inset = progressStroke / 2f
    drawArc(
      color = CaptureButtonColors.Progress,
      startAngle = -90f,
      sweepAngle = 360f * progressPercent,
      useCenter = false,
      topLeft = Offset(inset, inset),
      size = Size(size.width - progressStroke, size.height - progressStroke),
      style = Stroke(width = progressStroke, cap = StrokeCap.Round)
    )
  }
}
|
||||
|
||||
/** Preview of the idle (photo-mode) capture button. */
@Preview(name = "Idle State", showBackground = true, backgroundColor = 0xFF444444)
@Composable
private fun CaptureButtonIdlePreview() {
  Box(
    modifier = Modifier.size(120.dp),
    contentAlignment = Alignment.Center
  ) {
    CaptureButton(
      isRecording = false,
      onTap = {},
      onLongPressStart = {},
      onLongPressEnd = {},
      onZoomChange = {}
    )
  }
}
|
||||
|
||||
/** Preview of the recording state with no progress yet. */
@Preview(name = "Recording State", showBackground = true, backgroundColor = 0xFF444444)
@Composable
private fun CaptureButtonRecordingPreview() {
  Box(
    modifier = Modifier.size(120.dp),
    contentAlignment = Alignment.Center
  ) {
    CaptureButton(
      isRecording = true,
      recordingProgress = 0f,
      onTap = {},
      onLongPressStart = {},
      onLongPressEnd = {},
      onZoomChange = {}
    )
  }
}
|
||||
|
||||
/** Preview of the recording state with the progress arc partially filled. */
@Preview(name = "Recording with Progress", showBackground = true, backgroundColor = 0xFF444444)
@Composable
private fun CaptureButtonRecordingWithProgressPreview() {
  Box(
    modifier = Modifier.size(120.dp),
    contentAlignment = Alignment.Center
  ) {
    CaptureButton(
      isRecording = true,
      recordingProgress = 0.65f,
      onTap = {},
      onLongPressStart = {},
      onLongPressEnd = {},
      onZoomChange = {}
    )
  }
}
|
||||
@@ -0,0 +1,230 @@
|
||||
/*
|
||||
* Copyright 2026 Signal Messenger, LLC
|
||||
* SPDX-License-Identifier: AGPL-3.0-only
|
||||
*/
|
||||
|
||||
package org.signal.camera.hud
|
||||
|
||||
import android.Manifest
|
||||
import android.content.ContentUris
|
||||
import android.content.Context
|
||||
import android.content.pm.PackageManager
|
||||
import android.net.Uri
|
||||
import android.os.Build
|
||||
import android.provider.MediaStore
|
||||
import androidx.compose.foundation.background
|
||||
import androidx.compose.foundation.border
|
||||
import androidx.compose.foundation.clickable
|
||||
import androidx.compose.foundation.layout.Box
|
||||
import androidx.compose.foundation.layout.size
|
||||
import androidx.compose.foundation.shape.CircleShape
|
||||
import androidx.compose.runtime.Composable
|
||||
import androidx.compose.runtime.LaunchedEffect
|
||||
import androidx.compose.runtime.getValue
|
||||
import androidx.compose.runtime.mutableStateOf
|
||||
import androidx.compose.runtime.remember
|
||||
import androidx.compose.runtime.setValue
|
||||
import androidx.compose.ui.Alignment
|
||||
import androidx.compose.ui.Modifier
|
||||
import androidx.compose.ui.draw.clip
|
||||
import androidx.compose.ui.graphics.Color
|
||||
import androidx.compose.ui.platform.LocalContext
|
||||
import androidx.compose.ui.unit.dp
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import org.signal.glide.compose.GlideImage
|
||||
import org.signal.glide.compose.GlideImageScaleType
|
||||
import kotlinx.coroutines.withContext
|
||||
|
||||
/**
 * A circular, white-bordered button showing a thumbnail of the most recent gallery item
 * (image or video, whichever is newer). Falls back to a plain grey disc when no media
 * is found or media permissions are missing.
 *
 * @param modifier Modifier to apply to the button
 * @param onClick Invoked when the button is tapped (typically opens the gallery)
 */
@Composable
fun GalleryThumbnailButton(
  modifier: Modifier = Modifier,
  onClick: () -> Unit
) {
  val context = LocalContext.current
  var latestMediaUri by remember { mutableStateOf<Uri?>(null) }

  // Query once per composition for the newest media item.
  LaunchedEffect(Unit) {
    latestMediaUri = getLatestMediaUri(context)
  }

  Box(
    modifier = modifier
      .size(52.dp)
      .clip(CircleShape)
      .border(2.dp, Color.White, CircleShape)
      .background(Color.Black.copy(alpha = 0.3f), CircleShape)
      .clickable(onClick = onClick),
    contentAlignment = Alignment.Center
  ) {
    val uri = latestMediaUri
    if (uri != null) {
      GlideImage(
        model = uri,
        scaleType = GlideImageScaleType.CENTER_CROP,
        modifier = Modifier
          .size(52.dp)
          .clip(CircleShape)
      )
    } else {
      // No thumbnail available — show a neutral placeholder disc.
      Box(
        modifier = Modifier
          .size(52.dp)
          .background(Color.Gray.copy(alpha = 0.5f), CircleShape)
      )
    }
  }
}
|
||||
|
||||
/**
 * Returns the URI of the most recently added image or video, or null when none exists,
 * permission is missing, or the query fails. Runs on [Dispatchers.IO].
 */
private suspend fun getLatestMediaUri(context: Context): Uri? = withContext(Dispatchers.IO) {
  try {
    val imageUri = getLatestImageUri(context)
    val videoUri = getLatestVideoUri(context)

    if (imageUri == null || videoUri == null) {
      // At most one candidate — return it (or null if neither exists).
      imageUri ?: videoUri
    } else {
      // Both exist: pick whichever was added more recently; ties go to the image.
      val imageTime = getMediaTimestamp(context, imageUri) ?: 0L
      val videoTime = getMediaTimestamp(context, videoUri) ?: 0L
      if (imageTime >= videoTime) imageUri else videoUri
    }
  } catch (e: SecurityException) {
    // Media permission not granted — treat as "no media".
    null
  } catch (e: Exception) {
    // Any other query failure is non-fatal for a thumbnail.
    null
  }
}
|
||||
|
||||
/**
 * Returns the URI of the most recently added image in external storage, or null if
 * there are none.
 */
private fun getLatestImageUri(context: Context): Uri? {
  context.contentResolver.query(
    MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
    arrayOf(MediaStore.Images.Media._ID, MediaStore.Images.Media.DATE_ADDED),
    null,
    null,
    "${MediaStore.Images.Media.DATE_ADDED} DESC"
  )?.use { cursor ->
    if (!cursor.moveToFirst()) {
      return null
    }
    val id = cursor.getLong(cursor.getColumnIndexOrThrow(MediaStore.Images.Media._ID))
    return ContentUris.withAppendedId(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, id)
  }
  return null
}
|
||||
|
||||
/**
 * Returns the URI of the most recently added video in external storage, or null if
 * there are none.
 */
private fun getLatestVideoUri(context: Context): Uri? {
  context.contentResolver.query(
    MediaStore.Video.Media.EXTERNAL_CONTENT_URI,
    arrayOf(MediaStore.Video.Media._ID, MediaStore.Video.Media.DATE_ADDED),
    null,
    null,
    "${MediaStore.Video.Media.DATE_ADDED} DESC"
  )?.use { cursor ->
    if (!cursor.moveToFirst()) {
      return null
    }
    val id = cursor.getLong(cursor.getColumnIndexOrThrow(MediaStore.Video.Media._ID))
    return ContentUris.withAppendedId(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, id)
  }
  return null
}
|
||||
|
||||
/** Returns the DATE_ADDED timestamp for [uri], or null when it cannot be read. */
private fun getMediaTimestamp(context: Context, uri: Uri): Long? {
  context.contentResolver.query(
    uri,
    arrayOf(MediaStore.MediaColumns.DATE_ADDED),
    null,
    null,
    null
  )?.use { cursor ->
    if (cursor.moveToFirst()) {
      return cursor.getLong(cursor.getColumnIndexOrThrow(MediaStore.MediaColumns.DATE_ADDED))
    }
  }
  return null
}
|
||||
|
||||
/**
 * Whether the app can read media files on this device.
 *
 * On Android 13+ (API 33+) either READ_MEDIA_IMAGES or READ_MEDIA_VIDEO suffices;
 * on older versions READ_EXTERNAL_STORAGE is required.
 */
fun hasMediaPermissions(context: Context): Boolean {
  fun granted(permission: String): Boolean =
    context.checkSelfPermission(permission) == PackageManager.PERMISSION_GRANTED

  return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
    granted(Manifest.permission.READ_MEDIA_IMAGES) || granted(Manifest.permission.READ_MEDIA_VIDEO)
  } else {
    granted(Manifest.permission.READ_EXTERNAL_STORAGE)
  }
}
|
||||
|
||||
/**
 * Permissions required to read gallery media on the current OS version:
 * READ_MEDIA_IMAGES / READ_MEDIA_VIDEO on Android 13+, READ_EXTERNAL_STORAGE before.
 */
fun getMediaPermissions(): Array<String> =
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
    arrayOf(
      Manifest.permission.READ_MEDIA_IMAGES,
      Manifest.permission.READ_MEDIA_VIDEO
    )
  } else {
    arrayOf(Manifest.permission.READ_EXTERNAL_STORAGE)
  }
|
||||
@@ -0,0 +1,492 @@
|
||||
/*
|
||||
* Copyright 2026 Signal Messenger, LLC
|
||||
* SPDX-License-Identifier: AGPL-3.0-only
|
||||
*/
|
||||
|
||||
package org.signal.camera.hud
|
||||
|
||||
import android.content.res.Configuration
|
||||
import android.widget.Toast
|
||||
import androidx.annotation.StringRes
|
||||
import androidx.compose.animation.AnimatedVisibility
|
||||
import androidx.compose.animation.core.tween
|
||||
import androidx.compose.animation.fadeIn
|
||||
import androidx.compose.animation.fadeOut
|
||||
import androidx.compose.foundation.background
|
||||
import androidx.compose.foundation.border
|
||||
import androidx.compose.foundation.clickable
|
||||
import androidx.compose.foundation.layout.Box
|
||||
import androidx.compose.foundation.layout.BoxScope
|
||||
import androidx.compose.foundation.layout.Row
|
||||
import androidx.compose.foundation.layout.fillMaxHeight
|
||||
import androidx.compose.foundation.layout.fillMaxSize
|
||||
import androidx.compose.foundation.layout.fillMaxWidth
|
||||
import androidx.compose.foundation.layout.height
|
||||
import androidx.compose.foundation.layout.padding
|
||||
import androidx.compose.foundation.layout.size
|
||||
import androidx.compose.foundation.shape.CircleShape
|
||||
import androidx.compose.foundation.shape.RoundedCornerShape
|
||||
import androidx.compose.material3.Icon
|
||||
import androidx.compose.material3.IconButton
|
||||
import androidx.compose.material3.MaterialTheme
|
||||
import androidx.compose.material3.Text
|
||||
import androidx.compose.runtime.Composable
|
||||
import androidx.compose.runtime.LaunchedEffect
|
||||
import androidx.compose.ui.Alignment
|
||||
import androidx.compose.ui.Modifier
|
||||
import androidx.compose.ui.draw.clip
|
||||
import androidx.compose.ui.graphics.Color
|
||||
import androidx.compose.ui.platform.LocalConfiguration
|
||||
import androidx.compose.ui.platform.LocalContext
|
||||
import androidx.compose.ui.platform.LocalDensity
|
||||
import androidx.compose.ui.text.font.FontWeight
|
||||
import androidx.compose.ui.tooling.preview.Preview
|
||||
import androidx.compose.ui.unit.dp
|
||||
import androidx.compose.ui.unit.sp
|
||||
import org.signal.camera.CameraScreenState
|
||||
import org.signal.camera.CaptureError
|
||||
import org.signal.camera.FlashMode
|
||||
import org.signal.core.ui.compose.SignalIcons
|
||||
import java.util.Locale
|
||||
|
||||
/** Default maximum recording duration: 60 seconds. */
const val DEFAULT_MAX_RECORDING_DURATION_MS = 60_000L

/**
 * String resource IDs used by the standard HUD for toasts and labels.
 *
 * Defaults of 0 are placeholders (0 is not a valid resource ID); callers must supply
 * real IDs for any string the HUD will actually display.
 */
data class StringResources(
  @param:StringRes val photoCaptureFailed: Int = 0,
  @param:StringRes val photoProcessingFailed: Int = 0,
  @param:StringRes val switchCamera: Int = 0,
  @param:StringRes val flashOff: Int = 0,
  @param:StringRes val flashOn: Int = 0,
  @param:StringRes val flashAuto: Int = 0,
  @param:StringRes val send: Int = 0
)
|
||||
|
||||
/**
 * A standard camera HUD providing common controls: flash toggle, capture button
 * (tap for photo, long press for video), camera switch, gallery access, recording
 * duration display, and a shutter-flash overlay.
 *
 * Designed to be used as the content of [org.signal.camera.CameraScreen]:
 *
 * ```kotlin
 * CameraScreen(
 *   state = viewModel.state.value,
 *   emitter = { viewModel.onEvent(it) }
 * ) {
 *   StandardCameraHud(
 *     state = viewModel.state.value,
 *     maxRecordingDurationMs = 30_000L,
 *     emitter = { event ->
 *       when (event) {
 *         is CameraHudEvents.PhotoCaptured -> savePhoto(event.bitmap)
 *         is CameraHudEvents.VideoCaptured -> handleVideo(event.result)
 *         is CameraHudEvents.GalleryClick -> openGallery()
 *       }
 *     }
 *   )
 * }
 * ```
 *
 * @param state The current camera screen state
 * @param emitter Callback for HUD events (photo captured, video captured, gallery click)
 * @param maxRecordingDurationMs Maximum video recording duration in milliseconds (for the progress indicator)
 * @param mediaSelectionCount Number of media items currently selected (shows a count indicator when > 0)
 * @param stringResources Resource IDs for user-visible strings. Callers must provide
 *   valid IDs for the error strings: passing the placeholder 0 to Toast.makeText
 *   throws Resources.NotFoundException when a capture error is shown.
 */
@Composable
fun BoxScope.StandardCameraHud(
  state: CameraScreenState,
  emitter: (StandardCameraHudEvents) -> Unit,
  modifier: Modifier = Modifier,
  maxRecordingDurationMs: Long = DEFAULT_MAX_RECORDING_DURATION_MS,
  mediaSelectionCount: Int = 0,
  stringResources: StringResources = StringResources()
) {
  val context = LocalContext.current

  // Surface capture errors as a one-shot toast, then clear them from state.
  LaunchedEffect(state.captureError) {
    state.captureError?.let { error ->
      val message = when (error) {
        is CaptureError.PhotoCaptureFailed -> stringResources.photoCaptureFailed
        is CaptureError.PhotoProcessingFailed -> stringResources.photoProcessingFailed
      }
      Toast.makeText(context, message, Toast.LENGTH_SHORT).show()
      emitter(StandardCameraHudEvents.ClearCaptureError)
    }
  }

  // Auto-stop recording when the configured maximum duration is reached.
  LaunchedEffect(state.isRecording, state.recordingDuration, maxRecordingDurationMs) {
    if (state.isRecording && maxRecordingDurationMs > 0 && state.recordingDuration >= maxRecordingDurationMs) {
      emitter(StandardCameraHudEvents.VideoCaptureStopped)
    }
  }

  StandardCameraHudContent(
    state = state,
    emitter = emitter,
    modifier = modifier,
    maxRecordingDurationMs = maxRecordingDurationMs,
    mediaSelectionCount = mediaSelectionCount,
    stringResources = stringResources
  )
}
|
||||
|
||||
/**
 * Stateless layout for the standard camera HUD: shutter flash overlay, flash toggle,
 * recording timer, and the capture/switch/gallery control cluster.
 */
@Composable
private fun BoxScope.StandardCameraHudContent(
  state: CameraScreenState,
  emitter: (StandardCameraHudEvents) -> Unit,
  modifier: Modifier = Modifier,
  maxRecordingDurationMs: Long = DEFAULT_MAX_RECORDING_DURATION_MS,
  mediaSelectionCount: Int = 0,
  stringResources: StringResources = StringResources()
) {
  val landscape = LocalConfiguration.current.orientation == Configuration.ORIENTATION_LANDSCAPE

  // Brief black flash played over the viewfinder when a photo is captured.
  ShutterOverlay(state.showShutter)

  // Flash toggle lives in the top corner; which corner depends on orientation.
  FlashToggleButton(
    flashMode = state.flashMode,
    onToggle = { emitter(StandardCameraHudEvents.ToggleFlash) },
    stringResources = stringResources,
    modifier = Modifier
      .align(if (landscape) Alignment.TopStart else Alignment.TopEnd)
      .padding(16.dp)
  )

  if (state.isRecording) {
    RecordingDurationDisplay(
      durationMillis = state.recordingDuration,
      modifier = Modifier
        .align(Alignment.TopCenter)
        .padding(top = 16.dp)
    )
  }

  // Fraction of the maximum recording time that has elapsed; 0 when the cap is disabled.
  val progress = when {
    maxRecordingDurationMs > 0 -> (state.recordingDuration.toFloat() / maxRecordingDurationMs).coerceIn(0f, 1f)
    else -> 0f
  }

  CameraControls(
    isLandscape = landscape,
    isRecording = state.isRecording,
    recordingProgress = progress,
    mediaSelectionCount = mediaSelectionCount,
    emitter = emitter,
    stringResources = stringResources,
    modifier = modifier.align(if (landscape) Alignment.CenterEnd else Alignment.BottomCenter)
  )
}
|
||||
|
||||
/**
 * Full-screen black overlay that blinks on photo capture: a quick fade in (50 ms)
 * followed by a slower fade out (200 ms), clipped to the viewfinder's rounded corners.
 */
@Composable
private fun ShutterOverlay(showFlash: Boolean) {
  AnimatedVisibility(
    visible = showFlash,
    enter = fadeIn(animationSpec = tween(durationMillis = 50)),
    exit = fadeOut(animationSpec = tween(durationMillis = 200))
  ) {
    val overlayModifier = Modifier
      .fillMaxSize()
      .clip(RoundedCornerShape(16.dp))
      .background(Color.Black)
    Box(modifier = overlayModifier)
  }
}
|
||||
|
||||
/**
 * Camera control buttons layout with center element always truly centered
 * and side elements at fixed distances from edges.
 *
 * In landscape the controls form a vertical rail along the end edge
 * (gallery/count on top, capture in the middle, camera switch at the bottom);
 * in portrait they form a horizontal bar along the bottom edge
 * (camera switch at the start, capture in the middle, gallery/count at the end).
 *
 * @param isLandscape Whether the device is currently in landscape orientation
 * @param isRecording Whether video recording is in progress (drives the capture button state)
 * @param recordingProgress Recording progress in [0, 1] shown by the capture button
 * @param mediaSelectionCount Number of selected media items; when > 0 a count indicator replaces the gallery thumbnail
 * @param emitter Callback for the HUD events triggered by these controls
 * @param stringResources Resource ids used for accessibility content descriptions
 */
@Composable
private fun CameraControls(
  isLandscape: Boolean,
  isRecording: Boolean,
  recordingProgress: Float,
  mediaSelectionCount: Int,
  emitter: (StandardCameraHudEvents) -> Unit,
  stringResources: StringResources,
  modifier: Modifier = Modifier
) {
  // Gallery slot: shows the selected-media count when anything is selected,
  // otherwise the gallery thumbnail shortcut.
  val galleryOrMediaCount: @Composable () -> Unit = {
    if (mediaSelectionCount > 0) {
      MediaCountIndicator(
        count = mediaSelectionCount,
        onClick = { emitter(StandardCameraHudEvents.MediaSelectionClick) },
        stringResources = stringResources
      )
    } else {
      GalleryThumbnailButton(onClick = { emitter(StandardCameraHudEvents.GalleryClick) })
    }
  }

  // Capture slot: tap triggers a photo, long-press starts/stops video, and zoom
  // changes during the gesture are forwarded as SetZoomLevel events.
  val captureButton: @Composable () -> Unit = {
    CaptureButton(
      isRecording = isRecording,
      recordingProgress = recordingProgress,
      onTap = { emitter(StandardCameraHudEvents.PhotoCaptureTriggered) },
      onLongPressStart = { emitter(StandardCameraHudEvents.VideoCaptureStarted) },
      onLongPressEnd = { emitter(StandardCameraHudEvents.VideoCaptureStopped) },
      onZoomChange = { emitter(StandardCameraHudEvents.SetZoomLevel(it)) }
    )
  }

  // Front/back camera switch slot.
  val cameraSwitchButton: @Composable () -> Unit = {
    CameraSwitchButton(
      onClick = { emitter(StandardCameraHudEvents.SwitchCamera) },
      stringResources = stringResources
    )
  }

  if (isLandscape) {
    // Landscape: aligning the three slots to Top/Center/Bottom of a full-height Box
    // keeps the capture button truly centered regardless of the other slots' sizes.
    Box(
      modifier = modifier
        .fillMaxHeight()
        .padding(end = 16.dp, top = 40.dp, bottom = 40.dp)
    ) {
      Box(modifier = Modifier.align(Alignment.TopCenter)) {
        galleryOrMediaCount()
      }
      Box(modifier = Modifier.align(Alignment.Center)) {
        captureButton()
      }
      Box(modifier = Modifier.align(Alignment.BottomCenter)) {
        cameraSwitchButton()
      }
    }
  } else {
    // Portrait: same centering trick along the horizontal axis of a full-width Box.
    Box(
      modifier = modifier
        .fillMaxWidth()
        .padding(bottom = 16.dp, start = 40.dp, end = 40.dp)
    ) {
      Box(modifier = Modifier.align(Alignment.CenterStart)) {
        cameraSwitchButton()
      }
      Box(modifier = Modifier.align(Alignment.Center)) {
        captureButton()
      }
      Box(modifier = Modifier.align(Alignment.CenterEnd)) {
        galleryOrMediaCount()
      }
    }
  }
}
|
||||
|
||||
/** Pill-shaped MM:SS elapsed-time readout shown while video is recording. */
@Composable
private fun RecordingDurationDisplay(
  durationMillis: Long,
  modifier: Modifier = Modifier
) {
  val totalSeconds = durationMillis / 1000
  // Locale.US keeps the digits ASCII regardless of the device locale.
  val timeText = String.format(Locale.US, "%02d:%02d", totalSeconds / 60, totalSeconds % 60)

  Box(
    modifier = modifier
      .background(Color.Black.copy(alpha = 0.5f), shape = CircleShape)
      .padding(horizontal = 16.dp, vertical = 8.dp)
  ) {
    Text(
      text = timeText,
      color = Color.White,
      fontSize = 20.sp,
      fontWeight = FontWeight.Bold
    )
  }
}
|
||||
|
||||
/** Circular bordered button that switches between the front and back cameras. */
@Composable
private fun CameraSwitchButton(
  onClick: () -> Unit,
  stringResources: StringResources,
  modifier: Modifier = Modifier
) {
  val context = LocalContext.current

  // A resource id of 0 means "no string supplied"; omit the content description then.
  val contentDescription = stringResources.switchCamera
    .takeIf { it != 0 }
    ?.let(context::getString)

  IconButton(
    onClick = onClick,
    modifier = modifier
      .size(52.dp)
      .border(2.dp, Color.White, CircleShape)
      .background(MaterialTheme.colorScheme.surfaceVariant.copy(alpha = 0.38f), shape = CircleShape)
  ) {
    Icon(
      painter = SignalIcons.CameraSwitch.painter,
      contentDescription = contentDescription,
      tint = Color.White,
      modifier = Modifier.size(28.dp)
    )
  }
}
|
||||
|
||||
/** Circular button showing the current flash mode; tapping cycles to the next mode via [onToggle]. */
@Composable
private fun FlashToggleButton(
  flashMode: FlashMode,
  onToggle: () -> Unit,
  stringResources: StringResources,
  modifier: Modifier = Modifier
) {
  val context = LocalContext.current

  // Resolve the icon and its accessibility label together so the two can never disagree.
  val (icon, labelRes) = when (flashMode) {
    FlashMode.Off -> SignalIcons.FlashOff to stringResources.flashOff
    FlashMode.On -> SignalIcons.FlashOn to stringResources.flashOn
    FlashMode.Auto -> SignalIcons.FlashAuto to stringResources.flashAuto
  }

  // A resource id of 0 means "no string supplied"; omit the content description then.
  val contentDescription = labelRes.takeIf { it != 0 }?.let(context::getString)

  IconButton(
    onClick = onToggle,
    modifier = modifier
      .background(Color.Black.copy(alpha = 0.5f), shape = CircleShape)
  ) {
    Icon(
      painter = icon.painter,
      contentDescription = contentDescription,
      tint = Color.White,
      modifier = Modifier
        .padding(6.dp)
        .size(24.dp)
    )
  }
}
|
||||
|
||||
/** Signal "ultramarine" brand blue, used for the selected-media count badge in [MediaCountIndicator]. */
private val UltramarineBlue = Color(0xFF2C6BED)
|
||||
|
||||
/**
 * Pill-shaped indicator showing the number of selected media items, with a
 * chevron hinting that tapping advances to the next screen. Counts above 99
 * are rendered as "99+".
 */
@Composable
private fun MediaCountIndicator(
  count: Int,
  onClick: () -> Unit,
  stringResources: StringResources,
  modifier: Modifier = Modifier
) {
  // A resource id of 0 means "no string supplied"; omit the content description then.
  val contentDescription = if (stringResources.send != 0) {
    LocalContext.current.getString(stringResources.send)
  } else {
    null
  }

  Row(
    verticalAlignment = Alignment.CenterVertically,
    modifier = modifier
      .height(44.dp)
      .background(
        color = MaterialTheme.colorScheme.surfaceVariant,
        shape = RoundedCornerShape(32.dp)
      )
      .clip(RoundedCornerShape(32.dp))
      .clickable(onClick = onClick)
      .padding(horizontal = 12.dp)
  ) {
    // Size the badge from a font-relative unit so the count text always fits.
    val badgeSize = with(LocalDensity.current) { 22.sp.toDp() }
    val badgeText = if (count > 99) "99+" else count.toString()

    Box(
      contentAlignment = Alignment.Center,
      modifier = Modifier
        .background(color = UltramarineBlue, shape = CircleShape)
        .size(badgeSize)
    ) {
      Text(
        text = badgeText,
        color = Color.White,
        fontSize = 13.sp,
        fontWeight = FontWeight.Medium
      )
    }

    Icon(
      painter = SignalIcons.ChevronRight.painter,
      contentDescription = contentDescription,
      tint = MaterialTheme.colorScheme.onSurface,
      modifier = Modifier
        .padding(start = 3.dp)
        .size(24.dp)
    )
  }
}
|
||||
|
||||
// Compose preview: HUD in its idle state (not recording, nothing selected).
@Preview(name = "Default", showBackground = true, backgroundColor = 0xFF444444, widthDp = 360, heightDp = 640)
@Composable
private fun StandardCameraHudPreview() {
  Box(modifier = Modifier.fillMaxSize()) {
    StandardCameraHudContent(
      state = CameraScreenState(),
      emitter = {}
    )
  }
}
|
||||
|
||||
// Compose preview: HUD mid-recording (18 s elapsed of a 30 s cap) with flash on,
// exercising the timer readout and the capture button's progress ring.
@Preview(name = "Recording", showBackground = true, backgroundColor = 0xFF444444, widthDp = 360, heightDp = 640)
@Composable
private fun StandardCameraHudRecordingPreview() {
  Box(modifier = Modifier.fillMaxSize()) {
    StandardCameraHudContent(
      state = CameraScreenState(
        isRecording = true,
        recordingDuration = 18_000L,
        flashMode = FlashMode.On
      ),
      maxRecordingDurationMs = 30_000L,
      emitter = {}
    )
  }
}
|
||||
|
||||
// Compose preview: HUD with one media item selected, so the count indicator
// replaces the gallery thumbnail button.
@Preview(name = "With Media Selected", showBackground = true, backgroundColor = 0xFF444444, widthDp = 360, heightDp = 640)
@Composable
private fun StandardCameraHudWithMediaPreview() {
  Box(modifier = Modifier.fillMaxSize()) {
    StandardCameraHudContent(
      state = CameraScreenState(),
      mediaSelectionCount = 1,
      emitter = {}
    )
  }
}
|
||||
|
||||
// Compose preview: idle HUD in landscape, where the controls move to a vertical
// rail along the end edge and the flash toggle moves to the top-start corner.
@Preview(
  name = "Landscape",
  showBackground = true,
  backgroundColor = 0xFF444444,
  widthDp = 640,
  heightDp = 360,
  device = "spec:width=640dp,height=360dp,orientation=landscape"
)
@Composable
private fun StandardCameraHudLandscapePreview() {
  Box(modifier = Modifier.fillMaxSize()) {
    StandardCameraHudContent(
      state = CameraScreenState(),
      emitter = {}
    )
  }
}
|
||||
@@ -0,0 +1,40 @@
|
||||
package org.signal.camera.hud
|
||||
|
||||
import androidx.annotation.FloatRange
|
||||
|
||||
/**
 * Events emitted by camera HUD components like [StandardCameraHud].
 * The parent composable handles these events to respond to user actions.
 */
sealed interface StandardCameraHudEvents {

  /** Emitted when the capture button is tapped to take a photo. */
  data object PhotoCaptureTriggered : StandardCameraHudEvents

  /** Emitted when a long-press on the capture button begins video recording. */
  data object VideoCaptureStarted : StandardCameraHudEvents

  /**
   * Emitted when video recording should stop: the long-press on the capture
   * button ended, or the maximum recording duration was reached.
   */
  data object VideoCaptureStopped : StandardCameraHudEvents

  /** Emitted when the camera switch button is clicked to flip between front and back cameras. */
  data object SwitchCamera : StandardCameraHudEvents

  /** Emitted when the capture gesture's zoom changes; [zoomLevel] is normalized to 0..1. */
  data class SetZoomLevel(@param:FloatRange(from = 0.0, to = 1.0) val zoomLevel: Float) : StandardCameraHudEvents

  /**
   * Emitted when the gallery button is clicked.
   */
  data object GalleryClick : StandardCameraHudEvents

  /**
   * Emitted when the media selection indicator is clicked to advance to the next screen.
   */
  data object MediaSelectionClick : StandardCameraHudEvents

  /**
   * Emitted when the flash toggle button is clicked.
   */
  data object ToggleFlash : StandardCameraHudEvents

  /**
   * Emitted when a capture error should be cleared (after displaying to user).
   */
  data object ClearCaptureError : StandardCameraHudEvents
}
|
||||
Reference in New Issue
Block a user