Display audio levels for each participant in group calls.

This commit is contained in:
Rashad Sookram
2022-04-01 17:09:56 -04:00
committed by Cody Henthorne
parent a9f208153c
commit ec92d5ddb7
19 changed files with 379 additions and 32 deletions

View File

@@ -0,0 +1,130 @@
package org.thoughtcrime.securesms.components.webrtc
import android.animation.ValueAnimator
import android.content.Context
import android.graphics.Canvas
import android.graphics.Color
import android.graphics.Paint
import android.graphics.RectF
import android.util.AttributeSet
import android.view.View
import android.widget.FrameLayout
import org.signal.core.util.DimensionUnit
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.events.CallParticipant
import org.thoughtcrime.securesms.service.webrtc.WebRtcActionProcessor
import org.thoughtcrime.securesms.util.visible
/**
* An indicator shown for each participant in a call which shows the state of their audio.
*/
/**
 * An indicator shown for each participant in a call which shows the state of their audio:
 * a mic-muted icon when the microphone is off, or three animated vertical bars whose
 * heights track the participant's current [CallParticipant.AudioLevel].
 */
class AudioIndicatorView(context: Context, attrs: AttributeSet) : FrameLayout(context, attrs) {

  // Paint shared by all three level bars; anti-aliased solid white fill.
  private val barPaint = Paint(Paint.ANTI_ALIAS_FLAG).apply {
    style = Paint.Style.FILL
    color = Color.WHITE
  }

  // Reused for every bar to avoid allocating in onDraw.
  private val barRect = RectF()

  private val barWidth = DimensionUnit.DP.toPixels(4f)
  private val barRadius = DimensionUnit.DP.toPixels(32f)
  private val barPadding = DimensionUnit.DP.toPixels(4f)

  // Animators driving the current heights of the middle bar and the two outer bars.
  private var middleBarAnimation: ValueAnimator? = null
  private var sideBarAnimation: ValueAnimator? = null

  private var showAudioLevel = false
  private var lastAudioLevel: CallParticipant.AudioLevel? = null

  init {
    inflate(context, R.layout.audio_indicator_view, this)
    // ViewGroups skip onDraw by default; we draw the level bars ourselves.
    setWillNotDraw(false)
  }

  // Safe to resolve here: property initializers run in declaration order, after the
  // init block above has inflated the layout containing this view.
  private val micMuted: View = findViewById(R.id.mic_muted)

  /**
   * Updates the indicator for the given microphone state and audio level.
   *
   * @param microphoneEnabled whether the participant's microphone is enabled; when false the
   *        muted icon is shown and no level bars are drawn
   * @param level the participant's current audio level, or null when unknown
   */
  fun bind(microphoneEnabled: Boolean, level: CallParticipant.AudioLevel?) {
    micMuted.visible = !microphoneEnabled

    val wasShowingAudioLevel = showAudioLevel
    showAudioLevel = microphoneEnabled && level != null

    // Smart cast instead of the previous `level!!`: the compiler proves level is non-null here.
    if (microphoneEnabled && level != null) {
      // Exhaustive over the enum — no `else`, so a newly added level is a compile error.
      val scaleFactor = when (level) {
        CallParticipant.AudioLevel.LOWEST -> 0.2f
        CallParticipant.AudioLevel.LOW -> 0.4f
        CallParticipant.AudioLevel.MEDIUM -> 0.6f
        CallParticipant.AudioLevel.HIGH -> 0.8f
        CallParticipant.AudioLevel.HIGHEST -> 1.0f
      }

      // End the running animator first so createAnimation picks up its final height and the
      // new animation continues smoothly from where the old one stopped.
      middleBarAnimation?.end()
      middleBarAnimation = createAnimation(middleBarAnimation, height * scaleFactor)
      middleBarAnimation?.start()

      sideBarAnimation?.end()

      var finalHeight = height * scaleFactor
      if (level != CallParticipant.AudioLevel.LOWEST) {
        // Outer bars are drawn shorter than the middle bar except at the lowest level.
        finalHeight *= SIDE_BAR_SHRINK_FACTOR
      }
      sideBarAnimation = createAnimation(sideBarAnimation, finalHeight)
      sideBarAnimation?.start()
    }

    if (showAudioLevel != wasShowingAudioLevel || level != lastAudioLevel) {
      invalidate()
    }
    lastAudioLevel = level
  }

  /**
   * Builds an animator from the current height of [current] (or 0 if none) to [finalHeight],
   * lasting one audio-level reporting interval so bar motion keeps pace with level updates.
   */
  private fun createAnimation(current: ValueAnimator?, finalHeight: Float): ValueAnimator {
    val currentHeight = current?.animatedValue as? Float ?: 0f
    return ValueAnimator.ofFloat(currentHeight, finalHeight).apply {
      duration = WebRtcActionProcessor.AUDIO_LEVELS_INTERVAL.toLong()
    }
  }

  override fun onDraw(canvas: Canvas) {
    super.onDraw(canvas)

    val middleBarHeight = middleBarAnimation?.animatedValue as? Float
    val sideBarHeight = sideBarAnimation?.animatedValue as? Float

    if (showAudioLevel && middleBarHeight != null && sideBarHeight != null) {
      // Three bars with padding between them, centered horizontally in the view.
      val audioLevelWidth = 3 * barWidth + 2 * barPadding
      val xOffsetBase = (width - audioLevelWidth) / 2

      canvas.drawBar(
        xOffset = xOffsetBase,
        size = sideBarHeight
      )

      canvas.drawBar(
        xOffset = barPadding + barWidth + xOffsetBase,
        size = middleBarHeight
      )

      canvas.drawBar(
        xOffset = 2 * (barPadding + barWidth) + xOffsetBase,
        size = sideBarHeight
      )

      // ValueAnimator has no frame callback here; schedule another draw while animating.
      if (middleBarAnimation?.isRunning == true || sideBarAnimation?.isRunning == true) {
        invalidate()
      }
    }
  }

  /** Draws a single rounded bar of height [size], vertically centered, starting at [xOffset]. */
  private fun Canvas.drawBar(xOffset: Float, size: Float) {
    val yOffset = (height - size) / 2
    barRect.set(xOffset, yOffset, xOffset + barWidth, height - yOffset)
    drawRoundRect(barRect, barRadius, barRadius, barPaint)
  }

  companion object {
    // Outer bars render at 75% of the middle bar's height (except at the lowest level).
    private const val SIDE_BAR_SHRINK_FACTOR = 0.75f
  }
}

View File

@@ -62,7 +62,7 @@ public class CallParticipantView extends ConstraintLayout {
private ImageView pipAvatar;
private BadgeImageView pipBadge;
private ContactPhoto contactPhoto;
private View audioMuted;
private AudioIndicatorView audioIndicator;
private View infoOverlay;
private EmojiTextView infoMessage;
private Button infoMoreInfo;
@@ -90,7 +90,7 @@ public class CallParticipantView extends ConstraintLayout {
pipAvatar = findViewById(R.id.call_participant_item_pip_avatar);
rendererFrame = findViewById(R.id.call_participant_renderer_frame);
renderer = findViewById(R.id.call_participant_renderer);
audioMuted = findViewById(R.id.call_participant_mic_muted);
audioIndicator = findViewById(R.id.call_participant_audio_indicator);
infoOverlay = findViewById(R.id.call_participant_info_overlay);
infoIcon = findViewById(R.id.call_participant_info_icon);
infoMessage = findViewById(R.id.call_participant_info_message);
@@ -123,7 +123,7 @@ public class CallParticipantView extends ConstraintLayout {
rendererFrame.setVisibility(View.GONE);
renderer.setVisibility(View.GONE);
renderer.attachBroadcastVideoSink(null);
audioMuted.setVisibility(View.GONE);
audioIndicator.setVisibility(View.GONE);
avatar.setVisibility(View.GONE);
badge.setVisibility(View.GONE);
pipAvatar.setVisibility(View.GONE);
@@ -159,7 +159,8 @@ public class CallParticipantView extends ConstraintLayout {
renderer.attachBroadcastVideoSink(null);
}
audioMuted.setVisibility(participant.isMicrophoneEnabled() ? View.GONE : View.VISIBLE);
audioIndicator.setVisibility(View.VISIBLE);
audioIndicator.bind(participant.isMicrophoneEnabled(), participant.getAudioLevel());
}
if (participantChanged || !Objects.equals(contactPhoto, participant.getRecipient().getContactPhoto())) {

View File

@@ -13,6 +13,7 @@ import org.thoughtcrime.securesms.groups.ui.GroupMemberEntry
import org.thoughtcrime.securesms.recipients.Recipient
import org.thoughtcrime.securesms.ringrtc.CameraState
import org.thoughtcrime.securesms.service.webrtc.collections.ParticipantCollection
import org.thoughtcrime.securesms.service.webrtc.state.WebRtcEphemeralState
import java.util.concurrent.TimeUnit
/**
@@ -260,6 +261,15 @@ data class CallParticipantsState(
return oldState.copy(groupMembers = groupMembers)
}
/**
 * Returns a copy of [oldState] with the audio levels carried by [ephemeralState] applied
 * to the local participant, every remote participant, and the focused participant.
 */
@JvmStatic
fun update(oldState: CallParticipantsState, ephemeralState: WebRtcEphemeralState): CallParticipantsState {
  val remoteLevels = ephemeralState.remoteAudioLevels

  val updatedRemotes = oldState.remoteParticipants.map { participant ->
    participant.copy(audioLevel = remoteLevels[participant.callParticipantId])
  }
  val updatedLocal = oldState.localParticipant.copy(audioLevel = ephemeralState.localAudioLevel)

  val focused = oldState.focusedParticipant
  val updatedFocused = focused.copy(audioLevel = remoteLevels[focused.callParticipantId])

  return oldState.copy(
    remoteParticipants = updatedRemotes,
    localParticipant = updatedLocal,
    focusedParticipant = updatedFocused
  )
}
private fun determineLocalRenderMode(
oldState: CallParticipantsState,
localParticipant: CallParticipant = oldState.localParticipant,

View File

@@ -292,7 +292,7 @@ public class WebRtcCallView extends ConstraintLayout {
rotatableControls.add(videoToggle);
rotatableControls.add(cameraDirectionToggle);
rotatableControls.add(decline);
rotatableControls.add(smallLocalRender.findViewById(R.id.call_participant_mic_muted));
rotatableControls.add(smallLocalRender.findViewById(R.id.call_participant_audio_indicator));
rotatableControls.add(ringToggle);
largeHeaderConstraints = new ConstraintSet();

View File

@@ -30,6 +30,7 @@ import org.thoughtcrime.securesms.keyvalue.SignalStore;
import org.thoughtcrime.securesms.recipients.LiveRecipient;
import org.thoughtcrime.securesms.recipients.Recipient;
import org.thoughtcrime.securesms.recipients.RecipientId;
import org.thoughtcrime.securesms.service.webrtc.state.WebRtcEphemeralState;
import org.thoughtcrime.securesms.util.DefaultValueLiveData;
import org.thoughtcrime.securesms.util.SingleLiveEvent;
import org.thoughtcrime.securesms.util.Util;
@@ -66,6 +67,7 @@ public class WebRtcCallViewModel extends ViewModel {
private final MutableLiveData<Boolean> isLandscapeEnabled = new MutableLiveData<>();
private final LiveData<Integer> controlsRotation;
private final Observer<List<GroupMemberEntry.FullMember>> groupMemberStateUpdater = m -> participantsState.setValue(CallParticipantsState.update(participantsState.getValue(), m));
private final MutableLiveData<WebRtcEphemeralState> ephemeralState = new MutableLiveData<>();
private final Handler elapsedTimeHandler = new Handler(Looper.getMainLooper());
private final Runnable elapsedTimeRunnable = this::handleTick;
@@ -159,6 +161,10 @@ public class WebRtcCallViewModel extends ViewModel {
return shouldShowSpeakerHint;
}
/**
 * Observable ephemeral call state (e.g. per-participant audio levels), most recently
 * supplied via {@link #updateFromEphemeralState(WebRtcEphemeralState)}.
 */
public LiveData<WebRtcEphemeralState> getEphemeralState() {
return ephemeralState;
}
public boolean canEnterPipMode() {
return canEnterPipMode;
}
@@ -288,6 +294,11 @@ public class WebRtcCallViewModel extends ViewModel {
}
}
/**
 * Publishes a new ephemeral state snapshot to {@code ephemeralState} observers.
 * Must be called on the main thread, since it uses {@code setValue} rather than
 * {@code postValue}.
 */
@MainThread
public void updateFromEphemeralState(@NonNull WebRtcEphemeralState state) {
ephemeralState.setValue(state);
}
private int resolveRotation(boolean isLandscapeEnabled, @NonNull Orientation orientation) {
if (isLandscapeEnabled) {
return 0;