New Android 12+ audio route picker for calls.

This commit is contained in:
Nicholas
2023-04-03 16:40:18 -04:00
committed by Alex Hart
parent 99bd8e82ca
commit a0aeac767d
25 changed files with 700 additions and 341 deletions

View File

@@ -0,0 +1,8 @@
package org.thoughtcrime.securesms.components.webrtc
import androidx.annotation.RequiresApi
/**
 * Callback for audio-output route changes on API 31+ devices, where routes are identified
 * by the platform audio device id (see [android.media.AudioDeviceInfo]) rather than by a
 * coarse [WebRtcAudioOutput] enum value as in the legacy [OnAudioOutputChangedListener].
 */
@RequiresApi(31)
interface OnAudioOutputChangedListener31 {
  /** Invoked when the user selects a new output. [audioDeviceId] is the platform device id. */
  fun audioOutputChanged(audioDeviceId: Int)
}

View File

@@ -8,7 +8,8 @@ import org.thoughtcrime.securesms.R;
public enum WebRtcAudioOutput {
HANDSET(R.string.WebRtcAudioOutputToggle__phone_earpiece, R.drawable.ic_handset_solid_24),
SPEAKER(R.string.WebRtcAudioOutputToggle__speaker, R.drawable.symbol_speaker_fill_white_24),
HEADSET(R.string.WebRtcAudioOutputToggle__bluetooth, R.drawable.symbol_speaker_bluetooth_fill_white_24);
BLUETOOTH_HEADSET(R.string.WebRtcAudioOutputToggle__bluetooth, R.drawable.symbol_speaker_bluetooth_fill_white_24),
WIRED_HEADSET(R.string.WebRtcAudioOutputToggle__wired_headset, R.drawable.symbol_headphones_filed_24);
private final @StringRes int labelRes;
private final @DrawableRes int iconRes;

View File

@@ -0,0 +1,169 @@
package org.thoughtcrime.securesms.components.webrtc
import android.content.DialogInterface
import android.os.Bundle
import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.Row
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.height
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.wrapContentSize
import androidx.compose.foundation.selection.selectable
import androidx.compose.foundation.selection.selectableGroup
import androidx.compose.material3.Icon
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.RadioButton
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.saveable.rememberSaveable
import androidx.compose.runtime.setValue
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.res.painterResource
import androidx.compose.ui.res.stringResource
import androidx.compose.ui.semantics.Role
import androidx.compose.ui.tooling.preview.Preview
import androidx.compose.ui.unit.dp
import androidx.fragment.app.FragmentManager
import androidx.fragment.app.viewModels
import androidx.lifecycle.ViewModel
import kotlinx.collections.immutable.ImmutableList
import kotlinx.collections.immutable.toImmutableList
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.compose.ComposeBottomSheetDialogFragment
import org.thoughtcrime.securesms.util.BottomSheetUtil
import org.thoughtcrime.securesms.webrtc.audio.SignalAudioManager
/**
* A bottom sheet that allows the user to select what device they want to route audio to. Intended to be used with Android 31+ APIs.
*/
/**
 * A bottom sheet that allows the user to select what device they want to route audio to.
 * Intended to be used with Android 31+ APIs.
 *
 * Implements [DialogInterface] so callers can hold it alongside the pre-31 dialog picker
 * and dismiss either one uniformly.
 */
class WebRtcAudioOutputBottomSheet : ComposeBottomSheetDialogFragment(), DialogInterface {
  private val viewModel by viewModels<AudioOutputViewModel>()

  @Composable
  override fun SheetContent() {
    Column(
      horizontalAlignment = Alignment.CenterHorizontally,
      modifier = Modifier
        .padding(16.dp)
        .wrapContentSize()
    ) {
      Handle()
      DeviceList(audioOutputOptions = viewModel.audioRoutes.toImmutableList(), initialDeviceId = viewModel.defaultDeviceId, modifier = Modifier.fillMaxWidth(), onDeviceSelected = viewModel.onClick)
    }
  }

  /** [DialogInterface] implementation: cancelling the sheet just dismisses it. */
  override fun cancel() {
    dismiss()
  }

  /**
   * Shows the sheet immediately, then seeds its view model with the routes to display.
   *
   * @param fm               fragment manager to host the sheet
   * @param tag              fragment tag, may be null
   * @param audioRoutes      routes to list, in display order
   * @param selectedDeviceId platform device id to pre-select (-1 for none)
   * @param onClick          invoked with the tapped route
   */
  fun show(fm: FragmentManager, tag: String?, audioRoutes: List<AudioOutputOption>, selectedDeviceId: Int, onClick: (AudioOutputOption) -> Unit) {
    super.showNow(fm, tag)
    viewModel.audioRoutes = audioRoutes
    viewModel.defaultDeviceId = selectedDeviceId
    viewModel.onClick = onClick
  }

  companion object {
    const val TAG = "WebRtcAudioOutputBottomSheet"

    /** Convenience factory: creates, shows, and returns the sheet in one call. */
    @JvmStatic
    fun show(fragmentManager: FragmentManager, audioRoutes: List<AudioOutputOption>, selectedDeviceId: Int, onClick: (AudioOutputOption) -> Unit): WebRtcAudioOutputBottomSheet {
      val bottomSheet = WebRtcAudioOutputBottomSheet()
      // (Removed an unused local Bundle that was never attached as arguments.)
      bottomSheet.show(fragmentManager, BottomSheetUtil.STANDARD_BOTTOM_SHEET_FRAGMENT_TAG, audioRoutes, selectedDeviceId, onClick)
      return bottomSheet
    }
  }
}
/**
 * Radio-button list of audio routes.
 *
 * Selection is hoisted into local state (surviving configuration changes via [rememberSaveable]);
 * tapping a row first invokes [onDeviceSelected] with the tapped option, then updates the local
 * selection so the radio buttons re-render.
 *
 * @param audioOutputOptions routes to display, in list order
 * @param initialDeviceId    deviceId of the row to pre-select
 * @param modifier           applied to the outer column; defaults to filling the available width
 * @param onDeviceSelected   called with the tapped [AudioOutputOption]
 */
@Composable
fun DeviceList(audioOutputOptions: ImmutableList<AudioOutputOption>, initialDeviceId: Int, modifier: Modifier = Modifier.fillMaxWidth(), onDeviceSelected: (AudioOutputOption) -> Unit) {
  var selectedDeviceId by rememberSaveable { mutableStateOf(initialDeviceId) }
  Column(
    horizontalAlignment = Alignment.Start,
    modifier = modifier
  ) {
    Text(
      text = stringResource(R.string.WebRtcAudioOutputToggle__audio_output),
      style = MaterialTheme.typography.headlineMedium,
      modifier = Modifier
        .padding(8.dp)
    )
    // selectableGroup() gives the rows radio-group semantics for accessibility services.
    Column(Modifier.selectableGroup()) {
      audioOutputOptions.forEach { device: AudioOutputOption ->
        Row(
          Modifier
            .fillMaxWidth()
            .height(56.dp)
            // The whole row is the touch target, announced as a RadioButton.
            .selectable(
              selected = (device.deviceId == selectedDeviceId),
              onClick = {
                onDeviceSelected(device)
                selectedDeviceId = device.deviceId
              },
              role = Role.RadioButton
            )
            .padding(horizontal = 16.dp),
          verticalAlignment = Alignment.CenterVertically
        ) {
          RadioButton(
            selected = (device.deviceId == selectedDeviceId),
            onClick = null // null recommended for accessibility with screenreaders
          )
          Icon(
            modifier = Modifier.padding(start = 16.dp),
            painter = painterResource(id = getDrawableResourceForDeviceType(device.deviceType)),
            contentDescription = stringResource(id = getDescriptionStringResourceForDeviceType(device.deviceType)),
            tint = MaterialTheme.colorScheme.onSurface
          )
          Text(
            text = device.friendlyName,
            style = MaterialTheme.typography.bodyLarge,
            modifier = Modifier.padding(start = 16.dp)
          )
        }
      }
    }
  }
}
/**
 * Holds the data [WebRtcAudioOutputBottomSheet] renders. The fields are populated by the
 * sheet's `show(...)` overload right after the sheet is shown, so they survive the
 * fragment being recreated while the sheet is on screen.
 */
class AudioOutputViewModel : ViewModel() {
  // Routes to list, in display order.
  var audioRoutes: List<AudioOutputOption> = emptyList()
  // Platform device id to pre-select; -1 when the current device is unknown.
  var defaultDeviceId: Int = -1
  // Invoked when the user taps a route.
  var onClick: (AudioOutputOption) -> Unit = {}
}
/** Icon shown next to a route in the picker, chosen by its coarse device category. */
private fun getDrawableResourceForDeviceType(deviceType: SignalAudioManager.AudioDevice): Int =
  when (deviceType) {
    SignalAudioManager.AudioDevice.EARPIECE -> R.drawable.symbol_phone_speaker_outline_24
    SignalAudioManager.AudioDevice.BLUETOOTH -> R.drawable.symbol_speaker_bluetooth_fill_white_24
    SignalAudioManager.AudioDevice.WIRED_HEADSET -> R.drawable.symbol_headphones_outline_24
    // Speaker doubles as the fallback icon when no device is reported.
    SignalAudioManager.AudioDevice.SPEAKER_PHONE, SignalAudioManager.AudioDevice.NONE -> R.drawable.symbol_speaker_outline_24
  }
/** Content-description string for a route's icon (accessibility), by coarse device category. */
private fun getDescriptionStringResourceForDeviceType(deviceType: SignalAudioManager.AudioDevice): Int =
  when (deviceType) {
    SignalAudioManager.AudioDevice.EARPIECE -> R.string.WebRtcAudioOutputBottomSheet__earpiece_icon_content_description
    SignalAudioManager.AudioDevice.BLUETOOTH -> R.string.WebRtcAudioOutputBottomSheet__bluetooth_icon_content_description
    SignalAudioManager.AudioDevice.WIRED_HEADSET -> R.string.WebRtcAudioOutputBottomSheet__headset_icon_content_description
    // Speaker doubles as the fallback description when no device is reported.
    SignalAudioManager.AudioDevice.SPEAKER_PHONE, SignalAudioManager.AudioDevice.NONE -> R.string.WebRtcAudioOutputBottomSheet__speaker_icon_content_description
  }
/**
 * A single selectable audio route.
 *
 * @param friendlyName human-readable label shown in the picker list
 * @param deviceType   coarse category, used to pick the icon and content description
 * @param deviceId     platform audio device id (taken from AudioDeviceInfo.id on API 31+)
 */
data class AudioOutputOption(val friendlyName: String, val deviceType: SignalAudioManager.AudioDevice, val deviceId: Int)
/** Design-time preview of [DeviceList] with fixed sample routes (no live audio hardware needed). */
@Preview
@Composable
private fun SampleOutputBottomSheet() {
  val sampleRoutes = listOf(
    "Earpiece" to SignalAudioManager.AudioDevice.EARPIECE,
    "Speaker" to SignalAudioManager.AudioDevice.SPEAKER_PHONE,
    "BT Headset" to SignalAudioManager.AudioDevice.BLUETOOTH,
    "Wired Headset" to SignalAudioManager.AudioDevice.WIRED_HEADSET
  )
    .mapIndexed { index, (name, type) -> AudioOutputOption(name, type, index) }
    .toImmutableList()
  DeviceList(audioOutputOptions = sampleRoutes, initialDeviceId = 0, onDeviceSelected = {})
}

View File

@@ -1,211 +0,0 @@
package org.thoughtcrime.securesms.components.webrtc;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.widget.AppCompatImageView;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.google.android.material.dialog.MaterialAlertDialogBuilder;
import org.thoughtcrime.securesms.R;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * An {@link AppCompatImageView} button that controls the audio output route (handset,
 * speaker, or headset) for an ongoing call.
 *
 * With more than two available routes, or when no handset exists, a click opens a list
 * picker dialog; otherwise a click toggles directly between the two routes. Availability
 * flags and the selected route survive configuration changes via
 * {@link #onSaveInstanceState()} / {@link #onRestoreInstanceState(Parcelable)}.
 */
public class WebRtcAudioOutputToggleButton extends AppCompatImageView {

  private static final String STATE_OUTPUT_INDEX   = "audio.output.toggle.state.output.index";
  private static final String STATE_HEADSET_ENABLED = "audio.output.toggle.state.headset.enabled";
  private static final String STATE_HANDSET_ENABLED = "audio.output.toggle.state.handset.enabled";
  private static final String STATE_PARENT          = "audio.output.toggle.state.parent";

  // Drawable-state attribute sets; OUTPUT_ENUM is indexed by outputIndex, parallel to OUTPUT_MODES.
  private static final int[]   SPEAKER_OFF    = { R.attr.state_speaker_off };
  private static final int[]   SPEAKER_ON     = { R.attr.state_speaker_on };
  private static final int[]   OUTPUT_HANDSET = { R.attr.state_handset_selected };
  private static final int[]   OUTPUT_SPEAKER = { R.attr.state_speaker_selected };
  private static final int[]   OUTPUT_HEADSET = { R.attr.state_headset_selected };
  private static final int[][] OUTPUT_ENUM    = { SPEAKER_OFF, SPEAKER_ON, OUTPUT_HANDSET, OUTPUT_SPEAKER, OUTPUT_HEADSET };

  // Parallel to OUTPUT_ENUM: indices 0-1 model the simple two-state toggle, 2-4 the picker-selected states.
  private static final List<WebRtcAudioOutput> OUTPUT_MODES = Arrays.asList(WebRtcAudioOutput.HANDSET,
                                                                            WebRtcAudioOutput.SPEAKER,
                                                                            WebRtcAudioOutput.HANDSET,
                                                                            WebRtcAudioOutput.SPEAKER,
                                                                            WebRtcAudioOutput.HEADSET);

  private boolean                      isHeadsetAvailable;
  private boolean                      isHandsetAvailable;
  private int                          outputIndex;
  private OnAudioOutputChangedListener audioOutputChangedListener;
  private DialogInterface              picker;

  public WebRtcAudioOutputToggleButton(@NonNull Context context) {
    this(context, null);
  }

  public WebRtcAudioOutputToggleButton(@NonNull Context context, @Nullable AttributeSet attrs) {
    this(context, attrs, 0);
  }

  public WebRtcAudioOutputToggleButton(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    super.setOnClickListener((v) -> {
      List<WebRtcAudioOutput> availableModes = buildOutputModeList(isHeadsetAvailable, isHandsetAvailable);
      // Three or more routes (or no handset to toggle against): a list picker is clearer than cycling.
      if (availableModes.size() > 2 || !isHandsetAvailable) showPicker(availableModes);
      else setAudioOutput(OUTPUT_MODES.get((outputIndex + 1) % OUTPUT_MODES.size()), true);
    });
  }

  @Override
  protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    hidePicker(); // don't leak the dialog's window
  }

  @Override
  public int[] onCreateDrawableState(int extraSpace) {
    final int[] extra         = OUTPUT_ENUM[outputIndex];
    final int[] drawableState = super.onCreateDrawableState(extraSpace + extra.length);
    mergeDrawableStates(drawableState, extra);
    return drawableState;
  }

  /** Clicks are handled internally; external click listeners are not supported. */
  @Override
  public void setOnClickListener(@Nullable OnClickListener l) {
    throw new UnsupportedOperationException("This View does not support custom click listeners.");
  }

  public void setControlAvailability(boolean isHandsetAvailable, boolean isHeadsetAvailable) {
    this.isHandsetAvailable = isHandsetAvailable;
    this.isHeadsetAvailable = isHeadsetAvailable;
  }

  /**
   * Selects the given output (resolved against what is currently available), refreshing the
   * drawable state and, when {@code notifyListener} is set, notifying the listener — but only
   * if the selection actually changed.
   */
  public void setAudioOutput(@NonNull WebRtcAudioOutput audioOutput, boolean notifyListener) {
    int oldIndex = outputIndex;
    outputIndex = resolveAudioOutputIndex(OUTPUT_MODES.lastIndexOf(audioOutput));
    if (oldIndex != outputIndex) {
      refreshDrawableState();
      if (notifyListener) {
        notifyListener();
      }
    }
  }

  public void setOnAudioOutputChangedListener(@Nullable OnAudioOutputChangedListener listener) {
    this.audioOutputChangedListener = listener;
  }

  /** Shows a dialog hosting a RecyclerView of the available modes; selecting one applies it and dismisses. */
  private void showPicker(@NonNull List<WebRtcAudioOutput> availableModes) {
    RecyclerView       rv      = new RecyclerView(getContext());
    AudioOutputAdapter adapter = new AudioOutputAdapter(audioOutput -> {
      setAudioOutput(audioOutput, true);
      hidePicker();
    },
    availableModes);
    adapter.setSelectedOutput(OUTPUT_MODES.get(outputIndex));
    rv.setLayoutManager(new LinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, false));
    rv.setAdapter(adapter);
    picker = new MaterialAlertDialogBuilder(getContext())
        .setTitle(R.string.WebRtcAudioOutputToggle__audio_output)
        .setView(rv)
        .setCancelable(true)
        .show();
  }

  @Override
  protected Parcelable onSaveInstanceState() {
    Parcelable parentState = super.onSaveInstanceState();
    Bundle     bundle      = new Bundle();
    bundle.putParcelable(STATE_PARENT, parentState);
    bundle.putInt(STATE_OUTPUT_INDEX, outputIndex);
    bundle.putBoolean(STATE_HEADSET_ENABLED, isHeadsetAvailable);
    bundle.putBoolean(STATE_HANDSET_ENABLED, isHandsetAvailable);
    return bundle;
  }

  @Override
  protected void onRestoreInstanceState(Parcelable state) {
    if (state instanceof Bundle) {
      Bundle savedState = (Bundle) state;
      isHeadsetAvailable = savedState.getBoolean(STATE_HEADSET_ENABLED);
      isHandsetAvailable = savedState.getBoolean(STATE_HANDSET_ENABLED);
      setAudioOutput(OUTPUT_MODES.get(
          resolveAudioOutputIndex(savedState.getInt(STATE_OUTPUT_INDEX))),
          false
      );
      super.onRestoreInstanceState(savedState.getParcelable(STATE_PARENT));
    } else {
      super.onRestoreInstanceState(state);
    }
  }

  private void hidePicker() {
    if (picker != null) {
      picker.dismiss();
      picker = null;
    }
  }

  private void notifyListener() {
    if (audioOutputChangedListener == null) return;
    audioOutputChangedListener.audioOutputChanged(OUTPUT_MODES.get(outputIndex));
  }

  /** Speaker is always offered; headset and handset only when flagged available. */
  private static List<WebRtcAudioOutput> buildOutputModeList(boolean isHeadsetAvailable, boolean isHandsetAvailable) {
    List<WebRtcAudioOutput> modes = new ArrayList<>(3); // was a raw ArrayList; diamond avoids the unchecked warning
    modes.add(WebRtcAudioOutput.SPEAKER);
    if (isHeadsetAvailable) {
      modes.add(WebRtcAudioOutput.HEADSET);
    }
    if (isHandsetAvailable) {
      modes.add(WebRtcAudioOutput.HANDSET);
    }
    return modes;
  }

  /**
   * Maps a desired index into OUTPUT_MODES onto one that is actually usable given the current
   * availability flags, falling back to speaker (or handset) when the desired mode is unsupported.
   */
  private int resolveAudioOutputIndex(int desiredAudioOutputIndex) {
    if (isIllegalAudioOutputIndex(desiredAudioOutputIndex)) {
      throw new IllegalArgumentException("Unsupported index: " + desiredAudioOutputIndex);
    }
    if (isUnsupportedAudioOutput(desiredAudioOutputIndex, isHeadsetAvailable, isHandsetAvailable)) {
      if (!isHandsetAvailable) {
        return OUTPUT_MODES.lastIndexOf(WebRtcAudioOutput.SPEAKER);
      } else {
        return OUTPUT_MODES.indexOf(WebRtcAudioOutput.HANDSET);
      }
    }
    if (!isHeadsetAvailable) {
      return desiredAudioOutputIndex % 2; // collapse to the two-state toggle indices
    }
    return desiredAudioOutputIndex;
  }

  private static boolean isIllegalAudioOutputIndex(int desiredAudioOutputIndex) {
    // '>=': an index equal to size() is out of bounds too (the previous '>' let it through,
    // and OUTPUT_MODES.get(size()) in isUnsupportedAudioOutput would then throw).
    return desiredAudioOutputIndex < 0 || desiredAudioOutputIndex >= OUTPUT_MODES.size();
  }

  private static boolean isUnsupportedAudioOutput(int desiredAudioOutputIndex, boolean isHeadsetAvailable, boolean isHandsetAvailable) {
    return (OUTPUT_MODES.get(desiredAudioOutputIndex) == WebRtcAudioOutput.HEADSET && !isHeadsetAvailable) ||
           (OUTPUT_MODES.get(desiredAudioOutputIndex) == WebRtcAudioOutput.HANDSET && !isHandsetAvailable);
  }
}

View File

@@ -0,0 +1,292 @@
package org.thoughtcrime.securesms.components.webrtc
import android.content.Context
import android.content.ContextWrapper
import android.content.DialogInterface
import android.media.AudioDeviceInfo
import android.os.Build
import android.os.Bundle
import android.os.Parcelable
import android.util.AttributeSet
import android.view.View.OnClickListener
import android.widget.Toast
import androidx.annotation.RequiresApi
import androidx.appcompat.widget.AppCompatImageView
import androidx.fragment.app.FragmentActivity
import androidx.fragment.app.FragmentManager
import androidx.recyclerview.widget.LinearLayoutManager
import androidx.recyclerview.widget.RecyclerView
import com.google.android.material.dialog.MaterialAlertDialogBuilder
import org.signal.core.util.logging.Log
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.dependencies.ApplicationDependencies
import org.thoughtcrime.securesms.webrtc.audio.AudioDeviceMapping
import org.thoughtcrime.securesms.webrtc.audio.SignalAudioManager
/**
* A UI button that triggers a picker dialog/bottom sheet allowing the user to select the audio output for the ongoing call.
*/
class WebRtcAudioOutputToggleButton @JvmOverloads constructor(context: Context, attrs: AttributeSet? = null, defStyleAttr: Int = 0) : AppCompatImageView(context, attrs, defStyleAttr) {

  private val TAG = Log.tag(WebRtcAudioOutputToggleButton::class.java)

  // Which outputs are currently available and which one is selected.
  private var outputState: OutputState = OutputState()

  // Pre-31 listeners get the coarse WebRtcAudioOutput enum; 31+ listeners get the platform device id.
  private var audioOutputChangedListenerLegacy: OnAudioOutputChangedListener? = null
  private var audioOutputChangedListener31: OnAudioOutputChangedListener31? = null

  // Currently-showing picker (dialog pre-31, bottom sheet on 31+); dismissed on detach.
  private var picker: DialogInterface? = null

  // Pre-31: show a list picker when there are SHOW_PICKER_THRESHOLD+ routes or no earpiece;
  // otherwise a click cycles directly to the next available output.
  private val clickListenerLegacy: OnClickListener = OnClickListener {
    val outputs = outputState.getOutputs()
    if (outputs.size >= SHOW_PICKER_THRESHOLD || !outputState.isEarpieceAvailable) {
      showPickerLegacy(outputs)
    } else {
      setAudioOutput(outputState.peekNext(), true)
    }
  }

  // 31+: always show the bottom sheet; it needs a FragmentManager from the hosting activity.
  @RequiresApi(31)
  private val clickListener31 = OnClickListener {
    val fragmentActivity = context.fragmentActivity()
    if (fragmentActivity != null) {
      showPicker31(fragmentActivity.supportFragmentManager)
    } else {
      Log.e(TAG, "WebRtcAudioOutputToggleButton instantiated from a context that does not inherit from FragmentActivity.")
      Toast.makeText(context, R.string.WebRtcAudioOutputToggleButton_fragment_activity_error, Toast.LENGTH_LONG).show()
    }
  }

  init {
    // Install the click behavior matching the platform version; setOnClickListener below is
    // overridden to reject external listeners.
    super.setOnClickListener(
      if (Build.VERSION.SDK_INT >= 31) {
        clickListener31
      } else {
        clickListenerLegacy
      }
    )
  }

  override fun onDetachedFromWindow() {
    super.onDetachedFromWindow()
    hidePicker() // don't leak the picker's window
  }

  /**
   * DO NOT REMOVE THE ELVIS OPERATOR IN THE FIRST LINE
   * Somehow, through XML inflation (reflection?), [outputState] can actually be null,
   * even though the compiler disagrees.
   * */
  override fun onCreateDrawableState(extraSpace: Int): IntArray {
    val currentState = outputState ?: return super.onCreateDrawableState(extraSpace) // DO NOT REMOVE
    val currentOutput = currentState.getCurrentOutput()
    // Map the selected output onto its custom drawable-state attribute for the state-list drawable.
    val extra = when (currentOutput) {
      WebRtcAudioOutput.HANDSET -> intArrayOf(R.attr.state_handset_selected)
      WebRtcAudioOutput.SPEAKER -> intArrayOf(R.attr.state_speaker_selected)
      WebRtcAudioOutput.BLUETOOTH_HEADSET -> intArrayOf(R.attr.state_bt_headset_selected)
      WebRtcAudioOutput.WIRED_HEADSET -> intArrayOf(R.attr.state_wired_headset_selected)
    }
    val oldLabel = context.getString(currentOutput.labelRes)
    Log.i(TAG, "Switching drawable to $oldLabel")
    val drawableState = super.onCreateDrawableState(extraSpace + extra.size)
    mergeDrawableStates(drawableState, extra)
    return drawableState
  }

  /** Clicks are handled internally (see init); external click listeners are not supported. */
  override fun setOnClickListener(l: OnClickListener?) {
    throw UnsupportedOperationException("This View does not support custom click listeners.")
  }

  fun setControlAvailability(isEarpieceAvailable: Boolean, isBluetoothHeadsetAvailable: Boolean) {
    outputState.isEarpieceAvailable = isEarpieceAvailable
    outputState.isBluetoothHeadsetAvailable = isBluetoothHeadsetAvailable
  }

  /**
   * Selects [audioOutput], refreshing the icon and (when [notifyListener]) telling the legacy
   * listener — only if the selection actually changed.
   * NOTE(review): setCurrentOutput's false return (output not currently available) is ignored here.
   */
  fun setAudioOutput(audioOutput: WebRtcAudioOutput, notifyListener: Boolean) {
    val oldOutput = outputState.getCurrentOutput()
    if (oldOutput != audioOutput) {
      outputState.setCurrentOutput(audioOutput)
      refreshDrawableState()
      if (notifyListener) {
        audioOutputChangedListenerLegacy?.audioOutputChanged(audioOutput)
      }
    }
  }

  fun setOnAudioOutputChangedListenerLegacy(listener: OnAudioOutputChangedListener?) {
    audioOutputChangedListenerLegacy = listener
  }

  @RequiresApi(31)
  fun setOnAudioOutputChangedListener31(listener: OnAudioOutputChangedListener31?) {
    audioOutputChangedListener31 = listener
  }

  // Pre-31 picker: a MaterialAlertDialog hosting a RecyclerView of the available modes.
  private fun showPickerLegacy(availableModes: List<WebRtcAudioOutput?>) {
    val rv = RecyclerView(context)
    val adapter = AudioOutputAdapter(
      { audioOutput: WebRtcAudioOutput ->
        setAudioOutput(audioOutput, true)
        hidePicker()
      },
      availableModes
    )
    adapter.setSelectedOutput(outputState.getCurrentOutput())
    rv.layoutManager = LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false)
    rv.adapter = adapter
    picker = MaterialAlertDialogBuilder(context)
      .setTitle(R.string.WebRtcAudioOutputToggle__audio_output)
      .setView(rv)
      .setCancelable(true)
      .show()
  }

  // 31+ picker: bottom sheet listing the AudioManager's communication devices by platform id.
  @RequiresApi(31)
  private fun showPicker31(fragmentManager: FragmentManager) {
    val am = ApplicationDependencies.getAndroidCallAudioManager()
    if (am.availableCommunicationDevices.isEmpty()) {
      Toast.makeText(context, R.string.WebRtcAudioOutputToggleButton_no_eligible_audio_i_o_detected, Toast.LENGTH_LONG).show()
      return
    }
    val devices: List<AudioOutputOption> = am.availableCommunicationDevices.map { AudioOutputOption(it.toFriendlyName(context).toString(), AudioDeviceMapping.fromPlatformType(it.type), it.id) }
    picker = WebRtcAudioOutputBottomSheet.show(fragmentManager, devices, am.communicationDevice?.id ?: -1) {
      // Notify the 31+ listener with the raw device id, then mirror the choice into outputState
      // so the button icon updates (marking the chosen category available as a side effect).
      audioOutputChangedListener31?.audioOutputChanged(it.deviceId)
      when (it.deviceType) {
        SignalAudioManager.AudioDevice.WIRED_HEADSET -> {
          outputState.isWiredHeadsetAvailable = true
          setAudioOutput(WebRtcAudioOutput.WIRED_HEADSET, true)
        }
        SignalAudioManager.AudioDevice.EARPIECE -> {
          outputState.isEarpieceAvailable = true
          setAudioOutput(WebRtcAudioOutput.HANDSET, true)
        }
        SignalAudioManager.AudioDevice.BLUETOOTH -> {
          outputState.isBluetoothHeadsetAvailable = true
          setAudioOutput(WebRtcAudioOutput.BLUETOOTH_HEADSET, true)
        }
        SignalAudioManager.AudioDevice.SPEAKER_PHONE, SignalAudioManager.AudioDevice.NONE -> setAudioOutput(WebRtcAudioOutput.SPEAKER, true)
      }
    }
  }

  // Human-readable name for a platform device; falls back to its product name for unknown types.
  @RequiresApi(23)
  private fun AudioDeviceInfo.toFriendlyName(context: Context): CharSequence {
    return when (this.type) {
      AudioDeviceInfo.TYPE_BUILTIN_EARPIECE -> context.getString(R.string.WebRtcAudioOutputToggle__phone_earpiece)
      AudioDeviceInfo.TYPE_BUILTIN_SPEAKER -> context.getString(R.string.WebRtcAudioOutputToggle__speaker)
      AudioDeviceInfo.TYPE_WIRED_HEADSET -> context.getString(R.string.WebRtcAudioOutputToggle__wired_headset)
      AudioDeviceInfo.TYPE_USB_HEADSET -> context.getString(R.string.WebRtcAudioOutputToggle__wired_headset_usb)
      else -> this.productName
    }
  }

  override fun onSaveInstanceState(): Parcelable {
    val parentState = super.onSaveInstanceState()
    val bundle = Bundle()
    bundle.putParcelable(STATE_PARENT, parentState)
    bundle.putInt(STATE_OUTPUT_INDEX, outputState.getBackingIndexForBackup())
    // The legacy "headset"/"handset" key names are kept; they now carry bluetooth-headset and
    // earpiece availability respectively.
    bundle.putBoolean(STATE_HEADSET_ENABLED, outputState.isBluetoothHeadsetAvailable)
    bundle.putBoolean(STATE_HANDSET_ENABLED, outputState.isEarpieceAvailable)
    return bundle
  }

  override fun onRestoreInstanceState(state: Parcelable) {
    if (state is Bundle) {
      outputState.isBluetoothHeadsetAvailable = state.getBoolean(STATE_HEADSET_ENABLED)
      outputState.isEarpieceAvailable = state.getBoolean(STATE_HANDSET_ENABLED)
      outputState.setBackingIndexForRestore(state.getInt(STATE_OUTPUT_INDEX))
      refreshDrawableState()
      super.onRestoreInstanceState(state.getParcelable(STATE_PARENT))
    } else {
      super.onRestoreInstanceState(state)
    }
  }

  private fun hidePicker() {
    picker?.dismiss()
    picker = null
  }

  /**
   * Ordered set of currently-available outputs plus the index of the selected one.
   * SPEAKER is always a member, so [getOutputs] is never empty and [peekNext] cannot
   * divide by zero.
   */
  inner class OutputState {
    private val availableOutputs: LinkedHashSet<WebRtcAudioOutput> = linkedSetOf(WebRtcAudioOutput.SPEAKER)
    // Index into getOutputs() of the current selection.
    private var selectedDevice = 0

    @Deprecated("Used only for onSaveInstanceState.")
    fun getBackingIndexForBackup(): Int {
      return selectedDevice
    }

    @Deprecated("Used only for onRestoreInstanceState.")
    fun setBackingIndexForRestore(index: Int) {
      // NOTE(review): `index` is ignored and the selection always resets to 0 (SPEAKER, the
      // set's first element). Confirm this is intentional — otherwise the STATE_OUTPUT_INDEX
      // value saved in onSaveInstanceState is never actually restored.
      selectedDevice = 0
    }

    fun getCurrentOutput(): WebRtcAudioOutput {
      return getOutputs()[selectedDevice]
    }

    /** Returns false (leaving the selection unchanged) when [outputType] is not currently available. */
    fun setCurrentOutput(outputType: WebRtcAudioOutput): Boolean {
      val newIndex = getOutputs().indexOf(outputType)
      return if (newIndex < 0) {
        false
      } else {
        selectedDevice = newIndex
        true
      }
    }

    fun getOutputs(): List<WebRtcAudioOutput> {
      return availableOutputs.toList()
    }

    /** The output after the current one, wrapping around — drives the legacy two-state toggle. */
    fun peekNext(): WebRtcAudioOutput {
      val peekIndex = (selectedDevice + 1) % availableOutputs.size
      return getOutputs()[peekIndex]
    }

    // Availability flags are modeled as set membership in availableOutputs.
    var isEarpieceAvailable: Boolean
      get() = availableOutputs.contains(WebRtcAudioOutput.HANDSET)
      set(value) {
        if (value) {
          availableOutputs.add(WebRtcAudioOutput.HANDSET)
        } else {
          availableOutputs.remove(WebRtcAudioOutput.HANDSET)
        }
      }

    var isBluetoothHeadsetAvailable: Boolean
      get() = availableOutputs.contains(WebRtcAudioOutput.BLUETOOTH_HEADSET)
      set(value) {
        if (value) {
          availableOutputs.add(WebRtcAudioOutput.BLUETOOTH_HEADSET)
        } else {
          availableOutputs.remove(WebRtcAudioOutput.BLUETOOTH_HEADSET)
        }
      }

    var isWiredHeadsetAvailable: Boolean
      get() = availableOutputs.contains(WebRtcAudioOutput.WIRED_HEADSET)
      set(value) {
        if (value) {
          availableOutputs.add(WebRtcAudioOutput.WIRED_HEADSET)
        } else {
          availableOutputs.remove(WebRtcAudioOutput.WIRED_HEADSET)
        }
      }
  }

  companion object {
    private const val SHOW_PICKER_THRESHOLD = 3
    private const val STATE_OUTPUT_INDEX = "audio.output.toggle.state.output.index"
    private const val STATE_HEADSET_ENABLED = "audio.output.toggle.state.headset.enabled"
    private const val STATE_HANDSET_ENABLED = "audio.output.toggle.state.handset.enabled"
    private const val STATE_PARENT = "audio.output.toggle.state.parent"

    // Walks ContextWrapper.baseContext until a FragmentActivity is found (needed to host the sheet).
    private tailrec fun Context.fragmentActivity(): FragmentActivity? = when (this) {
      is FragmentActivity -> this
      else -> (this as? ContextWrapper)?.baseContext?.fragmentActivity()
    }
  }
}

View File

@@ -5,6 +5,7 @@ import android.graphics.ColorMatrix;
import android.graphics.ColorMatrixColorFilter;
import android.graphics.Point;
import android.graphics.Rect;
import android.os.Build;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
@@ -16,6 +17,7 @@ import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.appcompat.content.res.AppCompatResources;
import androidx.constraintlayout.widget.ConstraintLayout;
import androidx.constraintlayout.widget.ConstraintSet;
@@ -237,9 +239,16 @@ public class WebRtcCallView extends ConstraintLayout {
adjustableMarginsSet.add(videoToggle);
adjustableMarginsSet.add(audioToggle);
audioToggle.setOnAudioOutputChangedListener(outputMode -> {
runIfNonNull(controlsListener, listener -> listener.onAudioOutputChanged(outputMode));
});
if (Build.VERSION.SDK_INT >= 31) {
audioToggle.setOnAudioOutputChangedListener31(deviceId -> {
runIfNonNull(controlsListener, listener -> listener.onAudioOutputChanged31(deviceId));
});
} else {
audioToggle.setOnAudioOutputChangedListenerLegacy(outputMode -> {
runIfNonNull(controlsListener, listener -> listener.onAudioOutputChanged(outputMode));
});
}
videoToggle.setOnCheckedChangeListener((v, isOn) -> {
runIfNonNull(controlsListener, listener -> listener.onVideoChanged(isOn));
@@ -639,8 +648,8 @@ public class WebRtcCallView extends ConstraintLayout {
if (webRtcControls.displayAudioToggle()) {
visibleViewSet.add(audioToggle);
audioToggle.setControlAvailability(webRtcControls.enableHandsetInAudioToggle(),
webRtcControls.enableHeadsetInAudioToggle());
audioToggle.setControlAvailability(webRtcControls.enableEarpieceInAudioToggle(),
webRtcControls.enableBluetoothHeadsetInAudioToggle());
audioToggle.setAudioOutput(webRtcControls.getAudioOutput(), false);
}
@@ -1049,6 +1058,8 @@ public class WebRtcCallView extends ConstraintLayout {
void showSystemUI();
void hideSystemUI();
void onAudioOutputChanged(@NonNull WebRtcAudioOutput audioOutput);
@RequiresApi(31)
void onAudioOutputChanged31(@NonNull int audioOutputAddress);
void onVideoChanged(boolean isVideoEnabled);
void onMicChanged(boolean isMicEnabled);
void onCameraDirectionChanged();

View File

@@ -153,7 +153,7 @@ public final class WebRtcControls {
}
boolean displayAudioToggle() {
return (isPreJoin() || isAtLeastOutgoing()) && (!isLocalVideoEnabled || enableHeadsetInAudioToggle());
return (isPreJoin() || isAtLeastOutgoing()) && (!isLocalVideoEnabled || enableBluetoothHeadsetInAudioToggle());
}
boolean displayCameraToggle() {
@@ -172,11 +172,11 @@ public final class WebRtcControls {
return isIncoming();
}
boolean enableHandsetInAudioToggle() {
boolean enableEarpieceInAudioToggle() {
return !isLocalVideoEnabled;
}
boolean enableHeadsetInAudioToggle() {
boolean enableBluetoothHeadsetInAudioToggle() {
return availableDevices.contains(SignalAudioManager.AudioDevice.BLUETOOTH);
}
@@ -201,7 +201,9 @@ public final class WebRtcControls {
case SPEAKER_PHONE:
return WebRtcAudioOutput.SPEAKER;
case BLUETOOTH:
return WebRtcAudioOutput.HEADSET;
return WebRtcAudioOutput.BLUETOOTH_HEADSET;
case WIRED_HEADSET:
return WebRtcAudioOutput.WIRED_HEADSET;
default:
return WebRtcAudioOutput.HANDSET;
}