New Android 12+ audio route picker for calls.

This commit is contained in:
Nicholas
2023-04-03 16:40:18 -04:00
committed by Alex Hart
parent 99bd8e82ca
commit a0aeac767d
25 changed files with 700 additions and 341 deletions

View File

@@ -31,9 +31,9 @@ import android.os.Bundle;
import android.util.Rational;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatDelegate;
import androidx.core.content.ContextCompat;
@@ -416,15 +416,19 @@ public class WebRtcCallActivity extends BaseActivity implements SafetyNumberChan
}
private void handleSetAudioHandset() {
ApplicationDependencies.getSignalCallManager().selectAudioDevice(SignalAudioManager.AudioDevice.EARPIECE);
ApplicationDependencies.getSignalCallManager().selectAudioDevice(new SignalAudioManager.ChosenAudioDeviceIdentifier(SignalAudioManager.AudioDevice.EARPIECE));
}
private void handleSetAudioSpeaker() {
ApplicationDependencies.getSignalCallManager().selectAudioDevice(SignalAudioManager.AudioDevice.SPEAKER_PHONE);
ApplicationDependencies.getSignalCallManager().selectAudioDevice(new SignalAudioManager.ChosenAudioDeviceIdentifier(SignalAudioManager.AudioDevice.SPEAKER_PHONE));
}
private void handleSetAudioBluetooth() {
ApplicationDependencies.getSignalCallManager().selectAudioDevice(SignalAudioManager.AudioDevice.BLUETOOTH);
ApplicationDependencies.getSignalCallManager().selectAudioDevice(new SignalAudioManager.ChosenAudioDeviceIdentifier(SignalAudioManager.AudioDevice.BLUETOOTH));
}
private void handleSetAudioWiredHeadset() {
ApplicationDependencies.getSignalCallManager().selectAudioDevice(new SignalAudioManager.ChosenAudioDeviceIdentifier(SignalAudioManager.AudioDevice.WIRED_HEADSET));
}
private void handleSetMuteAudio(boolean enabled) {
@@ -786,17 +790,26 @@ public class WebRtcCallActivity extends BaseActivity implements SafetyNumberChan
case HANDSET:
handleSetAudioHandset();
break;
case HEADSET:
case BLUETOOTH_HEADSET:
handleSetAudioBluetooth();
break;
case SPEAKER:
handleSetAudioSpeaker();
break;
case WIRED_HEADSET:
handleSetAudioWiredHeadset();
break;
default:
throw new IllegalStateException("Unknown output: " + audioOutput);
}
}
@RequiresApi(31)
@Override
public void onAudioOutputChanged31(@NonNull int audioDeviceInfo) {
ApplicationDependencies.getSignalCallManager().selectAudioDevice(new SignalAudioManager.ChosenAudioDeviceIdentifier(audioDeviceInfo));
}
@Override
public void onVideoChanged(boolean isVideoEnabled) {
handleSetMuteVideo(!isVideoEnabled);

View File

@@ -0,0 +1,8 @@
package org.thoughtcrime.securesms.components.webrtc
import androidx.annotation.RequiresApi
/**
 * Callback for audio output selection on API 31+.
 *
 * On these API levels a route is identified by the integer id of the platform's
 * [android.media.AudioDeviceInfo] (the value of `AudioDeviceInfo.getId()`), rather
 * than by the coarse [WebRtcAudioOutput] type used by the legacy listener.
 */
@RequiresApi(31)
interface OnAudioOutputChangedListener31 {
fun audioOutputChanged(audioDeviceId: Int)
}

View File

@@ -8,7 +8,8 @@ import org.thoughtcrime.securesms.R;
public enum WebRtcAudioOutput {
HANDSET(R.string.WebRtcAudioOutputToggle__phone_earpiece, R.drawable.ic_handset_solid_24),
SPEAKER(R.string.WebRtcAudioOutputToggle__speaker, R.drawable.symbol_speaker_fill_white_24),
HEADSET(R.string.WebRtcAudioOutputToggle__bluetooth, R.drawable.symbol_speaker_bluetooth_fill_white_24);
BLUETOOTH_HEADSET(R.string.WebRtcAudioOutputToggle__bluetooth, R.drawable.symbol_speaker_bluetooth_fill_white_24),
WIRED_HEADSET(R.string.WebRtcAudioOutputToggle__wired_headset, R.drawable.symbol_headphones_filed_24);
private final @StringRes int labelRes;
private final @DrawableRes int iconRes;

View File

@@ -0,0 +1,169 @@
package org.thoughtcrime.securesms.components.webrtc
import android.content.DialogInterface
import android.os.Bundle
import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.Row
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.height
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.wrapContentSize
import androidx.compose.foundation.selection.selectable
import androidx.compose.foundation.selection.selectableGroup
import androidx.compose.material3.Icon
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.RadioButton
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.saveable.rememberSaveable
import androidx.compose.runtime.setValue
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.res.painterResource
import androidx.compose.ui.res.stringResource
import androidx.compose.ui.semantics.Role
import androidx.compose.ui.tooling.preview.Preview
import androidx.compose.ui.unit.dp
import androidx.fragment.app.FragmentManager
import androidx.fragment.app.viewModels
import androidx.lifecycle.ViewModel
import kotlinx.collections.immutable.ImmutableList
import kotlinx.collections.immutable.toImmutableList
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.compose.ComposeBottomSheetDialogFragment
import org.thoughtcrime.securesms.util.BottomSheetUtil
import org.thoughtcrime.securesms.webrtc.audio.SignalAudioManager
/**
 * A bottom sheet that allows the user to select what device they want to route call audio to.
 * Intended to be used with Android 31+ APIs, where routes are concrete communication devices
 * rather than coarse output types.
 */
class WebRtcAudioOutputBottomSheet : ComposeBottomSheetDialogFragment(), DialogInterface {
  private val viewModel by viewModels<AudioOutputViewModel>()

  @Composable
  override fun SheetContent() {
    Column(
      horizontalAlignment = Alignment.CenterHorizontally,
      modifier = Modifier
        .padding(16.dp)
        .wrapContentSize()
    ) {
      Handle()
      DeviceList(audioOutputOptions = viewModel.audioRoutes.toImmutableList(), initialDeviceId = viewModel.defaultDeviceId, modifier = Modifier.fillMaxWidth(), onDeviceSelected = viewModel.onClick)
    }
  }

  override fun cancel() {
    dismiss()
  }

  /**
   * Shows the sheet immediately and seeds its [AudioOutputViewModel] with the routes to display,
   * the id of the currently selected device, and the selection callback.
   *
   * The view model is populated *after* [showNow] because it is scoped to this fragment and only
   * becomes usable once the fragment is attached; [SheetContent] reads it lazily on composition.
   */
  fun show(fm: FragmentManager, tag: String?, audioRoutes: List<AudioOutputOption>, selectedDeviceId: Int, onClick: (AudioOutputOption) -> Unit) {
    super.showNow(fm, tag)
    viewModel.audioRoutes = audioRoutes
    viewModel.defaultDeviceId = selectedDeviceId
    viewModel.onClick = onClick
  }

  companion object {
    const val TAG = "WebRtcAudioOutputBottomSheet"

    /** Convenience factory: creates the sheet, shows it under the standard tag, and returns it. */
    @JvmStatic
    fun show(fragmentManager: FragmentManager, audioRoutes: List<AudioOutputOption>, selectedDeviceId: Int, onClick: (AudioOutputOption) -> Unit): WebRtcAudioOutputBottomSheet {
      val bottomSheet = WebRtcAudioOutputBottomSheet()
      // Note: the original implementation built an empty Bundle of arguments here that was
      // never attached to the fragment; it has been removed as dead code.
      bottomSheet.show(fragmentManager, BottomSheetUtil.STANDARD_BOTTOM_SHEET_FRAGMENT_TAG, audioRoutes, selectedDeviceId, onClick)
      return bottomSheet
    }
  }
}
/**
 * A radio-button list of audio output routes.
 *
 * The selected id is held in local state (preserved across configuration changes via
 * [rememberSaveable]); tapping a row both updates that state and invokes [onDeviceSelected].
 *
 * @param audioOutputOptions routes to display, one row each.
 * @param initialDeviceId id of the row to pre-select.
 * @param onDeviceSelected invoked with the tapped option before local state is updated.
 */
@Composable
fun DeviceList(audioOutputOptions: ImmutableList<AudioOutputOption>, initialDeviceId: Int, modifier: Modifier = Modifier.fillMaxWidth(), onDeviceSelected: (AudioOutputOption) -> Unit) {
  var selectedDeviceId by rememberSaveable { mutableStateOf(initialDeviceId) }

  Column(
    horizontalAlignment = Alignment.Start,
    modifier = modifier
  ) {
    Text(
      text = stringResource(R.string.WebRtcAudioOutputToggle__audio_output),
      style = MaterialTheme.typography.headlineMedium,
      modifier = Modifier
        .padding(8.dp)
    )
    Column(Modifier.selectableGroup()) {
      audioOutputOptions.forEach { option: AudioOutputOption ->
        val isSelected = option.deviceId == selectedDeviceId

        Row(
          Modifier
            .fillMaxWidth()
            .height(56.dp)
            .selectable(
              selected = isSelected,
              onClick = {
                onDeviceSelected(option)
                selectedDeviceId = option.deviceId
              },
              role = Role.RadioButton
            )
            .padding(horizontal = 16.dp),
          verticalAlignment = Alignment.CenterVertically
        ) {
          // The whole row is the selectable target, so the radio button itself takes no
          // click handler (null is the recommended pattern for screen-reader accessibility).
          RadioButton(
            selected = isSelected,
            onClick = null
          )
          Icon(
            modifier = Modifier.padding(start = 16.dp),
            painter = painterResource(id = getDrawableResourceForDeviceType(option.deviceType)),
            contentDescription = stringResource(id = getDescriptionStringResourceForDeviceType(option.deviceType)),
            tint = MaterialTheme.colorScheme.onSurface
          )
          Text(
            text = option.friendlyName,
            style = MaterialTheme.typography.bodyLarge,
            modifier = Modifier.padding(start = 16.dp)
          )
        }
      }
    }
  }
}
/**
 * Holds the data backing [WebRtcAudioOutputBottomSheet] across configuration changes:
 * the routes to display, the device id to pre-select, and the selection callback.
 */
class AudioOutputViewModel : ViewModel() {
var audioRoutes: List<AudioOutputOption> = emptyList()
// -1 means "no current communication device known" (see the `?: -1` fallback at the call site).
var defaultDeviceId: Int = -1
var onClick: (AudioOutputOption) -> Unit = {}
}
/** Picks the icon shown next to a route in the picker, based on its coarse device type. */
private fun getDrawableResourceForDeviceType(deviceType: SignalAudioManager.AudioDevice): Int =
  when (deviceType) {
    SignalAudioManager.AudioDevice.EARPIECE -> R.drawable.symbol_phone_speaker_outline_24
    SignalAudioManager.AudioDevice.BLUETOOTH -> R.drawable.symbol_speaker_bluetooth_fill_white_24
    SignalAudioManager.AudioDevice.WIRED_HEADSET -> R.drawable.symbol_headphones_outline_24
    // NONE falls back to the speaker icon alongside SPEAKER_PHONE.
    SignalAudioManager.AudioDevice.SPEAKER_PHONE, SignalAudioManager.AudioDevice.NONE -> R.drawable.symbol_speaker_outline_24
  }
/** Picks the icon content-description string for a route, based on its coarse device type. */
private fun getDescriptionStringResourceForDeviceType(deviceType: SignalAudioManager.AudioDevice): Int =
  when (deviceType) {
    SignalAudioManager.AudioDevice.EARPIECE -> R.string.WebRtcAudioOutputBottomSheet__earpiece_icon_content_description
    SignalAudioManager.AudioDevice.BLUETOOTH -> R.string.WebRtcAudioOutputBottomSheet__bluetooth_icon_content_description
    SignalAudioManager.AudioDevice.WIRED_HEADSET -> R.string.WebRtcAudioOutputBottomSheet__headset_icon_content_description
    // NONE falls back to the speaker description alongside SPEAKER_PHONE.
    SignalAudioManager.AudioDevice.SPEAKER_PHONE, SignalAudioManager.AudioDevice.NONE -> R.string.WebRtcAudioOutputBottomSheet__speaker_icon_content_description
  }
data class AudioOutputOption(val friendlyName: String, val deviceType: SignalAudioManager.AudioDevice, val deviceId: Int)
/** Compose preview of [DeviceList] with one sample entry per device type; earpiece pre-selected. */
@Preview
@Composable
private fun SampleOutputBottomSheet() {
  DeviceList(
    audioOutputOptions = listOf(
      AudioOutputOption("Earpiece", SignalAudioManager.AudioDevice.EARPIECE, 0),
      AudioOutputOption("Speaker", SignalAudioManager.AudioDevice.SPEAKER_PHONE, 1),
      AudioOutputOption("BT Headset", SignalAudioManager.AudioDevice.BLUETOOTH, 2),
      AudioOutputOption("Wired Headset", SignalAudioManager.AudioDevice.WIRED_HEADSET, 3)
    ).toImmutableList(),
    initialDeviceId = 0,
    onDeviceSelected = {}
  )
}

View File

@@ -1,211 +0,0 @@
package org.thoughtcrime.securesms.components.webrtc;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.widget.AppCompatImageView;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.google.android.material.dialog.MaterialAlertDialogBuilder;
import org.thoughtcrime.securesms.R;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Legacy (pre-Android 12) call-screen button for selecting the audio output route.
 * <p>
 * A tap either toggles to the next mode in the cycle or — when three or more modes are
 * available, or no earpiece exists — shows a dialog listing the available
 * {@link WebRtcAudioOutput} modes. The chosen mode drives the button's drawable state and is
 * reported through {@link OnAudioOutputChangedListener}.
 */
public class WebRtcAudioOutputToggleButton extends AppCompatImageView {

  private static final String STATE_OUTPUT_INDEX    = "audio.output.toggle.state.output.index";
  private static final String STATE_HEADSET_ENABLED = "audio.output.toggle.state.headset.enabled";
  private static final String STATE_HANDSET_ENABLED = "audio.output.toggle.state.handset.enabled";
  private static final String STATE_PARENT          = "audio.output.toggle.state.parent";

  private static final int[] SPEAKER_OFF    = { R.attr.state_speaker_off };
  private static final int[] SPEAKER_ON     = { R.attr.state_speaker_on };
  private static final int[] OUTPUT_HANDSET = { R.attr.state_handset_selected };
  private static final int[] OUTPUT_SPEAKER = { R.attr.state_speaker_selected };
  private static final int[] OUTPUT_HEADSET = { R.attr.state_headset_selected };

  // outputIndex indexes BOTH parallel tables below: OUTPUT_ENUM[i] is the drawable state and
  // OUTPUT_MODES.get(i) the logical mode. HANDSET/SPEAKER appear twice because indices 0-1 are
  // the two-state speaker toggle and indices 2-4 the picker-selected states.
  private static final int[][]                 OUTPUT_ENUM  = { SPEAKER_OFF, SPEAKER_ON, OUTPUT_HANDSET, OUTPUT_SPEAKER, OUTPUT_HEADSET };
  private static final List<WebRtcAudioOutput> OUTPUT_MODES = Arrays.asList(WebRtcAudioOutput.HANDSET, WebRtcAudioOutput.SPEAKER, WebRtcAudioOutput.HANDSET, WebRtcAudioOutput.SPEAKER, WebRtcAudioOutput.HEADSET);

  private boolean isHeadsetAvailable;
  private boolean isHandsetAvailable;
  private int     outputIndex;

  private OnAudioOutputChangedListener audioOutputChangedListener;
  private DialogInterface              picker;

  public WebRtcAudioOutputToggleButton(@NonNull Context context) {
    this(context, null);
  }

  public WebRtcAudioOutputToggleButton(@NonNull Context context, @Nullable AttributeSet attrs) {
    this(context, attrs, 0);
  }

  public WebRtcAudioOutputToggleButton(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    super.setOnClickListener((v) -> {
      List<WebRtcAudioOutput> availableModes = buildOutputModeList(isHeadsetAvailable, isHandsetAvailable);

      // More than two choices (or no earpiece to toggle against) requires an explicit picker;
      // otherwise just advance to the next mode in the cycle.
      if (availableModes.size() > 2 || !isHandsetAvailable) showPicker(availableModes);
      else setAudioOutput(OUTPUT_MODES.get((outputIndex + 1) % OUTPUT_MODES.size()), true);
    });
  }

  @Override
  protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    hidePicker();
  }

  @Override
  public int[] onCreateDrawableState(int extraSpace) {
    final int[] extra         = OUTPUT_ENUM[outputIndex];
    final int[] drawableState = super.onCreateDrawableState(extraSpace + extra.length);
    mergeDrawableStates(drawableState, extra);
    return drawableState;
  }

  @Override
  public void setOnClickListener(@Nullable OnClickListener l) {
    throw new UnsupportedOperationException("This View does not support custom click listeners.");
  }

  /** Declares which optional routes exist; affects both picker contents and index resolution. */
  public void setControlAvailability(boolean isHandsetAvailable, boolean isHeadsetAvailable) {
    this.isHandsetAvailable = isHandsetAvailable;
    this.isHeadsetAvailable = isHeadsetAvailable;
  }

  /**
   * Switches to the given output mode (mapped to the highest matching table index), refreshing
   * the drawable state and, when {@code notifyListener} is set, notifying the registered
   * listener — but only if the resolved mode actually changed.
   */
  public void setAudioOutput(@NonNull WebRtcAudioOutput audioOutput, boolean notifyListener) {
    int oldIndex = outputIndex;
    outputIndex = resolveAudioOutputIndex(OUTPUT_MODES.lastIndexOf(audioOutput));

    if (oldIndex != outputIndex) {
      refreshDrawableState();
      if (notifyListener) {
        notifyListener();
      }
    }
  }

  public void setOnAudioOutputChangedListener(@Nullable OnAudioOutputChangedListener listener) {
    this.audioOutputChangedListener = listener;
  }

  /** Builds and shows the dialog listing {@code availableModes}; selection hides the picker. */
  private void showPicker(@NonNull List<WebRtcAudioOutput> availableModes) {
    RecyclerView       rv      = new RecyclerView(getContext());
    AudioOutputAdapter adapter = new AudioOutputAdapter(audioOutput -> {
      setAudioOutput(audioOutput, true);
      hidePicker();
    },
    availableModes);

    adapter.setSelectedOutput(OUTPUT_MODES.get(outputIndex));
    rv.setLayoutManager(new LinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, false));
    rv.setAdapter(adapter);

    picker = new MaterialAlertDialogBuilder(getContext())
        .setTitle(R.string.WebRtcAudioOutputToggle__audio_output)
        .setView(rv)
        .setCancelable(true)
        .show();
  }

  @Override
  protected Parcelable onSaveInstanceState() {
    Parcelable parentState = super.onSaveInstanceState();
    Bundle     bundle      = new Bundle();

    bundle.putParcelable(STATE_PARENT, parentState);
    bundle.putInt(STATE_OUTPUT_INDEX, outputIndex);
    bundle.putBoolean(STATE_HEADSET_ENABLED, isHeadsetAvailable);
    bundle.putBoolean(STATE_HANDSET_ENABLED, isHandsetAvailable);
    return bundle;
  }

  @Override
  protected void onRestoreInstanceState(Parcelable state) {
    if (state instanceof Bundle) {
      Bundle savedState = (Bundle) state;

      // Restore availability first so resolveAudioOutputIndex() can validate the saved index.
      isHeadsetAvailable = savedState.getBoolean(STATE_HEADSET_ENABLED);
      isHandsetAvailable = savedState.getBoolean(STATE_HANDSET_ENABLED);
      setAudioOutput(OUTPUT_MODES.get(
          resolveAudioOutputIndex(savedState.getInt(STATE_OUTPUT_INDEX))),
                     false
      );
      super.onRestoreInstanceState(savedState.getParcelable(STATE_PARENT));
    } else {
      super.onRestoreInstanceState(state);
    }
  }

  private void hidePicker() {
    if (picker != null) {
      picker.dismiss();
      picker = null;
    }
  }

  private void notifyListener() {
    if (audioOutputChangedListener == null) return;

    audioOutputChangedListener.audioOutputChanged(OUTPUT_MODES.get(outputIndex));
  }

  /** Speaker is always offered; headset and earpiece only when flagged available. */
  private static List<WebRtcAudioOutput> buildOutputModeList(boolean isHeadsetAvailable, boolean isHandsetAvailable) {
    List<WebRtcAudioOutput> modes = new ArrayList<>(3);

    modes.add(WebRtcAudioOutput.SPEAKER);

    if (isHeadsetAvailable) {
      modes.add(WebRtcAudioOutput.HEADSET);
    }
    if (isHandsetAvailable) {
      modes.add(WebRtcAudioOutput.HANDSET);
    }
    return modes;
  }

  /**
   * Clamps a desired table index to one that is currently supported: unsupported modes fall back
   * to speaker (no earpiece) or earpiece (no headset); without a headset, indices collapse to the
   * two-state toggle range via {@code % 2}.
   *
   * @throws IllegalArgumentException if the index is outside the mode table entirely.
   */
  private int resolveAudioOutputIndex(int desiredAudioOutputIndex) {
    if (isIllegalAudioOutputIndex(desiredAudioOutputIndex)) {
      throw new IllegalArgumentException("Unsupported index: " + desiredAudioOutputIndex);
    }

    if (isUnsupportedAudioOutput(desiredAudioOutputIndex, isHeadsetAvailable, isHandsetAvailable)) {
      if (!isHandsetAvailable) {
        return OUTPUT_MODES.lastIndexOf(WebRtcAudioOutput.SPEAKER);
      } else {
        return OUTPUT_MODES.indexOf(WebRtcAudioOutput.HANDSET);
      }
    }

    if (!isHeadsetAvailable) {
      return desiredAudioOutputIndex % 2;
    }

    return desiredAudioOutputIndex;
  }

  private static boolean isIllegalAudioOutputIndex(int desiredAudioOutputIndex) {
    // An index EQUAL to the table size is also out of bounds: the previous '>' comparison let it
    // through, producing an IndexOutOfBoundsException in OUTPUT_MODES.get(...) instead of the
    // intended IllegalArgumentException.
    return desiredAudioOutputIndex < 0 || desiredAudioOutputIndex >= OUTPUT_MODES.size();
  }

  private static boolean isUnsupportedAudioOutput(int desiredAudioOutputIndex, boolean isHeadsetAvailable, boolean isHandsetAvailable) {
    return (OUTPUT_MODES.get(desiredAudioOutputIndex) == WebRtcAudioOutput.HEADSET && !isHeadsetAvailable) ||
           (OUTPUT_MODES.get(desiredAudioOutputIndex) == WebRtcAudioOutput.HANDSET && !isHandsetAvailable);
  }
}

View File

@@ -0,0 +1,292 @@
package org.thoughtcrime.securesms.components.webrtc
import android.content.Context
import android.content.ContextWrapper
import android.content.DialogInterface
import android.media.AudioDeviceInfo
import android.os.Build
import android.os.Bundle
import android.os.Parcelable
import android.util.AttributeSet
import android.view.View.OnClickListener
import android.widget.Toast
import androidx.annotation.RequiresApi
import androidx.appcompat.widget.AppCompatImageView
import androidx.fragment.app.FragmentActivity
import androidx.fragment.app.FragmentManager
import androidx.recyclerview.widget.LinearLayoutManager
import androidx.recyclerview.widget.RecyclerView
import com.google.android.material.dialog.MaterialAlertDialogBuilder
import org.signal.core.util.logging.Log
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.dependencies.ApplicationDependencies
import org.thoughtcrime.securesms.webrtc.audio.AudioDeviceMapping
import org.thoughtcrime.securesms.webrtc.audio.SignalAudioManager
/**
 * A UI button that triggers a picker dialog/bottom sheet allowing the user to select the audio
 * output for the ongoing call.
 *
 * On API 31+ a tap opens [WebRtcAudioOutputBottomSheet] populated from the platform's available
 * communication devices; on older APIs it either cycles the output or opens the legacy
 * [AudioOutputAdapter] dialog. The currently selected output drives the button's drawable state.
 */
class WebRtcAudioOutputToggleButton @JvmOverloads constructor(context: Context, attrs: AttributeSet? = null, defStyleAttr: Int = 0) : AppCompatImageView(context, attrs, defStyleAttr) {
private val TAG = Log.tag(WebRtcAudioOutputToggleButton::class.java)
// Tracks which outputs are currently available and which one is selected.
private var outputState: OutputState = OutputState()
// Exactly one of these listeners is relevant, depending on API level (see init).
private var audioOutputChangedListenerLegacy: OnAudioOutputChangedListener? = null
private var audioOutputChangedListener31: OnAudioOutputChangedListener31? = null
private var picker: DialogInterface? = null
// Pre-31 behavior: with 3+ routes, or no earpiece to toggle against, show the legacy dialog;
// otherwise just advance to the next available output.
private val clickListenerLegacy: OnClickListener = OnClickListener {
val outputs = outputState.getOutputs()
if (outputs.size >= SHOW_PICKER_THRESHOLD || !outputState.isEarpieceAvailable) {
showPickerLegacy(outputs)
} else {
setAudioOutput(outputState.peekNext(), true)
}
}
// 31+ behavior: the bottom sheet needs a FragmentManager, so the hosting context must
// (transitively) be a FragmentActivity; otherwise we log and surface an error toast.
@RequiresApi(31)
private val clickListener31 = OnClickListener {
val fragmentActivity = context.fragmentActivity()
if (fragmentActivity != null) {
showPicker31(fragmentActivity.supportFragmentManager)
} else {
Log.e(TAG, "WebRtcAudioOutputToggleButton instantiated from a context that does not inherit from FragmentActivity.")
Toast.makeText(context, R.string.WebRtcAudioOutputToggleButton_fragment_activity_error, Toast.LENGTH_LONG).show()
}
}
init {
// Chosen once at construction; setOnClickListener is blocked for external callers below.
super.setOnClickListener(
if (Build.VERSION.SDK_INT >= 31) {
clickListener31
} else {
clickListenerLegacy
}
)
}
override fun onDetachedFromWindow() {
super.onDetachedFromWindow()
hidePicker()
}
/**
 * DO NOT REMOVE THE ELVIS OPERATOR IN THE FIRST LINE
 * Somehow, through XML inflation (reflection?), [outputState] can actually be null,
 * even though the compiler disagrees.
 * */
override fun onCreateDrawableState(extraSpace: Int): IntArray {
val currentState = outputState ?: return super.onCreateDrawableState(extraSpace) // DO NOT REMOVE
val currentOutput = currentState.getCurrentOutput()
// Map the selected output to its custom drawable-state attribute.
val extra = when (currentOutput) {
WebRtcAudioOutput.HANDSET -> intArrayOf(R.attr.state_handset_selected)
WebRtcAudioOutput.SPEAKER -> intArrayOf(R.attr.state_speaker_selected)
WebRtcAudioOutput.BLUETOOTH_HEADSET -> intArrayOf(R.attr.state_bt_headset_selected)
WebRtcAudioOutput.WIRED_HEADSET -> intArrayOf(R.attr.state_wired_headset_selected)
}
// NOTE(review): despite the name, this is the label of the output being switched TO.
val oldLabel = context.getString(currentOutput.labelRes)
Log.i(TAG, "Switching drawable to $oldLabel")
val drawableState = super.onCreateDrawableState(extraSpace + extra.size)
mergeDrawableStates(drawableState, extra)
return drawableState
}
override fun setOnClickListener(l: OnClickListener?) {
throw UnsupportedOperationException("This View does not support custom click listeners.")
}
/** Declares which optional routes currently exist; drives what the legacy picker offers. */
fun setControlAvailability(isEarpieceAvailable: Boolean, isBluetoothHeadsetAvailable: Boolean) {
outputState.isEarpieceAvailable = isEarpieceAvailable
outputState.isBluetoothHeadsetAvailable = isBluetoothHeadsetAvailable
}
/**
 * Switches to [audioOutput] if it differs from the current one, refreshing the drawable state
 * and (optionally) notifying the legacy listener.
 */
fun setAudioOutput(audioOutput: WebRtcAudioOutput, notifyListener: Boolean) {
val oldOutput = outputState.getCurrentOutput()
if (oldOutput != audioOutput) {
outputState.setCurrentOutput(audioOutput)
refreshDrawableState()
if (notifyListener) {
audioOutputChangedListenerLegacy?.audioOutputChanged(audioOutput)
}
}
}
fun setOnAudioOutputChangedListenerLegacy(listener: OnAudioOutputChangedListener?) {
audioOutputChangedListenerLegacy = listener
}
@RequiresApi(31)
fun setOnAudioOutputChangedListener31(listener: OnAudioOutputChangedListener31?) {
audioOutputChangedListener31 = listener
}
/** Pre-31 picker: a dialog-hosted RecyclerView of the available [WebRtcAudioOutput] modes. */
private fun showPickerLegacy(availableModes: List<WebRtcAudioOutput?>) {
val rv = RecyclerView(context)
val adapter = AudioOutputAdapter(
{ audioOutput: WebRtcAudioOutput ->
setAudioOutput(audioOutput, true)
hidePicker()
},
availableModes
)
adapter.setSelectedOutput(outputState.getCurrentOutput())
rv.layoutManager = LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false)
rv.adapter = adapter
picker = MaterialAlertDialogBuilder(context)
.setTitle(R.string.WebRtcAudioOutputToggle__audio_output)
.setView(rv)
.setCancelable(true)
.show()
}
/**
 * 31+ picker: lists the platform's available communication devices in a bottom sheet.
 * On selection, the raw device id goes to the 31+ listener, and the coarse device type is
 * mirrored into [outputState] so the button drawable updates.
 */
@RequiresApi(31)
private fun showPicker31(fragmentManager: FragmentManager) {
val am = ApplicationDependencies.getAndroidCallAudioManager()
if (am.availableCommunicationDevices.isEmpty()) {
Toast.makeText(context, R.string.WebRtcAudioOutputToggleButton_no_eligible_audio_i_o_detected, Toast.LENGTH_LONG).show()
return
}
val devices: List<AudioOutputOption> = am.availableCommunicationDevices.map { AudioOutputOption(it.toFriendlyName(context).toString(), AudioDeviceMapping.fromPlatformType(it.type), it.id) }
picker = WebRtcAudioOutputBottomSheet.show(fragmentManager, devices, am.communicationDevice?.id ?: -1) {
audioOutputChangedListener31?.audioOutputChanged(it.deviceId)
// Selecting a device implies it is available; record that before mirroring the selection.
when (it.deviceType) {
SignalAudioManager.AudioDevice.WIRED_HEADSET -> {
outputState.isWiredHeadsetAvailable = true
setAudioOutput(WebRtcAudioOutput.WIRED_HEADSET, true)
}
SignalAudioManager.AudioDevice.EARPIECE -> {
outputState.isEarpieceAvailable = true
setAudioOutput(WebRtcAudioOutput.HANDSET, true)
}
SignalAudioManager.AudioDevice.BLUETOOTH -> {
outputState.isBluetoothHeadsetAvailable = true
setAudioOutput(WebRtcAudioOutput.BLUETOOTH_HEADSET, true)
}
SignalAudioManager.AudioDevice.SPEAKER_PHONE, SignalAudioManager.AudioDevice.NONE -> setAudioOutput(WebRtcAudioOutput.SPEAKER, true)
}
}
}
/** Human-readable name for a device: localized string for known types, product name otherwise. */
@RequiresApi(23)
private fun AudioDeviceInfo.toFriendlyName(context: Context): CharSequence {
return when (this.type) {
AudioDeviceInfo.TYPE_BUILTIN_EARPIECE -> context.getString(R.string.WebRtcAudioOutputToggle__phone_earpiece)
AudioDeviceInfo.TYPE_BUILTIN_SPEAKER -> context.getString(R.string.WebRtcAudioOutputToggle__speaker)
AudioDeviceInfo.TYPE_WIRED_HEADSET -> context.getString(R.string.WebRtcAudioOutputToggle__wired_headset)
AudioDeviceInfo.TYPE_USB_HEADSET -> context.getString(R.string.WebRtcAudioOutputToggle__wired_headset_usb)
else -> this.productName
}
}
override fun onSaveInstanceState(): Parcelable {
val parentState = super.onSaveInstanceState()
val bundle = Bundle()
bundle.putParcelable(STATE_PARENT, parentState)
bundle.putInt(STATE_OUTPUT_INDEX, outputState.getBackingIndexForBackup())
bundle.putBoolean(STATE_HEADSET_ENABLED, outputState.isBluetoothHeadsetAvailable)
bundle.putBoolean(STATE_HANDSET_ENABLED, outputState.isEarpieceAvailable)
return bundle
}
override fun onRestoreInstanceState(state: Parcelable) {
if (state is Bundle) {
// Restore availability first, then the selection index (which is reset — see OutputState).
outputState.isBluetoothHeadsetAvailable = state.getBoolean(STATE_HEADSET_ENABLED)
outputState.isEarpieceAvailable = state.getBoolean(STATE_HANDSET_ENABLED)
outputState.setBackingIndexForRestore(state.getInt(STATE_OUTPUT_INDEX))
refreshDrawableState()
super.onRestoreInstanceState(state.getParcelable(STATE_PARENT))
} else {
super.onRestoreInstanceState(state)
}
}
private fun hidePicker() {
picker?.dismiss()
picker = null
}
/**
 * The set of currently-available outputs plus the index of the selected one.
 * SPEAKER is always present; the other outputs are toggled in and out via the
 * is*Available properties. [selectedDevice] indexes into [getOutputs].
 */
inner class OutputState {
private val availableOutputs: LinkedHashSet<WebRtcAudioOutput> = linkedSetOf(WebRtcAudioOutput.SPEAKER)
private var selectedDevice = 0
@Deprecated("Used only for onSaveInstanceState.")
fun getBackingIndexForBackup(): Int {
return selectedDevice
}
@Deprecated("Used only for onRestoreInstanceState.")
fun setBackingIndexForRestore(index: Int) {
// NOTE(review): the passed index is ignored and the selection resets to 0 (SPEAKER is
// always first). Presumably deliberate because the available set may have changed across
// restore — confirm, otherwise the saved STATE_OUTPUT_INDEX is effectively unused.
selectedDevice = 0
}
fun getCurrentOutput(): WebRtcAudioOutput {
return getOutputs()[selectedDevice]
}
/** Selects [outputType] if it is currently available; returns false (no change) otherwise. */
fun setCurrentOutput(outputType: WebRtcAudioOutput): Boolean {
val newIndex = getOutputs().indexOf(outputType)
return if (newIndex < 0) {
false
} else {
selectedDevice = newIndex
true
}
}
fun getOutputs(): List<WebRtcAudioOutput> {
return availableOutputs.toList()
}
/** The output after the current one, wrapping around the available set. */
fun peekNext(): WebRtcAudioOutput {
val peekIndex = (selectedDevice + 1) % availableOutputs.size
return getOutputs()[peekIndex]
}
var isEarpieceAvailable: Boolean
get() = availableOutputs.contains(WebRtcAudioOutput.HANDSET)
set(value) {
if (value) {
availableOutputs.add(WebRtcAudioOutput.HANDSET)
} else {
availableOutputs.remove(WebRtcAudioOutput.HANDSET)
}
}
var isBluetoothHeadsetAvailable: Boolean
get() = availableOutputs.contains(WebRtcAudioOutput.BLUETOOTH_HEADSET)
set(value) {
if (value) {
availableOutputs.add(WebRtcAudioOutput.BLUETOOTH_HEADSET)
} else {
availableOutputs.remove(WebRtcAudioOutput.BLUETOOTH_HEADSET)
}
}
var isWiredHeadsetAvailable: Boolean
get() = availableOutputs.contains(WebRtcAudioOutput.WIRED_HEADSET)
set(value) {
if (value) {
availableOutputs.add(WebRtcAudioOutput.WIRED_HEADSET)
} else {
availableOutputs.remove(WebRtcAudioOutput.WIRED_HEADSET)
}
}
}
companion object {
private const val SHOW_PICKER_THRESHOLD = 3
private const val STATE_OUTPUT_INDEX = "audio.output.toggle.state.output.index"
private const val STATE_HEADSET_ENABLED = "audio.output.toggle.state.headset.enabled"
private const val STATE_HANDSET_ENABLED = "audio.output.toggle.state.handset.enabled"
private const val STATE_PARENT = "audio.output.toggle.state.parent"
// Walks the ContextWrapper chain to find the hosting FragmentActivity, if any.
private tailrec fun Context.fragmentActivity(): FragmentActivity? = when (this) {
is FragmentActivity -> this
else -> (this as? ContextWrapper)?.baseContext?.fragmentActivity()
}
}
}

View File

@@ -5,6 +5,7 @@ import android.graphics.ColorMatrix;
import android.graphics.ColorMatrixColorFilter;
import android.graphics.Point;
import android.graphics.Rect;
import android.os.Build;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
@@ -16,6 +17,7 @@ import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.appcompat.content.res.AppCompatResources;
import androidx.constraintlayout.widget.ConstraintLayout;
import androidx.constraintlayout.widget.ConstraintSet;
@@ -237,9 +239,16 @@ public class WebRtcCallView extends ConstraintLayout {
adjustableMarginsSet.add(videoToggle);
adjustableMarginsSet.add(audioToggle);
audioToggle.setOnAudioOutputChangedListener(outputMode -> {
runIfNonNull(controlsListener, listener -> listener.onAudioOutputChanged(outputMode));
});
if (Build.VERSION.SDK_INT >= 31) {
audioToggle.setOnAudioOutputChangedListener31(deviceId -> {
runIfNonNull(controlsListener, listener -> listener.onAudioOutputChanged31(deviceId));
});
} else {
audioToggle.setOnAudioOutputChangedListenerLegacy(outputMode -> {
runIfNonNull(controlsListener, listener -> listener.onAudioOutputChanged(outputMode));
});
}
videoToggle.setOnCheckedChangeListener((v, isOn) -> {
runIfNonNull(controlsListener, listener -> listener.onVideoChanged(isOn));
@@ -639,8 +648,8 @@ public class WebRtcCallView extends ConstraintLayout {
if (webRtcControls.displayAudioToggle()) {
visibleViewSet.add(audioToggle);
audioToggle.setControlAvailability(webRtcControls.enableHandsetInAudioToggle(),
webRtcControls.enableHeadsetInAudioToggle());
audioToggle.setControlAvailability(webRtcControls.enableEarpieceInAudioToggle(),
webRtcControls.enableBluetoothHeadsetInAudioToggle());
audioToggle.setAudioOutput(webRtcControls.getAudioOutput(), false);
}
@@ -1049,6 +1058,8 @@ public class WebRtcCallView extends ConstraintLayout {
void showSystemUI();
void hideSystemUI();
void onAudioOutputChanged(@NonNull WebRtcAudioOutput audioOutput);
@RequiresApi(31)
void onAudioOutputChanged31(@NonNull int audioOutputAddress);
void onVideoChanged(boolean isVideoEnabled);
void onMicChanged(boolean isMicEnabled);
void onCameraDirectionChanged();

View File

@@ -153,7 +153,7 @@ public final class WebRtcControls {
}
boolean displayAudioToggle() {
return (isPreJoin() || isAtLeastOutgoing()) && (!isLocalVideoEnabled || enableHeadsetInAudioToggle());
return (isPreJoin() || isAtLeastOutgoing()) && (!isLocalVideoEnabled || enableBluetoothHeadsetInAudioToggle());
}
boolean displayCameraToggle() {
@@ -172,11 +172,11 @@ public final class WebRtcControls {
return isIncoming();
}
boolean enableHandsetInAudioToggle() {
boolean enableEarpieceInAudioToggle() {
return !isLocalVideoEnabled;
}
boolean enableHeadsetInAudioToggle() {
boolean enableBluetoothHeadsetInAudioToggle() {
return availableDevices.contains(SignalAudioManager.AudioDevice.BLUETOOTH);
}
@@ -201,7 +201,9 @@ public final class WebRtcControls {
case SPEAKER_PHONE:
return WebRtcAudioOutput.SPEAKER;
case BLUETOOTH:
return WebRtcAudioOutput.HEADSET;
return WebRtcAudioOutput.BLUETOOTH_HEADSET;
case WIRED_HEADSET:
return WebRtcAudioOutput.WIRED_HEADSET;
default:
return WebRtcAudioOutput.HANDSET;
}

View File

@@ -1,6 +1,7 @@
package org.thoughtcrime.securesms.service.webrtc;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import org.signal.core.util.logging.Log;
import org.thoughtcrime.securesms.components.webrtc.BroadcastVideoSink;
@@ -38,7 +39,7 @@ public abstract class DeviceAwareActionProcessor extends WebRtcActionProcessor {
}
@Override
protected @NonNull WebRtcServiceState handleSetUserAudioDevice(@NonNull WebRtcServiceState currentState, @NonNull SignalAudioManager.AudioDevice userDevice) {
protected @NonNull WebRtcServiceState handleSetUserAudioDevice(@NonNull WebRtcServiceState currentState, @NonNull SignalAudioManager.ChosenAudioDeviceIdentifier userDevice) {
Log.i(tag, "handleSetUserAudioDevice(): userDevice: " + userDevice);
RemotePeer activePeer = currentState.getCallInfoState().getActivePeer();

View File

@@ -9,6 +9,7 @@ import android.os.ResultReceiver;
import androidx.annotation.AnyThread;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.annimon.stream.Stream;
@@ -30,8 +31,8 @@ import org.signal.ringrtc.Remote;
import org.signal.storageservice.protos.groups.GroupExternalCredential;
import org.thoughtcrime.securesms.WebRtcCallActivity;
import org.thoughtcrime.securesms.crypto.UnidentifiedAccessUtil;
import org.thoughtcrime.securesms.database.GroupTable;
import org.thoughtcrime.securesms.database.CallTable;
import org.thoughtcrime.securesms.database.GroupTable;
import org.thoughtcrime.securesms.database.SignalDatabase;
import org.thoughtcrime.securesms.database.model.GroupRecord;
import org.thoughtcrime.securesms.dependencies.ApplicationDependencies;
@@ -319,7 +320,7 @@ private void processStateless(@NonNull Function1<WebRtcEphemeralState, WebRtcEph
process((s, p) -> p.handleBluetoothPermissionDenied(s));
}
public void selectAudioDevice(@NonNull SignalAudioManager.AudioDevice desiredDevice) {
public void selectAudioDevice(@NonNull SignalAudioManager.ChosenAudioDeviceIdentifier desiredDevice) {
process((s, p) -> p.handleSetUserAudioDevice(s, desiredDevice));
}

View File

@@ -5,6 +5,7 @@ import android.os.ResultReceiver;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.annimon.stream.Stream;
@@ -473,7 +474,7 @@ public abstract class WebRtcActionProcessor {
.build();
}
protected @NonNull WebRtcServiceState handleSetUserAudioDevice(@NonNull WebRtcServiceState currentState, @NonNull SignalAudioManager.AudioDevice userDevice) {
protected @NonNull WebRtcServiceState handleSetUserAudioDevice(@NonNull WebRtcServiceState currentState, @NonNull SignalAudioManager.ChosenAudioDeviceIdentifier userDevice) {
Log.i(tag, "handleSetUserAudioDevice not processed");
return currentState;
}

View File

@@ -184,7 +184,7 @@ public final class WebRtcCallService extends Service implements SignalAudioManag
case ACTION_SEND_AUDIO_COMMAND:
setCallNotification();
if (signalAudioManager == null) {
signalAudioManager = SignalAudioManager.create(this, this, isGroup);
signalAudioManager = SignalAudioManager.create(this, this);
}
AudioManagerCommand audioCommand = Objects.requireNonNull(intent.getParcelableExtra(EXTRA_AUDIO_COMMAND));
Log.i(TAG, "Sending audio command [" + audioCommand.getClass().getSimpleName() + "] to " + signalAudioManager.getClass().getSimpleName());

View File

@@ -5,6 +5,7 @@ import android.net.Uri;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import org.signal.ringrtc.CallManager;
import org.signal.ringrtc.GroupCall;
@@ -152,8 +153,12 @@ public class WebRtcInteractor {
WebRtcCallService.sendAudioManagerCommand(context, new AudioManagerCommand.Start());
}
public void setUserAudioDevice(@Nullable RecipientId recipientId, @NonNull SignalAudioManager.AudioDevice userDevice) {
WebRtcCallService.sendAudioManagerCommand(context, new AudioManagerCommand.SetUserDevice(recipientId, userDevice));
public void setUserAudioDevice(@Nullable RecipientId recipientId, @NonNull SignalAudioManager.ChosenAudioDeviceIdentifier userDevice) {
if (userDevice.isLegacy()) {
WebRtcCallService.sendAudioManagerCommand(context, new AudioManagerCommand.SetUserDevice(recipientId, userDevice.getDesiredAudioDeviceLegacy().ordinal(), false));
} else {
WebRtcCallService.sendAudioManagerCommand(context, new AudioManagerCommand.SetUserDevice(recipientId, userDevice.getDesiredAudioDevice31(), true));
}
}
public void setDefaultAudioDevice(@NonNull RecipientId recipientId, @NonNull SignalAudioManager.AudioDevice userDevice, boolean clearUserEarpieceSelection) {

View File

@@ -6,6 +6,8 @@ import androidx.annotation.RequiresApi
@RequiresApi(31)
object AudioDeviceMapping {
val orderOfPreference: List<SignalAudioManager.AudioDevice> = listOf(SignalAudioManager.AudioDevice.BLUETOOTH, SignalAudioManager.AudioDevice.WIRED_HEADSET, SignalAudioManager.AudioDevice.EARPIECE, SignalAudioManager.AudioDevice.SPEAKER_PHONE, SignalAudioManager.AudioDevice.NONE)
private val systemDeviceTypeMap: Map<SignalAudioManager.AudioDevice, List<Int>> = mapOf(
SignalAudioManager.AudioDevice.BLUETOOTH to listOf(AudioDeviceInfo.TYPE_BLUETOOTH_SCO, AudioDeviceInfo.TYPE_BLUETOOTH_A2DP, AudioDeviceInfo.TYPE_BLE_HEADSET, AudioDeviceInfo.TYPE_HEARING_AID),
SignalAudioManager.AudioDevice.EARPIECE to listOf(AudioDeviceInfo.TYPE_BUILTIN_EARPIECE),

View File

@@ -77,18 +77,20 @@ sealed class AudioManagerCommand : Parcelable {
}
}
class SetUserDevice(val recipientId: RecipientId?, val device: SignalAudioManager.AudioDevice) : AudioManagerCommand() {
class SetUserDevice(val recipientId: RecipientId?, val device: Int, val isId: Boolean) : AudioManagerCommand() {
override fun writeToParcel(parcel: Parcel, flags: Int) {
parcel.writeParcelable(recipientId, flags)
parcel.writeSerializable(device)
parcel.writeInt(device)
ParcelUtil.writeBoolean(parcel, isId)
}
companion object {
@JvmField
val CREATOR: Parcelable.Creator<SetUserDevice> = ParcelCheat {
SetUserDevice(
it.readParcelableCompat(RecipientId::class.java),
it.readSerializableCompat(SignalAudioManager.AudioDevice::class.java)!!
recipientId = it.readParcelableCompat(RecipientId::class.java),
device = it.readInt(),
isId = ParcelUtil.readBoolean(it)
)
}
}

View File

@@ -4,7 +4,6 @@ import android.content.Context
import android.media.AudioDeviceCallback
import android.media.AudioDeviceInfo
import android.media.AudioManager
import android.media.MediaRouter
import android.net.Uri
import androidx.annotation.RequiresApi
import org.signal.core.util.logging.Log
@@ -19,15 +18,12 @@ import org.thoughtcrime.securesms.recipients.RecipientId
class FullSignalAudioManagerApi31(context: Context, eventListener: EventListener?) : SignalAudioManager(context, eventListener) {
private val TAG = "SignalAudioManager31"
private var currentAudioDevice: AudioDevice = AudioDevice.NONE
private var defaultAudioDevice: AudioDevice = AudioDevice.EARPIECE
private var userSelectedAudioDevice: AudioDevice = AudioDevice.NONE
private var userSelectedAudioDevice: AudioDeviceInfo? = null
private var savedAudioMode = AudioManager.MODE_INVALID
private var savedIsSpeakerPhoneOn = false
private var savedIsMicrophoneMute = false
private var hasWiredHeadset = false
private var autoSwitchToWiredHeadset = true
private var autoSwitchToBluetooth = true
private val deviceCallback = object : AudioDeviceCallback() {
@@ -56,9 +52,10 @@ class FullSignalAudioManagerApi31(context: Context, eventListener: EventListener
else -> throw AssertionError("Invalid default audio device selection")
}
if (clearUserEarpieceSelection && userSelectedAudioDevice == AudioDevice.EARPIECE) {
val userSelectedDeviceType: AudioDevice = userSelectedAudioDevice?.type?.let { AudioDeviceMapping.fromPlatformType(it) } ?: AudioDevice.NONE
if (clearUserEarpieceSelection && userSelectedDeviceType == AudioDevice.EARPIECE) {
Log.d(TAG, "Clearing user setting of earpiece")
userSelectedAudioDevice = AudioDevice.NONE
userSelectedAudioDevice = null
}
Log.d(TAG, "New default: $defaultAudioDevice userSelected: $userSelectedAudioDevice")
@@ -124,16 +121,15 @@ class FullSignalAudioManagerApi31(context: Context, eventListener: EventListener
Log.d(TAG, "Stopped")
}
override fun selectAudioDevice(recipientId: RecipientId?, device: AudioDevice) {
val devices: List<AudioDeviceInfo> = androidAudioManager.availableCommunicationDevices
val availableDevices: List<AudioDevice> = devices.map { AudioDeviceMapping.fromPlatformType(it.type) }
val actualDevice = if (device == AudioDevice.EARPIECE && availableDevices.contains(AudioDevice.WIRED_HEADSET)) AudioDevice.WIRED_HEADSET else device
Log.d(TAG, "selectAudioDevice(): device: $device actualDevice: $actualDevice")
if (!availableDevices.contains(actualDevice)) {
Log.w(TAG, "Can not select $actualDevice from available $availableDevices")
override fun selectAudioDevice(recipientId: RecipientId?, device: Int, isId: Boolean) {
if (!isId) {
throw IllegalArgumentException("Must supply a device address for API 31+.")
}
userSelectedAudioDevice = actualDevice
Log.d(TAG, "Selecting $device")
userSelectedAudioDevice = androidAudioManager.availableCommunicationDevices.find { it.id == device }
updateAudioDeviceState()
}
@@ -141,7 +137,7 @@ class FullSignalAudioManagerApi31(context: Context, eventListener: EventListener
Log.i(TAG, "startIncomingRinger(): uri: ${if (ringtoneUri != null) "present" else "null"} vibrate: $vibrate")
androidAudioManager.mode = AudioManager.MODE_RINGTONE
setMicrophoneMute(false)
setDefaultAudioDevice(null, AudioDevice.SPEAKER_PHONE, false)
setDefaultAudioDevice(recipientId = null, newDefaultDevice = AudioDevice.SPEAKER_PHONE, clearUserEarpieceSelection = false)
incomingRinger.start(ringtoneUri, vibrate)
}
@@ -167,88 +163,35 @@ class FullSignalAudioManagerApi31(context: Context, eventListener: EventListener
private fun updateAudioDeviceState() {
handler.assertHandlerThread()
val communicationDevice: AudioDeviceInfo? = androidAudioManager.communicationDevice
currentAudioDevice = if (communicationDevice == null) {
AudioDevice.NONE
} else {
AudioDeviceMapping.fromPlatformType(communicationDevice.type)
}
val currentAudioDevice: AudioDeviceInfo? = androidAudioManager.communicationDevice
val availableCommunicationDevices: List<AudioDeviceInfo> = androidAudioManager.availableCommunicationDevices
availableCommunicationDevices.forEach { Log.d(TAG, "Detected communication device of type: ${it.type}") }
val hasBluetoothHeadset = isBluetoothHeadsetConnected()
hasWiredHeadset = availableCommunicationDevices.any { AudioDeviceMapping.fromPlatformType(it.type) == AudioDevice.WIRED_HEADSET }
Log.i(
TAG,
"updateAudioDeviceState(): " +
"wired: $hasWiredHeadset " +
"bt: $hasBluetoothHeadset " +
"available: $availableCommunicationDevices " +
"selected: $selectedAudioDevice " +
"userSelected: $userSelectedAudioDevice"
)
val audioDevices: MutableSet<AudioDevice> = mutableSetOf(AudioDevice.SPEAKER_PHONE)
if (hasBluetoothHeadset) {
audioDevices += AudioDevice.BLUETOOTH
}
if (hasWiredHeadset) {
audioDevices += AudioDevice.WIRED_HEADSET
if (userSelectedAudioDevice != null) {
androidAudioManager.communicationDevice = userSelectedAudioDevice
} else {
autoSwitchToWiredHeadset = true
if (androidAudioManager.hasEarpiece(context)) {
audioDevices += AudioDevice.EARPIECE
}
}
if (!hasBluetoothHeadset && userSelectedAudioDevice == AudioDevice.BLUETOOTH) {
userSelectedAudioDevice = AudioDevice.NONE
}
if (hasWiredHeadset && autoSwitchToWiredHeadset) {
userSelectedAudioDevice = AudioDevice.WIRED_HEADSET
autoSwitchToWiredHeadset = false
}
if (!hasWiredHeadset && userSelectedAudioDevice == AudioDevice.WIRED_HEADSET) {
userSelectedAudioDevice = AudioDevice.NONE
}
if (!autoSwitchToBluetooth && !hasBluetoothHeadset) {
autoSwitchToBluetooth = true
}
if (autoSwitchToBluetooth && hasBluetoothHeadset) {
userSelectedAudioDevice = AudioDevice.BLUETOOTH
autoSwitchToBluetooth = false
}
val deviceToSet: AudioDevice = when {
audioDevices.contains(userSelectedAudioDevice) -> userSelectedAudioDevice
audioDevices.contains(defaultAudioDevice) -> defaultAudioDevice
else -> AudioDevice.SPEAKER_PHONE
}
if (deviceToSet != currentAudioDevice) {
try {
val chosenDevice: AudioDeviceInfo = availableCommunicationDevices.first { AudioDeviceMapping.getEquivalentPlatformTypes(deviceToSet).contains(it.type) }
val result = androidAudioManager.setCommunicationDevice(chosenDevice)
if (result) {
Log.i(TAG, "Set active device to ID ${chosenDevice.id}, type ${chosenDevice.type}")
currentAudioDevice = deviceToSet
eventListener?.onAudioDeviceChanged(currentAudioDevice, availableCommunicationDevices.map { AudioDeviceMapping.fromPlatformType(it.type) }.toSet())
} else {
Log.w(TAG, "Setting device $chosenDevice failed.")
val excludedDevices = emptyList<String>() // TODO: pull this from somewhere. Preferences?
val autoSelectableDevices = availableCommunicationDevices.filterNot { excludedDevices.contains(it.address) }
var candidate: AudioDeviceInfo? = null
val searchOrder: List<AudioDevice> = listOf(defaultAudioDevice) + AudioDeviceMapping.orderOfPreference.filterNot { it == defaultAudioDevice }
for (deviceType in searchOrder) {
candidate = autoSelectableDevices.find { AudioDeviceMapping.fromPlatformType(it.type) == deviceType }
if (candidate != null) {
break
}
}
when (candidate) {
null -> {
Log.e(TAG, "Tried to switch audio devices but could not find suitable device in list of types: ${autoSelectableDevices.map { it.type }.joinToString()}")
androidAudioManager.clearCommunicationDevice()
}
currentAudioDevice -> Log.d(TAG, "Request to switch to existing audio device ignored.")
else -> {
Log.d(TAG, "Switching to new device of type ${candidate.type} from ${currentAudioDevice?.type}")
androidAudioManager.communicationDevice = candidate
eventListener?.onAudioDeviceChanged(AudioDeviceMapping.fromPlatformType(candidate.type), availableCommunicationDevices.map { AudioDeviceMapping.fromPlatformType(it.type) }.toSet())
}
} catch (e: NoSuchElementException) {
androidAudioManager.clearCommunicationDevice()
}
}
}
private fun isBluetoothHeadsetConnected(): Boolean {
val mediaRouter = context.getSystemService(Context.MEDIA_ROUTER_SERVICE) as MediaRouter
val liveAudioRoute = mediaRouter.getSelectedRoute(MediaRouter.ROUTE_TYPE_LIVE_AUDIO)
return liveAudioRoute.deviceType == MediaRouter.RouteInfo.DEVICE_TYPE_BLUETOOTH
}
}

View File

@@ -39,7 +39,7 @@ sealed class SignalAudioManager(protected val context: Context, protected val ev
companion object {
@JvmStatic
fun create(context: Context, eventListener: EventListener?, isGroup: Boolean): SignalAudioManager {
fun create(context: Context, eventListener: EventListener?): SignalAudioManager {
return if (Build.VERSION.SDK_INT >= 31) {
FullSignalAudioManagerApi31(context, eventListener)
} else {
@@ -55,7 +55,7 @@ sealed class SignalAudioManager(protected val context: Context, protected val ev
is AudioManagerCommand.Start -> start()
is AudioManagerCommand.Stop -> stop(command.playDisconnect)
is AudioManagerCommand.SetDefaultDevice -> setDefaultAudioDevice(command.recipientId, command.device, command.clearUserEarpieceSelection)
is AudioManagerCommand.SetUserDevice -> selectAudioDevice(command.recipientId, command.device)
is AudioManagerCommand.SetUserDevice -> selectAudioDevice(command.recipientId, command.device, command.isId)
is AudioManagerCommand.StartIncomingRinger -> startIncomingRinger(command.ringtoneUri, command.vibrate)
is AudioManagerCommand.SilenceIncomingRinger -> silenceIncomingRinger()
is AudioManagerCommand.StartOutgoingRinger -> startOutgoingRinger()
@@ -78,7 +78,7 @@ sealed class SignalAudioManager(protected val context: Context, protected val ev
protected abstract fun start()
protected abstract fun stop(playDisconnect: Boolean)
protected abstract fun setDefaultAudioDevice(recipientId: RecipientId?, newDefaultDevice: AudioDevice, clearUserEarpieceSelection: Boolean)
protected abstract fun selectAudioDevice(recipientId: RecipientId?, device: AudioDevice)
protected abstract fun selectAudioDevice(recipientId: RecipientId?, device: Int, isId: Boolean)
protected abstract fun startIncomingRinger(ringtoneUri: Uri?, vibrate: Boolean)
protected abstract fun startOutgoingRinger()
@@ -95,6 +95,28 @@ sealed class SignalAudioManager(protected val context: Context, protected val ev
UNINITIALIZED, PREINITIALIZED, RUNNING
}
/**
* This encapsulates the two ways to represent a chosen audio device.
* Use [desiredAudioDeviceLegacy] for API < 31
* Use [desiredAudioDevice31] for API 31+
*/
class ChosenAudioDeviceIdentifier {
var desiredAudioDeviceLegacy: AudioDevice? = null
var desiredAudioDevice31: Int? = null
fun isLegacy(): Boolean {
return desiredAudioDeviceLegacy != null
}
constructor(device: AudioDevice) {
desiredAudioDeviceLegacy = device
}
constructor(device: Int) {
desiredAudioDevice31 = device
}
}
interface EventListener {
@JvmSuppressWildcards
fun onAudioDeviceChanged(activeDevice: AudioDevice, devices: Set<AudioDevice>)
@@ -337,8 +359,12 @@ class FullSignalAudioManager(context: Context, eventListener: EventListener?) :
updateAudioDeviceState()
}
override fun selectAudioDevice(recipientId: RecipientId?, device: AudioDevice) {
val actualDevice = if (device == AudioDevice.EARPIECE && audioDevices.contains(AudioDevice.WIRED_HEADSET)) AudioDevice.WIRED_HEADSET else device
override fun selectAudioDevice(recipientId: RecipientId?, device: Int, isId: Boolean) {
if (isId) {
throw IllegalArgumentException("Passing audio device address $device to legacy audio manager")
}
val mappedDevice = AudioDevice.values()[device]
val actualDevice: AudioDevice = if (mappedDevice == AudioDevice.EARPIECE && audioDevices.contains(AudioDevice.WIRED_HEADSET)) AudioDevice.WIRED_HEADSET else mappedDevice
Log.d(TAG, "selectAudioDevice(): device: $device actualDevice: $actualDevice")
if (!audioDevices.contains(actualDevice)) {
@@ -377,7 +403,7 @@ class FullSignalAudioManager(context: Context, eventListener: EventListener?) :
Log.i(TAG, "startIncomingRinger(): uri: ${if (ringtoneUri != null) "present" else "null"} vibrate: $vibrate")
androidAudioManager.mode = AudioManager.MODE_RINGTONE
setMicrophoneMute(false)
setDefaultAudioDevice(null, AudioDevice.SPEAKER_PHONE, false)
setDefaultAudioDevice(recipientId = null, newDefaultDevice = AudioDevice.SPEAKER_PHONE, clearUserEarpieceSelection = false)
incomingRinger.start(ringtoneUri, vibrate)
}