Clean up callMessages_test
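Pull the virtual audio device names into shared INPUT1/OUTPUT1/INPUT2/OUTPUT2 constants and extract the repeated inline blocks into playAudio, stopAudio, acceptAudioCall, hangupCall, and getAndResetMaxAudioLevel helpers. Using the constants also fixes the stop step, which previously passed 'output_source_a' instead of 'output_sink_a' as the output sink.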

Co-authored-by: Miriam Zimmerman <mutexlox@signal.org>
Author: automated-signal
Date: 2025-12-10 15:49:53 -06:00
Committed by: GitHub
Parent: 07dc2bb201
Commit: 89bd68beb5

@@ -35,6 +35,11 @@ describe('callMessages', function callMessages(this: Mocha.Suite) {
   let app1: App;
   let app2: App;
+  const INPUT1 = 'input_source_a';
+  const OUTPUT1 = 'output_sink_a';
+  const INPUT2 = 'input_source_b';
+  const OUTPUT2 = 'output_sink_b';
   async function setUpAudio(source: string, sink: string) {
     debug(`setup source: ${source}, sink: ${sink}`);
     const args = ['--setup', '--input-source', source, '--output-sink', sink];
@@ -62,18 +67,50 @@ describe('callMessages', function callMessages(this: Mocha.Suite) {
     ]);
   }
+  function playAudio(source: string, sink: string, inputFile: string) {
+    execFile(
+      VIRTUAL_AUDIO,
+      [
+        '--play',
+        '--input-source',
+        source,
+        '--output-sink',
+        sink,
+        '--input-file',
+        inputFile,
+      ],
+      (error, stdout, stderr) => {
+        if (error) {
+          throw error;
+        }
+        debug(stdout);
+        debug(stderr);
+      }
+    );
+  }
+  async function stopAudio(source: string, sink: string) {
+    await execFilePromise(VIRTUAL_AUDIO, [
+      '--stop',
+      '--input-source',
+      source,
+      '--output-sink',
+      sink,
+    ]);
+  }
   before(async () => {
     runTurnInContainer();
     // Set up two virtual sources and sinks.
-    await setUpAudio('input_source_a', 'output_sink_a');
-    await setUpAudio('input_source_b', 'output_sink_b');
+    await setUpAudio(INPUT1, OUTPUT1);
+    await setUpAudio(INPUT2, OUTPUT2);
   });
   after(async () => {
     tearDownTurnContainer();
-    await tearDownAudio('input_source_a', 'output_sink_a');
-    await tearDownAudio('input_source_b', 'output_sink_b');
+    await tearDownAudio(INPUT1, OUTPUT1);
+    await tearDownAudio(INPUT2, OUTPUT2);
   });
   beforeEach(async () => {
@@ -164,6 +201,22 @@ describe('callMessages', function callMessages(this: Mocha.Suite) {
     await win.locator('.module-calling-device-selection__close-button').click();
   }
+  async function acceptAudioCall(win: Page) {
+    // Only wait for 3 seconds to make sure that this succeeded properly rather
+    // than timing out after ~10 seconds and using a direct connection
+    await win
+      .locator('.IncomingCallBar__button--accept-audio')
+      .click({ timeout: 3000 });
+  }
+  async function hangupCall(win: Page) {
+    await win.locator('.CallControls__JoinLeaveButton--hangup').click();
+  }
+  async function getAndResetMaxAudioLevel(win: Page) {
+    return win.evaluate('window.SignalCI?.getAndResetMaxAudioLevel()');
+  }
   it('can call and decline a call', async () => {
     const window1 = await app1.getWindow();
     await startAudioCallWith(window1, bootstrap2.phone.device.aci);
@@ -187,37 +240,13 @@ describe('callMessages', function callMessages(this: Mocha.Suite) {
     await startAudioCallWith(window1, bootstrap2.phone.device.aci);
     const window2 = await app2.getWindow();
-    // Only wait for 3 seconds to make sure that this succeeded properly rather
-    // than timing out after ~10 seconds and using a direct connection
-    await window2
-      .locator('.IncomingCallBar__button--accept-audio')
-      .click({ timeout: 3000 });
+    await acceptAudioCall(window2);
     try {
-      await setInputAndOutput(window1, 'input_source_a', 'output_sink_a');
-      await setInputAndOutput(window2, 'input_source_b', 'output_sink_b');
-      execFile(
-        VIRTUAL_AUDIO,
-        [
-          '--play',
-          '--input-source',
-          'input_source_a',
-          '--output-sink',
-          'output_sink_a',
-          '--input-file',
-          theRaven,
-        ],
-        (error, stdout, stderr) => {
-          if (error) {
-            throw error;
-          }
-          debug(stdout);
-          debug(stderr);
-        }
-      );
+      await setInputAndOutput(window1, INPUT1, OUTPUT1);
+      await setInputAndOutput(window2, INPUT2, OUTPUT2);
+      playAudio(INPUT1, OUTPUT1, theRaven);
       // Wait for audio levels indicator to be visible.
       await expect(
@@ -226,31 +255,20 @@ describe('callMessages', function callMessages(this: Mocha.Suite) {
         )
       ).toBeVisible({ timeout: 15000 });
-      // Wait a second to let the audio play
+      // Wait 2 seconds to let the audio play
       await new Promise(f => setTimeout(f, 2000));
-      expect(
-        await window2.evaluate('window.SignalCI?.getAndResetMaxAudioLevel()')
-      ).toBeGreaterThanOrEqual(0.25);
+      expect(await getAndResetMaxAudioLevel(window2)).toBeGreaterThanOrEqual(
+        0.25
+      );
     } finally {
+      await bootstrap2.screenshotWindow(window2, 'callee');
       // hang up after we detect audio (or fail to)
-      await window2.locator('.CallControls__JoinLeaveButton--hangup').click();
-      await execFilePromise(VIRTUAL_AUDIO, [
-        '--stop',
-        '--input-source',
-        'input_source_a',
-        '--output-sink',
-        'output_source_a',
-      ]);
+      await hangupCall(window2);
+      await stopAudio(INPUT1, OUTPUT1);
       await awaitNoCall(window1);
       await awaitNoCall(window2);
-      await window2.locator('.NavTabs__Item--Settings').click();
-      await window2.locator('.Preferences__button--calls').click();
-      await bootstrap2.screenshotWindow(window2, 'callee');
     }
   });
@@ -258,91 +276,48 @@ describe('callMessages', function callMessages(this: Mocha.Suite) {
     const theRaven = join(FIXTURES, 'the_raven.wav');
     const window1 = await app1.getWindow();
     const window2 = await app2.getWindow();
+    // First call: Neither muted, window2 ends call
     await startAudioCallWith(window1, bootstrap2.phone.device.aci);
-    // Only wait for 3 seconds to make sure that this succeeded properly rather
-    // than timing out after ~10 seconds and using a direct connection
-    await window2
-      .locator('.IncomingCallBar__button--accept-audio')
-      .click({ timeout: 3000 });
+    await acceptAudioCall(window2);
     try {
-      await setInputAndOutput(window1, 'input_source_a', 'output_sink_a');
-      await setInputAndOutput(window2, 'input_source_b', 'output_sink_b');
+      await setInputAndOutput(window1, INPUT1, OUTPUT1);
+      await setInputAndOutput(window2, INPUT2, OUTPUT2);
     } finally {
       // hang up
-      await window2.locator('.CallControls__JoinLeaveButton--hangup').click();
+      await hangupCall(window2);
       await awaitNoCall(window1);
       await awaitNoCall(window2);
     }
-    // Second call
+    // Second call: window1 mutes after placing call but before window 2 answers
     await startAudioCallWith(window1, bootstrap2.phone.device.aci);
-    // window1 mutes after placing call but before window 2 answers
     await window1.getByLabel('Mute mic').click();
-    // Only wait for 3 seconds to make sure that this succeeded properly rather
-    // than timing out after ~10 seconds and using a direct connection
-    await window2
-      .locator('.IncomingCallBar__button--accept-audio')
-      .click({ timeout: 3000 });
+    await acceptAudioCall(window2);
     // Wait a few hundred ms for initial comfort noise to subside
     await new Promise(f => setTimeout(f, 600));
     // We haven't played any audio into the virtual mic yet, so it's safe to
     // ignore anything this early / to assume it's comfort noise
-    await window2.evaluate('window.SignalCI?.getAndResetMaxAudioLevel()');
+    await getAndResetMaxAudioLevel(window2);
     try {
-      await setInputAndOutput(window1, 'input_source_a', 'output_sink_a');
-      await setInputAndOutput(window2, 'input_source_b', 'output_sink_b');
-      execFile(
-        VIRTUAL_AUDIO,
-        [
-          '--play',
-          '--input-source',
-          'input_source_a',
-          '--output-sink',
-          'output_sink_a',
-          '--input-file',
-          theRaven,
-        ],
-        (error, stdout, stderr) => {
-          if (error) {
-            throw error;
-          }
-          debug(stdout);
-          debug(stderr);
-        }
-      );
+      await setInputAndOutput(window1, INPUT1, OUTPUT1);
+      await setInputAndOutput(window2, INPUT2, OUTPUT2);
+      playAudio(INPUT1, OUTPUT1, theRaven);
-      // Wait a second
+      // Wait 2 seconds to let the audio play.
       await new Promise(f => setTimeout(f, 2000));
       // Make sure we got no audio
-      expect(
-        await window2.evaluate('window.SignalCI?.getAndResetMaxAudioLevel()')
-      ).toBeCloseTo(0);
+      expect(await getAndResetMaxAudioLevel(window2)).toBeCloseTo(0);
     } finally {
-      await window2.locator('.CallControls__JoinLeaveButton--hangup').click();
-      await execFilePromise(VIRTUAL_AUDIO, [
-        '--stop',
-        '--input-source',
-        'input_source_a',
-        '--output-sink',
-        'output_source_a',
-      ]);
+      await hangupCall(window2);
+      await stopAudio(INPUT1, OUTPUT1);
       await awaitNoCall(window1);
       await awaitNoCall(window2);