Support consecutive playback in all media

Fedor Indutny
2025-11-18 17:20:08 -08:00
committed by GitHub
parent 745472cc4b
commit 10a9e40a2b
10 changed files with 181 additions and 349 deletions

View File

@@ -585,12 +585,13 @@ export type BackupAttachmentDownloadProgress = {
completedBytes: number;
};
export type GetOlderMediaOptionsType = Readonly<{
export type GetSortedMediaOptionsType = Readonly<{
conversationId: string;
limit: number;
messageId?: string;
receivedAt?: number;
sentAt?: number;
order: 'older' | 'newer';
type: 'media' | 'audio' | 'documents';
}>;
@@ -842,7 +843,9 @@ type ReadableInterface = {
) => Array<MessageType>;
// getOlderMessagesByConversation returns JSON on the server, full messages on the client
hasMedia: (conversationId: string) => boolean;
getOlderMedia: (options: GetOlderMediaOptionsType) => Array<MediaItemDBType>;
getSortedMedia: (
options: GetSortedMediaOptionsType
) => Array<MediaItemDBType>;
getOlderLinkPreviews: (
options: GetOlderLinkPreviewsOptionsType
) => Array<LinkPreviewMediaItemDBType>;
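
For context, a minimal sketch of how a caller might page forward with the renamed reader (the cursor variables below are placeholders, not from the diff; the same pattern appears in the audioPlayer duck further down):

import { DataReader } from '../../sql/Client.preload.js';

// Fetch the single audio attachment that follows a known message.
// `conversationId`, `currentMessageId`, `currentReceivedAt`, and `currentSentAt`
// are hypothetical cursor values used only for illustration.
const [nextAudioItem] = await DataReader.getSortedMedia({
  conversationId,
  limit: 1,
  messageId: currentMessageId,
  receivedAt: currentReceivedAt,
  sentAt: currentSentAt,
  type: 'audio',
  order: 'newer',
});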

View File

@@ -136,7 +136,7 @@ import type {
GetConversationRangeCenteredOnMessageResultType,
GetKnownMessageAttachmentsResultType,
GetNearbyMessageFromDeletedSetOptionsType,
GetOlderMediaOptionsType,
GetSortedMediaOptionsType,
GetOlderLinkPreviewsOptionsType,
GetRecentStoryRepliesOptionsType,
GetUnreadByConversationAndMarkReadResultType,
@@ -456,7 +456,7 @@ export const DataReader: ServerReadableInterface = {
hasGroupCallHistoryMessage,
hasMedia,
getOlderMedia,
getSortedMedia,
getOlderLinkPreviews,
getAllNotificationProfiles,
@@ -5242,25 +5242,52 @@ function hasMedia(db: ReadableDB, conversationId: string): boolean {
const { VOICE_MESSAGE } = SignalService.AttachmentPointer.Flags;
function getOlderMedia(
function getSortedMedia(
db: ReadableDB,
{
order,
conversationId,
limit,
messageId,
receivedAt: maxReceivedAt = Number.MAX_VALUE,
sentAt: maxSentAt = Number.MAX_VALUE,
receivedAt: givenReceivedAt,
sentAt: givenSentAt,
type,
}: GetOlderMediaOptionsType
}: GetSortedMediaOptionsType
): Array<MediaItemDBType> {
const timeFilters = {
first: sqlFragment`
message_attachments.receivedAt = ${maxReceivedAt}
AND
message_attachments.sentAt < ${maxSentAt}
`,
second: sqlFragment`message_attachments.receivedAt < ${maxReceivedAt}`,
let timeFilters: {
first: QueryFragment;
second: QueryFragment;
};
let timeOrder: QueryFragment;
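// Keyset-style cursor: `first` matches rows that tie on receivedAt but fall on the
// requested side of sentAt; `second` matches rows strictly past the receivedAt cursor.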
if (order === 'older') {
const maxReceivedAt = givenReceivedAt ?? Number.MAX_VALUE;
const maxSentAt = givenSentAt ?? Number.MAX_VALUE;
timeFilters = {
first: sqlFragment`
message_attachments.receivedAt = ${maxReceivedAt}
AND
message_attachments.sentAt < ${maxSentAt}
`,
second: sqlFragment`message_attachments.receivedAt < ${maxReceivedAt}`,
};
timeOrder = sqlFragment`DESC`;
} else if (order === 'newer') {
const minReceivedAt = givenReceivedAt ?? Number.MIN_VALUE;
const minSentAt = givenSentAt ?? Number.MIN_VALUE;
timeFilters = {
first: sqlFragment`
message_attachments.receivedAt = ${minReceivedAt}
AND
message_attachments.sentAt > ${minSentAt}
`,
second: sqlFragment`message_attachments.receivedAt > ${minReceivedAt}`,
};
timeOrder = sqlFragment`ASC`;
} else {
throw missingCaseError(order);
}
let contentFilter: QueryFragment;
if (type === 'media') {
@@ -5313,7 +5340,9 @@ function getOlderMedia(
message_attachments.isViewOnce IS NOT 1 AND
message_attachments.messageType IN ('incoming', 'outgoing') AND
(${messageId ?? null} IS NULL OR message_attachments.messageId IS NOT ${messageId ?? null})
ORDER BY message_attachments.receivedAt DESC, message_attachments.sentAt DESC
ORDER BY
message_attachments.receivedAt ${timeOrder},
message_attachments.sentAt ${timeOrder}
LIMIT ${limit}
`;
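
The two cursor fragments above implement keyset pagination over the same composite key the ORDER BY uses. As a hedged sketch (helper name hypothetical), the ordering is equivalent to this comparator:

// Ascending comparator over the (receivedAt, sentAt) key the query sorts by;
// 'older' walks this ordering in reverse, 'newer' walks it forward.
function compareByReceivedThenSent(
  a: { receivedAt: number; sentAt: number },
  b: { receivedAt: number; sentAt: number }
): number {
  if (a.receivedAt !== b.receivedAt) {
    return a.receivedAt - b.receivedAt;
  }
  return a.sentAt - b.sentAt;
}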

View File

@@ -9,6 +9,7 @@ import { useBoundActions } from '../../hooks/useBoundActions.std.js';
import type { StateType as RootStateType } from '../reducer.preload.js';
import { setVoiceNotePlaybackRate } from './conversations.preload.js';
import { extractVoiceNoteForPlayback } from '../selectors/audioPlayer.preload.js';
import { getUserConversationId } from '../selectors/user.std.js';
import type {
VoiceNoteAndConsecutiveForPlayback,
VoiceNoteForPlayback,
@@ -25,6 +26,14 @@ import { createLogger } from '../../logging/log.std.js';
import { isAudio } from '../../util/Attachment.std.js';
import { getLocalAttachmentUrl } from '../../util/getLocalAttachmentUrl.std.js';
import { assertDev } from '../../util/assert.std.js';
import { drop } from '../../util/drop.std.js';
import { Sound, SoundType } from '../../util/Sound.std.js';
import { getMessageById } from '../../messages/getMessageById.preload.js';
import { DataReader } from '../../sql/Client.preload.js';
const stateChangeConfirmUpSound = new Sound({
soundType: SoundType.VoiceNoteEnd,
});
const log = createLogger('audioPlayer');
@@ -36,17 +45,14 @@ type AudioPlayerContentDraft = ReadonlyDeep<{
url: string;
}>;
/** A voice note, with a queue for consecutive playback */
/** A voice note, for consecutive playback */
export type AudioPlayerContentVoiceNote = ReadonlyDeep<{
conversationId: string;
context: string;
current: VoiceNoteForPlayback;
queue: ReadonlyArray<VoiceNoteForPlayback>;
nextMessageTimestamp: number | undefined;
// playing because it followed a message
// false on the first of a consecutive group
isConsecutive: boolean;
ourConversationId: string | undefined;
}>;
export type ActiveAudioPlayerStateType = ReadonlyDeep<{
@@ -79,6 +85,39 @@ export type AudioPlayerStateType = ReadonlyDeep<{
active: ActiveAudioPlayerStateType | undefined;
}>;
// Helpers
async function getNextVoiceNote({
current,
conversationId,
ourConversationId,
}: {
current: VoiceNoteForPlayback;
conversationId: string;
ourConversationId: string;
}): Promise<VoiceNoteForPlayback | undefined> {
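// Ask the database for the single audio attachment that follows the current note
// in (receivedAt, sentAt) order, excluding the message that just finished.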
const results = await DataReader.getSortedMedia({
conversationId,
limit: 1,
messageId: current.id,
receivedAt: current.receivedAt,
sentAt: current.sentAt,
type: 'audio',
order: 'newer',
});
if (results.length === 0) {
return undefined;
}
const next = await getMessageById(results[0].message.id);
if (next == null) {
return undefined;
}
return extractVoiceNoteForPlayback(next.attributes, ourConversationId);
}
// Actions
export type SetMessageAudioAction = ReadonlyDeep<{
@@ -145,9 +184,59 @@ export const actions = {
messageAudioEnded,
};
function messageAudioEnded(): MessageAudioEnded {
return {
type: 'audioPlayer/MESSAGE_AUDIO_ENDED',
function messageAudioEnded(): ThunkAction<
void,
RootStateType,
unknown,
SetMessageAudioAction | MessageAudioEnded
> {
return async (dispatch, getState) => {
const state = getState();
const {
audioPlayer: { active },
} = state;
const ourConversationId = getUserConversationId(getState());
if (ourConversationId == null || active == null) {
dispatch({
type: 'audioPlayer/MESSAGE_AUDIO_ENDED',
});
return;
}
const { content, playbackRate } = active;
if (content == null || AudioPlayerContent.isDraft(content)) {
dispatch({
type: 'audioPlayer/MESSAGE_AUDIO_ENDED',
});
return;
}
const { conversationId, context, current } = content;
const next = await getNextVoiceNote({
current,
conversationId,
ourConversationId,
});
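// Nothing newer to play in this conversation: play the end-of-playback sound and stop.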
if (next == null) {
drop(stateChangeConfirmUpSound.play());
dispatch({
type: 'audioPlayer/MESSAGE_AUDIO_ENDED',
});
return;
}
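// Otherwise continue with the next voice note from the start, marked as consecutive.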
dispatch({
type: 'audioPlayer/SET_MESSAGE_AUDIO',
payload: {
conversationId,
context,
current: next,
isConsecutive: true,
startPosition: 0,
playbackRate,
},
});
};
}
@@ -218,32 +307,21 @@ function loadVoiceNoteAudio({
voiceNoteData,
position,
context,
ourConversationId,
playbackRate,
}: {
voiceNoteData: VoiceNoteAndConsecutiveForPlayback;
position: number;
context: string;
ourConversationId: string;
playbackRate: number;
}): SetMessageAudioAction {
const {
conversationId,
voiceNote,
consecutiveVoiceNotes,
// playbackRate,
nextMessageTimestamp,
} = voiceNoteData;
const { conversationId, voiceNote } = voiceNoteData;
return {
type: 'audioPlayer/SET_MESSAGE_AUDIO',
payload: {
conversationId,
context,
current: voiceNote,
queue: consecutiveVoiceNotes,
isConsecutive: false,
nextMessageTimestamp,
ourConversationId,
startPosition: position,
playbackRate,
},
@@ -394,90 +472,6 @@ export function reducer(
};
}
if (action.type === 'MESSAGES_ADDED') {
if (!active) {
return state;
}
const { content } = active;
if (!content) {
return state;
}
if (!AudioPlayerContent.isVoiceNote(content)) {
return state;
}
if (content.conversationId !== action.payload.conversationId) {
return state;
}
const updatedQueue: Array<VoiceNoteForPlayback> = [...content.queue];
for (const message of action.payload.messages) {
if (message.deletedForEveryone) {
continue;
}
if (message.timestamp < content.current.timestamp) {
continue;
}
// in range of the queue
if (
content.nextMessageTimestamp === undefined ||
message.timestamp < content.nextMessageTimestamp
) {
if (message.type !== 'incoming' && message.type !== 'outgoing') {
continue;
}
const voiceNote = extractVoiceNoteForPlayback(
message,
content.ourConversationId
);
// index of the message in the queue after this one
const idx = updatedQueue.findIndex(
m => m.timestamp > message.timestamp
);
// break up consecutive queue: drop values older than this message
if (!voiceNote && idx !== -1) {
updatedQueue.splice(idx);
continue;
}
// insert a new voice note
if (voiceNote) {
if (idx === -1) {
log.info(
`MESSAGES_ADDED: Adding voice note ${voiceNote.messageIdForLogging} to end of queue`
);
updatedQueue.push(voiceNote);
} else {
log.info(
`MESSAGES_ADDED: Adding voice note ${voiceNote.messageIdForLogging} to queue at index ${idx}`
);
updatedQueue.splice(idx, 0, voiceNote);
}
}
}
}
if (updatedQueue.length === content.queue.length) {
return state;
}
return {
...state,
active: {
...active,
content: {
...content,
queue: updatedQueue,
},
},
};
}
if (action.type === 'audioPlayer/MESSAGE_AUDIO_ENDED') {
if (!active) {
return state;
@@ -487,37 +481,6 @@ export function reducer(
return state;
}
if (AudioPlayerContent.isDraft(content)) {
log.info('MESSAGE_AUDIO_ENDED: Voice note was draft, stopping playback');
return {
...state,
active: undefined,
};
}
const { queue } = content;
const [nextVoiceNote, ...newQueue] = queue;
if (nextVoiceNote) {
log.info(
`MESSAGE_AUDIO_ENDED: Starting next voice note ${nextVoiceNote.messageIdForLogging}`
);
return {
...state,
active: {
...active,
startPosition: 0,
content: {
...content,
current: nextVoiceNote,
queue: newQueue,
isConsecutive: true,
},
},
};
}
log.info('MESSAGE_AUDIO_ENDED: Stopping playback');
return {
...state,
@@ -545,57 +508,16 @@ export function reducer(
// if we deleted the message currently being played
// move on to the next message
if (content.current.id === id) {
const [next, ...rest] = content.queue;
if (!next) {
log.info(
'MESSAGE_DELETED: Removed currently-playing message, stopping playback'
);
return {
...state,
active: undefined,
};
}
log.info(
'MESSAGE_DELETED: Removed currently-playing message, moving to next in queue'
);
return {
...state,
active: {
...active,
content: {
...content,
current: next,
queue: rest,
},
},
};
}
// if we deleted a message on the queue
// just update the queue
const message = content.queue.find(el => el.id === id);
if (message) {
log.info('MESSAGE_DELETED: Removed message from the queue');
return {
...state,
active: {
...active,
content: {
...content,
queue: content.queue.filter(el => el.id !== id),
},
},
active: undefined,
};
}
return state;
}
// if it's a voice note
// and this event is letting us know that it has downloaded
// update the url if it's in the queue
// Update the currently playing message if its attachment just downloaded
if (action.type === 'MESSAGE_CHANGED') {
if (!active) {
return state;
@@ -649,28 +571,6 @@ export function reducer(
};
}
// if it's in the queue
const idx = content.queue.findIndex(v => v.id === id);
if (idx !== -1) {
log.info('MESSAGE_CHANGED: Adding content url to message in queue');
const updatedQueue = [...content.queue];
updatedQueue[idx] = {
...updatedQueue[idx],
url,
};
return {
...state,
active: {
...active,
content: {
...content,
queue: updatedQueue,
},
},
};
}
return state;
}

View File

@@ -112,12 +112,12 @@ function _sortItems<
}
function _cleanAttachments(
type: 'media' | 'audio' | 'document',
type: 'media' | 'audio' | 'documents',
rawMedia: ReadonlyArray<MediaItemDBType>
): ReadonlyArray<MediaItemType> {
return rawMedia.map(({ message, index, attachment }) => {
return {
type,
type: type === 'documents' ? 'document' : type,
index,
attachment: getPropsForAttachment(attachment, 'attachment', message),
message,
@@ -159,20 +159,23 @@ function initialLoad(
const [rawMedia, rawAudio, rawDocuments, rawLinkPreviews] =
await Promise.all([
DataReader.getOlderMedia({
DataReader.getSortedMedia({
conversationId,
limit: FETCH_CHUNK_COUNT,
type: 'media',
order: 'older',
}),
DataReader.getOlderMedia({
DataReader.getSortedMedia({
conversationId,
limit: FETCH_CHUNK_COUNT,
type: 'audio',
order: 'older',
}),
DataReader.getOlderMedia({
DataReader.getSortedMedia({
conversationId,
limit: FETCH_CHUNK_COUNT,
type: 'documents',
order: 'older',
}),
DataReader.getOlderLinkPreviews({
conversationId,
@@ -182,7 +185,7 @@ function initialLoad(
const media = _cleanAttachments('media', rawMedia);
const audio = _cleanAttachments('audio', rawAudio);
const documents = _cleanAttachments('document', rawDocuments);
const documents = _cleanAttachments('documents', rawDocuments);
const links = _cleanLinkPreviews(rawLinkPreviews);
dispatch({
@@ -256,26 +259,23 @@ function loadMore(
let audio: ReadonlyArray<MediaItemType> = [];
let documents: ReadonlyArray<MediaItemType> = [];
let links: ReadonlyArray<LinkPreviewMediaItemType> = [];
if (type === 'media') {
const rawMedia = await DataReader.getOlderMedia({
if (type === 'media' || type === 'audio' || type === 'documents') {
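// All three attachment tabs page backwards with the same query; only `type` differs.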
const rawMedia = await DataReader.getSortedMedia({
...sharedOptions,
type: 'media',
order: 'older',
type,
});
media = _cleanAttachments('media', rawMedia);
} else if (type === 'audio') {
const rawAudio = await DataReader.getOlderMedia({
...sharedOptions,
type: 'audio',
});
audio = _cleanAttachments('audio', rawAudio);
} else if (type === 'documents') {
const rawDocuments = await DataReader.getOlderMedia({
...sharedOptions,
type: 'documents',
});
documents = _cleanAttachments('document', rawDocuments);
const result = _cleanAttachments(type, rawMedia);
if (type === 'media') {
media = result;
} else if (type === 'audio') {
audio = result;
} else if (type === 'documents') {
documents = result;
} else {
throw missingCaseError(type);
}
} else if (type === 'links') {
const rawPreviews = await DataReader.getOlderLinkPreviews(sharedOptions);
links = _cleanLinkPreviews(rawPreviews);
@@ -452,7 +452,7 @@ export function reducer(
)
);
const newDocuments = _cleanAttachments(
'document',
'documents',
messageMediaItems.filter(({ attachment }) => isFile(attachment))
);
const newLinks = _cleanLinkPreviews(

View File

@@ -22,7 +22,6 @@ import {
import type { StateType } from '../reducer.preload.js';
import { createLogger } from '../../logging/log.std.js';
import { getLocalAttachmentUrl } from '../../util/getLocalAttachmentUrl.std.js';
import type { MessageWithUIFieldsType } from '../ducks/conversations.preload.js';
import type { ReadonlyMessageAttributesType } from '../../model-types.d.ts';
import { getMessageIdForLogging } from '../../util/idForLogging.preload.js';
import * as Attachment from '../../util/Attachment.std.js';
@@ -41,7 +40,8 @@ export type VoiceNoteForPlayback = {
sourceServiceId: ServiceIdString | undefined;
isPlayed: boolean;
messageIdForLogging: string;
timestamp: number;
sentAt: number;
receivedAt: number;
};
export const isPaused = (state: StateType): boolean => {
@@ -106,7 +106,8 @@ export function extractVoiceNoteForPlayback(
type,
isPlayed: isPlayed(type, status, message.readStatus),
messageIdForLogging: getMessageIdForLogging(message),
timestamp: message.timestamp,
sentAt: message.sent_at,
receivedAt: message.received_at,
source: message.source,
sourceServiceId: message.sourceServiceId,
};
@@ -116,11 +117,7 @@ export function extractVoiceNoteForPlayback(
export type VoiceNoteAndConsecutiveForPlayback = {
conversationId: string;
voiceNote: VoiceNoteForPlayback;
previousMessageId: string | undefined;
consecutiveVoiceNotes: ReadonlyArray<VoiceNoteForPlayback>;
playbackRate: number;
// timestamp of the message after all the ones in the queue
nextMessageTimestamp: number | undefined;
};
export const selectVoiceNoteAndConsecutive = createSelector(
getConversations,
@@ -160,57 +157,12 @@ export const selectVoiceNoteAndConsecutive = createSelector(
return undefined;
}
const conversationMessages =
conversations.messagesByConversation[selectedConversationId];
if (!conversationMessages) {
log.warn('selectedVoiceNote: no conversation messages', {
message: messageId,
});
return;
}
let idx = conversationMessages.messageIds.indexOf(messageId);
// useful if inserting into an active queue
const previousMessageId = conversationMessages.messageIds[idx - 1];
const consecutiveVoiceNotes: Array<VoiceNoteForPlayback> = [];
let nextMessageId: string;
let nextMessage: MessageWithUIFieldsType | undefined;
let nextVoiceNote: VoiceNoteForPlayback | undefined;
do {
idx += 1;
nextMessageId = conversationMessages.messageIds[idx];
if (!nextMessageId) {
nextMessage = undefined;
break;
}
nextMessage = conversations.messagesLookup[nextMessageId];
if (!nextMessage) {
break;
}
if (nextMessage.deletedForEveryone) {
continue;
}
nextVoiceNote = extractVoiceNoteForPlayback(
nextMessage,
ourConversationId
);
if (nextVoiceNote) {
consecutiveVoiceNotes.push(nextVoiceNote);
}
} while (nextVoiceNote);
const conversation = getConversationById(selectedConversationId);
return {
conversationId: selectedConversationId,
voiceNote,
consecutiveVoiceNotes,
playbackRate: conversation?.voiceNotePlaybackRate ?? 1,
previousMessageId,
nextMessageTimestamp: nextMessage?.timestamp,
};
};
}

View File

@@ -80,36 +80,18 @@ export const SmartAllMedia = memo(function SmartAllMedia({
return;
}
if (!ourConversationId) {
log.warn('no ourConversationId');
return;
}
const index = audio.indexOf(mediaItem);
if (index === -1) {
log.warn('audio no longer loaded');
return;
}
const prev = index === 0 ? undefined : audio.at(index - 1);
const next = audio.at(index);
loadVoiceNoteAudio({
voiceNoteData: {
voiceNote,
conversationId: mediaItem.message.conversationId,
previousMessageId: prev?.message.id,
playbackRate: 1,
consecutiveVoiceNotes: [],
nextMessageTimestamp: next?.message.sentAt,
},
position: 0,
context: 'AllMedia',
ourConversationId,
playbackRate: 1,
});
},
[audio, loadVoiceNoteAudio, ourConversationId]
[loadVoiceNoteAudio, ourConversationId]
);
return (

View File

@@ -15,7 +15,6 @@ import {
selectVoiceNoteAndConsecutive,
} from '../selectors/audioPlayer.preload.js';
import { useConversationsActions } from '../ducks/conversations.preload.js';
import { getUserConversationId } from '../selectors/user.std.js';
import { createLogger } from '../../logging/log.std.js';
import {
getConversationByIdSelector,
@@ -38,7 +37,6 @@ export const SmartMessageAudio = memo(function SmartMessageAudio({
const { pushPanelForConversation } = useConversationsActions();
const getVoiceNoteData = useSelector(selectVoiceNoteAndConsecutive);
const ourConversationId = useSelector(getUserConversationId);
const getConversationById = useSelector(getConversationByIdSelector);
const selectedConversationId = useSelector(getSelectedConversationId);
@@ -69,26 +67,14 @@ export const SmartMessageAudio = memo(function SmartMessageAudio({
return;
}
if (!ourConversationId) {
log.warn('SmartMessageAudio: no ourConversationId');
return;
}
loadVoiceNoteAudio({
voiceNoteData,
position,
context: renderingContext,
ourConversationId,
playbackRate,
});
},
[
getVoiceNoteData,
loadVoiceNoteAudio,
ourConversationId,
renderingContext,
playbackRate,
]
[getVoiceNoteData, loadVoiceNoteAudio, renderingContext, playbackRate]
);
return (

View File

@@ -26,9 +26,6 @@ const log = createLogger('VoiceNotesPlaybackProvider');
const stateChangeConfirmDownSound = new Sound({
soundType: SoundType.VoiceNoteStart,
});
const stateChangeConfirmUpSound = new Sound({
soundType: SoundType.VoiceNoteEnd,
});
/**
* Synchronizes the audioPlayer redux state with globalMessageAudio
@@ -45,14 +42,11 @@ export const SmartVoiceNotesPlaybackProvider = memo(
let messageId: undefined | string;
let messageIdForLogging: undefined | string;
let playNextConsecutiveSound = false;
let playFinishConsecutiveSound = false;
if (content && AudioPlayerContent.isVoiceNote(content)) {
({ url, id: messageId } = content.current);
messageIdForLogging = content.current.messageIdForLogging;
playNextConsecutiveSound = content.isConsecutive;
playFinishConsecutiveSound =
content.isConsecutive && content.queue.length === 0;
}
if (content && AudioPlayerContent.isDraft(content)) {
url = content.url;
@@ -123,7 +117,6 @@ export const SmartVoiceNotesPlaybackProvider = memo(
messageId,
messageIdForLogging,
startPosition: active.startPosition,
playFinishConsecutiveSound,
durationChanged,
unloadMessageAudio,
currentTimeUpdated,
@@ -162,7 +155,6 @@ export const SmartVoiceNotesPlaybackProvider = memo(
messageAudioEnded,
messageId,
messageIdForLogging,
playFinishConsecutiveSound,
playNextConsecutiveSound,
previousStartPosition,
unloadMessageAudio,
@@ -179,7 +171,6 @@ function loadAudio({
messageId,
messageIdForLogging,
startPosition,
playFinishConsecutiveSound,
durationChanged,
currentTimeUpdated,
messageAudioEnded,
@@ -190,7 +181,6 @@ function loadAudio({
messageId: string | undefined;
messageIdForLogging: string | undefined;
startPosition: number;
playFinishConsecutiveSound: boolean;
durationChanged: (value: number | undefined) => void;
currentTimeUpdated: (value: number) => void;
messageAudioEnded: () => void;
@@ -225,9 +215,6 @@ function loadAudio({
currentTimeUpdated(globalMessageAudio.currentTime);
},
onEnded() {
if (playFinishConsecutiveSound) {
drop(stateChangeConfirmUpSound.play());
}
messageAudioEnded();
},
onError(error) {

View File

@@ -17,16 +17,14 @@ function voiceNoteDataForMessage(
voiceNote: {
id: messageId,
type: 'outgoing',
timestamp: 0,
receivedAt: 0,
sentAt: 0,
url: undefined,
source: undefined,
sourceServiceId: undefined,
messageIdForLogging: messageId,
isPlayed: false,
},
consecutiveVoiceNotes: [],
previousMessageId: undefined,
nextMessageTimestamp: undefined,
playbackRate: 1,
};
}
@@ -51,7 +49,6 @@ describe('state/selectors/audioPlayer', () => {
voiceNoteData: voiceNoteDataForMessage('id'),
position: 0,
context: 'context',
ourConversationId: 'convo',
playbackRate: 1,
})
);

View File

@@ -30,16 +30,14 @@ function voiceNoteDataForMessage(
voiceNote: {
id: messageId,
type: 'outgoing',
timestamp: 0,
receivedAt: 0,
sentAt: 0,
url: undefined,
source: undefined,
sourceServiceId: undefined,
messageIdForLogging: messageId,
isPlayed: false,
},
consecutiveVoiceNotes: [],
previousMessageId: undefined,
nextMessageTimestamp: undefined,
playbackRate: 1,
};
}
@@ -57,7 +55,6 @@ describe('both/state/ducks/audioPlayer', () => {
voiceNoteData: voiceNoteDataForMessage(MESSAGE_ID),
position: 0,
context: 'context',
ourConversationId: 'convo',
playbackRate: 1,
})
);
@@ -85,7 +82,6 @@ describe('both/state/ducks/audioPlayer', () => {
voiceNoteData: voiceNoteDataForMessage('test'),
position: 0,
context: 'context',
ourConversationId: 'convo',
playbackRate: 1,
})
);