Support consecutive playback in all media

Fedor Indutny
2025-11-18 17:20:08 -08:00
committed by GitHub
parent 745472cc4b
commit 10a9e40a2b
10 changed files with 181 additions and 349 deletions

View File

@@ -585,12 +585,13 @@ export type BackupAttachmentDownloadProgress = {
   completedBytes: number;
 };

-export type GetOlderMediaOptionsType = Readonly<{
+export type GetSortedMediaOptionsType = Readonly<{
   conversationId: string;
   limit: number;
   messageId?: string;
   receivedAt?: number;
   sentAt?: number;
+  order: 'older' | 'newer';
   type: 'media' | 'audio' | 'documents';
 }>;
@@ -842,7 +843,9 @@ type ReadableInterface = {
   ) => Array<MessageType>;
   // getOlderMessagesByConversation is JSON on server, full message on Client
   hasMedia: (conversationId: string) => boolean;
-  getOlderMedia: (options: GetOlderMediaOptionsType) => Array<MediaItemDBType>;
+  getSortedMedia: (
+    options: GetSortedMediaOptionsType
+  ) => Array<MediaItemDBType>;
   getOlderLinkPreviews: (
     options: GetOlderLinkPreviewsOptionsType
   ) => Array<LinkPreviewMediaItemDBType>;
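For orientation, the renamed query is a superset of the old one: passing order: 'older' preserves the previous behavior, while order: 'newer' walks forward from a cursor. A minimal usage sketch of a forward lookup against this interface (the import paths and helper name are illustrative, not part of the commit):

import type { MediaItemDBType } from './Interface.js'; // path assumed for illustration
import { DataReader } from '../sql/Client.preload.js'; // path assumed for illustration

// Fetch the audio attachment that immediately follows the given cursor
// position within one conversation, or undefined if there is none.
async function fetchNextAudio(
  conversationId: string,
  cursor: { messageId: string; receivedAt: number; sentAt: number }
): Promise<MediaItemDBType | undefined> {
  const [next] = await DataReader.getSortedMedia({
    conversationId,
    limit: 1,
    messageId: cursor.messageId,
    receivedAt: cursor.receivedAt,
    sentAt: cursor.sentAt,
    type: 'audio',
    order: 'newer',
  });
  return next;
}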

View File

@@ -136,7 +136,7 @@ import type {
   GetConversationRangeCenteredOnMessageResultType,
   GetKnownMessageAttachmentsResultType,
   GetNearbyMessageFromDeletedSetOptionsType,
-  GetOlderMediaOptionsType,
+  GetSortedMediaOptionsType,
   GetOlderLinkPreviewsOptionsType,
   GetRecentStoryRepliesOptionsType,
   GetUnreadByConversationAndMarkReadResultType,
@@ -456,7 +456,7 @@ export const DataReader: ServerReadableInterface = {
   hasGroupCallHistoryMessage,
   hasMedia,
-  getOlderMedia,
+  getSortedMedia,
   getOlderLinkPreviews,
   getAllNotificationProfiles,
@@ -5242,18 +5242,28 @@ function hasMedia(db: ReadableDB, conversationId: string): boolean {
 const { VOICE_MESSAGE } = SignalService.AttachmentPointer.Flags;

-function getOlderMedia(
+function getSortedMedia(
   db: ReadableDB,
   {
+    order,
     conversationId,
     limit,
     messageId,
-    receivedAt: maxReceivedAt = Number.MAX_VALUE,
-    sentAt: maxSentAt = Number.MAX_VALUE,
+    receivedAt: givenReceivedAt,
+    sentAt: givenSentAt,
     type,
-  }: GetOlderMediaOptionsType
+  }: GetSortedMediaOptionsType
 ): Array<MediaItemDBType> {
-  const timeFilters = {
+  let timeFilters: {
+    first: QueryFragment;
+    second: QueryFragment;
+  };
+  let timeOrder: QueryFragment;
+  if (order === 'older') {
+    const maxReceivedAt = givenReceivedAt ?? Number.MAX_VALUE;
+    const maxSentAt = givenSentAt ?? Number.MAX_VALUE;
+    timeFilters = {
     first: sqlFragment`
       message_attachments.receivedAt = ${maxReceivedAt}
       AND
@@ -5261,6 +5271,23 @@ function getOlderMedia(
     `,
     second: sqlFragment`message_attachments.receivedAt < ${maxReceivedAt}`,
   };
+    timeOrder = sqlFragment`DESC`;
+  } else if (order === 'newer') {
+    const minReceivedAt = givenReceivedAt ?? Number.MIN_VALUE;
+    const minSentAt = givenSentAt ?? Number.MIN_VALUE;
+    timeFilters = {
+      first: sqlFragment`
+        message_attachments.receivedAt = ${minReceivedAt}
+        AND
+        message_attachments.sentAt > ${minSentAt}
+      `,
+      second: sqlFragment`message_attachments.receivedAt > ${minReceivedAt}`,
+    };
+    timeOrder = sqlFragment`ASC`;
+  } else {
+    throw missingCaseError(order);
+  }

   let contentFilter: QueryFragment;
   if (type === 'media') {
@@ -5313,7 +5340,9 @@ function getOlderMedia(
       message_attachments.isViewOnce IS NOT 1 AND
       message_attachments.messageType IN ('incoming', 'outgoing') AND
       (${messageId ?? null} IS NULL OR message_attachments.messageId IS NOT ${messageId ?? null})
-    ORDER BY message_attachments.receivedAt DESC, message_attachments.sentAt DESC
+    ORDER BY
+      message_attachments.receivedAt ${timeOrder},
+      message_attachments.sentAt ${timeOrder}
     LIMIT ${limit}
   `;
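Taken together, the two time filters plus the ORDER BY turn this into keyset (cursor) pagination over (receivedAt, sentAt) that can run in either direction. A rough TypeScript restatement of the comparison the SQL performs, with illustrative names only:

type Cursor = { receivedAt: number; sentAt: number };

// Mirrors the WHERE filters above: when receivedAt ties, fall back to
// comparing sentAt; otherwise receivedAt alone decides which side of the
// cursor a row falls on.
function isPastCursor(
  row: Cursor,
  cursor: Cursor,
  order: 'older' | 'newer'
): boolean {
  if (row.receivedAt === cursor.receivedAt) {
    return order === 'older'
      ? row.sentAt < cursor.sentAt
      : row.sentAt > cursor.sentAt;
  }
  return order === 'older'
    ? row.receivedAt < cursor.receivedAt
    : row.receivedAt > cursor.receivedAt;
}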

View File

@@ -9,6 +9,7 @@ import { useBoundActions } from '../../hooks/useBoundActions.std.js';
 import type { StateType as RootStateType } from '../reducer.preload.js';
 import { setVoiceNotePlaybackRate } from './conversations.preload.js';
 import { extractVoiceNoteForPlayback } from '../selectors/audioPlayer.preload.js';
+import { getUserConversationId } from '../selectors/user.std.js';
 import type {
   VoiceNoteAndConsecutiveForPlayback,
   VoiceNoteForPlayback,
@@ -25,6 +26,14 @@ import { createLogger } from '../../logging/log.std.js';
 import { isAudio } from '../../util/Attachment.std.js';
 import { getLocalAttachmentUrl } from '../../util/getLocalAttachmentUrl.std.js';
 import { assertDev } from '../../util/assert.std.js';
+import { drop } from '../../util/drop.std.js';
+import { Sound, SoundType } from '../../util/Sound.std.js';
+import { getMessageById } from '../../messages/getMessageById.preload.js';
+import { DataReader } from '../../sql/Client.preload.js';
+
+const stateChangeConfirmUpSound = new Sound({
+  soundType: SoundType.VoiceNoteEnd,
+});

 const log = createLogger('audioPlayer');
@@ -36,17 +45,14 @@ type AudioPlayerContentDraft = ReadonlyDeep<{
   url: string;
 }>;

-/** A voice note, with a queue for consecutive playback */
+/** A voice note consecutive playback */
 export type AudioPlayerContentVoiceNote = ReadonlyDeep<{
   conversationId: string;
   context: string;
   current: VoiceNoteForPlayback;
-  queue: ReadonlyArray<VoiceNoteForPlayback>;
-  nextMessageTimestamp: number | undefined;
   // playing because it followed a message
   // false on the first of a consecutive group
   isConsecutive: boolean;
-  ourConversationId: string | undefined;
 }>;

 export type ActiveAudioPlayerStateType = ReadonlyDeep<{
@@ -79,6 +85,39 @@ export type AudioPlayerStateType = ReadonlyDeep<{
   active: ActiveAudioPlayerStateType | undefined;
 }>;

+// Helpers
+
+async function getNextVoiceNote({
+  current,
+  conversationId,
+  ourConversationId,
+}: {
+  current: VoiceNoteForPlayback;
+  conversationId: string;
+  ourConversationId: string;
+}): Promise<VoiceNoteForPlayback | undefined> {
+  const results = await DataReader.getSortedMedia({
+    conversationId,
+    limit: 1,
+    messageId: current.id,
+    receivedAt: current.receivedAt,
+    sentAt: current.sentAt,
+    type: 'audio',
+    order: 'newer',
+  });
+  if (results.length === 0) {
+    return undefined;
+  }
+
+  const next = await getMessageById(results[0].message.id);
+  if (next == null) {
+    return undefined;
+  }
+
+  return extractVoiceNoteForPlayback(next.attributes, ourConversationId);
+}
+
 // Actions

 export type SetMessageAudioAction = ReadonlyDeep<{
@@ -145,9 +184,59 @@ export const actions = {
   messageAudioEnded,
 };

-function messageAudioEnded(): MessageAudioEnded {
-  return {
+function messageAudioEnded(): ThunkAction<
+  void,
+  RootStateType,
+  unknown,
+  SetMessageAudioAction | MessageAudioEnded
+> {
+  return async (dispatch, getState) => {
+    const state = getState();
+    const {
+      audioPlayer: { active },
+    } = state;
+
+    const ourConversationId = getUserConversationId(getState());
+    if (ourConversationId == null || active == null) {
+      dispatch({
         type: 'audioPlayer/MESSAGE_AUDIO_ENDED',
+      });
+      return;
+    }
+
+    const { content, playbackRate } = active;
+    if (content == null || AudioPlayerContent.isDraft(content)) {
+      dispatch({
+        type: 'audioPlayer/MESSAGE_AUDIO_ENDED',
+      });
+      return;
+    }
+
+    const { conversationId, context, current } = content;
+    const next = await getNextVoiceNote({
+      current,
+      conversationId,
+      ourConversationId,
+    });
+
+    if (next == null) {
+      drop(stateChangeConfirmUpSound.play());
+      dispatch({
+        type: 'audioPlayer/MESSAGE_AUDIO_ENDED',
+      });
+      return;
+    }
+
+    dispatch({
+      type: 'audioPlayer/SET_MESSAGE_AUDIO',
+      payload: {
+        conversationId,
+        context,
+        current: next,
+        isConsecutive: true,
+        startPosition: 0,
+        playbackRate,
+      },
+    });
   };
 }
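The thunk above replaces the reducer's queue bookkeeping: the successor is looked up in SQLite only at the moment playback ends. Its decision logic, restated as a small pure function for clarity (the names NextStep and decideNextStep are illustrative; only the behavior comes from the hunk above):

import type { VoiceNoteForPlayback } from '../selectors/audioPlayer.preload.js'; // type and path as used in this duck

type NextStep =
  | { kind: 'stop'; playEndSound: boolean }
  | { kind: 'playNext'; next: VoiceNoteForPlayback };

// Drafts and missing state simply end playback; a voice note with no
// following audio message ends playback and plays the confirmation sound;
// otherwise the next note starts at position 0 with isConsecutive set.
function decideNextStep(
  isVoiceNote: boolean,
  next: VoiceNoteForPlayback | undefined
): NextStep {
  if (!isVoiceNote) {
    return { kind: 'stop', playEndSound: false };
  }
  if (next == null) {
    return { kind: 'stop', playEndSound: true };
  }
  return { kind: 'playNext', next };
}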
@@ -218,32 +307,21 @@ function loadVoiceNoteAudio({
   voiceNoteData,
   position,
   context,
-  ourConversationId,
   playbackRate,
 }: {
   voiceNoteData: VoiceNoteAndConsecutiveForPlayback;
   position: number;
   context: string;
-  ourConversationId: string;
   playbackRate: number;
 }): SetMessageAudioAction {
-  const {
-    conversationId,
-    voiceNote,
-    consecutiveVoiceNotes,
-    // playbackRate,
-    nextMessageTimestamp,
-  } = voiceNoteData;
+  const { conversationId, voiceNote } = voiceNoteData;

   return {
     type: 'audioPlayer/SET_MESSAGE_AUDIO',
     payload: {
       conversationId,
       context,
       current: voiceNote,
-      queue: consecutiveVoiceNotes,
       isConsecutive: false,
-      nextMessageTimestamp,
-      ourConversationId,
       startPosition: position,
       playbackRate,
     },
@@ -394,90 +472,6 @@ export function reducer(
     };
   }

-  if (action.type === 'MESSAGES_ADDED') {
-    if (!active) {
-      return state;
-    }
-    const { content } = active;
-    if (!content) {
-      return state;
-    }
-    if (!AudioPlayerContent.isVoiceNote(content)) {
-      return state;
-    }
-    if (content.conversationId !== action.payload.conversationId) {
-      return state;
-    }
-
-    const updatedQueue: Array<VoiceNoteForPlayback> = [...content.queue];
-
-    for (const message of action.payload.messages) {
-      if (message.deletedForEveryone) {
-        continue;
-      }
-      if (message.timestamp < content.current.timestamp) {
-        continue;
-      }
-
-      // in range of the queue
-      if (
-        content.nextMessageTimestamp === undefined ||
-        message.timestamp < content.nextMessageTimestamp
-      ) {
-        if (message.type !== 'incoming' && message.type !== 'outgoing') {
-          continue;
-        }
-        const voiceNote = extractVoiceNoteForPlayback(
-          message,
-          content.ourConversationId
-        );
-
-        // index of the message in the queue after this one
-        const idx = updatedQueue.findIndex(
-          m => m.timestamp > message.timestamp
-        );
-
-        // break up consecutive queue: drop values older than this message
-        if (!voiceNote && idx !== -1) {
-          updatedQueue.splice(idx);
-          continue;
-        }
-
-        // insert a new voice note
-        if (voiceNote) {
-          if (idx === -1) {
-            log.info(
-              `MESSAGES_ADDED: Adding voice note ${voiceNote.messageIdForLogging} to end of queue`
-            );
-            updatedQueue.push(voiceNote);
-          } else {
-            log.info(
-              `MESSAGES_ADDED: Adding voice note ${voiceNote.messageIdForLogging} to queue at index ${idx}`
-            );
-            updatedQueue.splice(idx, 0, voiceNote);
-          }
-        }
-      }
-    }
-
-    if (updatedQueue.length === content.queue.length) {
-      return state;
-    }
-
-    return {
-      ...state,
-      active: {
-        ...active,
-        content: {
-          ...content,
-          queue: updatedQueue,
-        },
-      },
-    };
-  }
-
   if (action.type === 'audioPlayer/MESSAGE_AUDIO_ENDED') {
     if (!active) {
       return state;
@@ -487,37 +481,6 @@ export function reducer(
       return state;
     }

-    if (AudioPlayerContent.isDraft(content)) {
-      log.info('MESSAGE_AUDIO_ENDED: Voice note was draft, stopping playback');
-      return {
-        ...state,
-        active: undefined,
-      };
-    }
-
-    const { queue } = content;
-    const [nextVoiceNote, ...newQueue] = queue;
-
-    if (nextVoiceNote) {
-      log.info(
-        `MESSAGE_AUDIO_ENDED: Starting next voice note ${nextVoiceNote.messageIdForLogging}`
-      );
-      return {
-        ...state,
-        active: {
-          ...active,
-          startPosition: 0,
-          content: {
-            ...content,
-            current: nextVoiceNote,
-            queue: newQueue,
-            isConsecutive: true,
-          },
-        },
-      };
-    }
-
     log.info('MESSAGE_AUDIO_ENDED: Stopping playback');
     return {
       ...state,
@@ -545,57 +508,16 @@ export function reducer(
     // if we deleted the message currently being played
     // move on to the next message
     if (content.current.id === id) {
-      const [next, ...rest] = content.queue;
-      if (!next) {
-        log.info(
-          'MESSAGE_DELETED: Removed currently-playing message, stopping playback'
-        );
       return {
         ...state,
         active: undefined,
       };
     }
-      log.info(
-        'MESSAGE_DELETED: Removed currently-playing message, moving to next in queue'
-      );
-      return {
-        ...state,
-        active: {
-          ...active,
-          content: {
-            ...content,
-            current: next,
-            queue: rest,
-          },
-        },
-      };
-    }
-
-    // if we deleted a message on the queue
-    // just update the queue
-    const message = content.queue.find(el => el.id === id);
-    if (message) {
-      log.info('MESSAGE_DELETED: Removed message from the queue');
-      return {
-        ...state,
-        active: {
-          ...active,
-          content: {
-            ...content,
-            queue: content.queue.filter(el => el.id !== id),
-          },
-        },
-      };
-    }

     return state;
   }

-  // if it's a voice note
-  // and this event is letting us know that it has downloaded
-  // update the url if it's in the queue
+  // Update currently playing message if it just downloaded
   if (action.type === 'MESSAGE_CHANGED') {
     if (!active) {
       return state;
@@ -649,28 +571,6 @@ export function reducer(
       };
     }

-    // if it's in the queue
-    const idx = content.queue.findIndex(v => v.id === id);
-    if (idx !== -1) {
-      log.info('MESSAGE_CHANGED: Adding content url to message in queue');
-      const updatedQueue = [...content.queue];
-      updatedQueue[idx] = {
-        ...updatedQueue[idx],
-        url,
-      };
-
-      return {
-        ...state,
-        active: {
-          ...active,
-          content: {
-            ...content,
-            queue: updatedQueue,
-          },
-        },
-      };
-    }
-
     return state;
   }

View File

@@ -112,12 +112,12 @@ function _sortItems<
 }

 function _cleanAttachments(
-  type: 'media' | 'audio' | 'document',
+  type: 'media' | 'audio' | 'documents',
   rawMedia: ReadonlyArray<MediaItemDBType>
 ): ReadonlyArray<MediaItemType> {
   return rawMedia.map(({ message, index, attachment }) => {
     return {
-      type,
+      type: type === 'documents' ? 'document' : type,
       index,
       attachment: getPropsForAttachment(attachment, 'attachment', message),
       message,
@@ -159,20 +159,23 @@ function initialLoad(
   const [rawMedia, rawAudio, rawDocuments, rawLinkPreviews] =
     await Promise.all([
-      DataReader.getOlderMedia({
+      DataReader.getSortedMedia({
         conversationId,
         limit: FETCH_CHUNK_COUNT,
         type: 'media',
+        order: 'older',
       }),
-      DataReader.getOlderMedia({
+      DataReader.getSortedMedia({
         conversationId,
         limit: FETCH_CHUNK_COUNT,
         type: 'audio',
+        order: 'older',
       }),
-      DataReader.getOlderMedia({
+      DataReader.getSortedMedia({
         conversationId,
         limit: FETCH_CHUNK_COUNT,
         type: 'documents',
+        order: 'older',
       }),
       DataReader.getOlderLinkPreviews({
         conversationId,
@@ -182,7 +185,7 @@ function initialLoad(
   const media = _cleanAttachments('media', rawMedia);
   const audio = _cleanAttachments('audio', rawAudio);
-  const documents = _cleanAttachments('document', rawDocuments);
+  const documents = _cleanAttachments('documents', rawDocuments);
   const links = _cleanLinkPreviews(rawLinkPreviews);

   dispatch({
@@ -256,26 +259,23 @@ function loadMore(
   let audio: ReadonlyArray<MediaItemType> = [];
   let documents: ReadonlyArray<MediaItemType> = [];
   let links: ReadonlyArray<LinkPreviewMediaItemType> = [];

-  if (type === 'media') {
-    const rawMedia = await DataReader.getOlderMedia({
+  if (type === 'media' || type === 'audio' || type === 'documents') {
+    const rawMedia = await DataReader.getSortedMedia({
       ...sharedOptions,
-      type: 'media',
+      order: 'older',
+      type,
     });
-    media = _cleanAttachments('media', rawMedia);
-  } else if (type === 'audio') {
-    const rawAudio = await DataReader.getOlderMedia({
-      ...sharedOptions,
-      type: 'audio',
-    });
-    audio = _cleanAttachments('audio', rawAudio);
-  } else if (type === 'documents') {
-    const rawDocuments = await DataReader.getOlderMedia({
-      ...sharedOptions,
-      type: 'documents',
-    });
-    documents = _cleanAttachments('document', rawDocuments);
+    const result = _cleanAttachments(type, rawMedia);
+    if (type === 'media') {
+      media = result;
+    } else if (type === 'audio') {
+      audio = result;
+    } else if (type === 'documents') {
+      documents = result;
+    } else {
+      throw missingCaseError(type);
+    }
   } else if (type === 'links') {
     const rawPreviews = await DataReader.getOlderLinkPreviews(sharedOptions);
     links = _cleanLinkPreviews(rawPreviews);
@@ -452,7 +452,7 @@ export function reducer(
       )
     );
     const newDocuments = _cleanAttachments(
-      'document',
+      'documents',
       messageMediaItems.filter(({ attachment }) => isFile(attachment))
     );
     const newLinks = _cleanLinkPreviews(

View File

@@ -22,7 +22,6 @@ import {
 import type { StateType } from '../reducer.preload.js';
 import { createLogger } from '../../logging/log.std.js';
 import { getLocalAttachmentUrl } from '../../util/getLocalAttachmentUrl.std.js';
-import type { MessageWithUIFieldsType } from '../ducks/conversations.preload.js';
 import type { ReadonlyMessageAttributesType } from '../../model-types.d.ts';
 import { getMessageIdForLogging } from '../../util/idForLogging.preload.js';
 import * as Attachment from '../../util/Attachment.std.js';
@@ -41,7 +40,8 @@ export type VoiceNoteForPlayback = {
   sourceServiceId: ServiceIdString | undefined;
   isPlayed: boolean;
   messageIdForLogging: string;
-  timestamp: number;
+  sentAt: number;
+  receivedAt: number;
 };

 export const isPaused = (state: StateType): boolean => {
@@ -106,7 +106,8 @@ export function extractVoiceNoteForPlayback(
     type,
     isPlayed: isPlayed(type, status, message.readStatus),
     messageIdForLogging: getMessageIdForLogging(message),
-    timestamp: message.timestamp,
+    sentAt: message.sent_at,
+    receivedAt: message.received_at,
     source: message.source,
     sourceServiceId: message.sourceServiceId,
   };
@@ -116,11 +117,7 @@ export function extractVoiceNoteForPlayback(
 export type VoiceNoteAndConsecutiveForPlayback = {
   conversationId: string;
   voiceNote: VoiceNoteForPlayback;
-  previousMessageId: string | undefined;
-  consecutiveVoiceNotes: ReadonlyArray<VoiceNoteForPlayback>;
   playbackRate: number;
-  // timestamp of the message after all the once in the queue
-  nextMessageTimestamp: number | undefined;
 };

 export const selectVoiceNoteAndConsecutive = createSelector(
   getConversations,
@@ -160,57 +157,12 @@ export const selectVoiceNoteAndConsecutive = createSelector(
       return undefined;
     }

-    const conversationMessages =
-      conversations.messagesByConversation[selectedConversationId];
-    if (!conversationMessages) {
-      log.warn('selectedVoiceNote: no conversation messages', {
-        message: messageId,
-      });
-      return;
-    }
-
-    let idx = conversationMessages.messageIds.indexOf(messageId);
-
-    // useful if inserting into an active queue
-    const previousMessageId = conversationMessages.messageIds[idx - 1];
-
-    const consecutiveVoiceNotes: Array<VoiceNoteForPlayback> = [];
-    let nextMessageId: string;
-    let nextMessage: MessageWithUIFieldsType | undefined;
-    let nextVoiceNote: VoiceNoteForPlayback | undefined;
-    do {
-      idx += 1;
-      nextMessageId = conversationMessages.messageIds[idx];
-      if (!nextMessageId) {
-        nextMessage = undefined;
-        break;
-      }
-      nextMessage = conversations.messagesLookup[nextMessageId];
-      if (!nextMessage) {
-        break;
-      }
-      if (nextMessage.deletedForEveryone) {
-        continue;
-      }
-      nextVoiceNote = extractVoiceNoteForPlayback(
-        nextMessage,
-        ourConversationId
-      );
-      if (nextVoiceNote) {
-        consecutiveVoiceNotes.push(nextVoiceNote);
-      }
-    } while (nextVoiceNote);
-
     const conversation = getConversationById(selectedConversationId);
     return {
       conversationId: selectedConversationId,
       voiceNote,
-      consecutiveVoiceNotes,
       playbackRate: conversation?.voiceNotePlaybackRate ?? 1,
-      previousMessageId,
-      nextMessageTimestamp: nextMessage?.timestamp,
     };
   };
 }

View File

@@ -80,36 +80,18 @@ export const SmartAllMedia = memo(function SmartAllMedia({
         return;
       }

-      if (!ourConversationId) {
-        log.warn('no ourConversationId');
-        return;
-      }
-
-      const index = audio.indexOf(mediaItem);
-      if (index === -1) {
-        log.warn('audio no longer loaded');
-        return;
-      }
-
-      const prev = index === 0 ? undefined : audio.at(index - 1);
-      const next = audio.at(index);
-
       loadVoiceNoteAudio({
         voiceNoteData: {
           voiceNote,
           conversationId: mediaItem.message.conversationId,
-          previousMessageId: prev?.message.id,
           playbackRate: 1,
-          consecutiveVoiceNotes: [],
-          nextMessageTimestamp: next?.message.sentAt,
         },
         position: 0,
         context: 'AllMedia',
-        ourConversationId,
         playbackRate: 1,
       });
     },
-    [audio, loadVoiceNoteAudio, ourConversationId]
+    [loadVoiceNoteAudio, ourConversationId]
   );

   return (

View File

@@ -15,7 +15,6 @@ import {
   selectVoiceNoteAndConsecutive,
 } from '../selectors/audioPlayer.preload.js';
 import { useConversationsActions } from '../ducks/conversations.preload.js';
-import { getUserConversationId } from '../selectors/user.std.js';
 import { createLogger } from '../../logging/log.std.js';
 import {
   getConversationByIdSelector,
@@ -38,7 +37,6 @@ export const SmartMessageAudio = memo(function SmartMessageAudio({
   const { pushPanelForConversation } = useConversationsActions();
   const getVoiceNoteData = useSelector(selectVoiceNoteAndConsecutive);
-  const ourConversationId = useSelector(getUserConversationId);
   const getConversationById = useSelector(getConversationByIdSelector);
   const selectedConversationId = useSelector(getSelectedConversationId);
@@ -69,26 +67,14 @@ export const SmartMessageAudio = memo(function SmartMessageAudio({
         return;
       }

-      if (!ourConversationId) {
-        log.warn('SmartMessageAudio: no ourConversationId');
-        return;
-      }
-
       loadVoiceNoteAudio({
         voiceNoteData,
         position,
         context: renderingContext,
-        ourConversationId,
         playbackRate,
       });
     },
-    [
-      getVoiceNoteData,
-      loadVoiceNoteAudio,
-      ourConversationId,
-      renderingContext,
-      playbackRate,
-    ]
+    [getVoiceNoteData, loadVoiceNoteAudio, renderingContext, playbackRate]
   );

   return (

View File

@@ -26,9 +26,6 @@ const log = createLogger('VoiceNotesPlaybackProvider');
 const stateChangeConfirmDownSound = new Sound({
   soundType: SoundType.VoiceNoteStart,
 });
-const stateChangeConfirmUpSound = new Sound({
-  soundType: SoundType.VoiceNoteEnd,
-});

 /**
  * Synchronizes the audioPlayer redux state with globalMessageAudio
@@ -45,14 +42,11 @@ export const SmartVoiceNotesPlaybackProvider = memo(
       let messageId: undefined | string;
       let messageIdForLogging: undefined | string;
       let playNextConsecutiveSound = false;
-      let playFinishConsecutiveSound = false;

       if (content && AudioPlayerContent.isVoiceNote(content)) {
         ({ url, id: messageId } = content.current);
         messageIdForLogging = content.current.messageIdForLogging;
         playNextConsecutiveSound = content.isConsecutive;
-        playFinishConsecutiveSound =
-          content.isConsecutive && content.queue.length === 0;
       }

       if (content && AudioPlayerContent.isDraft(content)) {
         url = content.url;
@@ -123,7 +117,6 @@ export const SmartVoiceNotesPlaybackProvider = memo(
         messageId,
         messageIdForLogging,
         startPosition: active.startPosition,
-        playFinishConsecutiveSound,
         durationChanged,
         unloadMessageAudio,
         currentTimeUpdated,
@@ -162,7 +155,6 @@ export const SmartVoiceNotesPlaybackProvider = memo(
       messageAudioEnded,
       messageId,
       messageIdForLogging,
-      playFinishConsecutiveSound,
       playNextConsecutiveSound,
       previousStartPosition,
       unloadMessageAudio,
@@ -179,7 +171,6 @@ function loadAudio({
   messageId,
   messageIdForLogging,
   startPosition,
-  playFinishConsecutiveSound,
   durationChanged,
   currentTimeUpdated,
   messageAudioEnded,
@@ -190,7 +181,6 @@ function loadAudio({
   messageId: string | undefined;
   messageIdForLogging: string | undefined;
   startPosition: number;
-  playFinishConsecutiveSound: boolean;
   durationChanged: (value: number | undefined) => void;
   currentTimeUpdated: (value: number) => void;
   messageAudioEnded: () => void;
@@ -225,9 +215,6 @@ function loadAudio({
       currentTimeUpdated(globalMessageAudio.currentTime);
     },
     onEnded() {
-      if (playFinishConsecutiveSound) {
-        drop(stateChangeConfirmUpSound.play());
-      }
       messageAudioEnded();
     },
     onError(error) {

View File

@@ -17,16 +17,14 @@ function voiceNoteDataForMessage(
     voiceNote: {
       id: messageId,
       type: 'outgoing',
-      timestamp: 0,
+      receivedAt: 0,
+      sentAt: 0,
       url: undefined,
       source: undefined,
       sourceServiceId: undefined,
       messageIdForLogging: messageId,
       isPlayed: false,
     },
-    consecutiveVoiceNotes: [],
-    previousMessageId: undefined,
-    nextMessageTimestamp: undefined,
     playbackRate: 1,
   };
 }
@@ -51,7 +49,6 @@ describe('state/selectors/audioPlayer', () => {
         voiceNoteData: voiceNoteDataForMessage('id'),
         position: 0,
         context: 'context',
-        ourConversationId: 'convo',
         playbackRate: 1,
       })
     );

View File

@@ -30,16 +30,14 @@ function voiceNoteDataForMessage(
     voiceNote: {
       id: messageId,
       type: 'outgoing',
-      timestamp: 0,
+      receivedAt: 0,
+      sentAt: 0,
       url: undefined,
       source: undefined,
       sourceServiceId: undefined,
       messageIdForLogging: messageId,
       isPlayed: false,
     },
-    consecutiveVoiceNotes: [],
-    previousMessageId: undefined,
-    nextMessageTimestamp: undefined,
     playbackRate: 1,
   };
 }
@@ -57,7 +55,6 @@ describe('both/state/ducks/audioPlayer', () => {
         voiceNoteData: voiceNoteDataForMessage(MESSAGE_ID),
         position: 0,
         context: 'context',
-        ourConversationId: 'convo',
         playbackRate: 1,
       })
     );
@@ -85,7 +82,6 @@ describe('both/state/ducks/audioPlayer', () => {
         voiceNoteData: voiceNoteDataForMessage('test'),
         position: 0,
         context: 'context',
-        ourConversationId: 'convo',
         playbackRate: 1,
       })
     );