Ensure incrementalMac consistency when exporting

Author: trevor-signal
Date: 2025-08-20 14:39:21 -04:00
Committed by: GitHub
Parent: 22a3aa0ed7
Commit: 5ba8071aa7
3 changed files with 31 additions and 29 deletions
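
On export, attachments that share a mediaName are deduplicated. Previously only
the locatorInfo was cached and reused, so a duplicate attachment could end up
with an incrementalMac computed under a different encryption key than the
locatorInfo it was given. The export stream now caches the whole FilePointer
and copies incrementalMac and incrementalMacChunkSize along with the reused
locatorInfo; separately, writing incrementalMac into exported FilePointers is
gated behind isTestOrMockEnvironment() pending DESKTOP-9112, and the backup
roundtrip tests are extended to cover the new fields.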

View File

@@ -249,10 +249,7 @@ export class BackupExportStream extends Readable {
   readonly #serviceIdToRecipientId = new Map<string, number>();
   readonly #e164ToRecipientId = new Map<string, number>();
   readonly #roomIdToRecipientId = new Map<string, number>();
-  readonly #mediaNamesToLocatorInfos = new Map<
-    string,
-    Backups.FilePointer.ILocatorInfo
-  >();
+  readonly #mediaNamesToFilePointers = new Map<string, Backups.FilePointer>();
   readonly #stats: StatsType = {
     adHocCalls: 0,
     callLinks: 0,
@@ -355,7 +352,7 @@ export class BackupExportStream extends Readable {
   }

   public getMediaNamesIterator(): MapIterator<string> {
-    return this.#mediaNamesToLocatorInfos.keys();
+    return this.#mediaNamesToFilePointers.keys();
   }

   public getStats(): Readonly<StatsType> {
@@ -2608,16 +2605,17 @@ export class BackupExportStream extends Readable {
     const mediaName = getMediaNameForAttachment(attachment);

     // Re-use existing locatorInfo and backup job if we've already seen this file
-    const existingLocatorInfo = this.#mediaNamesToLocatorInfos.get(mediaName);
-    if (existingLocatorInfo) {
-      filePointer.locatorInfo = existingLocatorInfo;
+    const existingFilePointer = this.#mediaNamesToFilePointers.get(mediaName);
+    if (existingFilePointer?.locatorInfo) {
+      filePointer.locatorInfo = existingFilePointer.locatorInfo;
+      // Also copy over incrementalMac, since that depends on the encryption key
+      filePointer.incrementalMac = existingFilePointer.incrementalMac;
+      filePointer.incrementalMacChunkSize =
+        existingFilePointer.incrementalMacChunkSize;
     } else {
       if (filePointer.locatorInfo) {
-        this.#mediaNamesToLocatorInfos.set(
-          mediaName,
-          filePointer.locatorInfo
-        );
+        this.#mediaNamesToFilePointers.set(mediaName, filePointer);
       }

       if (backupJob) {
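
Why the map now stores whole FilePointers rather than bare locatorInfos: as the
comment in the hunk above notes, the incrementalMac depends on the encryption
key, and that key travels inside the locatorInfo being reused. A minimal sketch
of the invariant, using hypothetical simplified types in place of the Backups
protobuf messages:

    type LocatorInfo = { key: Uint8Array; plaintextHash: Uint8Array };
    type FilePointer = {
      locatorInfo?: LocatorInfo;
      incrementalMac?: Uint8Array;
      incrementalMacChunkSize?: number;
    };

    const mediaNamesToFilePointers = new Map<string, FilePointer>();

    function dedupe(mediaName: string, filePointer: FilePointer): FilePointer {
      const existing = mediaNamesToFilePointers.get(mediaName);
      if (existing?.locatorInfo) {
        // Reuse the first-seen encryption details; copying the locatorInfo
        // without its matching incrementalMac would pair a MAC with the wrong
        // key, which is the inconsistency this commit fixes
        filePointer.locatorInfo = existing.locatorInfo;
        filePointer.incrementalMac = existing.incrementalMac;
        filePointer.incrementalMacChunkSize = existing.incrementalMacChunkSize;
      } else if (filePointer.locatorInfo) {
        // First time this mediaName is seen: remember the full pointer
        mediaNamesToFilePointers.set(mediaName, filePointer);
      }
      return filePointer;
    }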

View File

@@ -38,6 +38,7 @@ import {
   isValidAttachmentKey,
   isValidPlaintextHash,
 } from '../../../types/Crypto';
+import { isTestOrMockEnvironment } from '../../../environment';

 const log = createLogger('filePointers');
@@ -227,19 +228,18 @@ export async function getFilePointerForAttachment({
       height: attachment.height,
       caption: attachment.caption,
       blurHash: attachment.blurHash,
-      // Resilience to invalid data in the database from internal testing
-      ...(typeof attachment.incrementalMac === 'string' && attachment.chunkSize
-        ? {
-            incrementalMac: Bytes.fromBase64(attachment.incrementalMac),
-            incrementalMacChunkSize: attachment.chunkSize,
-          }
-        : {
-            incrementalMac: undefined,
-            incrementalMacChunkSize: undefined,
-          }),
     });
+
+    // TODO: DESKTOP-9112
+    if (isTestOrMockEnvironment()) {
+      // Check for string type for resilience to invalid data in the database from internal
+      // testing
+      if (typeof attachment.incrementalMac === 'string' && attachment.chunkSize) {
+        filePointer.incrementalMac = Bytes.fromBase64(attachment.incrementalMac);
+        filePointer.incrementalMacChunkSize = attachment.chunkSize;
+      }
+    }

     const locatorInfo = getLocatorInfoForAttachment({
       attachment,
       isLocalBackup,
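
The incrementalMac fields are no longer set unconditionally through the
constructor options; they are now applied only in test or mock environments,
per the DESKTOP-9112 TODO. A sketch of the resulting control flow, with
hypothetical simplified types and Buffer.from standing in for the project's
Bytes.fromBase64 helper:

    type AttachmentRow = { incrementalMac?: unknown; chunkSize?: number };
    type FilePointerLike = {
      incrementalMac?: Uint8Array;
      incrementalMacChunkSize?: number;
    };

    function applyIncrementalMac(
      filePointer: FilePointerLike,
      attachment: AttachmentRow,
      isTestOrMockEnvironment: () => boolean
    ): void {
      // TODO: DESKTOP-9112; skip incrementalMac on export in production builds
      if (!isTestOrMockEnvironment()) {
        return;
      }
      // typeof narrows `unknown` to string, guarding against invalid rows
      // written to the database during internal testing
      if (typeof attachment.incrementalMac === 'string' && attachment.chunkSize) {
        filePointer.incrementalMac = Buffer.from(
          attachment.incrementalMac,
          'base64'
        );
        filePointer.incrementalMacChunkSize = attachment.chunkSize;
      }
    }

Note that the old code also explicitly reset both fields to undefined when the
stored data was invalid; the new code simply leaves them unset on the freshly
constructed FilePointer, which amounts to the same thing.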

View File

@@ -100,12 +100,14 @@ describe('backup/attachments', () => {
         plaintextHash: Bytes.toHex(getRandomBytes(32)),
         key: Bytes.toBase64(generateKeys()),
         digest: Bytes.toBase64(getRandomBytes(32)),
-        size: 100,
+        size: 100 + index,
         contentType: IMAGE_JPEG,
         path: `/path/to/file${index}.png`,
         caption: `caption${index}`,
         localKey: Bytes.toBase64(generateAttachmentKeys()),
         uploadTimestamp: index,
+        incrementalMac: Bytes.toBase64(getRandomBytes(32)),
+        chunkSize: index * 128,
         thumbnail: {
           size: 1024,
           width: 150,
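
The fixture change gives every composed attachment a distinct size (100 + index)
plus an incrementalMac and chunkSize, so the deduplication assertions below can
tell which attachment's values survive the roundtrip.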
@@ -405,11 +407,10 @@ describe('backup/attachments', () => {
     it('deduplicates attachments on export based on mediaName', async () => {
       const attachment1 = composeAttachment(1);
       const attachment2 = {
-        ...attachment1,
-        contentType: IMAGE_WEBP,
-        caption: 'attachment2caption',
-        cdnKey: 'attachment2cdnkey',
-        cdnNumber: 25,
+        ...composeAttachment(2),
+        plaintextHash: attachment1.plaintextHash,
+        key: attachment1.key,
+        size: attachment1.size,
       };

       await asymmetricRoundtripHarness(
@@ -431,6 +432,9 @@ describe('backup/attachments', () => {
             ...attachment2,
             cdnKey: attachment1.cdnKey,
             cdnNumber: attachment1.cdnNumber,
+            uploadTimestamp: attachment1.uploadTimestamp,
+            incrementalMac: attachment1.incrementalMac,
+            chunkSize: attachment1.chunkSize,
           }),
         ],
       }),
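
After the roundtrip, the second attachment is expected to come back carrying
attachment1's cdnKey, cdnNumber, uploadTimestamp, incrementalMac, and chunkSize:
both records now dedupe to the same mediaName, which is derived from the
plaintext hash and encryption key; that is why the test pins plaintextHash, key,
and size to attachment1's values while letting every other field differ.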