Files
Desktop/ts/services/backups/index.preload.ts
2026-03-24 09:06:34 -07:00

1494 lines
44 KiB
TypeScript

// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { pipeline } from 'node:stream/promises';
import { PassThrough } from 'node:stream';
import type { Readable, Writable } from 'node:stream';
import { createReadStream, createWriteStream } from 'node:fs';
import { mkdir, rm, stat, unlink, writeFile } from 'node:fs/promises';
import fsExtra from 'fs-extra';
import { basename, join } from 'node:path';
import { createGzip, createGunzip } from 'node:zlib';
import { createCipheriv, createHmac, randomBytes } from 'node:crypto';
import lodash from 'lodash';
import { BackupLevel } from '@signalapp/libsignal-client/zkgroup.js';
import { BackupKey } from '@signalapp/libsignal-client/dist/AccountKeys.js';
import lodashFp from 'lodash/fp.js';
import { ipcRenderer } from 'electron';
import { DataReader, DataWriter } from '../../sql/Client.preload.js';
import { createLogger } from '../../logging/log.std.js';
import * as Bytes from '../../Bytes.std.js';
import { strictAssert } from '../../util/assert.std.js';
import { drop } from '../../util/drop.std.js';
import { TEMP_PATH } from '../../util/basePaths.preload.js';
import { getAbsoluteDownloadsPath } from '../../util/migrations.preload.js';
import { waitForAllBatchers } from '../../util/batcher.std.js';
import { flushAllWaitBatchers } from '../../util/waitBatcher.std.js';
import { DelimitedStream } from '../../util/DelimitedStream.node.js';
import { appendPaddingStream } from '../../util/logPadding.node.js';
import { prependStream } from '../../util/prependStream.node.js';
import { appendMacStream } from '../../util/appendMacStream.node.js';
import { getMacAndUpdateHmac } from '../../util/getMacAndUpdateHmac.node.js';
import { missingCaseError } from '../../util/missingCaseError.std.js';
import { HOUR, SECOND } from '../../util/durations/index.std.js';
import type { ExplodePromiseResultType } from '../../util/explodePromise.std.js';
import { explodePromise } from '../../util/explodePromise.std.js';
import type { RetryBackupImportValue } from '../../state/ducks/installer.preload.js';
import { CipherType, HashType } from '../../types/Crypto.std.js';
import {
InstallScreenBackupStep,
InstallScreenBackupError,
} from '../../types/InstallScreen.std.js';
import * as Errors from '../../types/errors.std.js';
import {
BackupCredentialType,
type BackupsSubscriptionType,
type BackupStatusType,
} from '../../types/backups.node.js';
import { HTTPError } from '../../types/HTTPError.std.js';
import { constantTimeEqual } from '../../Crypto.node.js';
import { measureSize } from '../../AttachmentCrypto.node.js';
import { signalProtocolStore } from '../../SignalProtocolStore.preload.js';
import { isTestOrMockEnvironment } from '../../environment.std.js';
import { runStorageServiceSyncJob } from '../storage.preload.js';
import { BackupExportStream } from './export.preload.js';
import { BackupImportStream } from './import.preload.js';
import {
getBackupId,
getKeyMaterial,
getLocalBackupMetadataKey,
} from './crypto.preload.js';
import { BackupCredentials } from './credentials.preload.js';
import { BackupAPI } from './api.preload.js';
import {
validateBackup,
validateBackupStream,
ValidationType,
} from './validator.preload.js';
import type {
BackupExportOptions,
BackupImportOptions,
ExportResultType,
LocalBackupExportResultType,
OnProgressCallback,
} from './types.std.js';
import {
BackupInstallerError,
BackupDownloadFailedError,
BackupImportCanceledError,
BackupProcessingError,
RelinkRequestedError,
} from './errors.std.js';
import { FileStream } from './util/FileStream.node.js';
import { ToastType } from '../../types/Toast.dom.js';
import { isAdhoc, isNightly } from '../../util/version.std.js';
import { isLocalBackupsEnabled } from '../../util/isLocalBackupsEnabled.preload.js';
import type { ValidateLocalBackupStructureResultType } from './util/localBackup.node.js';
import {
writeLocalBackupMetadata,
verifyLocalBackupMetadata,
writeLocalBackupFilesList,
readLocalBackupFilesList,
pruneLocalBackups,
validateLocalBackupStructure,
getLocalBackupFilesDirectory,
getLocalBackupSnapshotDirectory,
LOCAL_BACKUP_DIR_NAME,
} from './util/localBackup.node.js';
import {
AttachmentPermanentlyMissingError,
getJobIdForLogging,
runAttachmentBackupJob,
} from '../../jobs/AttachmentLocalBackupManager.preload.js';
import { decipherWithAesKey } from '../../util/decipherWithAesKey.node.js';
import { areRemoteBackupsTurnedOn } from '../../util/isBackupEnabled.preload.js';
import {
isOnline,
unlink as unlinkAccount,
} from '../../textsecure/WebAPI.preload.js';
import { itemStorage } from '../../textsecure/Storage.preload.js';
import { LOCAL_BACKUP_VERSION } from './constants.std.js';
import { getTimestampForFolder } from '../../util/timestamp.std.js';
import { MEBIBYTE } from '../../types/AttachmentSize.std.js';
import {
NotEnoughStorageError,
RanOutOfStorageError,
StoragePermissionsError,
} from '../../types/LocalExport.std.js';
import { getFreeDiskSpace } from '../../util/getFreeDiskSpace.node.js';
import { isFeaturedEnabledNoRedux } from '../../util/isFeatureEnabled.dom.js';
// Helpers pulled off default imports (CJS packages consumed via ESM).
const { ensureFile, exists } = fsExtra;
const { throttle } = lodashFp;
const { isEqual, noop } = lodash;

const log = createLogger('backupsService');

// AES-256-CBC initialization vector length, in bytes.
const IV_LENGTH = 16;

// How often to refresh backup credentials/status while the app runs.
const BACKUP_REFRESH_INTERVAL = 24 * HOUR;

// Minimum free disk space required before starting a local/plaintext export.
// NOTE(review): "MIMINUM" is a typo, kept as-is because the name may be
// referenced elsewhere in this file beyond this view.
const MIMINUM_DISK_SPACE_FOR_LOCAL_EXPORT = 200 * MEBIBYTE;

// Number of local backup snapshot directories retained when pruning.
const LOCAL_BACKUP_SNAPSHOTS_TO_KEEP = 2;
/** Options for the public download-and-import flow. */
export type DownloadOptionsType = Readonly<{
  /** Reports progress for a given install-screen backup step. */
  onProgress?: (
    backupStep: InstallScreenBackupStep,
    currentBytes: number,
    totalBytes: number
  ) => void;
  abortSignal?: AbortSignal;
}>;

/** Internal options for a single download attempt (path already resolved). */
type DoDownloadOptionsType = Readonly<{
  downloadPath: string;
  // Present for link+sync (transfer archive) restores; absent for a regular
  // remote backup download.
  ephemeralKey?: Uint8Array<ArrayBuffer>;
  onProgress?: (
    backupStep: InstallScreenBackupStep,
    currentBytes: number,
    totalBytes: number
  ) => void;
}>;

/**
 * Result of an internal validation run: either the export result or a
 * formatted error string.
 */
export type ValidationResultType = Readonly<
  | {
      result: ExportResultType | LocalBackupExportResultType;
    }
  | {
      error: string;
    }
>;
export class BackupsService {
  // True once start() has run; guards against double-starting the timers.
  #isStarted = false;

  // Which long-running operation is active, if any; used to prevent
  // concurrent import/export runs.
  #isRunning: 'import' | 'export' | false = false;

  // Abort controllers for in-flight import processing and download.
  #importController: AbortController | undefined;
  #downloadController: AbortController | undefined;

  // Pending user decision ('retry' | 'cancel') after a download failure.
  #downloadRetryPromise:
    | ExplodePromiseResultType<RetryBackupImportValue>
    | undefined;

  // Snapshot directory staged via stageLocalBackupForImport().
  #localBackupSnapshotDir: string | undefined;

  public readonly credentials = new BackupCredentials();
  public readonly api = new BackupAPI(this.credentials);

  // Throttled status fetchers: at most one underlying call per 30s each.
  public readonly throttledFetchCloudBackupStatus = throttle(30 * SECOND, () =>
    this.#fetchCloudBackupStatus()
  );
  public readonly throttledFetchSubscriptionStatus = throttle(30 * SECOND, () =>
    this.#fetchSubscriptionStatus()
  );
/**
 * Starts the periodic backup refresh loop and the credential service.
 * No-ops when remote backups are off or when already started.
 */
public start(): void {
  if (!areRemoteBackupsTurnedOn()) {
    log.warn('remote backups are not turned on; not starting');
    return;
  }
  if (this.#isStarted) {
    log.warn('already started');
    return;
  }

  this.#isStarted = true;
  log.info('starting...');

  // Kick off one refresh immediately, then repeat on the interval.
  const refresh = (): void => {
    drop(this.#runPeriodicRefresh());
  };
  setInterval(refresh, BACKUP_REFRESH_INTERVAL);
  refresh();

  this.credentials.start();

  window.Whisper.events.on('userChanged', async () => {
    await this.resetCachedData();
  });
}
/**
 * Downloads a remote (or link+sync transfer) backup and imports it,
 * prompting the user to retry or unlink on failure. Resolves with whether a
 * backup was actually imported.
 */
public async downloadAndImport(
  options: DownloadOptionsType
): Promise<{ wasBackupImported: boolean }> {
  const backupDownloadPath = itemStorage.get('backupDownloadPath');
  if (!backupDownloadPath) {
    log.warn('backups.downloadAndImport: no backup download path, skipping');
    return { wasBackupImported: false };
  }

  log.info('backups.downloadAndImport: downloading...');

  // Present only for link+sync (ephemeral) transfers.
  const ephemeralKey = itemStorage.get('backupEphemeralKey');

  const absoluteDownloadPath = getAbsoluteDownloadsPath(backupDownloadPath);
  let hasBackup = false;

  // Loop until download+import succeeds, the primary says to continue
  // without a backup, or the user cancels (which unlinks).
  // eslint-disable-next-line no-constant-condition
  while (true) {
    try {
      // eslint-disable-next-line no-await-in-loop
      hasBackup = await this.#doDownloadAndImport({
        downloadPath: absoluteDownloadPath,
        onProgress: options.onProgress,
        ephemeralKey,
      });
      if (!hasBackup) {
        // If the primary cancels sync on their end, then we can link without sync
        log.info('backups.downloadAndImport: missing backup');
        window.reduxActions.installer.handleMissingBackup();
      }
    } catch (error) {
      // Surface the error on the install screen and wait for the user to
      // choose 'retry' or 'cancel' ('cancel' is implied for canceled imports).
      this.#downloadRetryPromise = explodePromise<RetryBackupImportValue>();

      let installerError: InstallScreenBackupError;
      if (error instanceof BackupInstallerError) {
        log.error(
          'backups.downloadAndImport: got installer error',
          Errors.toLogFormat(error)
        );
        ({ installerError } = error);
      } else {
        log.error(
          'backups.downloadAndImport: unknown error, prompting user to retry'
        );
        installerError = InstallScreenBackupError.Retriable;
      }

      window.reduxActions.installer.updateBackupImportProgress({
        error: installerError,
      });

      // For download errors, wait for user confirmation to retry or unlink
      const nextStep =
        error instanceof BackupImportCanceledError
          ? 'cancel'
          : // eslint-disable-next-line no-await-in-loop
            await this.#downloadRetryPromise.promise;

      if (nextStep === 'retry') {
        log.warn('backups.downloadAndImport: retrying');
        continue;
      }

      if (nextStep !== 'cancel') {
        throw missingCaseError(nextStep);
      }

      // If we are here: the user has either canceled manually, or after
      // getting an error (potentially fatal).
      log.warn('backups.downloadAndImport: unlinking');

      // eslint-disable-next-line no-await-in-loop
      await this.#unlinkAndDeleteAllData();

      try {
        // eslint-disable-next-line no-await-in-loop
        await unlink(absoluteDownloadPath);
      } catch {
        // Best-effort
      }

      // Make sure to fail the backup import process so that background.ts
      // will not wait for the syncs.
      throw error;
    }
    break;
  }

  // One-shot restore state is no longer needed once the attempt is resolved.
  await itemStorage.remove('backupDownloadPath');
  await itemStorage.remove('backupEphemeralKey');
  await itemStorage.remove('backupTransitArchive');
  await itemStorage.put('isRestoredFromBackup', hasBackup);

  log.info('backups.downloadAndImport: done');
  return { wasBackupImported: hasBackup };
}
public retryDownload(): void {
if (!this.#downloadRetryPromise) {
return;
}
this.#downloadRetryPromise.resolve('retry');
}
/**
 * Exports a remote backup into a temp file and uploads it, always removing
 * the temp file afterwards (best-effort).
 */
public async upload(): Promise<void> {
  await this.#waitForEmptyQueues('backups.upload');

  const tempFilePath = join(
    TEMP_PATH,
    `backup-${randomBytes(32).toString('hex')}`
  );
  const backupLevel = await this.credentials.getBackupLevel(
    BackupCredentialType.Media
  );
  log.info(`exportBackup: starting, backup level: ${backupLevel}...`);

  try {
    const { totalBytes } = await this.exportToDisk(tempFilePath, {
      type: 'remote',
      level: backupLevel,
      abortSignal: new AbortController().signal,
    });
    await this.api.upload(tempFilePath, totalBytes);
  } finally {
    try {
      await unlink(tempFilePath);
    } catch {
      // Ignore
    }
  }
}
/**
 * Exports an encrypted local backup snapshot (main file, files list,
 * metadata) under backupsBaseDir, copies attachment files, and prunes old
 * snapshots. On failure the just-created snapshot directory is removed and
 * backups are pruned before the error is rethrown.
 *
 * @throws NotEnoughStorageError when free disk space is below the minimum.
 */
public async exportLocalBackup(options: {
  backupsBaseDir: string;
  abortSignal: AbortSignal;
  onProgress: OnProgressCallback;
}): Promise<LocalBackupExportResultType> {
  strictAssert(isLocalBackupsEnabled(), 'Local backups must be enabled');

  const fnLog = log.child('exportLocalBackup');
  fnLog.info('starting...');

  if (isOnline()) {
    await this.#waitForEmptyQueues('backups.exportLocalBackup');
  } else {
    fnLog.info('offline; skipping wait for empty queues');
  }

  // Just in case it's been deleted, ensure the backup dir exists
  await mkdir(options.backupsBaseDir, { recursive: true });

  const freeSpaceBytes = await getFreeDiskSpace(options.backupsBaseDir);
  const bytesNeeded = MIMINUM_DISK_SPACE_FOR_LOCAL_EXPORT - freeSpaceBytes;
  if (bytesNeeded > 0) {
    fnLog.info(
      `Not enough storage; only ${freeSpaceBytes} available, ${MIMINUM_DISK_SPACE_FOR_LOCAL_EXPORT} is minimum needed`
    );
    // Fix: previously this only logged and continued, which surfaced later
    // as an ENOSPC failure mid-export. Fail fast, consistent with
    // exportPlaintext and #runLocalAttachmentBackupJobs.
    throw new NotEnoughStorageError(bytesNeeded);
  }

  const filesDir = getLocalBackupFilesDirectory({
    backupsBaseDir: options.backupsBaseDir,
  });
  await mkdir(filesDir, { recursive: true });

  const snapshotDir = getLocalBackupSnapshotDirectory(
    options.backupsBaseDir,
    Date.now()
  );
  if (await exists(snapshotDir)) {
    throw new Error('snapshotDir already exists');
  }

  try {
    await mkdir(snapshotDir, { recursive: true });

    const exportResult = await this.exportToDisk(join(snapshotDir, 'main'), {
      type: 'local-encrypted',
      abortSignal: options.abortSignal,
    });

    fnLog.info('writing local backup files list');
    const filesWritten = await writeLocalBackupFilesList({
      snapshotDir,
      mediaNames: exportResult.mediaNames,
    });
    // Round-trip the files list to verify it was written correctly.
    const filesRead = await readLocalBackupFilesList(snapshotDir);
    strictAssert(
      isEqual(filesWritten, filesRead),
      'exportBackup: Local backup files proto must match files written'
    );

    fnLog.info('writing metadata');
    const metadataArgs = {
      snapshotDir,
      backupId: getBackupId(),
      metadataKey: getLocalBackupMetadataKey(),
    };
    await writeLocalBackupMetadata(metadataArgs);
    await verifyLocalBackupMetadata(metadataArgs);

    await this.#runLocalAttachmentBackupJobs({
      attachmentBackupJobs: exportResult.attachmentBackupJobs,
      baseDir: options.backupsBaseDir,
      onProgress: options.onProgress,
      abortSignal: options.abortSignal,
    });

    try {
      await pruneLocalBackups({
        backupsBaseDir: options.backupsBaseDir,
        numSnapshotsToKeep: LOCAL_BACKUP_SNAPSHOTS_TO_KEEP,
      });
    } catch (error) {
      fnLog.warn(
        'failed to prune old local backups',
        Errors.toLogFormat(error)
      );
    }

    return { ...exportResult, snapshotDir };
  } catch (e) {
    if (options.abortSignal.aborted) {
      fnLog.warn('aborted', Errors.toLogFormat(e));
    } else {
      fnLog.error('encountered error', Errors.toLogFormat(e));
    }

    fnLog.info('Deleting just-created snapshot directory');
    await rm(snapshotDir, { recursive: true, force: true });
    fnLog.info('Deleted just-created directory');

    // Prune to remove any files which may have been written before the error occurred
    try {
      await pruneLocalBackups({
        backupsBaseDir: options.backupsBaseDir,
        numSnapshotsToKeep: LOCAL_BACKUP_SNAPSHOTS_TO_KEEP,
      });
    } catch (error) {
      fnLog.warn(
        'failed to prune local backups after export error',
        Errors.toLogFormat(error)
      );
    }

    throw e;
  }
}
/**
 * Copies attachment files referenced by an export into baseDir.
 *
 * First sums the attachment sizes and checks free disk space (plus a
 * 100 MiB buffer), then runs each job sequentially, reporting byte progress
 * via onProgress(currentBytes, totalBytes). Permanently missing attachments
 * are logged and skipped (their size still counts toward progress).
 *
 * @throws NotEnoughStorageError before starting, when space is insufficient
 * @throws RanOutOfStorageError on ENOSPC mid-run
 * @throws StoragePermissionsError on EPERM/EACCES
 */
async #runLocalAttachmentBackupJobs({
  attachmentBackupJobs,
  baseDir,
  onProgress,
  abortSignal,
}: {
  attachmentBackupJobs: ExportResultType['attachmentBackupJobs'];
  baseDir: string;
  onProgress: OnProgressCallback;
  abortSignal: AbortSignal;
}) {
  let totalAttachmentBytes = 0;
  let currentBytes = 0;

  log.info(
    `runLocalExportAttachmentBackupJobs: About to process ${attachmentBackupJobs.length} jobs`
  );

  // Pre-pass: compute the total byte count for progress + space checks.
  for (const job of attachmentBackupJobs) {
    strictAssert(job.type === 'local', 'must be local');
    totalAttachmentBytes += job.data.size;
  }

  const freeSpaceBytes = await getFreeDiskSpace(baseDir);
  const bufferBytes = 100 * MEBIBYTE;
  const bytesNeeded = totalAttachmentBytes + bufferBytes - freeSpaceBytes;
  if (bytesNeeded > 0) {
    log.info(
      `exportLocalBackup: Not enough storage; only ${freeSpaceBytes} available, ${totalAttachmentBytes} of attachments to export`
    );
    throw new NotEnoughStorageError(bytesNeeded);
  }

  for (const job of attachmentBackupJobs) {
    strictAssert(job.type === 'local', 'must be local');

    if (abortSignal.aborted) {
      log.info(
        'exportLocalBackup: Aborted; exiting before processing all attachment jobs'
      );
      throw new Error('User aborted the export!');
    }

    try {
      // eslint-disable-next-line no-await-in-loop
      await runAttachmentBackupJob(job, baseDir);
      currentBytes += job.data.size;
      onProgress(currentBytes, totalAttachmentBytes);
    } catch (error) {
      if (error instanceof AttachmentPermanentlyMissingError) {
        // Missing attachment: continue, but keep the progress total honest.
        log.error(
          `${getJobIdForLogging(job)}: Attachment was not found; continuing with export`
        );
        currentBytes += job.data.size;
        continue;
      }

      const stillToExportBytes = totalAttachmentBytes - currentBytes;
      if (error.code === 'ENOSPC') {
        throw new RanOutOfStorageError(stillToExportBytes);
      }
      if (error.code === 'EPERM' || error.code === 'EACCES') {
        throw new StoragePermissionsError();
      }
      throw error;
    }
  }
}
public async stageLocalBackupForImport(
snapshotDir: string
): Promise<ValidateLocalBackupStructureResultType> {
const result = await validateLocalBackupStructure(snapshotDir);
const { success, error } = result;
if (success) {
this.#localBackupSnapshotDir = snapshotDir;
if (!isTestOrMockEnvironment()) {
// Regenerate QR code without link & sync option
window.reduxActions.installer.startInstaller();
// eslint-disable-next-line no-alert
window.alert(
'Staged backup successfully. Please link to perform import.'
);
}
log.info(
`stageLocalBackupForImport: Staged ${snapshotDir} for import. Please link to perform import.`
);
} else {
this.#localBackupSnapshotDir = undefined;
// eslint-disable-next-line no-alert
window.alert(
'Invalid backup snapshot directory; make sure you choose a snapshot directory (e.g. `signal-backup-2026-01-01-12-00-00`)'
);
log.info(
`stageLocalBackupForImport: Invalid snapshot ${snapshotDir}. Error: ${error}.`
);
}
return result;
}
/** Whether a local backup snapshot has been staged for import. */
public isLocalBackupStaged(): boolean {
  return !!this.#localBackupSnapshotDir;
}
public async importLocalBackup(): Promise<void> {
strictAssert(
this.#localBackupSnapshotDir,
'importLocalBackup: Staged backup is required, use stageLocalBackupForImport()'
);
log.info(`importLocalBackup: Importing ${this.#localBackupSnapshotDir}`);
const backupFile = join(this.#localBackupSnapshotDir, 'main');
await this.importFromDisk(backupFile, {
type: 'local-encrypted',
localBackupSnapshotDir: this.#localBackupSnapshotDir,
});
await verifyLocalBackupMetadata({
snapshotDir: this.#localBackupSnapshotDir,
backupId: getBackupId(),
metadataKey: getLocalBackupMetadataKey(),
});
this.#localBackupSnapshotDir = undefined;
log.info('importLocalBackup: Done');
}
// Test harness
public async exportBackupData(
options: BackupExportOptions
): Promise<{ data: Uint8Array<ArrayBuffer> } & ExportResultType> {
const sink = new PassThrough();
const chunks = new Array<Uint8Array<ArrayBuffer>>();
sink.on('data', chunk => chunks.push(chunk));
const result = await this.#exportBackup(sink, options);
return {
...result,
data: Bytes.concatenate(chunks),
};
}
/**
 * Exports a backup to the given path; for encrypted formats the written
 * file is then validated end-to-end.
 */
public async exportToDisk(
  path: string,
  options: BackupExportOptions
): Promise<ExportResultType> {
  const sink = createWriteStream(path);
  const exportResult = await this.#exportBackup(sink, options);

  const isEncrypted =
    options.type === 'local-encrypted' || options.type === 'remote';
  if (isEncrypted) {
    const validationType = isTestOrMockEnvironment()
      ? ValidationType.Internal
      : ValidationType.Export;
    await validateBackup(
      () => new FileStream(path),
      exportResult.totalBytes,
      validationType
    );
  }

  return exportResult;
}
/**
 * Exports an unencrypted (plaintext JSONL) backup into a timestamped folder
 * under targetPath, optionally including attachment files. The export
 * directory is deleted on failure.
 *
 * @throws NotEnoughStorageError when free disk space is below the minimum
 * @throws StoragePermissionsError on EPERM/EACCES
 */
public async exportPlaintext({
  abortSignal,
  onProgress,
  shouldIncludeMedia,
  targetPath,
}: {
  abortSignal: AbortSignal;
  onProgress: OnProgressCallback;
  shouldIncludeMedia: boolean;
  targetPath: string;
}): Promise<LocalBackupExportResultType> {
  let exportDir: string | undefined;
  const fnLog = log.child('exportPlaintext');
  try {
    fnLog.info('starting...');

    const freeSpaceBytes = await getFreeDiskSpace(targetPath);
    const bytesNeeded = MIMINUM_DISK_SPACE_FOR_LOCAL_EXPORT - freeSpaceBytes;
    if (bytesNeeded > 0) {
      fnLog.info(
        `Not enough storage; only ${freeSpaceBytes} available, ${MIMINUM_DISK_SPACE_FOR_LOCAL_EXPORT} is minimum needed`
      );
      throw new NotEnoughStorageError(bytesNeeded);
    }

    exportDir = join(targetPath, `signal-export-${getTimestampForFolder()}`);
    await mkdir(exportDir, { recursive: true });

    strictAssert(
      isFeaturedEnabledNoRedux({
        betaKey: 'desktop.plaintextExport.beta',
        prodKey: 'desktop.plaintextExport.prod',
      }),
      'Plaintext export must be enabled'
    );

    if (isOnline()) {
      await this.#waitForEmptyQueues('backups.exportPlaintext');
    } else {
      fnLog.info('offline; skipping wait for empty queues');
    }

    // Fix: a duplicated 'starting...' log and a second redundant mkdir of
    // exportDir previously ran here; both removed.

    const exportResult = await this.exportToDisk(
      join(exportDir, 'main.jsonl'),
      {
        type: 'plaintext-export',
        abortSignal,
      }
    );

    fnLog.info('writing metadata');
    const metadataPath = join(exportDir, 'metadata.json');
    await writeFile(
      metadataPath,
      JSON.stringify({
        version: LOCAL_BACKUP_VERSION,
      })
    );

    if (shouldIncludeMedia) {
      await this.#runLocalAttachmentBackupJobs({
        attachmentBackupJobs: exportResult.attachmentBackupJobs,
        baseDir: exportDir,
        onProgress,
        abortSignal,
      });
    }

    fnLog.info('finished');
    return {
      ...exportResult,
      snapshotDir: exportDir,
    };
  } catch (error) {
    fnLog.warn('encountered error', Errors.toLogFormat(error));

    if (exportDir) {
      fnLog.info('Deleting export directory');
      await rm(exportDir, { recursive: true, force: true });
      fnLog.info('Export directory deleted');
    }

    if (error.code === 'EPERM' || error.code === 'EACCES') {
      throw new StoragePermissionsError();
    }
    throw error;
  }
}
/** Test harness: prompts for a snapshot folder and stages it for import. */
public async _internalStageLocalBackupForImport(): Promise<ValidateLocalBackupStructureResultType> {
  const dialogResult = await ipcRenderer.invoke('show-open-folder-dialog');
  const { canceled, dirPath: snapshotDir } = dialogResult;

  if (canceled || !snapshotDir) {
    return {
      success: false,
      error: 'File dialog canceled',
      snapshotDir: undefined,
    };
  }
  return this.stageLocalBackupForImport(snapshotDir);
}
// Test harness
/**
 * Runs an export stream through the validator without touching disk;
 * returns either the would-be export result or a formatted error string.
 */
public async _internalValidate(
  exportOptions: BackupExportOptions = {
    type: 'local-encrypted',
    abortSignal: new AbortController().signal,
  }
): Promise<ValidationResultType> {
  try {
    log.info('internal validation: starting');
    const startedAt = Date.now();
    window.IPC.startTrackingQueryStats();

    const recordStream = new BackupExportStream({
      ...exportOptions,
      validationRun: true,
    });
    recordStream.run();

    const totalBytes = await validateBackupStream(recordStream);

    window.IPC.stopTrackingQueryStats({
      epochName: 'Internal Validate Backup',
    });
    const duration = Date.now() - startedAt;

    log.info('internal validation: succeeded');
    return {
      result: {
        attachmentBackupJobs: recordStream.getAttachmentBackupJobs(),
        mediaNames: recordStream.getMediaNames(),
        duration,
        stats: recordStream.getStats(),
        totalBytes,
      },
    };
  } catch (error) {
    log.warn(
      'internal validation: failed with errors\n',
      Errors.toLogFormat(error)
    );
    return { error: Errors.toLogFormat(error) };
  }
}
/** Imports a backup by streaming the given file from disk. */
public async importFromDisk(
  backupFile: string,
  options: BackupImportOptions
): Promise<void> {
  const openStream = (): Readable => createReadStream(backupFile);
  return this.importBackup(openStream, options);
}
public cancelDownloadAndImport(): void {
if (!this.#downloadController && !this.#importController) {
log.error(
'cancelDownloadAndImport: not canceling, download or import is not running'
);
return;
}
if (this.#downloadController) {
log.warn('cancelDownloadAndImport: canceling download');
this.#downloadController.abort();
this.#downloadController = undefined;
if (this.#downloadRetryPromise) {
this.#downloadRetryPromise.resolve('cancel');
}
}
if (this.#importController) {
log.warn('cancelDownloadAndImport: canceling import processing');
this.#importController.abort();
this.#importController = undefined;
}
}
/**
 * Imports a backup from a stream factory. The factory is called twice for
 * encrypted backups ('remote' / 'local-encrypted'):
 *
 *   Pass 1: stream the whole file, computing our HMAC and capturing the
 *           appended MAC, without decrypting — nothing is written until the
 *           MAC checks out. Also measures totalBytes for progress.
 *   Pass 2: stream again, decrypting, gunzipping, splitting into delimited
 *           frames and writing to the DB; the MAC is re-checked at the end.
 *
 * Message-insert triggers and fsync are disabled for the duration and
 * restored (with backfill/checkpoint) in the finally block.
 */
public async importBackup(
  createBackupStream: () => Readable,
  options: BackupImportOptions
): Promise<void> {
  strictAssert(!this.#isRunning, 'BackupService is already running');

  window.IPC.startTrackingQueryStats();

  log.info(`importBackup: starting ${options.type}...`);
  this.#isRunning = 'import';
  const importStart = Date.now();

  // Speed up bulk inserts; restored in the finally block.
  await DataWriter.disableMessageInsertTriggers();
  await DataWriter.disableFSync();

  try {
    const controller = new AbortController();

    // Abort any previous import before taking over.
    this.#importController?.abort();
    this.#importController = controller;

    window.ConversationController.setReadOnly(true);

    const importStream = await BackupImportStream.create(options);
    if (options.type === 'remote' || options.type === 'local-encrypted') {
      // An ephemeral key is present for link+sync transfers.
      const { aesKey, macKey } = getKeyMaterial(
        options.ephemeralKey ? new BackupKey(options.ephemeralKey) : undefined
      );

      // First pass - don't decrypt, only verify mac
      let hmac = createHmac(HashType.size256, macKey);
      let theirMac: Uint8Array<ArrayBuffer> | undefined;
      let totalBytes = 0;

      const sink = new PassThrough();
      sink.on('data', chunk => {
        totalBytes += chunk.byteLength;
      });
      // Discard the data in the first pass
      sink.resume();

      await pipeline(
        createBackupStream(),
        getMacAndUpdateHmac(hmac, theirMacValue => {
          theirMac = theirMacValue;
        }),
        sink
      );

      if (controller.signal.aborted) {
        throw new BackupImportCanceledError();
      }

      options.onProgress?.(0, totalBytes);

      strictAssert(theirMac != null, 'importBackup: Missing MAC');
      strictAssert(
        constantTimeEqual(hmac.digest(), theirMac),
        'importBackup: Bad MAC'
      );

      // Second pass - decrypt (but still check the mac at the end)
      hmac = createHmac(HashType.size256, macKey);

      // Reports progress against the byte total measured in pass 1.
      const progressReporter = new PassThrough();
      progressReporter.pause();

      let currentBytes = 0;
      progressReporter.on('data', chunk => {
        currentBytes += chunk.byteLength;
        options.onProgress?.(currentBytes, totalBytes);
      });

      await pipeline(
        createBackupStream(),
        getMacAndUpdateHmac(hmac, noop),
        progressReporter,
        decipherWithAesKey(aesKey),
        createGunzip(),
        new DelimitedStream(),
        importStream,
        { signal: controller.signal }
      );

      strictAssert(
        constantTimeEqual(hmac.digest(), theirMac),
        'importBackup: Bad MAC, second pass'
      );
    } else if (options.type === 'cross-client-integration-test') {
      strictAssert(
        isTestOrMockEnvironment(),
        'Plaintext backups can be imported only in test harness'
      );
      strictAssert(
        options.ephemeralKey == null,
        'Plaintext backups cannot have ephemeral key'
      );
      await pipeline(
        createBackupStream(),
        new DelimitedStream(),
        importStream
      );
    } else {
      throw missingCaseError(options.type);
    }

    log.info('importBackup: finished...');
  } catch (error) {
    if (error.name === 'AbortError') {
      log.info('importBackup: canceled by user');
      throw new BackupImportCanceledError();
    }

    log.error(`importBackup: failed, error: ${Errors.toLogFormat(error)}`);

    if (isNightly(window.getVersion()) || isAdhoc(window.getVersion())) {
      window.reduxActions.toast.showToast({
        toastType: ToastType.FailedToImportBackup,
      });
    }

    throw error;
  } finally {
    window.ConversationController.setReadOnly(false);
    this.#isRunning = false;
    this.#importController = undefined;

    // Restore the DB settings disabled above.
    await DataWriter.enableMessageInsertTriggersAndBackfill();
    await DataWriter.enableFSyncAndCheckpoint();

    window.IPC.stopTrackingQueryStats({ epochName: 'Backup Import' });

    if (window.SignalCI) {
      window.SignalCI.handleEvent('backupImportComplete', {
        duration: Date.now() - importStart,
      });
    }
  }
}
/**
 * Replaces locally-cached backup CDN object metadata with a fresh, paginated
 * listing from the server. Remote backups must be turned on.
 */
public async fetchAndSaveBackupCdnObjectMetadata(): Promise<void> {
  log.info('fetchAndSaveBackupCdnObjectMetadata: clearing existing metadata');
  await DataWriter.clearAllBackupCdnObjectMetadata();

  strictAssert(
    areRemoteBackupsTurnedOn(),
    'Remote backups must be turned on to fetch cdn metadata'
  );

  const PAGE_SIZE = 1000;
  let cursor: string | undefined;
  let numObjects = 0;

  do {
    log.info('fetchAndSaveBackupCdnObjectMetadata: fetching next page');
    // eslint-disable-next-line no-await-in-loop
    const page = await this.api.listMedia({ cursor, limit: PAGE_SIZE });

    const rows = page.storedMediaObjects.map(
      ({ mediaId, cdn, objectLength }) => ({
        mediaId,
        cdnNumber: cdn,
        sizeOnBackupCdn: objectLength,
      })
    );
    // eslint-disable-next-line no-await-in-loop
    await DataWriter.saveBackupCdnObjectMetadata(rows);

    numObjects += page.storedMediaObjects.length;
    cursor = page.cursor ?? undefined;
  } while (cursor);

  log.info(
    `fetchAndSaveBackupCdnObjectMetadata: finished fetching metadata for ${numObjects} objects`
  );
}
/** Looks up whether a media object is known to be on the backup CDN tier. */
public async getBackupCdnInfo(
  mediaId: string
): Promise<
  { isInBackupTier: true; cdnNumber: number } | { isInBackupTier: false }
> {
  const metadata = await DataReader.getBackupCdnObjectMetadata(mediaId);
  return metadata
    ? { isInBackupTier: true, cdnNumber: metadata.cdnNumber }
    : { isInBackupTier: false };
}
/**
 * Downloads the backup file (resuming from any partial file on disk) and
 * imports it. Returns false when the server has no backup for us; true on
 * successful import; throws Backup*Error subclasses otherwise.
 */
async #doDownloadAndImport({
  downloadPath,
  ephemeralKey,
  onProgress,
}: DoDownloadOptionsType): Promise<boolean> {
  const controller = new AbortController();

  // Abort previous download
  this.#downloadController?.abort();
  this.#downloadController = controller;

  // Resume a partial download from its current on-disk size, if present.
  let downloadOffset = 0;
  try {
    ({ size: downloadOffset } = await stat(downloadPath));
  } catch (error) {
    if (error.code !== 'ENOENT') {
      throw error;
    }

    // File is missing - start from the beginning
  }

  const onDownloadProgress = (
    currentBytes: number,
    totalBytes: number
  ): void => {
    onProgress?.(InstallScreenBackupStep.Download, currentBytes, totalBytes);
  };

  await ensureFile(downloadPath);
  if (controller.signal.aborted) {
    throw new BackupImportCanceledError();
  }

  let stream: Readable;

  try {
    if (ephemeralKey == null) {
      // Regular remote backup download.
      stream = await this.api.download({
        downloadOffset,
        onProgress: onDownloadProgress,
        abortSignal: controller.signal,
      });
    } else {
      // Link+sync: the primary uploads a transfer archive; cache its
      // cdn/key so a retry does not have to re-request it.
      let archive = itemStorage.get('backupTransitArchive');
      if (archive == null) {
        const response = await this.api.getTransferArchive(controller.signal);

        if ('error' in response) {
          switch (response.error) {
            case 'RELINK_REQUESTED':
              throw new RelinkRequestedError();

            // Primary decided to abort syncing process; continue on with no backup
            case 'CONTINUE_WITHOUT_UPLOAD':
              log.error(
                'backups.doDownloadAndImport: primary requested to continue without syncing'
              );
              return false;

            default:
              throw missingCaseError(response.error);
          }
        }

        archive = {
          cdn: response.cdn,
          key: response.key,
        };
        await itemStorage.put('backupTransitArchive', archive);
      }

      stream = await this.api.downloadEphemeral({
        archive,
        downloadOffset,
        onProgress: onDownloadProgress,
        abortSignal: controller.signal,
      });
    }
  } catch (error) {
    if (controller.signal.aborted) {
      throw new BackupImportCanceledError();
    }

    // No backup on the server
    if (error instanceof HTTPError && error.code === 404) {
      return false;
    }

    if (error instanceof BackupInstallerError) {
      throw error;
    }

    log.error(
      'backups.doDownloadAndImport: error downloading backup file',
      Errors.toLogFormat(error)
    );
    throw new BackupDownloadFailedError();
  }

  if (controller.signal.aborted) {
    throw new BackupImportCanceledError();
  }

  try {
    // Append to the partial file starting at the resume offset.
    await pipeline(
      stream,
      createWriteStream(downloadPath, {
        flags: 'a',
        start: downloadOffset,
      })
    );

    if (controller.signal.aborted) {
      throw new BackupImportCanceledError();
    }

    this.#downloadController = undefined;

    try {
      // Import and start writing to the DB. Make sure we are unlinked
      // if the import process is aborted due to error or restart.
      const password = itemStorage.get('password');
      strictAssert(password != null, 'Must be registered to import backup');
      await itemStorage.remove('password');

      await this.importFromDisk(downloadPath, {
        type: 'remote',
        ephemeralKey,
        onProgress: (currentBytes, totalBytes) => {
          onProgress?.(
            InstallScreenBackupStep.Process,
            currentBytes,
            totalBytes
          );
        },
      });

      // Restore password on success
      await itemStorage.put('password', password);
    } catch (e) {
      // Error or manual cancel during import; this is non-retriable
      if (e instanceof BackupInstallerError) {
        throw e;
      } else {
        throw new BackupProcessingError(e);
      }
    } finally {
      // The downloaded file is no longer needed, success or failure.
      await unlink(downloadPath);
    }
  } catch (error) {
    // Download canceled
    if (error.name === 'AbortError') {
      throw new BackupImportCanceledError();
    }

    // Other errors bubble up and can be retried
    throw error;
  }

  return true;
}
/**
 * Core export: streams backup frames from BackupExportStream into `sink`.
 *
 * For 'remote' / 'local-encrypted' the record stream is gzipped, padded,
 * AES-256-CBC encrypted with a fresh random IV (IV prepended, HMAC
 * appended); test/plaintext types are written as-is. totalBytes is measured
 * on the final byte stream.
 */
async #exportBackup(
  sink: Writable,
  options: BackupExportOptions
): Promise<ExportResultType> {
  strictAssert(!this.#isRunning, 'BackupService is already running');

  log.info('exportBackup: starting...');
  this.#isRunning = 'export';
  const start = Date.now();
  window.IPC.startTrackingQueryStats();

  try {
    if (options.type === 'remote') {
      strictAssert(
        areRemoteBackupsTurnedOn(),
        'Remote backups must be turned on for a remote export'
      );
    }

    // TODO (DESKTOP-7168): Update mock-server to support this endpoint
    if (window.SignalCI || options.type === 'cross-client-integration-test') {
      strictAssert(
        isTestOrMockEnvironment(),
        'exportBackup: cross-client-integration tests must only be run in test harness'
      );
    }

    switch (options.type) {
      case 'remote':
        log.info('exportBackup: Fetching latest backup CDN metadata');
        await this.fetchAndSaveBackupCdnObjectMetadata();
        break;
      case 'cross-client-integration-test':
      case 'local-encrypted':
      case 'plaintext-export':
        // no need to fetch what's on backup CDN
        break;
      default:
        throw missingCaseError(options);
    }

    const { aesKey, macKey } = getKeyMaterial();
    const recordStream = new BackupExportStream(options);

    recordStream.run();

    // Fresh random IV per export; it is prepended to the ciphertext below.
    const iv = randomBytes(IV_LENGTH);

    let totalBytes = 0;

    const { type } = options;
    switch (type) {
      case 'remote':
      case 'local-encrypted':
        await pipeline(
          recordStream,
          createGzip(),
          appendPaddingStream(),
          createCipheriv(CipherType.AES256CBC, aesKey, iv),
          prependStream(iv),
          appendMacStream(macKey),
          measureSize({
            onComplete: size => {
              totalBytes = size;
            },
          }),
          sink,
          { signal: options.abortSignal }
        );
        break;
      case 'cross-client-integration-test':
        strictAssert(
          isTestOrMockEnvironment(),
          'exportBackup: Plaintext backups can be exported only in test harness'
        );
        await pipeline(
          recordStream,
          measureSize({
            onComplete: size => {
              totalBytes = size;
            },
          }),
          sink,
          { signal: options.abortSignal }
        );
        break;
      case 'plaintext-export':
        await pipeline(
          recordStream,
          measureSize({
            onComplete: size => {
              totalBytes = size;
            },
          }),
          sink,
          { signal: options.abortSignal }
        );
        break;
      default:
        throw missingCaseError(type);
    }

    const duration = Date.now() - start;

    return {
      attachmentBackupJobs: recordStream.getAttachmentBackupJobs(),
      mediaNames: recordStream.getMediaNames(),
      totalBytes,
      stats: recordStream.getStats(),
      duration,
    };
  } finally {
    window.IPC.stopTrackingQueryStats({ epochName: 'Backup Export' });
    log.info('exportBackup: finished...');
    this.#isRunning = false;
  }
}
/**
 * Periodic maintenance tick: refreshes the backup via the API, then
 * re-fetches backup/subscription status. An API failure is logged and
 * swallowed so the status refresh below always runs.
 */
async #runPeriodicRefresh(): Promise<void> {
  try {
    await this.api.refresh();
    log.info('Backup: refreshed');
  } catch (err) {
    log.error('Backup: periodic refresh failed', Errors.toLogFormat(err));
  }

  await this.refreshBackupAndSubscriptionStatus();
}
/**
 * Cancels a backup import end-to-end: surfaces the canceled state in the
 * installer UI, unlinks the account, wipes all local protocol data, and
 * finally restarts the installer flow.
 */
async #unlinkAndDeleteAllData() {
  window.reduxActions.installer.updateBackupImportProgress({
    error: InstallScreenBackupError.Canceled,
  });

  // Unlink failures are only warnings: the server may already consider us
  // unlinked, in which case this call is expected to fail.
  try {
    await unlinkAccount();
  } catch (error) {
    log.warn(
      'Error while unlinking; this may be expected for the unlink operation',
      Errors.toLogFormat(error)
    );
  }

  try {
    log.info('backups.unlinkAndDeleteAllData: deleting all data');
    await signalProtocolStore.removeAllData();
    log.info('backups.unlinkAndDeleteAllData: all data deleted successfully');
  } catch (error) {
    log.error(
      'backups.unlinkAndDeleteAllData: unable to remove all data',
      Errors.toLogFormat(error)
    );
  }

  // The QR code should be regenerated only after all data is cleared to prevent
  // a race where the QR code doesn't show the backup capability
  window.reduxActions.installer.startInstaller();
}
/**
 * Blocks until the app is quiescent enough to export a backup: the storage
 * service has completed a sync, the message event queue is empty, and all
 * batchers have been flushed.
 */
async #waitForEmptyQueues(
  reason:
    | 'backups.upload'
    | 'backups.exportPlaintext'
    | 'backups.exportLocalBackup'
) {
  // Make sure we are up-to-date on storage service
  {
    const { promise: syncDone, resolve } = explodePromise<void>();
    window.Whisper.events.once('storageService:syncComplete', resolve);
    runStorageServiceSyncJob({ reason });
    runStorageServiceSyncJob.flush();
    await syncDone;
  }

  // Clear message queue
  await window.waitForEmptyEventQueue();

  // Make sure all batches are flushed
  await Promise.all([waitForAllBatchers(), flushAllWaitBatchers()]);
}
/** Whether a backup import is currently in progress. */
public isImportRunning(): boolean {
  return this.#isRunning === 'import';
}
/** Whether a backup export is currently in progress. */
public isExportRunning(): boolean {
  return this.#isRunning === 'export';
}
/**
 * Reads the persisted backup tier from storage, validating it against the
 * known levels. Returns null when the value is unset; unrecognized values
 * are logged and also treated as null.
 */
#getBackupTierFromStorage(): BackupLevel | null {
  const storedTier = itemStorage.get('backupTier');

  if (storedTier === undefined) {
    return null;
  }
  if (storedTier === BackupLevel.Free || storedTier === BackupLevel.Paid) {
    return storedTier;
  }

  log.error('Unknown backupTier in storage', storedTier);
  return null;
}
/**
 * Queries the server for backup proto metadata, caches the derived status
 * in storage (undefined when no backup exists), and returns it.
 */
async #fetchCloudBackupStatus(): Promise<BackupStatusType | undefined> {
  const protoInfo = await this.api.getBackupProtoInfo();

  let status: BackupStatusType | undefined;
  if (protoInfo.backupExists) {
    status = {
      createdTimestamp: protoInfo.createdAt.getTime(),
      protoSize: protoInfo.size,
    };
  }

  // Persist even the undefined result so stale status is cleared
  await itemStorage.put('cloudBackupStatus', status);
  return status;
}
/**
 * Resolves the backup subscription status for the stored tier. For paid
 * tiers the cached entry is first flagged `isFetching` while the network
 * request runs; the final result (with fetch timestamp) is cached and
 * returned. Free/unset tiers resolve immediately to 'not-found'.
 */
async #fetchSubscriptionStatus(): Promise<
  BackupsSubscriptionType | undefined
> {
  const cachedStatus = itemStorage.get('backupSubscriptionStatus');
  const tier = this.#getBackupTierFromStorage();

  let subscription: BackupsSubscriptionType | undefined;
  if (tier == null || tier === BackupLevel.Free) {
    // Covers null, undefined, and the free tier alike
    subscription = { status: 'not-found' };
  } else if (tier === BackupLevel.Paid) {
    // Mark the cached entry as in-flight before hitting the network
    await itemStorage.put('backupSubscriptionStatus', {
      ...(cachedStatus ?? { status: 'not-found' }),
      isFetching: true,
    });
    subscription = await this.api.getSubscriptionInfo();
  } else {
    throw missingCaseError(tier);
  }

  await itemStorage.put('backupSubscriptionStatus', {
    ...subscription,
    lastFetchedAtMs: Date.now(),
    isFetching: false,
  });
  return subscription;
}
async refreshBackupAndSubscriptionStatus(): Promise<void> {
await Promise.all([
this.#fetchSubscriptionStatus(),
this.#fetchCloudBackupStatus(),
]);
}
/**
 * Drops all cached backup state — API cache, credential cache, and the
 * stored status entries — then re-fetches fresh status from the server.
 */
async resetCachedData(): Promise<void> {
  this.api.clearCache();
  await this.credentials.clearCache();
  await itemStorage.remove('backupSubscriptionStatus');
  await itemStorage.remove('cloudBackupStatus');
  // Repopulate both status entries now that the caches are clear
  await this.refreshBackupAndSubscriptionStatus();
}
/** True when the stored backup tier is the paid level. */
hasMediaBackups(): boolean {
  return itemStorage.get('backupTier') === BackupLevel.Paid;
}
/** Returns the locally cached cloud backup status without hitting the network. */
getCachedCloudBackupStatus(): BackupStatusType | undefined {
  return itemStorage.get('cloudBackupStatus');
}
/**
 * Prompts the user to choose a parent folder via the main process, creates
 * the LOCAL_BACKUP_DIR_NAME subdirectory inside it, and persists that path
 * in storage. Returns undefined when the dialog is canceled.
 */
async pickLocalBackupFolder(): Promise<string | undefined> {
  const dialogResult = await ipcRenderer.invoke('show-open-folder-dialog');
  const { canceled, dirPath } = dialogResult;
  if (canceled || !dirPath) {
    return undefined;
  }

  const backupsDir = join(dirPath, LOCAL_BACKUP_DIR_NAME);
  await mkdir(backupsDir, { recursive: true });
  await itemStorage.put('localBackupFolder', backupsDir);
  return backupsDir;
}
async disableLocalBackups({
deleteExistingBackups,
}: {
deleteExistingBackups: boolean;
}): Promise<void> {
const backupsBaseDir = itemStorage.get('localBackupFolder');
await Promise.all([
itemStorage.remove('lastLocalBackup'),
itemStorage.remove('localBackupFolder'),
itemStorage.remove('backupKeyViewed'),
]);
if (deleteExistingBackups) {
if (!backupsBaseDir) {
log.error('disableLocalBackups: backups dir not set');
return;
}
if (basename(backupsBaseDir) !== LOCAL_BACKUP_DIR_NAME) {
log.warn(
'disableLocalBackups: backups dir does not have expected name, bailing on deleting backups'
);
return;
}
await rm(backupsBaseDir, { force: true, recursive: true });
log.info('disableLocalBackups: deleted backups directory');
}
}
}
export const backupsService = new BackupsService();