Add initial local archive export support.

Authored by Cody Henthorne on 2024-08-13 17:01:31 -04:00
Committed by mtang-signal
parent c39a1ebdb6
commit 8eb0b2f960
31 changed files with 1474 additions and 133 deletions

.gitignore
View File

@@ -29,4 +29,4 @@ jni/libspeex/.deps/
pkcs11.password
dev.keystore
maps.key
-local/
/local/

View File

@@ -1,38 +0,0 @@
package androidx.documentfile.provider;
import android.content.Context;
import android.net.Uri;
import android.provider.DocumentsContract;
import org.signal.core.util.logging.Log;
/**
* Located in androidx package as {@link TreeDocumentFile} is package protected.
*/
public class DocumentFileHelper {
private static final String TAG = Log.tag(DocumentFileHelper.class);
/**
* System implementation swallows the exception and we are having problems with the rename. This inlines the
* same call and logs the exception. Note this implementation does not update the passed in document file like
* the system implementation. Do not use the provided document file after calling this method.
*
* @return true if rename successful
*/
public static boolean renameTo(Context context, DocumentFile documentFile, String displayName) {
if (documentFile instanceof TreeDocumentFile) {
Log.d(TAG, "Renaming document directly");
try {
final Uri result = DocumentsContract.renameDocument(context.getContentResolver(), documentFile.getUri(), displayName);
return result != null;
} catch (Exception e) {
Log.w(TAG, "Unable to rename document file", e);
return false;
}
} else {
Log.d(TAG, "Letting OS rename document: " + documentFile.getClass().getSimpleName());
return documentFile.renameTo(displayName);
}
}
}
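Note: the rename workaround removed here appears to be superseded by the new org.signal.core.util.androidx.DocumentFileUtil helpers (renameTo, outputStream, inputStream), which the ArchiveFileSystem and LocalBackupJobApi29 changes below import instead.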

View File

@@ -12,9 +12,12 @@ import kotlinx.coroutines.withContext
import org.greenrobot.eventbus.EventBus
import org.signal.core.util.Base64
import org.signal.core.util.EventTimer
import org.signal.core.util.concurrent.LimitedWorker
import org.signal.core.util.concurrent.SignalExecutors
import org.signal.core.util.fullWalCheckpoint
import org.signal.core.util.logging.Log
import org.signal.core.util.money.FiatMoney
import org.signal.core.util.stream.NonClosingOutputStream
import org.signal.core.util.withinTransaction
import org.signal.libsignal.messagebackup.MessageBackup
import org.signal.libsignal.messagebackup.MessageBackup.ValidationResult
@@ -37,6 +40,7 @@ import org.thoughtcrime.securesms.backup.v2.processor.RecipientBackupProcessor
import org.thoughtcrime.securesms.backup.v2.processor.StickerBackupProcessor
import org.thoughtcrime.securesms.backup.v2.proto.BackupInfo
import org.thoughtcrime.securesms.backup.v2.stream.BackupExportWriter
import org.thoughtcrime.securesms.backup.v2.stream.BackupImportReader
import org.thoughtcrime.securesms.backup.v2.stream.EncryptedBackupReader
import org.thoughtcrime.securesms.backup.v2.stream.EncryptedBackupWriter
import org.thoughtcrime.securesms.backup.v2.stream.PlainTextBackupReader
@@ -46,6 +50,7 @@ import org.thoughtcrime.securesms.backup.v2.ui.subscription.MessageBackupsTypeFe
import org.thoughtcrime.securesms.components.settings.app.subscription.RecurringInAppPaymentRepository
import org.thoughtcrime.securesms.crypto.AttachmentSecretProvider
import org.thoughtcrime.securesms.crypto.DatabaseSecretProvider
import org.thoughtcrime.securesms.database.AttachmentTable
import org.thoughtcrime.securesms.database.KeyValueDatabase
import org.thoughtcrime.securesms.database.SignalDatabase
import org.thoughtcrime.securesms.database.model.InAppPaymentSubscriberRecord
@@ -82,6 +87,7 @@ import java.math.BigDecimal
import java.time.ZonedDateTime
import java.util.Currency
import java.util.Locale
import java.util.concurrent.atomic.AtomicLong
import kotlin.time.Duration.Companion.milliseconds
object BackupRepository {
@@ -90,6 +96,8 @@ object BackupRepository {
private const val VERSION = 1L
private const val MAIN_DB_SNAPSHOT_NAME = "signal-snapshot.db"
private const val KEYVALUE_DB_SNAPSHOT_NAME = "key-value-snapshot.db"
private const val LOCAL_MAIN_DB_SNAPSHOT_NAME = "local-signal-snapshot.db"
private const val LOCAL_KEYVALUE_DB_SNAPSHOT_NAME = "local-key-value-snapshot.db"
private val resetInitializedStateErrorAction: StatusCodeErrorAction = { error ->
when (error.code) {
@@ -126,7 +134,7 @@ object BackupRepository {
SignalStore.backup.backupTier = null
}
-private fun createSignalDatabaseSnapshot(): SignalDatabase {
private fun createSignalDatabaseSnapshot(name: String): SignalDatabase {
// Need to do a WAL checkpoint to ensure that the database file we're copying has all pending writes
if (!SignalDatabase.rawDatabase.fullWalCheckpoint()) {
Log.w(TAG, "Failed to checkpoint WAL for main database! Not guaranteed to be using the most recent data.")
@@ -137,7 +145,7 @@ object BackupRepository {
val context = AppDependencies.application
val existingDbFile = context.getDatabasePath(SignalDatabase.DATABASE_NAME)
-val targetFile = File(existingDbFile.parentFile, MAIN_DB_SNAPSHOT_NAME)
val targetFile = File(existingDbFile.parentFile, name)
try {
existingDbFile.copyTo(targetFile, overwrite = true)
@@ -150,12 +158,12 @@ object BackupRepository {
context = context,
databaseSecret = DatabaseSecretProvider.getOrCreateDatabaseSecret(context),
attachmentSecret = AttachmentSecretProvider.getInstance(context).getOrCreateAttachmentSecret(),
-name = MAIN_DB_SNAPSHOT_NAME
name = name
)
}
}
-private fun createSignalStoreSnapshot(): SignalStore {
private fun createSignalStoreSnapshot(name: String): SignalStore {
val context = AppDependencies.application
// Need to do a WAL checkpoint to ensure that the database file we're copying has all pending writes
@@ -166,7 +174,7 @@ object BackupRepository {
// We make a copy of the database within a transaction to ensure that no writes occur while we're copying the file
return KeyValueDatabase.getInstance(context).writableDatabase.withinTransaction {
val existingDbFile = context.getDatabasePath(KeyValueDatabase.DATABASE_NAME)
-val targetFile = File(existingDbFile.parentFile, KEYVALUE_DB_SNAPSHOT_NAME)
val targetFile = File(existingDbFile.parentFile, name)
try {
existingDbFile.copyTo(targetFile, overwrite = true)
@@ -175,41 +183,98 @@ object BackupRepository {
throw IllegalStateException("Failed to copy database file!", e)
}
-val db = KeyValueDatabase.createWithName(context, KEYVALUE_DB_SNAPSHOT_NAME)
val db = KeyValueDatabase.createWithName(context, name)
SignalStore(KeyValueStore(db))
}
}
-private fun deleteDatabaseSnapshot() {
private fun deleteDatabaseSnapshot(name: String) {
-val targetFile = AppDependencies.application.getDatabasePath(MAIN_DB_SNAPSHOT_NAME)
val targetFile = AppDependencies.application.getDatabasePath(name)
if (!targetFile.delete()) {
Log.w(TAG, "Failed to delete main database snapshot!")
}
}
-private fun deleteSignalStoreSnapshot() {
private fun deleteSignalStoreSnapshot(name: String) {
-val targetFile = AppDependencies.application.getDatabasePath(KEYVALUE_DB_SNAPSHOT_NAME)
val targetFile = AppDependencies.application.getDatabasePath(name)
if (!targetFile.delete()) {
Log.w(TAG, "Failed to delete key value database snapshot!")
}
}
fun localExport(
main: OutputStream,
localBackupProgressEmitter: ExportProgressListener,
archiveAttachment: (AttachmentTable.LocalArchivableAttachment, () -> InputStream?) -> Unit
) {
val writer = EncryptedBackupWriter(
key = SignalStore.svr.getOrCreateMasterKey().deriveBackupKey(),
aci = SignalStore.account.aci!!,
outputStream = NonClosingOutputStream(main),
append = { main.write(it) }
)
export(currentTime = System.currentTimeMillis(), isLocal = true, writer = writer, progressEmitter = localBackupProgressEmitter) { dbSnapshot ->
val localArchivableAttachments = dbSnapshot
.attachmentTable
.getLocalArchivableAttachments()
.associateBy { MediaName.fromDigest(it.remoteDigest) }
localBackupProgressEmitter.onAttachment(0, localArchivableAttachments.size.toLong())
val progress = AtomicLong(0)
LimitedWorker.execute(SignalExecutors.BOUNDED_IO, 4, localArchivableAttachments.values) { attachment ->
try {
archiveAttachment(attachment) { dbSnapshot.attachmentTable.getAttachmentStream(attachment) }
} catch (e: IOException) {
Log.w(TAG, "Unable to open attachment, skipping", e)
}
val currentProgress = progress.incrementAndGet()
localBackupProgressEmitter.onAttachment(currentProgress, localArchivableAttachments.size.toLong())
}
}
}
fun export(outputStream: OutputStream, append: (ByteArray) -> Unit, plaintext: Boolean = false, currentTime: Long = System.currentTimeMillis()) {
val writer: BackupExportWriter = if (plaintext) {
PlainTextBackupWriter(outputStream)
} else {
EncryptedBackupWriter(
key = SignalStore.svr.getOrCreateMasterKey().deriveBackupKey(),
aci = SignalStore.account.aci!!,
outputStream = outputStream,
append = append
)
}
export(currentTime = currentTime, isLocal = false, writer = writer)
}
/**
* Exports to a blob in memory. Should only be used for testing.
*/
fun debugExport(plaintext: Boolean = false, currentTime: Long = System.currentTimeMillis()): ByteArray {
val outputStream = ByteArrayOutputStream()
export(outputStream = outputStream, append = { mac -> outputStream.write(mac) }, plaintext = plaintext, currentTime = currentTime)
return outputStream.toByteArray()
}
private fun export(
currentTime: Long,
isLocal: Boolean,
writer: BackupExportWriter,
progressEmitter: ExportProgressListener? = null,
exportExtras: ((SignalDatabase) -> Unit)? = null
) {
val eventTimer = EventTimer()
-val dbSnapshot: SignalDatabase = createSignalDatabaseSnapshot()
-val signalStoreSnapshot: SignalStore = createSignalStoreSnapshot()
val mainDbName = if (isLocal) LOCAL_MAIN_DB_SNAPSHOT_NAME else MAIN_DB_SNAPSHOT_NAME
val keyValueDbName = if (isLocal) LOCAL_KEYVALUE_DB_SNAPSHOT_NAME else KEYVALUE_DB_SNAPSHOT_NAME
try {
-val writer: BackupExportWriter = if (plaintext) {
-PlainTextBackupWriter(outputStream)
-} else {
-EncryptedBackupWriter(
-key = SignalStore.svr.getOrCreateMasterKey().deriveBackupKey(),
-aci = SignalStore.account.aci!!,
-outputStream = outputStream,
-append = append
-)
-}
val dbSnapshot: SignalDatabase = createSignalDatabaseSnapshot(mainDbName)
val signalStoreSnapshot: SignalStore = createSignalStoreSnapshot(keyValueDbName)
val exportState = ExportState(backupTime = currentTime, mediaBackupEnabled = SignalStore.backup.backsUpMedia)
@@ -223,31 +288,37 @@ object BackupRepository {
// We're using a snapshot, so the transaction is more for perf than correctness
dbSnapshot.rawWritableDatabase.withinTransaction {
progressEmitter?.onAccount()
AccountDataProcessor.export(dbSnapshot, signalStoreSnapshot) {
writer.write(it)
eventTimer.emit("account")
}
progressEmitter?.onRecipient()
RecipientBackupProcessor.export(dbSnapshot, signalStoreSnapshot, exportState) {
writer.write(it)
eventTimer.emit("recipient")
}
progressEmitter?.onThread()
ChatBackupProcessor.export(dbSnapshot, exportState) { frame ->
writer.write(frame)
eventTimer.emit("thread")
}
progressEmitter?.onCall()
AdHocCallBackupProcessor.export(dbSnapshot) { frame ->
writer.write(frame)
eventTimer.emit("call")
}
progressEmitter?.onSticker()
StickerBackupProcessor.export(dbSnapshot) { frame ->
writer.write(frame)
eventTimer.emit("sticker-pack")
}
progressEmitter?.onMessage()
ChatItemBackupProcessor.export(dbSnapshot, exportState) { frame ->
writer.write(frame)
eventTimer.emit("message")
@@ -255,28 +326,31 @@ object BackupRepository {
}
}
exportExtras?.invoke(dbSnapshot)
Log.d(TAG, "export() ${eventTimer.stop().summary}")
} finally {
-deleteDatabaseSnapshot()
deleteDatabaseSnapshot(mainDbName)
-deleteSignalStoreSnapshot()
deleteSignalStoreSnapshot(keyValueDbName)
}
}
-/**
-* Exports to a blob in memory. Should only be used for testing.
-*/
-fun debugExport(plaintext: Boolean = false, currentTime: Long = System.currentTimeMillis()): ByteArray {
-val outputStream = ByteArrayOutputStream()
-export(outputStream = outputStream, append = { mac -> outputStream.write(mac) }, plaintext = plaintext, currentTime = currentTime)
-return outputStream.toByteArray()
fun localImport(mainStreamFactory: () -> InputStream, mainStreamLength: Long, selfData: SelfData): ImportResult {
val backupKey = SignalStore.svr.getOrCreateMasterKey().deriveBackupKey()
val frameReader = EncryptedBackupReader(
key = backupKey,
aci = selfData.aci,
length = mainStreamLength,
dataStream = mainStreamFactory
)
return frameReader.use { reader ->
import(backupKey, reader, selfData)
}
}
/**
* @return The time the backup was created, or null if the backup could not be read.
*/
fun import(length: Long, inputStreamFactory: () -> InputStream, selfData: SelfData, plaintext: Boolean = false): ImportResult {
-val eventTimer = EventTimer()
val backupKey = SignalStore.svr.getOrCreateMasterKey().deriveBackupKey()
val frameReader = if (plaintext) {
@@ -290,6 +364,19 @@ object BackupRepository {
)
}
return frameReader.use { reader ->
import(backupKey, reader, selfData)
}
}
private fun import(
backupKey: BackupKey,
frameReader: BackupImportReader,
selfData: SelfData,
importExtras: ((EventTimer) -> Unit)? = null
): ImportResult {
val eventTimer = EventTimer()
val header = frameReader.getHeader()
if (header == null) {
Log.e(TAG, "Backup is missing header!")
@@ -364,16 +451,19 @@ object BackupRepository {
eventTimer.emit("chatItem")
}
importExtras?.invoke(eventTimer)
importState.chatIdToLocalThreadId.values.forEach {
SignalDatabase.threads.update(it, unarchive = false, allowDeletion = false)
}
}
-val groups = SignalDatabase.groups.getGroups()
SignalDatabase.groups.getGroups().use { groups ->
while (groups.hasNext()) {
val group = groups.next()
if (group.id.isV2) {
AppDependencies.jobManager.add(RequestGroupV2InfoJob(group.id as GroupId.V2))
}
}
}
@@ -949,6 +1039,16 @@ object BackupRepository {
iv = Base64.encodeWithPadding(mediaSecrets.iv)
)
}
interface ExportProgressListener {
fun onAccount()
fun onRecipient()
fun onThread()
fun onCall()
fun onSticker()
fun onMessage()
fun onAttachment(currentProgress: Long, totalCount: Long)
}
}
data class ArchivedMediaObject(val mediaId: String, val cdn: Int)
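To make the new entry point concrete, here is a minimal sketch (not part of the commit) of how a caller could drive localExport: it writes the encrypted frame stream to an in-memory buffer and does nothing per attachment, whereas the real caller, LocalArchiver.export further down in this commit, streams the frames to the snapshot's main file and copies each attachment into the shared files directory.

val main = java.io.ByteArrayOutputStream()
val listener = object : BackupRepository.ExportProgressListener {
  override fun onAccount() = Unit
  override fun onRecipient() = Unit
  override fun onThread() = Unit
  override fun onCall() = Unit
  override fun onSticker() = Unit
  override fun onMessage() = Unit
  override fun onAttachment(currentProgress: Long, totalCount: Long) = Unit
}
BackupRepository.localExport(main = main, localBackupProgressEmitter = listener) { attachment, source ->
  // attachment is an AttachmentTable.LocalArchivableAttachment; source() opens its decrypted stream
  source()?.use { stream -> /* copy the stream somewhere keyed by MediaName.fromDigest(attachment.remoteDigest) */ }
}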

View File

@@ -12,3 +12,17 @@ class BackupV2Event(val type: Type, val count: Long, val estimatedTotalCount: Lo
FINISHED
}
}
class LocalBackupV2Event(val type: Type, val count: Long = 0, val estimatedTotalCount: Long = 0) {
enum class Type {
PROGRESS_ACCOUNT,
PROGRESS_RECIPIENT,
PROGRESS_THREAD,
PROGRESS_CALL,
PROGRESS_STICKER,
PROGRESS_MESSAGE,
PROGRESS_ATTACHMENT,
PROGRESS_VERIFYING,
FINISHED
}
}

View File

@@ -422,7 +422,7 @@ class ChatItemExportIterator(private val cursor: Cursor, private val batchSize:
ChatUpdateMessage(
groupCall = GroupCall(
state = GroupCall.State.GENERIC,
-startedCallRecipientId = recipients.getByAci(ACI.from(UuidUtil.parseOrThrow(groupCallUpdateDetails.startedCallUuid))).getOrNull()?.toLong(),
startedCallRecipientId = UuidUtil.parseOrNull(groupCallUpdateDetails.startedCallUuid)?.let { recipients.getByAci(ACI.from(it)).getOrNull()?.toLong() },
startedCallTimestamp = groupCallUpdateDetails.startedCallTimestamp,
endedCallTimestamp = groupCallUpdateDetails.endedCallTimestamp
)

View File

@@ -0,0 +1,334 @@
/*
* Copyright 2024 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.thoughtcrime.securesms.backup.v2.local
import android.content.Context
import android.net.Uri
import androidx.documentfile.provider.DocumentFile
import org.signal.core.util.androidx.DocumentFileInfo
import org.signal.core.util.androidx.DocumentFileUtil.delete
import org.signal.core.util.androidx.DocumentFileUtil.hasFile
import org.signal.core.util.androidx.DocumentFileUtil.inputStream
import org.signal.core.util.androidx.DocumentFileUtil.listFiles
import org.signal.core.util.androidx.DocumentFileUtil.mkdirp
import org.signal.core.util.androidx.DocumentFileUtil.newFile
import org.signal.core.util.androidx.DocumentFileUtil.outputStream
import org.signal.core.util.androidx.DocumentFileUtil.renameTo
import org.signal.core.util.logging.Log
import org.whispersystems.signalservice.api.backup.MediaName
import java.io.File
import java.io.IOException
import java.io.InputStream
import java.io.OutputStream
import java.text.SimpleDateFormat
import java.util.Calendar
import java.util.Date
import java.util.Locale
/**
* Provides a domain-specific interface to the root file system backing a local, directory-based archive.
*/
@Suppress("JoinDeclarationAndAssignment")
class ArchiveFileSystem private constructor(private val context: Context, root: DocumentFile) {
companion object {
val TAG = Log.tag(ArchiveFileSystem::class.java)
const val BACKUP_DIRECTORY_PREFIX: String = "signal-backup"
const val TEMP_BACKUP_DIRECTORY_SUFFIX: String = "tmp"
/**
* Attempt to create an [ArchiveFileSystem] from a tree [Uri].
*
* Should likely only be called on API29+
*/
fun fromUri(context: Context, uri: Uri): ArchiveFileSystem? {
val root = DocumentFile.fromTreeUri(context, uri)
if (root == null || !root.canWrite()) {
return null
}
return ArchiveFileSystem(context, root)
}
/**
* Attempt to create an [ArchiveFileSystem] from a regular [File].
*
* Should likely only be called on < API29.
*/
fun fromFile(context: Context, backupDirectory: File): ArchiveFileSystem {
return ArchiveFileSystem(context, DocumentFile.fromFile(backupDirectory))
}
}
private val signalBackups: DocumentFile
/** File access to the shared superset of archive-related files (e.g., media and attachments). */
val filesFileSystem: FilesFileSystem
init {
signalBackups = root.mkdirp("SignalBackups") ?: throw IOException("Unable to create main backups directory")
val filesDirectory = signalBackups.mkdirp("files") ?: throw IOException("Unable to create files directory")
filesFileSystem = FilesFileSystem(context, filesDirectory)
}
/**
* Delete all folders that match the temp/in-progress backup directory naming convention. Used to clean up
* after previous catastrophic backup failures.
*/
fun deleteOldTemporaryBackups() {
for (file in signalBackups.listFiles()) {
if (file.isDirectory) {
val name = file.name
if (name != null && name.startsWith(BACKUP_DIRECTORY_PREFIX) && name.endsWith(TEMP_BACKUP_DIRECTORY_SUFFIX)) {
if (file.delete()) {
Log.w(TAG, "Deleted old temporary backup folder")
} else {
Log.w(TAG, "Could not delete old temporary backup folder")
}
}
}
}
}
/**
* Retain up to [limit] most recent backups and delete all others.
*/
fun deleteOldBackups(limit: Int = 2) {
Log.i(TAG, "Deleting old backups")
listSnapshots()
.drop(limit)
.forEach { it.file.delete() }
}
/**
* Attempt to create a [SnapshotFileSystem] to represent a single backup snapshot.
*/
fun createSnapshot(): SnapshotFileSystem? {
val timestamp = SimpleDateFormat("yyyy-MM-dd-HH-mm-ss", Locale.US).format(Date())
val snapshotDirectoryName = "${BACKUP_DIRECTORY_PREFIX}-$timestamp"
if (signalBackups.hasFile(snapshotDirectoryName)) {
Log.w(TAG, "Backup directory already exists!")
return null
}
val workingSnapshotDirectoryName = "$snapshotDirectoryName-$TEMP_BACKUP_DIRECTORY_SUFFIX"
val workingSnapshotDirectory = signalBackups.createDirectory(workingSnapshotDirectoryName) ?: return null
return SnapshotFileSystem(context, snapshotDirectoryName, workingSnapshotDirectoryName, workingSnapshotDirectory)
}
/**
* Delete an in-progress snapshot folder after a handled backup failure.
*
* @return true if the snapshot was deleted
*/
fun cleanupSnapshot(snapshotFileSystem: SnapshotFileSystem): Boolean {
check(snapshotFileSystem.workingSnapshotDirectoryName.isNotEmpty()) { "Cannot call cleanup on unnamed snapshot" }
return signalBackups.findFile(snapshotFileSystem.workingSnapshotDirectoryName)?.delete() ?: false
}
/**
* List all snapshots found in this directory sorted by creation timestamp, newest first.
*/
fun listSnapshots(): List<SnapshotInfo> {
return signalBackups
.listFiles()
.asSequence()
.filter { it.isDirectory }
.mapNotNull { f -> f.name?.let { it to f } }
.filter { (name, _) -> name.startsWith(BACKUP_DIRECTORY_PREFIX) }
.map { (name, file) ->
val timestamp = name.replace(BACKUP_DIRECTORY_PREFIX, "").toMilliseconds()
SnapshotInfo(timestamp, name, file)
}
.sortedByDescending { it.timestamp }
.toList()
}
/**
* Clean up unused files in the files directory that is shared across all current snapshots. A file
* is unused if it is not referenced directly by any current snapshot.
*/
fun deleteUnusedFiles() {
Log.i(TAG, "Deleting unused files")
val allFiles: MutableMap<String, DocumentFileInfo> = filesFileSystem.allFiles().toMutableMap()
val snapshots: List<SnapshotInfo> = listSnapshots()
snapshots
.mapNotNull { SnapshotFileSystem.filesInputStream(context, it.file) }
.forEach { input ->
ArchivedFilesReader(input).use { reader ->
reader.forEach { f -> f.mediaName?.let { allFiles.remove(it) } }
}
}
var deleted = 0
allFiles
.values
.forEach {
if (it.documentFile.delete()) {
deleted++
}
}
Log.d(TAG, "Cleanup removed $deleted/${allFiles.size} files")
}
/** Useful metadata for a given archive snapshot */
data class SnapshotInfo(val timestamp: Long, val name: String, val file: DocumentFile)
}
/**
* Domain specific file system for dealing with individual snapshot data.
*/
class SnapshotFileSystem(private val context: Context, private val snapshotDirectoryName: String, val workingSnapshotDirectoryName: String, private val root: DocumentFile) {
companion object {
const val MAIN_NAME = "main"
const val METADATA_NAME = "metadata"
const val FILES_NAME = "files"
/**
* Get the files metadata file directly for a snapshot.
*/
fun filesInputStream(context: Context, snapshotDirectory: DocumentFile): InputStream? {
return snapshotDirectory.findFile(FILES_NAME)?.inputStream(context)
}
}
/**
* Creates an unnamed snapshot file system for use in importing.
*/
constructor(context: Context, root: DocumentFile) : this(context, "", "", root)
fun mainOutputStream(): OutputStream? {
return root.newFile(MAIN_NAME)?.outputStream(context)
}
fun mainInputStream(): InputStream? {
return root.findFile(MAIN_NAME)?.inputStream(context)
}
fun mainLength(): Long? {
return root.findFile(MAIN_NAME)?.length()
}
fun metadataOutputStream(): OutputStream? {
return root.newFile(METADATA_NAME)?.outputStream(context)
}
fun metadataInputStream(): InputStream? {
return root.findFile(METADATA_NAME)?.inputStream(context)
}
fun filesOutputStream(): OutputStream? {
return root.newFile(FILES_NAME)?.outputStream(context)
}
/**
* Rename the snapshot from the working temporary name to final name.
*/
fun finalize(): Boolean {
check(snapshotDirectoryName.isNotEmpty()) { "Cannot call finalize on unnamed snapshot" }
return root.renameTo(context, snapshotDirectoryName)
}
}
/**
* Domain specific file system access for accessing backup files (e.g., attachments, media, etc.).
*/
class FilesFileSystem(private val context: Context, private val root: DocumentFile) {
private val subFolders: Map<String, DocumentFile>
init {
val existingFolders = root.listFiles()
.mapNotNull { f -> f.name?.let { name -> name to f } }
.toMap()
subFolders = (0..255)
.map { i -> i.toString(16).padStart(2, '0') }
.associateWith { name ->
existingFolders[name] ?: root.createDirectory(name)!!
}
}
/**
* Enumerate all files in the directory.
*/
fun allFiles(): Map<String, DocumentFileInfo> {
val allFiles = HashMap<String, DocumentFileInfo>()
for (subfolder in subFolders.values) {
val subFiles = subfolder.listFiles(context)
for (file in subFiles) {
allFiles[file.name] = file
}
}
return allFiles
}
/**
* Creates a new file for the given [mediaName] and returns the output stream for writing to it. The caller
* is responsible for determining if the file already exists (see [allFiles]) and deleting it (see [delete]).
*
* Calling this for a pre-existing file will likely create a second file with a modified name; the behavior is
* undefined and should be avoided.
*/
fun fileOutputStream(mediaName: MediaName): OutputStream? {
val subFileDirectoryName = mediaName.name.substring(0..1)
val subFileDirectory = subFolders[subFileDirectoryName]!!
val file = subFileDirectory.createFile("application/octet-stream", mediaName.name)
return file?.outputStream(context)
}
/**
* Given a [file], open and return an [InputStream].
*/
fun fileInputStream(file: DocumentFileInfo): InputStream? {
return file.documentFile.inputStream(context)
}
/**
* Delete a file for the given [mediaName] if it exists.
*
* @return true if deleted, false if not, null if not found
*/
fun delete(mediaName: MediaName): Boolean? {
val subFileDirectoryName = mediaName.name.substring(0..1)
val subFileDirectory = subFolders[subFileDirectoryName]!!
return subFileDirectory.delete(context, mediaName.name)
}
}
private fun String.toMilliseconds(): Long {
val parts: List<String> = split("-").dropLastWhile { it.isEmpty() }
if (parts.size == 7) {
try {
val calendar = Calendar.getInstance(Locale.US)
calendar[Calendar.YEAR] = parts[1].toInt()
calendar[Calendar.MONTH] = parts[2].toInt() - 1
calendar[Calendar.DAY_OF_MONTH] = parts[3].toInt()
calendar[Calendar.HOUR_OF_DAY] = parts[4].toInt()
calendar[Calendar.MINUTE] = parts[5].toInt()
calendar[Calendar.SECOND] = parts[6].toInt()
calendar[Calendar.MILLISECOND] = 0
return calendar.timeInMillis
} catch (e: NumberFormatException) {
Log.w(ArchiveFileSystem.TAG, "Unable to parse timestamp from file name", e)
}
}
return -1
}
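Taken together, the names used above imply an on-disk layout roughly like the following sketch (the timestamp and media name are placeholders; "main", "metadata", and "files" come from SnapshotFileSystem, the two-hex-digit shard folders from FilesFileSystem):

<backup root>/
  SignalBackups/
    files/
      00/ ... ff/                     shard folders keyed by the first two hex characters of each MediaName
        <media name>                  encrypted attachment contents
    signal-backup-2024-08-13-17-01-31/
      main                            encrypted backupv2 frame stream
      metadata                        Metadata proto (currently just a version)
      files                           length-prefixed FilesFrame protos listing the media this snapshot uses
    signal-backup-<timestamp>-tmp/    in-progress snapshot, renamed by finalize() or cleaned up later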

View File

@@ -0,0 +1,50 @@
/*
* Copyright 2024 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.thoughtcrime.securesms.backup.v2.local
import org.signal.core.util.readNBytesOrThrow
import org.signal.core.util.readVarInt32
import org.thoughtcrime.securesms.backup.v2.local.proto.FilesFrame
import java.io.EOFException
import java.io.InputStream
/**
* Reads [FilesFrame] protos encoded with their length.
*/
class ArchivedFilesReader(private val inputStream: InputStream) : Iterator<FilesFrame>, AutoCloseable {
private var next: FilesFrame? = null
init {
next = read()
}
override fun hasNext(): Boolean {
return next != null
}
override fun next(): FilesFrame {
next?.let { out ->
next = read()
return out
} ?: throw NoSuchElementException()
}
private fun read(): FilesFrame? {
try {
val length = inputStream.readVarInt32().also { if (it < 0) return null }
val frameBytes: ByteArray = inputStream.readNBytesOrThrow(length)
return FilesFrame.ADAPTER.decode(frameBytes)
} catch (e: EOFException) {
return null
}
}
override fun close() {
inputStream.close()
}
}

View File

@@ -0,0 +1,28 @@
/*
* Copyright 2024 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.thoughtcrime.securesms.backup.v2.local
import org.signal.core.util.writeVarInt32
import org.thoughtcrime.securesms.backup.v2.local.proto.FilesFrame
import java.io.IOException
import java.io.OutputStream
/**
* Write [FilesFrame] protos encoded with their length.
*/
class ArchivedFilesWriter(private val output: OutputStream) : AutoCloseable {
@Throws(IOException::class)
fun write(frame: FilesFrame) {
val bytes = frame.encode()
output.writeVarInt32(bytes.size)
output.write(bytes)
}
override fun close() {
output.close()
}
}
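Since the writer and reader share this varint length-prefix framing, a minimal in-memory round trip looks roughly like the sketch below (an illustration, assuming a FilesFrame with only its mediaName field set; the media name string is a placeholder):

import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import org.thoughtcrime.securesms.backup.v2.local.proto.FilesFrame

fun roundTripExample() {
  val out = ByteArrayOutputStream()
  ArchivedFilesWriter(out).use { writer ->
    writer.write(FilesFrame(mediaName = "example-media-name")) // varint length, then the encoded proto
  }

  ArchivedFilesReader(ByteArrayInputStream(out.toByteArray())).use { reader ->
    reader.forEach { frame -> println(frame.mediaName) } // iteration ends cleanly at EOF
  }
}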

View File

@@ -0,0 +1,177 @@
/*
* Copyright 2024 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.thoughtcrime.securesms.backup.v2.local
import org.greenrobot.eventbus.EventBus
import org.signal.core.util.Base64
import org.signal.core.util.Stopwatch
import org.signal.core.util.StreamUtil
import org.signal.core.util.logging.Log
import org.thoughtcrime.securesms.backup.v2.BackupRepository
import org.thoughtcrime.securesms.backup.v2.LocalBackupV2Event
import org.thoughtcrime.securesms.backup.v2.local.proto.FilesFrame
import org.thoughtcrime.securesms.backup.v2.local.proto.Metadata
import org.thoughtcrime.securesms.database.AttachmentTable
import org.whispersystems.signalservice.api.backup.MediaName
import org.whispersystems.signalservice.api.crypto.AttachmentCipherOutputStream
import org.whispersystems.signalservice.api.crypto.AttachmentCipherStreamUtil
import org.whispersystems.signalservice.internal.crypto.PaddingInputStream
import java.io.IOException
import java.io.InputStream
import java.io.OutputStream
import java.util.Collections
import kotlin.random.Random
typealias ArchiveResult = org.signal.core.util.Result<Unit, LocalArchiver.FailureCause>
/**
* Handles importing and exporting folder-based archives using the backupv2 format.
*/
object LocalArchiver {
private val TAG = Log.tag(LocalArchiver::class)
private const val VERSION = 1
/**
* Export archive to the provided [snapshotFileSystem] and store new files in [filesFileSystem].
*/
fun export(snapshotFileSystem: SnapshotFileSystem, filesFileSystem: FilesFileSystem, stopwatch: Stopwatch): ArchiveResult {
Log.i(TAG, "Starting export")
var metadataStream: OutputStream? = null
var mainStream: OutputStream? = null
var filesStream: OutputStream? = null
try {
metadataStream = snapshotFileSystem.metadataOutputStream() ?: return ArchiveResult.failure(FailureCause.METADATA_STREAM)
metadataStream.use { it.write(Metadata(VERSION).encode()) }
stopwatch.split("metadata")
mainStream = snapshotFileSystem.mainOutputStream() ?: return ArchiveResult.failure(FailureCause.MAIN_STREAM)
Log.i(TAG, "Listing all current files")
val allFiles = filesFileSystem.allFiles()
stopwatch.split("files-list")
val mediaNames: MutableSet<MediaName> = Collections.synchronizedSet(HashSet())
Log.i(TAG, "Starting frame export")
BackupRepository.localExport(mainStream, LocalExportProgressListener()) { attachment, source ->
val mediaName = MediaName.fromDigest(attachment.remoteDigest)
mediaNames.add(mediaName)
if (allFiles[mediaName.name]?.size != attachment.cipherLength) {
if (allFiles.containsKey(mediaName.name)) {
filesFileSystem.delete(mediaName)
}
source()?.use { sourceStream ->
val iv = Random.nextBytes(16) // todo [local-backup] but really do an iv from table
val combinedKey = Base64.decode(attachment.remoteKey)
var destination: OutputStream? = filesFileSystem.fileOutputStream(mediaName)
if (destination == null) {
Log.w(TAG, "Unable to create output file for attachment")
// todo [local-backup] should we abort here?
} else {
// todo [local-backup] but deal with attachment disappearing/deleted by normal app use
try {
PaddingInputStream(sourceStream, attachment.size).use { input ->
AttachmentCipherOutputStream(combinedKey, iv, destination).use { output ->
StreamUtil.copy(input, output)
}
}
} catch (e: IOException) {
Log.w(TAG, "Unable to save attachment", e)
// todo [local-backup] should we abort here?
}
}
}
}
}
stopwatch.split("frames-and-files")
filesStream = snapshotFileSystem.filesOutputStream() ?: return ArchiveResult.failure(FailureCause.FILES_STREAM)
ArchivedFilesWriter(filesStream).use { writer ->
mediaNames.forEach { name -> writer.write(FilesFrame(mediaName = name.name)) }
}
stopwatch.split("files-metadata")
} finally {
metadataStream?.close()
mainStream?.close()
filesStream?.close()
}
return ArchiveResult.success(Unit)
}
/**
* Import archive data from a folder on the system. Does not restore attachments.
*/
fun import(snapshotFileSystem: SnapshotFileSystem, selfData: BackupRepository.SelfData): ArchiveResult {
var metadataStream: InputStream? = null
try {
metadataStream = snapshotFileSystem.metadataInputStream() ?: return ArchiveResult.failure(FailureCause.METADATA_STREAM)
val mainStreamLength = snapshotFileSystem.mainLength() ?: return ArchiveResult.failure(FailureCause.MAIN_STREAM)
BackupRepository.localImport(
mainStreamFactory = { snapshotFileSystem.mainInputStream()!! },
mainStreamLength = mainStreamLength,
selfData = selfData
)
} finally {
metadataStream?.close()
}
return ArchiveResult.success(Unit)
}
private val AttachmentTable.LocalArchivableAttachment.cipherLength: Long
get() = AttachmentCipherStreamUtil.getCiphertextLength(PaddingInputStream.getPaddedSize(size))
enum class FailureCause {
METADATA_STREAM, MAIN_STREAM, FILES_STREAM
}
private class LocalExportProgressListener : BackupRepository.ExportProgressListener {
private var lastAttachmentUpdate: Long = 0
override fun onAccount() {
EventBus.getDefault().post(LocalBackupV2Event(LocalBackupV2Event.Type.PROGRESS_ACCOUNT))
}
override fun onRecipient() {
EventBus.getDefault().post(LocalBackupV2Event(LocalBackupV2Event.Type.PROGRESS_RECIPIENT))
}
override fun onThread() {
EventBus.getDefault().post(LocalBackupV2Event(LocalBackupV2Event.Type.PROGRESS_THREAD))
}
override fun onCall() {
EventBus.getDefault().post(LocalBackupV2Event(LocalBackupV2Event.Type.PROGRESS_CALL))
}
override fun onSticker() {
EventBus.getDefault().post(LocalBackupV2Event(LocalBackupV2Event.Type.PROGRESS_STICKER))
}
override fun onMessage() {
EventBus.getDefault().post(LocalBackupV2Event(LocalBackupV2Event.Type.PROGRESS_MESSAGE))
}
override fun onAttachment(currentProgress: Long, totalCount: Long) {
if (lastAttachmentUpdate > System.currentTimeMillis() || lastAttachmentUpdate + 1000 < System.currentTimeMillis() || currentProgress >= totalCount) {
EventBus.getDefault().post(LocalBackupV2Event(LocalBackupV2Event.Type.PROGRESS_ATTACHMENT, currentProgress, totalCount))
lastAttachmentUpdate = System.currentTimeMillis()
}
}
}
}
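One design choice worth noting in export() above: an attachment is only re-encrypted into the shared files directory when no file with its MediaName exists, or when the existing file's size differs from the expected ciphertext length (cipherLength, derived via AttachmentCipherStreamUtil from the padded plaintext size). Matching files are reused across snapshots and are simply re-listed in the snapshot's FilesFrame index.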

View File

@@ -45,7 +45,7 @@ class EncryptedBackupReader(
init {
val keyMaterial = key.deriveBackupSecrets(aci)
-validateMac(keyMaterial.macKey, length, dataStream())
dataStream().use { validateMac(keyMaterial.macKey, length, it) }
countingStream = CountingInputStream(dataStream())
val iv = countingStream.readNBytesOrThrow(16)

View File

@@ -81,6 +81,7 @@ class InternalBackupPlaygroundFragment : ComposeFragment() {
private val viewModel: InternalBackupPlaygroundViewModel by viewModels()
private lateinit var exportFileLauncher: ActivityResultLauncher<Intent>
private lateinit var importFileLauncher: ActivityResultLauncher<Intent>
private lateinit var importDirectoryLauncher: ActivityResultLauncher<Intent>
private lateinit var validateFileLauncher: ActivityResultLauncher<Intent>
override fun onCreate(savedInstanceState: Bundle?) {
@@ -107,6 +108,12 @@ class InternalBackupPlaygroundFragment : ComposeFragment() {
}
}
importDirectoryLauncher = registerForActivityResult(ActivityResultContracts.StartActivityForResult()) { result ->
if (result.resultCode == RESULT_OK) {
viewModel.import(result.data!!.data!!)
}
}
validateFileLauncher = registerForActivityResult(ActivityResultContracts.StartActivityForResult()) { result ->
if (result.resultCode == RESULT_OK) {
result.data?.data?.let { uri ->
@@ -144,6 +151,10 @@ class InternalBackupPlaygroundFragment : ComposeFragment() {
importFileLauncher.launch(intent)
},
onImportDirectoryClicked = {
val intent = Intent(Intent.ACTION_OPEN_DOCUMENT_TREE)
importDirectoryLauncher.launch(intent)
},
onPlaintextClicked = { viewModel.onPlaintextToggled() },
onSaveToDiskClicked = {
val intent = Intent().apply {
@@ -251,6 +262,7 @@ fun Screen(
onExportClicked: () -> Unit = {},
onImportMemoryClicked: () -> Unit = {},
onImportFileClicked: () -> Unit = {},
onImportDirectoryClicked: () -> Unit = {},
onPlaintextClicked: () -> Unit = {},
onSaveToDiskClicked: () -> Unit = {},
onValidateFileClicked: () -> Unit = {},
@@ -310,6 +322,11 @@ fun Screen(
) {
Text("Import from file")
}
Buttons.LargeTonal(
onClick = onImportDirectoryClicked
) {
Text("Import from directory")
}
Buttons.LargeTonal(
onClick = onValidateFileClicked

View File

@@ -5,6 +5,7 @@
package org.thoughtcrime.securesms.components.settings.app.internal.backup
import android.net.Uri
import androidx.compose.runtime.MutableState
import androidx.compose.runtime.State
import androidx.compose.runtime.mutableStateOf
@@ -21,6 +22,11 @@ import org.thoughtcrime.securesms.attachments.AttachmentId
import org.thoughtcrime.securesms.attachments.DatabaseAttachment
import org.thoughtcrime.securesms.backup.v2.BackupMetadata
import org.thoughtcrime.securesms.backup.v2.BackupRepository
import org.thoughtcrime.securesms.backup.v2.local.ArchiveFileSystem
import org.thoughtcrime.securesms.backup.v2.local.ArchiveResult
import org.thoughtcrime.securesms.backup.v2.local.LocalArchiver
import org.thoughtcrime.securesms.backup.v2.local.LocalArchiver.FailureCause
import org.thoughtcrime.securesms.backup.v2.local.SnapshotFileSystem
import org.thoughtcrime.securesms.database.MessageType
import org.thoughtcrime.securesms.database.SignalDatabase
import org.thoughtcrime.securesms.dependencies.AppDependencies
@@ -111,6 +117,27 @@ class InternalBackupPlaygroundViewModel : ViewModel() {
}
}
fun import(uri: Uri) {
_state.value = _state.value.copy(backupState = BackupState.IMPORT_IN_PROGRESS)
val self = Recipient.self()
val selfData = BackupRepository.SelfData(self.aci.get(), self.pni.get(), self.e164.get(), ProfileKey(self.profileKey))
disposables += Single.fromCallable {
val archiveFileSystem = ArchiveFileSystem.fromUri(AppDependencies.application, uri)!!
val snapshotInfo = archiveFileSystem.listSnapshots().firstOrNull() ?: return@fromCallable ArchiveResult.failure(FailureCause.MAIN_STREAM)
val snapshotFileSystem = SnapshotFileSystem(AppDependencies.application, snapshotInfo.file)
LocalArchiver.import(snapshotFileSystem, selfData)
}
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribeBy {
backupData = null
_state.value = _state.value.copy(backupState = BackupState.NONE)
}
}
fun validate(length: Long, inputStreamFactory: () -> InputStream) {
val self = Recipient.self()
val selfData = BackupRepository.SelfData(self.aci.get(), self.pni.get(), self.e164.get(), ProfileKey(self.profileKey))
@@ -218,6 +245,7 @@ class InternalBackupPlaygroundViewModel : ViewModel() {
reUploadAndArchiveMedia(result.result.mediaIdToAttachmentId(it.mediaId))
}
}
else -> _mediaState.set { copy(error = MediaStateError(errorText = "$result")) }
}
}

View File

@@ -293,6 +293,15 @@ class AttachmentTable(
} ?: throw IOException("No stream for: $attachmentId")
}
@Throws(IOException::class)
fun getAttachmentStream(localArchivableAttachment: LocalArchivableAttachment): InputStream {
return try {
getDataStream(localArchivableAttachment.file, localArchivableAttachment.random, 0)
} catch (e: FileNotFoundException) {
throw IOException("No stream for: ${localArchivableAttachment.file}", e)
} ?: throw IOException("No stream for: ${localArchivableAttachment.file}")
}
@Throws(IOException::class)
fun getAttachmentThumbnailStream(attachmentId: AttachmentId, offset: Long): InputStream {
return try {
@@ -443,6 +452,24 @@ class AttachmentTable(
.run()
}
fun getLocalArchivableAttachments(): List<LocalArchivableAttachment> {
return readableDatabase
.select(*PROJECTION)
.from(TABLE_NAME)
.where("$REMOTE_KEY IS NOT NULL AND $REMOTE_DIGEST IS NOT NULL AND $DATA_FILE IS NOT NULL")
.orderBy("$ID DESC")
.run()
.readToList {
LocalArchivableAttachment(
file = File(it.requireNonNullString(DATA_FILE)),
random = it.requireNonNullBlob(DATA_RANDOM),
size = it.requireLong(DATA_SIZE),
remoteDigest = it.requireBlob(REMOTE_DIGEST)!!,
remoteKey = it.requireBlob(REMOTE_KEY)!!
)
}
}
fun getRestorableAttachments(batchSize: Int): List<DatabaseAttachment> {
return readableDatabase
.select(*PROJECTION)
@@ -450,9 +477,29 @@ class AttachmentTable(
.where("$TRANSFER_STATE = ?", TRANSFER_NEEDS_RESTORE.toString()) .where("$TRANSFER_STATE = ?", TRANSFER_NEEDS_RESTORE.toString())
.limit(batchSize) .limit(batchSize)
.orderBy("$ID DESC") .orderBy("$ID DESC")
.run().readToList { .run()
it.readAttachments() .readToList {
}.flatten() it.readAttachment()
}
}
fun getLocalRestorableAttachments(batchSize: Int): List<LocalRestorableAttachment> {
return readableDatabase
.select(*PROJECTION)
.from(TABLE_NAME)
.where("$REMOTE_KEY IS NOT NULL AND $REMOTE_DIGEST IS NOT NULL AND $TRANSFER_STATE = ?", TRANSFER_NEEDS_RESTORE.toString())
.limit(batchSize)
.orderBy("$ID DESC")
.run()
.readToList {
LocalRestorableAttachment(
attachmentId = AttachmentId(it.requireLong(ID)),
mmsId = it.requireLong(MESSAGE_ID),
size = it.requireLong(DATA_SIZE),
remoteDigest = it.requireBlob(REMOTE_DIGEST)!!,
remoteKey = it.requireBlob(REMOTE_KEY)!!
)
}
}
fun getTotalRestorableAttachmentSize(): Long {
@@ -1635,12 +1682,16 @@ class AttachmentTable(
@Throws(FileNotFoundException::class)
private fun getDataStream(attachmentId: AttachmentId, offset: Long): InputStream? {
val dataInfo = getDataFileInfo(attachmentId) ?: return null
return getDataStream(dataInfo.file, dataInfo.random, offset)
}
@Throws(FileNotFoundException::class)
private fun getDataStream(file: File, random: ByteArray, offset: Long): InputStream? {
return try {
-if (dataInfo.random != null && dataInfo.random.size == 32) {
if (random.size == 32) {
-ModernDecryptingPartInputStream.createFor(attachmentSecret, dataInfo.random, dataInfo.file, offset)
ModernDecryptingPartInputStream.createFor(attachmentSecret, random, file, offset)
} else {
-val stream = ClassicDecryptingPartInputStream.createFor(attachmentSecret, dataInfo.file)
val stream = ClassicDecryptingPartInputStream.createFor(attachmentSecret, file)
val skipped = stream.skip(offset)
if (skipped != offset) {
Log.w(TAG, "Skip failed: $skipped vs $offset")
@@ -2353,4 +2404,20 @@ class AttachmentTable(
class SyncAttachmentId(val syncMessageId: SyncMessageId, val uuid: UUID?, val digest: ByteArray?, val plaintextHash: String?)
class SyncAttachment(val id: AttachmentId, val uuid: UUID?, val digest: ByteArray?, val plaintextHash: String?)
class LocalArchivableAttachment(
val file: File,
val random: ByteArray,
val size: Long,
val remoteDigest: ByteArray,
val remoteKey: ByteArray
)
class LocalRestorableAttachment(
val attachmentId: AttachmentId,
val mmsId: Long,
val size: Long,
val remoteDigest: ByteArray,
val remoteKey: ByteArray
)
}

View File

@@ -23,15 +23,15 @@ import org.thoughtcrime.securesms.service.KeyCachingService
import org.thoughtcrime.securesms.util.TextSecurePreferences
import java.io.File
-open class SignalDatabase(private val context: Application, databaseSecret: DatabaseSecret, attachmentSecret: AttachmentSecret, private val name: String = DATABASE_NAME) :
open class SignalDatabase(private val context: Application, databaseSecret: DatabaseSecret, attachmentSecret: AttachmentSecret, name: String = DATABASE_NAME) :
SQLiteOpenHelper(
context,
-DATABASE_NAME,
name,
databaseSecret.asString(),
null,
SignalDatabaseMigrations.DATABASE_VERSION,
0,
-SqlCipherErrorHandler(DATABASE_NAME),
SqlCipherErrorHandler(name),
SqlCipherDatabaseHook(),
true
),

View File

@@ -157,6 +157,7 @@ public final class JobManagerFactories {
put(LeaveGroupV2Job.KEY, new LeaveGroupV2Job.Factory());
put(LeaveGroupV2WorkerJob.KEY, new LeaveGroupV2WorkerJob.Factory());
put(LinkedDeviceInactiveCheckJob.KEY, new LinkedDeviceInactiveCheckJob.Factory());
put(LocalArchiveJob.KEY, new LocalArchiveJob.Factory());
put(LocalBackupJob.KEY, new LocalBackupJob.Factory());
put(LocalBackupJobApi29.KEY, new LocalBackupJobApi29.Factory());
put(MarkerJob.KEY, new MarkerJob.Factory());

View File

@@ -0,0 +1,190 @@
package org.thoughtcrime.securesms.jobs
import org.greenrobot.eventbus.EventBus
import org.greenrobot.eventbus.Subscribe
import org.greenrobot.eventbus.ThreadMode
import org.signal.core.util.Result
import org.signal.core.util.Stopwatch
import org.signal.core.util.logging.Log
import org.thoughtcrime.securesms.R
import org.thoughtcrime.securesms.backup.BackupFileIOError
import org.thoughtcrime.securesms.backup.FullBackupExporter.BackupCanceledException
import org.thoughtcrime.securesms.backup.v2.LocalBackupV2Event
import org.thoughtcrime.securesms.backup.v2.local.ArchiveFileSystem
import org.thoughtcrime.securesms.backup.v2.local.LocalArchiver
import org.thoughtcrime.securesms.backup.v2.local.SnapshotFileSystem
import org.thoughtcrime.securesms.jobmanager.Job
import org.thoughtcrime.securesms.keyvalue.SignalStore
import org.thoughtcrime.securesms.notifications.NotificationChannels
import org.thoughtcrime.securesms.service.GenericForegroundService
import org.thoughtcrime.securesms.service.NotificationController
import org.thoughtcrime.securesms.util.BackupUtil
import org.thoughtcrime.securesms.util.StorageUtil
import java.io.IOException
/**
* Local backup job for installs using the new backupv2 folder format.
*
* @see LocalBackupJob.enqueue
*/
class LocalArchiveJob internal constructor(parameters: Parameters) : Job(parameters) {
companion object {
const val KEY: String = "LocalArchiveJob"
private val TAG = Log.tag(LocalArchiveJob::class.java)
}
override fun serialize(): ByteArray? {
return null
}
override fun getFactoryKey(): String {
return KEY
}
override fun run(): Result {
Log.i(TAG, "Executing backup job...")
BackupFileIOError.clearNotification(context)
val updater = ProgressUpdater()
var notification: NotificationController? = null
try {
notification = GenericForegroundService.startForegroundTask(
context,
context.getString(R.string.LocalBackupJob_creating_signal_backup),
NotificationChannels.getInstance().BACKUPS,
R.drawable.ic_signal_backup
)
} catch (e: UnableToStartException) {
Log.w(TAG, "Unable to start foreground backup service, continuing without service")
}
try {
updater.notification = notification
EventBus.getDefault().register(updater)
notification?.setIndeterminateProgress()
val stopwatch = Stopwatch("archive-export")
val archiveFileSystem = if (BackupUtil.isUserSelectionRequired(context)) {
val backupDirectoryUri = SignalStore.settings.signalBackupDirectory
if (backupDirectoryUri == null || backupDirectoryUri.path == null) {
throw IOException("Backup Directory has not been selected!")
}
ArchiveFileSystem.fromUri(context, backupDirectoryUri)
} else {
ArchiveFileSystem.fromFile(context, StorageUtil.getOrCreateBackupV2Directory())
}
if (archiveFileSystem == null) {
BackupFileIOError.ACCESS_ERROR.postNotification(context)
Log.w(TAG, "Cannot write to backup directory location.")
return Result.failure()
}
stopwatch.split("create-fs")
archiveFileSystem.deleteOldTemporaryBackups()
stopwatch.split("delete-old")
val snapshotFileSystem: SnapshotFileSystem = archiveFileSystem.createSnapshot() ?: return Result.failure()
stopwatch.split("create-snapshot")
try {
try {
val result = LocalArchiver.export(snapshotFileSystem, archiveFileSystem.filesFileSystem, stopwatch)
Log.i(TAG, "Archive finished with result: $result")
if (result !is org.signal.core.util.Result.Success) {
return Result.failure()
}
} catch (e: Exception) {
Log.w(TAG, "Unable to create local archive", e)
return Result.failure()
}
stopwatch.split("archive-create")
// todo [local-backup] verify local backup
EventBus.getDefault().post(LocalBackupV2Event(LocalBackupV2Event.Type.PROGRESS_VERIFYING))
val valid = true
stopwatch.split("archive-verify")
if (valid) {
snapshotFileSystem.finalize()
stopwatch.split("archive-finalize")
} else {
BackupFileIOError.VERIFICATION_FAILED.postNotification(context)
}
EventBus.getDefault().post(LocalBackupV2Event(LocalBackupV2Event.Type.FINISHED))
stopwatch.stop(TAG)
} catch (e: BackupCanceledException) {
EventBus.getDefault().post(LocalBackupV2Event(LocalBackupV2Event.Type.FINISHED))
Log.w(TAG, "Archive cancelled")
throw e
} catch (e: IOException) {
Log.w(TAG, "Error during archive!", e)
EventBus.getDefault().post(LocalBackupV2Event(LocalBackupV2Event.Type.FINISHED))
BackupFileIOError.postNotificationForException(context, e)
throw e
} finally {
val cleanUpWasRequired = archiveFileSystem.cleanupSnapshot(snapshotFileSystem)
if (cleanUpWasRequired) {
Log.w(TAG, "Archive failed. Snapshot temp folder needed to be deleted")
}
}
stopwatch.split("new-archive-done")
archiveFileSystem.deleteOldBackups()
stopwatch.split("delete-old")
archiveFileSystem.deleteUnusedFiles()
stopwatch.split("delete-unused")
stopwatch.stop(TAG)
} finally {
notification?.close()
EventBus.getDefault().unregister(updater)
updater.notification = null
}
return Result.success()
}
override fun onFailure() {
}
private class ProgressUpdater {
var notification: NotificationController? = null
private var previousType: LocalBackupV2Event.Type? = null
@Subscribe(threadMode = ThreadMode.POSTING)
fun onEvent(event: LocalBackupV2Event) {
val notification = notification ?: return
if (previousType != event.type) {
notification.replaceTitle(event.type.toString()) // todo [local-backup] use actual strings
previousType = event.type
}
if (event.estimatedTotalCount == 0L) {
notification.setIndeterminateProgress()
} else {
notification.setProgress(event.estimatedTotalCount, event.count)
}
}
}
class Factory : Job.Factory<LocalArchiveJob?> {
override fun create(parameters: Parameters, serializedData: ByteArray?): LocalArchiveJob {
return LocalArchiveJob(parameters)
}
}
}

View File

@@ -65,6 +65,16 @@ public final class LocalBackupJob extends BaseJob {
}
}
public static void enqueueArchive() {
JobManager jobManager = AppDependencies.getJobManager();
Parameters.Builder parameters = new Parameters.Builder()
.setQueue(QUEUE)
.setMaxInstancesForFactory(1)
.setMaxAttempts(3);
jobManager.add(new LocalArchiveJob(parameters.build()));
}
private LocalBackupJob(@NonNull Job.Parameters parameters) {
super(parameters);
}

View File

@@ -7,19 +7,19 @@ import android.net.Uri;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.documentfile.provider.DocumentFile;
import androidx.documentfile.provider.DocumentFileHelper;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import org.signal.core.util.Stopwatch;
import org.signal.core.util.ThreadUtil;
import org.signal.core.util.logging.Log;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.backup.BackupEvent;
import org.thoughtcrime.securesms.backup.BackupFileIOError;
import org.thoughtcrime.securesms.backup.BackupPassphrase;
import org.thoughtcrime.securesms.backup.BackupVerifier;
import org.signal.core.util.androidx.DocumentFileUtil;
import org.signal.core.util.androidx.DocumentFileUtil.OperationResult;
import org.thoughtcrime.securesms.backup.FullBackupExporter;
import org.thoughtcrime.securesms.crypto.AttachmentSecretProvider;
import org.thoughtcrime.securesms.database.SignalDatabase;
@@ -36,7 +36,6 @@ import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
/**
* Backup Job for installs requiring Scoped Storage.
@@ -52,16 +51,6 @@ public final class LocalBackupJobApi29 extends BaseJob {
public static final String TEMP_BACKUP_FILE_PREFIX = ".backup";
public static final String TEMP_BACKUP_FILE_SUFFIX = ".tmp";
private static final int MAX_STORAGE_ATTEMPTS = 5;
private static final long[] WAIT_FOR_SCOPED_STORAGE = new long[] {
TimeUnit.SECONDS.toMillis(0),
TimeUnit.SECONDS.toMillis(2),
TimeUnit.SECONDS.toMillis(10),
TimeUnit.SECONDS.toMillis(20),
TimeUnit.SECONDS.toMillis(30)
};
LocalBackupJobApi29(@NonNull Parameters parameters) {
super(parameters);
}
@@ -189,43 +178,54 @@ public final class LocalBackupJobApi29 extends BaseJob {
}
private boolean verifyBackup(String backupPassword, DocumentFile temporaryFile, BackupEvent finishedEvent) throws FullBackupExporter.BackupCanceledException {
- Boolean valid = null;
- int attempts = 0;
- while (attempts < MAX_STORAGE_ATTEMPTS && valid == null && !isCanceled()) {
- ThreadUtil.sleep(WAIT_FOR_SCOPED_STORAGE[attempts]);
- try (InputStream cipherStream = context.getContentResolver().openInputStream(temporaryFile.getUri())) {
+ OperationResult result = DocumentFileUtil.retryDocumentFileOperation((attempt, maxAttempts) -> {
+ Log.i(TAG, "Verify attempt " + (attempt + 1) + "/" + maxAttempts);
+ try (InputStream cipherStream = DocumentFileUtil.inputStream(temporaryFile, context)) {
+ if (cipherStream == null) {
+ Log.w(TAG, "Found backup file but unable to open input stream");
+ return OperationResult.Retry.INSTANCE;
+ }
+ boolean valid;
try {
valid = BackupVerifier.verifyFile(cipherStream, backupPassword, finishedEvent.getCount(), this::isCanceled);
} catch (IOException e) {
Log.w(TAG, "Unable to verify backup", e);
valid = false;
}
+ return new OperationResult.Success(valid);
} catch (SecurityException | IOException e) {
- attempts++;
- Log.w(TAG, "Unable to find backup file, attempt: " + attempts + "/" + MAX_STORAGE_ATTEMPTS, e);
+ Log.w(TAG, "Unable to find backup file", e);
}
- }
+ if (isCanceled()) {
+ return new OperationResult.Success(false);
+ }
+ return OperationResult.Retry.INSTANCE;
+ });
if (isCanceled()) {
throw new FullBackupExporter.BackupCanceledException();
}
- return valid != null ? valid : false;
+ return result.isSuccess() && ((OperationResult.Success) result).getValue();
}
@SuppressLint("NewApi") @SuppressLint("NewApi")
private void renameBackup(String fileName, DocumentFile temporaryFile) throws IOException { private void renameBackup(String fileName, DocumentFile temporaryFile) throws IOException {
int attempts = 0; OperationResult result = DocumentFileUtil.retryDocumentFileOperation((attempt, maxAttempts) -> {
Log.i(TAG, "Rename attempt " + (attempt + 1) + "/" + maxAttempts);
if (DocumentFileUtil.renameTo(temporaryFile, context, fileName)) {
return new OperationResult.Success(true);
} else {
return OperationResult.Retry.INSTANCE;
}
});
while (attempts < MAX_STORAGE_ATTEMPTS && !DocumentFileHelper.renameTo(context, temporaryFile, fileName)) { if (!result.isSuccess()) {
ThreadUtil.sleep(WAIT_FOR_SCOPED_STORAGE[attempts]);
attempts++;
Log.w(TAG, "Unable to rename backup file, attempt: " + attempts + "/" + MAX_STORAGE_ATTEMPTS);
}
if (attempts >= MAX_STORAGE_ATTEMPTS) {
Log.w(TAG, "Failed to rename temp file"); Log.w(TAG, "Failed to rename temp file");
throw new IOException("Renaming temporary backup file failed!"); throw new IOException("Renaming temporary backup file failed!");
} }

View File

@@ -30,7 +30,7 @@ public final class RequestGroupV2InfoJob extends BaseJob {
/**
* Get a particular group state revision for group after message queues are drained.
*/
- public RequestGroupV2InfoJob(@NonNull GroupId.V2 groupId, int toRevision) {
+ private RequestGroupV2InfoJob(@NonNull GroupId.V2 groupId, int toRevision) {
this(new Parameters.Builder()
.setQueue("RequestGroupV2InfoSyncJob")
.addConstraint(DecryptionsDrainedConstraint.KEY)

View File

@@ -467,7 +467,7 @@ class RestoreAttachmentJob private constructor(
pointer,
thumbnailFile,
maxThumbnailSize,
true, // TODO [backup] don't ignore
progressListener
)

View File

@@ -224,7 +224,7 @@ class RestoreAttachmentThumbnailJob private constructor(
pointer,
thumbnailFile,
maxThumbnailSize,
true, // TODO [backup] don't ignore
progressListener
)

View File

@@ -46,6 +46,24 @@ public class StorageUtil {
return backups;
}
public static File getOrCreateBackupV2Directory() throws NoExternalStorageException {
File storage = Environment.getExternalStorageDirectory();
if (!storage.canWrite()) {
throw new NoExternalStorageException();
}
File backups = getBackupV2Directory();
if (!backups.exists()) {
if (!backups.mkdirs()) {
throw new NoExternalStorageException("Unable to create backup directory...");
}
}
return backups;
}
public static File getBackupDirectory() throws NoExternalStorageException {
File storage = Environment.getExternalStorageDirectory();
File signal = new File(storage, "Signal");
@@ -59,6 +77,18 @@ public class StorageUtil {
return backups;
}
public static File getBackupV2Directory() throws NoExternalStorageException {
File storage = Environment.getExternalStorageDirectory();
File backups = new File(storage, "Signal");
//noinspection ConstantConditions
if (BuildConfig.APPLICATION_ID.startsWith(PRODUCTION_PACKAGE_ID + ".")) {
backups = new File(storage, BuildConfig.APPLICATION_ID.substring(PRODUCTION_PACKAGE_ID.length() + 1));
}
return backups;
}
@RequiresApi(24)
public static @NonNull String getDisplayPath(@NonNull Context context, @NonNull Uri uri) {
String lastPathSegment = Objects.requireNonNull(uri.getLastPathSegment());

View File

@@ -0,0 +1,15 @@
syntax = "proto3";
package signal.backup.local;
option java_package = "org.thoughtcrime.securesms.backup.v2.local.proto";
message Metadata {
uint32 version = 1;
}
message FilesFrame {
oneof item {
string mediaName = 1;
}
}

View File

@@ -0,0 +1,19 @@
/*
* Copyright 2024 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.signal.core.util.stream
import java.io.FilterOutputStream
import java.io.OutputStream
/**
* Wraps a provided [OutputStream] and ignores calls to [OutputStream.close] on it, calling [OutputStream.flush] instead just in case.
* Callers must close the passed-in [wrap] stream directly themselves.
*/
class NonClosingOutputStream(wrap: OutputStream) : FilterOutputStream(wrap) {
override fun close() {
flush()
}
}
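As a quick illustration (hypothetical names, not part of this change), the wrapper lets an inner writer close its stream without closing the underlying stream, which the caller still owns:
import java.io.ByteArrayOutputStream
import org.signal.core.util.stream.NonClosingOutputStream
fun main() {
  val fileStream = ByteArrayOutputStream() // stands in for a real file/document stream
  val frameStream = NonClosingOutputStream(fileStream)
  frameStream.write(byteArrayOf(1, 2, 3))
  frameStream.close() // flushes only; fileStream stays open
  fileStream.write(byteArrayOf(4)) // caller can still append trailing data
  fileStream.close()
  println("wrote ${fileStream.size()} bytes")
}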

View File

@@ -11,6 +11,7 @@ dependencies {
api(project(":core-util-jvm")) api(project(":core-util-jvm"))
implementation(libs.androidx.sqlite) implementation(libs.androidx.sqlite)
implementation(libs.androidx.documentfile)
testImplementation(testLibs.junit.junit)
testImplementation(testLibs.mockito.core)

View File

@@ -0,0 +1,10 @@
package androidx.documentfile.provider
/**
* Located in androidx package as [TreeDocumentFile] is package protected.
*
* @return true if this can be used like a tree document file (e.g., queried directly via the content resolver)
*/
fun DocumentFile.isTreeDocumentFile(): Boolean {
return this is TreeDocumentFile
}
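For illustration only (hypothetical caller, not part of this change), the check gates whether the direct DocumentsContract queries used elsewhere are worth attempting:
import android.content.Context
import android.net.Uri
import androidx.documentfile.provider.DocumentFile
import androidx.documentfile.provider.isTreeDocumentFile
// Hypothetical helper: true when a user-picked directory supports direct content resolver queries.
fun canQueryDirectly(context: Context, pickedTreeUri: Uri): Boolean {
  val dir = DocumentFile.fromTreeUri(context, pickedTreeUri)
  return dir != null && dir.isTreeDocumentFile()
}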

View File

@@ -0,0 +1,14 @@
/*
* Copyright 2024 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.signal.core.util.androidx
import androidx.documentfile.provider.DocumentFile
/**
* Information about a file within storage. Useful because the default [DocumentFile] implementations
* re-query info on each access.
*/
data class DocumentFileInfo(val documentFile: DocumentFile, val name: String, val size: Long)

View File

@@ -0,0 +1,222 @@
/*
* Copyright 2024 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.signal.core.util.androidx
import android.content.Context
import android.provider.DocumentsContract
import androidx.documentfile.provider.DocumentFile
import androidx.documentfile.provider.isTreeDocumentFile
import org.signal.core.util.ThreadUtil
import org.signal.core.util.logging.Log
import org.signal.core.util.requireLong
import org.signal.core.util.requireNonNullString
import org.signal.core.util.requireString
import java.io.InputStream
import java.io.OutputStream
import kotlin.time.Duration.Companion.seconds
/**
* Collection of helper and optimized operations for working with [DocumentFile]s.
*/
object DocumentFileUtil {
private val TAG = Log.tag(DocumentFileUtil::class)
private val FILE_PROJECTION = arrayOf(DocumentsContract.Document.COLUMN_DOCUMENT_ID, DocumentsContract.Document.COLUMN_DISPLAY_NAME, DocumentsContract.Document.COLUMN_SIZE)
private const val FILE_SELECTION = "${DocumentsContract.Document.COLUMN_DISPLAY_NAME} = ?"
private const val LIST_FILES_SELECTION = "${DocumentsContract.Document.COLUMN_MIME_TYPE} != ?"
private val LIST_FILES_SELECTION_ARS = arrayOf(DocumentsContract.Document.MIME_TYPE_DIR)
private const val MAX_STORAGE_ATTEMPTS: Int = 5
private val WAIT_FOR_SCOPED_STORAGE: LongArray = longArrayOf(0, 2.seconds.inWholeMilliseconds, 10.seconds.inWholeMilliseconds, 20.seconds.inWholeMilliseconds, 30.seconds.inWholeMilliseconds)
/** Returns true if the directory represented by the [DocumentFile] has a child with [name]. */
fun DocumentFile.hasFile(name: String): Boolean {
return findFile(name) != null
}
/** Returns the [DocumentFile] for a newly created binary file, or null if creation fails or the file already exists */
fun DocumentFile.newFile(name: String): DocumentFile? {
return if (hasFile(name)) {
Log.w(TAG, "Attempt to create new file ($name) but it already exists")
null
} else {
createFile("application/octet-stream", name)
}
}
/** Returns a [DocumentFile] for the directory named [name], creating it if it doesn't already exist */
fun DocumentFile.mkdirp(name: String): DocumentFile? {
return findFile(name) ?: createDirectory(name)
}
/** Open an [OutputStream] to the file represented by the [DocumentFile] */
fun DocumentFile.outputStream(context: Context): OutputStream? {
return context.contentResolver.openOutputStream(uri)
}
/** Open an [InputStream] to the file represented by the [DocumentFile] */
@JvmStatic
fun DocumentFile.inputStream(context: Context): InputStream? {
return context.contentResolver.openInputStream(uri)
}
/**
* Will attempt to find the named [file] in this directory and delete it if found.
*
* @return true if found and deleted, false if the file couldn't be deleted, and null if not found
*/
fun DocumentFile.delete(context: Context, file: String): Boolean? {
return findFile(context, file)?.documentFile?.delete()
}
/**
* Will attempt to find the named [fileName] in this directory and return useful information if found, using
* a single [Context.getContentResolver] query.
*
* Recommend using this over [DocumentFile.findFile] to prevent excess queries for all file names and sizes.
*
* If direct queries fail to find the file, this will fall back to using [DocumentFile.findFile].
*/
fun DocumentFile.findFile(context: Context, fileName: String): DocumentFileInfo? {
val child = if (isTreeDocumentFile()) {
val childrenUri = DocumentsContract.buildChildDocumentsUriUsingTree(uri, DocumentsContract.getDocumentId(uri))
try {
context
.contentResolver
.query(childrenUri, FILE_PROJECTION, FILE_SELECTION, arrayOf(fileName), null)
?.use { cursor ->
if (cursor.count == 1) {
cursor.moveToFirst()
val uri = DocumentsContract.buildDocumentUriUsingTree(uri, cursor.requireString(DocumentsContract.Document.COLUMN_DOCUMENT_ID))
val displayName = cursor.requireNonNullString(DocumentsContract.Document.COLUMN_DISPLAY_NAME)
val length = cursor.requireLong(DocumentsContract.Document.COLUMN_SIZE)
DocumentFileInfo(DocumentFile.fromSingleUri(context, uri)!!, displayName, length)
} else {
val message = if (cursor.count > 1) "Multiple files" else "No files"
Log.w(TAG, "$message returned with same name")
null
}
}
} catch (e: Exception) {
Log.d(TAG, "Unable to find file directly on ${javaClass.simpleName}, falling back to OS", e)
null
}
} else {
null
}
return child ?: this.findFile(fileName)?.let { DocumentFileInfo(it, it.name!!, it.length()) }
}
/**
* List file names and sizes in the [DocumentFile] by directly querying the content resolver ourselves. The system
* implementation makes a separate query for each name and length method call, which gets expensive over thousands of files.
*
* Will fall back to the provided document file's implementation of [DocumentFile.listFiles] if unable to do it directly.
*/
fun DocumentFile.listFiles(context: Context): List<DocumentFileInfo> {
if (isTreeDocumentFile()) {
val childrenUri = DocumentsContract.buildChildDocumentsUriUsingTree(uri, DocumentsContract.getDocumentId(uri))
try {
val results = context
.contentResolver
.query(childrenUri, FILE_PROJECTION, LIST_FILES_SELECTION, LIST_FILES_SELECTION_ARS, null)
?.use { cursor ->
val results = ArrayList<DocumentFileInfo>(cursor.count)
while (cursor.moveToNext()) {
val uri = DocumentsContract.buildDocumentUriUsingTree(uri, cursor.requireString(DocumentsContract.Document.COLUMN_DOCUMENT_ID))
val displayName = cursor.requireString(DocumentsContract.Document.COLUMN_DISPLAY_NAME)
val length = cursor.requireLong(DocumentsContract.Document.COLUMN_SIZE)
if (displayName != null) {
results.add(DocumentFileInfo(DocumentFile.fromSingleUri(context, uri)!!, displayName, length))
}
}
results
}
if (results != null) {
return results
} else {
Log.w(TAG, "Content provider returned null for query on ${javaClass.simpleName}, falling back to OS")
}
} catch (e: Exception) {
Log.d(TAG, "Unable to query files directly on ${javaClass.simpleName}, falling back to OS", e)
}
}
return listFiles()
.asSequence()
.filter { it.isFile }
.mapNotNull { file -> file.name?.let { DocumentFileInfo(file, it, file.length()) } }
.toList()
}
/**
* The system implementation swallows the exception, and we have been having problems with renames. This inlines the
* same call and logs the exception. Note that this implementation does not update the passed-in document file like
* the system implementation does. Do not use the provided document file after calling this method.
*
* @return true if rename successful
*/
@JvmStatic
fun DocumentFile.renameTo(context: Context, displayName: String): Boolean {
if (isTreeDocumentFile()) {
Log.d(TAG, "Renaming document directly")
try {
val result = DocumentsContract.renameDocument(context.contentResolver, uri, displayName)
return result != null
} catch (e: Exception) {
Log.w(TAG, "Unable to rename document file, falling back to OS", e)
return renameTo(displayName)
}
} else {
return renameTo(displayName)
}
}
/**
* Historically, we've seen issues with [DocumentFile] operations not working on the first try. This
* retry loop will retry those operations with a varying backoff in an attempt to make them work.
*/
@JvmStatic
fun <T> retryDocumentFileOperation(operation: DocumentFileOperation<T>): OperationResult {
var attempts = 0
var operationResult = operation.operation(attempts, MAX_STORAGE_ATTEMPTS)
while (attempts < MAX_STORAGE_ATTEMPTS && !operationResult.isSuccess()) {
ThreadUtil.sleep(WAIT_FOR_SCOPED_STORAGE[attempts])
attempts++
operationResult = operation.operation(attempts, MAX_STORAGE_ATTEMPTS)
}
return operationResult
}
/** Operation to perform in a retry loop via [retryDocumentFileOperation] that could fail based on timing */
fun interface DocumentFileOperation<T> {
fun operation(attempt: Int, maxAttempts: Int): OperationResult
}
/** Result of a single operation in a retry loop via [retryDocumentFileOperation] */
sealed interface OperationResult {
fun isSuccess(): Boolean {
return this is Success
}
/** The operation completed successfully */
data class Success(val value: Boolean) : OperationResult
/** Retry the operation */
data object Retry : OperationResult
}
}
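A minimal usage sketch (hypothetical file name and caller, not part of this change): locate a file with a single resolver query, then probe it inside the shared retry loop. The extension functions are members of the object, so a `with(DocumentFileUtil)` block brings them into scope.
import android.content.Context
import androidx.documentfile.provider.DocumentFile
import org.signal.core.util.androidx.DocumentFileUtil
import org.signal.core.util.logging.Log
// Hypothetical caller: `backupDir` is a directory tree the user granted access to.
fun hasReadableBackup(context: Context, backupDir: DocumentFile): Boolean = with(DocumentFileUtil) {
  val info = backupDir.findFile(context, "signal-backup.bin") ?: return@with false // one resolver query
  val result = retryDocumentFileOperation<Boolean> { attempt, maxAttempts ->
    Log.d("Example", "Open attempt ${attempt + 1}/$maxAttempts for ${info.name} (${info.size} bytes)")
    info.documentFile.inputStream(context)?.use { DocumentFileUtil.OperationResult.Success(true) }
      ?: DocumentFileUtil.OperationResult.Retry
  }
  result.isSuccess()
}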

View File

@@ -0,0 +1,36 @@
/*
* Copyright 2024 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.signal.core.util.concurrent
import java.util.concurrent.CountDownLatch
import java.util.concurrent.ExecutorService
import java.util.concurrent.Semaphore
object LimitedWorker {
/**
* Call [worker] on a thread from [executor] for each element in [input] using only up to [maxThreads] concurrently.
*
* This method will block until all work is completed. There is no guarantee about which threads
* will be used, only that at most [maxThreads] will be actively doing work at any time.
*/
@JvmStatic
fun <T> execute(executor: ExecutorService, maxThreads: Int, input: Collection<T>, worker: (T) -> Unit) {
val doneWorkLatch = CountDownLatch(input.size)
val semaphore = Semaphore(maxThreads)
for (work in input) {
semaphore.acquire()
executor.execute {
worker(work)
semaphore.release()
doneWorkLatch.countDown()
}
}
doneWorkLatch.await()
}
}
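A small usage sketch (hypothetical work, not part of this change). Note that the worker is expected not to throw, since the semaphore and latch are only released after it returns:
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.Executors
import org.signal.core.util.concurrent.LimitedWorker
fun main() {
  val executor = Executors.newCachedThreadPool()
  val inputs = (1..100).toList()
  val results = ConcurrentHashMap<Int, Int>()
  // At most 4 items are processed concurrently; execute() blocks until all 100 finish.
  LimitedWorker.execute(executor, maxThreads = 4, input = inputs) { item ->
    results[item] = item * item // stand-in for real work, e.g. hashing one attachment file
  }
  println("Processed ${results.size} items")
  executor.shutdown()
}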

View File

@@ -98,6 +98,7 @@ dependencyResolutionManagement {
library("androidx-asynclayoutinflater", "androidx.asynclayoutinflater:asynclayoutinflater:1.1.0-alpha01") library("androidx-asynclayoutinflater", "androidx.asynclayoutinflater:asynclayoutinflater:1.1.0-alpha01")
library("androidx-asynclayoutinflater-appcompat", "androidx.asynclayoutinflater:asynclayoutinflater-appcompat:1.1.0-alpha01") library("androidx-asynclayoutinflater-appcompat", "androidx.asynclayoutinflater:asynclayoutinflater-appcompat:1.1.0-alpha01")
library("androidx-emoji2", "androidx.emoji2:emoji2:1.4.0") library("androidx-emoji2", "androidx.emoji2:emoji2:1.4.0")
library("androidx-documentfile", "androidx.documentfile:documentfile:1.0.0")
// Material
library("material-material", "com.google.android.material:material:1.8.0")

View File

@@ -25,6 +25,7 @@ import java.security.InvalidKeyException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.function.Supplier;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
@@ -71,32 +72,42 @@ public class AttachmentCipherInputStream extends FilterInputStream {
*/
public static InputStream createForAttachment(File file, long plaintextLength, byte[] combinedKeyMaterial, byte[] digest, byte[] incrementalDigest, int incrementalMacChunkSize, boolean ignoreDigest)
throws InvalidMessageException, IOException
+ {
+ return createForAttachment(() -> new FileInputStream(file), file.length(), plaintextLength, combinedKeyMaterial, digest, incrementalDigest, incrementalMacChunkSize, ignoreDigest);
+ }
+ /**
+ * Passing in a null incrementalDigest and/or 0 for the chunk size at the call site disables incremental mac validation.
+ *
+ * Passing in true for ignoreDigest DOES NOT VERIFY THE DIGEST
+ */
+ public static InputStream createForAttachment(StreamSupplier streamSupplier, long streamLength, long plaintextLength, byte[] combinedKeyMaterial, byte[] digest, byte[] incrementalDigest, int incrementalMacChunkSize, boolean ignoreDigest)
+ throws InvalidMessageException, IOException
{
byte[][] parts = Util.split(combinedKeyMaterial, CIPHER_KEY_SIZE, MAC_KEY_SIZE);
Mac mac = initMac(parts[1]);
- if (file.length() <= BLOCK_SIZE + mac.getMacLength()) {
- throw new InvalidMessageException("Message shorter than crypto overhead!");
+ if (streamLength <= BLOCK_SIZE + mac.getMacLength()) {
+ throw new InvalidMessageException("Message shorter than crypto overhead! length: " + streamLength);
}
if (!ignoreDigest && digest == null) {
throw new InvalidMessageException("Missing digest!");
}
final InputStream wrappedStream;
final boolean hasIncrementalMac = incrementalDigest != null && incrementalDigest.length > 0 && incrementalMacChunkSize > 0;
if (!hasIncrementalMac) {
- try (FileInputStream macVerificationStream = new FileInputStream(file)) {
- verifyMac(macVerificationStream, file.length(), mac, digest);
+ try (InputStream macVerificationStream = streamSupplier.openStream()) {
+ verifyMac(macVerificationStream, streamLength, mac, digest);
}
- wrappedStream = new FileInputStream(file);
+ wrappedStream = streamSupplier.openStream();
} else {
wrappedStream = new IncrementalMacInputStream(
new IncrementalMacAdditionalValidationsInputStream(
- new FileInputStream(file),
- file.length(),
+ streamSupplier.openStream(),
+ streamLength,
mac,
digest
),
@@ -104,7 +115,7 @@ public class AttachmentCipherInputStream extends FilterInputStream {
ChunkSizeChoice.everyNthByte(incrementalMacChunkSize),
incrementalDigest);
}
- InputStream inputStream = new AttachmentCipherInputStream(wrappedStream, parts[0], file.length() - BLOCK_SIZE - mac.getMacLength());
+ InputStream inputStream = new AttachmentCipherInputStream(wrappedStream, parts[0], streamLength - BLOCK_SIZE - mac.getMacLength());
if (plaintextLength != 0) {
inputStream = new ContentLengthInputStream(inputStream, plaintextLength);
@@ -381,4 +392,8 @@ public class AttachmentCipherInputStream extends FilterInputStream {
}
}
}
public interface StreamSupplier {
@Nonnull InputStream openStream() throws IOException;
}
}
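A hedged sketch of calling the new StreamSupplier overload from Kotlin with a non-File source such as a SAF document; the Uri, lengths, and key material below are placeholders, not values from this change (import of AttachmentCipherInputStream omitted):
import android.content.Context
import android.net.Uri
import java.io.IOException
import java.io.InputStream
// Hypothetical caller: decrypt an attachment that lives behind a content Uri rather than a File.
fun openDecryptedAttachment(
  context: Context,
  uri: Uri,
  ciphertextLength: Long,
  plaintextLength: Long,
  combinedKeyMaterial: ByteArray,
  digest: ByteArray
): InputStream {
  return AttachmentCipherInputStream.createForAttachment(
    { context.contentResolver.openInputStream(uri) ?: throw IOException("Unable to open $uri") },
    ciphertextLength,
    plaintextLength,
    combinedKeyMaterial,
    digest,
    null, // no incremental digest: incremental mac validation is disabled
    0,    // chunk size is ignored when incremental mac is disabled
    false // do not ignore the digest; it will be verified
  )
}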