Do most of the proto and database groundwork for the new mediaName.

This commit is contained in:
Greyson Parrelli
2025-06-20 11:47:54 -04:00
committed by Cody Henthorne
parent e705495638
commit 38c8f852bf
431 changed files with 600 additions and 781 deletions

View File

@@ -23,13 +23,7 @@ class ArchivedAttachment : Attachment {
val archiveCdn: Int?
@JvmField
val archiveMediaName: String
@JvmField
val archiveMediaId: String
@JvmField
val archiveThumbnailMediaId: String
val plaintextHash: ByteArray
constructor(
contentType: String?,
@@ -37,13 +31,9 @@ class ArchivedAttachment : Attachment {
cdn: Int,
uploadTimestamp: Long?,
key: ByteArray,
iv: ByteArray?,
cdnKey: String?,
archiveCdn: Int?,
archiveMediaName: String,
archiveMediaId: String,
archiveThumbnailMediaId: String,
digest: ByteArray,
plaintextHash: ByteArray,
incrementalMac: ByteArray?,
incrementalMacChunkSize: Int?,
width: Int?,
@@ -66,8 +56,7 @@ class ArchivedAttachment : Attachment {
cdn = Cdn.fromCdnNumber(cdn),
remoteLocation = cdnKey,
remoteKey = Base64.encodeWithoutPadding(key),
remoteIv = iv,
remoteDigest = digest,
remoteDigest = null,
incrementalDigest = incrementalMac,
fastPreflightId = null,
voiceNote = voiceNote,
@@ -85,24 +74,18 @@ class ArchivedAttachment : Attachment {
uuid = uuid
) {
this.archiveCdn = archiveCdn
this.archiveMediaName = archiveMediaName
this.archiveMediaId = archiveMediaId
this.archiveThumbnailMediaId = archiveThumbnailMediaId
this.plaintextHash = plaintextHash
}
constructor(parcel: Parcel) : super(parcel) {
archiveCdn = parcel.readInt().takeIf { it != NO_ARCHIVE_CDN }
archiveMediaName = parcel.readString()!!
archiveMediaId = parcel.readString()!!
archiveThumbnailMediaId = parcel.readString()!!
plaintextHash = parcel.createByteArray()!!
}
override fun writeToParcel(dest: Parcel, flags: Int) {
super.writeToParcel(dest, flags)
dest.writeInt(archiveCdn ?: NO_ARCHIVE_CDN)
dest.writeString(archiveMediaName)
dest.writeString(archiveMediaId)
dest.writeString(archiveThumbnailMediaId)
dest.writeByteArray(plaintextHash)
}
override val uri: Uri? = null

View File

@@ -39,8 +39,6 @@ abstract class Attachment(
@JvmField
val remoteKey: String?,
@JvmField
val remoteIv: ByteArray?,
@JvmField
val remoteDigest: ByteArray?,
@JvmField
val incrementalDigest: ByteArray?,
@@ -90,7 +88,6 @@ abstract class Attachment(
cdn = Cdn.deserialize(parcel.readInt()),
remoteLocation = parcel.readString(),
remoteKey = parcel.readString(),
remoteIv = ParcelUtil.readByteArray(parcel),
remoteDigest = ParcelUtil.readByteArray(parcel),
incrementalDigest = ParcelUtil.readByteArray(parcel),
fastPreflightId = parcel.readString(),

View File

@@ -54,7 +54,6 @@ class DatabaseAttachment : Attachment {
cdn: Cdn,
location: String?,
key: String?,
iv: ByteArray?,
digest: ByteArray?,
incrementalDigest: ByteArray?,
incrementalMacChunkSize: Int,
@@ -85,7 +84,6 @@ class DatabaseAttachment : Attachment {
cdn = cdn,
remoteLocation = location,
remoteKey = key,
remoteIv = iv,
remoteDigest = digest,
incrementalDigest = incrementalDigest,
fastPreflightId = fastPreflightId,

View File

@@ -47,7 +47,6 @@ class PointerAttachment : Attachment {
cdn = cdn,
remoteLocation = location,
remoteKey = key,
remoteIv = iv,
remoteDigest = digest,
incrementalDigest = incrementalDigest,
fastPreflightId = fastPreflightId,

View File

@@ -23,7 +23,6 @@ class TombstoneAttachment : Attachment {
cdn = Cdn.CDN_0,
remoteLocation = null,
remoteKey = null,
remoteIv = null,
remoteDigest = null,
incrementalDigest = null,
fastPreflightId = null,
@@ -66,7 +65,6 @@ class TombstoneAttachment : Attachment {
cdn = Cdn.CDN_0,
remoteLocation = null,
remoteKey = null,
remoteIv = null,
remoteDigest = null,
incrementalDigest = incrementalMac,
fastPreflightId = null,

View File

@@ -75,7 +75,6 @@ class UriAttachment : Attachment {
cdn = Cdn.CDN_0,
remoteLocation = null,
remoteKey = null,
remoteIv = null,
remoteDigest = null,
incrementalDigest = null,
fastPreflightId = fastPreflightId,

View File

@@ -20,7 +20,6 @@ class WallpaperAttachment() : Attachment(
cdn = Cdn.CDN_0,
remoteLocation = null,
remoteKey = null,
remoteIv = null,
remoteDigest = null,
incrementalDigest = null,
fastPreflightId = null,

View File

@@ -22,7 +22,6 @@ import org.thoughtcrime.securesms.database.SignalDatabase
import org.thoughtcrime.securesms.dependencies.AppDependencies
import org.thoughtcrime.securesms.jobs.ArchiveCommitAttachmentDeletesJob
import org.thoughtcrime.securesms.jobs.ArchiveThumbnailUploadJob
import org.thoughtcrime.securesms.jobs.BackfillDigestJob
import org.thoughtcrime.securesms.jobs.UploadAttachmentToArchiveJob
import org.thoughtcrime.securesms.keyvalue.SignalStore
import org.thoughtcrime.securesms.keyvalue.protos.ArchiveUploadProgressState
@@ -109,7 +108,6 @@ object ArchiveUploadProgress {
)
}
AppDependencies.jobManager.cancelAllInQueue(BackfillDigestJob.QUEUE)
AppDependencies.jobManager.cancelAllInQueue(ArchiveCommitAttachmentDeletesJob.ARCHIVE_ATTACHMENT_QUEUE)
UploadAttachmentToArchiveJob.getAllQueueKeys().forEach {
AppDependencies.jobManager.cancelAllInQueue(it)
@@ -126,7 +124,7 @@ object ArchiveUploadProgress {
Log.d(TAG, "Flushing job manager queue...")
AppDependencies.jobManager.flush()
val queues = setOf(BackfillDigestJob.QUEUE, ArchiveThumbnailUploadJob.KEY, ArchiveCommitAttachmentDeletesJob.ARCHIVE_ATTACHMENT_QUEUE) + UploadAttachmentToArchiveJob.getAllQueueKeys()
val queues = setOf(ArchiveThumbnailUploadJob.KEY, ArchiveCommitAttachmentDeletesJob.ARCHIVE_ATTACHMENT_QUEUE) + UploadAttachmentToArchiveJob.getAllQueueKeys()
Log.d(TAG, "Waiting for cancelations to occur...")
while (!AppDependencies.jobManager.areQueuesEmpty(queues)) {
delay(1.seconds)

View File

@@ -691,7 +691,7 @@ object BackupRepository {
val localArchivableAttachments = dbSnapshot
.attachmentTable
.getLocalArchivableAttachments()
.associateBy { MediaName.fromDigest(it.remoteDigest) }
.associateBy { MediaName.fromPlaintextHashAndRemoteKey(it.plaintextHash, it.remoteKey) }
localBackupProgressEmitter.onAttachment(0, localArchivableAttachments.size.toLong())
@@ -1965,13 +1965,14 @@ class ArchiveMediaItemIterator(private val cursor: Cursor) : Iterator<ArchiveMed
override fun hasNext(): Boolean = !cursor.isAfterLast
override fun next(): ArchiveMediaItem {
val digest = cursor.requireNonNullBlob(AttachmentTable.REMOTE_DIGEST)
val plaintextHash = cursor.requireNonNullBlob(AttachmentTable.DATA_HASH_END)
val remoteKey = cursor.requireNonNullBlob(AttachmentTable.REMOTE_KEY)
val cdn = cursor.requireIntOrNull(AttachmentTable.ARCHIVE_CDN)
val mediaId = MediaName.fromDigest(digest).toMediaId(SignalStore.backup.mediaRootBackupKey).encode()
val thumbnailMediaId = MediaName.fromDigestForThumbnail(digest).toMediaId(SignalStore.backup.mediaRootBackupKey).encode()
val mediaId = MediaName.fromPlaintextHashAndRemoteKey(plaintextHash, remoteKey).toMediaId(SignalStore.backup.mediaRootBackupKey).encode()
val thumbnailMediaId = MediaName.fromPlaintextHashAndRemoteKeyForThumbnail(plaintextHash, remoteKey).toMediaId(SignalStore.backup.mediaRootBackupKey).encode()
cursor.moveToNext()
return ArchiveMediaItem(mediaId, thumbnailMediaId, cdn, digest)
return ArchiveMediaItem(mediaId, thumbnailMediaId, cdn, plaintextHash, remoteKey)
}
}

View File

@@ -7,6 +7,8 @@ package org.thoughtcrime.securesms.backup.v2
import android.text.TextUtils
import org.signal.core.util.Base64
import org.signal.core.util.Base64.decodeBase64
import org.signal.core.util.Base64.decodeBase64OrThrow
import org.thoughtcrime.securesms.attachments.DatabaseAttachment
import org.thoughtcrime.securesms.attachments.InvalidAttachmentException
import org.thoughtcrime.securesms.database.AttachmentTable
@@ -22,8 +24,8 @@ import java.util.Optional
object DatabaseAttachmentArchiveUtil {
@JvmStatic
fun requireMediaName(attachment: DatabaseAttachment): MediaName {
require(isDigestValidated(attachment))
return MediaName.fromDigest(attachment.remoteDigest!!)
require(hadIntegrityCheckPerformed(attachment))
return MediaName.fromPlaintextHashAndRemoteKey(attachment.dataHash!!.decodeBase64OrThrow(), attachment.remoteKey!!.decodeBase64OrThrow())
}
/**
@@ -31,14 +33,21 @@ object DatabaseAttachmentArchiveUtil {
*/
@JvmStatic
fun requireMediaNameAsString(attachment: DatabaseAttachment): String {
require(isDigestValidated(attachment))
return MediaName.fromDigest(attachment.remoteDigest!!).name
require(hadIntegrityCheckPerformed(attachment))
return MediaName.fromPlaintextHashAndRemoteKey(attachment.dataHash!!.decodeBase64OrThrow(), attachment.remoteKey!!.decodeBase64OrThrow()).name
}
@JvmStatic
fun getMediaName(attachment: DatabaseAttachment): MediaName? {
return if (isDigestValidated(attachment)) {
attachment.remoteDigest?.let { MediaName.fromDigest(it) }
return if (hadIntegrityCheckPerformed(attachment)) {
val plaintextHash = attachment.dataHash.decodeBase64()
val remoteKey = attachment.remoteKey?.decodeBase64()
if (plaintextHash != null && remoteKey != null) {
MediaName.fromPlaintextHashAndRemoteKey(plaintextHash, remoteKey)
} else {
null
}
} else {
null
}
@@ -46,11 +55,11 @@ object DatabaseAttachmentArchiveUtil {
@JvmStatic
fun requireThumbnailMediaName(attachment: DatabaseAttachment): MediaName {
require(isDigestValidated(attachment))
return MediaName.fromDigestForThumbnail(attachment.remoteDigest!!)
require(hadIntegrityCheckPerformed(attachment))
return MediaName.fromPlaintextHashAndRemoteKeyForThumbnail(attachment.dataHash!!.decodeBase64OrThrow(), attachment.remoteKey!!.decodeBase64OrThrow())
}
private fun isDigestValidated(attachment: DatabaseAttachment): Boolean {
private fun hadIntegrityCheckPerformed(attachment: DatabaseAttachment): Boolean {
return when (attachment.transferState) {
AttachmentTable.TRANSFER_PROGRESS_DONE,
AttachmentTable.TRANSFER_NEEDS_RESTORE,

View File

@@ -63,7 +63,7 @@ object LocalArchiver {
return@localExport
}
val mediaName = MediaName.fromDigest(attachment.remoteDigest)
val mediaName = MediaName.fromPlaintextHashAndRemoteKey(attachment.plaintextHash, attachment.remoteKey)
mediaNames.add(mediaName)
@@ -73,7 +73,6 @@ object LocalArchiver {
}
source()?.use { sourceStream ->
val iv = attachment.remoteIv
val combinedKey = Base64.decode(attachment.remoteKey)
val destination: OutputStream? = filesFileSystem.fileOutputStream(mediaName)
@@ -84,7 +83,7 @@ object LocalArchiver {
// todo [local-backup] but deal with attachment disappearing/deleted by normal app use
try {
PaddingInputStream(sourceStream, attachment.size).use { input ->
AttachmentCipherOutputStream(combinedKey, iv, destination).use { output ->
AttachmentCipherOutputStream(combinedKey, null, destination).use { output ->
StreamUtil.copy(input, output)
}
}

View File

@@ -24,7 +24,6 @@ import org.thoughtcrime.securesms.backup.v2.proto.FilePointer
import org.thoughtcrime.securesms.conversation.colors.AvatarColor
import org.thoughtcrime.securesms.database.AttachmentTable
import org.thoughtcrime.securesms.stickers.StickerLocator
import org.whispersystems.signalservice.api.backup.MediaName
import org.whispersystems.signalservice.api.messages.SignalServiceAttachmentPointer
import org.whispersystems.signalservice.api.messages.SignalServiceAttachmentRemoteId
import org.whispersystems.signalservice.api.util.UuidUtil
@@ -48,81 +47,84 @@ fun FilePointer?.toLocalAttachment(
): Attachment? {
if (this == null || this.locatorInfo == null) return null
val hasMediaName = this.locatorInfo.mediaName.isNotEmpty()
val hasTransitInfo = this.locatorInfo.transitCdnKey != null
val attachmentType = when {
this.locatorInfo.plaintextHash != null -> AttachmentType.ARCHIVE
this.locatorInfo.encryptedDigest != null && this.locatorInfo.transitCdnKey != null -> AttachmentType.TRANSIT
else -> AttachmentType.INVALID
}
if (hasTransitInfo && !hasMediaName) {
val signalAttachmentPointer = SignalServiceAttachmentPointer(
cdnNumber = this.locatorInfo.transitCdnNumber ?: Cdn.CDN_0.cdnNumber,
remoteId = SignalServiceAttachmentRemoteId.from(locatorInfo.transitCdnKey),
contentType = contentType,
key = this.locatorInfo.key.toByteArray(),
size = Optional.ofNullable(locatorInfo.size),
preview = Optional.empty(),
width = this.width ?: 0,
height = this.height ?: 0,
digest = Optional.ofNullable(this.locatorInfo.digest.toByteArray()),
incrementalDigest = Optional.ofNullable(this.incrementalMac?.toByteArray()),
incrementalMacChunkSize = this.incrementalMacChunkSize ?: 0,
fileName = Optional.ofNullable(fileName),
voiceNote = voiceNote,
isBorderless = borderless,
isGif = gif,
caption = Optional.ofNullable(this.caption),
blurHash = Optional.ofNullable(this.blurHash),
uploadTimestamp = this.locatorInfo.transitTierUploadTimestamp?.clampToValidBackupRange() ?: 0,
uuid = UuidUtil.fromByteStringOrNull(uuid)
)
return PointerAttachment.forPointer(
pointer = Optional.of(signalAttachmentPointer),
stickerLocator = stickerLocator,
transferState = if (wasDownloaded) AttachmentTable.TRANSFER_NEEDS_RESTORE else AttachmentTable.TRANSFER_PROGRESS_PENDING
).orNull()
} else if (!hasMediaName) {
return TombstoneAttachment(
contentType = contentType,
incrementalMac = this.incrementalMac?.toByteArray(),
incrementalMacChunkSize = this.incrementalMacChunkSize,
width = this.width,
height = this.height,
caption = this.caption,
fileName = this.fileName,
blurHash = this.blurHash,
voiceNote = voiceNote,
borderless = borderless,
gif = gif,
quote = quote,
stickerLocator = stickerLocator,
uuid = UuidUtil.fromByteStringOrNull(uuid)
)
} else {
return ArchivedAttachment(
contentType = contentType,
size = this.locatorInfo.size.toLong(),
cdn = this.locatorInfo.transitCdnNumber ?: Cdn.CDN_0.cdnNumber,
uploadTimestamp = this.locatorInfo.transitTierUploadTimestamp ?: 0,
key = this.locatorInfo.key.toByteArray(),
iv = null,
cdnKey = this.locatorInfo.transitCdnKey?.nullIfBlank(),
archiveCdn = this.locatorInfo.mediaTierCdnNumber,
archiveMediaName = this.locatorInfo.mediaName,
archiveMediaId = importState.mediaRootBackupKey.deriveMediaId(MediaName(this.locatorInfo.mediaName)).encode(),
archiveThumbnailMediaId = importState.mediaRootBackupKey.deriveMediaId(MediaName.forThumbnailFromMediaName(this.locatorInfo.mediaName)).encode(),
digest = this.locatorInfo.digest.toByteArray(),
incrementalMac = this.incrementalMac?.toByteArray(),
incrementalMacChunkSize = this.incrementalMacChunkSize,
width = this.width,
height = this.height,
caption = this.caption,
blurHash = this.blurHash,
voiceNote = voiceNote,
borderless = borderless,
gif = gif,
quote = quote,
stickerLocator = stickerLocator,
uuid = UuidUtil.fromByteStringOrNull(uuid),
fileName = fileName
)
return when (attachmentType) {
AttachmentType.ARCHIVE -> {
ArchivedAttachment(
contentType = contentType,
size = this.locatorInfo.size.toLong(),
cdn = this.locatorInfo.transitCdnNumber ?: Cdn.CDN_0.cdnNumber,
uploadTimestamp = this.locatorInfo.transitTierUploadTimestamp ?: 0,
key = this.locatorInfo.key.toByteArray(),
cdnKey = this.locatorInfo.transitCdnKey?.nullIfBlank(),
archiveCdn = this.locatorInfo.mediaTierCdnNumber,
plaintextHash = this.locatorInfo.plaintextHash!!.toByteArray(),
incrementalMac = this.incrementalMac?.toByteArray(),
incrementalMacChunkSize = this.incrementalMacChunkSize,
width = this.width,
height = this.height,
caption = this.caption,
blurHash = this.blurHash,
voiceNote = voiceNote,
borderless = borderless,
stickerLocator = stickerLocator,
gif = gif,
quote = quote,
uuid = UuidUtil.fromByteStringOrNull(uuid),
fileName = fileName
)
}
AttachmentType.TRANSIT -> {
val signalAttachmentPointer = SignalServiceAttachmentPointer(
cdnNumber = this.locatorInfo.transitCdnNumber ?: Cdn.CDN_0.cdnNumber,
remoteId = SignalServiceAttachmentRemoteId.from(locatorInfo.transitCdnKey!!),
contentType = contentType,
key = this.locatorInfo.key.toByteArray(),
size = Optional.ofNullable(locatorInfo.size),
preview = Optional.empty(),
width = this.width ?: 0,
height = this.height ?: 0,
digest = Optional.ofNullable(this.locatorInfo.encryptedDigest!!.toByteArray()),
incrementalDigest = Optional.ofNullable(this.incrementalMac?.toByteArray()),
incrementalMacChunkSize = this.incrementalMacChunkSize ?: 0,
fileName = Optional.ofNullable(fileName),
voiceNote = voiceNote,
isBorderless = borderless,
isGif = gif,
caption = Optional.ofNullable(this.caption),
blurHash = Optional.ofNullable(this.blurHash),
uploadTimestamp = this.locatorInfo.transitTierUploadTimestamp?.clampToValidBackupRange() ?: 0,
uuid = UuidUtil.fromByteStringOrNull(uuid)
)
PointerAttachment.forPointer(
pointer = Optional.of(signalAttachmentPointer),
stickerLocator = stickerLocator,
transferState = if (wasDownloaded) AttachmentTable.TRANSFER_NEEDS_RESTORE else AttachmentTable.TRANSFER_PROGRESS_PENDING
).orNull()
}
AttachmentType.INVALID -> {
TombstoneAttachment(
contentType = contentType,
incrementalMac = this.incrementalMac?.toByteArray(),
incrementalMacChunkSize = this.incrementalMacChunkSize,
width = this.width,
height = this.height,
caption = this.caption,
fileName = this.fileName,
blurHash = this.blurHash,
voiceNote = voiceNote,
borderless = borderless,
gif = gif,
quote = quote,
stickerLocator = stickerLocator,
uuid = UuidUtil.fromByteStringOrNull(uuid)
)
}
}
}
@@ -192,21 +194,17 @@ fun FilePointer.Builder.setLegacyLocators(attachment: DatabaseAttachment, mediaA
}
fun DatabaseAttachment.toLocatorInfo(): FilePointer.LocatorInfo {
if (this.remoteKey.isNullOrBlank() || this.remoteDigest == null || this.size == 0L) {
return FilePointer.LocatorInfo()
}
val attachmentType = this.toRemoteAttachmentType()
if (this.transferState == AttachmentTable.TRANSFER_PROGRESS_PERMANENT_FAILURE && this.archiveTransferState != AttachmentTable.ArchiveTransferState.FINISHED) {
if (attachmentType == AttachmentType.INVALID) {
return FilePointer.LocatorInfo()
}
val locatorBuilder = FilePointer.LocatorInfo.Builder()
val remoteKey = Base64.decode(this.remoteKey).toByteString()
val archiveMediaName = this.getMediaName()?.toString()
val remoteKey = Base64.decode(this.remoteKey!!).toByteString()
locatorBuilder.key = remoteKey
locatorBuilder.digest = this.remoteDigest.toByteString()
locatorBuilder.size = this.size.toInt()
if (this.remoteLocation.isNotNullOrBlank()) {
@@ -215,8 +213,17 @@ fun DatabaseAttachment.toLocatorInfo(): FilePointer.LocatorInfo {
locatorBuilder.transitTierUploadTimestamp = this.uploadTimestamp.takeIf { it > 0 }?.clampToValidBackupRange()
}
locatorBuilder.mediaTierCdnNumber = this.archiveCdn?.takeIf { archiveMediaName != null }
locatorBuilder.mediaName = archiveMediaName.emptyIfNull()
@Suppress("KotlinConstantConditions")
when (attachmentType) {
AttachmentType.ARCHIVE -> {
locatorBuilder.plaintextHash = Base64.decode(this.dataHash!!).toByteString()
locatorBuilder.mediaTierCdnNumber = this.archiveCdn
}
AttachmentType.TRANSIT -> {
locatorBuilder.encryptedDigest = this.remoteDigest!!.toByteString()
}
AttachmentType.INVALID -> Unit
}
return locatorBuilder.build()
}
@@ -260,3 +267,30 @@ fun RemoteAvatarColor.toLocal(): AvatarColor {
RemoteAvatarColor.A210 -> AvatarColor.A210
}
}
private fun DatabaseAttachment.toRemoteAttachmentType(): AttachmentType {
if (this.remoteKey.isNullOrBlank()) {
return AttachmentType.INVALID
}
if (this.transferState == AttachmentTable.TRANSFER_PROGRESS_PERMANENT_FAILURE && this.archiveTransferState != AttachmentTable.ArchiveTransferState.FINISHED) {
return AttachmentType.INVALID
}
val activelyOnArchiveCdn = this.archiveTransferState == AttachmentTable.ArchiveTransferState.FINISHED
val couldBeOnArchiveCdn = this.transferState == AttachmentTable.TRANSFER_PROGRESS_DONE && this.archiveTransferState != AttachmentTable.ArchiveTransferState.PERMANENT_FAILURE
if (this.dataHash != null && (activelyOnArchiveCdn || couldBeOnArchiveCdn)) {
return AttachmentType.ARCHIVE
}
if (this.remoteDigest != null && this.remoteLocation.isNotNullOrBlank()) {
return AttachmentType.TRANSIT
}
return AttachmentType.INVALID
}
private enum class AttachmentType {
TRANSIT, ARCHIVE, INVALID
}

View File

@@ -36,7 +36,6 @@ import org.signal.core.util.Base64
import org.signal.core.util.SqlUtil
import org.signal.core.util.StreamUtil
import org.signal.core.util.ThreadUtil
import org.signal.core.util.allMatch
import org.signal.core.util.copyTo
import org.signal.core.util.count
import org.signal.core.util.delete
@@ -61,7 +60,6 @@ import org.signal.core.util.requireNonNullString
import org.signal.core.util.requireObject
import org.signal.core.util.requireString
import org.signal.core.util.select
import org.signal.core.util.stream.LimitedInputStream
import org.signal.core.util.stream.NullOutputStream
import org.signal.core.util.toInt
import org.signal.core.util.update
@@ -79,17 +77,6 @@ import org.thoughtcrime.securesms.crypto.AttachmentSecret
import org.thoughtcrime.securesms.crypto.ClassicDecryptingPartInputStream
import org.thoughtcrime.securesms.crypto.ModernDecryptingPartInputStream
import org.thoughtcrime.securesms.crypto.ModernEncryptingPartOutputStream
import org.thoughtcrime.securesms.database.AttachmentTable.ArchiveTransferState.COPY_PENDING
import org.thoughtcrime.securesms.database.AttachmentTable.ArchiveTransferState.FINISHED
import org.thoughtcrime.securesms.database.AttachmentTable.ArchiveTransferState.NONE
import org.thoughtcrime.securesms.database.AttachmentTable.ArchiveTransferState.PERMANENT_FAILURE
import org.thoughtcrime.securesms.database.AttachmentTable.ArchiveTransferState.UPLOAD_IN_PROGRESS
import org.thoughtcrime.securesms.database.AttachmentTable.ArchiveTransferState.entries
import org.thoughtcrime.securesms.database.AttachmentTable.Companion.DATA_FILE
import org.thoughtcrime.securesms.database.AttachmentTable.Companion.DATA_HASH_END
import org.thoughtcrime.securesms.database.AttachmentTable.Companion.PREUPLOAD_MESSAGE_ID
import org.thoughtcrime.securesms.database.AttachmentTable.Companion.TRANSFER_PROGRESS_DONE
import org.thoughtcrime.securesms.database.AttachmentTable.ThumbnailRestoreState.entries
import org.thoughtcrime.securesms.database.MessageTable.SyncMessageId
import org.thoughtcrime.securesms.database.SignalDatabase.Companion.messages
import org.thoughtcrime.securesms.database.SignalDatabase.Companion.threads
@@ -144,7 +131,6 @@ class AttachmentTable(
const val MESSAGE_ID = "message_id"
const val CONTENT_TYPE = "content_type"
const val REMOTE_KEY = "remote_key"
const val REMOTE_IV = "remote_iv"
const val REMOTE_LOCATION = "remote_location"
const val REMOTE_DIGEST = "remote_digest"
const val REMOTE_INCREMENTAL_DIGEST = "remote_incremental_digest"
@@ -203,7 +189,6 @@ class AttachmentTable(
MESSAGE_ID,
CONTENT_TYPE,
REMOTE_KEY,
REMOTE_IV,
REMOTE_LOCATION,
REMOTE_DIGEST,
REMOTE_INCREMENTAL_DIGEST,
@@ -284,7 +269,6 @@ class AttachmentTable(
$THUMBNAIL_RANDOM BLOB DEFAULT NULL,
$THUMBNAIL_RESTORE_STATE INTEGER DEFAULT ${ThumbnailRestoreState.NONE.value},
$ATTACHMENT_UUID TEXT DEFAULT NULL,
$REMOTE_IV BLOB DEFAULT NULL,
$OFFLOAD_RESTORED_AT INTEGER DEFAULT 0
)
"""
@@ -297,7 +281,7 @@ class AttachmentTable(
"CREATE INDEX IF NOT EXISTS attachment_transfer_state_index ON $TABLE_NAME ($TRANSFER_STATE);",
"CREATE INDEX IF NOT EXISTS attachment_sticker_pack_id_index ON $TABLE_NAME ($STICKER_PACK_ID);",
"CREATE INDEX IF NOT EXISTS attachment_data_hash_start_index ON $TABLE_NAME ($DATA_HASH_START);",
"CREATE INDEX IF NOT EXISTS attachment_data_hash_end_index ON $TABLE_NAME ($DATA_HASH_END);",
"CREATE INDEX IF NOT EXISTS attachment_data_hash_end_remote_key_index ON $TABLE_NAME ($DATA_HASH_END, $REMOTE_KEY);",
"CREATE INDEX IF NOT EXISTS $DATA_FILE_INDEX ON $TABLE_NAME ($DATA_FILE);",
"CREATE INDEX IF NOT EXISTS attachment_archive_transfer_state ON $TABLE_NAME ($ARCHIVE_TRANSFER_STATE);",
"CREATE INDEX IF NOT EXISTS attachment_remote_digest_index ON $TABLE_NAME ($REMOTE_DIGEST);"
@@ -511,7 +495,7 @@ class AttachmentTable(
return readableDatabase
.select(*PROJECTION)
.from(TABLE_NAME)
.where("$REMOTE_KEY IS NOT NULL AND $REMOTE_DIGEST IS NOT NULL AND $REMOTE_IV IS NOT NULL AND $DATA_FILE IS NOT NULL")
.where("$REMOTE_KEY IS NOT NULL AND $DATA_HASH_END IS NOT NULL AND $DATA_FILE IS NOT NULL")
.orderBy("$ID DESC")
.run()
.readToList {
@@ -519,16 +503,15 @@ class AttachmentTable(
file = File(it.requireNonNullString(DATA_FILE)),
random = it.requireNonNullBlob(DATA_RANDOM),
size = it.requireLong(DATA_SIZE),
remoteDigest = it.requireBlob(REMOTE_DIGEST)!!,
remoteKey = it.requireBlob(REMOTE_KEY)!!,
remoteIv = it.requireBlob(REMOTE_IV)!!
plaintextHash = Base64.decode(it.requireNonNullString(DATA_HASH_END))
)
}
}
fun getRestorableAttachments(batchSize: Int): List<RestorableAttachment> {
return readableDatabase
.select(ID, MESSAGE_ID, DATA_SIZE, REMOTE_DIGEST, REMOTE_KEY)
.select(ID, MESSAGE_ID, DATA_SIZE, DATA_HASH_END, REMOTE_KEY)
.from(TABLE_NAME)
.where("$TRANSFER_STATE = ?", TRANSFER_NEEDS_RESTORE)
.limit(batchSize)
@@ -539,7 +522,7 @@ class AttachmentTable(
attachmentId = AttachmentId(it.requireLong(ID)),
mmsId = it.requireLong(MESSAGE_ID),
size = it.requireLong(DATA_SIZE),
remoteDigest = it.requireBlob(REMOTE_DIGEST),
plaintextHash = it.requireBlob(DATA_HASH_END),
remoteKey = it.requireBlob(REMOTE_KEY)
)
}
@@ -549,7 +532,7 @@ class AttachmentTable(
return readableDatabase
.select(ID, MESSAGE_ID, DATA_SIZE, REMOTE_DIGEST, REMOTE_KEY)
.from(TABLE_NAME)
.where("$TRANSFER_STATE = ?", TRANSFER_RESTORE_OFFLOADED)
.where("$TRANSFER_STATE = ? AND $DATA_HASH_END NOT NULL AND $REMOTE_KEY NOT NULL", TRANSFER_RESTORE_OFFLOADED)
.orderBy("$ID DESC")
.run()
.readToList {
@@ -557,8 +540,8 @@ class AttachmentTable(
attachmentId = AttachmentId(it.requireLong(ID)),
mmsId = it.requireLong(MESSAGE_ID),
size = it.requireLong(DATA_SIZE),
remoteDigest = it.requireBlob(REMOTE_DIGEST),
remoteKey = it.requireBlob(REMOTE_KEY)
plaintextHash = it.requireNonNullBlob(DATA_HASH_END),
remoteKey = it.requireNonNullBlob(REMOTE_KEY)
)
}
}
@@ -595,49 +578,31 @@ class AttachmentTable(
}
/**
* At archive creation time, we need to ensure that all relevant attachments have populated (key, iv, digest) tuples.
* At archive creation time, we need to ensure that all relevant attachments have populated [REMOTE_KEY]s.
* This does that.
*/
fun createKeyIvDigestForAttachmentsThatNeedArchiveUpload(): Int {
fun createRemoteKeyForAttachmentsThatNeedArchiveUpload(): Int {
var count = 0
writableDatabase.select(ID, REMOTE_KEY, REMOTE_IV, REMOTE_DIGEST, DATA_FILE, DATA_RANDOM)
writableDatabase.select(ID, REMOTE_KEY, DATA_FILE, DATA_RANDOM)
.from(TABLE_NAME)
.where(
"""
$ARCHIVE_TRANSFER_STATE = ${ArchiveTransferState.NONE.value} AND
$DATA_FILE NOT NULL AND
$TRANSFER_STATE = $TRANSFER_PROGRESS_DONE AND
(
$REMOTE_KEY IS NULL OR
$REMOTE_IV IS NULL OR
$REMOTE_DIGEST IS NULL
)
$REMOTE_KEY IS NULL
"""
)
.run()
.forEach { cursor ->
val attachmentId = AttachmentId(cursor.requireLong(ID))
Log.w(TAG, "[createKeyIvDigestForAttachmentsThatNeedArchiveUpload][$attachmentId] Missing key, iv, or digest. Generating.")
Log.w(TAG, "[createRemoteKeyForAttachmentsThatNeedArchiveUpload][$attachmentId] Missing key. Generating.")
val key = cursor.requireString(REMOTE_KEY)?.let { Base64.decode(it) } ?: Util.getSecretBytes(64)
val iv = cursor.requireBlob(REMOTE_IV) ?: Util.getSecretBytes(16)
val digest = run {
val fileInfo = getDataFileInfo(attachmentId)!!
try {
calculateDigest(fileInfo, key, iv)
} catch (e: FileNotFoundException) {
Log.w(TAG, "[createKeyIvDigestForAttachmentsThatNeedArchiveUpload][$attachmentId] Could not find file ${fileInfo.file}. Delete all later?")
return@forEach
}
}
writableDatabase.update(TABLE_NAME)
.values(
REMOTE_KEY to Base64.encodeWithPadding(key),
REMOTE_IV to iv,
REMOTE_DIGEST to digest
)
.values(REMOTE_KEY to Base64.encodeWithPadding(key))
.where("$ID = ?", attachmentId.id)
.run()
@@ -717,14 +682,14 @@ class AttachmentTable(
/**
* Resets the archive transfer state for the given attachment, matched by plaintext hash and remote key.
*/
fun resetArchiveTransferStateByDigest(digest: ByteArray) {
fun resetArchiveTransferStateByPlaintextHashAndRemoteKey(plaintextHash: ByteArray, remoteKey: ByteArray) {
writableDatabase
.update(TABLE_NAME)
.values(
ARCHIVE_TRANSFER_STATE to ArchiveTransferState.NONE.value,
ARCHIVE_CDN to null
)
.where("$REMOTE_DIGEST = ?", digest)
.where("$DATA_HASH_END = ? AND $REMOTE_KEY = ?", plaintextHash, remoteKey)
.run()
}
@@ -1188,11 +1153,9 @@ class AttachmentTable(
* When we find out about a new inbound attachment pointer, we insert a row for it that contains all the info we need to download it via [insertAttachmentWithData].
* Later, we download the data for that pointer. Call this method once you have the data to associate it with the attachment. At this point, it is assumed
* that the content of the attachment will never change.
*
* @return True if we had to change the digest as part of saving the file, otherwise false.
*/
@Throws(MmsException::class)
fun finalizeAttachmentAfterDownload(mmsId: Long, attachmentId: AttachmentId, inputStream: LimitedInputStream, iv: ByteArray, offloadRestoredAt: Duration? = null): Boolean {
fun finalizeAttachmentAfterDownload(mmsId: Long, attachmentId: AttachmentId, inputStream: InputStream, offloadRestoredAt: Duration? = null) {
Log.i(TAG, "[finalizeAttachmentAfterDownload] Finalizing downloaded data for $attachmentId. (MessageId: $mmsId, $attachmentId)")
val existingPlaceholder: DatabaseAttachment = getAttachment(attachmentId) ?: throw MmsException("No attachment found for id: $attachmentId")
@@ -1200,23 +1163,6 @@ class AttachmentTable(
val fileWriteResult: DataFileWriteResult = writeToDataFile(newDataFile(context), inputStream, TransformProperties.empty(), closeInputStream = false)
val transferFile: File? = getTransferFile(databaseHelper.signalReadableDatabase, attachmentId)
val paddingAllZeroes = inputStream.use { limitStream ->
limitStream.leftoverStream().allMatch { it == 0x00.toByte() }
}
// Existing digest may be null for non-user attachments, like things pulled from S3
val digest = if (existingPlaceholder.remoteDigest != null && paddingAllZeroes) {
Log.d(TAG, "[finalizeAttachmentAfterDownload] $attachmentId has all-zero padding. Digest is good.")
existingPlaceholder.remoteDigest
} else {
Log.w(TAG, "[finalizeAttachmentAfterDownload] $attachmentId has non-zero padding bytes. Recomputing digest.")
val key = Base64.decode(existingPlaceholder.remoteKey!!)
calculateDigest(fileWriteResult, key, iv)
}
val digestChanged = !digest.contentEquals(existingPlaceholder.remoteDigest)
val foundDuplicate = writableDatabase.withinTransaction { db ->
// We can look and see if we have any exact matches on hash_ends and dedupe the file if we see one.
// We don't look at hash_start here because that could result in us matching on a file that got compressed down to something smaller, effectively lowering
@@ -1263,15 +1209,10 @@ class AttachmentTable(
values.put(REMOTE_LOCATION, existingPlaceholder.remoteLocation)
values.put(CDN_NUMBER, existingPlaceholder.cdn.serialize())
values.put(REMOTE_KEY, existingPlaceholder.remoteKey!!)
values.put(REMOTE_IV, iv)
values.put(REMOTE_DIGEST, digest)
values.put(REMOTE_DIGEST, existingPlaceholder.remoteDigest)
values.put(REMOTE_INCREMENTAL_DIGEST, existingPlaceholder.incrementalDigest)
values.put(REMOTE_INCREMENTAL_DIGEST_CHUNK_SIZE, existingPlaceholder.incrementalMacChunkSize)
if (digestChanged) {
values.put(UPLOAD_TIMESTAMP, 0)
}
if (offloadRestoredAt != null) {
values.put(OFFLOAD_RESTORED_AT, offloadRestoredAt.inWholeMilliseconds)
}
@@ -1313,8 +1254,6 @@ class AttachmentTable(
if (MediaUtil.isAudio(existingPlaceholder)) {
GenerateAudioWaveFormJob.enqueue(existingPlaceholder.attachmentId)
}
return digestChanged
}
@Throws(IOException::class)
@@ -1390,7 +1329,6 @@ class AttachmentTable(
CDN_NUMBER to uploadResult.cdnNumber,
REMOTE_LOCATION to uploadResult.remoteId.toString(),
REMOTE_KEY to Base64.encodeWithPadding(uploadResult.key),
REMOTE_IV to uploadResult.iv,
REMOTE_DIGEST to uploadResult.digest,
REMOTE_INCREMENTAL_DIGEST to uploadResult.incrementalDigest,
REMOTE_INCREMENTAL_DIGEST_CHUNK_SIZE to uploadResult.incrementalDigestChunkSize,
@@ -1500,9 +1438,8 @@ class AttachmentTable(
}
}
fun createKeyIvIfNecessary(attachmentId: AttachmentId) {
fun createRemoteKeyIfNecessary(attachmentId: AttachmentId) {
val key = Util.getSecretBytes(64)
val iv = Util.getSecretBytes(16)
writableDatabase.withinTransaction {
writableDatabase
@@ -1510,12 +1447,6 @@ class AttachmentTable(
.values(REMOTE_KEY to Base64.encodeWithPadding(key))
.where("$ID = ? AND $REMOTE_KEY IS NULL", attachmentId.id)
.run()
writableDatabase
.update(TABLE_NAME)
.values(REMOTE_IV to iv)
.where("$ID = ? AND $REMOTE_IV IS NULL", attachmentId.id)
.run()
}
}
@@ -1562,30 +1493,14 @@ class AttachmentTable(
.readToList { it.requireNonNullString(DATA_FILE) }
}
/**
 * As part of the digest backfill process, this updates the (key, IV, digest) tuple for an attachment.
 *
 * All three values are written together in a single UPDATE so a row can never hold a partial tuple
 * (e.g. a fresh key alongside a stale digest), which would poison later digest comparisons.
 *
 * @param key The remote key; persisted Base64-encoded with padding.
 * @param iv The IV used when computing [digest]; persisted as a raw blob.
 * @param digest The digest computed over the attachment data using [key] and [iv].
 */
fun updateKeyIvDigest(attachmentId: AttachmentId, key: ByteArray, iv: ByteArray, digest: ByteArray) {
  writableDatabase
    .update(TABLE_NAME)
    .values(
      REMOTE_KEY to Base64.encodeWithPadding(key),
      REMOTE_IV to iv,
      REMOTE_DIGEST to digest
    )
    .where("$ID = ?", attachmentId.id)
    .run()
}
/**
* As part of the digest backfill process, this updates the (key, IV, digest) tuple for all attachments that share a data file (and are done downloading).
*/
fun updateKeyIvDigestByDataFile(dataFile: String, key: ByteArray, iv: ByteArray, digest: ByteArray) {
fun updateRemoteKeyAndDigestByDataFile(dataFile: String, key: ByteArray, digest: ByteArray) {
writableDatabase
.update(TABLE_NAME)
.values(
REMOTE_KEY to Base64.encodeWithPadding(key),
REMOTE_IV to iv,
REMOTE_DIGEST to digest
)
.where("$DATA_FILE = ? AND $TRANSFER_STATE = $TRANSFER_PROGRESS_DONE", dataFile)
@@ -1907,37 +1822,6 @@ class AttachmentTable(
notifyConversationListeners(threadId)
}
/**
 * Ensures that a complete (key/IV/digest) tuple exists for the given attachment, generating any
 * missing pieces and persisting the full tuple in a single update.
 *
 * @throws IOException if no data file exists from which a digest can be computed.
 */
@Throws(IOException::class)
fun createKeyIvDigestIfNecessary(attachment: DatabaseAttachment) {
  val tupleComplete = attachment.remoteKey != null && attachment.remoteIv != null && attachment.remoteDigest != null
  if (tupleComplete) {
    return
  }

  val attachmentId = attachment.attachmentId
  Log.w(TAG, "[createKeyIvDigestIfNecessary][$attachmentId] Missing one of (key, iv, digest). Filling in the gaps.")

  // Reuse any values that already exist; only generate what's missing.
  val key: ByteArray = attachment.remoteKey?.let { Base64.decode(it) } ?: Util.getSecretBytes(64)
  val iv: ByteArray = attachment.remoteIv ?: Util.getSecretBytes(16)

  // The digest is always recomputed from the data file with the (possibly new) key/iv.
  val fileInfo = getDataFileInfo(attachmentId) ?: throw IOException("No data file found for $attachmentId!")
  val digest: ByteArray = calculateDigest(fileInfo, key, iv)

  // Write all three together so we never persist a partial tuple.
  writableDatabase
    .update(TABLE_NAME)
    .values(
      REMOTE_KEY to Base64.encodeWithPadding(key),
      REMOTE_IV to iv,
      REMOTE_DIGEST to digest
    )
    .where("$ID = ?", attachmentId.id)
    .run()
}
fun getAttachments(cursor: Cursor): List<DatabaseAttachment> {
return try {
if (cursor.getColumnIndex(ATTACHMENT_JSON_ALIAS) != -1) {
@@ -1966,7 +1850,6 @@ class AttachmentTable(
cdn = Cdn.deserialize(jsonObject.getInt(CDN_NUMBER)),
location = jsonObject.getString(REMOTE_LOCATION),
key = jsonObject.getString(REMOTE_KEY),
iv = null,
digest = null,
incrementalDigest = null,
incrementalMacChunkSize = 0,
@@ -2067,11 +1950,11 @@ class AttachmentTable(
/**
 * Updates the archive CDN for all attachments that share the same plaintext hash and remote key.
 *
 * The (plaintext hash, remote key) pair is what mediaName is now derived from, so every row
 * matching the pair refers to the same archived media object.
 */
fun setArchiveCdnByPlaintextHashAndRemoteKey(plaintextHash: ByteArray, remoteKey: ByteArray, archiveCdn: Int) {
  writableDatabase
    .update(TABLE_NAME)
    .values(ARCHIVE_CDN to archiveCdn)
    .where("$DATA_HASH_END = ? AND $REMOTE_KEY = ?", plaintextHash, remoteKey)
    .run()
}
@@ -2098,11 +1981,7 @@ class AttachmentTable(
.run()
}
/**
 * Calculates the attachment digest for a freshly-written data file.
 *
 * @param iv The IV to encrypt with when computing the digest. A random IV is generated when the
 *   caller does not supply one — the digest no longer needs to be reproducible now that mediaName
 *   is derived from the plaintext hash and remote key rather than the digest.
 */
private fun calculateDigest(fileInfo: DataFileWriteResult, key: ByteArray, iv: ByteArray = Util.getSecretBytes(16)): ByteArray {
  return calculateDigest(file = fileInfo.file, random = fileInfo.random, length = fileInfo.length, key = key, iv = iv)
}
@@ -2364,6 +2243,8 @@ class AttachmentTable(
Log.d(TAG, "[insertAttachment] Inserting attachment for messageId $messageId.")
val attachmentId: AttachmentId = writableDatabase.withinTransaction { db ->
val plaintextHash = attachment.plaintextHash.takeIf { it.isNotEmpty() }?.let { Base64.encodeWithPadding(it) }
val contentValues = ContentValues().apply {
put(MESSAGE_ID, messageId)
put(CONTENT_TYPE, attachment.contentType)
@@ -2389,6 +2270,11 @@ class AttachmentTable(
put(ATTACHMENT_UUID, attachment.uuid?.toString())
put(BLUR_HASH, attachment.blurHash?.hash)
if (plaintextHash != null) {
put(DATA_HASH_START, plaintextHash)
put(DATA_HASH_END, plaintextHash)
}
attachment.stickerLocator?.let { sticker ->
put(STICKER_PACK_ID, sticker.packId)
put(STICKER_PACK_KEY, sticker.packKey)
@@ -2525,7 +2411,6 @@ class AttachmentTable(
contentValues.put(REMOTE_LOCATION, uploadTemplate?.remoteLocation)
contentValues.put(REMOTE_DIGEST, uploadTemplate?.remoteDigest)
contentValues.put(REMOTE_KEY, uploadTemplate?.remoteKey)
contentValues.put(REMOTE_IV, uploadTemplate?.remoteIv)
contentValues.put(FILE_NAME, StorageUtil.getCleanFileName(attachment.fileName))
contentValues.put(FAST_PREFLIGHT_ID, attachment.fastPreflightId)
contentValues.put(VOICE_NOTE, if (attachment.voiceNote) 1 else 0)
@@ -2576,7 +2461,7 @@ class AttachmentTable(
fun insertWallpaper(dataStream: InputStream): AttachmentId {
return insertAttachmentWithData(WALLPAPER_MESSAGE_ID, dataStream, WallpaperAttachment(), quote = false).also { id ->
createKeyIvIfNecessary(id)
createRemoteKeyIfNecessary(id)
}
}
@@ -2673,7 +2558,6 @@ class AttachmentTable(
cdn = cursor.requireObject(CDN_NUMBER, Cdn.Serializer),
location = cursor.requireString(REMOTE_LOCATION),
key = cursor.requireString(REMOTE_KEY),
iv = cursor.requireBlob(REMOTE_IV),
digest = cursor.requireBlob(REMOTE_DIGEST),
incrementalDigest = cursor.requireBlob(REMOTE_INCREMENTAL_DIGEST),
incrementalMacChunkSize = cursor.requireInt(REMOTE_INCREMENTAL_DIGEST_CHUNK_SIZE),
@@ -3043,16 +2927,15 @@ class AttachmentTable(
val file: File,
val random: ByteArray,
val size: Long,
val remoteDigest: ByteArray,
val remoteKey: ByteArray,
val remoteIv: ByteArray
val plaintextHash: ByteArray,
val remoteKey: ByteArray
)
class RestorableAttachment(
val attachmentId: AttachmentId,
val mmsId: Long,
val size: Long,
val remoteDigest: ByteArray?,
val plaintextHash: ByteArray?,
val remoteKey: ByteArray?
) {
override fun equals(other: Any?): Boolean {

View File

@@ -91,9 +91,14 @@ class BackupMediaSnapshotTable(context: Context, database: SignalDatabase) : Dat
const val LAST_SEEN_ON_REMOTE_SNAPSHOT_VERSION = "last_seen_on_remote_snapshot_version"
/**
* The plaintext hash of the media object. This is used to find matching attachments in the attachment table when necessary.
*/
const val REMOTE_DIGEST = "remote_digest"
const val PLAINTEXT_HASH = "plaintext_hash"
/**
* The remote key that was used for encrypting the media object. This is used to find matching attachments in the attachment table when necessary.
*/
const val REMOTE_KEY = "remote_key"
/** Constant representing a [SNAPSHOT_VERSION] version that has not yet been set. */
const val UNKNOWN_VERSION = -1
@@ -111,7 +116,8 @@ class BackupMediaSnapshotTable(context: Context, database: SignalDatabase) : Dat
$SNAPSHOT_VERSION INTEGER NOT NULL DEFAULT $UNKNOWN_VERSION,
$IS_PENDING INTEGER NOT NULL DEFAULT 0,
$IS_THUMBNAIL INTEGER NOT NULL DEFAULT 0,
$REMOTE_DIGEST BLOB NOT NULL,
$PLAINTEXT_HASH BLOB NOT NULL,
$REMOTE_KEY BLOB NOT NULL,
$LAST_SEEN_ON_REMOTE_SNAPSHOT_VERSION INTEGER NOT NULL DEFAULT 0
)
""".trimIndent()
@@ -130,11 +136,11 @@ class BackupMediaSnapshotTable(context: Context, database: SignalDatabase) : Dat
.chunked(SqlUtil.MAX_QUERY_ARGS)
.forEach { chunk ->
writePendingMediaObjectsChunk(
chunk.map { MediaEntry(it.mediaId, it.cdn, it.digest, isThumbnail = false) }
chunk.map { MediaEntry(it.mediaId, it.cdn, it.plaintextHash, it.remoteKey, isThumbnail = false) }
)
writePendingMediaObjectsChunk(
chunk.map { MediaEntry(it.thumbnailMediaId, it.cdn, it.digest, isThumbnail = true) }
chunk.map { MediaEntry(it.thumbnailMediaId, it.cdn, it.plaintextHash, it.remoteKey, isThumbnail = true) }
)
}
}
@@ -238,14 +244,15 @@ class BackupMediaSnapshotTable(context: Context, database: SignalDatabase) : Dat
return readableDatabase.rawQuery(
"""
WITH input_pairs($MEDIA_ID, $CDN) AS (VALUES $inputValues)
SELECT a.$REMOTE_DIGEST, b.$CDN
SELECT a.$PLAINTEXT_HASH, a.$REMOTE_KEY, b.$CDN
FROM $TABLE_NAME a
JOIN input_pairs b ON a.$MEDIA_ID = b.$MEDIA_ID
WHERE a.$CDN != b.$CDN AND a.$IS_THUMBNAIL = 0 AND $SNAPSHOT_VERSION = $MAX_VERSION
"""
).readToList { cursor ->
CdnMismatchResult(
digest = cursor.requireNonNullBlob(REMOTE_DIGEST),
plaintextHash = cursor.requireNonNullBlob(PLAINTEXT_HASH),
remoteKey = cursor.requireNonNullBlob(REMOTE_KEY),
cdn = cursor.requireInt(CDN)
)
}
@@ -277,7 +284,7 @@ class BackupMediaSnapshotTable(context: Context, database: SignalDatabase) : Dat
*/
fun getMediaObjectsLastSeenOnCdnBeforeSnapshotVersion(snapshotVersion: Long): Cursor {
return readableDatabase
.select(MEDIA_ID, CDN, REMOTE_DIGEST, IS_THUMBNAIL)
.select(MEDIA_ID, CDN, PLAINTEXT_HASH, REMOTE_KEY, IS_THUMBNAIL)
.from(TABLE_NAME)
.where("$LAST_SEEN_ON_REMOTE_SNAPSHOT_VERSION < $snapshotVersion AND $SNAPSHOT_VERSION = $snapshotVersion")
.run()
@@ -288,21 +295,23 @@ class BackupMediaSnapshotTable(context: Context, database: SignalDatabase) : Dat
contentValuesOf(
MEDIA_ID to it.mediaId,
CDN to it.cdn,
REMOTE_DIGEST to it.digest,
PLAINTEXT_HASH to it.plaintextHash,
REMOTE_KEY to it.remoteKey,
IS_THUMBNAIL to it.isThumbnail.toInt(),
SNAPSHOT_VERSION to UNKNOWN_VERSION,
IS_PENDING to 1
)
}
val query = SqlUtil.buildSingleBulkInsert(TABLE_NAME, arrayOf(MEDIA_ID, CDN, REMOTE_DIGEST, IS_THUMBNAIL, SNAPSHOT_VERSION, IS_PENDING), values)
val query = SqlUtil.buildSingleBulkInsert(TABLE_NAME, arrayOf(MEDIA_ID, CDN, PLAINTEXT_HASH, REMOTE_KEY, IS_THUMBNAIL, SNAPSHOT_VERSION, IS_PENDING), values)
writableDatabase.execSQL(
query.where +
"""
ON CONFLICT($MEDIA_ID) DO UPDATE SET
$CDN = excluded.$CDN,
$REMOTE_DIGEST = excluded.$REMOTE_DIGEST,
$PLAINTEXT_HASH = excluded.$PLAINTEXT_HASH,
$REMOTE_KEY = excluded.$REMOTE_KEY,
$IS_THUMBNAIL = excluded.$IS_THUMBNAIL,
$IS_PENDING = excluded.$IS_PENDING
""",
@@ -314,18 +323,21 @@ class BackupMediaSnapshotTable(context: Context, database: SignalDatabase) : Dat
val mediaId: String,
val thumbnailMediaId: String,
val cdn: Int?,
val digest: ByteArray
val plaintextHash: ByteArray,
val remoteKey: ByteArray
)
/**
 * A media object whose CDN on the remote snapshot differs from the CDN we have stored locally.
 * Identified by its (plaintext hash, remote key) pair, which is what mediaName is derived from.
 */
class CdnMismatchResult(
  val plaintextHash: ByteArray,
  val remoteKey: ByteArray,
  val cdn: Int
)
class MediaEntry(
val mediaId: String,
val cdn: Int?,
val digest: ByteArray,
val plaintextHash: ByteArray,
val remoteKey: ByteArray,
val isThumbnail: Boolean
) {
companion object {
@@ -333,7 +345,8 @@ class BackupMediaSnapshotTable(context: Context, database: SignalDatabase) : Dat
return MediaEntry(
mediaId = cursor.requireNonNullString(MEDIA_ID),
cdn = cursor.requireIntOrNull(CDN),
digest = cursor.requireNonNullBlob(REMOTE_DIGEST),
plaintextHash = cursor.requireNonNullBlob(PLAINTEXT_HASH),
remoteKey = cursor.requireNonNullBlob(REMOTE_KEY),
isThumbnail = cursor.requireBoolean(IS_THUMBNAIL)
)
}

View File

@@ -31,7 +31,6 @@ class MediaTable internal constructor(context: Context?, databaseHelper: SignalD
${AttachmentTable.TABLE_NAME}.${AttachmentTable.CDN_NUMBER},
${AttachmentTable.TABLE_NAME}.${AttachmentTable.REMOTE_LOCATION},
${AttachmentTable.TABLE_NAME}.${AttachmentTable.REMOTE_KEY},
${AttachmentTable.TABLE_NAME}.${AttachmentTable.REMOTE_IV},
${AttachmentTable.TABLE_NAME}.${AttachmentTable.REMOTE_DIGEST},
${AttachmentTable.TABLE_NAME}.${AttachmentTable.FAST_PREFLIGHT_ID},
${AttachmentTable.TABLE_NAME}.${AttachmentTable.VOICE_NOTE},

View File

@@ -134,6 +134,7 @@ import org.thoughtcrime.securesms.database.helpers.migration.V276_AttachmentCdnD
import org.thoughtcrime.securesms.database.helpers.migration.V277_AddNotificationProfileStorageSync
import org.thoughtcrime.securesms.database.helpers.migration.V278_BackupSnapshotTableVersions
import org.thoughtcrime.securesms.database.helpers.migration.V279_AddNotificationProfileForeignKey
import org.thoughtcrime.securesms.database.helpers.migration.V280_RemoveAttachmentIv
import org.thoughtcrime.securesms.database.SQLiteDatabase as SignalSqliteDatabase
/**
@@ -273,10 +274,11 @@ object SignalDatabaseMigrations {
276 to V276_AttachmentCdnDefaultValueMigration,
277 to V277_AddNotificationProfileStorageSync,
278 to V278_BackupSnapshotTableVersions,
279 to V279_AddNotificationProfileForeignKey
279 to V279_AddNotificationProfileForeignKey,
280 to V280_RemoveAttachmentIv
)
const val DATABASE_VERSION = 279
const val DATABASE_VERSION = 280
@JvmStatic
fun migrate(context: Application, db: SignalSqliteDatabase, oldVersion: Int, newVersion: Int) {

View File

@@ -0,0 +1,45 @@
/*
* Copyright 2025 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.thoughtcrime.securesms.database.helpers.migration
import android.app.Application
import org.thoughtcrime.securesms.database.SQLiteDatabase
/**
* We've changed our mediaName calculation to be based on plaintextHash + remoteKey instead of remoteDigest. That means we no longer need to store the IV
* in the database, because the only reason we were storing it before was to have a consistent remoteDigest calculation.
*
* Also, because we're changing the mediaName calculation, we need to reset all of the archive statuses.
*/
object V280_RemoveAttachmentIv : SignalDatabaseMigration {
  override fun migrate(context: Application, db: SQLiteDatabase, oldVersion: Int, newVersion: Int) {
    // The IV was only stored to keep remoteDigest calculations reproducible; with mediaName now
    // derived from (plaintextHash, remoteKey), the column is dead weight.
    db.execSQL("ALTER TABLE attachment DROP COLUMN remote_iv")

    // Archive lookups are now keyed on (data_hash_end, remote_key) instead of data_hash_end alone.
    db.execSQL("DROP INDEX attachment_data_hash_end_index")
    db.execSQL("CREATE INDEX IF NOT EXISTS attachment_data_hash_end_remote_key_index ON attachment (data_hash_end, remote_key)")

    // Rebuild table to allow us to have new non-null columns
    db.execSQL("DROP TABLE backup_media_snapshot")
    db.execSQL(
      """
      CREATE TABLE backup_media_snapshot (
        _id INTEGER PRIMARY KEY,
        media_id TEXT NOT NULL UNIQUE,
        cdn INTEGER,
        snapshot_version INTEGER NOT NULL DEFAULT -1,
        is_pending INTEGER NOT NULL DEFAULT 0,
        is_thumbnail INTEGER NOT NULL DEFAULT 0,
        plaintext_hash BLOB NOT NULL,
        remote_key BLOB NOT NULL,
        last_seen_on_remote_snapshot_version INTEGER NOT NULL DEFAULT 0
      )
      """
    )
    db.execSQL("CREATE INDEX IF NOT EXISTS backup_snapshot_version_index ON backup_media_snapshot (snapshot_version DESC) WHERE snapshot_version != -1")

    // Reset archive transfer state, since the mediaName calculation changed and everything needs
    // to be re-uploaded under the new naming scheme.
    db.execSQL("UPDATE attachment SET archive_transfer_state = 0 WHERE archive_transfer_state != 0")
  }
}

View File

@@ -45,7 +45,7 @@ class ArchiveAttachmentBackfillJob private constructor(parameters: Parameters) :
val jobs = SignalDatabase.attachments.getAttachmentsThatNeedArchiveUpload()
.map { attachmentId -> UploadAttachmentToArchiveJob(attachmentId) }
SignalDatabase.attachments.createKeyIvDigestForAttachmentsThatNeedArchiveUpload()
SignalDatabase.attachments.createRemoteKeyForAttachmentsThatNeedArchiveUpload()
ArchiveUploadProgress.onAttachmentsStarted(SignalDatabase.attachments.getPendingArchiveUploadBytes())

View File

@@ -127,7 +127,7 @@ class ArchiveAttachmentReconciliationJob private constructor(
val entry = BackupMediaSnapshotTable.MediaEntry.fromCursor(it)
// TODO [backup] Re-enqueue thumbnail uploads if necessary
if (!entry.isThumbnail) {
SignalDatabase.attachments.resetArchiveTransferStateByDigest(entry.digest)
SignalDatabase.attachments.resetArchiveTransferStateByPlaintextHashAndRemoteKey(entry.plaintextHash, entry.remoteKey)
}
}
@@ -170,7 +170,7 @@ class ArchiveAttachmentReconciliationJob private constructor(
if (cdnMismatches.isNotEmpty()) {
Log.w(TAG, "Found ${cdnMismatches.size} items with CDNs that differ from what we have locally. Updating our local store.")
for (mismatch in cdnMismatches) {
SignalDatabase.attachments.setArchiveCdnByDigest(mismatch.digest, mismatch.cdn)
SignalDatabase.attachments.setArchiveCdnByPlaintextHashAndRemoteKey(mismatch.plaintextHash, mismatch.remoteKey, mismatch.cdn)
}
}

View File

@@ -24,6 +24,7 @@ import org.thoughtcrime.securesms.net.SignalNetwork
import org.thoughtcrime.securesms.util.ImageCompressionUtil
import org.thoughtcrime.securesms.util.MediaUtil
import org.thoughtcrime.securesms.util.RemoteConfig
import org.thoughtcrime.securesms.util.Util
import org.whispersystems.signalservice.api.NetworkResult
import org.whispersystems.signalservice.api.messages.SignalServiceAttachment
import org.whispersystems.signalservice.api.messages.SignalServiceAttachmentStream
@@ -105,7 +106,7 @@ class ArchiveThumbnailUploadJob private constructor(
.then { form ->
SignalNetwork.attachments.getResumableUploadSpec(
key = mediaRootBackupKey.deriveThumbnailTransitKey(attachment.requireThumbnailMediaName()),
iv = attachment.remoteIv!!,
iv = Util.getSecretBytes(16),
uploadForm = form
)
}

View File

@@ -11,7 +11,6 @@ import org.greenrobot.eventbus.EventBus
import org.signal.core.util.Base64
import org.signal.core.util.Hex
import org.signal.core.util.logging.Log
import org.signal.core.util.stream.LimitedInputStream
import org.signal.libsignal.protocol.InvalidMacException
import org.signal.libsignal.protocol.InvalidMessageException
import org.thoughtcrime.securesms.attachments.Attachment
@@ -214,15 +213,9 @@ class AttachmentDownloadJob private constructor(
Log.i(TAG, "Downloading push part $attachmentId")
SignalDatabase.attachments.setTransferState(messageId, attachmentId, AttachmentTable.TRANSFER_PROGRESS_STARTED)
val digestChanged = when (attachment.cdn) {
Cdn.S3 -> {
retrieveAttachmentForReleaseChannel(messageId, attachmentId, attachment)
false
}
else -> {
retrieveAttachment(messageId, attachmentId, attachment)
}
when (attachment.cdn) {
Cdn.S3 -> retrieveAttachmentForReleaseChannel(messageId, attachmentId, attachment)
else -> retrieveAttachment(messageId, attachmentId, attachment)
}
if (SignalStore.backup.backsUpMedia) {
@@ -231,11 +224,6 @@ class AttachmentDownloadJob private constructor(
Log.i(TAG, "[$attachmentId] Already archived. Skipping.")
}
digestChanged -> {
Log.i(TAG, "[$attachmentId] Digest for attachment changed after download. Re-uploading to archive.")
AppDependencies.jobManager.add(UploadAttachmentToArchiveJob(attachmentId))
}
attachment.cdn !in CopyAttachmentToArchiveJob.ALLOWED_SOURCE_CDNS -> {
Log.i(TAG, "[$attachmentId] Attachment CDN doesn't support copying to archive. Re-uploading to archive.")
AppDependencies.jobManager.add(UploadAttachmentToArchiveJob(attachmentId))
@@ -268,7 +256,7 @@ class AttachmentDownloadJob private constructor(
messageId: Long,
attachmentId: AttachmentId,
attachment: DatabaseAttachment
): Boolean {
) {
val maxReceiveSize: Long = RemoteConfig.maxAttachmentReceiveSizeBytes
val attachmentFile: File = SignalDatabase.attachments.getOrCreateTransferFile(attachmentId)
@@ -289,7 +277,7 @@ class AttachmentDownloadJob private constructor(
}
}
val downloadResult = AppDependencies
val decryptingStream = AppDependencies
.signalServiceMessageReceiver
.retrieveAttachment(
pointer,
@@ -298,7 +286,7 @@ class AttachmentDownloadJob private constructor(
progressListener
)
return SignalDatabase.attachments.finalizeAttachmentAfterDownload(messageId, attachmentId, downloadResult.dataStream, downloadResult.iv)
SignalDatabase.attachments.finalizeAttachmentAfterDownload(messageId, attachmentId, decryptingStream)
} catch (e: RangeException) {
Log.w(TAG, "Range exception, file size " + attachmentFile.length(), e)
if (attachmentFile.delete()) {
@@ -314,7 +302,7 @@ class AttachmentDownloadJob private constructor(
if (SignalStore.backup.backsUpMedia && e.code == 404 && attachment.archiveTransferState === AttachmentTable.ArchiveTransferState.FINISHED) {
Log.i(TAG, "Retrying download from archive CDN")
RestoreAttachmentJob.restoreAttachment(attachment)
return false
return
}
Log.w(TAG, "Experienced exception while trying to download an attachment.", e)
@@ -334,8 +322,6 @@ class AttachmentDownloadJob private constructor(
markFailed(messageId, attachmentId)
}
}
return false
}
@Throws(InvalidAttachmentException::class)
@@ -399,21 +385,17 @@ class AttachmentDownloadJob private constructor(
try {
S3.getObject(attachment.fileName!!).use { response ->
val body = response.body
if (body != null) {
if (body.contentLength() > RemoteConfig.maxAttachmentReceiveSizeBytes) {
throw MmsException("Attachment too large, failing download")
}
SignalDatabase.attachments.createKeyIvIfNecessary(attachmentId)
val updatedAttachment = SignalDatabase.attachments.getAttachment(attachmentId)!!
SignalDatabase.attachments.finalizeAttachmentAfterDownload(
messageId,
attachmentId,
LimitedInputStream.withoutLimits((body.source() as Source).buffer().inputStream()),
iv = updatedAttachment.remoteIv!!
)
if (body.contentLength() > RemoteConfig.maxAttachmentReceiveSizeBytes) {
throw MmsException("Attachment too large, failing download")
}
SignalDatabase.attachments.createRemoteKeyIfNecessary(attachmentId)
SignalDatabase.attachments.finalizeAttachmentAfterDownload(
messageId,
attachmentId,
(body.source() as Source).buffer().inputStream()
)
}
} catch (e: MmsException) {
Log.w(TAG, "Experienced exception while trying to download an attachment.", e)

View File

@@ -31,6 +31,7 @@ import org.thoughtcrime.securesms.net.SignalNetwork
import org.thoughtcrime.securesms.recipients.Recipient
import org.thoughtcrime.securesms.service.AttachmentProgressService
import org.thoughtcrime.securesms.util.RemoteConfig
import org.thoughtcrime.securesms.util.Util
import org.whispersystems.signalservice.api.attachment.AttachmentUploadResult
import org.whispersystems.signalservice.api.crypto.AttachmentCipherStreamUtil
import org.whispersystems.signalservice.api.messages.AttachmentTransferProgress
@@ -131,7 +132,7 @@ class AttachmentUploadJob private constructor(
throw NotPushRegisteredException()
}
SignalDatabase.attachments.createKeyIvIfNecessary(attachmentId)
SignalDatabase.attachments.createRemoteKeyIfNecessary(attachmentId)
val databaseAttachment = SignalDatabase.attachments.getAttachment(attachmentId) ?: throw InvalidAttachmentException("Cannot find the specified attachment.")
@@ -155,7 +156,7 @@ class AttachmentUploadJob private constructor(
.then { form ->
SignalNetwork.attachments.getResumableUploadSpec(
key = Base64.decode(databaseAttachment.remoteKey!!),
iv = databaseAttachment.remoteIv!!,
iv = Util.getSecretBytes(16),
uploadForm = form
)
}

View File

@@ -1,107 +0,0 @@
/*
* Copyright 2024 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.thoughtcrime.securesms.jobs
import org.signal.core.util.Base64
import org.signal.core.util.copyTo
import org.signal.core.util.logging.Log
import org.signal.core.util.stream.NullOutputStream
import org.signal.core.util.withinTransaction
import org.thoughtcrime.securesms.attachments.AttachmentId
import org.thoughtcrime.securesms.database.SignalDatabase
import org.thoughtcrime.securesms.jobmanager.Job
import org.thoughtcrime.securesms.jobs.protos.BackfillDigestJobData
import org.thoughtcrime.securesms.util.Util
import org.whispersystems.signalservice.api.crypto.AttachmentCipherOutputStream
import org.whispersystems.signalservice.internal.crypto.PaddingInputStream
import java.io.IOException
/**
* This goes through all attachments with pre-existing data and recalculates their digests.
* This is important for backupsV2, where we need to know an attachment's digest in advance.
*
* This job needs to be careful to (1) minimize time in the transaction, and (2) never write partial results to disk, i.e. only write the full (key/iv/digest)
* tuple together all at once (partial writes could poison the db, preventing us from retrying properly in the event of a crash or transient error).
*/
class BackfillDigestJob private constructor(
  private val attachmentId: AttachmentId,
  params: Parameters
) : Job(params) {

  companion object {
    private val TAG = Log.tag(BackfillDigestJob::class)

    const val KEY = "BackfillDigestJob"
    const val QUEUE = "BackfillDigestJob"
  }

  constructor(attachmentId: AttachmentId) : this(
    attachmentId = attachmentId,
    params = Parameters.Builder()
      .setQueue(QUEUE)
      .setMaxAttempts(3)
      .setLifespan(Parameters.IMMORTAL)
      .build()
  )

  override fun serialize(): ByteArray {
    // Only the attachment id is needed to re-create this job after a restart.
    return BackfillDigestJobData(attachmentId = attachmentId.id).encode()
  }

  override fun getFactoryKey(): String = KEY

  override fun run(): Result {
    // Gather everything we need inside a transaction to keep it short; the (slow) digest
    // computation happens afterwards, outside the transaction. Note the non-local returns:
    // any "skip" case exits the whole job with success from within the lambda.
    val (originalKey, originalIv, decryptingStream) = SignalDatabase.rawDatabase.withinTransaction {
      val attachment = SignalDatabase.attachments.getAttachment(attachmentId)
      if (attachment == null) {
        Log.w(TAG, "$attachmentId no longer exists! Skipping.")
        return Result.success()
      }

      if (!attachment.hasData) {
        Log.w(TAG, "$attachmentId no longer has any data! Skipping.")
        return Result.success()
      }

      val stream = try {
        SignalDatabase.attachments.getAttachmentStream(attachmentId, offset = 0)
      } catch (e: IOException) {
        Log.w(TAG, "Could not open a stream for $attachmentId. Assuming that the file no longer exists. Skipping.", e)
        return Result.success()
      }

      // In order to match the exact digest calculation, we need to use the same padding that we would use when uploading the attachment.
      Triple(attachment.remoteKey?.let { Base64.decode(it) }, attachment.remoteIv, PaddingInputStream(stream, attachment.size))
    }

    // Reuse existing key/iv when present so already-valid digests remain stable; generate only
    // the missing pieces.
    val key = originalKey ?: Util.getSecretBytes(64)
    val iv = originalIv ?: Util.getSecretBytes(16)

    // Encrypt into a null sink purely to obtain the transmitted digest.
    val cipherOutputStream = AttachmentCipherOutputStream(key, iv, NullOutputStream)
    decryptingStream.copyTo(cipherOutputStream)

    val digest = cipherOutputStream.transmittedDigest

    // Persist (key, iv, digest) together in one write -- never partial results.
    SignalDatabase.attachments.updateKeyIvDigest(
      attachmentId = attachmentId,
      key = key,
      iv = iv,
      digest = digest
    )

    return Result.success()
  }

  override fun onFailure() {
    Log.w(TAG, "Failed to backfill digest for $attachmentId!")
  }

  class Factory : Job.Factory<BackfillDigestJob> {
    override fun create(parameters: Parameters, serializedData: ByteArray?): BackfillDigestJob {
      val attachmentId = AttachmentId(BackfillDigestJobData.ADAPTER.decode(serializedData!!).attachmentId)
      return BackfillDigestJob(attachmentId, parameters)
    }
  }
}

View File

@@ -32,12 +32,13 @@ class BackfillDigestsForDataFileJob private constructor(
companion object {
private val TAG = Log.tag(BackfillDigestsForDataFileJob::class)
const val KEY = "BackfillDigestsForDataFileJob"
const val QUEUE = "BackfillDigestJob"
}
constructor(dataFile: String) : this(
dataFile = dataFile,
params = Parameters.Builder()
.setQueue(BackfillDigestJob.QUEUE)
.setQueue(QUEUE)
.setMaxAttempts(3)
.setLifespan(Parameters.IMMORTAL)
.build()
@@ -50,7 +51,7 @@ class BackfillDigestsForDataFileJob private constructor(
override fun getFactoryKey(): String = KEY
override fun run(): Result {
val (originalKey, originalIv, decryptingStream) = SignalDatabase.rawDatabase.withinTransaction {
val (originalKey, decryptingStream) = SignalDatabase.rawDatabase.withinTransaction {
val attachment = SignalDatabase.attachments.getMostRecentValidAttachmentUsingDataFile(dataFile)
if (attachment == null) {
Log.w(TAG, "No attachments using file $dataFile exist anymore! Skipping.")
@@ -65,21 +66,19 @@ class BackfillDigestsForDataFileJob private constructor(
}
// In order to match the exact digest calculation, we need to use the same padding that we would use when uploading the attachment.
Triple(attachment.remoteKey?.let { Base64.decode(it) }, attachment.remoteIv, PaddingInputStream(stream, attachment.size))
Pair(attachment.remoteKey?.let { Base64.decode(it) }, PaddingInputStream(stream, attachment.size))
}
val key = originalKey ?: Util.getSecretBytes(64)
val iv = originalIv ?: Util.getSecretBytes(16)
val cipherOutputStream = AttachmentCipherOutputStream(key, iv, NullOutputStream)
val cipherOutputStream = AttachmentCipherOutputStream(key, iv = null, NullOutputStream)
decryptingStream.copyTo(cipherOutputStream)
val digest = cipherOutputStream.transmittedDigest
SignalDatabase.attachments.updateKeyIvDigestByDataFile(
SignalDatabase.attachments.updateRemoteKeyAndDigestByDataFile(
dataFile = dataFile,
key = key,
iv = iv,
digest = digest
)

View File

@@ -100,7 +100,6 @@ class BackupMessagesJob private constructor(
.addConstraint(if (SignalStore.backup.backupWithCellular) NetworkConstraint.KEY else WifiConstraint.KEY)
.setMaxAttempts(3)
.setMaxInstancesForFactory(1)
.setQueue(BackfillDigestJob.QUEUE) // We want to ensure digests have been backfilled before this runs. Could eventually remove this constraint.
.build()
)
@@ -132,7 +131,7 @@ class BackupMessagesJob private constructor(
val stopwatch = Stopwatch("BackupMessagesJob")
SignalDatabase.attachments.createKeyIvDigestForAttachmentsThatNeedArchiveUpload().takeIf { it > 0 }?.let { count -> Log.w(TAG, "Needed to create $count key/iv/digests.") }
SignalDatabase.attachments.createRemoteKeyForAttachmentsThatNeedArchiveUpload().takeIf { it > 0 }?.let { count -> Log.w(TAG, "Needed to create $count key/iv/digests.") }
stopwatch.split("key-iv-digest")
if (isCanceled) {

View File

@@ -50,7 +50,6 @@ import org.thoughtcrime.securesms.migrations.AvatarColorStorageServiceMigrationJ
import org.thoughtcrime.securesms.migrations.AvatarIdRemovalMigrationJob;
import org.thoughtcrime.securesms.migrations.AvatarMigrationJob;
import org.thoughtcrime.securesms.migrations.BackfillDigestsForDuplicatesMigrationJob;
import org.thoughtcrime.securesms.migrations.BackfillDigestsMigrationJob;
import org.thoughtcrime.securesms.migrations.BackupJitterMigrationJob;
import org.thoughtcrime.securesms.migrations.BackupNotificationMigrationJob;
import org.thoughtcrime.securesms.migrations.BadE164MigrationJob;
@@ -131,7 +130,6 @@ public final class JobManagerFactories {
put(AutomaticSessionResetJob.KEY, new AutomaticSessionResetJob.Factory());
put(AvatarGroupsV1DownloadJob.KEY, new AvatarGroupsV1DownloadJob.Factory());
put(AvatarGroupsV2DownloadJob.KEY, new AvatarGroupsV2DownloadJob.Factory());
put(BackfillDigestJob.KEY, new BackfillDigestJob.Factory());
put(BackfillDigestsForDataFileJob.KEY, new BackfillDigestsForDataFileJob.Factory());
put(BackupDeleteJob.KEY, new BackupDeleteJob.Factory());
put(BackupMessagesJob.KEY, new BackupMessagesJob.Factory());
@@ -284,7 +282,6 @@ public final class JobManagerFactories {
put(AvatarColorStorageServiceMigrationJob.KEY, new AvatarColorStorageServiceMigrationJob.Factory());
put(AvatarIdRemovalMigrationJob.KEY, new AvatarIdRemovalMigrationJob.Factory());
put(AvatarMigrationJob.KEY, new AvatarMigrationJob.Factory());
put(BackfillDigestsMigrationJob.KEY, new BackfillDigestsMigrationJob.Factory());
put(BackfillDigestsForDuplicatesMigrationJob.KEY, new BackfillDigestsForDuplicatesMigrationJob.Factory());
put(BackupJitterMigrationJob.KEY, new BackupJitterMigrationJob.Factory());
put(BackupNotificationMigrationJob.KEY, new BackupNotificationMigrationJob.Factory());
@@ -394,6 +391,8 @@ public final class JobManagerFactories {
put("BackupMediaSnapshotSyncJob", new FailingJob.Factory());
put("PnpInitializeDevicesJob", new FailingJob.Factory());
put("BackupRestoreJob", new FailingJob.Factory());
put("BackfillDigestsMigrationJob", new PassingMigrationJob.Factory());
put("BackfillDigestJob", new FailingJob.Factory());
}};
}

View File

@@ -256,7 +256,7 @@ class RestoreAttachmentJob private constructor(
}
}
val downloadResult = if (useArchiveCdn) {
val decryptingStream = if (useArchiveCdn) {
archiveFile = SignalDatabase.attachments.getOrCreateArchiveTransferFile(attachmentId)
val cdnCredentials = BackupRepository.getCdnReadCredentials(BackupRepository.CredentialType.MEDIA, attachment.archiveCdn ?: RemoteConfig.backupFallbackArchiveCdn).successOrThrow().headers
@@ -280,7 +280,7 @@ class RestoreAttachmentJob private constructor(
)
}
SignalDatabase.attachments.finalizeAttachmentAfterDownload(messageId, attachmentId, downloadResult.dataStream, downloadResult.iv, if (manual) System.currentTimeMillis().milliseconds else null)
SignalDatabase.attachments.finalizeAttachmentAfterDownload(messageId, attachmentId, decryptingStream, if (manual) System.currentTimeMillis().milliseconds else null)
} catch (e: RangeException) {
val transferFile = archiveFile ?: attachmentFile
Log.w(TAG, "Range exception, file size " + transferFile.length(), e)

View File

@@ -131,7 +131,7 @@ class RestoreAttachmentThumbnailJob private constructor(
val pointer = attachment.createArchiveThumbnailPointer()
Log.i(TAG, "Downloading thumbnail for $attachmentId")
val downloadResult = AppDependencies.signalServiceMessageReceiver
val decryptingStream = AppDependencies.signalServiceMessageReceiver
.retrieveArchivedThumbnail(
SignalStore.backup.mediaRootBackupKey.deriveMediaSecrets(attachment.requireThumbnailMediaName()),
cdnCredentials,
@@ -142,7 +142,7 @@ class RestoreAttachmentThumbnailJob private constructor(
progressListener
)
SignalDatabase.attachments.finalizeAttachmentThumbnailAfterDownload(attachmentId, attachment.remoteDigest, downloadResult.dataStream, thumbnailTransferFile)
SignalDatabase.attachments.finalizeAttachmentThumbnailAfterDownload(attachmentId, attachment.remoteDigest, decryptingStream, thumbnailTransferFile)
if (!SignalDatabase.messages.isStory(messageId)) {
AppDependencies.messageNotifier.updateNotification(context)

View File

@@ -52,8 +52,8 @@ class RestoreLocalAttachmentJob private constructor(
possibleRestorableAttachments
.forEachIndexed { index, attachment ->
val fileInfo = if (attachment.remoteKey != null && attachment.remoteDigest != null) {
val mediaName = MediaName.fromDigest(attachment.remoteDigest).name
val fileInfo = if (attachment.plaintextHash != null && attachment.remoteKey != null) {
val mediaName = MediaName.fromPlaintextHashAndRemoteKey(attachment.plaintextHash, attachment.remoteKey).name
mediaNameToFileInfo[mediaName]
} else {
null
@@ -158,7 +158,7 @@ class RestoreLocalAttachmentJob private constructor(
incrementalDigest = null,
incrementalMacChunkSize = 0
).use { input ->
SignalDatabase.attachments.finalizeAttachmentAfterDownload(attachment.mmsId, attachment.attachmentId, input, iv)
SignalDatabase.attachments.finalizeAttachmentAfterDownload(attachment.mmsId, attachment.attachmentId, input)
}
} catch (e: InvalidMessageException) {
Log.w(TAG, "Experienced an InvalidMessageException while trying to read attachment.", e)

View File

@@ -26,6 +26,7 @@ import org.thoughtcrime.securesms.jobs.protos.UploadAttachmentToArchiveJobData
import org.thoughtcrime.securesms.keyvalue.SignalStore
import org.thoughtcrime.securesms.net.SignalNetwork
import org.thoughtcrime.securesms.service.AttachmentProgressService
import org.thoughtcrime.securesms.util.Util
import org.whispersystems.signalservice.api.NetworkResult
import org.whispersystems.signalservice.api.archive.ArchiveMediaUploadFormStatusCodes
import org.whispersystems.signalservice.api.attachment.AttachmentUploadResult
@@ -124,8 +125,8 @@ class UploadAttachmentToArchiveJob private constructor(
return Result.success()
}
if (attachment.remoteKey == null || attachment.remoteIv == null) {
Log.w(TAG, "[$attachmentId] Attachment is missing remote key or IV! Cannot upload.")
if (attachment.remoteKey == null) {
Log.w(TAG, "[$attachmentId] Attachment is missing remote key! Cannot upload.")
return Result.failure()
}
@@ -144,7 +145,7 @@ class UploadAttachmentToArchiveJob private constructor(
if (uploadSpec == null) {
Log.d(TAG, "[$attachmentId] Need an upload spec. Fetching...")
val (spec, result) = fetchResumableUploadSpec(key = Base64.decode(attachment.remoteKey), iv = attachment.remoteIv)
val (spec, result) = fetchResumableUploadSpec(key = Base64.decode(attachment.remoteKey), iv = Util.getSecretBytes(16))
if (result != null) {
return result
}

View File

@@ -732,7 +732,7 @@ public class ApplicationMigrations {
}
if (lastSeenVersion < Version.BACKFILL_DIGESTS_V2) {
jobs.put(Version.BACKFILL_DIGESTS_V2, new BackfillDigestsMigrationJob());
// jobs.put(Version.BACKFILL_DIGESTS_V2, new BackfillDigestsMigrationJob());
}
if (lastSeenVersion < Version.CALL_LINK_STORAGE_SYNC) {

View File

@@ -1,41 +0,0 @@
package org.thoughtcrime.securesms.migrations
import org.signal.core.util.logging.Log
import org.thoughtcrime.securesms.database.SignalDatabase
import org.thoughtcrime.securesms.dependencies.AppDependencies
import org.thoughtcrime.securesms.jobmanager.Job
import org.thoughtcrime.securesms.jobs.BackfillDigestJob
/**
* Finds all attachments that need new digests and schedules a [BackfillDigestJob] for each.
*/
internal class BackfillDigestsMigrationJob(
parameters: Parameters = Parameters.Builder().build()
) : MigrationJob(parameters) {
companion object {
val TAG = Log.tag(BackfillDigestsMigrationJob::class.java)
const val KEY = "BackfillDigestsMigrationJob"
}
override fun getFactoryKey(): String = KEY
override fun isUiBlocking(): Boolean = false
override fun performMigration() {
val jobs = SignalDatabase.attachments.getAttachmentsThatNeedNewDigests()
.map { BackfillDigestJob(it) }
AppDependencies.jobManager.addAll(jobs)
Log.i(TAG, "Enqueued ${jobs.size} backfill digest jobs.")
}
override fun shouldRetry(e: Exception): Boolean = false
class Factory : Job.Factory<BackfillDigestsMigrationJob> {
override fun create(parameters: Parameters, serializedData: ByteArray?): BackfillDigestsMigrationJob {
return BackfillDigestsMigrationJob(parameters)
}
}
}

View File

@@ -86,7 +86,7 @@ class PartDataSource implements DataSource {
throw new InvalidMessageException("Missing digest!");
}
this.inputStream = AttachmentCipherInputStream.createForArchivedMediaOuterAndInnerLayers(mediaKeyMaterial, archiveFile, originalCipherLength, attachment.size, decodedKey, attachment.remoteDigest, attachment.getIncrementalDigest(), attachment.incrementalMacChunkSize);
this.inputStream = AttachmentCipherInputStream.createForArchivedMedia(mediaKeyMaterial, archiveFile, originalCipherLength, attachment.size, decodedKey, attachment.remoteDigest, attachment.getIncrementalDigest(), attachment.incrementalMacChunkSize);
} catch (InvalidMessageException e) {
throw new IOException("Error decrypting attachment stream!", e);
}