Set up backupV2 infrastructure and testing.

Co-authored-by: Clark Chen <clark@signal.org>
This commit is contained in:
Greyson Parrelli
2023-09-13 15:15:33 -04:00
committed by Cody Henthorne
parent feb74d90f6
commit b540b5813e
47 changed files with 3782 additions and 274 deletions

View File

@@ -0,0 +1,108 @@
/*
* Copyright 2023 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.signal.core.util
import kotlin.math.ceil
import kotlin.math.floor
import kotlin.time.Duration.Companion.nanoseconds
import kotlin.time.DurationUnit
/**
 * Used to track performance metrics for large clusters of similar events.
 *
 * For instance, if you were doing a backup restore and had to import many different kinds of data in an unknown order, you could
 * use this to learn stats around how long each kind of data takes to import.
 *
 * It is assumed that all events are happening serially with no delays in between.
 *
 * The timer tracks things at nanosecond granularity, but presents data as fractional milliseconds for readability.
 */
class EventTimer {

  /** Raw per-event durations (nanoseconds), bucketed by group name, in arrival order. */
  private val groupDurations: MutableMap<String, MutableList<Long>> = mutableMapOf()

  private val startTime = System.nanoTime()

  /** Timestamp of the most recent emit (or construction) — each event is measured relative to this. */
  private var previousEventNanos: Long = startTime

  /**
   * Indicates an event in the specified group has finished. The event's duration is the time
   * elapsed since the previous [emit] call (or since construction, for the first event).
   */
  fun emit(group: String) {
    val nowNanos = System.nanoTime()
    groupDurations.getOrPut(group) { mutableListOf() }.add(nowNanos - previousEventNanos)
    previousEventNanos = nowNanos
  }

  /**
   * Stops the timer and returns a mapping of group -> [EventMetrics], which will tell you various statistics around timings for that group.
   */
  fun stop(): EventTimerResults {
    val metricsByGroup: Map<String, EventMetrics> = groupDurations.mapValues { (_, durations) ->
      // Sort once up front; percentile lookups require ordered data.
      val ordered: List<Long> = durations.sorted()
      EventMetrics(
        totalTime = ordered.sum().nanoseconds.toDouble(DurationUnit.MILLISECONDS),
        eventCount = ordered.size,
        sortedDurationNanos = ordered
      )
    }
    return EventTimerResults(metricsByGroup)
  }

  /** Read-only view of group -> metrics, plus a lazily-built human-readable [summary] line. */
  class EventTimerResults(data: Map<String, EventMetrics>) : Map<String, EventMetrics> by data {
    val summary by lazy {
      buildString {
        append("[overall] totalTime: ${data.values.sumOf { it.totalTime }.roundedString(2)} ")
        for ((group, metrics) in data) {
          append("[$group] totalTime: ${metrics.totalTime.roundedString(2)}, count: ${metrics.eventCount}, p50: ${metrics.p(50)}, p90: ${metrics.p(90)}, p99: ${metrics.p(99)} ")
        }
      }
    }
  }

  data class EventMetrics(
    /** The sum of all event durations, in fractional milliseconds. */
    val totalTime: Double,
    /** Total number of events observed. */
    val eventCount: Int,
    /** Raw durations in nanoseconds, sorted ascending — the basis for percentile math. */
    private val sortedDurationNanos: List<Long>
  ) {
    /**
     * Returns the percentile of the duration data (e.g. p50, p90) as a formatted string containing fractional milliseconds rounded to the requested number of decimal places.
     */
    fun p(percentile: Int, decimalPlaces: Int = 2): String {
      return pNanos(percentile).nanoseconds.toDouble(DurationUnit.MILLISECONDS).roundedString(decimalPlaces)
    }

    /** Linearly-interpolated percentile over [sortedDurationNanos], in nanoseconds. Returns 0 when there is no data. */
    private fun pNanos(percentile: Int): Long {
      if (sortedDurationNanos.isEmpty()) {
        return 0L
      }

      // Fractional index into the sorted data for the requested percentile.
      val position: Float = (percentile / 100f) * (sortedDurationNanos.size - 1)
      val lowerIndex: Int = floor(position).toInt()
      val upperIndex: Int = ceil(position).toInt()

      if (lowerIndex == upperIndex) {
        return sortedDurationNanos[lowerIndex]
      }

      // Interpolate between the two neighboring samples.
      val fraction: Float = position - lowerIndex
      val lowerValue: Long = sortedDurationNanos[lowerIndex]
      val upperValue: Long = sortedDurationNanos[upperIndex]
      return floor(lowerValue + (upperValue - lowerValue) * fraction).toLong()
    }
  }
}

View File

@@ -0,0 +1,38 @@
/*
* Copyright 2023 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.signal.core.util
import java.io.IOException
import java.io.InputStream
import kotlin.jvm.Throws
/**
 * Reads the entire remaining stream into a [ByteArray].
 */
@Throws(IOException::class)
fun InputStream.readFully(): ByteArray = StreamUtil.readFully(this)
/**
 * Reads data from the stream into [buffer] until it is completely full.
 * Throws an [IOException] if the stream doesn't have enough data to fill the buffer.
 */
@Throws(IOException::class)
fun InputStream.readFully(buffer: ByteArray) {
  StreamUtil.readFully(this, buffer)
}
/**
 * Reads exactly [length] bytes from the stream and returns them as a [ByteArray].
 * Throws an [IOException] if the stream doesn't have that many bytes.
 */
@Throws(IOException::class)
fun InputStream.readNBytesOrThrow(length: Int): ByteArray {
  return ByteArray(length).also { this.readFully(it) }
}

View File

@@ -109,6 +109,15 @@ object SqlUtil {
}
}
/**
 * For tables that have an autoincrementing primary key, this will reset the key to start back at 1.
 * IMPORTANT: This is quite dangerous! Only do this if you're effectively resetting the entire database.
 */
@JvmStatic
fun resetAutoIncrementValue(db: SupportSQLiteDatabase, targetTable: String) {
  // SQLite stores each AUTOINCREMENT table's high-water mark in the internal sqlite_sequence
  // table; deleting the row for [targetTable] makes the next insert start numbering from 1 again.
  db.execSQL("DELETE FROM sqlite_sequence WHERE name=?", arrayOf(targetTable))
}
@JvmStatic
fun isEmpty(db: SupportSQLiteDatabase, table: String): Boolean {
db.query("SELECT COUNT(*) FROM $table", null).use { cursor ->
@@ -388,36 +397,30 @@ object SqlUtil {
val builder = StringBuilder()
builder.append("INSERT INTO ").append(tableName).append(" (")
for (i in columns.indices) {
builder.append(columns[i])
if (i < columns.size - 1) {
builder.append(", ")
}
}
val columnString = columns.joinToString(separator = ", ")
builder.append(columnString)
builder.append(") VALUES ")
val placeholder = StringBuilder()
placeholder.append("(")
for (i in columns.indices) {
placeholder.append("?")
if (i < columns.size - 1) {
placeholder.append(", ")
val placeholders = contentValues
.map { values ->
columns
.map { column ->
if (values[column] != null) {
if (values[column] is ByteArray) {
"X'${Hex.toStringCondensed(values[column] as ByteArray).uppercase()}'"
} else {
"?"
}
} else {
"null"
}
}
.joinToString(separator = ", ", prefix = "(", postfix = ")")
}
}
.joinToString(separator = ", ")
placeholder.append(")")
var i = 0
val len = contentValues.size
while (i < len) {
builder.append(placeholder)
if (i < len - 1) {
builder.append(", ")
}
i++
}
builder.append(placeholders)
val query = builder.toString()
val args: MutableList<String> = mutableListOf()
@@ -425,7 +428,10 @@ object SqlUtil {
for (values in contentValues) {
for (column in columns) {
val value = values[column]
args += if (value != null) values[column].toString() else "null"
if (value != null && value !is ByteArray) {
args += value.toString()
}
}
}

View File

@@ -42,3 +42,15 @@ fun String?.emptyIfNull(): String {
/** Trims common indentation and collapses the string onto a single line, replacing each newline with a space. */
fun String.toSingleLine(): String {
  return trimIndent().replace("\n", " ")
}
/** Returns null if the string is null or empty; otherwise returns the string unchanged. */
fun String?.nullIfEmpty(): String? {
  return if (isNullOrEmpty()) null else this
}
/** Returns null if the string is null, empty, or whitespace-only; otherwise returns the string unchanged. */
fun String?.nullIfBlank(): String? {
  return if (isNullOrBlank()) null else this
}

View File

@@ -229,6 +229,24 @@ public final class SqlUtilTest {
assertArrayEquals(new String[] { "1", "2" }, output.get(0).getWhereArgs());
}
@Test
public void buildBulkInsert_single_singleBatch_containsNulls() {
  // A single row where one column is explicitly null.
  ContentValues row = new ContentValues();
  row.put("a", 1);
  row.put("b", 2);
  row.put("c", (String) null);

  List<ContentValues> rows = new ArrayList<>();
  rows.add(row);

  List<SqlUtil.Query> output = SqlUtil.buildBulkInsert("mytable", new String[] { "a", "b", "c"}, rows);

  // Null values are inlined as SQL `null` rather than bound as args.
  assertEquals(1, output.size());
  assertEquals("INSERT INTO mytable (a, b, c) VALUES (?, ?, null)", output.get(0).getWhere());
  assertArrayEquals(new String[] { "1", "2" }, output.get(0).getWhereArgs());
}
@Test
public void buildBulkInsert_multiple_singleBatch() {
List<ContentValues> contentValues = new ArrayList<>();