Improve memory usage of new APNG renderer by making it streaming.

This commit is contained in:
Greyson Parrelli
2026-03-20 21:50:12 -04:00
committed by Cody Henthorne
parent 48374e6950
commit 25b01a30be
13 changed files with 485 additions and 341 deletions

View File

@@ -33,7 +33,7 @@ import org.thoughtcrime.securesms.giph.model.ChunkedImageUrl;
import org.thoughtcrime.securesms.glide.cache.ApngDrawableTranscoder;
import org.thoughtcrime.securesms.glide.cache.ApngFrameDrawableTranscoder;
import org.thoughtcrime.securesms.glide.cache.ApngInputStreamFactoryResourceDecoder;
import org.thoughtcrime.securesms.glide.cache.ApngInputStreamResourceDecoder;
import org.thoughtcrime.securesms.glide.cache.EncryptedApngCacheDecoder;
import org.thoughtcrime.securesms.glide.cache.ByteBufferApngDecoder;
import org.thoughtcrime.securesms.glide.cache.EncryptedApngCacheEncoder;
import org.thoughtcrime.securesms.glide.cache.EncryptedApngResourceEncoder;
@@ -93,10 +93,9 @@ public class SignalGlideComponents implements RegisterGlideComponents {
if (SignalStore.labs().getNewApngRenderer()) {
registry.prepend(InputStream.class, ApngDecoder.class, new ApngInputStreamResourceDecoder());
registry.prepend(InputStreamFactory.class, ApngDecoder.class, new ApngInputStreamFactoryResourceDecoder());
registry.prepend(ApngDecoder.class, new EncryptedApngResourceEncoder(secret));
registry.prepend(File.class, ApngDecoder.class, new EncryptedCacheDecoder<>(secret, new ApngInputStreamResourceDecoder()));
registry.prepend(File.class, ApngDecoder.class, new EncryptedApngCacheDecoder(secret));
registry.register(ApngDecoder.class, Drawable.class, new ApngDrawableTranscoder());
} else {
ByteBufferApngDecoder byteBufferApngDecoder = new ByteBufferApngDecoder();

View File

@@ -9,10 +9,8 @@ import com.bumptech.glide.load.Options
import com.bumptech.glide.load.ResourceDecoder
import com.bumptech.glide.load.engine.Resource
import org.signal.apng.ApngDecoder
import org.signal.core.util.readFully
import org.signal.glide.apng.ApngOptions
import org.signal.glide.common.io.InputStreamFactory
import java.io.ByteArrayInputStream
import java.io.IOException
class ApngInputStreamFactoryResourceDecoder : ResourceDecoder<InputStreamFactory, ApngDecoder> {
@@ -27,8 +25,7 @@ class ApngInputStreamFactoryResourceDecoder : ResourceDecoder<InputStreamFactory
@Throws(IOException::class)
override fun decode(source: InputStreamFactory, width: Int, height: Int, options: Options): Resource<ApngDecoder>? {
val data: ByteArray = source.create().readFully()
val decoder = ApngDecoder(ByteArrayInputStream(data))
return ApngResource(decoder, data.size)
val decoder = ApngDecoder.create { source.create() }
return ApngResource(decoder)
}
}

View File

@@ -1,39 +0,0 @@
/*
* Copyright 2024 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.thoughtcrime.securesms.glide.cache
import com.bumptech.glide.load.Options
import com.bumptech.glide.load.ResourceDecoder
import com.bumptech.glide.load.engine.Resource
import org.signal.apng.ApngDecoder
import org.signal.core.util.readFully
import org.signal.core.util.stream.LimitedInputStream
import org.signal.glide.apng.ApngOptions
import java.io.ByteArrayInputStream
import java.io.IOException
import java.io.InputStream
/**
 * Glide [ResourceDecoder] that turns a raw [InputStream] into an [ApngDecoder].
 *
 * NOTE(review): [decode] buffers the entire stream into memory via [readFully]
 * before constructing the decoder, so peak memory scales with file size.
 */
class ApngInputStreamResourceDecoder : ResourceDecoder<InputStream, ApngDecoder> {
companion object {
/** Set to match [com.bumptech.glide.load.data.InputStreamRewinder]'s read limit */
private const val READ_LIMIT: Long = 5 * 1024 * 1024
}
// Only claims the resource when animation is requested and the bytes look like an APNG.
// The LimitedInputStream caps how far the signature sniff can read.
override fun handles(source: InputStream, options: Options): Boolean {
return if (options.get(ApngOptions.ANIMATE)!!) {
ApngDecoder.isApng(LimitedInputStream(source, READ_LIMIT))
} else {
false
}
}
@Throws(IOException::class)
override fun decode(source: InputStream, width: Int, height: Int, options: Options): Resource<ApngDecoder>? {
// Read the whole payload up front; its size is reported to Glide for cache accounting.
val data: ByteArray = source.readFully()
val decoder = ApngDecoder(ByteArrayInputStream(data))
return ApngResource(decoder, data.size)
}
}

View File

@@ -8,14 +8,14 @@ package org.thoughtcrime.securesms.glide.cache
import com.bumptech.glide.load.engine.Resource
import org.signal.apng.ApngDecoder
class ApngResource(private val decoder: ApngDecoder, private val size: Int) : Resource<ApngDecoder> {
class ApngResource(private val decoder: ApngDecoder) : Resource<ApngDecoder> {
override fun getResourceClass(): Class<ApngDecoder> = ApngDecoder::class.java
override fun get(): ApngDecoder = decoder
override fun getSize(): Int = size
override fun getSize(): Int = 0
override fun recycle() {
decoder.inputStream.close()
decoder.close()
}
}

View File

@@ -0,0 +1,45 @@
/*
* Copyright 2024 Signal Messenger, LLC
* SPDX-License-Identifier: AGPL-3.0-only
*/
package org.thoughtcrime.securesms.glide.cache
import com.bumptech.glide.load.Options
import com.bumptech.glide.load.ResourceDecoder
import com.bumptech.glide.load.engine.Resource
import org.signal.apng.ApngDecoder
import org.signal.core.util.logging.Log
import org.signal.core.util.stream.LimitedInputStream
import org.signal.glide.apng.ApngOptions
import java.io.File
import java.io.IOException
/**
 * Glide [ResourceDecoder] that reads an encrypted APNG out of the disk cache.
 * [handles] only sniffs a bounded, decrypted prefix; [decode] hands the decoder a
 * stream factory so frame data can be re-read on demand instead of buffered.
 */
internal class EncryptedApngCacheDecoder(private val secret: ByteArray) : EncryptedCoder(), ResourceDecoder<File, ApngDecoder> {

  companion object {
    private val TAG = Log.tag(EncryptedApngCacheDecoder::class.java)

    /** Upper bound on how many decrypted bytes we inspect when sniffing for the APNG signature. */
    private const val READ_LIMIT: Long = 5 * 1024 * 1024
  }

  override fun handles(source: File, options: Options): Boolean {
    val animationRequested = options.get(ApngOptions.ANIMATE) == true
    if (!animationRequested) {
      return false
    }

    return try {
      createEncryptedInputStream(secret, source).use { decryptedStream ->
        ApngDecoder.isApng(LimitedInputStream(decryptedStream, READ_LIMIT))
      }
    } catch (e: IOException) {
      Log.w(TAG, e)
      false
    }
  }

  @Throws(IOException::class)
  override fun decode(source: File, width: Int, height: Int, options: Options): Resource<ApngDecoder>? {
    return ApngResource(ApngDecoder.create { createEncryptedInputStream(secret, source) })
  }
}

View File

@@ -17,11 +17,11 @@ internal class EncryptedApngResourceEncoder(private val secret: ByteArray) : Enc
override fun encode(data: Resource<ApngDecoder>, file: File, options: Options): Boolean {
try {
val input = data.get().inputStream
val input = data.get().streamFactory()
val output = createEncryptedOutputStream(secret, file)
input.reset()
input.copyTo(output)
input.close()
return true
} catch (e: IOException) {

View File

@@ -88,7 +88,6 @@ import org.thoughtcrime.securesms.stickers.StickerLocator
import org.thoughtcrime.securesms.util.StickyHeaderDecoration
import org.thoughtcrime.securesms.util.viewModel
import java.util.Locale
import org.signal.core.ui.R as CoreUiR
class StarredMessagesActivity : PassphraseRequiredActivity() {

View File

@@ -139,6 +139,27 @@ fun InputStream.copyTo(outputStream: OutputStream, closeInputStream: Boolean = t
return StreamUtil.copy(this, outputStream, closeInputStream, closeOutputStream)
}
/**
 * Skips exactly [n] bytes from this stream. Unlike [InputStream.skip], this method
 * guarantees all bytes are skipped by looping and falling back to [read] if needed.
 *
 * @throws IOException if the stream ends before [n] bytes have been skipped.
 */
@Throws(IOException::class)
fun InputStream.skipNBytesOrThrow(n: Long) {
  var left = n
  while (left > 0) {
    val advanced = skip(left)
    when {
      // skip() made progress -- keep going.
      advanced > 0 -> left -= advanced
      // skip() stalled, but we could still read a byte, so count it as one skipped byte.
      read() != -1 -> left--
      // Neither skip() nor read() can advance: the stream is exhausted.
      else -> throw IOException("Stream ended before $n bytes could be skipped (${n - left} skipped)")
    }
  }
}
/**
* Returns true if every byte in this stream matches the predicate, otherwise false.
*/

View File

@@ -289,7 +289,8 @@ class DemoActivity : ComponentActivity() {
description.text = testModel.description
val decoder = ApngDecoder(itemView.context.assets.open(testModel.filename))
val context = itemView.context
val decoder = ApngDecoder.create { context.assets.open(testModel.filename) }
val drawable = ApngDrawable(decoder)
image.setImageDrawable(drawable)
}

View File

@@ -26,7 +26,7 @@ class PlayerActivity : ComponentActivity() {
val nextButton = findViewById<Button>(R.id.next_button)
frameMetadata = findViewById<TextView>(R.id.frame_metadata)
val decoder = ApngDecoder(assets.open("broken03.png"))
val decoder = ApngDecoder.create { assets.open("broken03.png") }
val drawable = ApngDrawable(decoder).apply {
stop()
debugDrawBounds = true

View File

@@ -10,22 +10,39 @@ import android.graphics.BitmapFactory
import androidx.annotation.WorkerThread
import org.signal.core.util.readNBytesOrThrow
import org.signal.core.util.readUInt
import org.signal.core.util.skipNBytesOrThrow
import org.signal.core.util.stream.Crc32OutputStream
import org.signal.core.util.toUInt
import org.signal.core.util.toUShort
import org.signal.core.util.writeUInt
import java.io.ByteArrayOutputStream
import java.io.Closeable
import java.io.EOFException
import java.io.IOException
import java.io.InputStream
import java.io.OutputStream
import java.util.zip.CRC32
/**
* Full spec:
* http://www.w3.org/TR/PNG/
* A streaming APNG decoder that only holds lightweight metadata in memory.
*
* During [create], the stream is scanned once to record IHDR fields, prefix chunks
* (palette, gamma, etc.), and per-frame fcTL metadata + byte offsets into the stream
* where image data lives. No frame image data is retained.
*
* At draw time, [decodeFrame] opens a stream from the factory and reads forward to the
* requested frame's data. Since frames are almost always requested in order, each call
* reads forward from the current position. The stream is only reopened when the animation
* loops back to an earlier frame.
*
* Full spec: http://www.w3.org/TR/PNG/
*/
class ApngDecoder(val inputStream: InputStream) {
class ApngDecoder private constructor(
val streamFactory: () -> InputStream,
val metadata: Metadata,
val frames: List<Frame>,
private val ihdr: Chunk.IHDR,
private val prefixChunks: List<Chunk.ArbitraryChunk>
) : Closeable {
companion object {
private val PNG_MAGIC = byteArrayOf(0x89.toByte(), 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A)
@@ -58,6 +75,226 @@ class ApngDecoder(val inputStream: InputStream) {
}
}
/**
 * Scans the stream to build metadata, then closes it. No frame image data is retained.
 *
 * @param streamFactory Factory that can open a fresh stream over the APNG data; it is
 * invoked once here for the metadata scan and retained by the decoder for frame reads.
 * @throws IOException if the stream is not a valid APNG or ends prematurely.
 */
@Throws(IOException::class)
fun create(streamFactory: () -> InputStream): ApngDecoder {
  // use {} replaces the manual try/finally: the scan stream is always closed, and a
  // close() failure during an in-flight exception is attached as suppressed rather
  // than replacing the original error.
  return streamFactory().use { scanStream ->
    scanMetadata(scanStream, streamFactory)
  }
}
/**
* So, PNGs are composed of chunks of various types. An APNG is a valid PNG on its own, but has some extra
* chunks that can be read to play the animation. The chunk structure of an APNG looks something like this:
*
* ---------------------
* IHDR - Mandatory first chunk, contains metadata about the image (width, height, etc).
*
* [ in any order...
* acTL - Contains metadata about the animation. The presence of this chunk is what tells us that we have an APNG.
* fcTL - (Optional) If present, it tells us that the first IDAT chunk is part of the animation itself. Contains information about how the frame is
* rendered (see below).
* xxx - There are plenty of other possible chunks that can go here. We don't care about them, but we need to remember them and give them to the
* PNG encoder as we create each frame. Could be critical data like what palette is used for rendering.
* ]
*
* IDAT - Contains the compressed image data. For an APNG, the first IDAT represents the "default" state of the image that will be shown even if the
* renderer doesn't support APNGs. If an fcTL is present before this, the IDAT also represents the first frame of the animation.
*
* ( in pairs, repeated for each frame...
* fcTL - This contains metadata about the frame, such as dimensions, delay, and positioning.
* fdAT - This contains the actual frame data that we want to render.
* )
*
* xxx - There are other possible chunks that could be placed after the animation sequence but before the end of the file. Usually things like tEXt chunks
* that contain metadata and whatnot. They're not important.
* IEND - Mandatory last chunk. Marks the end of the file.
* ---------------------
*
* We need to read and recognize a subset of these chunks that tell us how the APNG is structured. However, the actual encoding/decoding of the PNG data
* can be done by the system. We just need to parse out all of the frames and other metadata in order to render the animation.
*
* Unlike the old approach (which read all frame data into memory), this method only records byte offsets into the stream where frame data lives.
* The actual frame data is read on demand in [decodeFrame].
*/
private fun scanMetadata(inputStream: InputStream, streamFactory: () -> InputStream): ApngDecoder {
  val scanner = StreamScanner(inputStream)

  // Read the magic bytes to verify that this is a PNG
  val magic = scanner.readBytes(8)
  if (!magic.contentEquals(PNG_MAGIC)) {
    throw IllegalArgumentException("Not a PNG!")
  }

  // The IHDR chunk is the first chunk in a PNG file and contains metadata about the image.
  // Per spec it must appear first, so if it's missing the file is invalid.
  val ihdrLength = scanner.readUInt()
  val ihdrType = scanner.readBytes(4).toString(Charsets.US_ASCII)
  if (ihdrType != "IHDR") {
    throw IOException("First chunk is not IHDR!")
  }

  val ihdrData = scanner.readBytes(ihdrLength.toInt())
  scanner.skipBytes(4) // CRC

  // IHDR field layout: 4-byte width, 4-byte height, then five single-byte fields.
  val ihdr = Chunk.IHDR(
    width = ihdrData.sliceArray(0 until 4).toUInt(),
    height = ihdrData.sliceArray(4 until 8).toUInt(),
    bitDepth = ihdrData[8],
    colorType = ihdrData[9],
    compressionMethod = ihdrData[10],
    filterMethod = ihdrData[11],
    interlaceMethod = ihdrData[12]
  )

  // Next, we want to read all of the chunks up to the first IDAT chunk.
  // The first IDAT chunk represents the default image, and possibly the first frame of the animation (depending on the presence of an fcTL chunk).
  // In order for this to be a valid APNG, there _must_ be an acTL chunk before the first IDAT chunk.
  val framePrefixChunks: MutableList<Chunk.ArbitraryChunk> = mutableListOf()
  var earlyActl: Chunk.acTL? = null
  var earlyFctl: Chunk.fcTL? = null

  var chunkLength: UInt
  var chunkType: String

  while (true) {
    chunkLength = scanner.readUInt()
    chunkType = scanner.readBytes(4).toString(Charsets.US_ASCII)

    if (chunkType == "IDAT") {
      // Exit with chunkLength/chunkType describing the first IDAT; its data begins at scanner.position.
      break
    }

    when (chunkType) {
      "acTL" -> {
        val data = scanner.readBytes(chunkLength.toInt())
        scanner.skipBytes(4) // CRC
        earlyActl = Chunk.acTL(
          numFrames = data.sliceArray(0 until 4).toUInt(),
          numPlays = data.sliceArray(4 until 8).toUInt()
        )
      }
      "fcTL" -> {
        val data = scanner.readBytes(chunkLength.toInt())
        scanner.skipBytes(4) // CRC
        earlyFctl = parseFctl(data)
      }
      else -> {
        // Unknown chunk (palette, gamma, etc.). Keep it whole -- it gets replayed in front of every frame we re-encode.
        val data = scanner.readBytes(chunkLength.toInt())
        val crc = scanner.readUInt()
        framePrefixChunks += Chunk.ArbitraryChunk(chunkLength, chunkType, data, crc)
      }
    }
  }

  if (earlyActl == null) {
    throw IOException("Missing acTL chunk! Not an APNG!")
  }

  val metadata = Metadata(
    width = ihdr.width.toInt(),
    height = ihdr.height.toInt(),
    // A numPlays of 0 means "loop forever" per the APNG spec, so map it to Int.MAX_VALUE.
    numPlays = earlyActl.numPlays.toInt().takeIf { it > 0 } ?: Int.MAX_VALUE
  )

  val frames: MutableList<Frame> = mutableListOf()

  // Collect all consecutive IDAT data regions -- PNG allows splitting image data across multiple IDATs.
  // We just read the first IDAT's length + type. Data starts at the current position.
  val idatRegions = mutableListOf<DataRegion>()
  idatRegions += DataRegion(streamOffset = scanner.position, length = chunkLength.toLong())
  scanner.skipBytes(chunkLength.toLong() + 4) // data + CRC

  // Collect more consecutive IDATs
  chunkLength = scanner.readUInt()
  chunkType = scanner.readBytes(4).toString(Charsets.US_ASCII)
  while (chunkType == "IDAT") {
    idatRegions += DataRegion(streamOffset = scanner.position, length = chunkLength.toLong())
    scanner.skipBytes(chunkLength.toLong() + 4) // data + CRC
    chunkLength = scanner.readUInt()
    chunkType = scanner.readBytes(4).toString(Charsets.US_ASCII)
  }

  // The IDAT run is only an animation frame if an fcTL preceded it; otherwise it's just the fallback image.
  if (earlyFctl != null) {
    frames += Frame(fcTL = earlyFctl, dataRegions = idatRegions, isIdat = true)
  }

  // Now process remaining chunks: fcTL + fdAT pairs
  // chunkLength/chunkType already hold the first non-IDAT chunk after the IDAT run
  while (chunkType != "IEND") {
    // Scan forward to the next fcTL
    while (chunkType != "fcTL") {
      scanner.skipBytes(chunkLength.toLong() + 4) // data + CRC
      chunkLength = scanner.readUInt()
      chunkType = scanner.readBytes(4).toString(Charsets.US_ASCII)
      if (chunkType == "IEND") break
    }
    if (chunkType == "IEND") break

    // Read the fcTL data
    val fctlData = scanner.readBytes(chunkLength.toInt())
    scanner.skipBytes(4) // CRC
    val fctl = parseFctl(fctlData)

    // Collect all consecutive fdAT data regions -- frames can span multiple fdATs per the spec
    val fdatRegions = mutableListOf<DataRegion>()
    chunkLength = scanner.readUInt()
    chunkType = scanner.readBytes(4).toString(Charsets.US_ASCII)
    while (chunkType == "fdAT") {
      // fdAT data starts with 4-byte sequence number, then the actual image data
      scanner.skipBytes(4) // sequence number
      val imageDataLength = chunkLength.toLong() - 4
      fdatRegions += DataRegion(streamOffset = scanner.position, length = imageDataLength)
      scanner.skipBytes(imageDataLength + 4) // image data + CRC
      chunkLength = scanner.readUInt()
      chunkType = scanner.readBytes(4).toString(Charsets.US_ASCII)
    }

    // An fcTL with no following fdAT has no renderable data; drop it rather than emit an empty frame.
    if (fdatRegions.isNotEmpty()) {
      frames += Frame(fcTL = fctl, dataRegions = fdatRegions, isIdat = false)
    }
  }

  return ApngDecoder(
    streamFactory = streamFactory,
    metadata = metadata,
    frames = frames,
    ihdr = ihdr,
    prefixChunks = framePrefixChunks
  )
}
/**
 * Parses the payload of an fcTL chunk into its structured form.
 * Field layout: 4-byte sequence number, 4-byte width/height, 4-byte x/y offsets,
 * 2-byte delay numerator/denominator, then single-byte dispose and blend ops.
 */
private fun parseFctl(data: ByteArray): Chunk.fcTL {
  // Decode each field in payload order so any truncated-payload failure surfaces
  // at the same point it would when building the chunk inline.
  val sequenceNumber = data.sliceArray(0 until 4).toUInt()
  val width = data.sliceArray(4 until 8).toUInt()
  val height = data.sliceArray(8 until 12).toUInt()
  val xOffset = data.sliceArray(12 until 16).toUInt()
  val yOffset = data.sliceArray(16 until 20).toUInt()
  val delayNum = data.sliceArray(20 until 22).toUShort()
  val delayDen = data.sliceArray(22 until 24).toUShort()

  val disposeOp = when (data[24]) {
    0.toByte() -> Chunk.fcTL.DisposeOp.NONE
    1.toByte() -> Chunk.fcTL.DisposeOp.BACKGROUND
    2.toByte() -> Chunk.fcTL.DisposeOp.PREVIOUS
    else -> throw IOException("Invalid disposeOp: ${data[24]}")
  }

  val blendOp = when (data[25]) {
    0.toByte() -> Chunk.fcTL.BlendOp.SOURCE
    1.toByte() -> Chunk.fcTL.BlendOp.OVER
    else -> throw IOException("Invalid blendOp: ${data[25]}")
  }

  return Chunk.fcTL(
    sequenceNumber = sequenceNumber,
    width = width,
    height = height,
    xOffset = xOffset,
    yOffset = yOffset,
    delayNum = delayNum,
    delayDen = delayDen,
    disposeOp = disposeOp,
    blendOp = blendOp
  )
}
private fun OutputStream.withCrc32(block: OutputStream.() -> Unit): UInt {
return Crc32OutputStream(this)
.apply(block)
@@ -66,136 +303,73 @@ class ApngDecoder(val inputStream: InputStream) {
}
}
var metadata: Metadata? = null
private var currentStream: InputStream? = null
private var currentStreamPos: Long = 0
/**
* So, PNGs are composed of chunks of various types. An APNG is a valid PNG on its own, but has some extra
* chunks that can be read to play the animation. The chunk structure of an APNG looks something like this:
*
* ---------------------
* IHDR - Mandatory first chunk, contains metadata about the image (width, height, etc).
*
* [ in any order...
* acTL - Contains metadata about the animation. The presence of this chunk is what tells us that we have an APNG.
* fcTL - (Optional) If present, it tells us that the first IDAT chunk is part of the animation itself. Contains information about how the frame is
* rendered (see below).
* xxx - There are plenty of other possible chunks that can go here. We don't care about them, but we need to remember them and give them to the
* PNG encoder as we create each frame. Could be critical data like what palette is used for rendering.
* ]
*
* IDAT - Contains the compressed image data. For an APNG, the first IDAT represents the "default" state of the image that will be shown even if the
* renderer doesn't support APNGs. If an fcTL is present before this, the IDAT also represents the first frame of the animation.
*
* ( in pairs, repeated for each frame...
* fcTL - This contains metadata about the frame, such as dimensions, delay, and positioning.
* fdAT - This contains the actual frame data that we want to render.
* )
*
* xxx - There are other possible chunks that could be placed after the animation sequence but before the end of the file. Usually things like tEXt chunks
* that contain metadata and whatnot. They're not important.
* IEND - Mandatory last chunk. Marks the end of the file.
* ---------------------
*
* We need to read and recognize a subset of these chunks that tell us how the APNG is structured. However, the actual encoding/decoding of the PNG data
* can be done by the system. We just need to parse out all of the frames and other metadata in order to render the animation.
* Decodes the frame at the given index by streaming from the source.
* For sequential access (the normal case), this just reads forward from the current position.
* When looping back to an earlier frame, the stream is reopened.
*/
fun debugGetAllFrames(): List<Frame> {
// Read the magic bytes to verify that this is a PNG
val magic = inputStream.readNBytesOrThrow(8)
if (!magic.contentEquals(PNG_MAGIC)) {
throw IllegalArgumentException("Not a PNG!")
@WorkerThread
fun decodeFrame(index: Int): Bitmap {
val frame = frames[index]
val regions = frame.dataRegions
val targetOffset = regions.first().streamOffset
if (currentStream == null || currentStreamPos > targetOffset) {
currentStream?.close()
currentStream = streamFactory()
currentStreamPos = 0
}
// The IHDR chunk is the first chunk in a PNG file and contains metadata about the image.
// Per spec it must appear first, so if it's missing the file is invalid.
val ihdr = inputStream.readChunk() ?: throw IOException("Missing IHDR chunk!")
if (ihdr !is Chunk.IHDR) {
throw IOException("First chunk is not IHDR!")
val stream = currentStream!!
// Skip forward to the first data region
val toSkip = targetOffset - currentStreamPos
if (toSkip > 0) {
stream.skipNBytesOrThrow(toSkip)
currentStreamPos = targetOffset
}
// Next, we want to read all of the chunks up to the first IDAT chunk.
// The first IDAT chunk represents the default image, and possibly the first frame of the animation (depending on the presence of an fcTL chunk).
// In order for this to be a valid APNG, there _must_ be an acTL chunk before the first IDAT chunk.
val framePrefixChunks: MutableList<Chunk.ArbitraryChunk> = mutableListOf()
var earlyActl: Chunk.acTL? = null
var earlyFctl: Chunk.fcTL? = null
// Read all data regions for this frame
val totalDataSize = regions.sumOf { it.length.toInt() }
val frameData = ByteArray(totalDataSize)
var writeOffset = 0
var chunk = inputStream.readChunk()
while (chunk != null && chunk !is Chunk.IDAT) {
when (chunk) {
is Chunk.acTL -> earlyActl = chunk
is Chunk.fcTL -> earlyFctl = chunk
is Chunk.ArbitraryChunk -> framePrefixChunks += chunk
else -> throw IOException("Unexpected chunk type before IDAT: $chunk")
}
chunk = inputStream.readChunk()
}
if (chunk == null) {
throw EOFException("Hit the end of the file before we hit an IDAT!")
}
if (earlyActl == null) {
throw IOException("Missing acTL chunk! Not an APNG!")
}
metadata = Metadata(
width = ihdr.width.toInt(),
height = ihdr.height.toInt(),
numPlays = earlyActl.numPlays.toInt().takeIf { it > 0 } ?: Int.MAX_VALUE
)
// Collect all consecutive IDAT chunks -- PNG allows splitting image data across multiple IDATs
val idatData = ByteArrayOutputStream()
idatData.write((chunk as Chunk.IDAT).data)
chunk = inputStream.readChunk()
while (chunk is Chunk.IDAT) {
idatData.write(chunk.data)
chunk = inputStream.readChunk()
}
val frames: MutableList<Frame> = mutableListOf()
if (earlyFctl != null) {
val allIdatData = idatData.toByteArray()
val pngData = encodePng(ihdr, framePrefixChunks, allIdatData.size.toUInt(), allIdatData)
frames += Frame(pngData, earlyFctl)
}
// chunk already points to the first non-IDAT chunk from the collection loop above
while (chunk != null && chunk !is Chunk.IEND) {
while (chunk != null && chunk !is Chunk.fcTL) {
chunk = inputStream.readChunk()
for (region in regions) {
// Skip to this region if needed (handles gaps between consecutive chunks)
val regionSkip = region.streamOffset - currentStreamPos
if (regionSkip > 0) {
stream.skipNBytesOrThrow(regionSkip)
currentStreamPos = region.streamOffset
}
if (chunk == null) {
break
var read = 0
val regionLength = region.length.toInt()
while (read < regionLength) {
val n = stream.read(frameData, writeOffset + read, regionLength - read)
if (n == -1) throw IOException("Unexpected end of stream reading frame $index")
read += n
}
currentStreamPos += region.length
writeOffset += regionLength
if (chunk !is Chunk.fcTL) {
throw IOException("Expected an fcTL chunk, got $chunk instead!")
}
val fctl: Chunk.fcTL = chunk
chunk = inputStream.readChunk()
if (chunk !is Chunk.fdAT) {
throw IOException("Expected an fdAT chunk, got $chunk instead!")
}
// Collect all consecutive fdAT chunks -- frames can span multiple fdATs per the spec
val fdatData = ByteArrayOutputStream()
while (chunk is Chunk.fdAT) {
fdatData.write(chunk.data)
chunk = inputStream.readChunk()
}
val allFdatData = fdatData.toByteArray()
val pngData = encodePng(ihdr.copy(width = fctl.width, height = fctl.height), framePrefixChunks, allFdatData.size.toUInt(), allFdatData)
frames += Frame(pngData, fctl)
// Skip the CRC after this chunk's data
stream.skipNBytesOrThrow(4)
currentStreamPos += 4
}
return frames
// Encode as a standalone PNG and decode to bitmap
val frameIhdr = if (frame.isIdat) ihdr else ihdr.copy(width = frame.fcTL.width, height = frame.fcTL.height)
val pngData = encodePng(frameIhdr, prefixChunks, totalDataSize.toUInt(), frameData)
return BitmapFactory.decodeByteArray(pngData, 0, pngData.size)
?: throw IOException("Failed to decode frame bitmap")
}
override fun close() {
// Release the stream held open for sequential frame reads (if any); cleared so a
// later decodeFrame call would reopen from the factory.
currentStream?.close()
currentStream = null
}
private fun encodePng(ihdr: Chunk.IHDR, prefixChunks: List<Chunk.ArbitraryChunk>, dataLength: UInt, data: ByteArray): ByteArray {
@@ -246,7 +420,37 @@ class ApngDecoder(val inputStream: InputStream) {
this.writeUInt(chunk.crc)
}
// TODO private
/**
 * Cursor over [inputStream] that tracks how many bytes have been consumed, so the
 * metadata scan can record absolute byte offsets for chunk data.
 */
private class StreamScanner(private val inputStream: InputStream) {

  /** Absolute offset of the next unread byte. */
  var position: Long = 0
    private set

  fun readBytes(n: Int): ByteArray {
    return inputStream.readNBytesOrThrow(n).also { position += n }
  }

  fun readUInt(): UInt = readBytes(4).toUInt()

  fun skipBytes(n: Long) {
    inputStream.skipNBytesOrThrow(n)
    position += n
  }
}
/**
 * A region of data within the stream, identified by its byte offset and length.
 *
 * Declared as a data class: it is a pure value holder, so structural equality,
 * [toString], and [copy] come for free. Constructor and properties are unchanged,
 * so existing call sites are unaffected.
 *
 * @property streamOffset Absolute byte offset of the first byte of the region.
 * @property length Number of bytes in the region.
 */
data class DataRegion(
  val streamOffset: Long,
  val length: Long
)
sealed class Chunk {
/**
* Contains metadata about the overall image. Must appear first.
@@ -265,11 +469,6 @@ class ApngDecoder(val inputStream: InputStream) {
}
}
/**
 * Contains the actual compressed PNG image data. For an APNG, the IDAT chunk represents the default image and possibly the first frame of the animation.
 *
 * [length] is the chunk's declared data length from the chunk header; [data] is the raw, still-compressed payload.
 */
class IDAT(val length: UInt, val data: ByteArray) : Chunk()
/**
* Marks the end of the file.
*/
@@ -333,20 +532,9 @@ class ApngDecoder(val inputStream: InputStream) {
}
}
/**
 * Contains the actual compressed image data for a single frame of the animation. Appears after each fcTL chunk.
 * The contents of [data] are actually an [IDAT] chunk, meaning that to decode the frame, we can just bolt metadata to the front of the file and hand
 * it off to the system decoder.
 *
 * [length] is the chunk's declared data length (which covers [sequenceNumber] plus [data]);
 * [sequenceNumber] orders animation chunks; [data] is the payload with the sequence number stripped.
 */
class fdAT(
val length: UInt,
val sequenceNumber: UInt,
val data: ByteArray
) : Chunk()
/**
* Represents a PNG chunk that we don't care about because it's not APNG-specific.
* We still have to remember it and give it the PNG encoder as we create each frame, but we don't need to understand it.
* We still have to remember it and give it to the PNG encoder as we create each frame, but we don't need to understand it.
*/
class ArbitraryChunk(
val length: UInt,
@@ -360,16 +548,19 @@ class ApngDecoder(val inputStream: InputStream) {
}
}
/**
* Lightweight frame descriptor. Contains only the fcTL metadata and byte offsets
* ([dataRegions]) pointing into the stream where the compressed image data lives.
* No image data is held in memory.
*
* [isIdat] is true when this frame's data comes from IDAT chunks (the default image),
* meaning the IHDR dimensions should be used as-is rather than the fcTL dimensions.
*/
class Frame(
val pngData: ByteArray,
val fcTL: Chunk.fcTL
) {
@WorkerThread
fun decodeBitmap(): Bitmap {
return BitmapFactory.decodeByteArray(pngData, 0, pngData.size)
?: throw IOException("Failed to decode frame bitmap")
}
}
val fcTL: Chunk.fcTL,
val dataRegions: List<DataRegion>,
val isIdat: Boolean
)
class Metadata(
val width: Int,
@@ -377,83 +568,3 @@ class ApngDecoder(val inputStream: InputStream) {
val numPlays: Int
)
}
/**
 * Reads and parses the next PNG chunk from this stream.
 *
 * Returns null in two cases, both treated by callers as "stop parsing":
 * the stream hit EOF mid-chunk, or the chunk's stored CRC did not match the
 * CRC computed over the chunk type + data.
 */
private fun InputStream.readChunk(): ApngDecoder.Chunk? {
try {
// Every chunk is: 4-byte length, 4-byte ASCII type, <length> bytes of data, 4-byte CRC.
val length: UInt = this.readUInt()
val type: String = this.readNBytesOrThrow(4).toString(Charsets.US_ASCII)
val data = this.readNBytesOrThrow(length.toInt())
// CRC is computed over the type bytes followed by the data bytes (per the PNG spec layout used here).
val dataCrc = CRC32().also { it.update(type.toByteArray(Charsets.US_ASCII)) }.also { it.update(data) }.value
val targetCrc = this.readUInt().toLong()
if (dataCrc != targetCrc) {
// Corrupt chunk -- surface as null rather than throwing.
return null
}
return when (type) {
"IHDR" -> {
ApngDecoder.Chunk.IHDR(
width = data.sliceArray(0 until 4).toUInt(),
height = data.sliceArray(4 until 8).toUInt(),
bitDepth = data[8],
colorType = data[9],
compressionMethod = data[10],
filterMethod = data[11],
interlaceMethod = data[12]
)
}
"IDAT" -> {
ApngDecoder.Chunk.IDAT(length, data)
}
"IEND" -> {
ApngDecoder.Chunk.IEND
}
"acTL" -> {
ApngDecoder.Chunk.acTL(
numFrames = data.sliceArray(0 until 4).toUInt(),
numPlays = data.sliceArray(4 until 8).toUInt()
)
}
"fcTL" -> {
ApngDecoder.Chunk.fcTL(
sequenceNumber = data.sliceArray(0 until 4).toUInt(),
width = data.sliceArray(4 until 8).toUInt(),
height = data.sliceArray(8 until 12).toUInt(),
xOffset = data.sliceArray(12 until 16).toUInt(),
yOffset = data.sliceArray(16 until 20).toUInt(),
delayNum = data.sliceArray(20 until 22).toUShort(),
delayDen = data.sliceArray(22 until 24).toUShort(),
disposeOp = when (data[24]) {
0.toByte() -> ApngDecoder.Chunk.fcTL.DisposeOp.NONE
1.toByte() -> ApngDecoder.Chunk.fcTL.DisposeOp.BACKGROUND
2.toByte() -> ApngDecoder.Chunk.fcTL.DisposeOp.PREVIOUS
else -> throw IOException("Invalid disposeOp: ${data[24]}")
},
blendOp = when (data[25]) {
0.toByte() -> ApngDecoder.Chunk.fcTL.BlendOp.SOURCE
1.toByte() -> ApngDecoder.Chunk.fcTL.BlendOp.OVER
else -> throw IOException("Invalid blendOp: ${data[25]}")
}
)
}
"fdAT" -> {
// The first 4 bytes of an fdAT payload are the sequence number; the rest is image data.
ApngDecoder.Chunk.fdAT(
length = length,
sequenceNumber = data.sliceArray(0 until 4).toUInt(),
data = data.sliceArray(4 until data.size)
)
}
else -> {
// Chunk types we don't understand are preserved verbatim (with their original CRC) for re-encoding.
ApngDecoder.Chunk.ArbitraryChunk(length, type, data, targetCrc.toInt().toUInt())
}
}
} catch (e: EOFException) {
// Ran out of bytes mid-chunk -- treated the same as "no more chunks".
return null
}
}

View File

@@ -33,25 +33,24 @@ class ApngDrawable(val decoder: ApngDecoder) : Drawable(), Animatable {
}
val currentFrame: ApngDecoder.Frame
get() = frames[position]
get() = decoder.frames[position]
var position = 0
private set
val frameCount: Int
get() = frames.size
get() = decoder.frames.size
var debugDrawBounds = false
var loopForever = false
private val frames: List<ApngDecoder.Frame> = decoder.debugGetAllFrames()
private var playCount = 0
private val frameRect = Rect(0, 0, 0, 0)
private var timeForNextFrame = 0L
private val activeBitmap = Bitmap.createBitmap(decoder.metadata?.width ?: 0, decoder.metadata?.height ?: 0, Bitmap.Config.ARGB_8888)
private val pendingBitmap = Bitmap.createBitmap(decoder.metadata?.width ?: 0, decoder.metadata?.height ?: 0, Bitmap.Config.ARGB_8888)
private val disposeOpBitmap = Bitmap.createBitmap(decoder.metadata?.width ?: 0, decoder.metadata?.height ?: 0, Bitmap.Config.ARGB_8888)
private val activeBitmap = Bitmap.createBitmap(decoder.metadata.width, decoder.metadata.height, Bitmap.Config.ARGB_8888)
private val pendingBitmap = Bitmap.createBitmap(decoder.metadata.width, decoder.metadata.height, Bitmap.Config.ARGB_8888)
private val disposeOpBitmap = Bitmap.createBitmap(decoder.metadata.width, decoder.metadata.height, Bitmap.Config.ARGB_8888)
private val pendingCanvas = Canvas(pendingBitmap)
private val activeCanvas = Canvas(activeBitmap)
@@ -71,17 +70,17 @@ class ApngDrawable(val decoder: ApngDecoder) : Drawable(), Animatable {
return
}
val totalPlays = decoder.metadata?.numPlays ?: Int.MAX_VALUE
val totalPlays = decoder.metadata.numPlays
if (playCount >= totalPlays && !loopForever) {
canvas.drawBitmap(activeBitmap, 0f, 0f, null)
return
}
val frame = frames[position]
drawFrame(frame)
val frame = decoder.frames[position]
drawFrame(frame, position)
canvas.drawBitmap(activeBitmap, 0f, 0f, null)
position = (position + 1) % frames.size
position = (position + 1) % decoder.frames.size
if (position == 0) {
playCount++
}
@@ -91,11 +90,11 @@ class ApngDrawable(val decoder: ApngDecoder) : Drawable(), Animatable {
}
override fun getIntrinsicWidth(): Int {
return decoder.metadata?.width ?: 0
return decoder.metadata.width
}
override fun getIntrinsicHeight(): Int {
return decoder.metadata?.height ?: 0
return decoder.metadata.height
}
override fun setAlpha(alpha: Int) {
@@ -110,6 +109,16 @@ class ApngDrawable(val decoder: ApngDecoder) : Drawable(), Animatable {
return PixelFormat.OPAQUE
}
override fun setVisible(visible: Boolean, restart: Boolean): Boolean {
val changed = super.setVisible(visible, restart)
if (visible) {
start()
} else {
stop()
}
return changed
}
override fun start() {
playing = true
invalidateSelf()
@@ -124,30 +133,31 @@ class ApngDrawable(val decoder: ApngDecoder) : Drawable(), Animatable {
}
fun nextFrame() {
position = (position + 1) % frames.size
position = (position + 1) % decoder.frames.size
if (position == 0) {
playCount++
}
drawFrame(frames[position])
drawFrame(decoder.frames[position], position)
}
fun prevFrame() {
if (position == 0) {
position = frames.size - 1
position = decoder.frames.size - 1
playCount--
} else {
position--
}
drawFrame(frames[position])
drawFrame(decoder.frames[position], position)
}
fun recycle() {
decoder.close()
activeBitmap.recycle()
pendingBitmap.recycle()
disposeOpBitmap.recycle()
}
private fun drawFrame(frame: ApngDecoder.Frame) {
private fun drawFrame(frame: ApngDecoder.Frame, frameIndex: Int) {
frameRect.updateBoundsFrom(frame)
// If the disposeOp is PREVIOUS, then we need to save the contents of the frame before we draw into it
@@ -157,7 +167,7 @@ class ApngDrawable(val decoder: ApngDecoder) : Drawable(), Animatable {
}
// Start with a clean slate if this is the first frame
if (position == 0) {
if (frameIndex == 0) {
pendingCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR)
}
@@ -174,7 +184,7 @@ class ApngDrawable(val decoder: ApngDecoder) : Drawable(), Animatable {
}
}
val frameBitmap = frame.decodeBitmap()
val frameBitmap = decoder.decodeFrame(frameIndex)
pendingCanvas.drawBitmap(frameBitmap, frame.fcTL.xOffset.toFloat(), frame.fcTL.yOffset.toFloat(), null)
frameBitmap.recycle()
@@ -198,7 +208,7 @@ class ApngDrawable(val decoder: ApngDecoder) : Drawable(), Animatable {
// This disposeOp means we want to reset the drawing region of the frame to the content that was there before it was drawn.
// Per spec, if the first frame has a disposeOp of DISPOSE_OP_PREVIOUS, we treat it as DISPOSE_OP_BACKGROUND
if (position == 0) {
if (frameIndex == 0) {
pendingCanvas.drawRect(frameRect, CLEAR_PAINT)
} else {
pendingCanvas.drawRect(frameRect, CLEAR_PAINT)

View File

@@ -51,7 +51,7 @@ class ApngDecoderTest {
val result = decode("test01.png")
assertNotNull(result.metadata)
assertEquals(1, result.frames.size)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
@@ -59,7 +59,7 @@ class ApngDecoderTest {
val result = decode("test02.png")
assertNotNull(result.metadata)
assertEquals(1, result.frames.size)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
// -- Split IDAT and fdAT --
@@ -79,7 +79,7 @@ class ApngDecoderTest {
val result = decode("test05.png")
assertNotNull(result.metadata)
assertEquals(1, result.frames.size)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
@@ -87,7 +87,7 @@ class ApngDecoderTest {
val result = decode("test06.png")
assertNotNull(result.metadata)
assertEquals(1, result.frames.size)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
// -- Dispose ops --
@@ -97,7 +97,7 @@ class ApngDecoderTest {
val result = decode("test07.png")
assertTrue(result.frames.size >= 2)
assertTrue(result.frames.any { it.fcTL.disposeOp == ApngDecoder.Chunk.fcTL.DisposeOp.NONE })
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
@@ -105,14 +105,14 @@ class ApngDecoderTest {
val result = decode("test08.png")
assertTrue(result.frames.size >= 2)
assertTrue(result.frames.any { it.fcTL.disposeOp == ApngDecoder.Chunk.fcTL.DisposeOp.BACKGROUND })
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test09 - DISPOSE_OP_BACKGROUND final frame`() {
val result = decode("test09.png")
assertTrue(result.frames.size >= 2)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
@@ -120,14 +120,14 @@ class ApngDecoderTest {
val result = decode("test10.png")
assertTrue(result.frames.size >= 2)
assertTrue(result.frames.any { it.fcTL.disposeOp == ApngDecoder.Chunk.fcTL.DisposeOp.PREVIOUS })
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test11 - DISPOSE_OP_PREVIOUS final frame`() {
val result = decode("test11.png")
assertTrue(result.frames.size >= 2)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
@@ -135,7 +135,7 @@ class ApngDecoderTest {
val result = decode("test12.png")
assertTrue(result.frames.size >= 2)
assertEquals(ApngDecoder.Chunk.fcTL.DisposeOp.PREVIOUS, result.frames[0].fcTL.disposeOp)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
// -- Dispose ops with regions --
@@ -144,30 +144,30 @@ class ApngDecoderTest {
fun `test13 - DISPOSE_OP_NONE in region`() {
val result = decode("test13.png")
assertTrue(result.frames.size >= 2)
val subFrame = result.frames.find { it.fcTL.width != result.metadata!!.width.toUInt() || it.fcTL.height != result.metadata!!.height.toUInt() }
val subFrame = result.frames.find { it.fcTL.width != result.metadata.width.toUInt() || it.fcTL.height != result.metadata.height.toUInt() }
assertNotNull("Expected at least one sub-region frame", subFrame)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test14 - DISPOSE_OP_BACKGROUND before region`() {
val result = decode("test14.png")
assertTrue(result.frames.size >= 2)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test15 - DISPOSE_OP_BACKGROUND in region`() {
val result = decode("test15.png")
assertTrue(result.frames.size >= 2)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test16 - DISPOSE_OP_PREVIOUS in region`() {
val result = decode("test16.png")
assertTrue(result.frames.size >= 2)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
// -- Blend ops --
@@ -177,21 +177,21 @@ class ApngDecoderTest {
val result = decode("test17.png")
assertTrue(result.frames.size >= 2)
assertTrue(result.frames.any { it.fcTL.blendOp == ApngDecoder.Chunk.fcTL.BlendOp.SOURCE })
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test18 - BLEND_OP_SOURCE on transparent colour`() {
val result = decode("test18.png")
assertTrue(result.frames.size >= 2)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test19 - BLEND_OP_SOURCE on nearly-transparent colour`() {
val result = decode("test19.png")
assertTrue(result.frames.size >= 2)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
@@ -199,14 +199,14 @@ class ApngDecoderTest {
val result = decode("test20.png")
assertTrue(result.frames.size >= 2)
assertTrue(result.frames.any { it.fcTL.blendOp == ApngDecoder.Chunk.fcTL.BlendOp.OVER })
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test21 - BLEND_OP_OVER repeatedly with nearly-transparent colours`() {
val result = decode("test21.png")
assertTrue(result.frames.size >= 2)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
// -- Blending and gamma --
@@ -215,14 +215,14 @@ class ApngDecoderTest {
fun `test22 - BLEND_OP_OVER with gamma`() {
val result = decode("test22.png")
assertTrue(result.frames.isNotEmpty())
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test23 - BLEND_OP_OVER with gamma nearly black`() {
val result = decode("test23.png")
assertTrue(result.frames.isNotEmpty())
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
// -- Chunk ordering --
@@ -232,7 +232,7 @@ class ApngDecoderTest {
val result = decode("test24.png")
assertNotNull(result.metadata)
assertTrue(result.frames.isNotEmpty())
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
// -- Delays --
@@ -297,19 +297,19 @@ class ApngDecoderTest {
@Test
fun `test30 - num_plays 0 means infinite`() {
val result = decode("test30.png")
assertEquals(Int.MAX_VALUE, result.metadata!!.numPlays)
assertEquals(Int.MAX_VALUE, result.metadata.numPlays)
}
@Test
fun `test31 - num_plays 1`() {
val result = decode("test31.png")
assertEquals(1, result.metadata!!.numPlays)
assertEquals(1, result.metadata.numPlays)
}
@Test
fun `test32 - num_plays 2`() {
val result = decode("test32.png")
assertEquals(2, result.metadata!!.numPlays)
assertEquals(2, result.metadata.numPlays)
}
// -- Other color depths and types --
@@ -318,42 +318,42 @@ class ApngDecoderTest {
fun `test33 - 16-bit colour`() {
val result = decode("test33.png")
assertTrue(result.frames.isNotEmpty())
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test34 - 8-bit greyscale`() {
val result = decode("test34.png")
assertTrue(result.frames.isNotEmpty())
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test35 - 8-bit greyscale and alpha with blending`() {
val result = decode("test35.png")
assertTrue(result.frames.isNotEmpty())
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test36 - 2-color palette`() {
val result = decode("test36.png")
assertTrue(result.frames.isNotEmpty())
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test37 - 2-bit palette and alpha`() {
val result = decode("test37.png")
assertTrue(result.frames.isNotEmpty())
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
fun `test38 - 1-bit palette and alpha with blending`() {
val result = decode("test38.png")
assertTrue(result.frames.isNotEmpty())
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
// -- Real-world samples --
@@ -363,7 +363,7 @@ class ApngDecoderTest {
val result = decode("ball.png")
assertNotNull(result.metadata)
assertTrue(result.frames.size > 1)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
@@ -371,7 +371,7 @@ class ApngDecoderTest {
val result = decode("clock.png")
assertNotNull(result.metadata)
assertTrue(result.frames.size > 1)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
@Test
@@ -379,7 +379,7 @@ class ApngDecoderTest {
val result = decode("elephant.png")
assertNotNull(result.metadata)
assertTrue(result.frames.size > 1)
result.frames.forEach { assertNotNull(it.decodeBitmap()) }
result.frames.forEachIndexed { i, _ -> assertNotNull(result.decoder.decodeFrame(i)) }
}
// -- Helpers --
@@ -390,9 +390,8 @@ class ApngDecoderTest {
}
private fun decode(filename: String): DecodeResult {
val decoder = ApngDecoder(open(filename))
val frames = decoder.debugGetAllFrames()
return DecodeResult(decoder.metadata, frames)
val decoder = ApngDecoder.create { open(filename) }
return DecodeResult(decoder, decoder.metadata, decoder.frames)
}
private val ApngDecoder.Frame.delayMs: Long
@@ -403,7 +402,8 @@ class ApngDecoderTest {
}
private data class DecodeResult(
val metadata: ApngDecoder.Metadata?,
val decoder: ApngDecoder,
val metadata: ApngDecoder.Metadata,
val frames: List<ApngDecoder.Frame>
)
}