mirror of
https://github.com/signalapp/Signal-Android.git
synced 2026-04-21 00:59:49 +01:00
Move all files to natural position.
This commit is contained in:
@@ -0,0 +1,43 @@
|
||||
package org.thoughtcrime.securesms.video;
|
||||
|
||||
import android.media.MediaDataSource;
|
||||
|
||||
import androidx.annotation.RequiresApi;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@RequiresApi(23)
|
||||
public class ByteArrayMediaDataSource extends MediaDataSource {
|
||||
|
||||
private byte[] data;
|
||||
|
||||
public ByteArrayMediaDataSource(byte[] data) {
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int readAt(long position, byte[] buffer, int offset, int size) throws IOException {
|
||||
if (data == null) throw new IOException("ByteArrayMediaDataSource is closed");
|
||||
|
||||
long bytesAvailable = getSize() - position;
|
||||
int read = Math.min(size, (int) bytesAvailable);
|
||||
if (read <= 0) return -1;
|
||||
|
||||
if (buffer != null) {
|
||||
System.arraycopy(data, (int) position, buffer, offset, read);
|
||||
}
|
||||
|
||||
return read;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getSize() throws IOException {
|
||||
if (data == null) throw new IOException("ByteArrayMediaDataSource is closed");
|
||||
return data.length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
data = null;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,54 @@
|
||||
package org.thoughtcrime.securesms.video;
|
||||
|
||||
import android.media.MediaDataSource;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.RequiresApi;
|
||||
|
||||
import org.thoughtcrime.securesms.crypto.AttachmentSecret;
|
||||
import org.thoughtcrime.securesms.crypto.ClassicDecryptingPartInputStream;
|
||||
import org.thoughtcrime.securesms.util.Util;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
||||
@RequiresApi(23)
|
||||
final class ClassicEncryptedMediaDataSource extends MediaDataSource {
|
||||
|
||||
private final AttachmentSecret attachmentSecret;
|
||||
private final File mediaFile;
|
||||
private final long length;
|
||||
|
||||
ClassicEncryptedMediaDataSource(@NonNull AttachmentSecret attachmentSecret, @NonNull File mediaFile, long length) {
|
||||
this.attachmentSecret = attachmentSecret;
|
||||
this.mediaFile = mediaFile;
|
||||
this.length = length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int readAt(long position, byte[] bytes, int offset, int length) throws IOException {
|
||||
try (InputStream inputStream = ClassicDecryptingPartInputStream.createFor(attachmentSecret, mediaFile)) {
|
||||
byte[] buffer = new byte[4096];
|
||||
long headerRemaining = position;
|
||||
|
||||
while (headerRemaining > 0) {
|
||||
int read = inputStream.read(buffer, 0, Util.toIntExact(Math.min((long)buffer.length, headerRemaining)));
|
||||
|
||||
if (read == -1) return -1;
|
||||
|
||||
headerRemaining -= read;
|
||||
}
|
||||
|
||||
return inputStream.read(bytes, offset, length);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getSize() {
|
||||
return length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
package org.thoughtcrime.securesms.video;
|
||||
|
||||
import android.media.MediaDataSource;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
import androidx.annotation.RequiresApi;
|
||||
|
||||
import org.thoughtcrime.securesms.crypto.AttachmentSecret;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
@RequiresApi(23)
|
||||
public final class EncryptedMediaDataSource {
|
||||
|
||||
public static MediaDataSource createFor(@NonNull AttachmentSecret attachmentSecret, @NonNull File mediaFile, @Nullable byte[] random, long length) {
|
||||
if (random == null) {
|
||||
return new ClassicEncryptedMediaDataSource(attachmentSecret, mediaFile, length);
|
||||
} else {
|
||||
return new ModernEncryptedMediaDataSource(attachmentSecret, mediaFile, random, length);
|
||||
}
|
||||
}
|
||||
|
||||
public static MediaDataSource createForDiskBlob(@NonNull AttachmentSecret attachmentSecret, @NonNull File mediaFile) {
|
||||
return new ModernEncryptedMediaDataSource(attachmentSecret, mediaFile, null, 0);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,217 @@
|
||||
package org.thoughtcrime.securesms.video;
|
||||
|
||||
import android.content.Context;
|
||||
import android.media.MediaDataSource;
|
||||
import android.media.MediaMetadataRetriever;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
import androidx.annotation.RequiresApi;
|
||||
|
||||
import com.google.android.exoplayer2.util.MimeTypes;
|
||||
|
||||
import org.thoughtcrime.securesms.logging.Log;
|
||||
import org.thoughtcrime.securesms.mms.MediaStream;
|
||||
import org.thoughtcrime.securesms.util.MemoryFileDescriptor;
|
||||
import org.thoughtcrime.securesms.video.videoconverter.EncodingException;
|
||||
import org.thoughtcrime.securesms.video.videoconverter.MediaConverter;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.FileDescriptor;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.text.NumberFormat;
|
||||
import java.util.Locale;
|
||||
|
||||
@RequiresApi(26)
|
||||
public final class InMemoryTranscoder implements Closeable {
|
||||
|
||||
private static final String TAG = Log.tag(InMemoryTranscoder.class);
|
||||
|
||||
private static final int MAXIMUM_TARGET_VIDEO_BITRATE = VideoUtil.VIDEO_BIT_RATE;
|
||||
private static final int LOW_RES_TARGET_VIDEO_BITRATE = 1_750_000;
|
||||
private static final int MINIMUM_TARGET_VIDEO_BITRATE = 500_000;
|
||||
private static final int AUDIO_BITRATE = VideoUtil.AUDIO_BIT_RATE;
|
||||
private static final int OUTPUT_FORMAT = VideoUtil.VIDEO_SHORT_WIDTH;
|
||||
private static final int LOW_RES_OUTPUT_FORMAT = 480;
|
||||
|
||||
private final Context context;
|
||||
private final MediaDataSource dataSource;
|
||||
private final long upperSizeLimit;
|
||||
private final long inSize;
|
||||
private final long duration;
|
||||
private final int inputBitRate;
|
||||
private final int targetVideoBitRate;
|
||||
private final long memoryFileEstimate;
|
||||
private final boolean transcodeRequired;
|
||||
private final long fileSizeEstimate;
|
||||
private final int outputFormat;
|
||||
|
||||
private @Nullable MemoryFileDescriptor memoryFile;
|
||||
|
||||
/**
|
||||
* @param upperSizeLimit A upper size to transcode to. The actual output size can be up to 10% smaller.
|
||||
*/
|
||||
public InMemoryTranscoder(@NonNull Context context, @NonNull MediaDataSource dataSource, long upperSizeLimit) throws IOException, VideoSourceException {
|
||||
this.context = context;
|
||||
this.dataSource = dataSource;
|
||||
|
||||
final MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
|
||||
try {
|
||||
mediaMetadataRetriever.setDataSource(dataSource);
|
||||
} catch (RuntimeException e) {
|
||||
Log.w(TAG, "Unable to read datasource", e);
|
||||
throw new VideoSourceException("Unable to read datasource", e);
|
||||
}
|
||||
|
||||
long upperSizeLimitWithMargin = (long) (upperSizeLimit / 1.1);
|
||||
|
||||
this.inSize = dataSource.getSize();
|
||||
this.duration = getDuration(mediaMetadataRetriever);
|
||||
this.inputBitRate = bitRate(inSize, duration);
|
||||
this.targetVideoBitRate = getTargetVideoBitRate(upperSizeLimitWithMargin, duration);
|
||||
this.upperSizeLimit = upperSizeLimit;
|
||||
|
||||
this.transcodeRequired = inputBitRate >= targetVideoBitRate * 1.2 || inSize > upperSizeLimit || containsLocation(mediaMetadataRetriever);
|
||||
if (!transcodeRequired) {
|
||||
Log.i(TAG, "Video is within 20% of target bitrate, below the size limit and contained no location metadata.");
|
||||
}
|
||||
|
||||
this.fileSizeEstimate = (targetVideoBitRate + AUDIO_BITRATE) * duration / 8000;
|
||||
this.memoryFileEstimate = (long) (fileSizeEstimate * 1.1);
|
||||
this.outputFormat = targetVideoBitRate < LOW_RES_TARGET_VIDEO_BITRATE
|
||||
? LOW_RES_OUTPUT_FORMAT
|
||||
: OUTPUT_FORMAT;
|
||||
}
|
||||
|
||||
public @NonNull MediaStream transcode(@NonNull Progress progress) throws IOException, EncodingException, VideoSizeException {
|
||||
if (memoryFile != null) throw new AssertionError("Not expecting to reuse transcoder");
|
||||
|
||||
float durationSec = duration / 1000f;
|
||||
|
||||
NumberFormat numberFormat = NumberFormat.getInstance(Locale.US);
|
||||
|
||||
Log.i(TAG, String.format(Locale.US,
|
||||
"Transcoding:\n" +
|
||||
"Target bitrate : %s + %s = %s\n" +
|
||||
"Target format : %dp\n" +
|
||||
"Video duration : %.1fs\n" +
|
||||
"Size limit : %s kB\n" +
|
||||
"Estimate : %s kB\n" +
|
||||
"Input size : %s kB\n" +
|
||||
"Input bitrate : %s bps",
|
||||
numberFormat.format(targetVideoBitRate),
|
||||
numberFormat.format(AUDIO_BITRATE),
|
||||
numberFormat.format(targetVideoBitRate + AUDIO_BITRATE),
|
||||
outputFormat,
|
||||
durationSec,
|
||||
numberFormat.format(upperSizeLimit / 1024),
|
||||
numberFormat.format(fileSizeEstimate / 1024),
|
||||
numberFormat.format(inSize / 1024),
|
||||
numberFormat.format(inputBitRate)));
|
||||
|
||||
if (fileSizeEstimate > upperSizeLimit) {
|
||||
throw new VideoSizeException("Size constraints could not be met!");
|
||||
}
|
||||
|
||||
memoryFile = MemoryFileDescriptor.newMemoryFileDescriptor(context,
|
||||
"TRANSCODE",
|
||||
memoryFileEstimate);
|
||||
final long startTime = System.currentTimeMillis();
|
||||
|
||||
final FileDescriptor memoryFileFileDescriptor = memoryFile.getFileDescriptor();
|
||||
|
||||
final MediaConverter converter = new MediaConverter();
|
||||
|
||||
converter.setInput(dataSource);
|
||||
converter.setOutput(memoryFileFileDescriptor);
|
||||
converter.setVideoResolution(outputFormat);
|
||||
converter.setVideoBitrate(targetVideoBitRate);
|
||||
converter.setAudioBitrate(AUDIO_BITRATE);
|
||||
|
||||
converter.setListener(percent -> {
|
||||
progress.onProgress(percent);
|
||||
return false;
|
||||
});
|
||||
|
||||
converter.convert();
|
||||
|
||||
// output details of the transcoding
|
||||
long outSize = memoryFile.size();
|
||||
float encodeDurationSec = (System.currentTimeMillis() - startTime) / 1000f;
|
||||
|
||||
Log.i(TAG, String.format(Locale.US,
|
||||
"Transcoding complete:\n" +
|
||||
"Transcode time : %.1fs (%.1fx)\n" +
|
||||
"Output size : %s kB\n" +
|
||||
" of Original : %.1f%%\n" +
|
||||
" of Estimate : %.1f%%\n" +
|
||||
" of Memory : %.1f%%\n" +
|
||||
"Output bitrate : %s bps",
|
||||
encodeDurationSec,
|
||||
durationSec / encodeDurationSec,
|
||||
numberFormat.format(outSize / 1024),
|
||||
(outSize * 100d) / inSize,
|
||||
(outSize * 100d) / fileSizeEstimate,
|
||||
(outSize * 100d) / memoryFileEstimate,
|
||||
numberFormat.format(bitRate(outSize, duration))));
|
||||
|
||||
if (outSize > upperSizeLimit) {
|
||||
throw new VideoSizeException("Size constraints could not be met!");
|
||||
}
|
||||
|
||||
memoryFile.seek(0);
|
||||
|
||||
return new MediaStream(new FileInputStream(memoryFileFileDescriptor), MimeTypes.VIDEO_MP4, 0, 0);
|
||||
}
|
||||
|
||||
public boolean isTranscodeRequired() {
|
||||
return transcodeRequired;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if (memoryFile != null) {
|
||||
memoryFile.close();
|
||||
}
|
||||
}
|
||||
|
||||
private static int bitRate(long bytes, long duration) {
|
||||
return (int) (bytes * 8 / (duration / 1000f));
|
||||
}
|
||||
|
||||
private static int getTargetVideoBitRate(long sizeGuideBytes, long duration) {
|
||||
sizeGuideBytes -= (duration / 1000d) * AUDIO_BITRATE / 8;
|
||||
|
||||
double targetAttachmentSizeBits = sizeGuideBytes * 8L;
|
||||
|
||||
double bitRateToFixTarget = targetAttachmentSizeBits / (duration / 1000d);
|
||||
return Math.max(MINIMUM_TARGET_VIDEO_BITRATE, Math.min(MAXIMUM_TARGET_VIDEO_BITRATE, (int) bitRateToFixTarget));
|
||||
}
|
||||
|
||||
private static long getDuration(MediaMetadataRetriever mediaMetadataRetriever) throws VideoSourceException {
|
||||
String durationString = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
|
||||
if (durationString == null) {
|
||||
throw new VideoSourceException("Cannot determine duration of video, null meta data");
|
||||
}
|
||||
try {
|
||||
long duration = Long.parseLong(durationString);
|
||||
if (duration <= 0) {
|
||||
throw new VideoSourceException("Cannot determine duration of video, meta data: " + durationString);
|
||||
}
|
||||
return duration;
|
||||
} catch (NumberFormatException e) {
|
||||
throw new VideoSourceException("Cannot determine duration of video, meta data: " + durationString, e);
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean containsLocation(MediaMetadataRetriever mediaMetadataRetriever) {
|
||||
String locationString = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_LOCATION);
|
||||
return locationString != null;
|
||||
}
|
||||
|
||||
public interface Progress {
|
||||
|
||||
void onProgress(int percent);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,80 @@
|
||||
package org.thoughtcrime.securesms.video;
|
||||
|
||||
import android.media.MediaDataSource;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
import androidx.annotation.RequiresApi;
|
||||
|
||||
import org.thoughtcrime.securesms.crypto.AttachmentSecret;
|
||||
import org.thoughtcrime.securesms.crypto.ModernDecryptingPartInputStream;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
||||
/**
 * Create via {@link EncryptedMediaDataSource}.
 * <p>
 * A {@link MediaDataSource} that points to an encrypted file.
 * <p>
 * It is "modern" compared to the {@link ClassicEncryptedMediaDataSource}. And "modern" refers to
 * the presence of a random part of the key supplied in the constructor.
 */
@RequiresApi(23)
final class ModernEncryptedMediaDataSource extends MediaDataSource {

  private final AttachmentSecret attachmentSecret;
  private final File mediaFile;
  // Random key component; null means a file-derived key is used instead.
  private final byte[] random;
  // Plaintext length as supplied by the caller; returned verbatim by getSize().
  private final long length;

  ModernEncryptedMediaDataSource(@NonNull AttachmentSecret attachmentSecret, @NonNull File mediaFile, @Nullable byte[] random, long length) {
    this.attachmentSecret = attachmentSecret;
    this.mediaFile = mediaFile;
    this.random = random;
    this.length = length;
  }

  /**
   * Reads up to {@code length} bytes at {@code position} into {@code bytes}.
   * <p>
   * Opens a fresh decrypting stream positioned at {@code position} per call
   * and loops until the request is filled or the stream ends.
   *
   * @return the number of bytes read, or -1 when the stream is already at EOF.
   */
  @Override
  public int readAt(long position, byte[] bytes, int offset, int length) throws IOException {
    try (InputStream inputStream = createInputStream(position)) {
      int totalRead = 0;

      // NOTE: the loop deliberately mutates the length/offset parameters as
      // it consumes the stream.
      while (length > 0) {
        int read = inputStream.read(bytes, offset, length);

        if (read == -1) {
          // EOF: -1 only if nothing at all was read, else the partial count.
          if (totalRead == 0) {
            return -1;
          } else {
            return totalRead;
          }
        }

        length -= read;
        offset += read;
        totalRead += read;
      }

      return totalRead;
    }
  }

  /** Length supplied at construction time (not measured from the file). */
  @Override
  public long getSize() {
    return length;
  }

  /** No state is held between reads, so there is nothing to release. */
  @Override
  public void close() {
  }

  // Opens a decrypting stream positioned at {@code position}, selecting the
  // key scheme based on whether a random component was supplied.
  private InputStream createInputStream(long position) throws IOException {
    if (random == null) {
      return ModernDecryptingPartInputStream.createFor(attachmentSecret, mediaFile, position);
    } else {
      return ModernDecryptingPartInputStream.createFor(attachmentSecret, random, mediaFile, position);
    }
  }
}
|
||||
@@ -0,0 +1,184 @@
|
||||
/*
|
||||
* Copyright (C) 2017 Whisper Systems
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
package org.thoughtcrime.securesms.video;
|
||||
|
||||
import android.content.Context;
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
import android.util.AttributeSet;
|
||||
import android.view.View;
|
||||
import android.view.Window;
|
||||
import android.view.WindowManager;
|
||||
import android.widget.FrameLayout;
|
||||
|
||||
import com.google.android.exoplayer2.DefaultLoadControl;
|
||||
import com.google.android.exoplayer2.ExoPlayerFactory;
|
||||
import com.google.android.exoplayer2.LoadControl;
|
||||
import com.google.android.exoplayer2.Player;
|
||||
import com.google.android.exoplayer2.SimpleExoPlayer;
|
||||
import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory;
|
||||
import com.google.android.exoplayer2.extractor.ExtractorsFactory;
|
||||
import com.google.android.exoplayer2.source.ExtractorMediaSource;
|
||||
import com.google.android.exoplayer2.source.MediaSource;
|
||||
import com.google.android.exoplayer2.trackselection.AdaptiveTrackSelection;
|
||||
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
|
||||
import com.google.android.exoplayer2.trackselection.TrackSelection;
|
||||
import com.google.android.exoplayer2.trackselection.TrackSelector;
|
||||
import com.google.android.exoplayer2.ui.PlayerControlView;
|
||||
import com.google.android.exoplayer2.ui.PlayerView;
|
||||
import com.google.android.exoplayer2.upstream.BandwidthMeter;
|
||||
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
|
||||
import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
|
||||
|
||||
import org.thoughtcrime.securesms.R;
|
||||
import org.thoughtcrime.securesms.mms.VideoSlide;
|
||||
import org.thoughtcrime.securesms.util.ViewUtil;
|
||||
import org.thoughtcrime.securesms.video.exo.AttachmentDataSourceFactory;
|
||||
|
||||
/**
 * A {@link FrameLayout} wrapping an ExoPlayer-backed video view for playing
 * attachment videos, with optional external controls and a keep-screen-on
 * window flag tied to playback state.
 */
public class VideoPlayer extends FrameLayout {

  private static final String TAG = VideoPlayer.class.getSimpleName();

  private final PlayerView exoView;

  private SimpleExoPlayer exoPlayer;
  private PlayerControlView exoControls;
  private Window window;
  private PlayerStateCallback playerStateCallback;

  public VideoPlayer(Context context) {
    this(context, null);
  }

  public VideoPlayer(Context context, AttributeSet attrs) {
    this(context, attrs, 0);
  }

  public VideoPlayer(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);

    inflate(context, R.layout.video_player, this);

    this.exoView = ViewUtil.findById(this, R.id.video_view);
    this.exoControls = new PlayerControlView(getContext());
    // -1 disables the auto-hide timeout: controls stay visible until hidden.
    this.exoControls.setShowTimeoutMs(-1);
  }

  /**
   * Builds a new ExoPlayer instance for the given slide and starts preparing it.
   * <p>
   * NOTE(review): the listener captures {@code window} at this point; call
   * {@link #setWindow(Window)} before this method, otherwise the listener's
   * window is null and state changes will NPE — confirm callers' ordering.
   *
   * @param autoplay whether playback should begin as soon as the player is ready.
   */
  public void setVideoSource(@NonNull VideoSlide videoSource, boolean autoplay) {
    BandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
    TrackSelection.Factory videoTrackSelectionFactory = new AdaptiveTrackSelection.Factory(bandwidthMeter);
    TrackSelector trackSelector = new DefaultTrackSelector(videoTrackSelectionFactory);
    LoadControl loadControl = new DefaultLoadControl();

    exoPlayer = ExoPlayerFactory.newSimpleInstance(getContext(), trackSelector, loadControl);
    exoPlayer.addListener(new ExoPlayerListener(window, playerStateCallback));
    exoView.setPlayer(exoPlayer);
    exoControls.setPlayer(exoPlayer);

    DefaultDataSourceFactory defaultDataSourceFactory = new DefaultDataSourceFactory(getContext(), "GenericUserAgent", null);
    AttachmentDataSourceFactory attachmentDataSourceFactory = new AttachmentDataSourceFactory(getContext(), defaultDataSourceFactory, null);
    ExtractorsFactory extractorsFactory = new DefaultExtractorsFactory();

    MediaSource mediaSource = new ExtractorMediaSource(videoSource.getUri(), attachmentDataSourceFactory, extractorsFactory, null, null);

    exoPlayer.prepare(mediaSource);
    exoPlayer.setPlayWhenReady(autoplay);
  }

  /** Pauses playback. NOTE(review): NPEs if called before setVideoSource — confirm callers. */
  public void pause() {
    this.exoPlayer.setPlayWhenReady(false);
  }

  /** Hides the on-video controller overlay (not the external controls view). */
  public void hideControls() {
    if (this.exoView != null) {
      this.exoView.hideController();
    }
  }

  /** Returns the detached control view so callers can host it elsewhere, or null. */
  public @Nullable View getControlView() {
    if (this.exoControls != null) {
      return this.exoControls;
    }
    return null;
  }

  /** Releases the player. Safe to call when no source was ever set. */
  public void cleanup() {
    if (this.exoPlayer != null) {
      this.exoPlayer.release();
    }
  }

  /** Switches the player into single-item repeat mode. */
  public void loopForever() {
    if (this.exoPlayer != null) {
      exoPlayer.setRepeatMode(Player.REPEAT_MODE_ONE);
    }
  }

  /** Duration in milliseconds as reported by the player, or 0 when no player exists. */
  public long getDuration() {
    if (this.exoPlayer != null) {
      return this.exoPlayer.getDuration();
    }
    return 0L;
  }

  /** Window whose FLAG_KEEP_SCREEN_ON is toggled with playback state. */
  public void setWindow(@Nullable Window window) {
    this.window = window;
  }

  public void setPlayerStateCallbacks(@Nullable PlayerStateCallback playerStateCallback) {
    this.playerStateCallback = playerStateCallback;
  }

  /**
   * Keeps the screen on while actively playing and notifies the callback the
   * first time the player becomes ready.
   */
  private static class ExoPlayerListener extends Player.DefaultEventListener {
    private final Window window;
    private final PlayerStateCallback playerStateCallback;

    ExoPlayerListener(Window window, PlayerStateCallback playerStateCallback) {
      this.window = window;
      this.playerStateCallback = playerStateCallback;
    }

    @Override
    public void onPlayerStateChanged(boolean playWhenReady, int playbackState) {
      // NOTE(review): window is used unchecked here although setWindow is
      // @Nullable — confirm a window is always set before playback starts.
      switch(playbackState) {
        case Player.STATE_IDLE:
        case Player.STATE_BUFFERING:
        case Player.STATE_ENDED:
          window.clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
          break;
        case Player.STATE_READY:
          if (playWhenReady) {
            window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
          } else {
            window.clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
          }
          notifyPlayerReady();
          break;
        default:
          break;
      }
    }

    private void notifyPlayerReady() {
      if (playerStateCallback != null) playerStateCallback.onPlayerReady();
    }
  }

  /** Callback fired when the player reaches STATE_READY. */
  public interface PlayerStateCallback {
    void onPlayerReady();
  }
}
|
||||
@@ -0,0 +1,8 @@
|
||||
package org.thoughtcrime.securesms.video;
|
||||
|
||||
/**
 * Thrown when a video cannot be transcoded within the requested size limit.
 */
public final class VideoSizeException extends Exception {

  VideoSizeException(String message) {
    super(message);
  }

  // Cause-preserving constructor, mirroring VideoSourceException so callers
  // in this package can wrap underlying failures without losing the cause.
  VideoSizeException(String message, Exception inner) {
    super(message, inner);
  }
}
|
||||
@@ -0,0 +1,12 @@
|
||||
package org.thoughtcrime.securesms.video;
|
||||
|
||||
/**
 * Thrown when a video source cannot be read or its metadata cannot be
 * interpreted (e.g. missing or invalid duration).
 */
public final class VideoSourceException extends Exception {

  VideoSourceException(String message) {
    super(message);
  }

  VideoSourceException(String message, Exception inner) {
    super(message, inner);
  }
}
|
||||
@@ -0,0 +1,58 @@
|
||||
package org.thoughtcrime.securesms.video;
|
||||
|
||||
import android.content.Context;
|
||||
import android.content.res.Resources;
|
||||
import android.media.MediaFormat;
|
||||
import android.util.DisplayMetrics;
|
||||
import android.util.Size;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.RequiresApi;
|
||||
|
||||
import org.thoughtcrime.securesms.mms.MediaConstraints;
|
||||
import org.thoughtcrime.securesms.util.MediaUtil;
|
||||
|
||||
public final class VideoUtil {
|
||||
|
||||
public static final int AUDIO_BIT_RATE = 192_000;
|
||||
public static final int VIDEO_FRAME_RATE = 30;
|
||||
public static final int VIDEO_BIT_RATE = 2_000_000;
|
||||
public static final int VIDEO_LONG_WIDTH = 1280;
|
||||
public static final int VIDEO_SHORT_WIDTH = 720;
|
||||
public static final int VIDEO_MAX_LENGTH_S = 30;
|
||||
|
||||
private static final int TOTAL_BYTES_PER_SECOND = (VIDEO_BIT_RATE / 8) + (AUDIO_BIT_RATE / 8);
|
||||
|
||||
@RequiresApi(21)
|
||||
public static final String VIDEO_MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;
|
||||
public static final String AUDIO_MIME_TYPE = "audio/mp4a-latm";
|
||||
|
||||
public static final String RECORDED_VIDEO_CONTENT_TYPE = MediaUtil.VIDEO_MP4;
|
||||
|
||||
private VideoUtil() { }
|
||||
|
||||
@RequiresApi(21)
|
||||
public static Size getVideoRecordingSize() {
|
||||
return isPortrait(screenSize())
|
||||
? new Size(VIDEO_SHORT_WIDTH, VIDEO_LONG_WIDTH)
|
||||
: new Size(VIDEO_LONG_WIDTH, VIDEO_SHORT_WIDTH);
|
||||
}
|
||||
|
||||
public static int getMaxVideoDurationInSeconds(@NonNull Context context, @NonNull MediaConstraints mediaConstraints) {
|
||||
int allowedSize = mediaConstraints.getCompressedVideoMaxSize(context);
|
||||
int duration = (int) Math.floor((float) allowedSize / TOTAL_BYTES_PER_SECOND);
|
||||
|
||||
return Math.min(duration, VIDEO_MAX_LENGTH_S);
|
||||
}
|
||||
|
||||
@RequiresApi(21)
|
||||
private static Size screenSize() {
|
||||
DisplayMetrics metrics = Resources.getSystem().getDisplayMetrics();
|
||||
return new Size(metrics.widthPixels, metrics.heightPixels);
|
||||
}
|
||||
|
||||
@RequiresApi(21)
|
||||
private static boolean isPortrait(Size size) {
|
||||
return size.getWidth() < size.getHeight();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,68 @@
|
||||
package org.thoughtcrime.securesms.video.exo;
|
||||
|
||||
|
||||
import android.net.Uri;
|
||||
|
||||
import com.google.android.exoplayer2.upstream.DataSource;
|
||||
import com.google.android.exoplayer2.upstream.DataSpec;
|
||||
import com.google.android.exoplayer2.upstream.DefaultDataSource;
|
||||
import com.google.android.exoplayer2.upstream.TransferListener;
|
||||
|
||||
import org.thoughtcrime.securesms.mms.PartAuthority;
|
||||
import org.thoughtcrime.securesms.providers.BlobProvider;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class AttachmentDataSource implements DataSource {
|
||||
|
||||
private final DefaultDataSource defaultDataSource;
|
||||
private final PartDataSource partDataSource;
|
||||
private final BlobDataSource blobDataSource;
|
||||
|
||||
private DataSource dataSource;
|
||||
|
||||
public AttachmentDataSource(DefaultDataSource defaultDataSource,
|
||||
PartDataSource partDataSource,
|
||||
BlobDataSource blobDataSource)
|
||||
{
|
||||
this.defaultDataSource = defaultDataSource;
|
||||
this.partDataSource = partDataSource;
|
||||
this.blobDataSource = blobDataSource;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addTransferListener(TransferListener transferListener) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public long open(DataSpec dataSpec) throws IOException {
|
||||
if (BlobProvider.isAuthority(dataSpec.uri)) dataSource = blobDataSource;
|
||||
else if (PartAuthority.isLocalUri(dataSpec.uri)) dataSource = partDataSource;
|
||||
else dataSource = defaultDataSource;
|
||||
|
||||
return dataSource.open(dataSpec);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(byte[] buffer, int offset, int readLength) throws IOException {
|
||||
return dataSource.read(buffer, offset, readLength);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Uri getUri() {
|
||||
return dataSource.getUri();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, List<String>> getResponseHeaders() {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
dataSource.close();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
package org.thoughtcrime.securesms.video.exo;
|
||||
|
||||
|
||||
import android.content.Context;
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
|
||||
import com.google.android.exoplayer2.upstream.DataSource;
|
||||
import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
|
||||
import com.google.android.exoplayer2.upstream.TransferListener;
|
||||
|
||||
public class AttachmentDataSourceFactory implements DataSource.Factory {
|
||||
|
||||
private final Context context;
|
||||
|
||||
private final DefaultDataSourceFactory defaultDataSourceFactory;
|
||||
private final TransferListener listener;
|
||||
|
||||
public AttachmentDataSourceFactory(@NonNull Context context,
|
||||
@NonNull DefaultDataSourceFactory defaultDataSourceFactory,
|
||||
@Nullable TransferListener listener)
|
||||
{
|
||||
this.context = context;
|
||||
this.defaultDataSourceFactory = defaultDataSourceFactory;
|
||||
this.listener = listener;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AttachmentDataSource createDataSource() {
|
||||
return new AttachmentDataSource(defaultDataSourceFactory.createDataSource(),
|
||||
new PartDataSource(context, listener),
|
||||
new BlobDataSource(context, listener));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,85 @@
|
||||
package org.thoughtcrime.securesms.video.exo;
|
||||
|
||||
|
||||
import android.content.Context;
|
||||
import android.net.Uri;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
|
||||
import com.google.android.exoplayer2.upstream.DataSource;
|
||||
import com.google.android.exoplayer2.upstream.DataSpec;
|
||||
import com.google.android.exoplayer2.upstream.TransferListener;
|
||||
|
||||
import org.thoughtcrime.securesms.providers.BlobProvider;
|
||||
|
||||
import java.io.EOFException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * A {@link DataSource} that reads from {@link BlobProvider}-backed URIs,
 * optionally reporting transfer events to a listener.
 */
public class BlobDataSource implements DataSource {

  private final @NonNull Context context;
  private final @Nullable TransferListener listener;

  // State of the currently opened blob; valid between open() and close().
  private Uri uri;
  private InputStream inputStream;

  BlobDataSource(@NonNull Context context, @Nullable TransferListener listener) {
    this.context = context.getApplicationContext();
    this.listener = listener;
  }

  @Override
  public void addTransferListener(TransferListener transferListener) {
  }

  /**
   * Opens the blob at dataSpec.uri, positioned at dataSpec.position.
   *
   * @return the number of bytes remaining from that position.
   * @throws EOFException when no bytes remain. NOTE(review): an unknown file
   *         size is unwrapped to 0, which also lands here — confirm blob
   *         sizes are always known for playable media.
   */
  @Override
  public long open(DataSpec dataSpec) throws IOException {
    this.uri = dataSpec.uri;
    this.inputStream = BlobProvider.getInstance().getStream(context, uri, dataSpec.position);

    if (listener != null) {
      listener.onTransferStart(this, dataSpec, false);
    }

    long size = unwrapLong(BlobProvider.getFileSize(uri));
    if (size - dataSpec.position <= 0) throw new EOFException("No more data");

    return size - dataSpec.position;
  }

  // Null-safe unboxing: a missing size becomes 0.
  private long unwrapLong(@Nullable Long boxed) {
    return boxed == null ? 0L : boxed;
  }

  /**
   * Reads from the open stream, notifying the listener of bytes transferred.
   *
   * @return the number of bytes read, or -1 at end of stream.
   */
  @Override
  public int read(byte[] buffer, int offset, int readLength) throws IOException {
    int read = inputStream.read(buffer, offset, readLength);

    if (read > 0 && listener != null) {
      listener.onBytesTransferred(this, null, false, read);
    }

    return read;
  }

  /** URI of the currently (or most recently) opened blob. */
  @Override
  public Uri getUri() {
    return uri;
  }

  @Override
  public Map<String, List<String>> getResponseHeaders() {
    return Collections.emptyMap();
  }

  @Override
  public void close() throws IOException {
    inputStream.close();
  }
}
|
||||
|
||||
@@ -0,0 +1,88 @@
|
||||
package org.thoughtcrime.securesms.video.exo;
|
||||
|
||||
|
||||
import android.content.Context;
|
||||
import android.net.Uri;
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
|
||||
import com.google.android.exoplayer2.upstream.DataSource;
|
||||
import com.google.android.exoplayer2.upstream.DataSpec;
|
||||
import com.google.android.exoplayer2.upstream.TransferListener;
|
||||
|
||||
import org.thoughtcrime.securesms.attachments.Attachment;
|
||||
import org.thoughtcrime.securesms.database.AttachmentDatabase;
|
||||
import org.thoughtcrime.securesms.database.DatabaseFactory;
|
||||
import org.thoughtcrime.securesms.mms.PartUriParser;
|
||||
|
||||
import java.io.EOFException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class PartDataSource implements DataSource {
|
||||
|
||||
private final @NonNull Context context;
|
||||
private final @Nullable TransferListener listener;
|
||||
|
||||
private Uri uri;
|
||||
private InputStream inputSteam;
|
||||
|
||||
PartDataSource(@NonNull Context context, @Nullable TransferListener listener) {
|
||||
this.context = context.getApplicationContext();
|
||||
this.listener = listener;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addTransferListener(TransferListener transferListener) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public long open(DataSpec dataSpec) throws IOException {
|
||||
this.uri = dataSpec.uri;
|
||||
|
||||
AttachmentDatabase attachmentDatabase = DatabaseFactory.getAttachmentDatabase(context);
|
||||
PartUriParser partUri = new PartUriParser(uri);
|
||||
Attachment attachment = attachmentDatabase.getAttachment(partUri.getPartId());
|
||||
|
||||
if (attachment == null) throw new IOException("Attachment not found");
|
||||
|
||||
this.inputSteam = attachmentDatabase.getAttachmentStream(partUri.getPartId(), dataSpec.position);
|
||||
|
||||
if (listener != null) {
|
||||
listener.onTransferStart(this, dataSpec, false);
|
||||
}
|
||||
|
||||
if (attachment.getSize() - dataSpec.position <= 0) throw new EOFException("No more data");
|
||||
|
||||
return attachment.getSize() - dataSpec.position;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(byte[] buffer, int offset, int readLength) throws IOException {
|
||||
int read = inputSteam.read(buffer, offset, readLength);
|
||||
|
||||
if (read > 0 && listener != null) {
|
||||
listener.onBytesTransferred(this, null, false, read);
|
||||
}
|
||||
|
||||
return read;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Uri getUri() {
|
||||
return uri;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, List<String>> getResponseHeaders() {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
inputSteam.close();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,54 @@
|
||||
package org.thoughtcrime.securesms.video.videoconverter;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaFormat;
|
||||
import android.media.MediaMuxer;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.RequiresApi;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileDescriptor;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
public final class AndroidMuxer implements Muxer {
|
||||
|
||||
private final MediaMuxer muxer;
|
||||
|
||||
AndroidMuxer(final @NonNull File file) throws IOException {
|
||||
muxer = new MediaMuxer(file.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
|
||||
}
|
||||
|
||||
|
||||
@RequiresApi(26)
|
||||
AndroidMuxer(final @NonNull FileDescriptor fileDescriptor) throws IOException {
|
||||
muxer = new MediaMuxer(fileDescriptor, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void start() {
|
||||
muxer.start();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
muxer.stop();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int addTrack(final @NonNull MediaFormat format) {
|
||||
return muxer.addTrack(format);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeSampleData(final int trackIndex, final @NonNull ByteBuffer byteBuf, final @NonNull MediaCodec.BufferInfo bufferInfo) {
|
||||
muxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
muxer.release();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,419 @@
|
||||
package org.thoughtcrime.securesms.video.videoconverter;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaExtractor;
|
||||
import android.media.MediaFormat;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
|
||||
import org.thoughtcrime.securesms.logging.Log;
|
||||
import org.thoughtcrime.securesms.video.VideoUtil;
|
||||
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Locale;
|
||||
|
||||
/**
 * Extracts, decodes, and re-encodes (to AAC) the audio track of a media file, handing the encoded
 * samples to a {@link Muxer}. One half of the media-conversion pipeline; the caller drives it by
 * calling {@link #step()} repeatedly until {@code mAudioEncoderDone} becomes true.
 * <p>
 * Not thread-safe; intended to be driven from a single worker thread.
 */
final class AudioTrackConverter {

  private static final String TAG = "media-converter";
  private static final boolean VERBOSE = false; // lots of logging

  private static final String OUTPUT_AUDIO_MIME_TYPE = VideoUtil.AUDIO_MIME_TYPE; // Advanced Audio Coding
  private static final int OUTPUT_AUDIO_AAC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC; //MediaCodecInfo.CodecProfileLevel.AACObjectHE;

  // Microseconds to wait when dequeuing codec buffers before giving up within a step.
  private static final int TIMEOUT_USEC = 10000;

  // Trim window in milliseconds (compared below against extractor/codec times, which are in
  // microseconds — hence the "* 1000" conversions).
  private final long mTimeFrom;
  private final long mTimeTo;
  private final int mAudioBitrate;

  // Input track duration as reported by MediaFormat.KEY_DURATION (microseconds), or 0 if absent.
  final long mInputDuration;

  private final MediaExtractor mAudioExtractor;
  private final MediaCodec mAudioDecoder;
  private final MediaCodec mAudioEncoder;

  // Cached codec buffer arrays (deprecated buffer API); the output arrays are refreshed whenever a
  // codec reports INFO_OUTPUT_BUFFERS_CHANGED.
  private final ByteBuffer[] mAudioDecoderInputBuffers;
  private ByteBuffer[] mAudioDecoderOutputBuffers;
  private final ByteBuffer[] mAudioEncoderInputBuffers;
  private ByteBuffer[] mAudioEncoderOutputBuffers;
  private final MediaCodec.BufferInfo mAudioDecoderOutputBufferInfo;
  private final MediaCodec.BufferInfo mAudioEncoderOutputBufferInfo;

  // Set once the encoder reports INFO_OUTPUT_FORMAT_CHANGED; the caller then registers the track
  // via setMuxer(). Until the muxer is attached, the pipeline stalls (see the loop conditions).
  MediaFormat mEncoderOutputAudioFormat;

  // Per-stage completion flags (EOS has propagated through extract -> decode -> encode).
  boolean mAudioExtractorDone;
  private boolean mAudioDecoderDone;
  boolean mAudioEncoderDone;

  // Muxer track index for the audio track; -1 until the track has been added.
  private int mOutputAudioTrack = -1;

  // Decoder output buffer waiting to be fed into the encoder; -1 when none is pending.
  private int mPendingAudioDecoderOutputBufferIndex = -1;
  // Largest presentation time (microseconds) written to the muxer so far.
  long mMuxingAudioPresentationTime;

  // Frame counters, used only by dumpState() diagnostics.
  private int mAudioExtractedFrameCount;
  private int mAudioDecodedFrameCount;
  private int mAudioEncodedFrameCount;

  private Muxer mMuxer;

  /**
   * Creates a converter for the input's audio track.
   *
   * @return null when the input contains no audio track (the extractor is released in that case)
   */
  static @Nullable AudioTrackConverter create(
      final @NonNull MediaConverter.Input input,
      final long timeFrom,
      final long timeTo,
      final int audioBitrate) throws IOException {

    final MediaExtractor audioExtractor = input.createExtractor();
    final int audioInputTrack = getAndSelectAudioTrackIndex(audioExtractor);
    if (audioInputTrack == -1) {
      audioExtractor.release();
      return null;
    }
    return new AudioTrackConverter(audioExtractor, audioInputTrack, timeFrom, timeTo, audioBitrate);
  }

  /**
   * Sets up decoder and encoder around the selected track, and seeks the extractor to the start
   * of the trim window.
   *
   * @throws FileNotFoundException if no AAC encoder is available on this device
   */
  private AudioTrackConverter(
      final @NonNull MediaExtractor audioExtractor,
      final int audioInputTrack,
      long timeFrom,
      long timeTo,
      int audioBitrate) throws IOException {

    mTimeFrom = timeFrom;
    mTimeTo = timeTo;
    mAudioExtractor = audioExtractor;
    mAudioBitrate = audioBitrate;

    final MediaCodecInfo audioCodecInfo = MediaConverter.selectCodec(OUTPUT_AUDIO_MIME_TYPE);
    if (audioCodecInfo == null) {
      // Don't fail CTS if they don't have an AAC codec (not here, anyway).
      Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_AUDIO_MIME_TYPE);
      throw new FileNotFoundException();
    }
    if (VERBOSE) Log.d(TAG, "audio found codec: " + audioCodecInfo.getName());

    final MediaFormat inputAudioFormat = mAudioExtractor.getTrackFormat(audioInputTrack);
    mInputDuration = inputAudioFormat.containsKey(MediaFormat.KEY_DURATION) ? inputAudioFormat.getLong(MediaFormat.KEY_DURATION) : 0;

    // Output format keeps the input's sample rate and channel count; only the codec, bitrate, and
    // AAC profile are imposed.
    final MediaFormat outputAudioFormat =
        MediaFormat.createAudioFormat(
            OUTPUT_AUDIO_MIME_TYPE,
            inputAudioFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
            inputAudioFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
    outputAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, audioBitrate);
    outputAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, OUTPUT_AUDIO_AAC_PROFILE);

    // Create a MediaCodec for the desired codec, then configure it as an encoder with
    // our desired properties. Request a Surface to use for input.
    mAudioEncoder = createAudioEncoder(audioCodecInfo, outputAudioFormat);
    // Create a MediaCodec for the decoder, based on the extractor's format.
    mAudioDecoder = createAudioDecoder(inputAudioFormat);

    mAudioDecoderInputBuffers = mAudioDecoder.getInputBuffers();
    mAudioDecoderOutputBuffers = mAudioDecoder.getOutputBuffers();
    mAudioEncoderInputBuffers = mAudioEncoder.getInputBuffers();
    mAudioEncoderOutputBuffers = mAudioEncoder.getOutputBuffers();
    mAudioDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
    mAudioEncoderOutputBufferInfo = new MediaCodec.BufferInfo();

    if (mTimeFrom > 0) {
      // Seek lands on the preceding sync sample; frames before mTimeFrom are dropped in step().
      mAudioExtractor.seekTo(mTimeFrom * 1000, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
      Log.i(TAG, "Seek audio:" + mTimeFrom + " " + mAudioExtractor.getSampleTime());
    }
  }

  /**
   * Attaches the muxer. If the encoder's output format is already known, the audio track is added
   * immediately, back-filling bitrate/AAC-profile keys that some encoders omit from the format.
   */
  void setMuxer(final @NonNull Muxer muxer) throws IOException {
    mMuxer = muxer;
    if (mEncoderOutputAudioFormat != null) {
      Log.d(TAG, "muxer: adding audio track.");
      if (!mEncoderOutputAudioFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
        mEncoderOutputAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitrate);
      }
      if (!mEncoderOutputAudioFormat.containsKey(MediaFormat.KEY_AAC_PROFILE)) {
        mEncoderOutputAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, OUTPUT_AUDIO_AAC_PROFILE);
      }
      mOutputAudioTrack = muxer.addTrack(mEncoderOutputAudioFormat);
    }
  }

  /**
   * Advances the pipeline by at most one buffer per stage:
   * extract -> feed decoder, poll decoder, feed encoder, poll encoder -> mux.
   * Each "while" below is effectively an "if" that exits via break after one unit of work.
   */
  void step() throws IOException {
    // Extract audio from file and feed to decoder.
    // Do not extract audio if we have determined the output format but we are not yet
    // ready to mux the frames.
    while (!mAudioExtractorDone && (mEncoderOutputAudioFormat == null || mMuxer != null)) {
      int decoderInputBufferIndex = mAudioDecoder.dequeueInputBuffer(TIMEOUT_USEC);
      if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        if (VERBOSE) Log.d(TAG, "no audio decoder input buffer");
        break;
      }
      if (VERBOSE) {
        Log.d(TAG, "audio decoder: returned input buffer: " + decoderInputBufferIndex);
      }
      final ByteBuffer decoderInputBuffer = mAudioDecoderInputBuffers[decoderInputBufferIndex];
      final int size = mAudioExtractor.readSampleData(decoderInputBuffer, 0);
      final long presentationTime = mAudioExtractor.getSampleTime();
      if (VERBOSE) {
        Log.d(TAG, "audio extractor: returned buffer of size " + size);
        Log.d(TAG, "audio extractor: returned buffer for time " + presentationTime);
      }
      // Done when the source is exhausted OR we've passed the end of the trim window (ms -> us).
      mAudioExtractorDone = size < 0 || (mTimeTo > 0 && presentationTime > mTimeTo * 1000);
      if (mAudioExtractorDone) {
        if (VERBOSE) Log.d(TAG, "audio extractor: EOS");
        mAudioDecoder.queueInputBuffer(
            decoderInputBufferIndex,
            0,
            0,
            0,
            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
      } else {
        mAudioDecoder.queueInputBuffer(
            decoderInputBufferIndex,
            0,
            size,
            presentationTime,
            mAudioExtractor.getSampleFlags());
      }
      mAudioExtractor.advance();
      mAudioExtractedFrameCount++;
      // We extracted a frame, let's try something else next.
      break;
    }

    // Poll output frames from the audio decoder.
    // Do not poll if we already have a pending buffer to feed to the encoder.
    while (!mAudioDecoderDone && mPendingAudioDecoderOutputBufferIndex == -1
        && (mEncoderOutputAudioFormat == null || mMuxer != null)) {
      final int decoderOutputBufferIndex =
          mAudioDecoder.dequeueOutputBuffer(
              mAudioDecoderOutputBufferInfo, TIMEOUT_USEC);
      if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        if (VERBOSE) Log.d(TAG, "no audio decoder output buffer");
        break;
      }
      if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        if (VERBOSE) Log.d(TAG, "audio decoder: output buffers changed");
        mAudioDecoderOutputBuffers = mAudioDecoder.getOutputBuffers();
        break;
      }
      if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        if (VERBOSE) {
          MediaFormat decoderOutputAudioFormat = mAudioDecoder.getOutputFormat();
          Log.d(TAG, "audio decoder: output format changed: " + decoderOutputAudioFormat);
        }
        break;
      }
      if (VERBOSE) {
        Log.d(TAG, "audio decoder: returned output buffer: " + decoderOutputBufferIndex);
        Log.d(TAG, "audio decoder: returned buffer of size " + mAudioDecoderOutputBufferInfo.size);
      }
      if ((mAudioDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        if (VERBOSE) Log.d(TAG, "audio decoder: codec config buffer");
        mAudioDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
        break;
      }
      // Drop frames before the trim start (the seek landed on an earlier sync sample),
      // but never drop the EOS buffer — it must propagate to the encoder.
      if (mAudioDecoderOutputBufferInfo.presentationTimeUs < mTimeFrom * 1000 &&
          (mAudioDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
        if (VERBOSE)
          Log.d(TAG, "audio decoder: frame prior to " + mAudioDecoderOutputBufferInfo.presentationTimeUs);
        mAudioDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
        break;
      }
      if (VERBOSE) {
        Log.d(TAG, "audio decoder: returned buffer for time " + mAudioDecoderOutputBufferInfo.presentationTimeUs);
        Log.d(TAG, "audio decoder: output buffer is now pending: " + mPendingAudioDecoderOutputBufferIndex);
      }
      mPendingAudioDecoderOutputBufferIndex = decoderOutputBufferIndex;
      mAudioDecodedFrameCount++;
      // We extracted a pending frame, let's try something else next.
      break;
    }

    // Feed the pending decoded audio buffer to the audio encoder.
    while (mPendingAudioDecoderOutputBufferIndex != -1) {
      if (VERBOSE) {
        Log.d(TAG, "audio decoder: attempting to process pending buffer: " + mPendingAudioDecoderOutputBufferIndex);
      }
      final int encoderInputBufferIndex = mAudioEncoder.dequeueInputBuffer(TIMEOUT_USEC);
      if (encoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        if (VERBOSE) Log.d(TAG, "no audio encoder input buffer");
        break;
      }
      if (VERBOSE) {
        Log.d(TAG, "audio encoder: returned input buffer: " + encoderInputBufferIndex);
      }
      final ByteBuffer encoderInputBuffer = mAudioEncoderInputBuffers[encoderInputBufferIndex];
      final int size = mAudioDecoderOutputBufferInfo.size;
      final long presentationTime = mAudioDecoderOutputBufferInfo.presentationTimeUs;
      if (VERBOSE) {
        Log.d(TAG, "audio decoder: processing pending buffer: " + mPendingAudioDecoderOutputBufferIndex);
      }
      if (VERBOSE) {
        Log.d(TAG, "audio decoder: pending buffer of size " + size);
        Log.d(TAG, "audio decoder: pending buffer for time " + presentationTime);
      }
      if (size >= 0) {
        // Copy the decoded PCM into the encoder's input buffer. duplicate() avoids disturbing the
        // original buffer's position/limit.
        final ByteBuffer decoderOutputBuffer = mAudioDecoderOutputBuffers[mPendingAudioDecoderOutputBufferIndex].duplicate();
        decoderOutputBuffer.position(mAudioDecoderOutputBufferInfo.offset);
        decoderOutputBuffer.limit(mAudioDecoderOutputBufferInfo.offset + size);
        encoderInputBuffer.position(0);
        encoderInputBuffer.put(decoderOutputBuffer);

        mAudioEncoder.queueInputBuffer(
            encoderInputBufferIndex,
            0,
            size,
            presentationTime,
            mAudioDecoderOutputBufferInfo.flags);
      }
      mAudioDecoder.releaseOutputBuffer(mPendingAudioDecoderOutputBufferIndex, false);
      mPendingAudioDecoderOutputBufferIndex = -1;
      if ((mAudioDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        if (VERBOSE) Log.d(TAG, "audio decoder: EOS");
        mAudioDecoderDone = true;
      }
      // We enqueued a pending frame, let's try something else next.
      break;
    }

    // Poll frames from the audio encoder and send them to the muxer.
    while (!mAudioEncoderDone && (mEncoderOutputAudioFormat == null || mMuxer != null)) {
      final int encoderOutputBufferIndex = mAudioEncoder.dequeueOutputBuffer(mAudioEncoderOutputBufferInfo, TIMEOUT_USEC);
      if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        if (VERBOSE) Log.d(TAG, "no audio encoder output buffer");
        break;
      }
      if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        if (VERBOSE) Log.d(TAG, "audio encoder: output buffers changed");
        mAudioEncoderOutputBuffers = mAudioEncoder.getOutputBuffers();
        break;
      }
      if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        if (VERBOSE) Log.d(TAG, "audio encoder: output format changed");
        Preconditions.checkState("audio encoder changed its output format again?", mOutputAudioTrack < 0);

        mEncoderOutputAudioFormat = mAudioEncoder.getOutputFormat();
        break;
      }
      Preconditions.checkState("should have added track before processing output", mMuxer != null);
      if (VERBOSE) {
        Log.d(TAG, "audio encoder: returned output buffer: " + encoderOutputBufferIndex);
        Log.d(TAG, "audio encoder: returned buffer of size " + mAudioEncoderOutputBufferInfo.size);
      }
      final ByteBuffer encoderOutputBuffer = mAudioEncoderOutputBuffers[encoderOutputBufferIndex];
      if ((mAudioEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        if (VERBOSE) Log.d(TAG, "audio encoder: codec config buffer");
        // Simply ignore codec config buffers.
        mAudioEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
        break;
      }
      if (VERBOSE) {
        Log.d(TAG, "audio encoder: returned buffer for time " + mAudioEncoderOutputBufferInfo.presentationTimeUs);
      }
      if (mAudioEncoderOutputBufferInfo.size != 0) {
        mMuxer.writeSampleData(mOutputAudioTrack, encoderOutputBuffer, mAudioEncoderOutputBufferInfo);
        mMuxingAudioPresentationTime = Math.max(mMuxingAudioPresentationTime, mAudioEncoderOutputBufferInfo.presentationTimeUs);
      }
      if ((mAudioEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        if (VERBOSE) Log.d(TAG, "audio encoder: EOS");
        mAudioEncoderDone = true;
      }
      mAudioEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
      mAudioEncodedFrameCount++;
      // We enqueued an encoded frame, let's try something else next.
      break;
    }
  }

  /**
   * Releases the extractor and both codecs. Attempts every release even if an earlier one fails,
   * then rethrows the first failure.
   */
  void release() throws Exception {
    Exception exception = null;
    try {
      if (mAudioExtractor != null) {
        mAudioExtractor.release();
      }
    } catch (Exception e) {
      Log.e(TAG, "error while releasing mAudioExtractor", e);
      exception = e;
    }
    try {
      if (mAudioDecoder != null) {
        mAudioDecoder.stop();
        mAudioDecoder.release();
      }
    } catch (Exception e) {
      Log.e(TAG, "error while releasing mAudioDecoder", e);
      if (exception == null) {
        exception = e;
      }
    }
    try {
      if (mAudioEncoder != null) {
        mAudioEncoder.stop();
        mAudioEncoder.release();
      }
    } catch (Exception e) {
      Log.e(TAG, "error while releasing mAudioEncoder", e);
      if (exception == null) {
        exception = e;
      }
    }
    if (exception != null) {
      throw exception;
    }
  }

  /** One-line diagnostic summary of the pipeline's counters and flags, for logging. */
  String dumpState() {
    return String.format(Locale.US,
        "A{"
            + "extracted:%d(done:%b) "
            + "decoded:%d(done:%b) "
            + "encoded:%d(done:%b) "
            + "pending:%d "
            + "muxing:%b(track:%d} )",
        mAudioExtractedFrameCount, mAudioExtractorDone,
        mAudioDecodedFrameCount, mAudioDecoderDone,
        mAudioEncodedFrameCount, mAudioEncoderDone,
        mPendingAudioDecoderOutputBufferIndex,
        mMuxer != null, mOutputAudioTrack);
  }

  /** Sanity check after the pipeline finishes: no decoder buffer may still be pending. */
  void verifyEndState() {
    Preconditions.checkState("no frame should be pending", -1 == mPendingAudioDecoderOutputBufferIndex);
  }

  /** Creates and starts a decoder matching the extractor's track format. */
  private static @NonNull MediaCodec createAudioDecoder(final @NonNull MediaFormat inputFormat) throws IOException {
    final MediaCodec decoder = MediaCodec.createDecoderByType(MediaConverter.getMimeTypeFor(inputFormat));
    decoder.configure(inputFormat, null, null, 0);
    decoder.start();
    return decoder;
  }

  /** Creates and starts an encoder for the given codec and output format. */
  private static @NonNull MediaCodec createAudioEncoder(final @NonNull MediaCodecInfo codecInfo, final @NonNull MediaFormat format) throws IOException {
    final MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    encoder.start();
    return encoder;
  }

  /** Selects the first audio track in the extractor and returns its index, or -1 if none. */
  private static int getAndSelectAudioTrackIndex(MediaExtractor extractor) {
    for (int index = 0; index < extractor.getTrackCount(); ++index) {
      if (VERBOSE) {
        Log.d(TAG, "format for track " + index + " is " + MediaConverter.getMimeTypeFor(extractor.getTrackFormat(index)));
      }
      if (isAudioFormat(extractor.getTrackFormat(index))) {
        extractor.selectTrack(index);
        return index;
      }
    }
    return -1;
  }

  /** True when the track's MIME type identifies an audio stream. */
  private static boolean isAudioFormat(final @NonNull MediaFormat format) {
    return MediaConverter.getMimeTypeFor(format).startsWith("audio/");
  }
}
|
||||
@@ -0,0 +1,11 @@
|
||||
package org.thoughtcrime.securesms.video.videoconverter;
|
||||
|
||||
/**
 * Signals a failure while encoding media during conversion.
 * <p>
 * Checked, so callers in the conversion pipeline are forced to handle encode failures explicitly.
 */
public final class EncodingException extends Exception {

  /** Creates an exception with a descriptive message only. */
  EncodingException(String message) {
    super(message);
  }

  /** Creates an exception wrapping the underlying cause. */
  EncodingException(String message, Exception cause) {
    super(message, cause);
  }
}
|
||||
@@ -0,0 +1,187 @@
|
||||
/*
|
||||
* Copyright (C) 2013 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* This file has been modified by Signal.
|
||||
*/
|
||||
|
||||
package org.thoughtcrime.securesms.video.videoconverter;
|
||||
|
||||
import android.opengl.EGL14;
|
||||
import android.opengl.EGLConfig;
|
||||
import android.opengl.EGLContext;
|
||||
import android.opengl.EGLDisplay;
|
||||
import android.opengl.EGLExt;
|
||||
import android.opengl.EGLSurface;
|
||||
import android.view.Surface;
|
||||
|
||||
import org.thoughtcrime.securesms.logging.Log;
|
||||
|
||||
|
||||
/**
|
||||
* Holds state associated with a Surface used for MediaCodec encoder input.
|
||||
* <p>
|
||||
* The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
|
||||
* to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
|
||||
* to the video encoder.
|
||||
*/
|
||||
final class InputSurface {
  private static final String TAG = "InputSurface";
  private static final boolean VERBOSE = false; // not referenced within this class

  // EGL_RECORDABLE_ANDROID surface attribute; not exposed as a constant by EGL14.
  private static final int EGL_RECORDABLE_ANDROID = 0x3142;
  private static final int EGL_OPENGL_ES2_BIT = 4;

  // EGL state; all nulled out in release() so post-release use fails fast with an NPE.
  private EGLDisplay mEGLDisplay;
  private EGLContext mEGLContext;
  private EGLSurface mEGLSurface;

  private Surface mSurface;

  /**
   * Creates an InputSurface from a Surface.
   *
   * @param surface typically obtained from MediaCodec.createInputSurface(); must not be null
   * @throws TranscodingException if EGL setup fails
   */
  InputSurface(Surface surface) throws TranscodingException {
    if (surface == null) {
      throw new NullPointerException();
    }
    mSurface = surface;

    eglSetup();
  }

  /**
   * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
   */
  private void eglSetup() throws TranscodingException {
    mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
    if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
      throw new TranscodingException("unable to get EGL14 display");
    }
    int[] version = new int[2];
    if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
      mEGLDisplay = null;
      throw new TranscodingException("unable to initialize EGL14");
    }

    // Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
    // to be able to tell if the frame is reasonable.
    int[] attribList = {
        EGL14.EGL_RED_SIZE, 8,
        EGL14.EGL_GREEN_SIZE, 8,
        EGL14.EGL_BLUE_SIZE, 8,
        EGL14.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
        EGL_RECORDABLE_ANDROID, 1,
        EGL14.EGL_NONE
    };
    EGLConfig[] configs = new EGLConfig[1];
    int[] numConfigs = new int[1];
    if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
        numConfigs, 0)) {
      throw new TranscodingException("unable to find RGB888+recordable ES2 EGL config");
    }

    // Configure context for OpenGL ES 2.0.
    int[] attrib_list = {
        EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
        EGL14.EGL_NONE
    };
    mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
        attrib_list, 0);
    checkEglError("eglCreateContext");
    if (mEGLContext == null) {
      throw new TranscodingException("null context");
    }

    // Create a window surface, and attach it to the Surface we received.
    int[] surfaceAttribs = {
        EGL14.EGL_NONE
    };
    mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
        surfaceAttribs, 0);
    checkEglError("eglCreateWindowSurface");
    if (mEGLSurface == null) {
      throw new TranscodingException("surface was null");
    }
  }

  /**
   * Discard all resources held by this class, notably the EGL context. Also releases the
   * Surface that was passed to our constructor.
   */
  public void release() {
    if (EGL14.eglGetCurrentContext().equals(mEGLContext)) {
      // Clear the current context and surface to ensure they are discarded immediately.
      EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
          EGL14.EGL_NO_CONTEXT);
    }
    EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
    EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
    // NOTE(review): eglTerminate is deliberately left commented out — presumably because the
    // display may be shared with other consumers; confirm before re-enabling.
    //EGL14.eglTerminate(mEGLDisplay);

    mSurface.release();

    // null everything out so future attempts to use this object will cause an NPE
    mEGLDisplay = null;
    mEGLContext = null;
    mEGLSurface = null;

    mSurface = null;
  }

  /**
   * Makes our EGL context and surface current.
   */
  void makeCurrent() throws TranscodingException {
    if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
      throw new TranscodingException("eglMakeCurrent failed");
    }
  }

  /**
   * Calls eglSwapBuffers. Use this to "publish" the current frame.
   *
   * @return false if the swap failed
   */
  boolean swapBuffers() {
    return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
  }

  /**
   * Returns the Surface that the MediaCodec receives buffers from.
   */
  public Surface getSurface() {
    return mSurface;
  }

  /**
   * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
   */
  void setPresentationTime(long nsecs) {
    EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
  }

  /**
   * Checks for EGL errors. Drains the entire EGL error queue (logging each error) before
   * throwing, so no stale errors leak into later checks.
   */
  private static void checkEglError(String msg) throws TranscodingException {
    boolean failed = false;
    int error;
    while ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
      Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
      failed = true;
    }
    if (failed) {
      throw new TranscodingException("EGL error encountered (see log)");
    }
  }
}
|
||||
@@ -0,0 +1,409 @@
|
||||
/*
|
||||
* Copyright (C) 2013 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* This file has been modified by Signal.
|
||||
*/
|
||||
|
||||
package org.thoughtcrime.securesms.video.videoconverter;
|
||||
|
||||
import android.content.Context;
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaCodecList;
|
||||
import android.media.MediaDataSource;
|
||||
import android.media.MediaExtractor;
|
||||
import android.media.MediaFormat;
|
||||
import android.net.Uri;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
import androidx.annotation.RequiresApi;
|
||||
import androidx.annotation.StringDef;
|
||||
import androidx.annotation.WorkerThread;
|
||||
|
||||
import org.thoughtcrime.securesms.logging.Log;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileDescriptor;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
|
||||
@SuppressWarnings("WeakerAccess")
|
||||
public final class MediaConverter {
    private static final String TAG = "media-converter";
    private static final boolean VERBOSE = false; // lots of logging

    // Describes when the annotation will be discarded
    @Retention(RetentionPolicy.SOURCE)
    @StringDef({VIDEO_CODEC_H264, VIDEO_CODEC_H265})
    public @interface VideoCodec {}
    public static final String VIDEO_CODEC_H264 = "video/avc";
    public static final String VIDEO_CODEC_H265 = "video/hevc";

    // Source of the media to transcode and sink for the transcoded result.
    private Input mInput;
    private Output mOutput;

    // Trim range; converted to microseconds via "* 1000" in doExtractDecodeEditEncodeMux,
    // so these are milliseconds. A value <= 0 means "unset" (start / full duration).
    private long mTimeFrom;
    private long mTimeTo;
    private int mVideoResolution;
    private int mVideoBitrate = 2000000; // 2Mbps
    private @VideoCodec String mVideoCodec = VIDEO_CODEC_H264;
    private int mAudioBitrate = 128000; // 128Kbps

    private Listener mListener;
    private boolean mCancelled;

    /** Progress callback for {@link #convert()}. */
    public interface Listener {
        /**
         * Invoked whenever the completed percentage changes.
         *
         * @param percent completed percentage of the (possibly trimmed) input duration
         * @return true to request cancellation of the running conversion
         */
        boolean onProgress(int percent);
    }

    public MediaConverter() {
    }

    /** Reads the input media from a plain file path. */
    @SuppressWarnings("unused")
    public void setInput(final @NonNull File file) {
        mInput = new FileInput(file);
    }

    /** Reads the input media from a content {@link Uri} resolved through {@code context}. */
    @SuppressWarnings("unused")
    public void setInput(final @NonNull Context context, final @NonNull Uri uri) {
        mInput = new UriInput(context, uri);
    }

    /** Reads the input media from an arbitrary {@link MediaDataSource} (API 23+). */
    @RequiresApi(23)
    @SuppressWarnings("unused")
    public void setInput(final @NonNull MediaDataSource mediaDataSource) {
        mInput = new MediaDataSourceInput(mediaDataSource);
    }

    /** Writes the transcoded output to a file. */
    @SuppressWarnings("unused")
    public void setOutput(final @NonNull File file) {
        mOutput = new FileOutput(file);
    }

    /** Writes the transcoded output to an already-open file descriptor (API 26+). */
    @SuppressWarnings("unused")
    @RequiresApi(26)
    public void setOutput(final @NonNull FileDescriptor fileDescriptor) {
        mOutput = new FileDescriptorOutput(fileDescriptor);
    }

    /**
     * Restricts conversion to [timeFrom, timeTo] in milliseconds; pass non-positive values
     * to leave either end unbounded.
     *
     * @throws IllegalArgumentException if timeTo is set and timeFrom is not strictly before it.
     *     NOTE(review): the fields are assigned before validation, so a failed call still
     *     mutates this object's state — consider validating first.
     */
    @SuppressWarnings("unused")
    public void setTimeRange(long timeFrom, long timeTo) {
        mTimeFrom = timeFrom;
        mTimeTo = timeTo;

        if (timeTo > 0 && timeFrom >= timeTo) {
            throw new IllegalArgumentException("timeFrom:" + timeFrom + " timeTo:" + timeTo);
        }
    }

    /** Sets the output video resolution passed to {@link VideoTrackConverter#create}. */
    @SuppressWarnings("unused")
    public void setVideoResolution(int videoResolution) {
        mVideoResolution = videoResolution;
    }

    /**
     * Selects the output video codec ({@link #VIDEO_CODEC_H264} or {@link #VIDEO_CODEC_H265}).
     *
     * @throws FileNotFoundException if this device has no encoder for the MIME type.
     *     NOTE(review): FileNotFoundException is a surprising type for "no encoder";
     *     callers apparently rely on it, so it is kept.
     */
    @SuppressWarnings("unused")
    public void setVideoCodec(final @VideoCodec String videoCodec) throws FileNotFoundException {
        if (selectCodec(videoCodec) == null) {
            throw new FileNotFoundException();
        }
        mVideoCodec = videoCodec;
    }

    /** Sets the target video bitrate in bits per second (default 2 Mbps). */
    @SuppressWarnings("unused")
    public void setVideoBitrate(final int videoBitrate) {
        mVideoBitrate = videoBitrate;
    }

    /** Sets the target audio bitrate in bits per second (default 128 Kbps). */
    @SuppressWarnings("unused")
    public void setAudioBitrate(final int audioBitrate) {
        mAudioBitrate = audioBitrate;
    }

    /** Registers the progress/cancellation listener; may be null. */
    @SuppressWarnings("unused")
    public void setListener(final Listener listener) {
        mListener = listener;
    }

    /**
     * Runs the full transcode: builds per-track converters, drives the extract/decode/encode/mux
     * loop, and releases everything afterwards. Blocking; must run on a worker thread.
     *
     * @throws EncodingException if neither track exists, or wrapping any failure raised during
     *                           conversion or release
     * @throws IOException       on I/O failure while reading input or writing output
     */
    @WorkerThread
    @RequiresApi(23)
    public void convert() throws EncodingException, IOException {
        // Exception that may be thrown during release.
        Exception exception = null;
        Muxer muxer = null;
        VideoTrackConverter videoTrackConverter = null;
        AudioTrackConverter audioTrackConverter = null;

        try {
            videoTrackConverter = VideoTrackConverter.create(mInput, mTimeFrom, mTimeTo, mVideoResolution, mVideoBitrate, mVideoCodec);
            audioTrackConverter = AudioTrackConverter.create(mInput, mTimeFrom, mTimeTo, mAudioBitrate);

            if (videoTrackConverter == null && audioTrackConverter == null) {
                throw new EncodingException("No video and audio tracks");
            }

            muxer = mOutput.createMuxer();

            doExtractDecodeEditEncodeMux(
                    videoTrackConverter,
                    audioTrackConverter,
                    muxer);

        } catch (EncodingException | IOException e) {
            Log.e(TAG, "error converting", e);
            exception = e;
            throw e;
        } catch (Exception e) {
            Log.e(TAG, "error converting", e);
            exception = e;
        } finally {
            if (VERBOSE) Log.d(TAG, "releasing extractor, decoder, encoder, and muxer");
            // Try to release everything we acquired, even if one of the releases fails, in which
            // case we save the first exception we got and re-throw at the end (unless something
            // other exception has already been thrown). This guarantees the first exception thrown
            // is reported as the cause of the error, everything is (attempted) to be released, and
            // all other exceptions appear in the logs.
            try {
                if (videoTrackConverter != null) {
                    videoTrackConverter.release();
                }
            } catch (Exception e) {
                if (exception == null) {
                    exception = e;
                }
            }
            try {
                if (audioTrackConverter != null) {
                    audioTrackConverter.release();
                }
            } catch (Exception e) {
                if (exception == null) {
                    exception = e;
                }
            }
            try {
                if (muxer != null) {
                    muxer.stop();
                    muxer.release();
                }
            } catch (Exception e) {
                Log.e(TAG, "error while releasing muxer", e);
                if (exception == null) {
                    exception = e;
                }
            }
        }
        if (exception != null) {
            throw new EncodingException("Transcode failed", exception);
        }
    }

    /**
     * Does the actual work for extracting, decoding, encoding and muxing.
     *
     * Alternates between the video and audio converters, always stepping the track whose muxed
     * presentation time is behind, so the two tracks stay interleaved in the output. The muxer
     * is started only once every present track has produced its encoder output format.
     */
    private void doExtractDecodeEditEncodeMux(
            final @Nullable VideoTrackConverter videoTrackConverter,
            final @Nullable AudioTrackConverter audioTrackConverter,
            final @NonNull Muxer muxer) throws IOException, TranscodingException {

        boolean muxing = false;
        int percentProcessed = 0;
        long inputDuration = Math.max(
                videoTrackConverter == null ? 0 : videoTrackConverter.mInputDuration,
                audioTrackConverter == null ? 0 : audioTrackConverter.mInputDuration);

        // Loop until every present track's encoder has drained, or the listener cancels.
        while (!mCancelled &&
                ((videoTrackConverter != null && !videoTrackConverter.mVideoEncoderDone) ||
                 (audioTrackConverter != null && !audioTrackConverter.mAudioEncoderDone))) {

            if (VERBOSE) {
                Log.d(TAG, "loop: " +
                        (videoTrackConverter == null ? "" : videoTrackConverter.dumpState()) +
                        (audioTrackConverter == null ? "" : audioTrackConverter.dumpState()) +
                        " muxing:" + muxing);
            }

            // Step video when it is at or behind the audio timeline (or audio is absent/drained).
            if (videoTrackConverter != null && (audioTrackConverter == null || audioTrackConverter.mAudioExtractorDone || videoTrackConverter.mMuxingVideoPresentationTime <= audioTrackConverter.mMuxingAudioPresentationTime)) {
                videoTrackConverter.step();
            }

            // Step audio when it is at or behind the video timeline (or video is absent/drained).
            if (audioTrackConverter != null && (videoTrackConverter == null || videoTrackConverter.mVideoExtractorDone || videoTrackConverter.mMuxingVideoPresentationTime >= audioTrackConverter.mMuxingAudioPresentationTime)) {
                audioTrackConverter.step();
            }

            // Report progress as a percentage of the trimmed time window (microseconds).
            if (inputDuration != 0 && mListener != null) {
                final long timeFromUs = mTimeFrom <= 0 ? 0 : mTimeFrom * 1000;
                final long timeToUs = mTimeTo <= 0 ? inputDuration : mTimeTo * 1000;
                final int curPercentProcessed = (int) (100 *
                        (Math.max(
                                videoTrackConverter == null ? 0 : videoTrackConverter.mMuxingVideoPresentationTime,
                                audioTrackConverter == null ? 0 : audioTrackConverter.mMuxingAudioPresentationTime)
                        - timeFromUs) / (timeToUs - timeFromUs));

                if (curPercentProcessed != percentProcessed) {
                    percentProcessed = curPercentProcessed;
                    // A listener returning true latches cancellation for the rest of the loop.
                    mCancelled = mCancelled || mListener.onProgress(percentProcessed);
                }
            }

            // Start the muxer exactly once, after every present track has an output format.
            if (!muxing
                    && (videoTrackConverter == null || videoTrackConverter.mEncoderOutputVideoFormat != null)
                    && (audioTrackConverter == null || audioTrackConverter.mEncoderOutputAudioFormat != null)) {
                if (videoTrackConverter != null) {
                    videoTrackConverter.setMuxer(muxer);
                }
                if (audioTrackConverter != null) {
                    audioTrackConverter.setMuxer(muxer);
                }
                Log.d(TAG, "muxer: starting");
                muxer.start();
                muxing = true;
            }
        }

        // Basic sanity checks.
        if (videoTrackConverter != null) {
            videoTrackConverter.verifyEndState();
        }
        if (audioTrackConverter != null) {
            audioTrackConverter.verifyEndState();
        }

        // TODO: Check the generated output file.
    }

    /** Convenience accessor for a format's MIME type. */
    static String getMimeTypeFor(MediaFormat format) {
        return format.getString(MediaFormat.KEY_MIME);
    }

    /**
     * Returns the first codec capable of encoding the specified MIME type, or null if no match was
     * found.
     */
    static MediaCodecInfo selectCodec(final String mimeType) {
        final int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);

            // Only encoders are interesting here; skip decoders.
            if (!codecInfo.isEncoder()) {
                continue;
            }

            final String[] types = codecInfo.getSupportedTypes();
            for (String type : types) {
                if (type.equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }

    /** Abstraction over the various ways an input can be opened for extraction. */
    interface Input {
        @NonNull
        MediaExtractor createExtractor() throws IOException;
    }

    /** Input backed by a plain file path. */
    private static class FileInput implements Input {

        final File file;

        FileInput(final @NonNull File file) {
            this.file = file;
        }

        @Override
        public @NonNull
        MediaExtractor createExtractor() throws IOException {
            final MediaExtractor extractor = new MediaExtractor();
            extractor.setDataSource(file.getAbsolutePath());
            return extractor;
        }
    }

    /** Input backed by a content Uri resolved through a Context. */
    private static class UriInput implements Input {

        final Uri uri;
        final Context context;

        UriInput(final @NonNull Context context, final @NonNull Uri uri) {
            this.uri = uri;
            this.context = context;
        }

        @Override
        public @NonNull
        MediaExtractor createExtractor() throws IOException {
            final MediaExtractor extractor = new MediaExtractor();
            extractor.setDataSource(context, uri, null);
            return extractor;
        }
    }

    /** Input backed by an arbitrary MediaDataSource (API 23+). */
    @RequiresApi(23)
    private static class MediaDataSourceInput implements Input {

        private final MediaDataSource mediaDataSource;

        MediaDataSourceInput(final @NonNull MediaDataSource mediaDataSource) {
            this.mediaDataSource = mediaDataSource;
        }

        @Override
        public @NonNull
        MediaExtractor createExtractor() throws IOException {
            final MediaExtractor extractor = new MediaExtractor();
            extractor.setDataSource(mediaDataSource);
            return extractor;
        }
    }

    /** Abstraction over the various ways a muxer target can be created. */
    interface Output {
        @NonNull
        Muxer createMuxer() throws IOException;
    }

    /** Output backed by a file. */
    private static class FileOutput implements Output {

        final File file;

        FileOutput(final @NonNull File file) {
            this.file = file;
        }

        @Override
        public @NonNull
        Muxer createMuxer() throws IOException {
            return new AndroidMuxer(file);
        }
    }

    /** Output backed by an already-open file descriptor (API 26+). */
    @RequiresApi(26)
    private static class FileDescriptorOutput implements Output {

        final FileDescriptor fileDescriptor;

        FileDescriptorOutput(final @NonNull FileDescriptor fileDescriptor) {
            this.fileDescriptor = fileDescriptor;
        }

        @Override
        public @NonNull
        Muxer createMuxer() throws IOException {
            return new AndroidMuxer(fileDescriptor);
        }
    }
}
|
||||
@@ -0,0 +1,22 @@
|
||||
package org.thoughtcrime.securesms.video.videoconverter;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaFormat;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
 * Minimal muxer abstraction used by the transcoder, so the conversion loop can write
 * either to Android's {@code MediaMuxer} (see {@code AndroidMuxer}) or to another container
 * writer without changing the pipeline.
 *
 * Expected call order (mirrors MediaMuxer): {@link #addTrack} for each track, then
 * {@link #start}, then {@link #writeSampleData} repeatedly, then {@link #stop} and
 * {@link #release}.
 */
public interface Muxer {

    /** Begins muxing; call after all tracks have been added. */
    void start() throws IOException;

    /** Finishes the container; no further samples may be written afterwards. */
    void stop() throws IOException;

    /**
     * Registers a track with the given format.
     *
     * @return the track index to pass to {@link #writeSampleData}
     */
    int addTrack(@NonNull MediaFormat format) throws IOException;

    /** Writes one encoded sample for the given track, described by {@code bufferInfo}. */
    void writeSampleData(int trackIndex, @NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException;

    /** Releases all resources; the muxer is unusable afterwards. */
    void release();
}
|
||||
@@ -0,0 +1,303 @@
|
||||
/*
|
||||
* Copyright (C) 2013 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* This file has been modified by Signal.
|
||||
*/
|
||||
|
||||
package org.thoughtcrime.securesms.video.videoconverter;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.EGL14;
|
||||
import android.view.Surface;
|
||||
|
||||
import org.thoughtcrime.securesms.logging.Log;
|
||||
|
||||
import javax.microedition.khronos.egl.EGL10;
|
||||
import javax.microedition.khronos.egl.EGLConfig;
|
||||
import javax.microedition.khronos.egl.EGLContext;
|
||||
import javax.microedition.khronos.egl.EGLDisplay;
|
||||
import javax.microedition.khronos.egl.EGLSurface;
|
||||
|
||||
/**
|
||||
* Holds state associated with a Surface used for MediaCodec decoder output.
|
||||
* <p>
|
||||
* The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
|
||||
* and then create a Surface for that SurfaceTexture. The Surface can be passed to
|
||||
* MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
|
||||
* texture with updateTexImage, then render the texture with GL to a pbuffer.
|
||||
* <p>
|
||||
* The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
|
||||
* Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
|
||||
* we just draw it on whatever surface is current.
|
||||
* <p>
|
||||
* By default, the Surface will be using a BufferQueue in asynchronous mode, so we
|
||||
* can potentially drop frames.
|
||||
*/
|
||||
final class OutputSurface implements SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = "OutputSurface";
    private static final boolean VERBOSE = false;

    // EGL10 has no symbolic constant for this renderable-type bit; value matches EGL14.EGL_OPENGL_ES2_BIT.
    private static final int EGL_OPENGL_ES2_BIT = 4;

    // EGL state. Only populated by the (width, height) constructor; the no-arg constructor
    // leaves these null and renders into whatever EGL context/surface is already current.
    private EGL10 mEGL;
    private EGLDisplay mEGLDisplay;
    private EGLContext mEGLContext;
    private EGLSurface mEGLSurface;

    private SurfaceTexture mSurfaceTexture;
    private Surface mSurface;

    private final Object mFrameSyncObject = new Object(); // guards mFrameAvailable
    private boolean mFrameAvailable;

    private TextureRender mTextureRender;

    /**
     * Creates an OutputSurface backed by a pbuffer with the specifed dimensions. The new
     * EGL context and surface will be made current. Creates a Surface that can be passed
     * to MediaCodec.configure().
     *
     * @throws IllegalArgumentException if either dimension is not positive
     * @throws TranscodingException on any EGL/GL setup failure
     */
    OutputSurface(int width, int height) throws TranscodingException {
        if (width <= 0 || height <= 0) {
            throw new IllegalArgumentException();
        }

        eglSetup(width, height);
        makeCurrent();

        setup();
    }

    /**
     * Creates an OutputSurface using the current EGL context. Creates a Surface that can be
     * passed to MediaCodec.configure().
     */
    OutputSurface() throws TranscodingException {
        setup();
    }

    /**
     * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
     * with the SurfaceTexture.
     */
    private void setup() throws TranscodingException {
        mTextureRender = new TextureRender();
        mTextureRender.surfaceCreated();

        // Even if we don't access the SurfaceTexture after the constructor returns, we
        // still need to keep a reference to it. The Surface doesn't retain a reference
        // at the Java level, so if we don't either then the object can get GCed, which
        // causes the native finalizer to run.
        if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
        mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());

        // This doesn't work if OutputSurface is created on the thread that CTS started for
        // these test cases.
        //
        // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
        // create a Handler that uses it. The "frame available" message is delivered
        // there, but since we're not a Looper-based thread we'll never see it. For
        // this to do anything useful, OutputSurface must be created on a thread without
        // a Looper, so that SurfaceTexture uses the main application Looper instead.
        //
        // Java language note: passing "this" out of a constructor is generally unwise,
        // but we should be able to get away with it here.
        mSurfaceTexture.setOnFrameAvailableListener(this);

        mSurface = new Surface(mSurfaceTexture);
    }

    /**
     * Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer.
     */
    private void eglSetup(int width, int height) throws TranscodingException {
        mEGL = (EGL10)EGLContext.getEGL();
        mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
        if (!mEGL.eglInitialize(mEGLDisplay, null)) {
            throw new TranscodingException("unable to initialize EGL10");
        }

        // Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
        // to be able to tell if the frame is reasonable.
        int[] attribList = {
                EGL10.EGL_RED_SIZE, 8,
                EGL10.EGL_GREEN_SIZE, 8,
                EGL10.EGL_BLUE_SIZE, 8,
                EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
                EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
                EGL10.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) {
            throw new TranscodingException("unable to find RGB888+pbuffer EGL config");
        }

        // Configure context for OpenGL ES 2.0.
        int[] attrib_list = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL10.EGL_NONE
        };
        mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT,
                attrib_list);
        checkEglError("eglCreateContext");
        if (mEGLContext == null) {
            throw new TranscodingException("null context");
        }

        // Create a pbuffer surface. By using this for output, we can use glReadPixels
        // to test values in the output.
        int[] surfaceAttribs = {
                EGL10.EGL_WIDTH, width,
                EGL10.EGL_HEIGHT, height,
                EGL10.EGL_NONE
        };
        mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs);
        checkEglError("eglCreatePbufferSurface");
        if (mEGLSurface == null) {
            throw new TranscodingException("surface was null");
        }
    }

    /**
     * Discard all resources held by this class, notably the EGL context.
     */
    public void release() {
        if (mEGL != null) {
            if (mEGL.eglGetCurrentContext().equals(mEGLContext)) {
                // Clear the current context and surface to ensure they are discarded immediately.
                mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE,
                        EGL10.EGL_NO_CONTEXT);
            }
            mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface);
            mEGL.eglDestroyContext(mEGLDisplay, mEGLContext);
            //mEGL.eglTerminate(mEGLDisplay);
        }

        mSurface.release();

        // this causes a bunch of warnings that appear harmless but might confuse someone:
        // W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
        //mSurfaceTexture.release();

        // null everything out so future attempts to use this object will cause an NPE
        mEGLDisplay = null;
        mEGLContext = null;
        mEGLSurface = null;
        mEGL = null;

        mTextureRender = null;
        mSurface = null;
        mSurfaceTexture = null;
    }

    /**
     * Makes our EGL context and surface current.
     *
     * @throws TranscodingException if this instance was built with the no-arg constructor
     *                              (no EGL state of its own) or eglMakeCurrent fails
     */
    private void makeCurrent() throws TranscodingException {
        if (mEGL == null) {
            throw new TranscodingException("not configured for makeCurrent");
        }
        checkEglError("before makeCurrent");
        if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
            throw new TranscodingException("eglMakeCurrent failed");
        }
    }

    /**
     * Returns the Surface that we draw onto.
     */
    public Surface getSurface() {
        return mSurface;
    }

    /**
     * Replaces the fragment shader.
     */
    void changeFragmentShader(String fragmentShader) throws TranscodingException {
        mTextureRender.changeFragmentShader(fragmentShader);
    }

    /**
     * Latches the next buffer into the texture. Must be called from the thread that created
     * the OutputSurface object, after the onFrameAvailable callback has signaled that new
     * data is available.
     *
     * @throws TranscodingException if no frame arrives within the timeout, or the wait is interrupted
     */
    void awaitNewImage() throws TranscodingException {
        final int TIMEOUT_MS = 750;

        synchronized (mFrameSyncObject) {
            final long expireTime = System.currentTimeMillis() + TIMEOUT_MS;

            // Loop to tolerate spurious wakeups; the deadline bounds total wait time.
            while (!mFrameAvailable) {
                try {
                    // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
                    // stalling the test if it doesn't arrive.
                    mFrameSyncObject.wait(TIMEOUT_MS);

                    if (!mFrameAvailable && System.currentTimeMillis() > expireTime) {
                        throw new TranscodingException("Surface frame wait timed out");
                    }
                } catch (InterruptedException ie) {
                    // shouldn't happen
                    throw new TranscodingException(ie);
                }
            }
            mFrameAvailable = false;
        }

        // Latch the data.
        TextureRender.checkGlError("before updateTexImage");
        mSurfaceTexture.updateTexImage();
    }

    /**
     * Draws the data from SurfaceTexture onto the current EGL surface.
     */
    void drawImage() throws TranscodingException {
        mTextureRender.drawFrame(mSurfaceTexture);
    }

    // Called on the Looper thread that owns the SurfaceTexture; signals awaitNewImage().
    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        if (VERBOSE) Log.d(TAG, "new frame available");
        synchronized (mFrameSyncObject) {
            if (mFrameAvailable) {
                // A frame arrived before the previous one was consumed; it may be dropped.
                // NOTE(review): throwing-and-catching just to print a stack trace is unusual;
                // a Log.w with a Throwable would be clearer — confirm before changing.
                try {
                    throw new TranscodingException("mFrameAvailable already set, frame could be dropped");
                } catch (TranscodingException e) {
                    e.printStackTrace();
                }
            }
            mFrameAvailable = true;
            mFrameSyncObject.notifyAll();
        }
    }

    /**
     * Checks for EGL errors.
     *
     * Drains the whole EGL error queue, logging each error, and throws if any was pending.
     */
    private void checkEglError(String msg) throws TranscodingException {
        boolean failed = false;
        int error;
        while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) {
            Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
            failed = true;
        }
        if (failed) {
            throw new TranscodingException("EGL error encountered (see log)");
        }
    }
}
|
||||
@@ -0,0 +1,10 @@
|
||||
package org.thoughtcrime.securesms.video.videoconverter;
|
||||
|
||||
/**
 * Minimal runtime state-check helper (in the spirit of Guava's {@code Preconditions}).
 */
final class Preconditions {

    // Static-only utility: prevent instantiation (the original class had an implicit
    // public-ish default constructor for no reason).
    private Preconditions() {
    }

    /**
     * Ensures the truth of an expression involving the state of the caller.
     *
     * @param errorMessage message used for the exception; converted with
     *                     {@link String#valueOf(Object)}, so {@code null} renders as "null"
     * @param expression   the state expression expected to be true
     * @throws IllegalStateException if {@code expression} is false
     */
    static void checkState(final Object errorMessage, final boolean expression) {
        if (!expression) {
            throw new IllegalStateException(String.valueOf(errorMessage));
        }
    }
}
|
||||
@@ -0,0 +1,249 @@
|
||||
/*
|
||||
* Copyright (C) 2013 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* This file has been modified by Signal.
|
||||
*/
|
||||
|
||||
package org.thoughtcrime.securesms.video.videoconverter;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
import android.opengl.Matrix;
|
||||
|
||||
import org.thoughtcrime.securesms.logging.Log;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.FloatBuffer;
|
||||
|
||||
/**
|
||||
* Code for rendering a texture onto a surface using OpenGL ES 2.0.
|
||||
*/
|
||||
final class TextureRender {
|
||||
private static final String TAG = "TextureRender";
|
||||
|
||||
private static final int FLOAT_SIZE_BYTES = 4;
|
||||
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
|
||||
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
|
||||
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
|
||||
private final float[] mTriangleVerticesData = {
|
||||
// X, Y, Z, U, V
|
||||
-1.0f, -1.0f, 0, 0.f, 0.f,
|
||||
1.0f, -1.0f, 0, 1.f, 0.f,
|
||||
-1.0f, 1.0f, 0, 0.f, 1.f,
|
||||
1.0f, 1.0f, 0, 1.f, 1.f,
|
||||
};
|
||||
|
||||
private final FloatBuffer mTriangleVertices;
|
||||
|
||||
private static final String VERTEX_SHADER =
|
||||
"uniform mat4 uMVPMatrix;\n" +
|
||||
"uniform mat4 uSTMatrix;\n" +
|
||||
"attribute vec4 aPosition;\n" +
|
||||
"attribute vec4 aTextureCoord;\n" +
|
||||
"varying vec2 vTextureCoord;\n" +
|
||||
"void main() {\n" +
|
||||
" gl_Position = uMVPMatrix * aPosition;\n" +
|
||||
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
|
||||
"}\n";
|
||||
|
||||
private static final String FRAGMENT_SHADER =
|
||||
"#extension GL_OES_EGL_image_external : require\n" +
|
||||
"precision mediump float;\n" + // highp here doesn't seem to matter
|
||||
"varying vec2 vTextureCoord;\n" +
|
||||
"uniform samplerExternalOES sTexture;\n" +
|
||||
"void main() {\n" +
|
||||
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
|
||||
"}\n";
|
||||
|
||||
private final float[] mMVPMatrix = new float[16];
|
||||
private final float[] mSTMatrix = new float[16];
|
||||
|
||||
private int mProgram;
|
||||
private int mTextureID = -12345;
|
||||
private int muMVPMatrixHandle;
|
||||
private int muSTMatrixHandle;
|
||||
private int maPositionHandle;
|
||||
private int maTextureHandle;
|
||||
|
||||
// Allocates the direct native-order vertex buffer for the full-screen quad and
// initializes the surface-texture transform matrix to identity. GL objects are
// created later in surfaceCreated(), once a context is current.
TextureRender() {
    mTriangleVertices = ByteBuffer.allocateDirect(
            mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
            .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mTriangleVertices.put(mTriangleVerticesData).position(0);

    Matrix.setIdentityM(mSTMatrix, 0);
}
||||
|
||||
// Returns the GL texture name generated in surfaceCreated() (-12345 before that call).
int getTextureId() {
    return mTextureID;
}
|
||||
|
||||
/**
 * Renders the latched SurfaceTexture frame as a full-screen quad into the current surface.
 * Requires surfaceCreated() to have run and a GL context to be current.
 *
 * @throws TranscodingException if any GL call leaves a pending error
 */
void drawFrame(SurfaceTexture st) throws TranscodingException {
    checkGlError("onDrawFrame start");
    // Pick up the texture transform supplied by the SurfaceTexture for this frame.
    st.getTransformMatrix(mSTMatrix);

    // Green clear color makes unrendered regions obvious during debugging.
    GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");

    // Bind the decoder's external-OES texture to unit 0.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);

    // Position attribute: 3 floats starting at the interleaved buffer's XYZ offset.
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");

    // Texture-coordinate attribute: 2 floats starting at the UV offset of the same buffer.
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");

    // Identity MVP: the quad already spans clip space; mSTMatrix carries the frame transform.
    Matrix.setIdentityM(mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkGlError("glDrawArrays");
    // Block until rendering completes before the caller reads/consumes the surface.
    GLES20.glFinish();
}
|
||||
|
||||
/**
 * Initializes GL state. Call this after the EGL surface has been created and made current.
 *
 * Compiles and links the shader program, resolves attribute/uniform locations, and
 * generates + configures the external-OES texture that the decoder renders into.
 *
 * @throws TranscodingException if program creation fails, a location cannot be resolved,
 *                              or any GL call leaves a pending error
 */
void surfaceCreated() throws TranscodingException {
    mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
    if (mProgram == 0) {
        throw new TranscodingException("failed creating program");
    }
    maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
    checkGlError("glGetAttribLocation aPosition");
    if (maPositionHandle == -1) {
        throw new TranscodingException("Could not get attrib location for aPosition");
    }
    maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
    checkGlError("glGetAttribLocation aTextureCoord");
    if (maTextureHandle == -1) {
        throw new TranscodingException("Could not get attrib location for aTextureCoord");
    }

    muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    checkGlError("glGetUniformLocation uMVPMatrix");
    if (muMVPMatrixHandle == -1) {
        throw new TranscodingException("Could not get attrib location for uMVPMatrix");
    }

    muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
    checkGlError("glGetUniformLocation uSTMatrix");
    if (muSTMatrixHandle == -1) {
        throw new TranscodingException("Could not get attrib location for uSTMatrix");
    }

    // Generate the texture the SurfaceTexture/decoder output is sampled from.
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);

    mTextureID = textures[0];
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
    checkGlError("glBindTexture mTextureID");

    // Linear filtering plus clamp-to-edge wrapping on both axes for the external texture.
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
            GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
            GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
            GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
            GLES20.GL_CLAMP_TO_EDGE);
    checkGlError("glTexParameter");
}
|
||||
|
||||
/**
 * Replaces the fragment shader.
 *
 * Deletes the current program and relinks with the standard vertex shader plus the
 * supplied fragment source. On failure mProgram is left at 0, so the renderer is
 * unusable until a successful call.
 *
 * @throws TranscodingException if the new program cannot be created
 */
public void changeFragmentShader(String fragmentShader) throws TranscodingException {
    GLES20.glDeleteProgram(mProgram);
    mProgram = createProgram(VERTEX_SHADER, fragmentShader);
    if (mProgram == 0) {
        throw new TranscodingException("failed creating program");
    }
}
|
||||
|
||||
private static int loadShader(int shaderType, String source) throws TranscodingException {
|
||||
int shader = GLES20.glCreateShader(shaderType);
|
||||
checkGlError("glCreateShader type=" + shaderType);
|
||||
GLES20.glShaderSource(shader, source);
|
||||
GLES20.glCompileShader(shader);
|
||||
int[] compiled = new int[1];
|
||||
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
|
||||
if (compiled[0] == 0) {
|
||||
Log.e(TAG, "Could not compile shader " + shaderType + ":");
|
||||
Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
|
||||
GLES20.glDeleteShader(shader);
|
||||
shader = 0;
|
||||
}
|
||||
return shader;
|
||||
}
|
||||
|
||||
private int createProgram(String vertexSource, String fragmentSource) throws TranscodingException {
|
||||
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
|
||||
if (vertexShader == 0) {
|
||||
return 0;
|
||||
}
|
||||
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
|
||||
if (pixelShader == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
int program = GLES20.glCreateProgram();
|
||||
checkGlError("glCreateProgram");
|
||||
if (program == 0) {
|
||||
Log.e(TAG, "Could not create program");
|
||||
}
|
||||
GLES20.glAttachShader(program, vertexShader);
|
||||
checkGlError("glAttachShader");
|
||||
GLES20.glAttachShader(program, pixelShader);
|
||||
checkGlError("glAttachShader");
|
||||
GLES20.glLinkProgram(program);
|
||||
int[] linkStatus = new int[1];
|
||||
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
|
||||
if (linkStatus[0] != GLES20.GL_TRUE) {
|
||||
Log.e(TAG, "Could not link program: ");
|
||||
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
|
||||
GLES20.glDeleteProgram(program);
|
||||
program = 0;
|
||||
}
|
||||
return program;
|
||||
}
|
||||
|
||||
static void checkGlError(String msg) throws TranscodingException {
|
||||
boolean failed = false;
|
||||
int error;
|
||||
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
|
||||
Log.e(TAG, msg + ": GLES20 error: 0x" + Integer.toHexString(error));
|
||||
failed = true;
|
||||
}
|
||||
if (failed) {
|
||||
throw new TranscodingException("GLES20 error encountered (see log)");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
package org.thoughtcrime.securesms.video.videoconverter;
|
||||
|
||||
final class TranscodingException extends Exception {
|
||||
|
||||
TranscodingException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
TranscodingException(Throwable inner) {
|
||||
super(inner);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,511 @@
|
||||
package org.thoughtcrime.securesms.video.videoconverter;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaExtractor;
|
||||
import android.media.MediaFormat;
|
||||
import android.view.Surface;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
import androidx.annotation.RequiresApi;
|
||||
|
||||
import org.thoughtcrime.securesms.logging.Log;
|
||||
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Locale;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
/**
 * Transcodes a single video track: extracts encoded samples with a {@link MediaExtractor},
 * decodes them to a GL surface ({@code OutputSurface}), redraws each frame (optionally
 * scaled/filtered by a generated fragment shader) into the encoder's input surface
 * ({@code InputSurface}), re-encodes at the requested resolution/bitrate, and hands the
 * encoded samples to a {@code Muxer}.
 *
 * <p>Trimming: {@code timeFrom}/{@code timeTo} are in milliseconds — the code multiplies
 * them by 1000 before comparing with the extractor's microsecond timestamps.
 *
 * <p>Driven externally: the owner repeatedly calls {@link #step()} until
 * {@code mVideoEncoderDone} is set. Not thread-safe; all methods are expected to run on
 * the single transcoding thread (NOTE(review): not enforced here — confirm at call sites).
 *
 * <p>{@code InputSurface}/{@code OutputSurface}/{@code Muxer}/{@code Preconditions} are
 * project-local helpers not visible in this file.
 */
final class VideoTrackConverter {

    private static final String TAG = "media-converter";
    private static final boolean VERBOSE = false; // lots of logging

    private static final int OUTPUT_VIDEO_IFRAME_INTERVAL = 1; // 1 second between I-frames
    private static final int OUTPUT_VIDEO_FRAME_RATE = 30; // needed only for MediaFormat.KEY_I_FRAME_INTERVAL to work; the actual frame rate matches the source

    // Timeout (microseconds, i.e. 10 ms) for MediaCodec dequeueInputBuffer/dequeueOutputBuffer.
    private static final int TIMEOUT_USEC = 10000;

    // Non-public MediaFormat keys some extractors populate with the display aspect size.
    private static final String MEDIA_FORMAT_KEY_DISPLAY_WIDTH = "display-width";
    private static final String MEDIA_FORMAT_KEY_DISPLAY_HEIGHT = "display-height";

    // Trim window in milliseconds (0 = unbounded on that side; see step()/constructor).
    private final long mTimeFrom;
    private final long mTimeTo;

    // Source track duration in microseconds as reported by KEY_DURATION, or 0 if absent.
    final long mInputDuration;

    private final MediaExtractor mVideoExtractor;
    private final MediaCodec mVideoDecoder;
    private final MediaCodec mVideoEncoder;

    // GL plumbing: encoder input surface (EGL) and decoder output surface (texture).
    private final InputSurface mInputSurface;
    private final OutputSurface mOutputSurface;

    private final ByteBuffer[] mVideoDecoderInputBuffers;
    private ByteBuffer[] mVideoEncoderOutputBuffers; // refreshed on INFO_OUTPUT_BUFFERS_CHANGED
    private final MediaCodec.BufferInfo mVideoDecoderOutputBufferInfo;
    private final MediaCodec.BufferInfo mVideoEncoderOutputBufferInfo;

    // Set when the encoder reports INFO_OUTPUT_FORMAT_CHANGED; consumed by setMuxer().
    MediaFormat mEncoderOutputVideoFormat;

    // Pipeline-stage completion flags (extract -> decode -> encode).
    boolean mVideoExtractorDone;
    private boolean mVideoDecoderDone;
    boolean mVideoEncoderDone;

    // Muxer track index for the video track; -1 until the track has been added.
    private int mOutputVideoTrack = -1;

    // Largest encoder presentation timestamp handed to the muxer so far (microseconds).
    long mMuxingVideoPresentationTime;

    // Per-stage frame counters, used by dumpState() and verifyEndState().
    private int mVideoExtractedFrameCount;
    private int mVideoDecodedFrameCount;
    private int mVideoEncodedFrameCount;

    private Muxer mMuxer;

    /**
     * Factory: returns a converter for the input's first video track, or {@code null}
     * when the input contains no video track.
     *
     * @throws IOException          if the extractor or codecs cannot be created.
     * @throws TranscodingException if GL setup of the render surfaces fails.
     */
    @RequiresApi(23)
    static @Nullable VideoTrackConverter create(
            final @NonNull MediaConverter.Input input,
            final long timeFrom,
            final long timeTo,
            final int videoResolution,
            final int videoBitrate,
            final @NonNull String videoCodec) throws IOException, TranscodingException {

        final MediaExtractor videoExtractor = input.createExtractor();
        final int videoInputTrack = getAndSelectVideoTrackIndex(videoExtractor);
        if (videoInputTrack == -1) {
            // No video track: release the extractor we created and signal "nothing to do".
            videoExtractor.release();
            return null;
        }
        return new VideoTrackConverter(videoExtractor, videoInputTrack, timeFrom, timeTo, videoResolution, videoBitrate, videoCodec);
    }

    /**
     * Builds the full decode->GL->encode pipeline and seeks the extractor to the trim start.
     *
     * @throws FileNotFoundException (an IOException) if no encoder supports {@code videoCodec}.
     */
    @RequiresApi(23)
    private VideoTrackConverter(
            final @NonNull MediaExtractor videoExtractor,
            final int videoInputTrack,
            final long timeFrom,
            final long timeTo,
            final int videoResolution,
            final int videoBitrate,
            final @NonNull String videoCodec) throws IOException, TranscodingException {

        mTimeFrom = timeFrom;
        mTimeTo = timeTo;
        mVideoExtractor = videoExtractor;

        final MediaCodecInfo videoCodecInfo = MediaConverter.selectCodec(videoCodec);
        if (videoCodecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.e(TAG, "Unable to find an appropriate codec for " + videoCodec);
            throw new FileNotFoundException();
        }
        if (VERBOSE) Log.d(TAG, "video found codec: " + videoCodecInfo.getName());

        final MediaFormat inputVideoFormat = mVideoExtractor.getTrackFormat(videoInputTrack);

        mInputDuration = inputVideoFormat.containsKey(MediaFormat.KEY_DURATION) ? inputVideoFormat.getLong(MediaFormat.KEY_DURATION) : 0;

        // Prefer display-width/height when present (handles anamorphic sources);
        // fall back to the coded width/height.
        final int rotation = inputVideoFormat.containsKey(MediaFormat.KEY_ROTATION) ? inputVideoFormat.getInteger(MediaFormat.KEY_ROTATION) : 0;
        final int width = inputVideoFormat.containsKey(MEDIA_FORMAT_KEY_DISPLAY_WIDTH)
                ? inputVideoFormat.getInteger(MEDIA_FORMAT_KEY_DISPLAY_WIDTH)
                : inputVideoFormat.getInteger(MediaFormat.KEY_WIDTH);
        final int height = inputVideoFormat.containsKey(MEDIA_FORMAT_KEY_DISPLAY_HEIGHT)
                ? inputVideoFormat.getInteger(MEDIA_FORMAT_KEY_DISPLAY_HEIGHT)
                : inputVideoFormat.getInteger(MediaFormat.KEY_HEIGHT);

        // Scale so the SHORTER side equals videoResolution, preserving aspect ratio.
        int outputWidth = width;
        int outputHeight = height;
        if (outputWidth < outputHeight) {
            outputWidth = videoResolution;
            outputHeight = height * outputWidth / width;
        } else {
            outputHeight = videoResolution;
            outputWidth = width * outputHeight / height;
        }
        // many encoders do not work when height and width are not multiple of 16 (also, some iPhones do not play some heights)
        // NOTE(review): "+7 & ~0xF" snaps to a multiple of 16 but rounds DOWN for
        // remainders <= 8, so a dimension can shrink by up to 8 pixels.
        outputHeight = (outputHeight + 7) & ~0xF;
        outputWidth = (outputWidth + 7) & ~0xF;

        // Swap output dimensions for 90/270-degree sources so the encoded frame is upright.
        final int outputWidthRotated;
        final int outputHeightRotated;
        if ((rotation % 180 == 90)) {
            //noinspection SuspiciousNameCombination
            outputWidthRotated = outputHeight;
            //noinspection SuspiciousNameCombination
            outputHeightRotated = outputWidth;
        } else {
            outputWidthRotated = outputWidth;
            outputHeightRotated = outputHeight;
        }

        final MediaFormat outputVideoFormat = MediaFormat.createVideoFormat(videoCodec, outputWidthRotated, outputHeightRotated);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        outputVideoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        outputVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, videoBitrate);
        outputVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, OUTPUT_VIDEO_FRAME_RATE);
        outputVideoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, OUTPUT_VIDEO_IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "video format: " + outputVideoFormat);

        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties. Request a Surface to use for input.
        final AtomicReference<Surface> inputSurfaceReference = new AtomicReference<>();
        mVideoEncoder = createVideoEncoder(videoCodecInfo, outputVideoFormat, inputSurfaceReference);
        mInputSurface = new InputSurface(inputSurfaceReference.get());
        mInputSurface.makeCurrent();
        // Create a MediaCodec for the decoder, based on the extractor's format.
        mOutputSurface = new OutputSurface();

        // Install a shader sized to the scale factor (box-filter when downscaling by >2x).
        mOutputSurface.changeFragmentShader(createFragmentShader(
                inputVideoFormat.getInteger(MediaFormat.KEY_WIDTH), inputVideoFormat.getInteger(MediaFormat.KEY_HEIGHT),
                outputWidth, outputHeight));

        mVideoDecoder = createVideoDecoder(inputVideoFormat, mOutputSurface.getSurface());

        mVideoDecoderInputBuffers = mVideoDecoder.getInputBuffers();
        mVideoEncoderOutputBuffers = mVideoEncoder.getOutputBuffers();
        mVideoDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
        mVideoEncoderOutputBufferInfo = new MediaCodec.BufferInfo();

        if (mTimeFrom > 0) {
            // Seek to the sync frame at/before the trim start; frames before mTimeFrom
            // are decoded but dropped in step().
            mVideoExtractor.seekTo(mTimeFrom * 1000, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
            Log.i(TAG, "Seek video:" + mTimeFrom + " " + mVideoExtractor.getSampleTime());
        }
    }

    /**
     * Attaches the muxer. If the encoder output format is already known, the video track
     * is added immediately; otherwise step() waits for the format before muxing.
     */
    void setMuxer(final @NonNull Muxer muxer) throws IOException {
        mMuxer = muxer;
        if (mEncoderOutputVideoFormat != null) {
            Log.d(TAG, "muxer: adding video track.");
            mOutputVideoTrack = muxer.addTrack(mEncoderOutputVideoFormat);
        }
    }

    /**
     * Advances the pipeline by at most one unit of work per stage. Each of the three
     * loops below ends in an unconditional {@code break}, so a loop runs its body at
     * most once per call; callers invoke step() repeatedly until mVideoEncoderDone.
     */
    void step() throws IOException, TranscodingException {
        // Extract video from file and feed to decoder.
        // Do not extract video if we have determined the output format but we are not yet
        // ready to mux the frames.
        while (!mVideoExtractorDone
                && (mEncoderOutputVideoFormat == null || mMuxer != null)) {
            int decoderInputBufferIndex = mVideoDecoder.dequeueInputBuffer(TIMEOUT_USEC);
            if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (VERBOSE) Log.d(TAG, "no video decoder input buffer");
                break;
            }
            if (VERBOSE) {
                Log.d(TAG, "video decoder: returned input buffer: " + decoderInputBufferIndex);
            }
            final ByteBuffer decoderInputBuffer = mVideoDecoderInputBuffers[decoderInputBufferIndex];
            final int size = mVideoExtractor.readSampleData(decoderInputBuffer, 0);
            final long presentationTime = mVideoExtractor.getSampleTime();
            if (VERBOSE) {
                Log.d(TAG, "video extractor: returned buffer of size " + size);
                Log.d(TAG, "video extractor: returned buffer for time " + presentationTime);
            }
            // EOS when the source is exhausted OR we passed the trim end (ms -> us).
            mVideoExtractorDone = size < 0 || (mTimeTo > 0 && presentationTime > mTimeTo * 1000);

            if (mVideoExtractorDone) {
                if (VERBOSE) Log.d(TAG, "video extractor: EOS");
                mVideoDecoder.queueInputBuffer(
                        decoderInputBufferIndex,
                        0,
                        0,
                        0,
                        MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            } else {
                mVideoDecoder.queueInputBuffer(
                        decoderInputBufferIndex,
                        0,
                        size,
                        presentationTime,
                        mVideoExtractor.getSampleFlags());
            }
            mVideoExtractor.advance();
            mVideoExtractedFrameCount++;
            // We extracted a frame, let's try something else next.
            break;
        }

        // Poll output frames from the video decoder and feed the encoder.
        while (!mVideoDecoderDone && (mEncoderOutputVideoFormat == null || mMuxer != null)) {
            final int decoderOutputBufferIndex =
                    mVideoDecoder.dequeueOutputBuffer(
                            mVideoDecoderOutputBufferInfo, TIMEOUT_USEC);
            if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (VERBOSE) Log.d(TAG, "no video decoder output buffer");
                break;
            }
            if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                if (VERBOSE) Log.d(TAG, "video decoder: output buffers changed");
                break;
            }
            if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (VERBOSE) {
                    Log.d(TAG, "video decoder: output format changed: " + mVideoDecoder.getOutputFormat());
                }
                break;
            }
            if (VERBOSE) {
                Log.d(TAG, "video decoder: returned output buffer: "
                        + decoderOutputBufferIndex);
                Log.d(TAG, "video decoder: returned buffer of size "
                        + mVideoDecoderOutputBufferInfo.size);
            }
            if ((mVideoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                if (VERBOSE) Log.d(TAG, "video decoder: codec config buffer");
                mVideoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
                break;
            }
            // Drop (don't render) frames decoded before the trim start, except the EOS buffer.
            if (mVideoDecoderOutputBufferInfo.presentationTimeUs < mTimeFrom * 1000 &&
                    (mVideoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
                if (VERBOSE) Log.d(TAG, "video decoder: frame prior to " + mVideoDecoderOutputBufferInfo.presentationTimeUs);
                mVideoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
                break;
            }
            if (VERBOSE) {
                Log.d(TAG, "video decoder: returned buffer for time " + mVideoDecoderOutputBufferInfo.presentationTimeUs);
            }
            // render == true routes the decoded frame to the OutputSurface texture.
            boolean render = mVideoDecoderOutputBufferInfo.size != 0;
            mVideoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, render);
            if (render) {
                if (VERBOSE) Log.d(TAG, "output surface: await new image");
                mOutputSurface.awaitNewImage();
                // Edit the frame and send it to the encoder.
                if (VERBOSE) Log.d(TAG, "output surface: draw image");
                mOutputSurface.drawImage();
                // Encoder timestamps are taken from the EGL presentation time (ns).
                mInputSurface.setPresentationTime(mVideoDecoderOutputBufferInfo.presentationTimeUs * 1000);
                if (VERBOSE) Log.d(TAG, "input surface: swap buffers");
                mInputSurface.swapBuffers();
                if (VERBOSE) Log.d(TAG, "video encoder: notified of new frame");
            }
            if ((mVideoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (VERBOSE) Log.d(TAG, "video decoder: EOS");
                mVideoDecoderDone = true;
                mVideoEncoder.signalEndOfInputStream();
            }
            mVideoDecodedFrameCount++;
            // We extracted a pending frame, let's try something else next.
            break;
        }

        // Poll frames from the video encoder and send them to the muxer.
        while (!mVideoEncoderDone && (mEncoderOutputVideoFormat == null || mMuxer != null)) {
            final int encoderOutputBufferIndex = mVideoEncoder.dequeueOutputBuffer(mVideoEncoderOutputBufferInfo, TIMEOUT_USEC);
            if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (VERBOSE) Log.d(TAG, "no video encoder output buffer");
                if (mVideoDecoderDone) {
                    // on some devices and encoder stops after signalEndOfInputStream
                    Log.w(TAG, "mVideoDecoderDone, but didn't get BUFFER_FLAG_END_OF_STREAM");
                    // Force the counters to match so verifyEndState() still passes.
                    mVideoEncodedFrameCount = mVideoDecodedFrameCount;
                    mVideoEncoderDone = true;
                }
                break;
            }
            if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                if (VERBOSE) Log.d(TAG, "video encoder: output buffers changed");
                mVideoEncoderOutputBuffers = mVideoEncoder.getOutputBuffers();
                break;
            }
            if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (VERBOSE) Log.d(TAG, "video encoder: output format changed");
                Preconditions.checkState("video encoder changed its output format again?", mOutputVideoTrack < 0);
                mEncoderOutputVideoFormat = mVideoEncoder.getOutputFormat();
                break;
            }
            Preconditions.checkState("should have added track before processing output", mMuxer != null);
            if (VERBOSE) {
                Log.d(TAG, "video encoder: returned output buffer: " + encoderOutputBufferIndex);
                Log.d(TAG, "video encoder: returned buffer of size " + mVideoEncoderOutputBufferInfo.size);
            }
            final ByteBuffer encoderOutputBuffer = mVideoEncoderOutputBuffers[encoderOutputBufferIndex];
            if ((mVideoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                if (VERBOSE) Log.d(TAG, "video encoder: codec config buffer");
                // Simply ignore codec config buffers.
                mVideoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
                break;
            }
            if (VERBOSE) {
                Log.d(TAG, "video encoder: returned buffer for time " + mVideoEncoderOutputBufferInfo.presentationTimeUs);
            }
            if (mVideoEncoderOutputBufferInfo.size != 0) {
                mMuxer.writeSampleData(mOutputVideoTrack, encoderOutputBuffer, mVideoEncoderOutputBufferInfo);
                mMuxingVideoPresentationTime = Math.max(mMuxingVideoPresentationTime, mVideoEncoderOutputBufferInfo.presentationTimeUs);
            }
            if ((mVideoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (VERBOSE) Log.d(TAG, "video encoder: EOS");
                mVideoEncoderDone = true;
            }
            mVideoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
            mVideoEncodedFrameCount++;
            // We enqueued an encoded frame, let's try something else next.
            break;
        }
    }

    /**
     * Releases extractor, codecs, and surfaces. Every component is released even if an
     * earlier one throws; the FIRST exception encountered is rethrown at the end.
     */
    void release() throws Exception {
        Exception exception = null;
        try {
            if (mVideoExtractor != null) {
                mVideoExtractor.release();
            }
        } catch (Exception e) {
            Log.e(TAG, "error while releasing mVideoExtractor", e);
            exception = e;
        }
        try {
            if (mVideoDecoder != null) {
                mVideoDecoder.stop();
                mVideoDecoder.release();
            }
        } catch (Exception e) {
            Log.e(TAG, "error while releasing mVideoDecoder", e);
            if (exception == null) {
                exception = e;
            }
        }
        try {
            if (mOutputSurface != null) {
                mOutputSurface.release();
            }
        } catch (Exception e) {
            Log.e(TAG, "error while releasing mOutputSurface", e);
            if (exception == null) {
                exception = e;
            }
        }
        try {
            if (mInputSurface != null) {
                mInputSurface.release();
            }
        } catch (Exception e) {
            Log.e(TAG, "error while releasing mInputSurface", e);
            if (exception == null) {
                exception = e;
            }
        }
        try {
            if (mVideoEncoder != null) {
                mVideoEncoder.stop();
                mVideoEncoder.release();
            }
        } catch (Exception e) {
            Log.e(TAG, "error while releasing mVideoEncoder", e);
            if (exception == null) {
                exception = e;
            }
        }
        if (exception != null) {
            throw exception;
        }
    }

    /** One-line progress/debug summary of the per-stage counters and flags. */
    String dumpState() {
        return String.format(Locale.US,
                "V{"
                        + "extracted:%d(done:%b) "
                        + "decoded:%d(done:%b) "
                        + "encoded:%d(done:%b) "
                        + "muxing:%b(track:%d)} ",
                mVideoExtractedFrameCount, mVideoExtractorDone,
                mVideoDecodedFrameCount, mVideoDecoderDone,
                mVideoEncodedFrameCount, mVideoEncoderDone,
                mMuxer != null, mOutputVideoTrack);
    }

    /** Sanity-checks frame counters after the pipeline finishes (throws via Preconditions). */
    void verifyEndState() {
        Preconditions.checkState("encoded (" + mVideoEncodedFrameCount + ") and decoded (" + mVideoDecodedFrameCount + ") video frame counts should match", mVideoDecodedFrameCount == mVideoEncodedFrameCount);
        Preconditions.checkState("decoded frame count should be less than extracted frame count", mVideoDecodedFrameCount <= mVideoExtractedFrameCount);
    }

    /**
     * Builds the downscaling fragment shader. For scale factors <= 2x in both axes a plain
     * texture lookup suffices; for larger factors it emits an unrolled box-filter average
     * over a (1+2*radius)^2 sample kernel to avoid aliasing.
     */
    private static String createFragmentShader(
            final int srcWidth,
            final int srcHeight,
            final int dstWidth,
            final int dstHeight) {
        final float kernelSizeX = (float) srcWidth / (float) dstWidth;
        final float kernelSizeY = (float) srcHeight / (float) dstHeight;
        Log.i(TAG, "kernel " + kernelSizeX + "x" + kernelSizeY);
        final String shader;
        if (kernelSizeX <= 2 && kernelSizeY <= 2) {
            shader =
                    "#extension GL_OES_EGL_image_external : require\n" +
                    "precision mediump float;\n" + // highp here doesn't seem to matter
                    "varying vec2 vTextureCoord;\n" +
                    "uniform samplerExternalOES sTexture;\n" +
                    "void main() {\n" +
                    " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                    "}\n";
        } else {
            // "- .1f" guards against float rounding pushing ceil() up one step.
            final int kernelRadiusX = (int) Math.ceil(kernelSizeX - .1f) / 2;
            final int kernelRadiusY = (int) Math.ceil(kernelSizeY - .1f) / 2;
            final float stepX = kernelSizeX / (1 + 2 * kernelRadiusX) * (1f / srcWidth);
            final float stepY = kernelSizeY / (1 + 2 * kernelRadiusY) * (1f / srcHeight);
            final float sum = (1 + 2 * kernelRadiusX) * (1 + 2 * kernelRadiusY);
            final StringBuilder colorLoop = new StringBuilder();
            for (int i = -kernelRadiusX; i <= kernelRadiusX; i++) {
                for (int j = -kernelRadiusY; j <= kernelRadiusY; j++) {
                    if (i != 0 || j != 0) {
                        colorLoop.append(" + texture2D(sTexture, vTextureCoord.xy + vec2(")
                                .append(i * stepX).append(", ").append(j * stepY).append("))\n");
                    }
                }
            }
            shader =
                    "#extension GL_OES_EGL_image_external : require\n" +
                    "precision mediump float;\n" + // highp here doesn't seem to matter
                    "varying vec2 vTextureCoord;\n" +
                    "uniform samplerExternalOES sTexture;\n" +
                    "void main() {\n" +
                    " gl_FragColor = (texture2D(sTexture, vTextureCoord)\n" +
                    colorLoop.toString() +
                    " ) / " + sum + ";\n" +
                    "}\n";
        }
        Log.i(TAG, shader);
        return shader;
    }

    /** Creates and starts a decoder for {@code inputFormat}, rendering into {@code surface}. */
    private @NonNull MediaCodec createVideoDecoder(
            final @NonNull MediaFormat inputFormat,
            final @NonNull Surface surface) throws IOException {
        final MediaCodec decoder = MediaCodec.createDecoderByType(MediaConverter.getMimeTypeFor(inputFormat));
        decoder.configure(inputFormat, surface, null, 0);
        decoder.start();
        return decoder;
    }

    /**
     * Creates and starts an encoder for {@code format}; the encoder's input Surface is
     * returned through {@code surfaceReference} (it must be created before start()).
     */
    private @NonNull MediaCodec createVideoEncoder(
            final @NonNull MediaCodecInfo codecInfo,
            final @NonNull MediaFormat format,
            final @NonNull AtomicReference<Surface> surfaceReference) throws IOException {
        final MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // Must be called before start()
        surfaceReference.set(encoder.createInputSurface());
        encoder.start();
        return encoder;
    }

    /** Selects the first "video/*" track and returns its index, or -1 when none exists. */
    private static int getAndSelectVideoTrackIndex(@NonNull MediaExtractor extractor) {
        for (int index = 0; index < extractor.getTrackCount(); ++index) {
            if (VERBOSE) {
                Log.d(TAG, "format for track " + index + " is " + MediaConverter.getMimeTypeFor(extractor.getTrackFormat(index)));
            }
            if (isVideoFormat(extractor.getTrackFormat(index))) {
                extractor.selectTrack(index);
                return index;
            }
        }
        return -1;
    }

    /** True when the track's MIME type is in the "video/" family. */
    private static boolean isVideoFormat(final @NonNull MediaFormat format) {
        return MediaConverter.getMimeTypeFor(format).startsWith("video/");
    }
}
|
||||
Reference in New Issue
Block a user