I am trying to create a video from camera preview frames using Camera1. Even when I record a long video, the result plays back too fast. I tried changing bitrates and frame rates.
The byte[] of each camera frame is queued and fed to the class below, so that we can create a video from bitmaps or byte[] data.
What we have done:
We opened the camera and took each frame from the onNextFrame callback of Camera1. We could have used MediaRecorder to record the video, but rendering frames to a SurfaceView while simultaneously recording with Camera1 is not possible, so we use MediaCodec to create an MP4 from the frames; that way frame rendering and video recording can both happen.
package com.sukshi.smartid_demo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.AsyncTask;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import static com.sukshi.smartid_demo.Camera.CameraSource.angle;
public class BitmapToVideoEncoder {
private static final String TAG = BitmapToVideoEncoder.class.getSimpleName();
private IBitmapToVideoEncoderCallback mCallback;
private File mOutputFile;
private Queue<Bitmap> mEncodeQueue = new ConcurrentLinkedQueue();
private MediaCodec mediaCodec;
private MediaMuxer mediaMuxer;
private Object mFrameSync = new Object();
private CountDownLatch mNewFrameLatch;
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static int mWidth;
private static int mHeight;
private static final int BIT_RATE = 1000000;
private static final int FRAME_RATE = 30; // Frames per second
private static final int I_FRAME_INTERVAL = 1;
private int mGenerateIndex = 0;
private int mTrackIndex;
private boolean mNoMoreFrames = false;
private boolean mAbort = false;
public interface IBitmapToVideoEncoderCallback {
void onEncodingComplete(File outputFile);
}
public BitmapToVideoEncoder(IBitmapToVideoEncoderCallback callback) {
mCallback = callback;
}
public boolean isEncodingStarted() {
return (mediaCodec != null) && (mediaMuxer != null) && !mNoMoreFrames && !mAbort;
}
public int getActiveBitmaps() {
return mEncodeQueue.size();
}
public void startEncoding(int width, int height, File outputFile) {
mWidth = width;
mHeight = height;
mOutputFile = outputFile;
String outputFileString;
try {
outputFileString = outputFile.getCanonicalPath();
} catch (IOException e) {
Log.e(TAG, "Unable to get path for " + outputFile);
return;
}
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
if (codecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
return;
}
Log.d(TAG, "found codec: " + codecInfo.getName());
int colorFormat;
try {
colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
} catch (Exception e) {
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
}
try {
mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
} catch (IOException e) {
Log.e(TAG, "Unable to create MediaCodec " + e.getMessage());
return;
}
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
try {
mediaMuxer = new MediaMuxer(outputFileString, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
Log.e(TAG,"MediaMuxer creation failed. " + e.getMessage());
return;
}
Log.d(TAG, "Initialization complete. Starting encoder...");
new EncodingAsync().execute();
}
public void stopEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to stop encoding since it never started");
return;
}
Log.d(TAG, "Stopping encoding");
mNoMoreFrames = true;
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void abortEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to abort encoding since it never started");
return;
}
Log.d(TAG, "Aborting encoding");
mNoMoreFrames = true;
mAbort = true;
mEncodeQueue = new ConcurrentLinkedQueue(); // Drop all frames
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void queueFrame(byte[] nextData) {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to queue frame. Encoding not started");
return;
}
Log.d(TAG, "Queueing frame");
Bitmap OriginalBitmap = BitmapFactory.decodeByteArray(nextData, 0, nextData.length);
Matrix matrix = new Matrix();
matrix.postRotate(angle);
Bitmap rotatedbitmap = Bitmap.createBitmap(OriginalBitmap,0,0,OriginalBitmap.getWidth(),OriginalBitmap.getHeight(),matrix,true);
mEncodeQueue.add(rotatedbitmap);
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
class EncodingAsync extends AsyncTask<Void,Void,File>{
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
protected File doInBackground(Void... voids) {
Log.d(TAG, "Encoder started");
while(true) {
if (mNoMoreFrames && (mEncodeQueue.size() == 0)) break;
Bitmap bitmap = mEncodeQueue.poll();
if (bitmap == null) {
synchronized (mFrameSync) {
mNewFrameLatch = new CountDownLatch(1);
}
try {
mNewFrameLatch.await();
} catch (InterruptedException e) {}
bitmap = mEncodeQueue.poll();
}
if (bitmap == null) continue;
byte[] byteConvertFrame = getNV21(bitmap.getWidth(), bitmap.getHeight(), bitmap);
long TIMEOUT_USEC = 500000;
int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
long ptsUsec = computePresentationTime(mGenerateIndex, FRAME_RATE);
if (inputBufIndex >= 0) {
final ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufIndex);
inputBuffer.clear();
inputBuffer.put(byteConvertFrame);
mediaCodec.queueInputBuffer(inputBufIndex, 0, byteConvertFrame.length, ptsUsec, 0);
mGenerateIndex++;
}
MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
Log.e(TAG, "No output from encoder available");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = mediaCodec.getOutputFormat();
mTrackIndex = mediaMuxer.addTrack(newFormat);
mediaMuxer.start();
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else if (mBufferInfo.size != 0) {
ByteBuffer encodedData = mediaCodec.getOutputBuffer(encoderStatus);
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
} else {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
mediaCodec.releaseOutputBuffer(encoderStatus, false);
}
}
}
release();
if (mAbort) {
mOutputFile.delete();
return null;
} else {
return mOutputFile;
}
}
@Override
protected void onPostExecute(File outputFile) {
super.onPostExecute(outputFile);
mCallback.onEncodingComplete(outputFile);
}
}
private void release() {
if (mediaCodec != null) {
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
Log.d(TAG,"RELEASE CODEC");
}
if (mediaMuxer != null) {
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;
Log.d(TAG,"RELEASE MUXER");
}
}
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
private static int selectColorFormat(MediaCodecInfo codecInfo,
String mimeType) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo
.getCapabilitiesForType(mimeType);
for (int i = 0; i < capabilities.colorFormats.length; i++) {
int colorFormat = capabilities.colorFormats[i];
if (isRecognizedFormat(colorFormat)) {
return colorFormat;
}
}
return 0; // not reached
}
private static boolean isRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle for
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
return true;
default:
return false;
}
}
private byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
int[] argb = new int[inputWidth * inputHeight];
scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
scaled.recycle();
return yuv;
}
private void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
}
index++;
}
}
}
private long computePresentationTime(long frameIndex, int framerate) {
return 132 + frameIndex * 1000000 / framerate;
}
}
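The fast playback is most likely a timestamp problem rather than a bitrate one: computePresentationTime() stamps frame N at N/FRAME_RATE seconds, assuming a steady 30 fps, but Camera1 preview callbacks usually arrive more slowly, so a long capture collapses into a shorter video. A minimal sketch of one fix, stamping each frame with its real capture time when it is queued (mTimestampQueue and mFirstFrameNanos are assumed new members, not part of the class above):
// Sketch: record a wall-clock timestamp per frame and use it as the encoder PTS.
private final Queue<Long> mTimestampQueue = new ConcurrentLinkedQueue<>();
private long mFirstFrameNanos = -1;

public void queueFrame(byte[] nextData) {
    long now = System.nanoTime();
    if (mFirstFrameNanos < 0) mFirstFrameNanos = now;       // first frame defines t = 0
    mTimestampQueue.add((now - mFirstFrameNanos) / 1000L);  // nanoseconds -> microseconds
    // ... decode, rotate, and enqueue the bitmap exactly as before ...
}

// In EncodingAsync, replace computePresentationTime(mGenerateIndex, FRAME_RATE) with:
// long ptsUsec = mTimestampQueue.poll();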
You can see the main ways to use MediaCodec in this example, which generates a movie using OpenGL ES:
https://bigflake.com/mediacodec/EncodeAndMuxTest.java.txt
var des="/storage/emulated/0/Android/data/"
var newpath= SiliCompressor.with(this).compressVideo(filepath,"newmovies")
I'm compressing a 14 MB video and it reduces to just 324 KB; video compression using SiliCompressor in Android Kotlin reduces the video quality too much.
If I do compression with custom resolution:
var newpath= SiliCompressor.with(this).compressVideo(filepath,"newmovies",1080,1920,30)
it still decreases the video quality too much.
How can I compress a video in Android other than through FFmpeg?
I have tried MediaCodec but it didn't work.
Here is the code:
public String compressVideo(String videoFilePath, String destinationDir, int outWidth, int outHeight, int bitrate) throws URISyntaxException {
boolean isconverted = MediaController.getInstance(mContext).convertVideo(videoFilePath, new File(destinationDir), outWidth, outHeight, bitrate);
if (isconverted) {
Log.v(LOG_TAG, "Video Conversion Complete");
} else {
Log.v(LOG_TAG, "Video conversion in progress");
}
return MediaController.cachedFile.getPath();
}
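Note that, per the compressVideo signature above, the fifth parameter is the bitrate in bits per second, not a frame rate. The call compressVideo(filepath, "newmovies", 1080, 1920, 30) therefore requests a 30 bps stream, which alone would explain the severe quality loss. A hedged example with a more plausible target (the 3 Mbps figure is an assumption to tune, not a recommended constant):
// Hypothetical call: 1080x1920 output at roughly 3 Mbps (bitrate value is an assumption).
String newPath = SiliCompressor.with(context)
        .compressVideo(filepath, "newmovies", 1080, 1920, 3_000_000);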
private boolean convertVideo(Context context, Uri videoContentUri, final String sourcePath, File destDir, int outWidth, int outHeight, int outBitrate) {
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
if (sourcePath != null) {
this.path = sourcePath;
retriever.setDataSource(path);
} else if (context != null && videoContentUri != null) {
retriever.setDataSource(context, videoContentUri);
} else {
return false;
}
String width = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);
String height = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
String rotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
long startTime = -1;
long endTime = -1;
int resultWidth = outWidth > 0 ? outWidth : DEFAULT_VIDEO_WIDTH;
int resultHeight = outHeight > 0 ? outHeight : DEFAULT_VIDEO_HEIGHT;
int rotationValue = Integer.valueOf(rotation);
int originalWidth = Integer.valueOf(width);
int originalHeight = Integer.valueOf(height);
int bitrate = outBitrate > 0 ? outBitrate : DEFAULT_VIDEO_BITRATE;
int rotateRender = 0;
File cacheFile = null;
try {
cacheFile = new File(getCompressedVideoPath(destDir));
} catch (FileNotFoundException e) {
e.printStackTrace();
}
if (Build.VERSION.SDK_INT < 18 && resultHeight > resultWidth && resultWidth != originalWidth && resultHeight != originalHeight) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 90;
rotateRender = 270;
} else if (Build.VERSION.SDK_INT > 20) {
if (rotationValue == 90) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 0;
rotateRender = 270;
} else if (rotationValue == 180) {
rotateRender = 180;
rotationValue = 0;
} else if (rotationValue == 270) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 0;
rotateRender = 90;
}
}
//ParcelFileDescriptor descriptor = null; //"w" specify's write mode
FileDescriptor fileDescriptor = null;
try {
ParcelFileDescriptor descriptor = null;
if (sourcePath != null) {
descriptor = mContext.getContentResolver().openFileDescriptor(Uri.fromFile(new File(sourcePath)), "r");
} else {
descriptor = mContext.getContentResolver().openFileDescriptor(videoContentUri, "r");
}
fileDescriptor = descriptor.getFileDescriptor();
} catch (FileNotFoundException e) {
e.printStackTrace();
return false;
}
videoConvertFirstWrite = true;
boolean error = false;
long videoStartTime = startTime;
long time = System.currentTimeMillis();
if (resultWidth != 0 && resultHeight != 0) {
MP4Builder mediaMuxer = null;
MediaExtractor extractor = null;
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
Mp4Movie movie = new Mp4Movie();
movie.setCacheFile(cacheFile);
movie.setRotation(rotationValue);
movie.setSize(resultWidth, resultHeight);
mediaMuxer = new MP4Builder().createMovie(movie);
extractor = new MediaExtractor();
if (sourcePath != null && fileDescriptor != null) {
extractor.setDataSource(fileDescriptor);
} else if (context != null && videoContentUri != null) {
extractor.setDataSource(context, videoContentUri, null);
} else {
return false;
}
int videoIndex;
videoIndex = selectTrack(extractor, false);
if (videoIndex >= 0) {
MediaCodec decoder = null;
MediaCodec encoder = null;
InputSurface inputSurface = null;
OutputSurface outputSurface = null;
try {
long videoTime = -1;
boolean outputDone = false;
boolean inputDone = false;
boolean decoderDone = false;
int swapUV = 0;
int videoTrackIndex = -5;
int colorFormat;
int processorType = PROCESSOR_TYPE_OTHER;
String manufacturer = Build.MANUFACTURER.toLowerCase();
if (Build.VERSION.SDK_INT < 18) {
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
if (colorFormat == 0) {
throw new RuntimeException("no supported color format");
}
String codecName = codecInfo.getName();
if (codecName.contains("OMX.qcom.")) {
processorType = PROCESSOR_TYPE_QCOM;
if (Build.VERSION.SDK_INT == 16) {
if (manufacturer.equals("lge") || manufacturer.equals("nokia")) {
swapUV = 1;
}
}
} else if (codecName.contains("OMX.Intel.")) {
processorType = PROCESSOR_TYPE_INTEL;
} else if (codecName.equals("OMX.MTK.VIDEO.ENCODER.AVC")) {
processorType = PROCESSOR_TYPE_MTK;
} else if (codecName.equals("OMX.SEC.AVC.Encoder")) {
processorType = PROCESSOR_TYPE_SEC;
swapUV = 1;
} else if (codecName.equals("OMX.TI.DUCATI1.VIDEO.H264E")) {
processorType = PROCESSOR_TYPE_TI;
}
Log.e("tmessages", "codec = " + codecInfo.getName() + " manufacturer = " + manufacturer + "device = " + Build.MODEL);
} else {
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
}
Log.e("tmessages", "colorFormat = " + colorFormat);
int resultHeightAligned = resultHeight;
int padding = 0;
int bufferSize = resultWidth * resultHeight * 3 / 2;
if (processorType == PROCESSOR_TYPE_OTHER) {
if (resultHeight % 16 != 0) {
resultHeightAligned += (16 - (resultHeight % 16));
padding = resultWidth * (resultHeightAligned - resultHeight);
bufferSize += padding * 5 / 4;
}
} else if (processorType == PROCESSOR_TYPE_QCOM) {
if (!manufacturer.toLowerCase().equals("lge")) {
int uvoffset = (resultWidth * resultHeight + 2047) & ~2047;
padding = uvoffset - (resultWidth * resultHeight);
bufferSize += padding;
}
} else if (processorType == PROCESSOR_TYPE_TI) {
//resultHeightAligned = 368;
//bufferSize = resultWidth * resultHeightAligned * 3 / 2;
//resultHeightAligned += (16 - (resultHeight % 16));
//padding = resultWidth * (resultHeightAligned - resultHeight);
//bufferSize += padding * 5 / 4;
} else if (processorType == PROCESSOR_TYPE_MTK) {
if (manufacturer.equals("baidu")) {
resultHeightAligned += (16 - (resultHeight % 16));
padding = resultWidth * (resultHeightAligned - resultHeight);
bufferSize += padding * 5 / 4;
}
}
extractor.selectTrack(videoIndex);
if (startTime > 0) {
extractor.seekTo(startTime, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
} else {
extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
}
MediaFormat inputFormat = extractor.getTrackFormat(videoIndex);
MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, resultWidth, resultHeight);
outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate != 0 ? bitrate : 921600);
outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
if (Build.VERSION.SDK_INT < 18) {
outputFormat.setInteger("stride", resultWidth + 32);
outputFormat.setInteger("slice-height", resultHeight);
}
encoder = MediaCodec.createEncoderByType(MIME_TYPE);
encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (Build.VERSION.SDK_INT >= 18) {
inputSurface = new InputSurface(encoder.createInputSurface());
inputSurface.makeCurrent();
}
encoder.start();
decoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
if (Build.VERSION.SDK_INT >= 18) {
outputSurface = new OutputSurface();
} else {
outputSurface = new OutputSurface(resultWidth, resultHeight, rotateRender);
}
decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
decoder.start();
final int TIMEOUT_USEC = 2500;
ByteBuffer[] decoderInputBuffers = null;
ByteBuffer[] encoderOutputBuffers = null;
ByteBuffer[] encoderInputBuffers = null;
if (Build.VERSION.SDK_INT < 21) {
decoderInputBuffers = decoder.getInputBuffers();
encoderOutputBuffers = encoder.getOutputBuffers();
if (Build.VERSION.SDK_INT < 18) {
encoderInputBuffers = encoder.getInputBuffers();
}
}
while (!outputDone) {
if (!inputDone) {
boolean eof = false;
int index = extractor.getSampleTrackIndex();
if (index == videoIndex) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
ByteBuffer inputBuf;
if (Build.VERSION.SDK_INT < 21) {
inputBuf = decoderInputBuffers[inputBufIndex];
} else {
inputBuf = decoder.getInputBuffer(inputBufIndex);
}
int chunkSize = extractor.readSampleData(inputBuf, 0);
if (chunkSize < 0) {
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
} else {
decoder.queueInputBuffer(inputBufIndex, 0, chunkSize, extractor.getSampleTime(), 0);
extractor.advance();
}
}
} else if (index == -1) {
eof = true;
}
if (eof) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
}
}
}
boolean decoderOutputAvailable = !decoderDone;
boolean encoderOutputAvailable = true;
while (decoderOutputAvailable || encoderOutputAvailable) {
int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
encoderOutputAvailable = false;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (Build.VERSION.SDK_INT < 21) {
encoderOutputBuffers = encoder.getOutputBuffers();
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = encoder.getOutputFormat();
if (videoTrackIndex == -5) {
videoTrackIndex = mediaMuxer.addTrack(newFormat, false);
}
} else if (encoderStatus < 0) {
throw new RuntimeException("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else {
ByteBuffer encodedData;
if (Build.VERSION.SDK_INT < 21) {
encodedData = encoderOutputBuffers[encoderStatus];
} else {
encodedData = encoder.getOutputBuffer(encoderStatus);
}
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
if (info.size > 1) {
if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
if (mediaMuxer.writeSampleData(videoTrackIndex, encodedData, info, false)) {
didWriteData(false, false);
}
} else if (videoTrackIndex == -5) {
byte[] csd = new byte[info.size];
encodedData.limit(info.offset + info.size);
encodedData.position(info.offset);
encodedData.get(csd);
ByteBuffer sps = null;
ByteBuffer pps = null;
for (int a = info.size - 1; a >= 0; a--) {
if (a > 3) {
if (csd[a] == 1 && csd[a - 1] == 0 && csd[a - 2] == 0 && csd[a - 3] == 0) {
sps = ByteBuffer.allocate(a - 3);
pps = ByteBuffer.allocate(info.size - (a - 3));
sps.put(csd, 0, a - 3).position(0);
pps.put(csd, a - 3, info.size - (a - 3)).position(0);
break;
}
} else {
break;
}
}
MediaFormat newFormat = MediaFormat.createVideoFormat(MIME_TYPE, resultWidth, resultHeight);
if (sps != null && pps != null) {
newFormat.setByteBuffer("csd-0", sps);
newFormat.setByteBuffer("csd-1", pps);
}
videoTrackIndex = mediaMuxer.addTrack(newFormat, false);
}
}
outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
encoder.releaseOutputBuffer(encoderStatus, false);
}
if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
continue;
}
if (!decoderDone) {
int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
decoderOutputAvailable = false;
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = decoder.getOutputFormat();
Log.e("tmessages", "newFormat = " + newFormat);
} else if (decoderStatus < 0) {
throw new RuntimeException("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
} else {
boolean doRender;
if (Build.VERSION.SDK_INT >= 18) {
doRender = info.size != 0;
} else {
doRender = info.size != 0 || info.presentationTimeUs != 0;
}
if (endTime > 0 && info.presentationTimeUs >= endTime) {
inputDone = true;
decoderDone = true;
doRender = false;
info.flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
}
if (startTime > 0 && videoTime == -1) {
if (info.presentationTimeUs < startTime) {
doRender = false;
Log.e("tmessages", "drop frame startTime = " + startTime + " present time = " + info.presentationTimeUs);
} else {
videoTime = info.presentationTimeUs;
}
}
decoder.releaseOutputBuffer(decoderStatus, doRender);
if (doRender) {
boolean errorWait = false;
try {
outputSurface.awaitNewImage();
} catch (Exception e) {
errorWait = true;
Log.e("tmessages", e.getMessage());
}
if (!errorWait) {
if (Build.VERSION.SDK_INT >= 18) {
outputSurface.drawImage(false);
inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
inputSurface.swapBuffers();
} else {
int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
outputSurface.drawImage(true);
ByteBuffer rgbBuf = outputSurface.getFrame();
ByteBuffer yuvBuf = encoderInputBuffers[inputBufIndex];
yuvBuf.clear();
convertVideoFrame(rgbBuf, yuvBuf, colorFormat, resultWidth, resultHeight, padding, swapUV);
encoder.queueInputBuffer(inputBufIndex, 0, bufferSize, info.presentationTimeUs, 0);
} else {
Log.e("tmessages", "input buffer not available");
}
}
}
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
decoderOutputAvailable = false;
Log.e("tmessages", "decoder stream end");
if (Build.VERSION.SDK_INT >= 18) {
encoder.signalEndOfInputStream();
} else {
int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
encoder.queueInputBuffer(inputBufIndex, 0, 1, info.presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
}
}
}
}
}
}
if (videoTime != -1) {
videoStartTime = videoTime;
}
} catch (Exception e) {
Log.e("tmessages", e.getMessage());
error = true;
}
extractor.unselectTrack(videoIndex);
if (outputSurface != null) {
outputSurface.release();
}
if (inputSurface != null) {
inputSurface.release();
}
if (decoder != null) {
decoder.stop();
decoder.release();
}
if (encoder != null) {
encoder.stop();
encoder.release();
}
}
if (!error) {
readAndWriteTrack(extractor, mediaMuxer, info, videoStartTime, endTime, cacheFile, true);
}
} catch (Exception e) {
error = true;
Log.e("tmessages", e.getMessage());
} finally {
if (extractor != null) {
extractor.release();
}
if (mediaMuxer != null) {
try {
mediaMuxer.finishMovie(false);
} catch (Exception e) {
Log.e("tmessages", e.getMessage());
}
}
Log.e("tmessages", "time = " + (System.currentTimeMillis() - time));
}
} else {
didWriteData(true, true);
return false;
}
didWriteData(true, error);
cachedFile = cacheFile;
Log.e("ViratPath", path + "");
Log.e("ViratPath", cacheFile.getPath() + "");
if (sourcePath != null) {
Log.e("SourcePath", sourcePath);
}
if (videoContentUri != null) {
Log.e("VideoUri", videoContentUri.toString());
}
return true;
}
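For reference, selectTrack() is used above but not shown. In the Telegram-derived MediaController this code appears to come from, it simply returns the index of the first track whose MIME type matches; a minimal sketch under that assumption:
// Assumed helper: find the first video (or audio) track in the extractor.
private int selectTrack(MediaExtractor extractor, boolean audio) {
    int numTracks = extractor.getTrackCount();
    for (int i = 0; i < numTracks; i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (audio ? mime.startsWith("audio/") : mime.startsWith("video/")) {
            return i;
        }
    }
    return -5;  // matches the "not found" sentinel used by videoTrackIndex above
}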
I'm decoding and encoding a video file using Android MediaCodec. Both decoding and encoding work fine with the following code, except on the Pixel 3a. For encoding, the encoder uses a set of bitmaps to create a video file, but on the Pixel 3a alone, encoding the bitmaps fails and produces a distorted video file.
Device details:
Name: Pixel 3A, Android version: 11
public class ImageProcessor implements Runnable {
private static final String VIDEO = "video/";
private static final String TAG = "VideoDecoder";
private static final long DEFAULT_TIMEOUT_US = 0;
private final String inputFile;
private final String outputFile;
private MediaCodec mDecoder;
private MediaExtractor mExtractor;
private RenderScript rs;
private ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic;
private int width;
private int height;
private MediaCodec mEncoder;
private MediaMuxer mediaMuxer;
private int mTrackIndex;
private ScriptC_rotators rotateScript;
private int newWidth = 0, newHeight = 0;
private int preRotateHeight;
private int preRotateWidth;
private Allocation fromRotateAllocation;
private Allocation toRotateAllocation;
private int frameIndex;
private int deviceOrientation;
private int sensorOrientation;
private final Handler handler;
boolean sawOutputEOS = false;
boolean sawInputEOS = false;
private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
private FrameObject defaultObject;
private int faceBlurCount;
private long startTime;
private float frameRate;
private int generateIndex;
public ImageProcessor(Handler handler, String inputFile, String outputFile) {
this.inputFile = inputFile;
this.outputFile = outputFile;
this.handler = handler;
}
public void setDeviceOrientation(int deviceOrientation) {
this.deviceOrientation = deviceOrientation;
}
public void setSensorOrientation(int sensorOrientation) {
this.sensorOrientation = sensorOrientation;
}
public void setDefaultObject(FrameObject frameObject) {
this.defaultObject = frameObject;
}
private void init() {
try {
mExtractor = new MediaExtractor();
mExtractor.setDataSource(inputFile);
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(inputFile);
FFmpegMediaMetadataRetriever metadataRetriever = new FFmpegMediaMetadataRetriever();
metadataRetriever.setDataSource(inputFile);
rs = RenderScript.create(Globals.getAppContext());
yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
rotateScript = new ScriptC_rotators(rs);
for (int i = 0; i < mExtractor.getTrackCount(); i++) {
MediaFormat format = mExtractor.getTrackFormat(i);
String mimeType = format.getString(MediaFormat.KEY_MIME);
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
frameRate = Float.parseFloat(metadataRetriever.extractMetadata(
FFmpegMediaMetadataRetriever.METADATA_KEY_FRAMERATE));
int bitRate = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE));
if (mimeType != null && mimeType.startsWith(VIDEO)) {
mExtractor.selectTrack(i);
mDecoder = MediaCodec.createDecoderByType(mimeType);
mDecoder.configure(format, null, null, 0 /* Decoder */);
mDecoder.start();
MediaCodecInfo mediaCodecInfo = selectCodec(mimeType);
if (mediaCodecInfo == null) {
throw new RuntimeException("Failed to initialise codec");
}
switch (deviceOrientation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
newWidth = height;
newHeight = width;
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
newWidth = width;
newHeight = height;
break;
}
MediaFormat mediaFormat = MediaFormat.createVideoFormat(mimeType, newWidth, newHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
mediaFormat.setFloat(MediaFormat.KEY_FRAME_RATE, frameRate);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mEncoder.start();
mediaMuxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
break;
}
}
} catch (IOException e) {
throw new RuntimeException("Failed to initialise codec");
}
}
/**
* Returns the first codec capable of encoding the specified MIME type, or null if no
* match was found.
*/
private MediaCodecInfo selectCodec(String mimeType) throws IOException {
MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
MediaCodecInfo[] codecInfos = list.getCodecInfos();
for (MediaCodecInfo info : codecInfos) {
if (info.isEncoder()) {
mEncoder = MediaCodec.createByCodecName(info.getName());
String[] types = info.getSupportedTypes();
for (String type : types) {
if (type.equalsIgnoreCase(mimeType)) {
return info;
}
}
}
}
return null;
}
public void startProcessing() {
init();
MediaCodec.BufferInfo decoderBufferInfo = new MediaCodec.BufferInfo();
MediaCodec.BufferInfo encoderBufferInfo = new MediaCodec.BufferInfo();
startTime = System.currentTimeMillis();
while (!sawOutputEOS) {
Log.d(TAG, "startProcessing: " + frameIndex);
if (!sawInputEOS && mDecoder != null) {
int inputBufferId = mDecoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
if (inputBufferId >= 0) {
ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferId);
int sampleSize = mExtractor.readSampleData(inputBuffer, 0);
if (sampleSize < 0) {
mDecoder.queueInputBuffer(inputBufferId, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
sawInputEOS = true;
} else {
if (mExtractor != null) {
long presentationTimeUs = mExtractor.getSampleTime();
mDecoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
mExtractor.advance();
}
}
}
}
int outputBufferId = mDecoder.dequeueOutputBuffer(decoderBufferInfo, DEFAULT_TIMEOUT_US);
if (outputBufferId >= 0) {
if ((decoderBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
sawOutputEOS = true;
Log.d(TAG, "endProcessing: " + TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime));
}
boolean doRender = (decoderBufferInfo.size != 0);
if (doRender && mDecoder != null) {
Image image = mDecoder.getOutputImage(outputBufferId);
if (image != null) {
try {
frameIndex++;
byte[] frameData = quarterNV21(convertYUV420888ToNV21(image), image.getWidth(), image.getHeight());
byte[] data = getDataFromImage(image);
Type.Builder yuvType = new Type.Builder(rs, Element.U8(rs)).setX(data.length);
Allocation in = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT);
Type.Builder rgbaType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(width).setY(height);
Allocation out = Allocation.createTyped(rs, rgbaType.create(), Allocation.USAGE_SCRIPT);
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
in.copyFromUnchecked(data);
yuvToRgbIntrinsic.setInput(in);
yuvToRgbIntrinsic.forEach(out);
out.copyTo(bitmap);
image.close();
encodeBitmaps(bitmap, encoderBufferInfo);
} catch (Exception e) {
Log.d(TAG, "startProcessing: " + e.getMessage());
}
}
if (mDecoder != null) {
mDecoder.releaseOutputBuffer(outputBufferId, false);
}
}
}
}
}
private long computePresentationTime(int frameIndex) {
return 132 + frameIndex * 1000000 / (int)frameRate;
}
private byte[] convertYUV420888ToNV21(Image image) {
byte[] data;
ByteBuffer buffer0 = image.getPlanes()[0].getBuffer();
ByteBuffer buffer2 = image.getPlanes()[2].getBuffer();
int buffer0_size = buffer0.remaining();
int buffer2_size = buffer2.remaining();
data = new byte[buffer0_size + buffer2_size];
buffer0.get(data, 0, buffer0_size);
buffer2.get(data, buffer0_size, buffer2_size);
return data;
}
private byte[] quarterNV21(byte[] data, int iWidth, int iHeight) {
byte[] yuv = new byte[iWidth * iHeight * 3 / 2];
// copy the luma (Y) plane only; note the chroma planes are never filled in
int i = 0;
for (int y = 0; y < iHeight; y++) {
for (int x = 0; x < iWidth; x++) {
yuv[i] = data[y * iWidth + x];
i++;
}
}
return yuv;
}
private void release() {
try {
if (mExtractor != null) {
mExtractor.release();
mExtractor = null;
}
if (mDecoder != null) {
mDecoder.stop();
mDecoder.release();
mDecoder = null;
}
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mediaMuxer != null) {
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;
}
} catch (Exception e) {
Log.d(TAG, "imageprocessor release: " + e.fillInStackTrace());
}
Message message = handler.obtainMessage();
Bundle bundle = new Bundle();
bundle.putString(FrameUtil.COMPUTATION_SUCCESS_KEY, this.outputFile);
bundle.putInt(FrameUtil.FACE_BLUR_COUNT, faceBlurCount);
message.setData(bundle);
handler.sendMessage(message);
}
// encode the bitmap to a new video file
private void encodeBitmaps(Bitmap bitmap, MediaCodec.BufferInfo encoderBufferInfo) {
Bitmap rotatedBitmap = null;
switch (deviceOrientation) {
case Surface.ROTATION_0:
if (sensorOrientation == SENSOR_ORIENTATION_DEFAULT_DEGREES) {
rotatedBitmap = rotateBitmap(bitmap, 270);
} else {
rotatedBitmap = rotateBitmap(bitmap, 90);
}
break;
case Surface.ROTATION_90:
Bitmap newBitmap = rotateBitmap(bitmap, 90);
bitmap.recycle();
rotatedBitmap = rotateBitmap(newBitmap, 90);
break;
default:
rotatedBitmap = bitmap;
}
byte[] bytes = getNV21(rotatedBitmap.getWidth(), rotatedBitmap.getHeight(), rotatedBitmap);
int inputBufIndex = mEncoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
long ptsUsec = computePresentationTime(generateIndex);
if (inputBufIndex >= 0) {
ByteBuffer inputBuffer = mEncoder.getInputBuffer(inputBufIndex);
if (inputBuffer != null) {
inputBuffer.clear();
inputBuffer.put(bytes);
mEncoder.queueInputBuffer(inputBufIndex, 0, bytes.length,
ptsUsec, 0);
generateIndex++;
}
}
int encoderStatus = mEncoder.dequeueOutputBuffer(encoderBufferInfo, DEFAULT_TIMEOUT_US);
if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = mEncoder.getOutputFormat();
mTrackIndex = mediaMuxer.addTrack(newFormat);
mediaMuxer.start();
} else if (encoderBufferInfo.size != 0) {
ByteBuffer outputBuffer = mEncoder.getOutputBuffer(encoderStatus);
if (outputBuffer != null) {
outputBuffer.position(encoderBufferInfo.offset);
outputBuffer.limit(encoderBufferInfo.offset + encoderBufferInfo.size);
mediaMuxer.writeSampleData(mTrackIndex, outputBuffer, encoderBufferInfo);
mEncoder.releaseOutputBuffer(encoderStatus, false);
}
if ((encoderBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
mEncoder.signalEndOfInputStream();
}
}
}
private Allocation getFromRotateAllocation(Bitmap bitmap) {
int targetHeight = bitmap.getWidth();
int targetWidth = bitmap.getHeight();
if (targetHeight != preRotateHeight || targetWidth != preRotateWidth) {
preRotateHeight = targetHeight;
preRotateWidth = targetWidth;
fromRotateAllocation = Allocation.createFromBitmap(rs, bitmap,
Allocation.MipmapControl.MIPMAP_NONE,
Allocation.USAGE_SCRIPT);
}
return fromRotateAllocation;
}
private Allocation getToRotateAllocation(Bitmap bitmap) {
int targetHeight = bitmap.getWidth();
int targetWidth = bitmap.getHeight();
if (targetHeight != preRotateHeight || targetWidth != preRotateWidth) {
toRotateAllocation = Allocation.createFromBitmap(rs, bitmap,
Allocation.MipmapControl.MIPMAP_NONE,
Allocation.USAGE_SCRIPT);
}
return toRotateAllocation;
}
private Bitmap rotateBitmap(Bitmap bitmap, int angle) {
Bitmap.Config config = bitmap.getConfig();
int targetHeight = bitmap.getWidth();
int targetWidth = bitmap.getHeight();
rotateScript.set_inWidth(bitmap.getWidth());
rotateScript.set_inHeight(bitmap.getHeight());
Allocation sourceAllocation = getFromRotateAllocation(bitmap);
sourceAllocation.copyFrom(bitmap);
rotateScript.set_inImage(sourceAllocation);
Bitmap target = Bitmap.createBitmap(targetWidth, targetHeight, config);
final Allocation targetAllocation = getToRotateAllocation(target);
if (angle == 90) {
rotateScript.forEach_rotate_90_clockwise(targetAllocation, targetAllocation);
} else {
rotateScript.forEach_rotate_270_clockwise(targetAllocation, targetAllocation);
}
targetAllocation.copyTo(target);
return target;
}
private byte[] getNV21(int inputWidth, int inputHeight, Bitmap bitmap) {
int[] argb = new int[inputWidth * inputHeight];
bitmap.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
bitmap.recycle();
return yuv;
}
private void encodeYUV420SP(byte[] yuv420sp, int[] rgb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
//a = (aRGB[index] & 0xff000000) >> 24; //not using it right now
R = (rgb[index] & 0xff0000) >> 16;
G = (rgb[index] & 0xff00) >> 8;
B = (rgb[index] & 0xff);
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : (Math.min(Y, 255)));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : (Math.min(U, 255)));
yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : (Math.min(V, 255)));
}
index++;
}
}
}
private static byte[] getDataFromImage(Image image) {
Rect crop = image.getCropRect();
int format = image.getFormat();
int width = crop.width();
int height = crop.height();
Image.Plane[] planes = image.getPlanes();
byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
byte[] rowData = new byte[planes[0].getRowStride()];
int channelOffset = 0;
int outputStride = 1;
for (int i = 0; i < planes.length; i++) {
switch (i) {
case 0:
channelOffset = 0;
outputStride = 1;
break;
case 1:
channelOffset = width * height + 1;
outputStride = 2;
break;
case 2:
channelOffset = width * height;
outputStride = 2;
break;
}
ByteBuffer buffer = planes[i].getBuffer();
int rowStride = planes[i].getRowStride();
int pixelStride = planes[i].getPixelStride();
int shift = (i == 0) ? 0 : 1;
int w = width >> shift;
int h = height >> shift;
buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
for (int row = 0; row < h; row++) {
int length;
if (pixelStride == 1 && outputStride == 1) {
length = w;
buffer.get(data, channelOffset, length);
channelOffset += length;
} else {
length = (w - 1) * pixelStride + 1;
buffer.get(rowData, 0, length);
for (int col = 0; col < w; col++) {
data[channelOffset] = rowData[col * pixelStride];
channelOffset += outputStride;
}
}
if (row < h - 1) {
buffer.position(buffer.position() + rowStride - length);
}
}
}
return data;
}
@Override
public void run() {
try {
startProcessing();
} catch (Exception ex) {
Log.d(TAG, "run: " + ex.getCause());
} finally {
release();
}
}
public void stopProcessing() {
sawOutputEOS = true;
}
}
Kindly have a look at the code and tell me what I am doing wrong.
(Image: distorted video frame)
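One thing worth checking: init() hard-codes COLOR_FormatYUV420SemiPlanar, but nothing verifies that the Pixel 3a's encoder actually consumes that byte layout, and raw YUV ByteBuffer input is notoriously device-dependent (stride and plane alignment vary). A minimal sketch, under the assumption that the distortion is a color-format or stride mismatch, that at least checks the format before configuring:
// Sketch: verify the chosen color format is advertised by the selected encoder.
MediaCodecInfo.CodecCapabilities caps = mediaCodecInfo.getCapabilitiesForType(mimeType);
boolean semiPlanarSupported = false;
for (int cf : caps.colorFormats) {
    if (cf == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
        semiPlanarSupported = true;
        break;
    }
}
if (!semiPlanarSupported) {
    // Fall back to another advertised YUV420 variant, or better, feed the encoder
    // through an input Surface (COLOR_FormatSurface) to sidestep buffer layouts.
}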
I am trying to compress a video. It works fine on all devices, but on the Samsung S5 it gives the error below.
Error Log : android.media.MediaCodec$CodecException: Error 0xffffec77
Below is my compression function:
public boolean convertVideo(final String path) {
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(path);
String width = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);
String height = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
String rotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
long startTime = -1;
long endTime = -1;
int resultWidth = 960;
int resultHeight = 960;
int rotationValue = Integer.valueOf(rotation);
int originalWidth = Integer.valueOf(width);
int originalHeight = Integer.valueOf(height);
//int bitrate = 450000;
int bitrate = 1200000;
int rotateRender = 0;
File cacheFile= CrimeRegisterActivity.getOutputMediaFile(CrimeRegisterActivity.MEDIA_TYPE_VIDEO);
CrimeRegisterActivity.VIDEOFILEPATH=cacheFile.getPath();
if (Build.VERSION.SDK_INT < 18 && resultHeight > resultWidth && resultWidth != originalWidth && resultHeight != originalHeight) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 90;
rotateRender = 270;
} else if (Build.VERSION.SDK_INT > 20) {
if (rotationValue == 90) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 0;
rotateRender = 270;
} else if (rotationValue == 180) {
rotateRender = 180;
rotationValue = 0;
} else if (rotationValue == 270) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 0;
rotateRender = 90;
}
}
File inputFile = new File(path);
if (!inputFile.canRead()) {
didWriteData(true, true);
return false;
}
videoConvertFirstWrite = true;
boolean error = false;
long videoStartTime = startTime;
long time = System.currentTimeMillis();
if (resultWidth != 0 && resultHeight != 0) {
MP4Builder mediaMuxer = null;
MediaExtractor extractor = null;
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
Mp4Movie movie = new Mp4Movie();
movie.setCacheFile(cacheFile);
movie.setRotation(rotationValue);
movie.setSize(resultWidth, resultHeight);
mediaMuxer = new MP4Builder().createMovie(movie);
extractor = new MediaExtractor();
extractor.setDataSource(inputFile.toString());
if (resultWidth != originalWidth || resultHeight != originalHeight) {
int videoIndex;
videoIndex = selectTrack(extractor, false);
if (videoIndex >= 0) {
MediaCodec decoder = null;
MediaCodec encoder = null;
InputSurface inputSurface = null;
OutputSurface outputSurface = null;
try {
long videoTime = -1;
boolean outputDone = false;
boolean inputDone = false;
boolean decoderDone = false;
int swapUV = 0;
int videoTrackIndex = -5;
int colorFormat;
int processorType = PROCESSOR_TYPE_OTHER;
String manufacturer = Build.MANUFACTURER.toLowerCase();
if (Build.VERSION.SDK_INT < 18) {
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
if (colorFormat == 0) {
throw new RuntimeException("no supported color format");
}
String codecName = codecInfo.getName();
if (codecName.contains("OMX.qcom.")) {
processorType = PROCESSOR_TYPE_QCOM;
if (Build.VERSION.SDK_INT == 16) {
if (manufacturer.equals("lge") || manufacturer.equals("nokia")) {
swapUV = 1;
}
}
} else if (codecName.contains("OMX.Intel.")) {
processorType = PROCESSOR_TYPE_INTEL;
} else if (codecName.equals("OMX.MTK.VIDEO.ENCODER.AVC")) {
processorType = PROCESSOR_TYPE_MTK;
} else if (codecName.equals("OMX.SEC.AVC.Encoder")) {
processorType = PROCESSOR_TYPE_SEC;
swapUV = 1;
} else if (codecName.equals("OMX.TI.DUCATI1.VIDEO.H264E")) {
processorType = PROCESSOR_TYPE_TI;
}
Log.e("tmessages", "codec = " + codecInfo.getName() + " manufacturer = " + manufacturer + "device = " + Build.MODEL);
} else {
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
}
Log.e("tmessages", "colorFormat = " + colorFormat);
int resultHeightAligned = resultHeight;
int padding = 0;
int bufferSize = resultWidth * resultHeight * 3 / 2;
if (processorType == PROCESSOR_TYPE_OTHER) {
if (resultHeight % 16 != 0) {
resultHeightAligned += (16 - (resultHeight % 16));
padding = resultWidth * (resultHeightAligned - resultHeight);
bufferSize += padding * 5 / 4;
}
} else if (processorType == PROCESSOR_TYPE_QCOM) {
if (!manufacturer.toLowerCase().equals("lge")) {
int uvoffset = (resultWidth * resultHeight + 2047) & ~2047;
padding = uvoffset - (resultWidth * resultHeight);
bufferSize += padding;
}
} else if (processorType == PROCESSOR_TYPE_TI) {
//resultHeightAligned = 368;
//bufferSize = resultWidth * resultHeightAligned * 3 / 2;
//resultHeightAligned += (16 - (resultHeight % 16));
//padding = resultWidth * (resultHeightAligned - resultHeight);
//bufferSize += padding * 5 / 4;
} else if (processorType == PROCESSOR_TYPE_MTK) {
if (manufacturer.equals("baidu")) {
resultHeightAligned += (16 - (resultHeight % 16));
padding = resultWidth * (resultHeightAligned - resultHeight);
bufferSize += padding * 5 / 4;
}
}
extractor.selectTrack(videoIndex);
if (startTime > 0) {
extractor.seekTo(startTime, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
} else {
extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
}
MediaFormat inputFormat = extractor.getTrackFormat(videoIndex);
MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, resultWidth, resultHeight);
outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate != 0 ? bitrate : 921600);
outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
if (Build.VERSION.SDK_INT < 18) {
outputFormat.setInteger("stride", resultWidth + 32);
outputFormat.setInteger("slice-height", resultHeight);
}
encoder = MediaCodec.createEncoderByType(MIME_TYPE);
encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (Build.VERSION.SDK_INT >= 18) {
inputSurface = new InputSurface(encoder.createInputSurface());
inputSurface.makeCurrent();
}
encoder.start();
decoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
if (Build.VERSION.SDK_INT >= 18) {
outputSurface = new OutputSurface();
} else {
outputSurface = new OutputSurface(resultWidth, resultHeight, rotateRender);
}
decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
decoder.start();
final int TIMEOUT_USEC = 2500;
ByteBuffer[] decoderInputBuffers = null;
ByteBuffer[] encoderOutputBuffers = null;
ByteBuffer[] encoderInputBuffers = null;
if (Build.VERSION.SDK_INT < 21) {
decoderInputBuffers = decoder.getInputBuffers();
encoderOutputBuffers = encoder.getOutputBuffers();
if (Build.VERSION.SDK_INT < 18) {
encoderInputBuffers = encoder.getInputBuffers();
}
}
while (!outputDone) {
if (!inputDone) {
boolean eof = false;
int index = extractor.getSampleTrackIndex();
if (index == videoIndex) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
ByteBuffer inputBuf;
if (Build.VERSION.SDK_INT < 21) {
inputBuf = decoderInputBuffers[inputBufIndex];
} else {
inputBuf = decoder.getInputBuffer(inputBufIndex);
}
int chunkSize = extractor.readSampleData(inputBuf, 0);
if (chunkSize < 0) {
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
} else {
decoder.queueInputBuffer(inputBufIndex, 0, chunkSize, extractor.getSampleTime(), 0);
extractor.advance();
}
}
} else if (index == -1) {
eof = true;
}
if (eof) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
}
}
}
boolean decoderOutputAvailable = !decoderDone;
boolean encoderOutputAvailable = true;
while (decoderOutputAvailable || encoderOutputAvailable) {
int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
encoderOutputAvailable = false;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (Build.VERSION.SDK_INT < 21) {
encoderOutputBuffers = encoder.getOutputBuffers();
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = encoder.getOutputFormat();
if (videoTrackIndex == -5) {
videoTrackIndex = mediaMuxer.addTrack(newFormat, false);
}
} else if (encoderStatus < 0) {
throw new RuntimeException("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else {
ByteBuffer encodedData;
if (Build.VERSION.SDK_INT < 21) {
encodedData = encoderOutputBuffers[encoderStatus];
} else {
encodedData = encoder.getOutputBuffer(encoderStatus);
}
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
if (info.size > 1) {
if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
if (mediaMuxer.writeSampleData(videoTrackIndex, encodedData, info, false)) {
didWriteData(false, false);
}
} else if (videoTrackIndex == -5) {
byte[] csd = new byte[info.size];
encodedData.limit(info.offset + info.size);
encodedData.position(info.offset);
encodedData.get(csd);
ByteBuffer sps = null;
ByteBuffer pps = null;
for (int a = info.size - 1; a >= 0; a--) {
if (a > 3) {
if (csd[a] == 1 && csd[a - 1] == 0 && csd[a - 2] == 0 && csd[a - 3] == 0) {
sps = ByteBuffer.allocate(a - 3);
pps = ByteBuffer.allocate(info.size - (a - 3));
sps.put(csd, 0, a - 3).position(0);
pps.put(csd, a - 3, info.size - (a - 3)).position(0);
break;
}
} else {
break;
}
}
MediaFormat newFormat = MediaFormat.createVideoFormat(MIME_TYPE, resultWidth, resultHeight);
if (sps != null && pps != null) {
newFormat.setByteBuffer("csd-0", sps);
newFormat.setByteBuffer("csd-1", pps);
}
videoTrackIndex = mediaMuxer.addTrack(newFormat, false);
}
}
outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
encoder.releaseOutputBuffer(encoderStatus, false);
}
if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
continue;
}
if (!decoderDone) {
int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
decoderOutputAvailable = false;
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = decoder.getOutputFormat();
Log.e("tmessages", "newFormat = " + newFormat);
} else if (decoderStatus < 0) {
throw new RuntimeException("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
} else {
boolean doRender;
if (Build.VERSION.SDK_INT >= 18) {
doRender = info.size != 0;
} else {
doRender = info.size != 0 || info.presentationTimeUs != 0;
}
if (endTime > 0 && info.presentationTimeUs >= endTime) {
inputDone = true;
decoderDone = true;
doRender = false;
info.flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
}
if (startTime > 0 && videoTime == -1) {
if (info.presentationTimeUs < startTime) {
doRender = false;
Log.e("tmessages", "drop frame startTime = " + startTime + " present time = " + info.presentationTimeUs);
} else {
videoTime = info.presentationTimeUs;
}
}
decoder.releaseOutputBuffer(decoderStatus, doRender);
if (doRender) {
boolean errorWait = false;
try {
outputSurface.awaitNewImage();
} catch (Exception e) {
errorWait = true;
Log.e("tmessages", e.getMessage());
}
if (!errorWait) {
if (Build.VERSION.SDK_INT >= 18) {
outputSurface.drawImage(false);
inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
inputSurface.swapBuffers();
} else {
int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
outputSurface.drawImage(true);
ByteBuffer rgbBuf = outputSurface.getFrame();
ByteBuffer yuvBuf = encoderInputBuffers[inputBufIndex];
yuvBuf.clear();
convertVideoFrame(rgbBuf, yuvBuf, colorFormat, resultWidth, resultHeight, padding, swapUV);
encoder.queueInputBuffer(inputBufIndex, 0, bufferSize, info.presentationTimeUs, 0);
} else {
Log.e("tmessages", "input buffer not available");
}
}
}
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
decoderOutputAvailable = false;
Log.e("tmessages", "decoder stream end");
if (Build.VERSION.SDK_INT >= 18) {
encoder.signalEndOfInputStream();
} else {
int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
encoder.queueInputBuffer(inputBufIndex, 0, 1, info.presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
}
}
}
}
}
}
if (videoTime != -1) {
videoStartTime = videoTime;
}
} catch (Exception e) {
Log.e("tmessages", e.getMessage());
error = true;
}
extractor.unselectTrack(videoIndex);
if (outputSurface != null) {
outputSurface.release();
}
if (inputSurface != null) {
inputSurface.release();
}
if (decoder != null) {
decoder.stop();
decoder.release();
}
if (encoder != null) {
encoder.stop();
encoder.release();
}
}
} else {
long videoTime = readAndWriteTrack(extractor, mediaMuxer, info, startTime, endTime, cacheFile, false);
if (videoTime != -1) {
videoStartTime = videoTime;
}
}
if (!error) {
readAndWriteTrack(extractor, mediaMuxer, info, videoStartTime, endTime, cacheFile, true);
}
} catch (Exception e) {
error = true;
Log.e("tmessages", e.getMessage());
} finally {
if (extractor != null) {
extractor.release();
}
if (mediaMuxer != null) {
try {
mediaMuxer.finishMovie(false);
} catch (Exception e) {
Log.e("tmessages", e.getMessage());
}
}
Log.e("tmessages", "time = " + (System.currentTimeMillis() - time));
}
} else {
didWriteData(true, true);
return false;
}
didWriteData(true, error);
// inputFile.delete();
return true;
}
I have pre-specified the height and width for the compression. The code above works fine on other devices.
I had this issue on a Samsung Note 4 with the UHD video option (3840x2160). After much trial and error, it stops throwing the exception only with these width and height values:
resultWidth = 800;
resultHeight = 600;
or
resultWidth = 640;
resultHeight = 480;
You can set these values in this code line:
MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, resultWidth, resultHeight);
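Since 800x600 and 640x480 work while larger sizes fail, the encoder is probably rejecting resolutions outside its supported range. On API 21+ you can ask the codec directly instead of guessing; a hedged sketch (codecInfo here is assumed to be the MediaCodecInfo returned by selectCodec):
// Sketch (API 21+): clamp to a size the encoder actually supports before configuring.
MediaCodecInfo.VideoCapabilities vc =
        codecInfo.getCapabilitiesForType(MIME_TYPE).getVideoCapabilities();
if (!vc.isSizeSupported(resultWidth, resultHeight)) {
    resultWidth = 640;   // fallback values reported to work above
    resultHeight = 480;
}
MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, resultWidth, resultHeight);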
I am recording a video from a byte array using MediaCodec and MediaMuxer, but the resulting video cannot be played; the system says "File incorrect". You can see the code example below. I need to get a video file in MP4 format. Please tell me what the problem is.
//init - this function is called once in the beginning.
private static final String MIME_TYPE = "video/avc";
private final static int MAX_WIDTH = 320;
private final static int MAX_HEIGHT = 240;
private final static int VIDEO_BITRATE = 2000000;
private static final int FRAME_RATE = 20;
private final static int VIDEO_IFRAME_INTERVAL = 10;
public boolean initCodec() {
bufferInfo = new MediaCodec.BufferInfo();
countFile++;
String savePath = "uonmap_video_" + countFile + ".mp4";
File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), savePath);
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
if (codecInfo == null) {
return false;
}
colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_BITRATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VIDEO_IFRAME_INTERVAL);
try {
mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
} catch (IOException e) {
e.printStackTrace();
}
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
try {
mMuxer = new MediaMuxer(file.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException ioe) {
throw new RuntimeException("MediaMuxer creation failed", ioe);
}
isStart = true;
mTrackIndex = -1;
return true;
}
//encode function - the function is called every time when a new byte array comes
public synchronized void encode(byte[] data, boolean endOfStream) {
final int TIMEOUT_USEC = 50;
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
int inputBufferIndex = mediaCodec.dequeueInputBuffer(0);
if (inputBufferIndex >= 0) {
try {
if (isPlanar) {
data = YV12toYUV420Planar(data);
} else {
data = YV12toYUV420PackedSemiPlanar(data);
}
} catch (IndexOutOfBoundsException ex) {
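// the conversion can overrun on a short or truncated frame; the error is swallowed here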
}
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(data);
mediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, 50, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
} else {
return;
}
ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
while (true) {
int encoderStatus = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mediaCodec.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = mediaCodec.getOutputFormat();
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
} else if (encoderStatus < 0) {
} else {
ByteBuffer encodedData = outputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
bufferInfo.size = 0;
}
if (bufferInfo.size != 0) {
if (!mMuxerStarted) {
throw new RuntimeException("muxer hasn't started");
}
bufferInfo.flags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
bufferInfo.presentationTimeUs = 50;
mMuxer.writeSampleData(mTrackIndex, encodedData, bufferInfo);
}
mediaCodec.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
}
}
public byte[] YV12toYUV420PackedSemiPlanar(final byte[] input) {
/*
* COLOR_TI_FormatYUV420PackedSemiPlanar is NV12
* We convert by putting the corresponding U and V bytes together (interleaved).
*/
final int frameSize = width * height;
final int qFrameSize = frameSize/4;
byte[] output = new byte[input.length];
System.arraycopy(input, 0, output, 0, frameSize); // Y
for (int i = 0; i < qFrameSize; i++) {
output[frameSize + i*2] = input[frameSize + i + qFrameSize]; // Cb (U)
output[frameSize + i*2 + 1] = input[frameSize + i]; // Cr (V)
}
return output;
}
Log:
05-27 18:13:56.897 32196-32320/com.example.sasha.myrtc D/dalvikvm: GC_FOR_ALLOC freed 227K, 10% free 5333K/5876K, paused 24ms, total 24ms
05-27 18:13:56.907 32196-32434/com.example.sasha.myrtc V/MPEG4Writer: decoding time: 0 and ctts offset time: 0
05-27 18:13:56.907 32196-32434/com.example.sasha.myrtc V/MPEG4Writer: Video media time stamp: 0 and previous paused duration 50
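For what it's worth, two things in the encode() above stand out as likely causes of an unplayable file: every input buffer is queued with BUFFER_FLAG_CODEC_CONFIG (that flag is only for codec setup data such as SPS/PPS, never for regular frames), and presentationTimeUs is a constant 50 on both input and output, so every sample gets the same timestamp. A sketch of what the input side would normally look like (frameIndex is a hypothetical running frame counter):
// queue a regular frame: no flags, and a monotonically increasing timestamp
long ptsUsec = frameIndex * 1000000L / FRAME_RATE; // one frame's duration apart, in microseconds
mediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, ptsUsec, 0);
frameIndex++;
On the output side, do not overwrite bufferInfo.flags or bufferInfo.presentationTimeUs; pass the values MediaCodec filled in straight through to mMuxer.writeSampleData().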
How can I convert images to video without using FFmpeg or JCodec, only with the Android MediaCodec? The images for the video are bitmaps that can be ARGB_8888 or YUV420 (my choice). The most important thing is that the video has to be playable on Android devices, and the maximum API level is 16. I know all about the API 18 MediaMuxer and I cannot use it.
Please help me; I have been stuck on this for many days.
(JCodec is too slow, and FFmpeg is very complicated to use.)
There is no simple way to do this in API 16 that works across all devices.
You will encounter problems with buffer alignment, color spaces, and the need to use different YUV layouts on different devices.
Consider the buffer alignment issue. On pre-API 18 devices with Qualcomm SOCs, you had to align the CbCr planes at a 2K offset from the start of the buffer. On API 18, all devices use the same layout; this is enforced by CTS tests that were added in Android 4.3.
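For reference, the Qualcomm quirk looked roughly like this; a sketch under the stated assumption of a pre-API-18 Qualcomm encoder, with yPlane, uvPlane, and frame as hypothetical buffers:
int ySize = width * height;
int uvOffset = (ySize + 2047) & ~2047;                    // round up to the next 2048-byte boundary
System.arraycopy(yPlane, 0, frame, 0, ySize);             // luma at the start, as usual
System.arraycopy(uvPlane, 0, frame, uvOffset, ySize / 2); // chroma at the 2K-aligned offset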
Even with API 18 you still have to detect at runtime whether the encoder wants planar or semi-planar values. (It's probably not relevant for your situation, but none of the YUV formats output by the camera are accepted by MediaCodec.) Note there is no RGB input to MediaCodec.
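In practice that runtime detection comes down to checking which COLOR_Format* the codec reported and converting accordingly. A sketch, where colorFormat is the value returned by a selectColorFormat()-style query (one is shown later on this page) and toNV12()/toI420() are hypothetical converters:
boolean semiPlanar;
switch (colorFormat) {
    case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
    case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
    case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
        semiPlanar = true;  // NV12-style: Y plane followed by interleaved UV
        break;
    default:
        semiPlanar = false; // I420-style: Y plane, then U plane, then V plane
}
byte[] frame = semiPlanar ? toNV12(input) : toI420(input);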
If portability is not a concern, i.e. you're targeting a specific device, your code will be much simpler.
There are code snippets in the SO pages linked above. The closest "official" example is the buffer-to-buffer / buffer-to-surface tests in EncodeDecodeTest. These are API 18 tests, which means they don't do the "if QC and API16 then change buffer alignment" dance, but they do show how to do the planar vs. semi-planar layout detection. It doesn't include an RGB-to-YUV color converter, but there are examples of such around the web.
On the bright side, the encoder output seems to be just fine on any device and API version.
Converting the raw H.264 stream to a .mp4 file requires a 3rd-party library, since as you noted MediaMuxer is not available. I believe some people have installed ffmpeg as a command-line utility and executed it that way (maybe like this?).
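If you go the command-line route, the invocation might look something like this; purely a sketch, assuming an ffmpeg binary bundled with the app at a path of your choosing:
// wrap a raw H.264 elementary stream into an MP4 without re-encoding
Process p = Runtime.getRuntime().exec(new String[] {
    "/data/data/com.example.app/files/ffmpeg",   // assumed location of the binary
    "-f", "h264", "-i", "/sdcard/raw.h264",      // input: the raw elementary stream
    "-c:v", "copy", "/sdcard/out.mp4"            // copy the video stream, just remux
});
p.waitFor();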
import android.app.Activity;
import android.app.ProgressDialog;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Environment;
import android.os.Handler;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class MMediaMuxer {
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static int _width = 512;
private static int _height = 512;
private static final int BIT_RATE = 800000;
private static final int INFLAME_INTERVAL = 1;
private static final int FRAME_RATE = 10;
private static boolean DEBUG = false;
private MediaCodec mediaCodec;
private MediaMuxer mediaMuxer;
private boolean mRunning;
private int generateIndex = 0;
private int mTrackIndex;
private int MAX_FRAME_VIDEO = 0;
private List<byte[]> bitList;
private List<byte[]> bitFirst;
private List<byte[]> bitLast;
private int current_index_frame = 0;
private static final String TAG = "CODEC";
private String outputPath;
private Activity _activity;
private ProgressDialog pd;
private String _title;
private String _mess;
public void Init(Activity activity, int width, int height, String title, String mess) {
_title = title;
_mess = mess;
_activity = activity;
_width = width;
_height = height;
Logd("MMediaMuxer Init");
ShowProgressBar();
}
private Handler aHandler = new Handler();
public void AddFrame(final byte[] byteFrame) {
CheckDataListState();
new Thread(new Runnable() {
@Override
public void run() {
Logd("Android get Frame");
Bitmap bit = BitmapFactory.decodeByteArray(byteFrame, 0, byteFrame.length);
Logd("Android convert Bitmap");
byte[] byteConvertFrame = getNV21(bit.getWidth(), bit.getHeight(), bit);
Logd("Android convert getNV21");
bitList.add(byteConvertFrame);
}
}).start();
}
public void AddFrame(byte[] byteFrame, int count, boolean isLast) {
CheckDataListState();
Logd("Android get Frames = " + count);
Bitmap bit = BitmapFactory.decodeByteArray(byteFrame, 0, byteFrame.length);
Logd("Android convert Bitmap");
byteFrame = getNV21(bit.getWidth(), bit.getHeight(), bit);
Logd("Android convert getNV21");
for (int i = 0; i < count; i++) {
if (isLast) {
bitLast.add(byteFrame);
} else {
bitFirst.add(byteFrame);
}
}
}
public void CreateVideo() {
current_index_frame = 0;
Logd("Prepare Frames Data");
bitFirst.addAll(bitList);
bitFirst.addAll(bitLast);
MAX_FRAME_VIDEO = bitFirst.size();
Logd("CreateVideo");
mRunning = true;
bufferEncoder();
}
public boolean GetStateEncoder() {
return mRunning;
}
public String GetPath() {
return outputPath;
}
public void onBackPressed() {
mRunning = false;
}
public void ShowProgressBar() {
_activity.runOnUiThread(new Runnable() {
public void run() {
pd = new ProgressDialog(_activity);
pd.setTitle(_title);
pd.setCancelable(false);
pd.setMessage(_mess);
pd.setCanceledOnTouchOutside(false);
pd.show();
}
});
}
public void HideProgressBar() {
new Thread(new Runnable() {
@Override
public void run() {
_activity.runOnUiThread(new Runnable() {
@Override
public void run() {
pd.dismiss();
}
});
}
}).start();
}
private void bufferEncoder() {
Runnable runnable = new Runnable() {
@Override
public void run() {
try {
Logd("PrepareEncoder start");
PrepareEncoder();
Logd("PrepareEncoder end");
} catch (IOException e) {
Loge(e.getMessage());
}
try {
while (mRunning) {
Encode();
}
} finally {
Logd("release");
Release();
HideProgressBar();
bitFirst = null;
bitLast = null;
}
}
};
Thread thread = new Thread(runnable);
thread.start();
}
public void ClearTask() {
bitList = null;
bitFirst = null;
bitLast = null;
}
private void PrepareEncoder() throws IOException {
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
if (codecInfo == null) {
Loge("Unable to find an appropriate codec for " + MIME_TYPE);
throw new IOException("no encoder found for " + MIME_TYPE);
}
Logd("found codec: " + codecInfo.getName());
int colorFormat;
try {
colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
} catch (Exception e) {
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
}
mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, _width, _height);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, INFLAME_INTERVAL);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
try {
String currentDateTimeString = DateFormat.getDateTimeInstance().format(new Date());
outputPath = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES),
"pixel"+currentDateTimeString+".mp4").toString();
mediaMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException ioe) {
Loge("MediaMuxer creation failed");
throw ioe;
}
}
private void Encode() {
while (true) {
if (!mRunning) {
break;
}
Logd("Encode start");
long TIMEOUT_USEC = 5000;
int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
long ptsUsec = computePresentationTime(generateIndex, FRAME_RATE);
if (inputBufIndex >= 0) {
byte[] input = bitFirst.get(current_index_frame);
final ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufIndex);
inputBuffer.clear();
inputBuffer.put(input);
mediaCodec.queueInputBuffer(inputBufIndex, 0, input.length, ptsUsec, 0);
generateIndex++;
}
MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
Loge("No output from encoder available");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = mediaCodec.getOutputFormat();
mTrackIndex = mediaMuxer.addTrack(newFormat);
mediaMuxer.start();
} else if (encoderStatus < 0) {
Loge("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else {
ByteBuffer encodedData = mediaCodec.getOutputBuffer(encoderStatus);
if (encodedData == null) {
Loge("encoderOutputBuffer " + encoderStatus + " was null");
} else {
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// codec config (SPS/PPS) is already in the track format added to the muxer
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
}
}
// always return the buffer to the codec, even when nothing was written
mediaCodec.releaseOutputBuffer(encoderStatus, false);
}
current_index_frame++;
if (current_index_frame > MAX_FRAME_VIDEO - 1) {
Log.d(TAG, "mRunning = false;");
mRunning = false;
}
Logd("Encode end");
}
}
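// Caveat: this loop never queues a BUFFER_FLAG_END_OF_STREAM buffer or drains the
// encoder before Release(), so frames still buffered inside the codec can be lost
// from the end of the file.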
private void Release() {
if (mediaCodec != null) {
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
Logd("RELEASE CODEC");
}
if (mediaMuxer != null) {
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;
Logd("RELEASE MUXER");
}
}
/**
* Returns the first codec capable of encoding the specified MIME type, or
* null if no match was found.
*/
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
/**
* Returns a color format that is supported by the codec and by this test
* code. If no match is found, this throws a test failure -- the set of
* formats known to the test should be expanded for new platforms.
*/
private static int selectColorFormat(MediaCodecInfo codecInfo,
String mimeType) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo
.getCapabilitiesForType(mimeType);
for (int i = 0; i < capabilities.colorFormats.length; i++) {
int colorFormat = capabilities.colorFormats[i];
if (isRecognizedFormat(colorFormat)) {
return colorFormat;
}
}
throw new IllegalArgumentException("couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
}
/**
* Returns true if this is a color format that this test code understands
* (i.e. we know how to read and generate frames in this format).
*/
private static boolean isRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle for this test
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
return true;
default:
return false;
}
}
private byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
int[] argb = new int[inputWidth * inputHeight];
scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
scaled.recycle();
return yuv;
}
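// Note: despite the getNV21() name above, encodeYUV420SP() writes U before V, which is
// NV12 (the layout COLOR_FormatYUV420SemiPlanar expects), not true NV21 (V first).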
private void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
}
index++;
}
}
}
private void CheckDataListState() {
if (bitList == null) {
bitList = new ArrayList<>();
}
if (bitFirst == null) {
bitFirst = new ArrayList<>();
}
if (bitLast == null) {
bitLast = new ArrayList<>();
}
}
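// Note: the 132 us offset is carried over from the CTS EncodeDecodeTest; what matters is
// that frameIndex * 1000000 / framerate spaces the timestamps one frame apart, so the
// muxed file plays back at the intended rate.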
private long computePresentationTime(long frameIndex, int framerate) {
return 132 + frameIndex * 1000000 / framerate;
}
private static void Logd(String Mess) {
if (DEBUG) {
Log.d(TAG, Mess);
}
}
private static void Loge(String Mess) {
Log.e(TAG, Mess);
}
}
This works with SDK 21. Don't mind the several lists; they are there because chunks of data are converted at runtime.
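For completeness, a minimal usage sketch of the class above (method names as defined there; the byte arrays are assumed to be JPEG- or PNG-encoded images, since AddFrame() runs them through BitmapFactory.decodeByteArray(), and in real use you would wait for the AddFrame() worker threads to finish before calling CreateVideo()):
MMediaMuxer muxer = new MMediaMuxer();
muxer.Init(activity, 512, 512, "Encoding", "Creating video...");
for (byte[] jpegFrame : frames) {
    muxer.AddFrame(jpegFrame);        // decoded to Bitmap, converted to YUV420SP, queued
}
muxer.CreateVideo();                  // encodes and muxes on a background thread
while (muxer.GetStateEncoder()) { }   // crude wait; poll or use a callback in real code
String path = muxer.GetPath();        // path of the resulting .mp4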