I am trying to compress a video. It works fine on every device I have tested, but on a Samsung S5 it throws the error below.
Error log: android.media.MediaCodec$CodecException: Error 0xffffec77
Below is my compression function:
public boolean convertVideo(final String path) {
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(path);
String width = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);
String height = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
String rotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
long startTime = -1;
long endTime = -1;
int resultWidth = 960;
int resultHeight = 960;
int rotationValue = Integer.valueOf(rotation);
int originalWidth = Integer.valueOf(width);
int originalHeight = Integer.valueOf(height);
//int bitrate = 450000;
int bitrate = 1200000;
int rotateRender = 0;
File cacheFile= CrimeRegisterActivity.getOutputMediaFile(CrimeRegisterActivity.MEDIA_TYPE_VIDEO);
CrimeRegisterActivity.VIDEOFILEPATH=cacheFile.getPath();
if (Build.VERSION.SDK_INT < 18 && resultHeight > resultWidth && resultWidth != originalWidth && resultHeight != originalHeight) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 90;
rotateRender = 270;
} else if (Build.VERSION.SDK_INT > 20) {
if (rotationValue == 90) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 0;
rotateRender = 270;
} else if (rotationValue == 180) {
rotateRender = 180;
rotationValue = 0;
} else if (rotationValue == 270) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 0;
rotateRender = 90;
}
}
File inputFile = new File(path);
if (!inputFile.canRead()) {
didWriteData(true, true);
return false;
}
videoConvertFirstWrite = true;
boolean error = false;
long videoStartTime = startTime;
long time = System.currentTimeMillis();
if (resultWidth != 0 && resultHeight != 0) {
MP4Builder mediaMuxer = null;
MediaExtractor extractor = null;
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
Mp4Movie movie = new Mp4Movie();
movie.setCacheFile(cacheFile);
movie.setRotation(rotationValue);
movie.setSize(resultWidth, resultHeight);
mediaMuxer = new MP4Builder().createMovie(movie);
extractor = new MediaExtractor();
extractor.setDataSource(inputFile.toString());
if (resultWidth != originalWidth || resultHeight != originalHeight) {
int videoIndex;
videoIndex = selectTrack(extractor, false);
if (videoIndex >= 0) {
MediaCodec decoder = null;
MediaCodec encoder = null;
InputSurface inputSurface = null;
OutputSurface outputSurface = null;
try {
long videoTime = -1;
boolean outputDone = false;
boolean inputDone = false;
boolean decoderDone = false;
int swapUV = 0;
int videoTrackIndex = -5;
int colorFormat;
int processorType = PROCESSOR_TYPE_OTHER;
String manufacturer = Build.MANUFACTURER.toLowerCase();
if (Build.VERSION.SDK_INT < 18) {
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
if (colorFormat == 0) {
throw new RuntimeException("no supported color format");
}
String codecName = codecInfo.getName();
if (codecName.contains("OMX.qcom.")) {
processorType = PROCESSOR_TYPE_QCOM;
if (Build.VERSION.SDK_INT == 16) {
if (manufacturer.equals("lge") || manufacturer.equals("nokia")) {
swapUV = 1;
}
}
} else if (codecName.contains("OMX.Intel.")) {
processorType = PROCESSOR_TYPE_INTEL;
} else if (codecName.equals("OMX.MTK.VIDEO.ENCODER.AVC")) {
processorType = PROCESSOR_TYPE_MTK;
} else if (codecName.equals("OMX.SEC.AVC.Encoder")) {
processorType = PROCESSOR_TYPE_SEC;
swapUV = 1;
} else if (codecName.equals("OMX.TI.DUCATI1.VIDEO.H264E")) {
processorType = PROCESSOR_TYPE_TI;
}
Log.e("tmessages", "codec = " + codecInfo.getName() + " manufacturer = " + manufacturer + "device = " + Build.MODEL);
} else {
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
}
Log.e("tmessages", "colorFormat = " + colorFormat);
int resultHeightAligned = resultHeight;
int padding = 0;
int bufferSize = resultWidth * resultHeight * 3 / 2;
if (processorType == PROCESSOR_TYPE_OTHER) {
if (resultHeight % 16 != 0) {
resultHeightAligned += (16 - (resultHeight % 16));
padding = resultWidth * (resultHeightAligned - resultHeight);
bufferSize += padding * 5 / 4;
}
} else if (processorType == PROCESSOR_TYPE_QCOM) {
if (!manufacturer.toLowerCase().equals("lge")) {
int uvoffset = (resultWidth * resultHeight + 2047) & ~2047;
padding = uvoffset - (resultWidth * resultHeight);
bufferSize += padding;
}
} else if (processorType == PROCESSOR_TYPE_TI) {
//resultHeightAligned = 368;
//bufferSize = resultWidth * resultHeightAligned * 3 / 2;
//resultHeightAligned += (16 - (resultHeight % 16));
//padding = resultWidth * (resultHeightAligned - resultHeight);
//bufferSize += padding * 5 / 4;
} else if (processorType == PROCESSOR_TYPE_MTK) {
if (manufacturer.equals("baidu")) {
resultHeightAligned += (16 - (resultHeight % 16));
padding = resultWidth * (resultHeightAligned - resultHeight);
bufferSize += padding * 5 / 4;
}
}
extractor.selectTrack(videoIndex);
if (startTime > 0) {
extractor.seekTo(startTime, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
} else {
extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
}
MediaFormat inputFormat = extractor.getTrackFormat(videoIndex);
MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, resultWidth, resultHeight);
outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate != 0 ? bitrate : 921600);
outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
if (Build.VERSION.SDK_INT < 18) {
outputFormat.setInteger("stride", resultWidth + 32);
outputFormat.setInteger("slice-height", resultHeight);
}
encoder = MediaCodec.createEncoderByType(MIME_TYPE);
encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (Build.VERSION.SDK_INT >= 18) {
inputSurface = new InputSurface(encoder.createInputSurface());
inputSurface.makeCurrent();
}
encoder.start();
decoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
if (Build.VERSION.SDK_INT >= 18) {
outputSurface = new OutputSurface();
} else {
outputSurface = new OutputSurface(resultWidth, resultHeight, rotateRender);
}
decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
decoder.start();
final int TIMEOUT_USEC = 2500;
ByteBuffer[] decoderInputBuffers = null;
ByteBuffer[] encoderOutputBuffers = null;
ByteBuffer[] encoderInputBuffers = null;
if (Build.VERSION.SDK_INT < 21) {
decoderInputBuffers = decoder.getInputBuffers();
encoderOutputBuffers = encoder.getOutputBuffers();
if (Build.VERSION.SDK_INT < 18) {
encoderInputBuffers = encoder.getInputBuffers();
}
}
while (!outputDone) {
if (!inputDone) {
boolean eof = false;
int index = extractor.getSampleTrackIndex();
if (index == videoIndex) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
ByteBuffer inputBuf;
if (Build.VERSION.SDK_INT < 21) {
inputBuf = decoderInputBuffers[inputBufIndex];
} else {
inputBuf = decoder.getInputBuffer(inputBufIndex);
}
int chunkSize = extractor.readSampleData(inputBuf, 0);
if (chunkSize < 0) {
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
} else {
decoder.queueInputBuffer(inputBufIndex, 0, chunkSize, extractor.getSampleTime(), 0);
extractor.advance();
}
}
} else if (index == -1) {
eof = true;
}
if (eof) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
}
}
}
boolean decoderOutputAvailable = !decoderDone;
boolean encoderOutputAvailable = true;
while (decoderOutputAvailable || encoderOutputAvailable) {
int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
encoderOutputAvailable = false;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (Build.VERSION.SDK_INT < 21) {
encoderOutputBuffers = encoder.getOutputBuffers();
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = encoder.getOutputFormat();
if (videoTrackIndex == -5) {
videoTrackIndex = mediaMuxer.addTrack(newFormat, false);
}
} else if (encoderStatus < 0) {
throw new RuntimeException("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else {
ByteBuffer encodedData;
if (Build.VERSION.SDK_INT < 21) {
encodedData = encoderOutputBuffers[encoderStatus];
} else {
encodedData = encoder.getOutputBuffer(encoderStatus);
}
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
if (info.size > 1) {
if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
if (mediaMuxer.writeSampleData(videoTrackIndex, encodedData, info, false)) {
didWriteData(false, false);
}
} else if (videoTrackIndex == -5) {
byte[] csd = new byte[info.size];
encodedData.limit(info.offset + info.size);
encodedData.position(info.offset);
encodedData.get(csd);
ByteBuffer sps = null;
ByteBuffer pps = null;
for (int a = info.size - 1; a >= 0; a--) {
if (a > 3) {
if (csd[a] == 1 && csd[a - 1] == 0 && csd[a - 2] == 0 && csd[a - 3] == 0) {
sps = ByteBuffer.allocate(a - 3);
pps = ByteBuffer.allocate(info.size - (a - 3));
sps.put(csd, 0, a - 3).position(0);
pps.put(csd, a - 3, info.size - (a - 3)).position(0);
break;
}
} else {
break;
}
}
MediaFormat newFormat = MediaFormat.createVideoFormat(MIME_TYPE, resultWidth, resultHeight);
if (sps != null && pps != null) {
newFormat.setByteBuffer("csd-0", sps);
newFormat.setByteBuffer("csd-1", pps);
}
videoTrackIndex = mediaMuxer.addTrack(newFormat, false);
}
}
outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
encoder.releaseOutputBuffer(encoderStatus, false);
}
if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
continue;
}
if (!decoderDone) {
int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
decoderOutputAvailable = false;
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = decoder.getOutputFormat();
Log.e("tmessages", "newFormat = " + newFormat);
} else if (decoderStatus < 0) {
throw new RuntimeException("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
} else {
boolean doRender;
if (Build.VERSION.SDK_INT >= 18) {
doRender = info.size != 0;
} else {
doRender = info.size != 0 || info.presentationTimeUs != 0;
}
if (endTime > 0 && info.presentationTimeUs >= endTime) {
inputDone = true;
decoderDone = true;
doRender = false;
info.flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
}
if (startTime > 0 && videoTime == -1) {
if (info.presentationTimeUs < startTime) {
doRender = false;
Log.e("tmessages", "drop frame startTime = " + startTime + " present time = " + info.presentationTimeUs);
} else {
videoTime = info.presentationTimeUs;
}
}
decoder.releaseOutputBuffer(decoderStatus, doRender);
if (doRender) {
boolean errorWait = false;
try {
outputSurface.awaitNewImage();
} catch (Exception e) {
errorWait = true;
Log.e("tmessages", e.getMessage());
}
if (!errorWait) {
if (Build.VERSION.SDK_INT >= 18) {
outputSurface.drawImage(false);
inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
inputSurface.swapBuffers();
} else {
int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
outputSurface.drawImage(true);
ByteBuffer rgbBuf = outputSurface.getFrame();
ByteBuffer yuvBuf = encoderInputBuffers[inputBufIndex];
yuvBuf.clear();
convertVideoFrame(rgbBuf, yuvBuf, colorFormat, resultWidth, resultHeight, padding, swapUV);
encoder.queueInputBuffer(inputBufIndex, 0, bufferSize, info.presentationTimeUs, 0);
} else {
Log.e("tmessages", "input buffer not available");
}
}
}
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
decoderOutputAvailable = false;
Log.e("tmessages", "decoder stream end");
if (Build.VERSION.SDK_INT >= 18) {
encoder.signalEndOfInputStream();
} else {
int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
encoder.queueInputBuffer(inputBufIndex, 0, 1, info.presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
}
}
}
}
}
}
if (videoTime != -1) {
videoStartTime = videoTime;
}
} catch (Exception e) {
Log.e("tmessages", e.getMessage());
error = true;
}
extractor.unselectTrack(videoIndex);
if (outputSurface != null) {
outputSurface.release();
}
if (inputSurface != null) {
inputSurface.release();
}
if (decoder != null) {
decoder.stop();
decoder.release();
}
if (encoder != null) {
encoder.stop();
encoder.release();
}
}
} else {
long videoTime = readAndWriteTrack(extractor, mediaMuxer, info, startTime, endTime, cacheFile, false);
if (videoTime != -1) {
videoStartTime = videoTime;
}
}
if (!error) {
readAndWriteTrack(extractor, mediaMuxer, info, videoStartTime, endTime, cacheFile, true);
}
} catch (Exception e) {
error = true;
Log.e("tmessages", e.getMessage());
} finally {
if (extractor != null) {
extractor.release();
}
if (mediaMuxer != null) {
try {
mediaMuxer.finishMovie(false);
} catch (Exception e) {
Log.e("tmessages", e.getMessage());
}
}
Log.e("tmessages", "time = " + (System.currentTimeMillis() - time));
}
} else {
didWriteData(true, true);
return false;
}
didWriteData(true, error);
// inputFile.delete();
return true;
}
I have pre-specified the height and width for the compression. The code above works fine on other devices.
I had this issue on a Samsung Note 4 with the UHD video option (3840x2160). After a lot of experimenting, the only width and height values that do not throw the exception are:
resultWidth = 800;
resultHeight = 600;
or
resultWidth = 640;
resultHeight = 480;
You can set these values on this line:
MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, resultWidth, resultHeight);
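Rather than hard-coding 800x600 or 640x480, a more general guard (a sketch of mine, not part of the original answer) is to scale the source down and round both dimensions to multiples of 16, since some hardware encoders are picky about resolutions that are not 16-aligned or fall outside their supported range:
// Sketch: pick a 16-aligned target size that keeps the source aspect ratio; maxSide is an assumed limit.
int maxSide = 960;
float scale = Math.min(1f, maxSide / (float) Math.max(originalWidth, originalHeight));
int alignedWidth = Math.round(originalWidth * scale / 16f) * 16;
int alignedHeight = Math.round(originalHeight * scale / 16f) * 16;
MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, alignedWidth, alignedHeight);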
var des="/storage/emulated/0/Android/data/"
var newpath= SiliCompressor.with(this).compressVideo(filepath,"newmovies")
I'm compressing a 14 MB video and it is reduced to just 324 KB. Video compression using SiliCompressor in Android (Kotlin) reduces the video quality too much.
If I do compression with custom resolution:
var newpath= SiliCompressor.with(this).compressVideo(filepath,"newmovies",1080,1920,30)
it decreases video quality too much.
How can I compress a video in Android other than through FFmpeg?
I have tried MediaCodec, but it didn't work.
Here is the relevant code:
public String compressVideo(String videoFilePath, String destinationDir, int outWidth, int outHeight, int bitrate) throws URISyntaxException {
boolean isconverted = MediaController.getInstance(mContext).convertVideo(videoFilePath, new File(destinationDir), outWidth, outHeight, bitrate);
if (isconverted) {
Log.v(LOG_TAG, "Video Conversion Complete");
} else {
Log.v(LOG_TAG, "Video conversion in progress");
}
return MediaController.cachedFile.getPath();
}
private boolean convertVideo(Context context, Uri videoContentUri, final String sourcePath, File destDir, int outWidth, int outHeight, int outBitrate) {
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
if (sourcePath != null) {
this.path = sourcePath;
retriever.setDataSource(path);
} else if (context != null && videoContentUri != null) {
retriever.setDataSource(context, videoContentUri);
} else {
return false;
}
String width = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);
String height = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
String rotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
long startTime = -1;
long endTime = -1;
int resultWidth = outWidth > 0 ? outWidth : DEFAULT_VIDEO_WIDTH;
int resultHeight = outHeight > 0 ? outHeight : DEFAULT_VIDEO_HEIGHT;
int rotationValue = Integer.valueOf(rotation);
int originalWidth = Integer.valueOf(width);
int originalHeight = Integer.valueOf(height);
int bitrate = outBitrate > 0 ? outBitrate : DEFAULT_VIDEO_BITRATE;
int rotateRender = 0;
File cacheFile = null;
try {
cacheFile = new File(getCompressedVideoPath(destDir));
} catch (FileNotFoundException e) {
e.printStackTrace();
}
if (Build.VERSION.SDK_INT < 18 && resultHeight > resultWidth && resultWidth != originalWidth && resultHeight != originalHeight) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 90;
rotateRender = 270;
} else if (Build.VERSION.SDK_INT > 20) {
if (rotationValue == 90) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 0;
rotateRender = 270;
} else if (rotationValue == 180) {
rotateRender = 180;
rotationValue = 0;
} else if (rotationValue == 270) {
int temp = resultHeight;
resultHeight = resultWidth;
resultWidth = temp;
rotationValue = 0;
rotateRender = 90;
}
}
//ParcelFileDescriptor descriptor = null; //"w" specify's write mode
FileDescriptor fileDescriptor = null;
try {
ParcelFileDescriptor descriptor = null;
if (sourcePath != null) {
descriptor = mContext.getContentResolver().openFileDescriptor(Uri.fromFile(new File(sourcePath)), "r");
} else {
descriptor = mContext.getContentResolver().openFileDescriptor(videoContentUri, "r");
}
fileDescriptor = descriptor.getFileDescriptor();
} catch (FileNotFoundException e) {
e.printStackTrace();
return false;
}
videoConvertFirstWrite = true;
boolean error = false;
long videoStartTime = startTime;
long time = System.currentTimeMillis();
if (resultWidth != 0 && resultHeight != 0) {
MP4Builder mediaMuxer = null;
MediaExtractor extractor = null;
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
Mp4Movie movie = new Mp4Movie();
movie.setCacheFile(cacheFile);
movie.setRotation(rotationValue);
movie.setSize(resultWidth, resultHeight);
mediaMuxer = new MP4Builder().createMovie(movie);
extractor = new MediaExtractor();
if (sourcePath != null && fileDescriptor != null) {
extractor.setDataSource(fileDescriptor);
} else if (context != null && videoContentUri != null) {
extractor.setDataSource(context, videoContentUri, null);
} else {
return false;
}
int videoIndex;
videoIndex = selectTrack(extractor, false);
if (videoIndex >= 0) {
MediaCodec decoder = null;
MediaCodec encoder = null;
InputSurface inputSurface = null;
OutputSurface outputSurface = null;
try {
long videoTime = -1;
boolean outputDone = false;
boolean inputDone = false;
boolean decoderDone = false;
int swapUV = 0;
int videoTrackIndex = -5;
int colorFormat;
int processorType = PROCESSOR_TYPE_OTHER;
String manufacturer = Build.MANUFACTURER.toLowerCase();
if (Build.VERSION.SDK_INT < 18) {
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
if (colorFormat == 0) {
throw new RuntimeException("no supported color format");
}
String codecName = codecInfo.getName();
if (codecName.contains("OMX.qcom.")) {
processorType = PROCESSOR_TYPE_QCOM;
if (Build.VERSION.SDK_INT == 16) {
if (manufacturer.equals("lge") || manufacturer.equals("nokia")) {
swapUV = 1;
}
}
} else if (codecName.contains("OMX.Intel.")) {
processorType = PROCESSOR_TYPE_INTEL;
} else if (codecName.equals("OMX.MTK.VIDEO.ENCODER.AVC")) {
processorType = PROCESSOR_TYPE_MTK;
} else if (codecName.equals("OMX.SEC.AVC.Encoder")) {
processorType = PROCESSOR_TYPE_SEC;
swapUV = 1;
} else if (codecName.equals("OMX.TI.DUCATI1.VIDEO.H264E")) {
processorType = PROCESSOR_TYPE_TI;
}
Log.e("tmessages", "codec = " + codecInfo.getName() + " manufacturer = " + manufacturer + "device = " + Build.MODEL);
} else {
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
}
Log.e("tmessages", "colorFormat = " + colorFormat);
int resultHeightAligned = resultHeight;
int padding = 0;
int bufferSize = resultWidth * resultHeight * 3 / 2;
if (processorType == PROCESSOR_TYPE_OTHER) {
if (resultHeight % 16 != 0) {
resultHeightAligned += (16 - (resultHeight % 16));
padding = resultWidth * (resultHeightAligned - resultHeight);
bufferSize += padding * 5 / 4;
}
} else if (processorType == PROCESSOR_TYPE_QCOM) {
if (!manufacturer.toLowerCase().equals("lge")) {
int uvoffset = (resultWidth * resultHeight + 2047) & ~2047;
padding = uvoffset - (resultWidth * resultHeight);
bufferSize += padding;
}
} else if (processorType == PROCESSOR_TYPE_TI) {
//resultHeightAligned = 368;
//bufferSize = resultWidth * resultHeightAligned * 3 / 2;
//resultHeightAligned += (16 - (resultHeight % 16));
//padding = resultWidth * (resultHeightAligned - resultHeight);
//bufferSize += padding * 5 / 4;
} else if (processorType == PROCESSOR_TYPE_MTK) {
if (manufacturer.equals("baidu")) {
resultHeightAligned += (16 - (resultHeight % 16));
padding = resultWidth * (resultHeightAligned - resultHeight);
bufferSize += padding * 5 / 4;
}
}
extractor.selectTrack(videoIndex);
if (startTime > 0) {
extractor.seekTo(startTime, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
} else {
extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
}
MediaFormat inputFormat = extractor.getTrackFormat(videoIndex);
MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, resultWidth, resultHeight);
outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate != 0 ? bitrate : 921600);
outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
if (Build.VERSION.SDK_INT < 18) {
outputFormat.setInteger("stride", resultWidth + 32);
outputFormat.setInteger("slice-height", resultHeight);
}
encoder = MediaCodec.createEncoderByType(MIME_TYPE);
encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (Build.VERSION.SDK_INT >= 18) {
inputSurface = new InputSurface(encoder.createInputSurface());
inputSurface.makeCurrent();
}
encoder.start();
decoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
if (Build.VERSION.SDK_INT >= 18) {
outputSurface = new OutputSurface();
} else {
outputSurface = new OutputSurface(resultWidth, resultHeight, rotateRender);
}
decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
decoder.start();
final int TIMEOUT_USEC = 2500;
ByteBuffer[] decoderInputBuffers = null;
ByteBuffer[] encoderOutputBuffers = null;
ByteBuffer[] encoderInputBuffers = null;
if (Build.VERSION.SDK_INT < 21) {
decoderInputBuffers = decoder.getInputBuffers();
encoderOutputBuffers = encoder.getOutputBuffers();
if (Build.VERSION.SDK_INT < 18) {
encoderInputBuffers = encoder.getInputBuffers();
}
}
while (!outputDone) {
if (!inputDone) {
boolean eof = false;
int index = extractor.getSampleTrackIndex();
if (index == videoIndex) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
ByteBuffer inputBuf;
if (Build.VERSION.SDK_INT < 21) {
inputBuf = decoderInputBuffers[inputBufIndex];
} else {
inputBuf = decoder.getInputBuffer(inputBufIndex);
}
int chunkSize = extractor.readSampleData(inputBuf, 0);
if (chunkSize < 0) {
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
} else {
decoder.queueInputBuffer(inputBufIndex, 0, chunkSize, extractor.getSampleTime(), 0);
extractor.advance();
}
}
} else if (index == -1) {
eof = true;
}
if (eof) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
}
}
}
boolean decoderOutputAvailable = !decoderDone;
boolean encoderOutputAvailable = true;
while (decoderOutputAvailable || encoderOutputAvailable) {
int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
encoderOutputAvailable = false;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (Build.VERSION.SDK_INT < 21) {
encoderOutputBuffers = encoder.getOutputBuffers();
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = encoder.getOutputFormat();
if (videoTrackIndex == -5) {
videoTrackIndex = mediaMuxer.addTrack(newFormat, false);
}
} else if (encoderStatus < 0) {
throw new RuntimeException("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else {
ByteBuffer encodedData;
if (Build.VERSION.SDK_INT < 21) {
encodedData = encoderOutputBuffers[encoderStatus];
} else {
encodedData = encoder.getOutputBuffer(encoderStatus);
}
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
if (info.size > 1) {
if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
if (mediaMuxer.writeSampleData(videoTrackIndex, encodedData, info, false)) {
didWriteData(false, false);
}
} else if (videoTrackIndex == -5) {
byte[] csd = new byte[info.size];
encodedData.limit(info.offset + info.size);
encodedData.position(info.offset);
encodedData.get(csd);
ByteBuffer sps = null;
ByteBuffer pps = null;
for (int a = info.size - 1; a >= 0; a--) {
if (a > 3) {
if (csd[a] == 1 && csd[a - 1] == 0 && csd[a - 2] == 0 && csd[a - 3] == 0) {
sps = ByteBuffer.allocate(a - 3);
pps = ByteBuffer.allocate(info.size - (a - 3));
sps.put(csd, 0, a - 3).position(0);
pps.put(csd, a - 3, info.size - (a - 3)).position(0);
break;
}
} else {
break;
}
}
MediaFormat newFormat = MediaFormat.createVideoFormat(MIME_TYPE, resultWidth, resultHeight);
if (sps != null && pps != null) {
newFormat.setByteBuffer("csd-0", sps);
newFormat.setByteBuffer("csd-1", pps);
}
videoTrackIndex = mediaMuxer.addTrack(newFormat, false);
}
}
outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
encoder.releaseOutputBuffer(encoderStatus, false);
}
if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
continue;
}
if (!decoderDone) {
int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
decoderOutputAvailable = false;
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = decoder.getOutputFormat();
Log.e("tmessages", "newFormat = " + newFormat);
} else if (decoderStatus < 0) {
throw new RuntimeException("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
} else {
boolean doRender;
if (Build.VERSION.SDK_INT >= 18) {
doRender = info.size != 0;
} else {
doRender = info.size != 0 || info.presentationTimeUs != 0;
}
if (endTime > 0 && info.presentationTimeUs >= endTime) {
inputDone = true;
decoderDone = true;
doRender = false;
info.flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
}
if (startTime > 0 && videoTime == -1) {
if (info.presentationTimeUs < startTime) {
doRender = false;
Log.e("tmessages", "drop frame startTime = " + startTime + " present time = " + info.presentationTimeUs);
} else {
videoTime = info.presentationTimeUs;
}
}
decoder.releaseOutputBuffer(decoderStatus, doRender);
if (doRender) {
boolean errorWait = false;
try {
outputSurface.awaitNewImage();
} catch (Exception e) {
errorWait = true;
Log.e("tmessages", e.getMessage());
}
if (!errorWait) {
if (Build.VERSION.SDK_INT >= 18) {
outputSurface.drawImage(false);
inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
inputSurface.swapBuffers();
} else {
int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
outputSurface.drawImage(true);
ByteBuffer rgbBuf = outputSurface.getFrame();
ByteBuffer yuvBuf = encoderInputBuffers[inputBufIndex];
yuvBuf.clear();
convertVideoFrame(rgbBuf, yuvBuf, colorFormat, resultWidth, resultHeight, padding, swapUV);
encoder.queueInputBuffer(inputBufIndex, 0, bufferSize, info.presentationTimeUs, 0);
} else {
Log.e("tmessages", "input buffer not available");
}
}
}
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
decoderOutputAvailable = false;
Log.e("tmessages", "decoder stream end");
if (Build.VERSION.SDK_INT >= 18) {
encoder.signalEndOfInputStream();
} else {
int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
encoder.queueInputBuffer(inputBufIndex, 0, 1, info.presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
}
}
}
}
}
}
if (videoTime != -1) {
videoStartTime = videoTime;
}
} catch (Exception e) {
Log.e("tmessages", e.getMessage());
error = true;
}
extractor.unselectTrack(videoIndex);
if (outputSurface != null) {
outputSurface.release();
}
if (inputSurface != null) {
inputSurface.release();
}
if (decoder != null) {
decoder.stop();
decoder.release();
}
if (encoder != null) {
encoder.stop();
encoder.release();
}
}
if (!error) {
readAndWriteTrack(extractor, mediaMuxer, info, videoStartTime, endTime, cacheFile, true);
}
} catch (Exception e) {
error = true;
Log.e("tmessages", e.getMessage());
} finally {
if (extractor != null) {
extractor.release();
}
if (mediaMuxer != null) {
try {
mediaMuxer.finishMovie(false);
} catch (Exception e) {
Log.e("tmessages", e.getMessage());
}
}
Log.e("tmessages", "time = " + (System.currentTimeMillis() - time));
}
} else {
didWriteData(true, true);
return false;
}
didWriteData(true, error);
cachedFile = cacheFile;
Log.e("ViratPath", path + "");
Log.e("ViratPath", cacheFile.getPath() + "");
if (sourcePath != null) {
Log.e("SourcePath", sourcePath);
}
if (videoContentUri != null) {
Log.e("VideoUri", videoContentUri.toString());
}
return true;
}
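One thing worth checking against the compressVideo signature above: its fifth parameter is the bitrate, so a call like compressVideo(filepath, "newmovies", 1080, 1920, 30) asks for a 30 bps stream, which by itself would explain the severe quality loss. A minimal sketch (same API as shown above; context and filepath are placeholders) that passes a realistic bitrate:
// Sketch only: the compressVideo(path, dir, width, height, bitrate) overload above declares URISyntaxException.
// 3_000_000 means roughly 3 Mbps, a reasonable target for 1080p.
try {
    String newPath = SiliCompressor.with(context)
            .compressVideo(filepath, "newmovies", 1080, 1920, 3_000_000);
} catch (URISyntaxException e) {
    e.printStackTrace();
}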
I am trying to create a video from camera frames using Camera1. Even when I record a long video, the result plays back too fast. I have tried changing bitrates and frame rates.
The image byte[] from each camera frame is queued and fed to the class below so that we can create a video from bitmaps or byte[] data.
What we have done:
We opened the camera and took frames from the onNextFrame method of Camera1. We could have used MediaRecorder to record the video, but rendering frames to a SurfaceView and recording with Camera1 at the same time is not possible, so we used MediaCodec to create an MP4 video from the frames, which lets us do both.
package com.sukshi.smartid_demo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.AsyncTask;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import static com.sukshi.smartid_demo.Camera.CameraSource.angle;
public class BitmapToVideoEncoder {
private static final String TAG = BitmapToVideoEncoder.class.getSimpleName();
private IBitmapToVideoEncoderCallback mCallback;
private File mOutputFile;
private Queue<Bitmap> mEncodeQueue = new ConcurrentLinkedQueue();
private MediaCodec mediaCodec;
private MediaMuxer mediaMuxer;
private Object mFrameSync = new Object();
private CountDownLatch mNewFrameLatch;
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static int mWidth;
private static int mHeight;
private static final int BIT_RATE = 1000000;
private static final int FRAME_RATE = 30; // Frames per second
private static final int I_FRAME_INTERVAL = 1;
private int mGenerateIndex = 0;
private int mTrackIndex;
private boolean mNoMoreFrames = false;
private boolean mAbort = false;
public interface IBitmapToVideoEncoderCallback {
void onEncodingComplete(File outputFile);
}
public BitmapToVideoEncoder(IBitmapToVideoEncoderCallback callback) {
mCallback = callback;
}
public boolean isEncodingStarted() {
return (mediaCodec != null) && (mediaMuxer != null) && !mNoMoreFrames && !mAbort;
}
public int getActiveBitmaps() {
return mEncodeQueue.size();
}
public void startEncoding(int width, int height, File outputFile) {
mWidth = width;
mHeight = height;
mOutputFile = outputFile;
String outputFileString;
try {
outputFileString = outputFile.getCanonicalPath();
} catch (IOException e) {
Log.e(TAG, "Unable to get path for " + outputFile);
return;
}
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
if (codecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
return;
}
Log.d(TAG, "found codec: " + codecInfo.getName());
int colorFormat;
try {
colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
} catch (Exception e) {
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
}
try {
mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
} catch (IOException e) {
Log.e(TAG, "Unable to create MediaCodec " + e.getMessage());
return;
}
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
try {
mediaMuxer = new MediaMuxer(outputFileString, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
Log.e(TAG,"MediaMuxer creation failed. " + e.getMessage());
return;
}
Log.d(TAG, "Initialization complete. Starting encoder...");
new EncodingAsync().execute();
}
public void stopEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to stop encoding since it never started");
return;
}
Log.d(TAG, "Stopping encoding");
mNoMoreFrames = true;
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void abortEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to abort encoding since it never started");
return;
}
Log.d(TAG, "Aborting encoding");
mNoMoreFrames = true;
mAbort = true;
mEncodeQueue = new ConcurrentLinkedQueue(); // Drop all frames
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void queueFrame(byte[] nextData) {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to queue frame. Encoding not started");
return;
}
Log.d(TAG, "Queueing frame");
Bitmap OriginalBitmap = BitmapFactory.decodeByteArray(nextData, 0, nextData.length);
Matrix matrix = new Matrix();
matrix.postRotate(angle);
Bitmap rotatedbitmap = Bitmap.createBitmap(OriginalBitmap,0,0,OriginalBitmap.getWidth(),OriginalBitmap.getHeight(),matrix,true);
mEncodeQueue.add(rotatedbitmap);
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
class EncodingAsync extends AsyncTask<Void,Void,File>{
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
protected File doInBackground(Void... voids) {
Log.d(TAG, "Encoder started");
while(true) {
if (mNoMoreFrames && (mEncodeQueue.size() == 0)) break;
Bitmap bitmap = mEncodeQueue.poll();
if (bitmap == null) {
synchronized (mFrameSync) {
mNewFrameLatch = new CountDownLatch(1);
}
try {
mNewFrameLatch.await();
} catch (InterruptedException e) {}
bitmap = mEncodeQueue.poll();
}
if (bitmap == null) continue;
byte[] byteConvertFrame = getNV21(bitmap.getWidth(), bitmap.getHeight(), bitmap);
long TIMEOUT_USEC = 500000;
int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
long ptsUsec = computePresentationTime(mGenerateIndex, FRAME_RATE);
if (inputBufIndex >= 0) {
final ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufIndex);
inputBuffer.clear();
inputBuffer.put(byteConvertFrame);
mediaCodec.queueInputBuffer(inputBufIndex, 0, byteConvertFrame.length, ptsUsec, 0);
mGenerateIndex++;
}
MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
Log.e(TAG, "No output from encoder available");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = mediaCodec.getOutputFormat();
mTrackIndex = mediaMuxer.addTrack(newFormat);
mediaMuxer.start();
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else if (mBufferInfo.size != 0) {
ByteBuffer encodedData = mediaCodec.getOutputBuffer(encoderStatus);
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
} else {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
mediaCodec.releaseOutputBuffer(encoderStatus, false);
}
}
}
release();
if (mAbort) {
mOutputFile.delete();
return null;
} else {
return mOutputFile;
}
}
@Override
protected void onPostExecute(File outputFile) {
super.onPostExecute(outputFile);
mCallback.onEncodingComplete(outputFile);
}
}
private void release() {
if (mediaCodec != null) {
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
Log.d(TAG,"RELEASE CODEC");
}
if (mediaMuxer != null) {
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;
Log.d(TAG,"RELEASE MUXER");
}
}
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
private static int selectColorFormat(MediaCodecInfo codecInfo,
String mimeType) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo
.getCapabilitiesForType(mimeType);
for (int i = 0; i < capabilities.colorFormats.length; i++) {
int colorFormat = capabilities.colorFormats[i];
if (isRecognizedFormat(colorFormat)) {
return colorFormat;
}
}
return 0; // not reached
}
private static boolean isRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle for
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
return true;
default:
return false;
}
}
private byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
int[] argb = new int[inputWidth * inputHeight];
scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
scaled.recycle();
return yuv;
}
private void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
}
index++;
}
}
}
private long computePresentationTime(long frameIndex, int framerate) {
return 132 + frameIndex * 1000000 / framerate;
}
}
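One likely cause of the fast playback is computePresentationTime() above: it advances the timestamp by exactly 1/FRAME_RATE (1/30 s) per frame no matter how quickly the camera actually delivers frames, so if the camera only produces 10-15 fps the encoded video runs at two to three times real speed. A sketch (illustrative names; it assumes you can record System.nanoTime() when each frame arrives) that stamps frames with real capture time instead:
// Sketch: derive the presentation time from the frame's actual capture time.
private long mFirstFrameNanos = -1;

private long presentationTimeUsFor(long frameCaptureNanos) {
    if (mFirstFrameNanos < 0) {
        mFirstFrameNanos = frameCaptureNanos;
    }
    return (frameCaptureNanos - mFirstFrameNanos) / 1000L; // MediaCodec expects microseconds
}
// In queueFrame(): store System.nanoTime() alongside the bitmap, and use the value returned
// here instead of computePresentationTime(mGenerateIndex, FRAME_RATE) when queueing input.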
You can see the main ways to use MediaCodec in the bigflake samples, for example this one, which generates a movie using OpenGL ES:
https://bigflake.com/mediacodec/EncodeAndMuxTest.java.txt
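The core of that sample, roughly (a condensed sketch, not the full test):
// Condensed sketch of the EncodeAndMuxTest approach: render frames with OpenGL ES into the
// encoder's input Surface and drain the encoder output into a MediaMuxer.
MediaFormat format = MediaFormat.createVideoFormat("video/avc", 320, 240);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, 2_000_000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Surface inputSurface = encoder.createInputSurface(); // draw each frame here with GL
encoder.start();
// For each frame: draw with OpenGL ES, set the presentation time on the EGL surface,
// call eglSwapBuffers(), then drain the encoder output into the muxer.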
I am recording a video from a byte array using MediaCodec and MediaMuxer, but the resulting file cannot be played; the system says "File incorrect". You can see the code below. I need to get a video file in MP4 format. Please tell me what the problem is.
//init - this function is called once in the beginning.
private static final String MIME_TYPE = "video/avc";
private final static int MAX_WIDTH = 320;
private final static int MAX_HEIGHT = 240;
private final static int VIDEO_BITRATE = 2000000;
private static final int FRAME_RATE = 20;
private final static int VIDEO_IFRAME_INTERVAL = 10;
public boolean initCodec() {
bufferInfo = new MediaCodec.BufferInfo();
countFile++;
String savePath = "uonmap_video_" + countFile + ".mp4";
File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), savePath);
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
if (codecInfo == null) {
return false;
}
colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_BITRATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VIDEO_IFRAME_INTERVAL);
try {
mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
} catch (IOException e) {
e.printStackTrace();
}
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
try {
mMuxer = new MediaMuxer(file.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException ioe) {
throw new RuntimeException("MediaMuxer creation failed", ioe);
}
isStart = true;
mTrackIndex = -1;
return true;
}
//encode function - called every time a new byte array arrives
public synchronized void encode(byte[] data, boolean endOfStream) {
final int TIMEOUT_USEC = 50;
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
int inputBufferIndex = mediaCodec.dequeueInputBuffer(0);
if (inputBufferIndex >= 0) {
try {
if (isPlanar) {
data = YV12toYUV420Planar(data);
} else {
data = YV12toYUV420PackedSemiPlanar(data);
}
} catch (IndexOutOfBoundsException ex) {
}
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(data);
mediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, 50, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
} else {
return;
}
ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
while (true) {
int encoderStatus = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mediaCodec.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = mediaCodec.getOutputFormat();
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
} else if (encoderStatus < 0) {
} else {
ByteBuffer encodedData = outputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
bufferInfo.size = 0;
}
if (bufferInfo.size != 0) {
if (!mMuxerStarted) {
throw new RuntimeException("muxer hasn't started");
}
bufferInfo.flags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
bufferInfo.presentationTimeUs = 50;
mMuxer.writeSampleData(mTrackIndex, encodedData, bufferInfo);
}
mediaCodec.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
}
}
public byte[] YV12toYUV420PackedSemiPlanar(final byte[] input) {
/*
* COLOR_TI_FormatYUV420PackedSemiPlanar is NV12
* We convert by putting the corresponding U and V bytes together (interleaved).
*/
final int frameSize = width * height;
final int qFrameSize = frameSize/4;
byte[] output = new byte[input.length];
System.arraycopy(input, 0, output, 0, frameSize);
for (int i = 0; i < (qFrameSize); i++)
{
byte b = (input[frameSize + qFrameSize + i - 32 - 320]);
output[frameSize + i*2] = b;
output[frameSize + i*2 + 1] = (input[frameSize + i - 32 - 320]);
}
System.arraycopy(input, 0, output, 0, frameSize); // Y
for (int i = 0; i < qFrameSize; i++) {
output[frameSize + i*2] = input[frameSize + i + qFrameSize]; // Cb (U)
output[frameSize + i*2 + 1] = input[frameSize + i]; // Cr (V)
}
return output;
}
Log:
05-27 18:13:56.897 32196-32320/com.example.sasha.myrtc D/dalvikvm: GC_FOR_ALLOC freed 227K, 10% free 5333K/5876K, paused 24ms, total 24ms
05-27 18:13:56.907 32196-32434/com.example.sasha.myrtc V/MPEG4Writer: decoding time: 0 and ctts offset time: 0
05-27 18:13:56.907 32196-32434/com.example.sasha.myrtc V/MPEG4Writer: Video media time stamp: 0 and previous paused duration 50
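Two things in the encode() method above differ from the usual MediaCodec pattern: every input buffer is queued with BUFFER_FLAG_CODEC_CONFIG, and every output sample is written with the same presentationTimeUs (50) and a forced BUFFER_FLAG_SYNC_FRAME. A sketch of the more common approach (frameIndex is an assumed counter; FRAME_RATE is the constant already defined above):
// Sketch: queue raw frames with no special flags and monotonically increasing timestamps,
// and keep the BufferInfo values the encoder reports when writing to the muxer.
long ptsUs = frameIndex * 1_000_000L / FRAME_RATE; // frame N is shown at N / FRAME_RATE seconds
mediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, ptsUs, 0);
frameIndex++;
// ... and when draining output, do not overwrite bufferInfo.flags or bufferInfo.presentationTimeUs before:
mMuxer.writeSampleData(mTrackIndex, encodedData, bufferInfo);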
I have working code that converts WAV to M4A. Is there any way to convert MP3 to M4A by modifying my existing code? I don't want to use FFmpeg or native code for this. The function below works for WAV to M4A conversion but not for MP3 to M4A.
public void convertAudio(final String filename) {
final String AUDIO_RECORDING_FILE_NAME = Config.mp3Audio; // Input file (MP3 here; this code originally expected raw PCM/WAV)
final String COMPRESSED_AUDIO_FILE_NAME = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC).getPath()
+ "/convertedmp4.m4a"; // Output MP4/M4A file
final String COMPRESSED_AUDIO_FILE_MIME_TYPE = "audio/mp4a-latm";
final int COMPRESSED_AUDIO_FILE_BIT_RATE = 320000; // 320 kbps
final int SAMPLING_RATE = 22050;
final int BUFFER_SIZE = 22050;
final int CODEC_TIMEOUT_IN_MS = 5000;
String LOGTAG = "CONVERT AUDIO";
try {
File inputFile = new File(filename);
FileInputStream fis = new FileInputStream(inputFile);
File outputFile = new File(COMPRESSED_AUDIO_FILE_NAME);
if (outputFile.exists()) outputFile.delete();
MediaMuxer mux = new MediaMuxer(outputFile.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
MediaFormat outputFormat = MediaFormat.createAudioFormat(COMPRESSED_AUDIO_FILE_MIME_TYPE, SAMPLING_RATE, 1);
outputFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, COMPRESSED_AUDIO_FILE_BIT_RATE);
outputFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 16384);
MediaCodec codec = MediaCodec.createEncoderByType(COMPRESSED_AUDIO_FILE_MIME_TYPE);
codec.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
codec.start();
ByteBuffer[] codecInputBuffers = codec.getInputBuffers(); // Note: Array of buffers
ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
MediaCodec.BufferInfo outBuffInfo = new MediaCodec.BufferInfo();
byte[] tempBuffer = new byte[BUFFER_SIZE];
boolean hasMoreData = true;
double presentationTimeUs = 0;
int audioTrackIdx = 0;
int totalBytesRead = 0;
int percentComplete = 0;
do {
int inputBufIndex = 0;
while (inputBufIndex != -1 && hasMoreData) {
inputBufIndex = codec.dequeueInputBuffer(CODEC_TIMEOUT_IN_MS);
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
dstBuf.clear();
int bytesRead = fis.read(tempBuffer, 0, dstBuf.limit());
if (bytesRead == -1) { // -1 implies EOS
hasMoreData = false;
codec.queueInputBuffer(inputBufIndex, 0, 0, (long) presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
totalBytesRead += bytesRead;
dstBuf.put(tempBuffer, 0, bytesRead);
codec.queueInputBuffer(inputBufIndex, 0, bytesRead, (long) presentationTimeUs, 0);
presentationTimeUs = 1000000l * (totalBytesRead / 2) / SAMPLING_RATE;
}
}
}
int outputBufIndex = 0;
while (outputBufIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
outputBufIndex = codec.dequeueOutputBuffer(outBuffInfo, CODEC_TIMEOUT_IN_MS);
if (outputBufIndex >= 0) {
ByteBuffer encodedData = codecOutputBuffers[outputBufIndex];
encodedData.position(outBuffInfo.offset);
encodedData.limit(outBuffInfo.offset + outBuffInfo.size);
if ((outBuffInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 && outBuffInfo.size != 0) {
codec.releaseOutputBuffer(outputBufIndex, false);
} else {
mux.writeSampleData(audioTrackIdx, codecOutputBuffers[outputBufIndex], outBuffInfo);
codec.releaseOutputBuffer(outputBufIndex, false);
}
} else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
outputFormat = codec.getOutputFormat();
audioTrackIdx = mux.addTrack(outputFormat);
mux.start();
} else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
} else if (outputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// NO OP
} else {
}
}
percentComplete = (int) Math.round(((float) totalBytesRead / (float) inputFile.length()) * 100.0);
} while (outBuffInfo.flags != MediaCodec.BUFFER_FLAG_END_OF_STREAM);
fis.close();
mux.stop();
mux.release();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
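The reason this works for WAV but not MP3 is that the loop feeds raw file bytes straight into an AAC encoder: a WAV file is essentially raw PCM after its header, but an MP3 is compressed and has to be decoded to PCM first. A sketch of that extra decode step using MediaExtractor and a decoder MediaCodec (illustrative names; error handling omitted):
// Sketch: decode the MP3 to PCM before handing samples to the AAC encoder above.
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(filename);
MediaFormat inputFormat = extractor.getTrackFormat(0); // assumes track 0 is the audio track
extractor.selectTrack(0);
MediaCodec decoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME)); // "audio/mpeg" for MP3
decoder.configure(inputFormat, null, null, 0);
decoder.start();
// Feed extractor.readSampleData(...) into the decoder's input buffers, and pass the decoder's
// PCM output buffers to the encoder input loop instead of fis.read(...).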
I want to use the Android MediaCodec and MediaMuxer APIs to encode a video to a new format and save the result to a new .mp4 file.
This is my code:
final String MIME_TYPE = "video/avc";
final int BIT_RATE = 128000; // 128kbps
final int SAMPLING_RATE = 44100;
final int CODEC_TIMEOUT_IN_MS = 5000;
boolean suc=false;
try {
File inputFile = new File(filePath);
FileInputStream fis = new FileInputStream(inputFile);
File outputFile = new File(dstMediaPath);
if (outputFile.exists())
outputFile.delete();
MediaMuxer mux = new MediaMuxer(outputFile.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
MediaFormat outputFormat = MediaFormat.createAudioFormat(MIME_TYPE,SAMPLING_RATE, 1);
outputFormat = MediaFormat.createVideoFormat("video/avc",
320, 240);
outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, 125000);
outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
MediaCodec codec = MediaCodec.createEncoderByType(MIME_TYPE);
codec.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
codec.start();
ByteBuffer[] codecInputBuffers = codec.getInputBuffers(); // Note: Array of buffers
ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
MediaCodec.BufferInfo outBuffInfo = new MediaCodec.BufferInfo();
byte[] tempBuffer = new byte[4096];
boolean hasMoreData = true;
double presentationTimeUs = 0;
int audioTrackIdx = 0;
int totalBytesRead = 0;
int percentComplete;
do {
int inputBufIndex = 0;
while (inputBufIndex != -1 && hasMoreData) {
inputBufIndex = codec.dequeueInputBuffer(CODEC_TIMEOUT_IN_MS);
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
dstBuf.clear();
int bytesRead = fis.read(tempBuffer, 0, tempBuffer.length);
if (bytesRead == -1) { // -1 implies EOS
hasMoreData = false;
codec.queueInputBuffer(inputBufIndex, 0, 0, (long) presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
totalBytesRead += bytesRead;
dstBuf.put(tempBuffer, 0, bytesRead);
codec.queueInputBuffer(inputBufIndex, 0, bytesRead, (long) presentationTimeUs, 0);
presentationTimeUs = 1000000l * (totalBytesRead / 2) / SAMPLING_RATE;
}
}
}
int outputBufIndex = 0;
while (outputBufIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
outputBufIndex = codec.dequeueOutputBuffer(outBuffInfo, CODEC_TIMEOUT_IN_MS);
if (outputBufIndex >= 0) {
ByteBuffer encodedData = codecOutputBuffers[outputBufIndex];
encodedData.position(outBuffInfo.offset);
encodedData.limit(outBuffInfo.offset + outBuffInfo.size);
if ((outBuffInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 && outBuffInfo.size != 0) {
codec.releaseOutputBuffer(outputBufIndex, false);
outBuffInfo.size=0;
} else {
mux.writeSampleData(audioTrackIdx, codecOutputBuffers[outputBufIndex], outBuffInfo);
codec.releaseOutputBuffer(outputBufIndex, false);
}
} else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
outputFormat = codec.getOutputFormat();
Log.v(TAG, "Output format changed - " + outputFormat);
audioTrackIdx = mux.addTrack(outputFormat);
mux.start();
} else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
Log.e(TAG, "Output buffers changed during encode!");
} else if (outputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// NO OP
} else {
Log.e(TAG, "Unknown return code from dequeueOutputBuffer - " + outputBufIndex);
}
}
percentComplete = (int) Math.round(((float) totalBytesRead / (float) inputFile.length()) * 100.0);
} while (outBuffInfo.flags != MediaCodec.BUFFER_FLAG_END_OF_STREAM);
fis.close();
mux.stop();
mux.release();
Log.v(TAG, "Compression done ...");
suc=true;
} catch (FileNotFoundException e) {
Log.e(TAG, "File not found!", e);
suc=false;
} catch (IOException e) {
Log.e(TAG, "IO exception!", e);
suc=false;
}
return suc;
When I open the output file it can be played, but it shows the wrong result. Can anyone help me understand what I am doing wrong?
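For what it's worth, the loop above reads raw bytes of an already compressed video file and queues them as if they were uncompressed YUV frames, which would explain the broken output. A sketch of the missing decode step (a decoder MediaCodec fed by MediaExtractor; illustrative names, error handling omitted):
// Sketch: decode the source video to raw frames before re-encoding it.
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(filePath); // filePath from the code above
int videoTrack = -1;
for (int i = 0; i < extractor.getTrackCount(); i++) {
    if (extractor.getTrackFormat(i).getString(MediaFormat.KEY_MIME).startsWith("video/")) {
        videoTrack = i;
        break;
    }
}
extractor.selectTrack(videoTrack);
MediaFormat inputFormat = extractor.getTrackFormat(videoTrack);
MediaCodec decoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
decoder.configure(inputFormat, null /* or a Surface */, null, 0);
decoder.start();
// Feed extractor.readSampleData(...) into the decoder, and queue the decoded frames (with their
// presentationTimeUs) into the encoder configured above instead of fis.read(...).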