MediaCodec video encoding from a ByteBuffer fails - Android

I'm trying to encode the OpenGL output of my program to an mp4 file (Android 4.4, Huawei P6, using MediaCodec and MediaMuxer). Since my app's lifecycle is pretty closely tied to a GLSurfaceView, I don't want to risk breaking this fragile part of my app by replacing that with a custom surface that's encodable directly, so I initially wanted to do something less intrusive: just copy the pixels over to main memory and encode from a ByteBuffer. I could not find any complete examples of this, so I used the Grafika code (https://github.com/google/grafika) as a starting point.
It seems to work OK, and at one point it did produce a working MP4 file, but it's borked in its current state and I can't figure out what I'm doing wrong.
Encoding/muxing seems to work fine, until I actually close the Encoder, at which point I get the following error in the logcat:
07-06 20:17:24.890: A/ACodec(24650): frameworks/av/media/libstagefright/ACodec.cpp:3162 CHECK_EQ( (int)info->mStatus,(int)BufferInfo::OWNED_BY_COMPONENT) failed: 0 vs. 1
07-06 20:17:24.890: A/libc(24650): Fatal signal 6 (SIGABRT) at 0x0000604a (code=-6), thread 24795 (CodecLooper)
And then my app closes.
Can anyone tell me what this error actually means? It looks like I'm either holding on to a buffer (input or output?) that I should not hold on to, or have released a buffer prematurely that I should have held on to. And which buffer is it: in, out, or the info buffer? I'm very confused...
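For reference, the Grafika code I started from signals end-of-stream and drains the encoder before calling stop(). I haven't verified that this is my problem, but a minimal sketch of that kind of shutdown, reusing the field names from my class below, would be:
// Untested sketch: queue an empty input buffer carrying the EOS flag, then
// drain until the encoder reports BUFFER_FLAG_END_OF_STREAM, and only then
// call stop()/release().
private void signalEndOfStreamAndDrain()
{
    int inputBufferIndex = m_MediaEncoder.dequeueInputBuffer(-1); // block until a buffer is free
    if( inputBufferIndex >= 0 )
    {
        long totaltime = (long)(m_numRecordedFrames+1) * 1000000 / (long)m_frameRate;
        m_MediaEncoder.queueInputBuffer(inputBufferIndex, 0, 0, totaltime,
            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    }
    // note: my drainEncoder() below bails out on INFO_TRY_AGAIN_LATER, so with its
    // 10 us timeout it would need to be called in a loop until EOS actually arrives
    drainEncoder();
}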
For reference, here's the code I'm using to encode:
package bla;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.GLES20;
import android.os.Build;
import android.util.Log;
public class VideoEncoder
{
public static final boolean debugLog = true;
private static final String TAG = "bla";
private MediaCodec m_MediaEncoder = null;
private MediaMuxer mMuxer;
private MediaCodec.BufferInfo mBufferInfo;
private int mTrackIndex;
private boolean mMuxerStarted;
private int m_encodeWidth;
private int m_encodeHeight;
private int m_frameRate = 30;
private int m_frameInterval = 5;
private String m_fileName;
private int m_numRecordedFrames=0;
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public boolean Start( String filename, int width, int height, float fps )
{
if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2 )
{
m_encodeWidth = width;
m_encodeHeight = height;
m_fileName = new String( filename ); // make a copy
int bitrate = 4000000;
if( debugLog ) Log.i(TAG, String.format("Bitrate for encoder set to %d", bitrate));
m_MediaEncoder = MediaCodec.createEncoderByType("video/avc");
MediaFormat mMediaFormat = MediaFormat.createVideoFormat("video/avc", m_encodeWidth, m_encodeHeight );
mMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
// mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, m_frameRate);
mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
mMediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_Format32bitBGRA8888);
// mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, m_frameInterval);
mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
m_MediaEncoder.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
m_MediaEncoder.start();
mBufferInfo = new MediaCodec.BufferInfo();
mTrackIndex = -1;
mMuxerStarted = false;
try
{ // delete the video file if it already exists
FileOutputStream videoOutputFileStream = new FileOutputStream( m_fileName );
videoOutputFileStream.close();
}
catch (FileNotFoundException e)
{
Log.e(TAG,"File not found when creating movie file");
e.printStackTrace();
return false;
}
catch (IOException e)
{
Log.e(TAG,"I/O exception creating movie file");
e.printStackTrace();
return false;
}
// create the muxer object
try
{ // delete the video file if it already exists
mMuxer = new MediaMuxer( m_fileName, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
catch (IOException e)
{
Log.e(TAG,"I/O exception creating muxer");
e.printStackTrace();
return false;
}
if( debugLog) Log.i(TAG,"Started Movie Encoder");
return true;
}
else
{
return false;
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public void Stop()
{
if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2 )
{
if( debugLog) Log.i(TAG,"Stopping Movie Encoder");
if( m_MediaEncoder != null )
{
// if( debugLog) Log.i(TAG,"Flush()");
// m_MediaEncoder.flush();
if( debugLog) Log.i(TAG,"Stop()");
m_MediaEncoder.stop();
if( debugLog) Log.i(TAG,"Release()");
m_MediaEncoder.release();
if( debugLog) Log.i(TAG,"set null");
m_MediaEncoder = null;
Log.i(TAG,"Stopping Muxer");
if( m_numRecordedFrames > 0 )
{
if( debugLog) Log.i(TAG,"Stop()");
mMuxer.stop();
if( debugLog) Log.i(TAG,"Release()");
mMuxer.release();
}
if( debugLog) Log.i(TAG,"set null");
mMuxer = null;
}
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public void EncodeCurrentFrameBuffer()
{
if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2 )
{
if( debugLog) Log.i(TAG,"Saving movie frame START");
if( debugLog) Log.i(TAG,"About to request input buffer");
ByteBuffer[] videoInputBuffers = m_MediaEncoder.getInputBuffers();
int inputBufferIndex = m_MediaEncoder.dequeueInputBuffer(10);
if( debugLog) Log.i(TAG,"Got input buffer: " + Integer.toString(inputBufferIndex) );
if(inputBufferIndex >= 0)
{
if( debugLog) Log.i(TAG,String.format("Encoding frame of %d x %d", m_encodeWidth, m_encodeHeight ));
ByteBuffer inputBuffer = videoInputBuffers[inputBufferIndex];
inputBuffer.clear();
if( debugLog) Log.i(TAG,"Reading pixels from OpenGL");
GLES20.glReadPixels( 0, 0, m_encodeWidth, m_encodeHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, inputBuffer );
long totaltime = (long)(m_numRecordedFrames+1) * 1000000 / (long)m_frameRate; // presentation time in microseconds
if( debugLog) Log.i(TAG,"Sending input buffer to encoder");
m_MediaEncoder.queueInputBuffer(inputBufferIndex, 0, m_encodeWidth * m_encodeHeight * 4, totaltime, 0);
drainEncoder();
}
else
{
videoInputBuffers = null;
}
if( debugLog) Log.i(TAG,"Saving movie frame END");
m_numRecordedFrames++;
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public void drainEncoder()
{
final int TIMEOUT_USEC = 10;
ByteBuffer[] encoderOutputBuffers = m_MediaEncoder.getOutputBuffers();
while (true)
{
int encoderStatus = m_MediaEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER)
{
// no output available yet
if (debugLog) Log.i(TAG, "no output available (yet), break out of while");
break; // out of while
}
else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
{
// not expected for an encoder
encoderOutputBuffers = m_MediaEncoder.getOutputBuffers();
}
else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)
{
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted)
{
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = m_MediaEncoder.getOutputFormat();
if( debugLog) Log.i(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
if( debugLog) Log.i(TAG, "muxer started");
}
else if (encoderStatus < 0)
{
if( debugLog) Log.i(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
// let's ignore it
}
else
{
if( debugLog) Log.i(TAG,"EncoderStatus = " + Integer.toString(encoderStatus));
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null)
{
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0)
{
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (debugLog) Log.i(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0)
{
if (!mMuxerStarted)
{
throw new RuntimeException("muxer hasn't started");
}
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
if( debugLog) Log.i(TAG, "writing sample data to muxer");
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if( debugLog) Log.i(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" + mBufferInfo.presentationTimeUs);
}
if( debugLog) Log.i(TAG, "releasing output buffer");
m_MediaEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
{
if (debugLog) Log.i(TAG, "end of stream reached");
break; // out of while
}
}
}
}
}
Kind regards,
Nils Desle

Related

Video steganography using MediaCodec

I need to create videos with data hidden in them. I managed to extract video frames as NV21 buffers using a MediaCodec decoder and save them; then I create an mp4 file from the frames using a MediaCodec encoder.
The class below is responsible for saving the frame files when we are encoding, or for checking the embedded value when we want to extract data from the stego video.
public class ExtractMpegFramesBufferDecoder {
private static final String TAG = "ExtractMpegFramesDec";
private static final boolean VERBOSE = true; // lots of logging
// where to find files (note: requires WRITE_EXTERNAL_STORAGE permission)
private File STORE_FRAME_DIRECTORY;
private String INPUT_FILE;
private int frameRate; // input frame rate
private int saveWidth;
private int saveHeight;
private int decodeCount;
private Handler _progressBarHandler;
private int duration;
//
private int MAX_FRAMES;
private boolean fromDecode;
//
public ExtractMpegFramesBufferDecoder(File storeFrameDirectory, String inputVideoPath, int frameRate
, int saveWidth, int saveHeight
, double duration, int rotation
, Handler _progressBarHandler) {
this.STORE_FRAME_DIRECTORY = storeFrameDirectory;
this.INPUT_FILE = inputVideoPath;
this.frameRate = frameRate;
this.saveWidth = saveWidth;
this.saveHeight = saveHeight;
this._progressBarHandler = _progressBarHandler;
this.duration = (int) duration;
}
/**
* Extracts frames from an MP4 using a ByteBuffer decoder (no output Surface).
* <p>
* In the encode path each decoded frame is saved to disk; in the decode path
* the embedded marker value is checked instead.
*/
public void extractMpegFrames(int maxFrame, boolean fromDecode) throws IOException {
MediaCodec decoder = null;
MediaExtractor extractor = null;
MAX_FRAMES = maxFrame;
this.fromDecode = fromDecode;
try {
File inputFile = new File(INPUT_FILE); // must be an absolute path
// The MediaExtractor error messages aren't very useful. Check to see if the input
// file exists so we can throw a better one if it's not there.
if (!inputFile.canRead()) {
throw new FileNotFoundException("Unable to read " + inputFile);
}
extractor = new MediaExtractor();
extractor.setDataSource(inputFile.toString());
int trackIndex = selectTrack(extractor);
if (trackIndex < 0) {
throw new RuntimeException("No video track found in " + inputFile);
}
extractor.selectTrack(trackIndex);
MediaFormat format = extractor.getTrackFormat(trackIndex);
if (VERBOSE) {
Log.d(TAG, "Video size is " + format.getInteger(MediaFormat.KEY_WIDTH) + "x" +
format.getInteger(MediaFormat.KEY_HEIGHT));
}
// Create a MediaCodec decoder, and configure it with the MediaFormat from the
// extractor. It's very important to use the format from the extractor because
// it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
String mime = format.getString(MediaFormat.KEY_MIME);
decoder = MediaCodec.createDecoderByType(mime);
decoder.configure(format, null, null, 0);
decoder.start();
doExtract(extractor, trackIndex, decoder);
} finally {
if (decoder != null) {
decoder.stop();
decoder.release();
decoder = null;
}
if (extractor != null) {
extractor.release();
extractor = null;
}
}
}
/**
* Selects the video track, if any.
*
* @return the track index, or -1 if no video track is found.
*/
private int selectTrack(MediaExtractor extractor) {
// Select the first video track we find, ignore the rest.
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
if (VERBOSE) {
Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
}
return i;
}
}
return -1;
}
/**
* Work loop.
*/
public void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder) throws IOException {
final int TIMEOUT_USEC = 10000;
ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int inputChunk = 0;
decodeCount = 0;
long frameSaveTime = 0;
boolean outputDone = false;
boolean inputDone = false;
ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers();
MediaFormat decoderOutputFormat = null;
long rawSize = 0;
while (!outputDone) {
if (VERBOSE) Log.d(TAG, "loop");
// Feed more data to the decoder.
if (!inputDone) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
// Read the sample data into the ByteBuffer. This neither respects nor
// updates inputBuf's position, limit, etc.
int chunkSize = extractor.readSampleData(inputBuf, 0);
if (chunkSize < 0) {
// End of stream -- send empty frame with EOS flag set.
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
if (VERBOSE) Log.d(TAG, "sent input EOS");
} else {
if (extractor.getSampleTrackIndex() != trackIndex) {
Log.w(TAG, "WEIRD: got sample from track " +
extractor.getSampleTrackIndex() + ", expected " + trackIndex);
}
long presentationTimeUs = extractor.getSampleTime();
decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
presentationTimeUs, 0 /*flags*/);
if (VERBOSE) {
Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
chunkSize);
}
inputChunk++;
extractor.advance();
}
} else {
if (VERBOSE) Log.d(TAG, "input buffer not available");
}
}
if (!outputDone) {
int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (VERBOSE) Log.d(TAG, "no output from decoder available");
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// refresh our buffer references; we decode to ByteBuffers here, not a Surface
if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
decoderOutputBuffers = decoder.getOutputBuffers();
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = decoder.getOutputFormat();
decoderOutputFormat = newFormat;
if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
} else if (decoderStatus < 0) {
Log.e(TAG, "unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
} else { // decoderStatus >= 0
if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
" (size=" + info.size + ")");
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "output EOS");
outputDone = true;
}
ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];
outputFrame.position(info.offset);
outputFrame.limit(info.offset + info.size);
rawSize += info.size;
if (info.size == 0) {
if (VERBOSE) Log.d(TAG, "got empty frame");
} else {
// if it's decode then check the altered value
// else save the frames
if (fromDecode) {
outputFrame.rewind();
byte[] data = new byte[outputFrame.remaining()];
outputFrame.get(data);
int size = saveWidth * saveHeight;
int offset = size;
int[] pixels = new int[size];
int u, v, y1, y2, y3, y4;
int uvIndex = 0;
if (decodeCount == 1) {
// i walks the Y plane (and the final pixels)
// k walks the interleaved U/V samples
for (int i = 0, k = 0; i < size; i += 2, k += 2) {
y1 = data[i] & 0xff;
y2 = data[i + 1] & 0xff;
y3 = data[saveWidth + i] & 0xff;
y4 = data[saveWidth + i + 1] & 0xff;
u = data[offset + k] & 0xff;
v = data[offset + k + 1] & 0xff;
// getting size
if (uvIndex == 0) {
int specialByte1P1 = u & 15;
int specialByte1P2 = v & 15;
int specialCharacter1 = (specialByte1P1 << 4) | specialByte1P2;
if (specialCharacter1 != 17) {
throw new IllegalArgumentException("value has changed");
}
}
uvIndex++;
if (i != 0 && (i + 2) % saveWidth == 0)
i += saveWidth;
}
}
} else {
outputFrame.rewind();
byte[] data = new byte[outputFrame.remaining()];
outputFrame.get(data);
try {
File outputFile = new File(STORE_FRAME_DIRECTORY,
String.format(Locale.US, "frame_%d.frame", decodeCount));
FileOutputStream stream = new FileOutputStream(outputFile.getAbsoluteFile());
stream.write(data);
stream.close(); // don't leak the stream
} catch (FileNotFoundException e1) {
e1.printStackTrace();
}
}
decodeCount++;
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "output EOS");
outputDone = true;
}
decoder.releaseOutputBuffer(decoderStatus, false);
}
}
}
int numSaved = (frameRate < decodeCount) ? frameRate : decodeCount;
if (numSaved > 0) {
Log.d(TAG, "Saving " + numSaved + " frames took " +
(frameSaveTime / numSaved / 1000) + " us per frame");
}
}
public int getDecodeCount() {
return decodeCount;
}
}
In the class below I encode the frames, alter one U/V pair of frame 1 (storing the number 17 in the LSBs of the first U and V), and build the mp4 using the MediaCodec encoder.
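To make the bit twiddling concrete, here is a standalone worked example of the nibble packing (u0 and v0 are made-up example chroma bytes, not names from my code):
int marker = 17; // 0b00010001, the start/end marker
int u0 = 0xA7, v0 = 0x5C; // example chroma bytes
int highNibble = (marker & 240) >> 4; // 0b0001, goes into the low nibble of U
int lowNibble = marker & 15; // 0b0001, goes into the low nibble of V
int u = (u0 & 240) | highNibble; // clear U's low nibble, insert the high half
int v = (v0 & 240) | lowNibble; // clear V's low nibble, insert the low half
int recovered = ((u & 15) << 4) | (v & 15); // == 17, matching the check in the decoder above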
public class YUVFrameBufferToVideoEncoder {
private static final String TAG = YUVFrameBufferToVideoEncoder.class.getSimpleName();
private static final int ERROR_IN_PROCESS = 0;
private IBitmapToVideoEncoderCallback mCallback;
private File mOutputFile;
private Queue<File> mEncodeQueue = new ConcurrentLinkedQueue<>();
private MediaCodec mediaCodec;
private MediaMuxer mediaMuxer;
private Object mFrameSync = new Object();
private CountDownLatch mNewFrameLatch;
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static int mWidth;
private static int mHeight;
private static int BIT_RATE;
private static int FRAME_RATE; // Frames per second
private int frameCount;
private Handler _progressBarHandler;
private Handler _processHandler;
private static final int I_FRAME_INTERVAL = 1;
private int mGenerateIndex = 0;
private int mTrackIndex;
private boolean mNoMoreFrames = false;
private boolean mAbort = false;
//
private byte[] dataToHide;
public interface IBitmapToVideoEncoderCallback {
void onEncodingComplete(File outputFile);
}
public YUVFrameBufferToVideoEncoder(IBitmapToVideoEncoderCallback callback) {
mCallback = callback;
}
public boolean isEncodingStarted() {
return (mediaCodec != null) && (mediaMuxer != null) && !mNoMoreFrames && !mAbort;
}
public int getActiveBitmaps() {
return mEncodeQueue.size();
}
public boolean startEncoding(int width, int height, int fps, int bitrate, int frameCount
, byte[] dataToHide, Handler _progressBarHandler, Handler _processHandler
, File outputFile) {
mWidth = width;
mHeight = height;
FRAME_RATE = fps;
BIT_RATE = bitrate;
this.frameCount = frameCount;
this._progressBarHandler = _progressBarHandler;
this._processHandler = _processHandler;
mOutputFile = outputFile;
this.dataToHide = dataToHide;
String outputFileString;
try {
outputFileString = outputFile.getCanonicalPath();
} catch (IOException e) {
Log.e(TAG, "Unable to get path for " + outputFile);
ErrorManager.getInstance().addErrorMessage("Unable to get path for " + outputFile);
return false;
}
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
if (codecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
ErrorManager.getInstance().addErrorMessage("Unable to find an appropriate codec for " + MIME_TYPE);
return false;
}
Log.d(TAG, "found codec: " + codecInfo.getName());
int colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar; // both branches of the old try/catch did the same thing
try {
mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
} catch (IOException e) {
Log.e(TAG, "Unable to create MediaCodec " + e.getMessage());
ErrorManager.getInstance().addErrorMessage("Unable to create MediaCodec " + e.getMessage());
return false;
}
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
try {
mediaMuxer = new MediaMuxer(outputFileString, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
Log.e(TAG, "MediaMuxer creation failed. " + e.getMessage());
ErrorManager.getInstance().addErrorMessage("MediaMuxer creation failed. " + e.getMessage());
return false;
}
Log.d(TAG, "Initialization complete. Starting encoder...");
Completable.fromAction(this::encode)
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe();
return true;
}
public void stopEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to stop encoding since it never started");
return;
}
Log.d(TAG, "Stopping encoding");
mNoMoreFrames = true;
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void abortEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to abort encoding since it never started");
return;
}
Log.d(TAG, "Aborting encoding");
mNoMoreFrames = true;
mAbort = true;
mEncodeQueue = new ConcurrentLinkedQueue<>(); // Drop all frames
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void queueFrame(File frame) {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to queue frame. Encoding not started");
return;
}
Log.d(TAG, "Queueing frame");
mEncodeQueue.add(frame);
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
private void encode() {
Log.d(TAG, "Encoder started");
while (true) {
if (mNoMoreFrames && (mEncodeQueue.size() == 0)) break;
File frame = mEncodeQueue.poll();
if (frame == null) {
synchronized (mFrameSync) {
mNewFrameLatch = new CountDownLatch(1);
}
try {
mNewFrameLatch.await();
} catch (InterruptedException e) {
// interrupted while waiting; fall through and re-check the queue
}
frame = mEncodeQueue.poll();
}
if (frame == null) continue;
int size = (int) frame.length();
byte[] bytesNV21 = new byte[size];
try {
BufferedInputStream buf = new BufferedInputStream(new FileInputStream(frame));
buf.read(bytesNV21, 0, bytesNV21.length);
buf.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
int offsetSize = mWidth * mHeight;
int byteNV21Offset = offsetSize;
int u, v, y1, y2, y3, y4;
//
int dataToHideLength = 0;
if (dataToHide != null)
dataToHideLength = dataToHide.length;
boolean isLastIndexInserted1 = false;
boolean isLastIndexInserted2 = false;
boolean isLastIndexInserted3 = false;
int uvIndex = 0;
int frameByteCapacity = ((mWidth * mHeight) / 4) / 20;
Log.e(TAG, "encode: dataToHideLength: " + dataToHideLength);
Log.e(TAG, "encode: frameByteCapacity: " + dataToHideLength);
//
// i walks the Y plane (and the final pixels)
// k walks the interleaved U/V samples
for (int i = 0, k = 0; i < offsetSize; i += 2, k += 2) {
y1 = bytesNV21[i] & 0xff;
y2 = bytesNV21[i + 1] & 0xff;
y3 = bytesNV21[mWidth + i] & 0xff;
y4 = bytesNV21[mWidth + i + 1] & 0xff;
u = bytesNV21[byteNV21Offset + k] & 0xff;
v = bytesNV21[byteNV21Offset + k + 1] & 0xff;
// frame 1
// altering u and v for test
if (mGenerateIndex == 1) {
int Unew = u & 240;
int Vnew = v & 240;
if (uvIndex == 0) {
// used in start and end of stego bytes
int specialByte1Integer = 17;
int specialByte1P1 = specialByte1Integer & 240;
int specialByte1P2 = specialByte1Integer & 15;
// shift p1 right 4 position
specialByte1P1 = specialByte1P1 >> 4;
u = Unew | specialByte1P1;
v = Vnew | specialByte1P2;
}
bytesNV21[byteNV21Offset + k] = (byte) u;
bytesNV21[byteNV21Offset + k + 1] = (byte) v;
}
uvIndex++;
if (i != 0 && (i + 2) % mWidth == 0)
i += mWidth;
}
long TIMEOUT_USEC = 500000;
int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
long ptsUsec = computePresentationTime(mGenerateIndex, FRAME_RATE);
if (inputBufIndex >= 0) {
final ByteBuffer inputBuffer = mediaCodec.getInputBuffers()[inputBufIndex];
inputBuffer.clear();
inputBuffer.put(bytesNV21);
mediaCodec.queueInputBuffer(inputBufIndex, 0, bytesNV21.length, ptsUsec, 0);
mGenerateIndex++;
int percentComplete = 70 + (int) ((((double) mGenerateIndex) / (frameCount)) * 30);
if (_progressBarHandler != null) {
_progressBarHandler.sendMessage(_progressBarHandler.obtainMessage(percentComplete));
}
Log.w("creatingVideo: ", "is:" + percentComplete);
}
MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
Log.e(TAG, "No output from encoder available");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = mediaCodec.getOutputFormat();
mTrackIndex = mediaMuxer.addTrack(newFormat);
mediaMuxer.start();
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else if (mBufferInfo.size != 0) {
ByteBuffer encodedData = mediaCodec.getOutputBuffers()[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
} else {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
mediaCodec.releaseOutputBuffer(encoderStatus, false);
}
}
}
release();
if (mAbort) {
mOutputFile.delete();
} else {
mCallback.onEncodingComplete(mOutputFile);
}
}
private void release() {
try {
if (mediaCodec != null) {
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
Log.d(TAG, "RELEASE CODEC");
}
if (mediaMuxer != null) {
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;
Log.d(TAG, "RELEASE MUXER");
}
} catch (Exception ignored) {
ErrorManager.getInstance().addErrorMessage("unsupported video file");
Message res = _processHandler.obtainMessage(ERROR_IN_PROCESS);
_processHandler.sendMessage(res);
}
}
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
private static int selectColorFormat(MediaCodecInfo codecInfo,
String mimeType) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo
.getCapabilitiesForType(mimeType);
for (int i = 0; i < capabilities.colorFormats.length; i++) {
int colorFormat = capabilities.colorFormats[i];
if (isRecognizedFormat(colorFormat)) {
return colorFormat;
}
}
return 0; // not reached
}
private static boolean isRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle for
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
return true;
default:
return false;
}
}
private long computePresentationTime(long frameIndex, int framerate) {
return 132 + frameIndex * 1000000 / framerate;
}
}
The output video is created successfully without any problem, but MediaCodec has changed the altered test value and I cannot retrieve it.
So here is my question: is this the right approach to video steganography on Android? If it is not, can you please make a suggestion?
Steganography comes with a prerequisite: lossless encoding.
None of the codecs available on Android support lossless video encoding, as of now.
So I'm afraid your LSBs will never survive the encode/decode round trip.
Suggestion: if you don't have too many frames, use a lossless format instead. You could encode your frames as a sequence of PNG images.
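For example, once a frame has been converted to an ARGB Bitmap (the NV21-to-Bitmap conversion is assumed to have happened already), it can be written out losslessly with a sketch like this; PNG ignores the quality argument and is always lossless:
import android.graphics.Bitmap;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Locale;

public final class PngFrameWriter {
    // Writes one ARGB_8888 frame as a PNG; PNG compression is lossless,
    // so the LSBs of every pixel survive intact.
    public static void writeFrame(Bitmap frame, File dir, int index) throws IOException {
        File out = new File(dir, String.format(Locale.US, "frame_%05d.png", index));
        FileOutputStream stream = new FileOutputStream(out);
        try {
            frame.compress(Bitmap.CompressFormat.PNG, 100, stream);
        } finally {
            stream.close();
        }
    }
}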

How do I record video stream data as mp4 in WebRTC on Android?

Please help me! I used the example at https://github.com/pchab/AndroidRTC to stream video and audio from one Android device to another. That example uses two libraries, libjingle_peerConnection and a Socket.IO client, but I don't know how to save the streamed data in H.264 format.
After a lot of attempts and hard work on this project, I found a solution for saving the video as mp4 without any problem.
Add this VideoFileRenderer.java to your project:
package org.webrtc;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
import java.io.IOException;
import java.nio.ByteBuffer;
public class VideoFileRenderer implements VideoSink, SamplesReadyCallback {
private static final String TAG = "VideoFileRenderer";
private final HandlerThread renderThread;
private final Handler renderThreadHandler;
private final HandlerThread audioThread;
private final Handler audioThreadHandler;
private int outputFileWidth = -1;
private int outputFileHeight = -1;
private ByteBuffer[] encoderOutputBuffers;
private ByteBuffer[] audioInputBuffers;
private ByteBuffer[] audioOutputBuffers;
private EglBase eglBase;
private EglBase.Context sharedContext;
private VideoFrameDrawer frameDrawer;
// TODO: these ought to be configurable as well
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 30; // 30fps
private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
private MediaMuxer mediaMuxer;
private MediaCodec encoder;
private MediaCodec.BufferInfo bufferInfo, audioBufferInfo;
private int trackIndex = -1;
private int audioTrackIndex;
private boolean isRunning = true;
private GlRectDrawer drawer;
private Surface surface;
private MediaCodec audioEncoder;
private AudioDeviceModule audioDeviceModule;
public VideoFileRenderer(String outputFile, final EglBase.Context sharedContext, boolean withAudio) throws IOException {
renderThread = new HandlerThread(TAG + "RenderThread");
renderThread.start();
renderThreadHandler = new Handler(renderThread.getLooper());
if (withAudio) {
audioThread = new HandlerThread(TAG + "AudioThread");
audioThread.start();
audioThreadHandler = new Handler(audioThread.getLooper());
} else {
audioThread = null;
audioThreadHandler = null;
}
bufferInfo = new MediaCodec.BufferInfo();
this.sharedContext = sharedContext;
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
mediaMuxer = new MediaMuxer(outputFile,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
audioTrackIndex = withAudio ? -1 : 0;
}
private void initVideoEncoder() {
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, outputFileWidth, outputFileHeight);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
// Create a MediaCodec encoder and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
try {
encoder = MediaCodec.createEncoderByType(MIME_TYPE);
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
renderThreadHandler.post(() -> {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE);
surface = encoder.createInputSurface();
eglBase.createSurface(surface);
eglBase.makeCurrent();
drawer = new GlRectDrawer();
});
} catch (Exception e) {
Log.wtf(TAG, e);
}
}
@Override
public void onFrame(VideoFrame frame) {
frame.retain();
if (outputFileWidth == -1) {
outputFileWidth = frame.getRotatedWidth();
outputFileHeight = frame.getRotatedHeight();
initVideoEncoder();
}
renderThreadHandler.post(() -> renderFrameOnRenderThread(frame));
}
private void renderFrameOnRenderThread(VideoFrame frame) {
if (frameDrawer == null) {
frameDrawer = new VideoFrameDrawer();
}
frameDrawer.drawFrame(frame, drawer, null, 0, 0, outputFileWidth, outputFileHeight);
frame.release();
drainEncoder();
eglBase.swapBuffers();
}
/**
* Release all resources. All already posted frames will be rendered first.
*/
public void release() {
isRunning = false;
if (audioThreadHandler != null)
audioThreadHandler.post(() -> {
if (audioEncoder != null) {
audioEncoder.stop();
audioEncoder.release();
}
audioThread.quit();
});
renderThreadHandler.post(() -> {
if (encoder != null) {
encoder.stop();
encoder.release();
}
eglBase.release();
mediaMuxer.stop();
mediaMuxer.release();
renderThread.quit();
});
}
private boolean encoderStarted = false;
private volatile boolean muxerStarted = false;
private long videoFrameStart = 0;
private void drainEncoder() {
if (!encoderStarted) {
encoder.start();
encoderOutputBuffers = encoder.getOutputBuffers();
encoderStarted = true;
return;
}
while (true) {
int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, 10000);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = encoder.getOutputBuffers();
Log.e(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = encoder.getOutputFormat();
Log.e(TAG, "encoder output format changed: " + newFormat);
trackIndex = mediaMuxer.addTrack(newFormat);
if (audioTrackIndex != -1 && !muxerStarted) {
mediaMuxer.start();
muxerStarted = true;
}
if (!muxerStarted)
break;
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result fr om encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
try {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
break;
}
// It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
encodedData.position(bufferInfo.offset);
encodedData.limit(bufferInfo.offset + bufferInfo.size);
if (videoFrameStart == 0 && bufferInfo.presentationTimeUs != 0) {
videoFrameStart = bufferInfo.presentationTimeUs;
}
bufferInfo.presentationTimeUs -= videoFrameStart;
if (muxerStarted)
mediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
isRunning = isRunning && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
encoder.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
} catch (Exception e) {
Log.wtf(TAG, e);
break;
}
}
}
}
private long presTime = 0L;
private void drainAudio() {
if (audioBufferInfo == null)
audioBufferInfo = new MediaCodec.BufferInfo();
while (true) {
int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 10000);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
audioOutputBuffers = audioEncoder.getOutputBuffers();
Log.w(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = audioEncoder.getOutputFormat();
Log.w(TAG, "encoder output format changed: " + newFormat);
audioTrackIndex = mediaMuxer.addTrack(newFormat);
if (trackIndex != -1 && !muxerStarted) {
mediaMuxer.start();
muxerStarted = true;
}
if (!muxerStarted)
break;
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result fr om encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
try {
ByteBuffer encodedData = audioOutputBuffers[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
break;
}
// It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
encodedData.position(audioBufferInfo.offset);
encodedData.limit(audioBufferInfo.offset + audioBufferInfo.size);
if (muxerStarted)
mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo);
isRunning = isRunning && (audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
audioEncoder.releaseOutputBuffer(encoderStatus, false);
if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
} catch (Exception e) {
Log.wtf(TAG, e);
break;
}
}
}
}
@Override
public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) {
if (!isRunning)
return;
audioThreadHandler.post(() -> {
if (audioEncoder == null) try {
audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, audioSamples.getChannelCount());
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, audioSamples.getSampleRate());
format.setInteger(MediaFormat.KEY_BIT_RATE, 64 * 1024);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
audioEncoder.start();
audioInputBuffers = audioEncoder.getInputBuffers();
audioOutputBuffers = audioEncoder.getOutputBuffers();
} catch (IOException exception) {
Log.wtf(TAG, exception);
}
int bufferIndex = audioEncoder.dequeueInputBuffer(0);
if (bufferIndex >= 0) {
ByteBuffer buffer = audioInputBuffers[bufferIndex];
buffer.clear();
byte[] data = audioSamples.getData();
buffer.put(data);
audioEncoder.queueInputBuffer(bufferIndex, 0, data.length, presTime, 0);
presTime += data.length * 125 / 12; // 1000000 microseconds / 48000hz / 2 bytes
}
drainAudio();
});
}
}
Then add this MediaRecorderImpl.java, which drives the recording:
package com.vedja.hassan.kavandeh_master.utils;
import android.support.annotation.Nullable;
import android.util.Log;
import com.vedja.hassan.kavandeh_master.utils.utils.EglUtils;
import org.webrtc.VideoFileRenderer;
import org.webrtc.VideoTrack;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import java.io.File;
public class MediaRecorderImpl {
private final Integer id;
private final VideoTrack videoTrack;
private final AudioSamplesInterceptor audioInterceptor;
private VideoFileRenderer videoFileRenderer;
private boolean isRunning = false;
private File recordFile;
public MediaRecorderImpl(Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioSamplesInterceptor audioInterceptor) {
this.id = id;
this.videoTrack = videoTrack;
this.audioInterceptor = audioInterceptor;
}
public void startRecording(File file) throws Exception {
recordFile = file;
if (isRunning)
return;
isRunning = true;
//noinspection ResultOfMethodCallIgnored
file.getParentFile().mkdirs();
if (videoTrack != null) {
videoFileRenderer = new VideoFileRenderer(
file.getAbsolutePath(),
EglUtils.getRootEglBaseContext(),
audioInterceptor != null
);
videoTrack.addSink(videoFileRenderer);
if (audioInterceptor != null)
audioInterceptor.attachCallback(id, videoFileRenderer);
} else {
Log.e(TAG, "Video track is null");
if (audioInterceptor != null) {
//TODO(rostopira): audio only recording
throw new Exception("Audio-only recording not implemented yet");
}
}
}
public File getRecordFile() { return recordFile; }
public void stopRecording() {
isRunning = false;
if (audioInterceptor != null)
audioInterceptor.detachCallback(id);
if (videoTrack != null && videoFileRenderer != null) {
videoTrack.removeSink(videoFileRenderer);
videoFileRenderer.release();
videoFileRenderer = null;
}
}
private static final String TAG = "MediaRecorderImpl";
}
Then wire it up with this code:
final AudioSamplesInterceptor inputSamplesInterceptor = new AudioSamplesInterceptor();
private OutputAudioSamplesInterceptor outputSamplesInterceptor = null;
private final SparseArray<MediaRecorderImpl> mediaRecorders = new SparseArray<>();
void startRecordingToFile(String path, Integer id, @Nullable VideoTrack videoTrack, @Nullable AudioChannel audioChannel) throws Exception {
AudioSamplesInterceptor interceptor = null;
if (audioChannel == AudioChannel.INPUT)
interceptor = inputSamplesInterceptor;
else if (audioChannel == AudioChannel.OUTPUT) {
if (outputSamplesInterceptor == null)
outputSamplesInterceptor = new OutputAudioSamplesInterceptor(audioDeviceModule);
interceptor = outputSamplesInterceptor;
}
MediaRecorderImpl mediaRecorder = new MediaRecorderImpl(id, videoTrack, interceptor);
mediaRecorder.startRecording(new File(path));
mediaRecorders.append(id, mediaRecorder);
}
void stopRecording(Integer id) {
MediaRecorderImpl mediaRecorder = mediaRecorders.get(id);
if (mediaRecorder != null) {
mediaRecorder.stopRecording();
mediaRecorders.remove(id);
File file = mediaRecorder.getRecordFile();
if (file != null) {
ContentValues values = new ContentValues(3);
values.put(MediaStore.Video.Media.TITLE, file.getName());
values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath());
getContentResolver().insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values);
}
}
}
Finally, use it like this:
try {
VedjaSharedPreference sharedPreference = new VedjaSharedPreference(getContext());
final File dir = new File(sharedPreference.getStringParam(StaticParameter.SAVING_URL) + "/audio/");
dir.mkdirs(); // create the folders we will write into
final File file = new File(dir, "Vedja-".concat(String.valueOf(System.currentTimeMillis())).concat(".mp3"));
VideoTrack videoTrack = null;
MediaStreamTrack track = slaveManagerActivity.remoteStream.videoTracks.get(0);
if (track instanceof VideoTrack)
videoTrack = (VideoTrack) track;
AudioChannel audioChannel = AudioChannel.OUTPUT;
slaveManagerActivity.startRecordingToFile(file.getPath(), 1, videoTrack, audioChannel);
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException(
"Failed to open video file for output: ", e);
}
After copying this code, some classes may not be found in your project; you can search for them on the internet.
The project below also contains a VideoFileRenderer class you can use to save video to a file:
https://github.com/Piasy/AppRTC-Android

Android MediaCodec: ExtractDecodeEditEncodeMuxTest exits after the line "output surface: await new image"

I wanted to run ExtractDecodeEditEncodeMuxTest from https://android.googlesource.com/platform/cts/+/jb-mr2-release/tests/tests/media/src/android/media/cts/ExtractDecodeEditEncodeMuxTest.java.
I edited the code so that it takes its input from the sdcard and writes its output to the sdcard, to simplify things. But the main while loop breaks after the line outputSurface.awaitNewImage();, and decoding/encoding stops.
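One detail that may matter: the original CTS test runs the whole pipeline on a helper thread (TestWrapper), whereas I call extractDecodeEditEncodeMux() straight from onCreate(). OutputSurface.awaitNewImage() blocks waiting for SurfaceTexture's onFrameAvailable callback, and if that callback is delivered on the main looper, blocking the main thread would starve it. A minimal variant I could try (assuming nothing here needs to run on the UI thread):
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // run off the main thread so the frame-available callback can still be delivered
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                extractDecodeEditEncodeMux();
            } catch (Exception e) {
                Log.e(TAG, e.getMessage(), e);
            }
        }
    }, "TranscodeThread").start();
}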
private String mInputFile = Environment.getExternalStorageDirectory().getAbsolutePath()+"/dingdong.mp4";
private String mOutputFile = Environment.getExternalStorageDirectory().getAbsolutePath()+"/compressed_output.mp4";
private String mOutputVideoMimeType;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
try {
extractDecodeEditEncodeMux();
} catch (Exception e) {
Log.e(TAG,e.getMessage(),e);
}
}
private void extractDecodeEditEncodeMux() throws Exception {
// Exception that may be thrown during release.
Exception exception = null;
MediaCodecInfo videoCodecInfo = selectCodec(OUTPUT_VIDEO_MIME_TYPE);
if (videoCodecInfo == null) {
// Don't fail CTS if they don't have an AVC codec (not here, anyway).
Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_VIDEO_MIME_TYPE);
return;
}
if (VERBOSE) Log.d(TAG, "video found codec: " + videoCodecInfo.getName());
MediaCodecInfo audioCodecInfo = selectCodec(OUTPUT_AUDIO_MIME_TYPE);
if (audioCodecInfo == null) {
// Don't fail CTS if they don't have an AAC codec (not here, anyway).
Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_AUDIO_MIME_TYPE);
return;
}
if (VERBOSE) Log.d(TAG, "audio found codec: " + audioCodecInfo.getName());
MediaExtractor videoExtractor = null;
MediaExtractor audioExtractor = null;
OutputSurface outputSurface = null;
MediaCodec videoDecoder = null;
MediaCodec audioDecoder = null;
MediaCodec videoEncoder = null;
MediaCodec audioEncoder = null;
MediaMuxer muxer = null;
InputSurface inputSurface = null;
try {
if (mCopyVideo) {
videoExtractor = createExtractor();
int videoInputTrack = getAndSelectVideoTrackIndex(videoExtractor);
assertTrue("missing video track in test video", videoInputTrack != -1);
MediaFormat inputFormat = videoExtractor.getTrackFormat(videoInputTrack);
// We avoid the device-specific limitations on width and height by using values
// that are multiples of 16, which all tested devices seem to be able to handle.
MediaFormat outputVideoFormat =
MediaFormat.createVideoFormat(OUTPUT_VIDEO_MIME_TYPE, mOutputWidth, mOutputHeight);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
outputVideoFormat.setInteger(
MediaFormat.KEY_COLOR_FORMAT, OUTPUT_VIDEO_COLOR_FORMAT);
outputVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_VIDEO_BIT_RATE);
outputVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, OUTPUT_VIDEO_FRAME_RATE);
outputVideoFormat.setInteger(
MediaFormat.KEY_I_FRAME_INTERVAL, OUTPUT_VIDEO_IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "video format: " + outputVideoFormat);
// Create a MediaCodec for the desired codec, then configure it as an encoder with
// our desired properties. Request a Surface to use for input.
AtomicReference<Surface> inputSurfaceReference = new AtomicReference<Surface>();
videoEncoder = createVideoEncoder(
videoCodecInfo, outputVideoFormat, inputSurfaceReference);
inputSurface = new InputSurface(inputSurfaceReference.get());
inputSurface.makeCurrent();
// Create a MediaCodec for the decoder, based on the extractor's format.
outputSurface = new OutputSurface();
outputSurface.changeFragmentShader(FRAGMENT_SHADER);
videoDecoder = createVideoDecoder(inputFormat, outputSurface.getSurface());
}
if (mCopyAudio) {
audioExtractor = createExtractor();
int audioInputTrack = getAndSelectAudioTrackIndex(audioExtractor);
assertTrue("missing audio track in test video", audioInputTrack != -1);
MediaFormat inputFormat = audioExtractor.getTrackFormat(audioInputTrack);
MediaFormat outputAudioFormat = MediaFormat.createAudioFormat(OUTPUT_AUDIO_MIME_TYPE, OUTPUT_AUDIO_SAMPLE_RATE_HZ, OUTPUT_AUDIO_CHANNEL_COUNT);
outputAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_AUDIO_BIT_RATE);
outputAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, OUTPUT_AUDIO_AAC_PROFILE);
// Create a MediaCodec for the desired codec, then configure it as an encoder with
// our desired properties. Request a Surface to use for input.
audioEncoder = createAudioEncoder(audioCodecInfo, outputAudioFormat);
// Create a MediaCodec for the decoder, based on the extractor's format.
audioDecoder = createAudioDecoder(inputFormat);
}
// Create the muxer, but do not start it or add tracks just yet.
muxer = createMuxer();
doExtractDecodeEditEncodeMux(videoExtractor, audioExtractor, videoDecoder, videoEncoder, audioDecoder, audioEncoder, muxer, inputSurface, outputSurface);
} finally {
if (VERBOSE) Log.d(TAG, "releasing extractor, decoder, encoder, and muxer");
// Try to release everything we acquired, even if one of the releases fails, in which
// case we save the first exception we got and re-throw it at the end (unless some
// other exception has already been thrown). This guarantees that the first exception
// thrown is reported as the cause of the error, that everything is (at least
// attempted to be) released, and that all other exceptions appear in the logs.
try {
if (videoExtractor != null) {
videoExtractor.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing videoExtractor", e);
if (exception == null) {
exception = e;
}
}
try {
if (audioExtractor != null) {
audioExtractor.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing audioExtractor", e);
if (exception == null) {
exception = e;
}
}
try {
if (videoDecoder != null) {
videoDecoder.stop();
videoDecoder.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing videoDecoder", e);
if (exception == null) {
exception = e;
}
}
try {
if (outputSurface != null) {
outputSurface.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing outputSurface", e);
if (exception == null) {
exception = e;
}
}
try {
if (videoEncoder != null) {
videoEncoder.stop();
videoEncoder.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing videoEncoder", e);
if (exception == null) {
exception = e;
}
}
try {
if (audioDecoder != null) {
audioDecoder.stop();
audioDecoder.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing audioDecoder", e);
if (exception == null) {
exception = e;
}
}
try {
if (audioEncoder != null) {
audioEncoder.stop();
audioEncoder.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing audioEncoder", e);
if (exception == null) {
exception = e;
}
}
try {
if (muxer != null) {
muxer.stop();
muxer.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing muxer", e);
if (exception == null) {
exception = e;
}
}
try {
if (inputSurface != null) {
inputSurface.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing inputSurface", e);
if (exception == null) {
exception = e;
}
}
}
if (exception != null) {
throw exception;
}
}
/**
* Creates an extractor that reads its frames from {@link #mInputFile}.
*/
private MediaExtractor createExtractor() throws IOException {
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(mInputFile);
return extractor;
}
/**
* Creates a decoder for the given format, which outputs to the given surface.
*
* @param inputFormat the format of the stream to decode
* @param surface into which to decode the frames
*/
private MediaCodec createVideoDecoder(MediaFormat inputFormat, Surface surface) throws IOException {
MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
decoder.configure(inputFormat, surface, null, 0);
decoder.start();
return decoder;
}
private MediaCodec createVideoEncoder(
MediaCodecInfo codecInfo,
MediaFormat format,
AtomicReference<Surface> surfaceReference) throws IOException {
MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
// Must be called before start() is.
surfaceReference.set(encoder.createInputSurface());
encoder.start();
return encoder;
}
private MediaCodec createAudioDecoder(MediaFormat inputFormat) throws IOException {
MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
decoder.configure(inputFormat, null, null, 0);
decoder.start();
return decoder;
}
private MediaCodec createAudioEncoder(MediaCodecInfo codecInfo, MediaFormat format) throws IOException {
MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
encoder.start();
return encoder;
}
private MediaMuxer createMuxer() throws IOException {
return new MediaMuxer(mOutputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
private int getAndSelectVideoTrackIndex(MediaExtractor extractor) {
for (int index = 0; index < extractor.getTrackCount(); ++index) {
if (VERBOSE) {
Log.d(TAG, "format for track " + index + " is "
+ getMimeTypeFor(extractor.getTrackFormat(index)));
}
if (isVideoFormat(extractor.getTrackFormat(index))) {
extractor.selectTrack(index);
MediaFormat inputFormat = extractor.getTrackFormat(index);
mOutputWidth = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
mOutputHeight = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
return index;
}
}
return -1;
}
private int getAndSelectAudioTrackIndex(MediaExtractor extractor) {
for (int index = 0; index < extractor.getTrackCount(); ++index) {
if (VERBOSE) {
Log.d(TAG, "format for track " + index + " is "
+ getMimeTypeFor(extractor.getTrackFormat(index)));
}
if (isAudioFormat(extractor.getTrackFormat(index))) {
extractor.selectTrack(index);
return index;
}
}
return -1;
}
private void doExtractDecodeEditEncodeMux(MediaExtractor videoExtractor, MediaExtractor audioExtractor, MediaCodec videoDecoder, MediaCodec videoEncoder, MediaCodec audioDecoder, MediaCodec audioEncoder, MediaMuxer muxer, InputSurface inputSurface, OutputSurface outputSurface) {
ByteBuffer[] videoDecoderInputBuffers = null;
ByteBuffer[] videoDecoderOutputBuffers = null;
ByteBuffer[] videoEncoderOutputBuffers = null;
MediaCodec.BufferInfo videoDecoderOutputBufferInfo = null;
MediaCodec.BufferInfo videoEncoderOutputBufferInfo = null;
if (mCopyVideo) {
videoDecoderInputBuffers = videoDecoder.getInputBuffers();
videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
videoDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
videoEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
}
ByteBuffer[] audioDecoderInputBuffers = null;
ByteBuffer[] audioDecoderOutputBuffers = null;
ByteBuffer[] audioEncoderInputBuffers = null;
ByteBuffer[] audioEncoderOutputBuffers = null;
MediaCodec.BufferInfo audioDecoderOutputBufferInfo = null;
MediaCodec.BufferInfo audioEncoderOutputBufferInfo = null;
if (mCopyAudio) {
audioDecoderInputBuffers = audioDecoder.getInputBuffers();
audioDecoderOutputBuffers = audioDecoder.getOutputBuffers();
audioEncoderInputBuffers = audioEncoder.getInputBuffers();
audioEncoderOutputBuffers = audioEncoder.getOutputBuffers();
audioDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
audioEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
}
// We will get these from the decoders when notified of a format change.
MediaFormat decoderOutputVideoFormat = null;
MediaFormat decoderOutputAudioFormat = null;
// We will get these from the encoders when notified of a format change.
MediaFormat encoderOutputVideoFormat = null;
MediaFormat encoderOutputAudioFormat = null;
// We will determine these once we have the output format.
int outputVideoTrack = -1;
int outputAudioTrack = -1;
// Whether things are done on the video side.
boolean videoExtractorDone = false;
boolean videoDecoderDone = false;
boolean videoEncoderDone = false;
// Whether things are done on the audio side.
boolean audioExtractorDone = false;
boolean audioDecoderDone = false;
boolean audioEncoderDone = false;
// The audio decoder output buffer to process, -1 if none.
int pendingAudioDecoderOutputBufferIndex = -1;
boolean muxing = false;
int videoExtractedFrameCount = 0;
int videoDecodedFrameCount = 0;
int videoEncodedFrameCount = 0;
int audioExtractedFrameCount = 0;
int audioDecodedFrameCount = 0;
int audioEncodedFrameCount = 0;
while ((mCopyVideo && !videoEncoderDone) || (mCopyAudio && !audioEncoderDone)) {
//1: Extract video from file and feed to decoder.
// Do not extract video if we have determined the output format but we are not yet
// ready to mux the frames.
while (mCopyVideo && !videoExtractorDone && (encoderOutputVideoFormat == null || muxing)) {
int decoderInputBufferIndex = videoDecoder.dequeueInputBuffer(TIMEOUT_USEC);
if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no video decoder input buffer");
break;
}
if (VERBOSE)Log.d(TAG, "video decoder: returned input buffer: " + decoderInputBufferIndex);
ByteBuffer decoderInputBuffer = videoDecoderInputBuffers[decoderInputBufferIndex];
int size = videoExtractor.readSampleData(decoderInputBuffer, 0);
long presentationTime = videoExtractor.getSampleTime();
if (VERBOSE)Log.d(TAG, "video extractor: returned buffer of size " + size +" for time "+presentationTime);
if (size >= 0) videoDecoder.queueInputBuffer(decoderInputBufferIndex, 0, size, presentationTime,videoExtractor.getSampleFlags());
videoExtractorDone = !videoExtractor.advance();
if (videoExtractorDone) {
if (VERBOSE) Log.d(TAG, "video extractor: EOS");
videoDecoder.queueInputBuffer(decoderInputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
videoExtractedFrameCount++;
// We extracted a frame, let's try something else next.
break;
}
//3: Poll output frames from the video decoder and feed the encoder.
while (mCopyVideo && !videoDecoderDone && (encoderOutputVideoFormat == null || muxing)) {
int decoderOutputBufferIndex = videoDecoder.dequeueOutputBuffer(videoDecoderOutputBufferInfo, TIMEOUT_USEC);
if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no video decoder output buffer");
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (VERBOSE) Log.d(TAG, "video decoder: output buffers changed");
videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
decoderOutputVideoFormat = videoDecoder.getOutputFormat();
if (VERBOSE) Log.d(TAG, "video decoder: output format changed: " + decoderOutputVideoFormat);
break;
}
if (VERBOSE) {
Log.d(TAG, "video decoder: returned output buffer: " + decoderOutputBufferIndex);
Log.d(TAG, "video decoder: returned buffer of size " + videoDecoderOutputBufferInfo.size);
}
ByteBuffer decoderOutputBuffer = videoDecoderOutputBuffers[decoderOutputBufferIndex];
if ((videoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)!= 0) {
if (VERBOSE) Log.d(TAG, "video decoder: codec config buffer");
videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
if (VERBOSE)Log.d(TAG, "video decoder: returned buffer for time " + videoDecoderOutputBufferInfo.presentationTimeUs);
boolean render = videoDecoderOutputBufferInfo.size != 0;
videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, render);
if (render) {
if (VERBOSE) Log.d(TAG, "output surface: await new image");
outputSurface.awaitNewImage();
// Edit the frame and send it to the encoder.
if (VERBOSE) Log.d(TAG, "output surface: draw image");
outputSurface.drawImage();
inputSurface.setPresentationTime(videoDecoderOutputBufferInfo.presentationTimeUs * 1000);
if (VERBOSE) Log.d(TAG, "input surface: swap buffers");
inputSurface.swapBuffers();
if (VERBOSE) Log.d(TAG, "video encoder: notified of new frame");
}
if ((videoDecoderOutputBufferInfo.flags
& MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "video decoder: EOS");
videoDecoderDone = true;
videoEncoder.signalEndOfInputStream();
}
videoDecodedFrameCount++;
// We extracted a pending frame, let's try something else next.
break;
}
//6: Poll frames from the video encoder and send them to the muxer.
while (mCopyVideo && !videoEncoderDone && (encoderOutputVideoFormat == null || muxing)) {
int encoderOutputBufferIndex = videoEncoder.dequeueOutputBuffer(videoEncoderOutputBufferInfo, TIMEOUT_USEC);
if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no video encoder output buffer");
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (VERBOSE) Log.d(TAG, "video encoder: output buffers changed");
videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (VERBOSE) Log.d(TAG, "video encoder: output format changed");
if (outputVideoTrack >= 0) {
fail("video encoder changed its output format again?");
}
encoderOutputVideoFormat = videoEncoder.getOutputFormat();
break;
}
assertTrue("should have added track before processing output", muxing);
if (VERBOSE) {
Log.d(TAG, "video encoder: returned output buffer: " + encoderOutputBufferIndex);
Log.d(TAG, "video encoder: returned buffer of size " + videoEncoderOutputBufferInfo.size);
}
ByteBuffer encoderOutputBuffer = videoEncoderOutputBuffers[encoderOutputBufferIndex];
if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
if (VERBOSE) Log.d(TAG, "video encoder: codec config buffer");
// Simply ignore codec config buffers.
videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
if (VERBOSE) Log.d(TAG, "video encoder: returned buffer for time " + videoEncoderOutputBufferInfo.presentationTimeUs);
if (videoEncoderOutputBufferInfo.size != 0) {
muxer.writeSampleData(outputVideoTrack, encoderOutputBuffer, videoEncoderOutputBufferInfo);
}
if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
!= 0) {
if (VERBOSE) Log.d(TAG, "video encoder: EOS");
videoEncoderDone = true;
}
videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
videoEncodedFrameCount++;
// We enqueued an encoded frame, let's try something else next.
break;
}
if (!muxing
&& (!mCopyAudio || encoderOutputAudioFormat != null)
&& (!mCopyVideo || encoderOutputVideoFormat != null)) {
if (mCopyVideo) {
Log.d(TAG, "muxer: adding video track.");
outputVideoTrack = muxer.addTrack(encoderOutputVideoFormat);
}
if (mCopyAudio) {
Log.d(TAG, "muxer: adding audio track.");
outputAudioTrack = muxer.addTrack(encoderOutputAudioFormat);
}
Log.d(TAG, "muxer: starting");
muxer.start();
muxing = true;
}
}
// Basic sanity checks.
if (mCopyVideo) {
assertEquals("encoded and decoded video frame counts should match",
videoDecodedFrameCount, videoEncodedFrameCount);
assertTrue("decoded frame count should be less than extracted frame count",
videoDecodedFrameCount <= videoExtractedFrameCount);
}
if (mCopyAudio) {
assertEquals("no frame should be pending", -1, pendingAudioDecoderOutputBufferIndex);
}
// TODO: Check the generated output file.
}
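// (Added sketch, not part of the original test.) One way to act on the TODO
// above: reopen the muxed file with MediaExtractor and check that the expected
// tracks made it into the output. "mOutputFile" is a hypothetical field
// holding the output file path.
private void verifyOutputFile() throws IOException {
MediaExtractor verifier = new MediaExtractor();
verifier.setDataSource(mOutputFile);
int videoTracks = 0;
int audioTracks = 0;
for (int i = 0; i < verifier.getTrackCount(); ++i) {
MediaFormat format = verifier.getTrackFormat(i);
if (isVideoFormat(format)) ++videoTracks;
if (isAudioFormat(format)) ++audioTracks;
}
verifier.release();
assertEquals("wrong number of video tracks", mCopyVideo ? 1 : 0, videoTracks);
assertEquals("wrong number of audio tracks", mCopyAudio ? 1 : 0, audioTracks);
}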
private static boolean isVideoFormat(MediaFormat format) {
return getMimeTypeFor(format).startsWith("video/");
}
private static boolean isAudioFormat(MediaFormat format) {
return getMimeTypeFor(format).startsWith("audio/");
}
private static String getMimeTypeFor(MediaFormat format) {
return format.getString(MediaFormat.KEY_MIME);
}
/**
* Returns the first codec capable of encoding the specified MIME type, or null if no match was
* found.
*/
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
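For reference, a helper like selectCodec() is typically paired with MediaCodec.createByCodecName() when constructing the encoder. A minimal usage sketch (not from the original post):
// Look up a suitable encoder for the MIME type, then create it by name.
MediaCodecInfo codecInfo = selectCodec("video/avc");
if (codecInfo == null) {
throw new RuntimeException("no encoder found for video/avc");
}
MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());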
I have found the solution: I should not call extractDecodeEditEncodeMux() directly from onCreate(). Instead, I create a separate thread and call extractDecodeEditEncodeMux() from that thread. (A plausible reason: OutputSurface.awaitNewImage() blocks waiting for the SurfaceTexture onFrameAvailable callback, which is delivered through the main thread's Looper, so blocking the main thread deadlocks the pipeline.)
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Thread myThread = new Thread(new Runnable() {
@Override
public void run() {
try {
extractDecodeEditEncodeMux();
} catch (Exception e) {
e.printStackTrace();
}
}
});
myThread.start();
}

MediaCodec has no available input buffer

I am using the MediaCodec API to encode video and audio into an mp4 file. The data is encoded in separate threads. Sometimes, on some devices, the audio encoder stops returning any available input buffer, and as a result MediaMuxer crashes when I try to stop it. Here is my code:
Configuring the media codec:
public static final String MIME_TYPE_AUDIO = "audio/mp4a-latm";
public static final int SAMPLE_RATE = 44100;
public static final int CHANNEL_COUNT = 1;
public static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO;
public static final int BIT_RATE_AUDIO = 128000;
public static final int SAMPLES_PER_FRAME = 1024 * 2;
public static final int FRAMES_PER_BUFFER = 24;
public static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
public static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;
public static final int MAX_INPUT_SIZE = 16384 * 4;
public static final int MAX_SAMPLE_SIZE = 256 * 1024;
private AudioRecord audioRecord;
private ByteBuffer[] inputBuffers;
private ByteBuffer inputBuffer;
private MediaExtractor mediaExtractor;
private boolean audioSended = false;
private boolean completed = false;
private int sampleCount;
private int iBufferSize;
public AudioEncoderCore(MovieMuxer muxer) throws IOException {
this.muxer = muxer;
bufferInfo = new MediaCodec.BufferInfo();
MediaFormat mediaFormat = null;
mediaFormat = MediaFormat.createAudioFormat(MIME_TYPE_AUDIO, SAMPLE_RATE, CHANNEL_COUNT);
mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, MAX_INPUT_SIZE);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE_AUDIO);
encoder = MediaCodec.createEncoderByType(MIME_TYPE_AUDIO);
encoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
encoder.start();
iBufferSize = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
// Ensure buffer is adequately sized for the AudioRecord
// object to initialize
int iMinBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT);
if (iBufferSize < iMinBufferSize)
iBufferSize = ((iMinBufferSize / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
audioRecord = new AudioRecord(AUDIO_SOURCE, SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT, iBufferSize);
audioRecord.startRecording();
}
Sending data to the encoder:
public void sendDataFromMic(boolean endOfStream) {
if (endOfStream)
Log.d(TAG, "sendDataFromMic end of stream");
long audioPresentationTimeNs;
byte[] mTempBuffer = new byte[SAMPLES_PER_FRAME];
audioPresentationTimeNs = System.nanoTime();
int iReadResult = audioRecord.read(mTempBuffer, 0, mTempBuffer.length);
if (iReadResult == AudioRecord.ERROR_BAD_VALUE || iReadResult == AudioRecord.ERROR_INVALID_OPERATION || iReadResult == 0) {
Log.e(TAG, "audio buffer read error");
} else {
// send current frame data to encoder
try {
if (inputBuffers == null)
inputBuffers = encoder.getInputBuffers();
//Sometimes can't get any available input buffer
int inputBufferIndex = encoder.dequeueInputBuffer(100000);
Log.d(TAG, "inputBufferIndex = " + inputBufferIndex);
if (inputBufferIndex >= 0) {
inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(mTempBuffer, 0, iReadResult);
Log.d(TAG, "sending frame to audio encoder " + iReadResult + " bytes");
encoder.queueInputBuffer(inputBufferIndex, 0, iReadResult, audioPresentationTimeNs / 1000,
endOfStream ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
}
} catch (Throwable t) {
Log.e(TAG, "sendFrameToAudioEncoder exception");
t.printStackTrace();
}
}
}
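For context (this loop is not in the original post), sendDataFromMic() only behaves well if the encoder is drained at a similar rate; if the output side is never drained, the codec's buffer pool fills up and dequeueInputBuffer() returns -1 indefinitely. A minimal recording-loop sketch, where isRecording is a hypothetical flag:
// Feed and drain in lockstep so the encoder's buffer pool never fills up.
while (isRecording) {
sendDataFromMic(false);
drainEncoder();
}
sendDataFromMic(true); // queue end-of-stream
drainEncoder(); // collect the remaining output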
Draining the encoder:
public void drainEncoder() {
ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
while (true) {
int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_NSECS);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
Log.d(TAG, "no output available, spinning to await EOS");
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
encoderOutputBuffers = encoder.getOutputBuffers();
else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = encoder.getOutputFormat();
Log.d(TAG, "encoder format changed: " + newFormat);
trackIndex = muxer.addTrack(newFormat);
} else if (muxer.isMuxerStarted()) {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null)
throw new RuntimeException("encoded buffer " + encoderStatus + " was null");
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
bufferInfo.size = 0;
}
if (bufferInfo.size != 0) {
encodedData.position(bufferInfo.offset);
encodedData.limit(bufferInfo.offset + bufferInfo.size);
muxer.writeSampleData(trackIndex, encodedData, bufferInfo);
Log.d(TAG, "sent " + bufferInfo.size + " bytes to muxer, ts=" +
bufferInfo.presentationTimeUs + " track index=" + trackIndex);
}
encoder.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "end of stream reached");
completed = true;
break; // out of while
}
}
}
}
The bug reproduces reliably on an HTC One and a Galaxy S3, but everything works fine on a Huawei Honor 3C.
After investigating the source code I found the solution: whenever an output buffer was dequeued before the muxer had started, that buffer was never released. The encoder only has a small, finite pool of buffers, so once they are all held the codec stalls and dequeueInputBuffer() keeps returning -1. Here is the working code for draining the encoder:
public void drainEncoder() {
ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
while (true) {
int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_NSECS);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
Log.d(TAG, "no output available, spinning to await EOS");
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
encoderOutputBuffers = encoder.getOutputBuffers();
else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = encoder.getOutputFormat();
Log.d(TAG, "encoder format changed: " + newFormat);
trackIndex = muxer.addTrack(newFormat);
} else if (muxer.isMuxerStarted()) {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null)
throw new RuntimeException("encoded buffer " + encoderStatus + " was null");
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
bufferInfo.size = 0;
}
if (bufferInfo.size != 0) {
encodedData.position(bufferInfo.offset);
encodedData.limit(bufferInfo.offset + bufferInfo.size);
muxer.writeSampleData(trackIndex, encodedData, bufferInfo);
Log.d(TAG, "sent " + bufferInfo.size + " bytes to muxer, ts=" +
bufferInfo.presentationTimeUs + " track index=" + trackIndex);
}
encoder.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "end of stream reached");
completed = true;
break; // out of while
}
}
else {
//Muxer not ready, release buffer
encoder.releaseOutputBuffer(encoderStatus, false);
Log.d(TAG, "muxer not ready, skip data");
}
}
}
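For completeness (not part of the original answer), a typical teardown once the end-of-stream buffer has been queued and fully drained, using the fields above:
// Stop capture first, then the codec; call stop() before release() on both.
audioRecord.stop();
audioRecord.release();
encoder.stop();
encoder.release();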

File converted from WAV to AMR with MediaCodec does not play

I am using MediaCodec to convert a .wav file to .amr. I have used the following code to feed input and fetch the encoded buffers. I get the encoded file, but it does not play. My input is fine, as I am able to play that file in Audacity. I am using EncodeDecodeTest.java from the Android API tests as a reference. Any pointers to the potential problem are appreciated.
MediaCodec codec = MediaCodec.createByCodecName(componentName);
try {
codec.configure(
format,
null /* surface */,
null /* crypto */,
MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (IllegalStateException e) {
AppLog.logString("codec '" + componentName + "' failed configuration.");
// assertTrue("codec '" + componentName + "' failed configuration.", false);
}
codec.start();
ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
int numBytesSubmitted = 0;
boolean doneSubmittingInput = false;
int numBytesDequeued = 0;
while (true) {
int index = 0;
if (!doneSubmittingInput) {
index = codec.dequeueInputBuffer(kTimeoutUs /* timeoutUs */);
if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
if (numBytesSubmitted >= filesize ){
codec.queueInputBuffer(
index,
0 /* offset */,
0 /* size */,
0 /* timeUs */,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
if (VERBOSE) {
AppLog.logString("queued input EOS.");
}
doneSubmittingInput = true;
} else {
int inputsize = 0;
int size = 0;
if ((filesize - numBytesSubmitted) > codecInputBuffers[index]
.limit()) {
inputsize = codecInputBuffers[index].limit();
} else
inputsize = (int) (filesize - numBytesSubmitted);
size = queueInputBuffer(codec,
codecInputBuffers, index, in, inputsize);
numBytesSubmitted += size;
/*if (VERBOSE) {
AppLog.logString("queued " + size + " bytes of input data.");
}*/
}
} else
AppLog.logString("MediaCodec.INFO_TRY_AGAIN_LATER.");
}
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
index = codec.dequeueOutputBuffer(info, kTimeoutUs /* timeoutUs */);
if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
} else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
AppLog.logString("INFO_OUTPUT_FORMAT_CHANGED");
MediaFormat fmt = codec.getOutputFormat();
AppLog.logString(fmt.getString(MediaFormat.KEY_MIME));
} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = codec.getOutputBuffers();
} else {
dequeueOutputBuffer(codec, codecOutputBuffers, index, info, fos);
numBytesDequeued += info.size;
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) {
AppLog.logString("dequeued output EOS.");
}
break;
}
if (VERBOSE) {
AppLog.logString("dequeued " + info.size + " bytes of output data.");
}
}
}
if (VERBOSE) {
AppLog.logString("queued a total of " + numBytesSubmitted + "bytes, "
+ "dequeued " + numBytesDequeued + " bytes.");
}
codec.release();
codec = null;
queueInputBuffer and dequeueOutputBuffer are implemented as follows:
private int queueInputBuffer(
MediaCodec codec, ByteBuffer[] inputBuffers, int index, FileInputStream in, int size) {
ByteBuffer buffer = inputBuffers[index];
buffer.clear();
// int size = buffer.limit();
AppLog.logString("Size is :" + size);
byte[] data = new byte[size];
try {
in.read(data,0,size);
} catch (IOException e) {
e.printStackTrace();
}
buffer.put(data);
buffer.flip();
codec.queueInputBuffer(index, 0 /* offset */, size, 0 /* timeUs */, MediaCodec.BUFFER_FLAG_SYNC_FRAME);
return size;
}
private void dequeueOutputBuffer(
MediaCodec codec, ByteBuffer[] outputBuffers,
int index, MediaCodec.BufferInfo info, FileOutputStream fos) {
byte[] data= new byte[info.size];
ByteBuffer out = outputBuffers[index];
out.get(data);
try {
fos.write(data);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
codec.releaseOutputBuffer(index, false /* render */);
}
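One detail the code above does not handle, offered as a guess rather than a confirmed fix: a standalone AMR-NB file must begin with the ASCII magic header "#!AMR\n" (RFC 4867), while MediaCodec emits only the raw encoded frames, so a player may reject the bare stream. A minimal sketch that writes the header before any encoded data, where outputPath is a hypothetical file path. (Separately, the queueInputBuffer() call above passes BUFFER_FLAG_SYNC_FRAME for raw PCM input, where 0 is the usual flag; that is worth double-checking too.)
// RFC 4867: single-channel AMR-NB files begin with the magic "#!AMR\n".
// MediaCodec outputs bare frames, so write the header once, up front.
FileOutputStream fos = new FileOutputStream(outputPath);
fos.write("#!AMR\n".getBytes());
// ...then append each encoded buffer via dequeueOutputBuffer(codec, codecOutputBuffers, index, info, fos);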
