I wanted to run ExtractDecodeEditEncodeMuxTest from https://android.googlesource.com/platform/cts/+/jb-mr2-release/tests/tests/media/src/android/media/cts/ExtractDecodeEditEncodeMuxTest.java.
I have edited the code so that it takes its input from the sdcard and writes its output to the sdcard, to simplify things. But the main while loop breaks right after the line outputSurface.awaitNewImage();, and decoding/encoding stops.
private String mInputFile = Environment.getExternalStorageDirectory().getAbsolutePath()+"/dingdong.mp4";
private String mOutputFile = Environment.getExternalStorageDirectory().getAbsolutePath()+"/compressed_output.mp4";
private String mOutputVideoMimeType;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
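// NOTE (my addition, not in the original CTS test): reading and writing the
// sdcard paths above needs the storage permissions. A minimal runtime check,
// assuming API 23+ and the permissions declared in the manifest:
if (checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE)
!= PackageManager.PERMISSION_GRANTED) {
requestPermissions(new String[] { Manifest.permission.WRITE_EXTERNAL_STORAGE }, 1);
return; // retry from onRequestPermissionsResult()
}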
try {
extractDecodeEditEncodeMux();
} catch (Exception e) {
Log.e(TAG,e.getMessage(),e);
}
}
private void extractDecodeEditEncodeMux() throws Exception {
// Exception that may be thrown during release.
Exception exception = null;
MediaCodecInfo videoCodecInfo = selectCodec(OUTPUT_VIDEO_MIME_TYPE);
if (videoCodecInfo == null) {
// Don't fail CTS if they don't have an AVC codec (not here, anyway).
Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_VIDEO_MIME_TYPE);
return;
}
if (VERBOSE) Log.d(TAG, "video found codec: " + videoCodecInfo.getName());
MediaCodecInfo audioCodecInfo = selectCodec(OUTPUT_AUDIO_MIME_TYPE);
if (audioCodecInfo == null) {
// Don't fail CTS if they don't have an AAC codec (not here, anyway).
Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_AUDIO_MIME_TYPE);
return;
}
if (VERBOSE) Log.d(TAG, "audio found codec: " + audioCodecInfo.getName());
MediaExtractor videoExtractor = null;
MediaExtractor audioExtractor = null;
OutputSurface outputSurface = null;
MediaCodec videoDecoder = null;
MediaCodec audioDecoder = null;
MediaCodec videoEncoder = null;
MediaCodec audioEncoder = null;
MediaMuxer muxer = null;
InputSurface inputSurface = null;
try {
if (mCopyVideo) {
videoExtractor = createExtractor();
int videoInputTrack = getAndSelectVideoTrackIndex(videoExtractor);
assertTrue("missing video track in test video", videoInputTrack != -1);
MediaFormat inputFormat = videoExtractor.getTrackFormat(videoInputTrack);
// We avoid the device-specific limitations on width and height by using values
// that are multiples of 16, which all tested devices seem to be able to handle.
MediaFormat outputVideoFormat =
MediaFormat.createVideoFormat(OUTPUT_VIDEO_MIME_TYPE, mOutputWidth, mOutputHeight);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
outputVideoFormat.setInteger(
MediaFormat.KEY_COLOR_FORMAT, OUTPUT_VIDEO_COLOR_FORMAT);
outputVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_VIDEO_BIT_RATE);
outputVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, OUTPUT_VIDEO_FRAME_RATE);
outputVideoFormat.setInteger(
MediaFormat.KEY_I_FRAME_INTERVAL, OUTPUT_VIDEO_IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "video format: " + outputVideoFormat);
// Create a MediaCodec for the desired codec, then configure it as an encoder with
// our desired properties. Request a Surface to use for input.
AtomicReference<Surface> inputSurfaceReference = new AtomicReference<Surface>();
videoEncoder = createVideoEncoder(
videoCodecInfo, outputVideoFormat, inputSurfaceReference);
inputSurface = new InputSurface(inputSurfaceReference.get());
inputSurface.makeCurrent();
// Create a MediaCodec for the decoder, based on the extractor's format.
outputSurface = new OutputSurface();
outputSurface.changeFragmentShader(FRAGMENT_SHADER);
videoDecoder = createVideoDecoder(inputFormat, outputSurface.getSurface());
}
if (mCopyAudio) {
audioExtractor = createExtractor();
int audioInputTrack = getAndSelectAudioTrackIndex(audioExtractor);
assertTrue("missing audio track in test video", audioInputTrack != -1);
MediaFormat inputFormat = audioExtractor.getTrackFormat(audioInputTrack);
MediaFormat outputAudioFormat = MediaFormat.createAudioFormat(OUTPUT_AUDIO_MIME_TYPE, OUTPUT_AUDIO_SAMPLE_RATE_HZ, OUTPUT_AUDIO_CHANNEL_COUNT);
outputAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_AUDIO_BIT_RATE);
outputAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, OUTPUT_AUDIO_AAC_PROFILE);
// Create a MediaCodec for the desired codec, then configure it as an encoder with
// our desired properties. Request a Surface to use for input.
audioEncoder = createAudioEncoder(audioCodecInfo, outputAudioFormat);
// Create a MediaCodec for the decoder, based on the extractor's format.
audioDecoder = createAudioDecoder(inputFormat);
}
// Create a muxer, but do not start it or add tracks just yet.
muxer = createMuxer();
doExtractDecodeEditEncodeMux(videoExtractor, audioExtractor, videoDecoder, videoEncoder, audioDecoder, audioEncoder, muxer, inputSurface, outputSurface);
} finally {
if (VERBOSE) Log.d(TAG, "releasing extractor, decoder, encoder, and muxer");
// Try to release everything we acquired, even if one of the releases fails, in which
// case we save the first exception we got and re-throw it at the end (unless some
// other exception has already been thrown). This guarantees the first exception thrown
// is reported as the cause of the error, every release is attempted, and
// all other exceptions appear in the logs.
try {
if (videoExtractor != null) {
videoExtractor.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing videoExtractor", e);
if (exception == null) {
exception = e;
}
}
try {
if (audioExtractor != null) {
audioExtractor.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing audioExtractor", e);
if (exception == null) {
exception = e;
}
}
try {
if (videoDecoder != null) {
videoDecoder.stop();
videoDecoder.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing videoDecoder", e);
if (exception == null) {
exception = e;
}
}
try {
if (outputSurface != null) {
outputSurface.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing outputSurface", e);
if (exception == null) {
exception = e;
}
}
try {
if (videoEncoder != null) {
videoEncoder.stop();
videoEncoder.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing videoEncoder", e);
if (exception == null) {
exception = e;
}
}
try {
if (audioDecoder != null) {
audioDecoder.stop();
audioDecoder.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing audioDecoder", e);
if (exception == null) {
exception = e;
}
}
try {
if (audioEncoder != null) {
audioEncoder.stop();
audioEncoder.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing audioEncoder", e);
if (exception == null) {
exception = e;
}
}
try {
if (muxer != null) {
muxer.stop();
muxer.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing muxer", e);
if (exception == null) {
exception = e;
}
}
try {
if (inputSurface != null) {
inputSurface.release();
}
} catch(Exception e) {
Log.e(TAG, "error while releasing inputSurface", e);
if (exception == null) {
exception = e;
}
}
}
if (exception != null) {
throw exception;
}
}
/**
* Creates an extractor that reads its frames from {@link #mInputFile}.
*/
private MediaExtractor createExtractor() throws IOException {
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(mInputFile);
return extractor;
}
/**
* Creates a decoder for the given format, which outputs to the given surface.
*
* @param inputFormat the format of the stream to decode
* @param surface the surface into which to decode the frames
*/
private MediaCodec createVideoDecoder(MediaFormat inputFormat, Surface surface) throws IOException {
MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
decoder.configure(inputFormat, surface, null, 0);
decoder.start();
return decoder;
}
private MediaCodec createVideoEncoder(
MediaCodecInfo codecInfo,
MediaFormat format,
AtomicReference<Surface> surfaceReference) throws IOException {
MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
// Must be called before start() is.
surfaceReference.set(encoder.createInputSurface());
encoder.start();
return encoder;
}
private MediaCodec createAudioDecoder(MediaFormat inputFormat) throws IOException {
MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
decoder.configure(inputFormat, null, null, 0);
decoder.start();
return decoder;
}
private MediaCodec createAudioEncoder(MediaCodecInfo codecInfo, MediaFormat format) throws IOException {
MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
encoder.start();
return encoder;
}
private MediaMuxer createMuxer() throws IOException {
return new MediaMuxer(mOutputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
private int getAndSelectVideoTrackIndex(MediaExtractor extractor) {
for (int index = 0; index < extractor.getTrackCount(); ++index) {
if (VERBOSE) {
Log.d(TAG, "format for track " + index + " is "
+ getMimeTypeFor(extractor.getTrackFormat(index)));
}
if (isVideoFormat(extractor.getTrackFormat(index))) {
extractor.selectTrack(index);
MediaFormat inputFormat = extractor.getTrackFormat(index);
mOutputWidth = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
mOutputHeight = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
return index;
}
}
return -1;
}
private int getAndSelectAudioTrackIndex(MediaExtractor extractor) {
for (int index = 0; index < extractor.getTrackCount(); ++index) {
if (VERBOSE) {
Log.d(TAG, "format for track " + index + " is "
+ getMimeTypeFor(extractor.getTrackFormat(index)));
}
if (isAudioFormat(extractor.getTrackFormat(index))) {
extractor.selectTrack(index);
return index;
}
}
return -1;
}
private void doExtractDecodeEditEncodeMux(MediaExtractor videoExtractor, MediaExtractor audioExtractor, MediaCodec videoDecoder, MediaCodec videoEncoder, MediaCodec audioDecoder, MediaCodec audioEncoder, MediaMuxer muxer, InputSurface inputSurface, OutputSurface outputSurface) {
ByteBuffer[] videoDecoderInputBuffers = null;
ByteBuffer[] videoDecoderOutputBuffers = null;
ByteBuffer[] videoEncoderOutputBuffers = null;
MediaCodec.BufferInfo videoDecoderOutputBufferInfo = null;
MediaCodec.BufferInfo videoEncoderOutputBufferInfo = null;
if (mCopyVideo) {
videoDecoderInputBuffers = videoDecoder.getInputBuffers();
videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
videoDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
videoEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
}
ByteBuffer[] audioDecoderInputBuffers = null;
ByteBuffer[] audioDecoderOutputBuffers = null;
ByteBuffer[] audioEncoderInputBuffers = null;
ByteBuffer[] audioEncoderOutputBuffers = null;
MediaCodec.BufferInfo audioDecoderOutputBufferInfo = null;
MediaCodec.BufferInfo audioEncoderOutputBufferInfo = null;
if (mCopyAudio) {
audioDecoderInputBuffers = audioDecoder.getInputBuffers();
audioDecoderOutputBuffers = audioDecoder.getOutputBuffers();
audioEncoderInputBuffers = audioEncoder.getInputBuffers();
audioEncoderOutputBuffers = audioEncoder.getOutputBuffers();
audioDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
audioEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
}
// We will get these from the decoders when notified of a format change.
MediaFormat decoderOutputVideoFormat = null;
MediaFormat decoderOutputAudioFormat = null;
// We will get these from the encoders when notified of a format change.
MediaFormat encoderOutputVideoFormat = null;
MediaFormat encoderOutputAudioFormat = null;
// We will determine these once we have the output format.
int outputVideoTrack = -1;
int outputAudioTrack = -1;
// Whether things are done on the video side.
boolean videoExtractorDone = false;
boolean videoDecoderDone = false;
boolean videoEncoderDone = false;
// Whether things are done on the audio side.
boolean audioExtractorDone = false;
boolean audioDecoderDone = false;
boolean audioEncoderDone = false;
// The audio decoder output buffer to process, -1 if none.
int pendingAudioDecoderOutputBufferIndex = -1;
boolean muxing = false;
int videoExtractedFrameCount = 0;
int videoDecodedFrameCount = 0;
int videoEncodedFrameCount = 0;
int audioExtractedFrameCount = 0;
int audioDecodedFrameCount = 0;
int audioEncodedFrameCount = 0;
while ((mCopyVideo && !videoEncoderDone) || (mCopyAudio && !audioEncoderDone)) {
//1: Extract video from file and feed to decoder.
// Do not extract video if we have determined the output format but we are not yet
// ready to mux the frames.
while (mCopyVideo && !videoExtractorDone && (encoderOutputVideoFormat == null || muxing)) {
int decoderInputBufferIndex = videoDecoder.dequeueInputBuffer(TIMEOUT_USEC);
if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no video decoder input buffer");
break;
}
if (VERBOSE)Log.d(TAG, "video decoder: returned input buffer: " + decoderInputBufferIndex);
ByteBuffer decoderInputBuffer = videoDecoderInputBuffers[decoderInputBufferIndex];
int size = videoExtractor.readSampleData(decoderInputBuffer, 0);
long presentationTime = videoExtractor.getSampleTime();
if (VERBOSE)Log.d(TAG, "video extractor: returned buffer of size " + size +" for time "+presentationTime);
if (size >= 0) videoDecoder.queueInputBuffer(decoderInputBufferIndex, 0, size, presentationTime,videoExtractor.getSampleFlags());
videoExtractorDone = !videoExtractor.advance();
if (videoExtractorDone) {
if (VERBOSE) Log.d(TAG, "video extractor: EOS");
videoDecoder.queueInputBuffer(decoderInputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
videoExtractedFrameCount++;
// We extracted a frame, let's try something else next.
break;
}
//3: Poll output frames from the video decoder and feed the encoder.
while (mCopyVideo && !videoDecoderDone && (encoderOutputVideoFormat == null || muxing)) {
int decoderOutputBufferIndex = videoDecoder.dequeueOutputBuffer(videoDecoderOutputBufferInfo, TIMEOUT_USEC);
if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no video decoder output buffer");
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (VERBOSE) Log.d(TAG, "video decoder: output buffers changed");
videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
decoderOutputVideoFormat = videoDecoder.getOutputFormat();
if (VERBOSE) Log.d(TAG, "video decoder: output format changed: " + decoderOutputVideoFormat);
break;
}
if (VERBOSE) {
Log.d(TAG, "video decoder: returned output buffer: " + decoderOutputBufferIndex);
Log.d(TAG, "video decoder: returned buffer of size " + videoDecoderOutputBufferInfo.size);
}
ByteBuffer decoderOutputBuffer = videoDecoderOutputBuffers[decoderOutputBufferIndex];
if ((videoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
if (VERBOSE) Log.d(TAG, "video decoder: codec config buffer");
videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
if (VERBOSE)Log.d(TAG, "video decoder: returned buffer for time " + videoDecoderOutputBufferInfo.presentationTimeUs);
boolean render = videoDecoderOutputBufferInfo.size != 0;
videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, render);
if (render) {
if (VERBOSE) Log.d(TAG, "output surface: await new image");
outputSurface.awaitNewImage();
// Edit the frame and send it to the encoder.
if (VERBOSE) Log.d(TAG, "output surface: draw image");
outputSurface.drawImage();
inputSurface.setPresentationTime(videoDecoderOutputBufferInfo.presentationTimeUs * 1000);
if (VERBOSE) Log.d(TAG, "input surface: swap buffers");
inputSurface.swapBuffers();
if (VERBOSE) Log.d(TAG, "video encoder: notified of new frame");
}
if ((videoDecoderOutputBufferInfo.flags
& MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "video decoder: EOS");
videoDecoderDone = true;
videoEncoder.signalEndOfInputStream();
}
videoDecodedFrameCount++;
// We extracted a pending frame, let's try something else next.
break;
}
//6: Poll frames from the video encoder and send them to the muxer.
while (mCopyVideo && !videoEncoderDone && (encoderOutputVideoFormat == null || muxing)) {
int encoderOutputBufferIndex = videoEncoder.dequeueOutputBuffer(videoEncoderOutputBufferInfo, TIMEOUT_USEC);
if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no video encoder output buffer");
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (VERBOSE) Log.d(TAG, "video encoder: output buffers changed");
videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (VERBOSE) Log.d(TAG, "video encoder: output format changed");
if (outputVideoTrack >= 0) {
fail("video encoder changed its output format again?");
}
encoderOutputVideoFormat = videoEncoder.getOutputFormat();
break;
}
assertTrue("should have added track before processing output", muxing);
if (VERBOSE) {
Log.d(TAG, "video encoder: returned output buffer: " + encoderOutputBufferIndex);
Log.d(TAG, "video encoder: returned buffer of size " + videoEncoderOutputBufferInfo.size);
}
ByteBuffer encoderOutputBuffer = videoEncoderOutputBuffers[encoderOutputBufferIndex];
if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
if (VERBOSE) Log.d(TAG, "video encoder: codec config buffer");
// Simply ignore codec config buffers.
videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
if (VERBOSE) Log.d(TAG, "video encoder: returned buffer for time " + videoEncoderOutputBufferInfo.presentationTimeUs);
if (videoEncoderOutputBufferInfo.size != 0) {
muxer.writeSampleData(outputVideoTrack, encoderOutputBuffer, videoEncoderOutputBufferInfo);
}
if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
!= 0) {
if (VERBOSE) Log.d(TAG, "video encoder: EOS");
videoEncoderDone = true;
}
videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
videoEncodedFrameCount++;
// We enqueued an encoded frame, let's try something else next.
break;
}
if (!muxing
&& (!mCopyAudio || encoderOutputAudioFormat != null)
&& (!mCopyVideo || encoderOutputVideoFormat != null)) {
if (mCopyVideo) {
Log.d(TAG, "muxer: adding video track.");
outputVideoTrack = muxer.addTrack(encoderOutputVideoFormat);
}
if (mCopyAudio) {
Log.d(TAG, "muxer: adding audio track.");
outputAudioTrack = muxer.addTrack(encoderOutputAudioFormat);
}
Log.d(TAG, "muxer: starting");
muxer.start();
muxing = true;
}
}
// Basic sanity checks.
if (mCopyVideo) {
assertEquals("encoded and decoded video frame counts should match",
videoDecodedFrameCount, videoEncodedFrameCount);
assertTrue("decoded frame count should be less than extracted frame count",
videoDecodedFrameCount <= videoExtractedFrameCount);
}
if (mCopyAudio) {
assertEquals("no frame should be pending", -1, pendingAudioDecoderOutputBufferIndex);
}
// TODO: Check the generated output file.
}
private static boolean isVideoFormat(MediaFormat format) {
return getMimeTypeFor(format).startsWith("video/");
}
private static boolean isAudioFormat(MediaFormat format) {
return getMimeTypeFor(format).startsWith("audio/");
}
private static String getMimeTypeFor(MediaFormat format) {
return format.getString(MediaFormat.KEY_MIME);
}
/**
* Returns the first codec capable of encoding the specified MIME type, or null if no match was
* found.
*/
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
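Aside: on API 21+ you can let the framework pick an encoder instead of iterating over MediaCodecList yourself. A minimal sketch (note the documented caveat that on API 21 the format passed to findEncoderForFormat() must not contain a frame rate):
MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
String encoderName = codecList.findEncoderForFormat(outputVideoFormat);
if (encoderName != null) {
videoEncoder = MediaCodec.createByCodecName(encoderName);
}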
I have found the solution. I should not call extractDecodeEditEncodeMux() directly inside the onCreate() method. As far as I can tell, outputSurface.awaitNewImage() waits for the SurfaceTexture onFrameAvailable callback, which is delivered on the main thread's Looper; when the whole pipeline also runs on the main thread, that Looper is blocked inside the wait, the callback is never dispatched, and the wait times out, which is why the loop broke. I need to create a separate thread and call extractDecodeEditEncodeMux() from that thread.
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Thread myThread = new Thread(new Runnable() {
@Override
public void run() {
try {
extractDecodeEditEncodeMux();
} catch (Exception e) {
e.printStackTrace();
}
}
});
myThread.start();
}
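If you need to touch the UI when the transcode finishes, hop back onto the main thread from the worker. A small variation of the same idea (MainActivity is a placeholder for whatever Activity hosts this code):
Thread myThread = new Thread(new Runnable() {
@Override
public void run() {
try {
extractDecodeEditEncodeMux();
} catch (Exception e) {
Log.e(TAG, "transcode failed", e);
}
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(MainActivity.this, "transcode finished", Toast.LENGTH_SHORT).show();
}
});
}
});
myThread.start();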
Related
I'm working on a screen recorder app. On Android 11, if the app is in the back stack when screen recording starts, only the screen-casting prompt is shown and the "app is using microphone" message is not displayed; on stopping the recording, only a thumbnail is shown and the video does not play because the file size is too small. This is my recording code, and this error occurs only on Android 11; other OS versions work fine.
public void startRecording() {
synchronized (sSync) {
Log.i(TAG, "startRecording: inside synchronized");
if (mMuxer == null) {
Log.i(TAG, "startRecording: muxer is null");
getScreenSize();
mMediaProjection = mMediaProjectionManager.getMediaProjection(mScreenCaptureResultCode, mScreenCaptureIntent);
DisplayManager dm = (DisplayManager) Objects.requireNonNull(MyApplication.Companion.getApplication()).getSystemService(Context.DISPLAY_SERVICE);
Display defaultDisplay;
if (dm != null) {
Log.i(TAG, "startRecording: dim is not null");
defaultDisplay = dm.getDisplay(Display.DEFAULT_DISPLAY);
} else {
throw new IllegalStateException("Cannot display manager?!?");
}
if (defaultDisplay == null) {
throw new RuntimeException("No display found.");
}
try {
mMuxer = new MediaMuxerWrapper(context, ".mp4"); // if you record audio only, ".m4a" is also OK.
if (true) {
VideoSetting videoSetting = SettingManager.getVideoProfile(context);
Log.i(TAG, "Video config: " + videoSetting);
mCurrentVideoSetting = videoSetting;
List<RenderUtil.CustomDecorator> decors = createDecorators();
// new TestMediaScreenEncoderHard(mMuxer, mMediaEncoderListener, mMediaProjection, mCurrentVideoSetting, mScreenDensity, decors);
if (MyUtils.isRunningOnEmulator()) {
new MediaScreenEncoder(mMuxer, mMediaEncoderListener, mMediaProjection, mCurrentVideoSetting, mScreenDensity, decors);
} else {
new MediaScreenEncoderHard(mMuxer, mMediaEncoderListener, mMediaProjection, mCurrentVideoSetting, mScreenDensity, decors);
}
}
if (true) {
// for audio capturing
new MediaAudioEncoder(mMuxer, mMediaEncoderListener);
Log.i(TAG, "startRecording: audio capture");
}
mMuxer.prepare();
mMuxer.startRecording();
} catch (final IOException e) {
Log.e(TAG, "startScreenRecord:", e);
}
}
}
This is my media encoder class
public class MediaAudioEncoder extends MediaEncoder {
public static final int SAMPLES_PER_FRAME = 1024; // AAC, bytes/frame/channel
public static final int FRAMES_PER_BUFFER = 25; // AAC, frame/buffer/sec
private static final String TAG = MediaAudioEncoder.class.getSimpleName();
private static final String MIME_TYPE = "audio/mp4a-latm";
private static final int SAMPLE_RATE = 44100; // 44.1[KHz] is only setting guaranteed to be available on all devices.
private static final int BIT_RATE = 64000;
private static final int[] AUDIO_SOURCES = new int[]{
MediaRecorder.AudioSource.CAMCORDER,
MediaRecorder.AudioSource.MIC,
MediaRecorder.AudioSource.DEFAULT,
MediaRecorder.AudioSource.VOICE_COMMUNICATION,
MediaRecorder.AudioSource.VOICE_RECOGNITION,
};
private AudioThread mAudioThread = null;
public MediaAudioEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
super(muxer, listener);
}
/**
* select the first codec that matches a specific MIME type
*
* @param mimeType the MIME type to match
* @return the first matching encoder info, or null if none was found
*/
private static final MediaCodecInfo selectAudioCodec(final String mimeType) {
// if (DEBUG) Log.i(TAG, "selectAudioCodec:");
MediaCodecInfo result = null;
// get the list of available codecs
final int numCodecs = MediaCodecList.getCodecCount();
LOOP:
for (int i = 0; i < numCodecs; i++) {
final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) { // skip decoders
continue;
}
final String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
// if (DEBUG) Log.i(TAG, "supportedType:" + codecInfo.getName() + ",MIME=" + types[j]);
if (types[j].equalsIgnoreCase(mimeType)) {
if (result == null) {
result = codecInfo;
break LOOP;
}
}
}
}
return result;
}
@Override
public void prepare() throws IOException {
// if (DEBUG) Log.i(TAG, "prepare:");
mTrackIndex = -1;
mMuxerStarted = mIsEOS = false;
// prepare MediaCodec for AAC encoding of audio data from internal mic.
final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
if (audioCodecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
return;
}
// if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());
final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
// if (DEBUG) Log.i(TAG, "format: " + audioFormat);
mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
// if (DEBUG) Log.i(TAG, "prepare finishing");
if (mListener != null) {
try {
mListener.onPrepared(this);
} catch (final Exception e) {
Log.e(TAG, "prepare:", e);
}
}
}
@Override
public void startRecording() {
super.startRecording();
// create and execute audio capturing thread using internal mic
if (mAudioThread == null) {
mAudioThread = new AudioThread();
mAudioThread.start();
}
}
@Override
protected void release() {
mAudioThread = null;
super.release();
}
/**
* Thread to capture audio data from internal mic as uncompressed 16bit PCM data
* and write them to the MediaCodec encoder
*/
private class AudioThread extends Thread {
#SuppressLint("MissingPermission")
#Override
public void run() {
try {
final int min_buffer_size = AudioRecord.getMinBufferSize(
SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
if (buffer_size < min_buffer_size)
buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
AudioRecord audioRecord = null;
for (final int source : AUDIO_SOURCES) {
try {
audioRecord = new AudioRecord(
source, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
if (audioRecord != null) {
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
audioRecord.release();
audioRecord = null;
}
}
} catch (final Exception e) {
audioRecord = null;
}
if (audioRecord != null) break;
}
if (audioRecord != null) {
try {
if (mIsEncoding) {
// if (DEBUG) Log.i(TAG, "AudioThread:start audio recording");
final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
int readBytes;
audioRecord.startRecording();
try {
for (; mIsEncoding && !mRequestStop && !mIsEOS; ) {
// read audio data from internal mic
buf.clear();
readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
if (readBytes > 0) {
// set audio data to encoder
buf.position(readBytes);
buf.flip();
encode(buf, readBytes, getPTSUs());
frameAvailableSoon();
}
}
frameAvailableSoon();
} finally {
audioRecord.stop();
}
}
} finally {
audioRecord.release();
}
} else {
Log.e(TAG, "failed to initialize AudioRecord");
}
} catch (final Exception e) {
Log.e(TAG, "AudioThread#run", e);
}
// if (DEBUG) Log.i(TAG, "AudioThread:finished");
}
}
}
These are my error logs
2022-06-09 09:40:56.785 21415-21616/com.example.recorderscreen E/IAudioFlinger: createRecord returned error -1
2022-06-09 09:40:56.785 21415-21616/com.example.recorderscreen E/AudioRecord: createRecord_l(436738312): AudioFlinger could not create record track, status: -1
2022-06-09 09:40:56.785 21415-21616/com.example.recorderscreen E/AudioRecord-JNI: Error creating AudioRecord instance: initialization check failed with status -1.
2022-06-09 09:40:56.785 21415-21616/com.example.recorderscreen E/android.media.AudioRecord: Error code -20 when initializing native AudioRecord object.
2022-06-09 09:40:56.785 21415-21616/com.example.recorderscreen E/MediaAudioEncoder: failed to initialize AudioRecord
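This looks like Android 11's restriction on microphone access for backgrounded apps: starting with API 30, AudioFlinger refuses to create a record track (status -1, exactly as in these logs) unless the capture happens while the app has a foreground service whose type includes microphone. A hedged sketch of the pieces involved, not a drop-in fix: declare android:foregroundServiceType="mediaProjection|microphone" on the recording service in the manifest (plus the FOREGROUND_SERVICE and RECORD_AUDIO permissions), and promote the service to the foreground before constructing the AudioRecord:
// Inside the recording Service, before any AudioRecord is created.
// buildRecordingNotification() and NOTIFICATION_ID are placeholders for your own notification code.
Notification notification = buildRecordingNotification();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
startForeground(NOTIFICATION_ID, notification,
ServiceInfo.FOREGROUND_SERVICE_TYPE_MEDIA_PROJECTION
| ServiceInfo.FOREGROUND_SERVICE_TYPE_MICROPHONE);
} else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
startForeground(NOTIFICATION_ID, notification,
ServiceInfo.FOREGROUND_SERVICE_TYPE_MEDIA_PROJECTION);
} else {
startForeground(NOTIFICATION_ID, notification);
}
With that in place, the "app is using microphone" indicator should appear again and the muxed file should contain the audio track.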
Firstly, thanks for taking the time to read this. I need some help or insights, as I'm facing a decoding issue with H.264 frames. I'm posting this question because most of the other related posts don't provide clear steps. I've shared the code for the decoder thread, which holds the main logic. I get the SPS, PPS and frame data from the network, so I set the mConfigured flag to true once the SPS and PPS have been received and the respective arrays filled. I can provide more info if needed.
PS - Please excuse the coding standards, as this is still a POC in development.
decoder.dequeueOutputBuffer >> always returns -1
//below function called from network when new packet received.
public void decodeAndPlayVideoFrame(byte[] encodedData, int frameType) {
if (frameType == 1) {
SPS = encodedData;
} else if (frameType == 2) {
PPS = encodedData;
} else if (frameType == 0) {
Log.d("EncodeDecode", "enqueueing frame no: " + (frameID));
try {
Frame frame = new Frame(frameID);
int totalDataLength = encodedData.length + SPS.length + PPS.length;
frame.frameData = new byte[totalDataLength];
System.arraycopy(SPS, 0, frame.frameData, 0, SPS.length);
System.arraycopy(PPS, 0, frame.frameData, SPS.length, PPS.length);
System.arraycopy(encodedData, 0, frame.frameData, SPS.length + PPS.length, encodedData.length);
queue.add(frame);
frameID++;
} catch (NullPointerException e) {
Log.e("EncodeDecode", "frame is null");
e.printStackTrace();
} catch (IllegalArgumentException e) {
Log.e("EncodeDecode", "problem inserting in the queue");
e.printStackTrace();
} catch (IllegalStateException e) {
Log.e("EncodeDecode", "problem inserting in the queue");
e.printStackTrace();
}
Log.d("EncodeDecode", "frame enqueued. queue size now: " + queue.size());
}
}
// Player thread starts as screen launches and process packets fed into queue.
private class PlayerThread extends Thread {
//private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
private boolean mConfigured;
public PlayerThread(Surface surface) {
this.surface = surface;
}
private void initCodec() throws IOException {
MediaFormat mediaFormat = null;
decoder = MediaCodec.createDecoderByType("video/avc");
mediaFormat = MediaFormat.createVideoFormat("video/avc",
320,
240);
try {
decoder.configure(mediaFormat,
surface,
null,
0);
frameID = 0;
mConfigured = true;
decoder.start();
Log.d("EncodeDecode", "DECODER_THREAD:: decoder.start() called");
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void run() {
while (SPS == null || PPS == null || SPS.length == 0 || PPS.length == 0) {
try {
Log.d("EncodeDecode", "DECODER_THREAD:: sps,pps not ready yet");
sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if (!mConfigured) {
try {
initCodec();
} catch (IOException e) {
e.printStackTrace();
}
}
int i = 0;
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
while (!Thread.interrupted()) {
Frame currentFrame = null;
try {
Log.d("EncodeDecode", "DECODER_THREAD:: calling queue.take(), if there is no frame in the queue it will wait");
currentFrame = queue.take();
} catch (InterruptedException e) {
Log.e("EncodeDecode", "DECODER_THREAD:: interrupted while PlayerThread was waiting for the next frame");
e.printStackTrace();
}
if (currentFrame == null)
Log.e("EncodeDecode", "DECODER_THREAD:: null frame dequeued");
else
Log.d("EncodeDecode", "DECODER_THREAD:: " + currentFrame.id + " no frame dequeued");
if (currentFrame != null && currentFrame.frameData != null && currentFrame.frameData.length != 0) {
Log.d("EncodeDecode", "DECODER_THREAD:: decoding frame no: " + i + " , dataLength = " + currentFrame.frameData.length);
int inIndex = decoder.dequeueInputBuffer(-1);
if (inIndex >= 0) {
Log.d("EncodeDecode", "DECODER_THREAD:: sample size: " + currentFrame.frameData.length + "->" + currentFrame.frameData[0] + "to" + currentFrame.frameData[currentFrame.frameData.length-1]);
ByteBuffer buffer = inputBuffers[inIndex];
buffer.clear();
buffer.put(currentFrame.frameData);
decoder.queueInputBuffer(inIndex, 0, currentFrame.frameData.length, 0, 0);
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int outIndex = decoder.dequeueOutputBuffer(info, 0);
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.e("EncodeDecode", "DECODER_THREAD:: INFO_OUTPUT_BUFFERS_CHANGED");
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.e("EncodeDecode", "DECODER_THREAD:: New format " + decoder.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.e("EncodeDecode", "DECODER_THREAD:: dequeueOutputBuffer timed out!");
break;
default:
Log.d("EncodeDecode", "DECODER_THREAD:: decoded SUCCESSFULLY!!!");
ByteBuffer outbuffer = outputBuffers[outIndex];
decoder.releaseOutputBuffer(outIndex, true);
break;
}
try {
Thread.sleep(250);
} catch (InterruptedException e) {
e.printStackTrace();
}
i++;
}
}
}
decoder.stop();
decoder.release();
mConfigured = false;
}
}
I don't know how you get the SPS and PPS or how you check them... anyway, you have to set the SPS and PPS on the MediaFormat before starting the decoder. In your code, you can do something like:
....
while (SPS == null || PPS == null || SPS.length == 0 || PPS.length == 0) {
try {
Log.d("EncodeDecode", "DECODER_THREAD:: sps,pps not ready yet");
sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if (!mConfigured) {
try {
// init with SPS and PPS
initCodec(sps,pps);
} catch (IOException e) {
e.printStackTrace();
}
}
.....
And then set the codec-specific data in initCodec before starting the decoder:
private void initCodec(byte[] sps, byte[] pps) throws IOException {
MediaFormat mediaFormat = null;
decoder = MediaCodec.createDecoderByType("video/avc");
mediaFormat = MediaFormat.createVideoFormat("video/avc",
320,
240);
// set the codec specific data: csd-0 = SPS, csd-1 = PPS
mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(sps));
mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(pps));
try {
decoder.configure(mediaFormat,
surface,
null,
0);
frameID = 0;
mConfigured = true;
decoder.start();
Log.d("EncodeDecode", "DECODER_THREAD:: decoder.start() called");
} catch (Exception e) {
e.printStackTrace();
}
}
...........
Update: From the doc
...Many decoders require the actual compressed data stream to be preceded
by "codec specific data", i.e. setup data used to initialize the codec
such as PPS/SPS in the case of AVC video or code tables in the case of
vorbis audio. The class MediaExtractor provides codec specific data as
part of the returned track format in entries named "csd-0", "csd-1"
...
These buffers can be submitted directly after start() or flush() by
specifying the flag BUFFER_FLAG_CODEC_CONFIG. However, if you
configure the codec with a MediaFormat containing these keys, they
will be automatically submitted by MediaCodec directly after start.
Therefore, the use of BUFFER_FLAG_CODEC_CONFIG flag is discouraged and
is recommended only for advanced users.
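For completeness, the "advanced" route the doc mentions is to push the SPS and PPS through the normal input path, flagged as codec config, right after start(). A minimal sketch against the question's own fields (assuming SPS and PPS each begin with their Annex-B start code):
// after decoder.start(), before queueing any video frames:
for (byte[] chunk : new byte[][] { SPS, PPS }) {
int inIndex = decoder.dequeueInputBuffer(-1);
ByteBuffer buffer = inputBuffers[inIndex];
buffer.clear();
buffer.put(chunk);
decoder.queueInputBuffer(inIndex, 0, chunk.length, 0, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
}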
I am working on an Android app which renders data from a drone. I am able to render the raw frames on a SurfaceView after decoding them with the DJIVideoStreamDecoder class. I want the YUV frames from the decoder class, i.e. DJIVideoStreamDecoder, but the problem is I am not getting continuous YUV frames from the YUV listener. As per the docs we need to set
DJIVideoStreamDecoder.getInstance().changeSurface(null); it works for a few frames and then it stops producing the YUV data. Let me paste the decoding class which gives me the YUV frames.
private void initCodec() {
if (width == 0 || height == 0) {
return;
}
Log.e("OBJ","codec inside initcodec"+codec);
if (codec != null) {
releaseCodec();
}
loge("initVideoDecoder----------------------------------------------------------");
loge("initVideoDecoder video width = " + width + " height = " + height);
// create the media format
MediaFormat format = MediaFormat.createVideoFormat(VIDEO_ENCODING_FORMAT, width, height);
if (surface == null) {
Log.i(TAG,"initVideoDecoder: yuv output");
// The surface is null, which means that the yuv data is needed, so the color format should
// be set to YUV420.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
} else {
Log.i(TAG,"initVideoDecoder: display");
// The surface is set, so the color format should be set to format surface.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
}
try {
// Create the codec instance.
codec = MediaCodec.createDecoderByType(VIDEO_ENCODING_FORMAT);
Log.i(TAG, "initVideoDecoder create: " + (codec == null));
// Configure the codec. What should be noted here is that the hardware decoder will not output
// any yuv data if a surface is configured, which means that if you want the yuv frames you
// should pass a "null" surface when calling the "configure" method of MediaCodec.
codec.configure(format, surface, null, 0);
Log.i(TAG, "initVideoDecoder configure");
// codec.configure(format, null, null, 0);
if (codec == null) {
Log.e(TAG, "Can't find video info!");
return;
}
// Start the codec
codec.start();
Log.i(TAG, "initVideoDecoder start");
// Get the input and output buffers of hardware decoder
inputBuffers = codec.getInputBuffers();
outputBuffers = codec.getOutputBuffers();
Log.i(TAG, "initVideoDecoder get buffers");
} catch (Exception e) {
Log.i(TAG, "init codec failed, do it again: "+ e.getMessage());
if (e instanceof MediaCodec.CodecException) {
MediaCodec.CodecException ce = (MediaCodec.CodecException) e;
ce.printStackTrace();
}
e.printStackTrace();
}
}
And decoder class :
private void decodeFrame() throws Exception {
DJIFrame inputFrame = frameQueue.poll();
if (inputFrame == null) {
return;
}
if (codec == null) {
if (dataHandler != null && !dataHandler.hasMessages(MSG_INIT_CODEC)) {
dataHandler.sendEmptyMessage(MSG_INIT_CODEC);
}
}
int inIndex = -1;
// Get input buffer index of the MediaCodec.
for (int i = 0; i < CODEC_DEQUEUE_INPUT_QUEUE_RETRY && inIndex < 0; i ++) {
try {
inIndex = codec.dequeueInputBuffer(0);
} catch (IllegalStateException e) {
logd(TAG, "decodeFrame: dequeue input: " + e);
codec.stop();
codec.reset();
initCodec();
e.printStackTrace();
}
}
logd(TAG, "decodeFrame: index=" + inIndex);
Log.e("OBJ","index "+inIndex);
// Decode the frame using MediaCodec
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
buffer.clear();
buffer.rewind();
buffer.put(inputFrame.videoBuffer);
inputFrame.fedIntoCodecTime = System.currentTimeMillis();
long queueingDelay = inputFrame.getQueueDelay();
logd("input frame delay: " + queueingDelay);
// Feed the frame data to the decoder.
codec.queueInputBuffer(inIndex, 0, inputFrame.size, inputFrame.pts, 0);
hasIFrameInCodec = true;
// Get the output data from the decoder.
int outIndex = -1;
outIndex = codec.dequeueOutputBuffer(bufferInfo, 0);
Log.e("OBJ","Outputindex"+outIndex);
logd(TAG, "decodeFrame: outIndex: " + outIndex);
if (outIndex >= 0) {
if ( surface == null && yuvDataListener != null) {
//if (yuvDataListener != null) {
// If the surface is null, the yuv data should be read from the buffer and the callback invoked.
logd("decodeFrame: need callback");
ByteBuffer yuvDataBuf = outputBuffers[outIndex];
yuvDataBuf.position(bufferInfo.offset);
yuvDataBuf.limit(bufferInfo.size - bufferInfo.offset);
final byte[] bytes = new byte[bufferInfo.size - bufferInfo.offset];
yuvDataBuf.get(bytes);
callbackHandler.post(new Runnable() {
@Override
public void run() {
yuvDataListener.onYuvDataReceived(bytes, width, height);
}
});
}
// Every output buffer must be released, whether the yuv data was used or
// not, so that the codec can reuse the buffer.
codec.releaseOutputBuffer(outIndex, true);
} else if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// The output buffer set is changed. So the decoder should be reinitialized and the
// output buffers should be retrieved.
long curTime = System.currentTimeMillis();
bufferChangedQueue.addLast(curTime);
if (bufferChangedQueue.size() >= 10) {
long headTime = bufferChangedQueue.pollFirst();
if (curTime - headTime < 1000) {
// reset decoder
loge("Reset decoder. Get INFO_OUTPUT_BUFFERS_CHANGED more than 10 times within OnReceive second.");
bufferChangedQueue.clear();
dataHandler.removeCallbacksAndMessages(null);
dataHandler.sendEmptyMessage(MSG_INIT_CODEC);
return;
}
}
if (outputBuffers == null) {
return;
}
outputBuffers = codec.getOutputBuffers();
} else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
loge("format changed, color: " + codec.getOutputFormat().getInteger(MediaFormat.KEY_COLOR_FORMAT));
}
}
}
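One thing I would try (an assumption on my part, not a confirmed fix): drain every output buffer that is ready instead of calling dequeueOutputBuffer() exactly once per input frame. With a timeout of 0, a single call frequently returns -1 (INFO_TRY_AGAIN_LATER) even though a frame becomes available an instant later, so undelivered frames pile up. A sketch of the drain loop, reusing the fields from the class above:
int outIndex;
while ((outIndex = codec.dequeueOutputBuffer(bufferInfo, 0)) >= 0) {
// ... copy the yuv bytes out of outputBuffers[outIndex] and post the callback, as above ...
codec.releaseOutputBuffer(outIndex, false); // no surface in yuv mode, so don't render
}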
I'm developing a video editing app, but I've encountered some problems. I want to rearrange the video frames, and I have an idea: first decode the video, store the decoded frame data in a list, rearrange it, and generate a new video.
I used the MediaCodec class from the Android framework to do the codec work, but I'm not familiar with it. Following this idea, I wrote the code below:
public class VideoProcesser {
private static final String TAG = "VideoProcesser";
private static final File OUTPUT_FILENAME_DIR = Environment.getExternalStorageDirectory();
private static final String OUTPUT_VIDEO_MIME_TYPE = "video/avc";
private static final int OUTPUT_VIDEO_BIT_RATE = 2000000; // 2Mbps
private static final int OUTPUT_VIDEO_FRAME_RATE = 15; // 15fps
private static final int OUTPUT_VIDEO_IFRAME_INTERVAL = 10; // 10 seconds between I-frames
private Context mContext;
public VideoProcesser(Context mContext) {
this.mContext = mContext;
}
public void startProcessing() {
VideoChunks longChunks = generateVideoChunks(new IntegerPair(0, 180));
VideoChunks shortChunks = generateVideoChunks(new IntegerPair(30, 60));
VideoChunks dstChunks = new VideoChunks();
dstChunks.addChunks(longChunks);
dstChunks.addChunks(shortChunks);
saveChunksToFile(dstChunks, new File(OUTPUT_FILENAME_DIR, "sample_processed.mp4"));
}
private void saveChunksToFile(VideoChunks inputData, File file) {
MediaMuxer muxer = null;
MediaCodec videoEncoder = null;
try {
MediaFormat outputFormat =
MediaFormat.createVideoFormat(OUTPUT_VIDEO_MIME_TYPE, 1280, 720);
outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_VIDEO_BIT_RATE);
outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, OUTPUT_VIDEO_FRAME_RATE);
outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, OUTPUT_VIDEO_IFRAME_INTERVAL);
MediaCodecInfo videoEncoderInfo = selectCodec(getMimeTypeFor(outputFormat));
int colorFormat = selectColorFormat(videoEncoderInfo, OUTPUT_VIDEO_MIME_TYPE);
outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
videoEncoder = MediaCodec.createByCodecName(videoEncoderInfo.getName());
videoEncoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
videoEncoder.start();
muxer = new MediaMuxer(file.getAbsolutePath(),
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
doSaveVideoChunks(inputData, videoEncoder, muxer);
} catch (IOException e) {
e.printStackTrace();
} finally {
Log.d(TAG, "shutting down encoder, muxer");
if (videoEncoder != null) {
videoEncoder.stop();
videoEncoder.release();
}
if (muxer != null) {
muxer.stop();
muxer.release();
}
}
}
private void doSaveVideoChunks(VideoChunks inputData, MediaCodec videoEncoder,
MediaMuxer muxer) {
final int TIMEOUT_USEC = 10000;
ByteBuffer[] videoEncoderInputBuffers = videoEncoder.getInputBuffers();
ByteBuffer[] videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
MediaCodec.BufferInfo videoEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
int inputChunk = 0;
int videoEncodedFrameCount = 0;
boolean inputDone = false;
boolean videoEncodeDone = false;
boolean muxing = false;
MediaFormat encoderOutputVideoFormat = null;
int outputVideoTrack = -1;
while (!videoEncodeDone) {
Log.d(TAG, String.format("save loop: inputChunk:%s encoded:%s", inputChunk,
videoEncodedFrameCount));
if (!inputDone && (encoderOutputVideoFormat == null || muxing)) {
int encoderInputStatus = videoEncoder.dequeueInputBuffer(TIMEOUT_USEC);
if (encoderInputStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
Log.d(TAG, "no video encoder input buffer");
continue;
}
long time = computePresentationTime(inputChunk);
if (inputChunk == inputData.getNumChunks()) {
// End of stream -- send empty frame with EOS flag set.
videoEncoder.queueInputBuffer(encoderInputStatus, 0, 0, 0L,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
Log.d(TAG, "input data EOS");
} else {
ByteBuffer encoderInputBuffer = videoEncoderInputBuffers[encoderInputStatus];
encoderInputBuffer.clear();
inputData.getChunkData(inputChunk, encoderInputBuffer);
videoEncoder.queueInputBuffer(encoderInputStatus, 0,
encoderInputBuffer.position(), time,
inputData.getChunkFlags(inputChunk));
inputChunk++;
}
}
if (!videoEncodeDone && (encoderOutputVideoFormat == null || muxing)) {
int encoderOutputStatus =
videoEncoder.dequeueOutputBuffer(videoEncoderOutputBufferInfo,
TIMEOUT_USEC);
if (encoderOutputStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
Log.d(TAG, "no video encoder output buffer");
} else if (encoderOutputStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
Log.d(TAG, "video encoder: output buffers changed");
videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
} else if (encoderOutputStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
encoderOutputVideoFormat = videoEncoder.getOutputFormat();
Log.d(TAG, "video encoder: output media format changed"
+ encoderOutputVideoFormat);
outputVideoTrack = muxer.addTrack(encoderOutputVideoFormat);
muxing = true;
muxer.start();
} else if (encoderOutputStatus < 0) {
//ignore it
} else {
ByteBuffer encoderOutputBuffer = videoEncoderOutputBuffers[encoderOutputStatus];
if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
!= 0) {
Log.d(TAG, "video encoder: codec config buffer");
videoEncoder.releaseOutputBuffer(encoderOutputStatus, false);
continue;
// Simply ignore codec config buffers.
}
if (videoEncoderOutputBufferInfo.size != 0) {
muxer.writeSampleData(outputVideoTrack, encoderOutputBuffer,
videoEncoderOutputBufferInfo);
}
if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
!= 0) {
Log.d(TAG, "video encoder: EOS");
videoEncodeDone = true;
}
videoEncoder.releaseOutputBuffer(encoderOutputStatus, false);
videoEncodedFrameCount++;
}
}
}
}
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
for (int i = 0; i < capabilities.colorFormats.length; i++) {
int colorFormat = capabilities.colorFormats[i];
if (isRecognizedFormat(colorFormat)) {
return colorFormat;
}
}
Log.e(TAG,
"couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
return 0; // no recognized color format found
}
private static boolean isRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle for this test
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
return true;
default:
return false;
}
}
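// Maps a frame index to a presentation timestamp in microseconds:
// one frame every 1000000 / OUTPUT_VIDEO_FRAME_RATE us, plus an arbitrary 132 us offset.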
private static long computePresentationTime(int frameIndex) {
return 132 + frameIndex * 1000000 / OUTPUT_VIDEO_FRAME_RATE;
}
private VideoChunks generateVideoChunks(IntegerPair segment) {
VideoChunks outputData = new VideoChunks();
MediaExtractor videoExtractor = null;
MediaCodec videoDecoder = null;
try {
videoExtractor = createVideoExtractor(R.raw.sample);
int videoInputTrack = getAndSelectVideoTrackIndex(videoExtractor);
MediaFormat videoInputFormat = videoExtractor.getTrackFormat(videoInputTrack);
videoDecoder = createVideoDecoder(videoInputFormat);
doGenerateVideoChunks(segment, videoExtractor, videoDecoder, outputData);
} catch (IOException e) {
e.printStackTrace();
} finally {
Log.d(TAG, "shutting down extractor, decoder");
if (videoExtractor != null) {
videoExtractor.release();
}
if (videoDecoder != null) {
videoDecoder.stop();
videoDecoder.release();
}
}
return outputData;
}
private void doGenerateVideoChunks(IntegerPair segment, MediaExtractor videoExtractor,
MediaCodec videoDecoder, VideoChunks outputData) {
final int TIMEOUT_USEC = 10000;
ByteBuffer[] videoDecoderInputBuffers = videoDecoder.getInputBuffers();
ByteBuffer[] videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
MediaCodec.BufferInfo videoDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
int videoExtractedFrameCount = 0;
int videoDecodedFrameCount = 0;
boolean videoExtracted = false;
boolean videoDecoded = false;
while (true) {
Log.d(TAG, String.format("loop: extracted:%s decoded:%s", videoExtractedFrameCount,
videoDecodedFrameCount));
while (!videoExtracted) {
int decoderInputStatus = videoDecoder.dequeueInputBuffer(TIMEOUT_USEC);
if (decoderInputStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
Log.d(TAG, "no video decoder input buffer");
break;
}
ByteBuffer decoderInputBuffer = videoDecoderInputBuffers[decoderInputStatus];
int size = videoExtractor.readSampleData(decoderInputBuffer, 0);
long presentationTime = videoExtractor.getSampleTime();
int flags = videoExtractor.getSampleFlags();
if (size >= 0) {
videoDecoder.queueInputBuffer(decoderInputStatus, 0, size, presentationTime,
flags);
}
videoExtracted = !videoExtractor.advance();
if (videoExtracted) {
Log.d(TAG, "video extractor: EOS");
videoDecoder.queueInputBuffer(decoderInputStatus, 0, 0, 0L,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
videoExtractedFrameCount++;
break;
}
while (!videoDecoded) {
int decoderOutputStatus =
videoDecoder.dequeueOutputBuffer(videoDecoderOutputBufferInfo,
TIMEOUT_USEC);
if (decoderOutputStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
Log.d(TAG, "no video decoder output buffer");
break;
}
if (decoderOutputStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
Log.d(TAG, "video decoder: output buffers changed");
videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
break;
}
if (decoderOutputStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = videoDecoder.getOutputFormat();
Log.d(TAG, "video decoder: output format changed: " + newFormat);
break;
}
ByteBuffer decoderOutputBuffer = videoDecoderOutputBuffers[decoderOutputStatus];
if ((videoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
!= 0) {
Log.d(TAG, "video decoder: codec config buffer");
videoDecoder.releaseOutputBuffer(decoderOutputStatus, false);
break;
}
boolean render = videoDecoderOutputBufferInfo.size != 0;
if (render) {
if (segment.contains(videoDecodedFrameCount)) {
outputData.addChunk(decoderOutputBuffer, videoDecoderOutputBufferInfo.flags,
videoDecoderOutputBufferInfo.presentationTimeUs);
}
}
videoDecoder.releaseOutputBuffer(decoderOutputStatus, render);
if ((videoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
!= 0) {
Log.d(TAG, "video decoder: EOS");
videoDecoded = true;
}
videoDecodedFrameCount++;
break;
}
if (videoDecoded) {
break;
}
}
}
private MediaCodec createVideoDecoder(MediaFormat inputFormat) throws IOException {
MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
decoder.configure(inputFormat, null, null, 0);
decoder.start();
return decoder;
}
private MediaExtractor createVideoExtractor(@RawRes int resId) throws IOException {
MediaExtractor extractor = new MediaExtractor();
AssetFileDescriptor srcFd = mContext.getResources().openRawResourceFd(resId);
extractor.setDataSource(srcFd.getFileDescriptor(), srcFd.getStartOffset(),
srcFd.getLength());
return extractor;
}
private int getAndSelectVideoTrackIndex(MediaExtractor extractor) {
for (int index = 0; index < extractor.getTrackCount(); ++index) {
Log.d(TAG, "format for track " + index + " is " + getMimeTypeFor(
extractor.getTrackFormat(index)));
if (isVideoFormat(extractor.getTrackFormat(index))) {
extractor.selectTrack(index);
return index;
}
}
return -1;
}
private static String getMimeTypeFor(MediaFormat format) {
return format.getString(MediaFormat.KEY_MIME);
}
private static boolean isVideoFormat(MediaFormat format) {
return getMimeTypeFor(format).startsWith("video/");
}
}
However, the video I got has lost some of the original video data. Can you help me find what's wrong with the above code, or share any good ideas?
This is my full code.
edit1: I found the problem. The size of the output video not being set correctly was the key to the problem, so I kept the same size as the original video and everything is OK. Lastly, don't forget the orientation of the video, otherwise your output video may look a bit strange.
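For anyone hitting the same thing, a minimal sketch of carrying the source size and rotation over to the output (KEY_ROTATION requires API 23; the muxer hint works from API 18; variable names follow the code above):
MediaFormat inputFormat = videoExtractor.getTrackFormat(videoInputTrack);
int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
MediaFormat outputFormat = MediaFormat.createVideoFormat(OUTPUT_VIDEO_MIME_TYPE, width, height);
int rotation = inputFormat.containsKey(MediaFormat.KEY_ROTATION)
? inputFormat.getInteger(MediaFormat.KEY_ROTATION) : 0;
muxer.setOrientationHint(rotation); // must be called before muxer.start()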
I'm developing a feature that transforms one video into another, with additional effects applied to each frame. I decided to use OpenGL ES for applying the effects to each frame. My input and output videos are MP4 files using the H.264 codec.
I use the MediaCodec API (Android API 18+) to decode the H.264 stream into an OpenGL texture, then draw onto the surface using this texture with my shader.
I assumed that using MediaCodec with H.264 would give me hardware decoding on Android and that it would be fast, but it appears that it is not: re-encoding a small 432x240, 15-second video consumed 28 seconds of total time!
Please take a look at my code plus the profile information below, and share some advice or criticism if I'm doing something wrong.
My code:
private void editVideoFile()
{
if (VERBOSE)
{
Log.d(TAG, "editVideoFile " + mWidth + "x" + mHeight);
}
MediaCodec decoder = null;
MediaCodec encoder = null;
InputSurface inputSurface = null;
OutputSurface outputSurface = null;
try
{
File inputFile = new File(FILES_DIR, INPUT_FILE); // must be an absolute path
// The MediaExtractor error messages aren't very useful. Check to see if the input
// file exists so we can throw a better one if it's not there.
if (!inputFile.canRead())
{
throw new FileNotFoundException("Unable to read " + inputFile);
}
extractor = new MediaExtractor();
extractor.setDataSource(inputFile.toString());
int trackIndex = inVideoTrackIndex = selectTrack(extractor);
if (trackIndex < 0)
{
throw new RuntimeException("No video track found in " + inputFile);
}
extractor.selectTrack(trackIndex);
MediaFormat inputFormat = extractor.getTrackFormat(trackIndex);
mWidth = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
mHeight = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
if (VERBOSE)
{
Log.d(TAG, "Video size is " + mWidth + "x" + mHeight);
}
// Create an encoder format that matches the input format. (Might be able to just
// re-use the format used to generate the video, since we want it to be the same.)
MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
outputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
getFormatValue(inputFormat, MediaFormat.KEY_BIT_RATE, BIT_RATE));
outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
getFormatValue(inputFormat, MediaFormat.KEY_FRAME_RATE, FRAME_RATE));
outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
getFormatValue(inputFormat,MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL));
try
{
encoder = MediaCodec.createEncoderByType(MIME_TYPE);
}
catch (IOException iex)
{
throw new RuntimeException(iex);
}
encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
inputSurface = new InputSurface(encoder.createInputSurface());
inputSurface.makeCurrent();
encoder.start();
// Output filename. Ideally this would use Context.getFilesDir() rather than a
// hard-coded output directory.
String outputPath = new File(OUTPUT_DIR,
"transformed-" + mWidth + "x" + mHeight + ".mp4").toString();
Log.d(TAG, "output file is " + outputPath);
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
try
{
mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
catch (IOException ioe)
{
throw new RuntimeException("MediaMuxer creation failed", ioe);
}
mTrackIndex = -1;
mMuxerStarted = false;
// OutputSurface uses the EGL context created by InputSurface.
try
{
decoder = MediaCodec.createDecoderByType(MIME_TYPE);
}
catch (IOException iex)
{
throw new RuntimeException(iex);
}
outputSurface = new OutputSurface();
outputSurface.changeFragmentShader(FRAGMENT_SHADER);
decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
decoder.start();
editVideoData(decoder, outputSurface, inputSurface, encoder);
}
catch (Exception ex)
{
Log.e(TAG, "Error processing", ex);
throw new RuntimeException(ex);
}
finally
{
if (VERBOSE)
{
Log.d(TAG, "shutting down encoder, decoder");
}
if (outputSurface != null)
{
outputSurface.release();
}
if (inputSurface != null)
{
inputSurface.release();
}
if (encoder != null)
{
encoder.stop();
encoder.release();
}
if (decoder != null)
{
decoder.stop();
decoder.release();
}
if (mMuxer != null)
{
mMuxer.stop();
mMuxer.release();
mMuxer = null;
}
}
}
/**
* Selects the video track, if any.
*
* @return the track index, or -1 if no video track is found.
*/
private int selectTrack(MediaExtractor extractor)
{
// Select the first video track we find, ignore the rest.
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; i++)
{
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/"))
{
if (VERBOSE)
{
Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
}
return i;
}
}
return -1;
}
/**
* Edits a stream of video data.
*/
private void editVideoData(MediaCodec decoder,
OutputSurface outputSurface, InputSurface inputSurface, MediaCodec encoder)
{
final int TIMEOUT_USEC = 10000;
ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int inputChunk = 0;
boolean outputDone = false;
boolean inputDone = false;
boolean decoderDone = false;
while (!outputDone)
{
if (VERBOSE)
{
Log.d(TAG, "edit loop");
}
// Feed more data to the decoder.
if (!inputDone)
{
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0)
{
ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
// Read the sample data into the ByteBuffer. This neither respects nor
// updates inputBuf's position, limit, etc.
int chunkSize = extractor.readSampleData(inputBuf, 0);
if (chunkSize < 0)
{
// End of stream -- send empty frame with EOS flag set.
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
if (VERBOSE)
{
Log.d(TAG, "sent input EOS");
}
}
else
{
if (extractor.getSampleTrackIndex() != inVideoTrackIndex)
{
Log.w(TAG, "WEIRD: got sample from track " +
extractor.getSampleTrackIndex() + ", expected " + inVideoTrackIndex);
}
long presentationTimeUs = extractor.getSampleTime();
decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
presentationTimeUs, 0 /*flags*/);
if (VERBOSE)
{
Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
chunkSize);
}
inputChunk++;
extractor.advance();
}
}
else
{
if (VERBOSE)
{
Log.d(TAG, "input buffer not available");
}
}
}
// Assume output is available. Loop until both assumptions are false.
boolean decoderOutputAvailable = !decoderDone;
boolean encoderOutputAvailable = true;
while (decoderOutputAvailable || encoderOutputAvailable)
{
// Start by draining any pending output from the encoder. It's important to
// do this before we try to stuff any more data in.
int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER)
{
// no output available yet
if (VERBOSE)
{
Log.d(TAG, "no output from encoder available");
}
encoderOutputAvailable = false;
}
else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
{
encoderOutputBuffers = encoder.getOutputBuffers();
if (VERBOSE)
{
Log.d(TAG, "encoder output buffers changed");
}
}
else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)
{
if (mMuxerStarted)
{
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = encoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
}
else if (encoderStatus < 0)
{
throw new RuntimeException("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
}
else
{ // encoderStatus >= 0
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null)
{
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0)
{
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE)
{
Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
}
info.size = 0;
}
// Write the data to the output "file".
if (info.size != 0)
{
if (!mMuxerStarted)
{
throw new RuntimeException("muxer hasn't started");
}
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(info.offset);
encodedData.limit(info.offset + info.size);
mMuxer.writeSampleData(mTrackIndex, encodedData, info);
if (VERBOSE)
{
Log.d(TAG, "sent " + info.size + " bytes to muxer");
}
}
outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
encoder.releaseOutputBuffer(encoderStatus, false);
}
if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER)
{
// Continue attempts to drain output.
continue;
}
// Encoder is drained, check to see if we've got a new frame of output from
// the decoder. (The output is going to a Surface, rather than a ByteBuffer,
// but we still get information through BufferInfo.)
if (!decoderDone)
{
int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER)
{
// no output available yet
if (VERBOSE)
{
Log.d(TAG, "no output from decoder available");
}
decoderOutputAvailable = false;
}
else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
{
//decoderOutputBuffers = decoder.getOutputBuffers();
if (VERBOSE)
{
Log.d(TAG, "decoder output buffers changed (we don't care)");
}
}
else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)
{
// expected before first buffer of data
MediaFormat newFormat = decoder.getOutputFormat();
if (VERBOSE)
{
Log.d(TAG, "decoder output format changed: " + newFormat);
}
}
else if (decoderStatus < 0)
{
throw new RuntimeException("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
}
else
{ // decoderStatus >= 0
if (VERBOSE)
{
Log.d(TAG, "surface decoder given buffer "
+ decoderStatus + " (size=" + info.size + ")");
}
// The ByteBuffers are null references, but we still get a nonzero
// size for the decoded data.
boolean doRender = (info.size != 0);
// As soon as we call releaseOutputBuffer, the buffer will be forwarded
// to SurfaceTexture to convert to a texture. The API doesn't
// guarantee that the texture will be available before the call
// returns, so we need to wait for the onFrameAvailable callback to
// fire. If we don't wait, we risk rendering from the previous frame.
decoder.releaseOutputBuffer(decoderStatus, doRender);
if (doRender)
{
// This waits for the image and renders it after it arrives.
if (VERBOSE)
{
Log.d(TAG, "awaiting frame");
}
outputSurface.awaitNewImage();
outputSurface.drawImage();
// Send it to the encoder.
inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
if (VERBOSE)
{
Log.d(TAG, "swapBuffers");
}
inputSurface.swapBuffers();
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
{
// forward decoder EOS to encoder
if (VERBOSE)
{
Log.d(TAG, "signaling input EOS");
}
if (WORK_AROUND_BUGS)
{
// Bail early, possibly dropping a frame.
return;
}
else
{
encoder.signalEndOfInputStream();
}
}
}
}
}
}
}
And the profile information (profiler screenshot omitted):
Tested on a Samsung Galaxy Note 3 Intl (Qualcomm)
Your issue is probably in how you synchronously wait for events on a single thread, with a nonzero timeout.
You could probably get better throughput if you lower the timeout. Most hardware codecs work with a bit of latency; you can get a good total throughput, but don't expect to have a result (a frame encoded or decoded) immediately.
Ideally, you would use a zero timeout to check all inputs/outputs of both the encoder and the decoder, and only when there are no free buffers at any of those points, wait with a nonzero timeout on e.g. the encoder output or the decoder output, along the lines of the sketch below.
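A rough sketch of that structure. The helpers feedDecoder(), drainDecoder() and drainEncoder() are hypothetical: they stand for the corresponding blocks of the loop in your code, each taking a dequeue timeout and returning true when it made progress:
while (!outputDone) {
    boolean progress = false;
    // Non-blocking passes (timeout 0) over every queue first.
    progress |= feedDecoder(0 /* timeoutUs */);
    progress |= drainDecoder(0 /* timeoutUs */);
    progress |= drainEncoder(0 /* timeoutUs */);
    if (!progress) {
        // Nothing moved anywhere: block briefly on a single queue
        // instead of paying a 10 ms timeout at every step of the loop.
        drainEncoder(10000 /* timeoutUs */);
    }
}
This way a slow stage never stalls the other queues, and the loop only sleeps when the whole pipeline is genuinely idle.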
If you can target Android 5.0, asynchronous mode in MediaCodec makes it much easier to get this done right. See e.g. https://github.com/mstorsjo/android-decodeencodetest for an example of how to do this, and https://stackoverflow.com/a/35885471/3115956 for a longer discussion of this issue.
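For completeness, a minimal sketch of the asynchronous decoder side (API 21+; the encoder gets an analogous callback). setCallback() must be called before configure(); the extractor access here is simplified and in real code should be confined to a single thread, as in the linked example:
decoder.setCallback(new MediaCodec.Callback() {
    @Override
    public void onInputBufferAvailable(MediaCodec codec, int index) {
        // Called whenever the decoder has a free input buffer; no polling.
        ByteBuffer buf = codec.getInputBuffer(index);
        int size = extractor.readSampleData(buf, 0);
        if (size < 0) {
            codec.queueInputBuffer(index, 0, 0, 0,
                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        } else {
            codec.queueInputBuffer(index, 0, size,
                    extractor.getSampleTime(), 0);
            extractor.advance();
        }
    }
    @Override
    public void onOutputBufferAvailable(MediaCodec codec, int index,
            MediaCodec.BufferInfo info) {
        // Rendering to the output Surface; the GL draw + swapBuffers then
        // happens on the frame-available callback instead of blocking here.
        codec.releaseOutputBuffer(index, info.size != 0);
    }
    @Override
    public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { }
    @Override
    public void onError(MediaCodec codec, MediaCodec.CodecException e) { }
});
decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
decoder.start();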
You can also have a look at some similar questions.