Android: use MediaCodec with libstreaming

I have a problem with this library
https://github.com/fyhertz/libstreaming
It allows streaming the camera over the network and offers three methods: two based on MediaCodec and one based on MediaRecorder.
I would like to modify it so that it uses only MediaCodec; however, first of all I tried the code of example 2 of the library, but I always get the same error:
the log tells me that the device can use MediaCodec, the encoder is set up, but when the decoder is tested it fails and the buffer is filled with -1.
This is the method in the EncoderDebugger class where the exception occurs. Can some kind soul help me, please?
private long decode(boolean withPrefix) {
    int n = 3, i = 0, j = 0;
    long elapsed = 0, now = timestamp();
    int decInputIndex = 0, decOutputIndex = 0;
    ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers();
    ByteBuffer[] decOutputBuffers = mDecoder.getOutputBuffers();
    BufferInfo info = new BufferInfo();
    while (elapsed < 3000000) {
        // Feeds the decoder with a NAL unit
        if (i < NB_ENCODED) {
            decInputIndex = mDecoder.dequeueInputBuffer(1000000/FRAMERATE);
            if (decInputIndex >= 0) {
                int l1 = decInputBuffers[decInputIndex].capacity();
                int l2 = mVideo[i].length;
                decInputBuffers[decInputIndex].clear();
                if ((withPrefix && hasPrefix(mVideo[i])) || (!withPrefix && !hasPrefix(mVideo[i]))) {
                    check(l1 >= l2, "The decoder input buffer is not big enough (nal="+l2+", capacity="+l1+").");
                    decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length);
                } else if (withPrefix && !hasPrefix(mVideo[i])) {
                    check(l1 >= l2+4, "The decoder input buffer is not big enough (nal="+(l2+4)+", capacity="+l1+").");
                    decInputBuffers[decInputIndex].put(new byte[] {0,0,0,1});
                    decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length);
                } else if (!withPrefix && hasPrefix(mVideo[i])) {
                    check(l1 >= l2-4, "The decoder input buffer is not big enough (nal="+(l2-4)+", capacity="+l1+").");
                    decInputBuffers[decInputIndex].put(mVideo[i], 4, mVideo[i].length-4);
                }
                mDecoder.queueInputBuffer(decInputIndex, 0, l2, timestamp(), 0);
                i++;
            } else {
                if (VERBOSE) Log.d(TAG, "No buffer available !7");
            }
        }
        // Tries to get a decoded image
        decOutputIndex = mDecoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (decOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            decOutputBuffers = mDecoder.getOutputBuffers();
        } else if (decOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            mDecOutputFormat = mDecoder.getOutputFormat();
        } else if (decOutputIndex >= 0) {
            if (n > 2) {
                // We have successfully encoded and decoded an image !
                int length = info.size;
                mDecodedVideo[j] = new byte[length];
                decOutputBuffers[decOutputIndex].clear();
                decOutputBuffers[decOutputIndex].get(mDecodedVideo[j], 0, length);
                // Converts the decoded frame to NV21
                convertToNV21(j);
                if (j >= NB_DECODED-1) {
                    flushMediaCodec(mDecoder);
                    if (VERBOSE) Log.v(TAG, "Decoding "+n+" frames took "+elapsed/1000+" ms");
                    return elapsed;
                }
                j++;
            }
            mDecoder.releaseOutputBuffer(decOutputIndex, false);
            n++;
        }
        elapsed = timestamp() - now;
    }
    throw new RuntimeException("The decoder did not decode anything.");
}

Here are my suggestions:
(1) Check the settings of the encoder and decoder and make sure they match; for example, the resolution and color format must be the same.
(2) Make sure the very first packet generated by the encoder (the codec-config data, i.e. SPS/PPS) has been sent and pushed into the decoder. This packet defines the basic settings of the video stream.
(3) The decoder usually buffers 5-10 frames, so no valid output appears for the first few hundred milliseconds.
(4) When initializing the decoder, set the surface to null; otherwise the output buffers are consumed by the surface and probably released automatically. A sketch of points (2) and (4) is shown below.
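For illustration, here is a minimal sketch (not code from libstreaming) of what suggestions (2) and (4) look like with the ByteBuffer API; the method name, the width/height parameters and the sps/pps arrays are placeholders you would fill in from your own stream:

// Minimal sketch: configure the test decoder with a null Surface and push
// the SPS/PPS as codec-config data before any frame data.
private MediaCodec startTestDecoder(byte[] sps, byte[] pps, int width, int height) throws IOException {
    MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
    // Alternatively, the SPS/PPS can be attached to the format:
    // format.setByteBuffer("csd-0", ByteBuffer.wrap(sps));
    // format.setByteBuffer("csd-1", ByteBuffer.wrap(pps));
    MediaCodec decoder = MediaCodec.createDecoderByType("video/avc");
    decoder.configure(format, null, null, 0);   // (4) null Surface -> readable output buffers
    decoder.start();

    // (2) Feed SPS and PPS first, flagged as codec config.
    ByteBuffer csd = ByteBuffer.allocate(sps.length + pps.length + 8);
    csd.put(new byte[]{0, 0, 0, 1}).put(sps).put(new byte[]{0, 0, 0, 1}).put(pps);
    int index = decoder.dequeueInputBuffer(100000);
    if (index >= 0) {
        ByteBuffer in = decoder.getInputBuffers()[index];
        in.clear();
        in.put(csd.array(), 0, csd.position());
        decoder.queueInputBuffer(index, 0, csd.position(), 0, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
    }
    return decoder;
}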

Related

What's wrong with my h264 MediaCodec implementation?

I'm pulling video from a drone that encodes the video stream in h264. It sends each NAL unit in 1-5 UDP packets. I have code that reassembles a NAL unit from those packets and removes the header, then passes it to MediaCodec, which in turn renders to a Surface object.
The output should be a video feed, but for some reason all I get is a black screen. I know the surface is working as intended, because if I corrupt the NAL unit I get the green garbage that I think happens when MediaCodec doesn't know what it's looking at.
Anyway, here's the section of code that handles the actual decoding. Is there anything actually wrong with it, or am I looking for the issue in the wrong place?
//This part initializes the decoder and generally sets up everything needed for the while loop down below
encodedVideo = new ServerSocket(11111, 1460, false, 0);
MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080);
format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 100000);
try {
    m_codec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
    m_codec.configure(format, new Surface(droneSight.getSurfaceTexture()), null, 0);
} catch (Exception e) {
    e.printStackTrace();
}
m_codec.start();
running = true;
initialFrame = Arrays.copyOf(encodedVideo.getPacketData(true, true), encodedVideo.getPacketData(true, false).length);

//This section handles the actual grabbing and decoding of each NAL unit. Or it would, if it worked.
while (running) {
    byte[] frame = this.getNALUnit();
    int inputIndex = m_codec.dequeueInputBuffer(-1);
    if (inputIndex >= 0) {
        ByteBuffer buf = m_codec.getInputBuffer(inputIndex);
        buf.put(frame);
        m_codec.queueInputBuffer(inputIndex, 0, frame.length, 0, 0);
    }
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outputIndex = m_codec.dequeueOutputBuffer(info, 0);
    if (outputIndex >= 0) {
        m_codec.releaseOutputBuffer(outputIndex, true);
    }
}
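One thing the suggestions earlier on this page point at: the decoder needs the SPS/PPS configuration before any frame data, and raw H264 NAL units are normally fed with an Annex-B start code prefix. A hedged sketch of such an input path (the helper name and the NAL-type check are illustrative, not taken from the question's code):

// Hedged sketch: MediaCodec's AVC decoder expects Annex-B framing, so each
// NAL unit is prefixed with a start code, and SPS/PPS (NAL types 7 and 8)
// are flagged as codec config so they reach the decoder before any frame.
private void queueNalUnit(MediaCodec codec, byte[] nal) {
    int inputIndex = codec.dequeueInputBuffer(-1);
    if (inputIndex < 0) return;
    ByteBuffer buf = codec.getInputBuffer(inputIndex);
    buf.clear();
    buf.put(new byte[]{0, 0, 0, 1});           // Annex-B start code
    buf.put(nal);
    int nalType = nal[0] & 0x1F;
    int flags = (nalType == 7 || nalType == 8)  // SPS or PPS
            ? MediaCodec.BUFFER_FLAG_CODEC_CONFIG : 0;
    codec.queueInputBuffer(inputIndex, 0, nal.length + 4, 0, flags);
}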

How to play raw NAL units in Android MediaCodec

I initially tried "How to play raw NAL units in Android exoplayer?" but I noticed I'm going to have to do things at a lower level.
I've found this simple MediaCodec example. As you can see, it's a thread that plays a file on a surface passed to it.
Notice the lines
mExtractor = new MediaExtractor();
mExtractor.setDataSource(filePath);
It looks like I should create my own MediaExtractor which, instead of extracting the video units from a file, will use the h264 NAL units from a buffer I'll provide.
I can then call mExtractor.setDataSource(MediaDataSource dataSource), see MediaDataSource
It has readAt(long position, byte[] buffer, int offset, int size)
This is where it reads the NAL units. However, how should I pass them? I have no information on the structure of the buffer that needs to be read.
Should I pass a byte[] buffer with the NAL units in it, and if so, in which format? What is the offset for? If it's a buffer, shouldn't I just erase the data that was read, and thus have no offset or size?
By the way, the h264 NAL units are streaming ones; they come from RTP packets, not files. I'm going to receive them through C++, store them in a buffer, and try to pass them to the MediaExtractor.
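For reference, a minimal sketch of what a MediaDataSource backed by an in-memory buffer could look like; readAt() simply copies size bytes starting at position into buffer at offset. This is a placeholder, not a working extractor for a raw NAL stream, since MediaExtractor generally expects a real container format (MP4, TS, ...) rather than bare NAL units:

// Minimal sketch, assuming the stream has been buffered in memory first.
public class BufferDataSource extends MediaDataSource {
    private final byte[] data; // hypothetical: the bytes received from C++

    public BufferDataSource(byte[] data) {
        this.data = data;
    }

    @Override
    public int readAt(long position, byte[] buffer, int offset, int size) {
        if (position >= data.length) return -1; // end of stream
        int count = Math.min(size, data.length - (int) position);
        System.arraycopy(data, (int) position, buffer, offset, count);
        return count;
    }

    @Override
    public long getSize() {
        return data.length;
    }

    @Override
    public void close() {
        // nothing to release in this sketch
    }
}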
UPDATE:
I've been reading a lot about MediaCodec and I think I understand it better. According to https://developer.android.com/reference/android/media/MediaCodec, everything relies on something of this type:
MediaCodec codec = MediaCodec.createByCodecName(name);
MediaFormat mOutputFormat; // member variable
codec.setCallback(new MediaCodec.Callback() {
    @Override
    void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
        ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
        // fill inputBuffer with valid data
        …
        codec.queueInputBuffer(inputBufferId, …);
    }

    @Override
    void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, …) {
        ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
        MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
        // bufferFormat is equivalent to mOutputFormat
        // outputBuffer is ready to be processed or rendered.
        …
        codec.releaseOutputBuffer(outputBufferId, …);
    }

    @Override
    void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
        // Subsequent data will conform to new format.
        // Can ignore if using getOutputFormat(outputBufferId)
        mOutputFormat = format; // option B
    }

    @Override
    void onError(…) {
        …
    }
});
codec.configure(format, …);
mOutputFormat = codec.getOutputFormat(); // option B
codec.start();
// wait for processing to complete
codec.stop();
codec.release();
As you can see, I can pass input buffers and get decoded output buffers. The exact byte formats are still a mystery, but I think that's how it works. Also according to the same article, the usage of ByteBuffers is slow, and Surfaces are preferred. They consume the output buffers automatically. Although there's no tutorial on how to do it, there's a section in the article that says it's almost identical, so I guess I just need to add the additional lines
codec.setInputSurface(Surface inputSurface)
codec.setOutputSurface(Surface outputSurface)
Where inputSurface and outputSurface are Surfaces which I pass to a MediaPlayer which I use (how?) to display the video in an activity. And the output buffers will simply not arrive in onOutputBufferAvailable (because the surface consumes them first), nor will input buffers arrive in onInputBufferAvailable.
So the questions now are: how exactly do I construct a Surface which contains the video buffer, and how do I display a MediaPlayer in an activity?
For output I can simply create a Surface and pass it to MediaPlayer and MediaCodec, but what about input? Do I need a ByteBuffer for the input anyway, with a Surface only being useful when using other outputs as inputs?
You first need to remove the NAL units' packet framing and feed the raw H264 bytes into this method; however, in your case you are reading from a file, so there is no need to remove anything since you are not using packets. Just feed the data bytes to this method:
private void initDecoder() {
    try {
        writeHeader = true;
        if (mDecodeMediaCodec != null) {
            try {
                mDecodeMediaCodec.stop();
            } catch (Exception e) {}
            try {
                mDecodeMediaCodec.release();
            } catch (Exception e) {}
        }
        mDecodeMediaCodec = MediaCodec.createDecoderByType(MIME_TYPE);
        // MIME_TYPE = video/avc in your case
        mDecodeMediaCodec.configure(format, mSurfaceView.getHolder().getSurface(),
                null,
                0);
        mDecodeMediaCodec.start();
        mDecodeInputBuffers = mDecodeMediaCodec.getInputBuffers();
    } catch (IOException e) {
        e.printStackTrace();
        mLatch.trigger();
    }
}

private void decode(byte[] data) {
    try {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int inputBufferIndex = mDecodeMediaCodec.dequeueInputBuffer(1000);
        if (inputBufferIndex >= 0) {
            ByteBuffer buffer = mDecodeInputBuffers[inputBufferIndex];
            buffer.clear();
            buffer.put(data);
            mDecodeMediaCodec.queueInputBuffer(inputBufferIndex,
                    0,
                    data.length,
                    packet.sequence / 1000,
                    0);
            data = null;
            //decodeDataBuffer.clear();
            //decodeDataBuffer = null;
        }
        int outputBufferIndex = mDecodeMediaCodec.dequeueOutputBuffer(info,
                1000);
        do {
            if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                //encodeOutputBuffers = mDecodeMediaCodec.getOutputBuffers();
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                format = mDecodeMediaCodec.getOutputFormat();
                // media format changed
            } else if (outputBufferIndex < 0) {
                // unexpected result from decoder.dequeueOutputBuffer
            } else {
                mDecodeMediaCodec.releaseOutputBuffer(outputBufferIndex,
                        true);
                outputBufferIndex = mDecodeMediaCodec.dequeueOutputBuffer(info,
                        0);
            }
        } while (outputBufferIndex > 0);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Please don't forget that the I-frame (the first frame's bytes) contains essential data and MUST be fed to the decoder first.

Any way to render a raw video on an ANativeWindow and at the same time write it to a file?

I am trying to render a raw h264 video to a surface (after decoding) and write it to a file at the same time.
The rendering is working fine, but when I try to get the current output buffer it always has a size of 8, and the output file ends up at 3.87 KB.
It seems like the output buffer is locked by the surface (ANativeWindow)?
Can anyone give me advice on how to do this without creating another codec?
The codec is configured with an output surface :
if (AMEDIA_OK == AMediaCodec_configure(d->codec, d->format, d->window /*the native window */, NULL, 0)
Here is the code snippet where I try to get the output buffer:
if (!d->sawOutputEOS) {
    AMediaCodecBufferInfo info;
    auto status = AMediaCodec_dequeueOutputBuffer(d->codec, &info, -1);
    if (status >= 0) {
        if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
            LOGV("output EOS");
            d->sawOutputEOS = true;
            d->isPlaying = false;
        }
        int64_t delay = 333000;
        usleep((useconds_t) delay / 15);
        size_t size;
        // here i get the output buffer
        uint8_t *outputbuffer = AMediaCodec_getOutputBuffer(d->codec, status, &size);
        write(d->fd1, outputbuffer, size); // the output is always 0
        LOGV("%d", size); // the size is always 8
        LOGV("FRAME num : %d", counter[d->nb]++);
        AMediaCodec_releaseOutputBuffer(d->codec, status, info.size != 0);
        if (d->renderonce) {
            d->renderonce = false;
            return;
        }
    } else if (status == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
        LOGV("output buffers changed");
    } else if (status == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
        auto format = AMediaCodec_getOutputFormat(d->codec);
        LOGV("format changed to: %s", AMediaFormat_toString(format));
        AMediaFormat_delete(format);
        d->formatChanged = true;
    } else if (status == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
        LOGV("no output buffer right now");
    } else {
        LOGV("unexpected info code: %zd", status);
    }
}
Thanks in advance
It's not locked; you asked the decoder to decode for display, so it takes the fastest route to the display without exposing the pixels to readable memory. You may find that the reported format is the opaque COLOR_FormatSurface, as explained here.
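To check this from the application side, the opaque format can be detected from the decoder's output format once the format-changed event arrives. A hedged sketch using the Java API (in the NDK the equivalent is reading the "color-format" key from the AMediaFormat):

// Hedged sketch: when a decoder is configured with a Surface, its reported
// color format is typically the opaque COLOR_FormatSurface, i.e. the pixels
// are not available in a CPU-readable output buffer.
MediaFormat outputFormat = decoder.getOutputFormat();
int colorFormat = outputFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
if (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface) {
    // Decoded frames go straight to the Surface; to dump raw frames to a file,
    // configure the codec without a Surface (or read back via ImageReader/GL).
    Log.d("DumpCheck", "Output is opaque; buffers will not contain pixel data");
}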

Transcode h264 video with MediaCodec directly, without texture render

I'm writing a transcoder using MediaCodec.
I created two MediaCodec instances, one for decoding and one for encoding, and I'm trying to send the decoder's output buffers directly into the encoder's input buffers.
It compiles and executes without any apparent problem, and it runs quickly.
But the output video file is wrong. I checked the metadata of the output video and it is all correct: bitrate, framerate, resolution... Only the images in the video are wrong, like this: screen shot
I thought something was wrong, but I cannot figure it out...
I searched libraries and documents and found some sample code that uses a texture surface to render the decoder's output data and transfer it into the encoder. But I thought that should not be necessary for me, because I don't need to edit the images of the video. All I need to do is change the bitrate and resolution to make the file smaller.
Here is the core code in my project:
private void decodeCore() {
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int frameCount = 0;
    while (mDecodeRunning) {
        int inputBufferId = mDecoder.dequeueInputBuffer(50);
        if (inputBufferId >= 0) {
            // fill inputBuffers[inputBufferId] with valid data
            int sampleSize = mExtractor.readSampleData(mDecodeInputBuffers[inputBufferId], 0);
            if (sampleSize >= 0) {
                long time = mExtractor.getSampleTime();
                mDecoder.queueInputBuffer(inputBufferId, 0, sampleSize, time, 0);
            } else {
                mDecoder.queueInputBuffer(inputBufferId, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            }
            mExtractor.advance();
        }
        int outputBufferId = mDecoder.dequeueOutputBuffer(bufferInfo, 50);
        if (outputBufferId >= 0) {
            FrameData data = mFrameDataQueue.obtain();
            // wait until queue has space to push data
            while (data == null) {
                try {
                    Thread.sleep(20);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                data = mFrameDataQueue.obtain();
            }
            data.data.clear();
            data.size = 0;
            data.offset = 0;
            data.flag = 0;
            data.frameTimeInUs = bufferInfo.presentationTimeUs;
            // outputBuffers[outputBufferId] is ready to be processed or rendered.
            if (bufferInfo.size > 0) {
                ByteBuffer buffer = mDecodeOutputBuffers[outputBufferId];
                buffer.position(bufferInfo.offset);
                buffer.limit(bufferInfo.offset + bufferInfo.size);
                data.data.put(buffer);
                data.data.flip();
                data.size = bufferInfo.size;
                data.frameIndex = frameCount++;
            }
            data.flag = bufferInfo.flags;
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                Log.d("bingbing_transcode", "decode over! frames:" + (frameCount - 1));
                mDecodeRunning = false;
            }
            mFrameDataQueue.pushToQueue(data);
            mDecoder.releaseOutputBuffer(outputBufferId, false);
            Log.d("bingbing_transcode", "decode output:\n frame:" + (frameCount - 1) + "\n" + "size:" + bufferInfo.size);
        } else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            mDecodeOutputBuffers = mDecoder.getOutputBuffers();
        } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // Subsequent data will conform to new format.
            mDecodeOutputVideoFormat = mDecoder.getOutputFormat();
            configureAndStartEncoder();
        }
    }
    mDecoder.stop();
    mDecoder.release();
}
private void encodeCore() {
    int trackIndex = 0;
    boolean muxerStarted = false;
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int frameCount = 0;
    while (mEncodeRunning) {
        int inputBufferId = mEncoder.dequeueInputBuffer(50);
        if (inputBufferId >= 0) {
            FrameData data = mFrameDataQueue.pollFromQueue();
            // wait until the queue has data available
            while (data == null) {
                try {
                    Thread.sleep(20);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                data = mFrameDataQueue.obtain();
            }
            if (data.size > 0) {
                ByteBuffer inputBuffer = mEncodeInputBuffers[inputBufferId];
                inputBuffer.clear();
                inputBuffer.put(data.data);
                inputBuffer.flip();
            }
            mEncoder.queueInputBuffer(inputBufferId, 0, data.size, data.frameTimeInUs, data.flag);
            mFrameDataQueue.recycle(data);
        }
        int outputBufferId = mEncoder.dequeueOutputBuffer(bufferInfo, 50);
        if (outputBufferId >= 0) {
            // outputBuffers[outputBufferId] is ready to be processed or rendered.
            ByteBuffer encodedData = mEncodeOutputBuffers[outputBufferId];
            if (bufferInfo.size > 0) {
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + outputBufferId + " was null");
                }
                if (!muxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }
                frameCount++;
            }
            // adjust the ByteBuffer values to match BufferInfo (not needed?)
            encodedData.position(bufferInfo.offset);
            encodedData.limit(bufferInfo.offset + bufferInfo.size);
            mMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                Log.d("bingbing_transcode", "encode over! frames:" + (frameCount - 1));
                mEncodeRunning = false;
            }
            mEncoder.releaseOutputBuffer(outputBufferId, false);
            Log.d("bingbing_transcode", "encode output:\n frame:" + (frameCount - 1) + "\n" + "size:" + bufferInfo.size);
        } else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            mEncodeOutputBuffers = mEncoder.getOutputBuffers();
        } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (muxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d("bingbing_transcode", "encoder output format changed: " + newFormat);
            // now that we have the Magic Goodies, start the muxer
            trackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            muxerStarted = true;
            mEncodeOutputVideoFormat = newFormat;
        }
    }
    mEncoder.stop();
    mEncoder.release();
    if (muxerStarted) {
        mMuxer.stop();
        mMuxer.release();
    }
}
These two functions run in two different threads.
FrameData is a simple holder for the frame's ByteBuffer, its presentation time, and a few other needed fields.
When using ByteBuffer input, there are a few details of the input data layout that are undefined. When the width isn't a multiple of 16, some encoders want the input data row length padded to a multiple of 16, while others assume a line length equal to the width, with no extra padding.
The Android CTS tests (which define what behaviour one can expect across all devices) for encoding from ByteBuffer inputs intentionally only test resolutions that are a multiple of 16, since they know different hardware vendors handle this differently, and they didn't want to enforce any particular handling.
You can't generally assume that the decoder output uses a row size similar to what the encoder consumes, either. The decoder is free to (and some actually do) return a significantly larger width than the actual content size, and use the crop_left/crop_right fields to indicate which parts of it are actually intended to be visible. So if the decoder does that, you can't pass the data straight from the decoder to the encoder unless you copy it line by line, taking into account the actual line sizes used by the decoder and encoder.
Additionally, you can't even assume that the decoder uses a pixel format similar to the encoder's. Many Qualcomm devices use a special tiled pixel format for the decoder output, while the encoder input is normal planar data. In these cases, you'd have to implement pretty complex logic for unshuffling the data before you can feed it into the encoder.
Using a texture surface as an intermediate step hides all of these details. It might not sound completely necessary for your use case, but it does hide all the variation in buffer formats between decoder and encoder.
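A hedged sketch of what the line-by-line copy could look like for a planar Y plane, assuming both sides use a simple planar YUV layout and the plane starts at offset 0; the stride keys are read from the codec formats, and vendor-specific tiled pixel formats are not handled here:

// Hedged sketch: copy a decoded Y plane into the encoder's layout row by row.
// Assumes plain planar YUV on both sides; crop offsets are ignored for brevity.
static void copyPlaneRowByRow(ByteBuffer src, MediaFormat decFormat,
                              ByteBuffer dst, MediaFormat encFormat,
                              int width, int height) {
    // Fall back to the visible width if the codec does not report a stride.
    int srcStride = decFormat.containsKey(MediaFormat.KEY_STRIDE)
            ? decFormat.getInteger(MediaFormat.KEY_STRIDE) : width;
    int dstStride = encFormat.containsKey(MediaFormat.KEY_STRIDE)
            ? encFormat.getInteger(MediaFormat.KEY_STRIDE) : width;
    byte[] row = new byte[width];
    for (int y = 0; y < height; y++) {
        src.position(y * srcStride);
        src.get(row, 0, width);      // read only the visible pixels of this row
        dst.position(y * dstStride);
        dst.put(row, 0, width);      // write them at the encoder's row offset
    }
}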

android mediacodec plays I-Frame only after receiving the next one (the time difference between I-Frames is 2-3 seconds)

I'm trying to use the Android MediaCodec for decoding NAL units. The problem is that the video is played with a delay of 2-3 seconds, as the first I-frame is played only after receiving the next one (the time difference between I-frames is 2-3 seconds). If I queue the same I-frame twice in a row, it plays without any delay, but in that case I can't play the rest of the frames.
I do not understand what the problem could be. I have tried to fix it on my own, but I can't.
public void play(MediaCodec decoder, PESPacket currentPES) {
    byte[] data = currentPES.data.toByteArray();
    pts = currentPES.getPts();
    try {
        if (data[4] == 0x67) {
            Log.d(TAG, "found sps/pps!");
            int ibs = decoder.dequeueInputBuffer(0);
            if (ibs >= 0) {
                ByteBuffer buffer = decoder.getInputBuffers()[ibs];
                buffer.clear();
                buffer.put(data);
                decoder.queueInputBuffer(ibs, 0, data.length, pts, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
            }
        } else {
            if (data[4] == 0x41) { // not I-frame
                int ibs = decoder.dequeueInputBuffer(0);
                if (ibs >= 0) {
                    System.out.println("queueInputBuffer little");
                    ByteBuffer buffer = decoder.getInputBuffers()[ibs];
                    buffer.clear();
                    buffer.put(data);
                    decoder.queueInputBuffer(ibs, 0, data.length, pts, 0);
                }
            }
            if (data[4] == 0x65) { // I-frame
                int ibs = decoder.dequeueInputBuffer(0);
                if (ibs >= 0) {
                    System.out.println("queueInputBuffer");
                    ByteBuffer buffer = decoder.getInputBuffers()[ibs];
                    buffer.clear();
                    buffer.put(data);
                    decoder.queueInputBuffer(ibs, 0, data.length, pts, 0);
                }
            }
        }
        int outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 10000);
        if (outputBufferIndex >= 0) {
            System.out.println("releaseOutputBuffer " + outputBufferIndex);
            decoder.releaseOutputBuffer(outputBufferIndex, true);
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            System.out.println("INFO_OUTPUT_BUFFERS_CHANGED ");
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat format = decoder.getOutputFormat();
            System.out.println("INFO_OUTPUT_FORMAT_CHANGED " + format);
        }
    } catch (Throwable t) {
        t.printStackTrace();
    }
}
It may be too late, but I wanted to share some info.
If you only send IDR frames to the video decoder, you can try setting "android._num-input-buffers" and "android._num-output-buffers" to 1 in the MediaFormat when configuring the decoder. This asks the video decoder to allocate a single buffer and thus output each buffer immediately. However, the behaviour is not guaranteed and depends on the decoder's implementation.
I'm guessing Google's software H264 decoder, "omx.google.h264.decoder", should support this behaviour, but I have not tested it.
Refer to the Android source code of FrameDecoder.cpp, quoted below, for how to set the parameters. FrameDecoder.cpp is the framework-layer implementation behind MediaMetadataRetriever's frame extraction (e.g. getFrameAtTime()).
http://androidxref.com/9.0.0_r3/xref/frameworks/av/media/libstagefright/FrameDecoder.cpp#411
451 // For the thumbnail extraction case, try to allocate single buffer in both
452 // input and output ports, if seeking to a sync frame. NOTE: This request may
453 // fail if component requires more than that for decoding.
454 bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
455 || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
456 if (!isSeekingClosest) {
457 videoFormat->setInt32("android._num-input-buffers", 1);
458 videoFormat->setInt32("android._num-output-buffers", 1);
459 }
