MediaCodec decode AAC audio chunks from RTSP and play - Android

I'm receiving RTP packets containing AAC audio chunks encoded by libvo_aacenc (44100 Hz, 128 kbps, 2 channels) from an FFserver instance. I'm trying to decode them one by one with MediaCodec on Android and play each chunk back as soon as it is decoded.
Client.java
Player player = new Player();

//RTSP listener
@Override
public void onRTSPPacketReceived(RTPpacket packet) {
    byte[] aac_chunk = packet.getpayload();
    player.playAAC(aac_chunk);
}
Player.java
private MediaCodec decoder;
private AudioTrack audioTrack;
private MediaExtractor extractor;

public Player() {
    extractor = new MediaExtractor();
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            44100, AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT,
            44100,
            AudioTrack.MODE_STREAM);

    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_BIT_RATE, 128 * 1024);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 2);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectHE);

    try {
        decoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
        decoder.configure(format, null, null, 0);
    } catch (IOException e) {
        e.printStackTrace();
    }
    decoder.start();
    audioTrack.play();
}
//Decode and play one aac_chunk
public void playAAC(byte[] data) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();

    int inIndex = decoder.dequeueInputBuffer(-1);
    if (inIndex >= 0) {
        ByteBuffer buffer = inputBuffers[inIndex];
        buffer.put(data, 0, data.length);
        int sampleSize = extractor.readSampleData(buffer, 0);
        if (sampleSize < 0) {
            decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        } else {
            long presentationTimeUs = extractor.getSampleTime();
            decoder.queueInputBuffer(inIndex, 0, sampleSize, presentationTimeUs, 0);
        }
    }

    int outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT);
    while (outIndex >= 0) {
        ByteBuffer outBuffer = outputBuffers[outIndex];
        byte[] decoded_chunk = new byte[info.size];
        outBuffer.get(decoded_chunk); // Read the buffer all at once
        outBuffer.clear();
        //!! Decoded decoded_chunk.length = 0 !!
        System.out.println("DECODED CHUNK SIZE: " + decoded_chunk.length);
        //Instant play of the decoded chunk
        audioTrack.write(decoded_chunk, info.offset, info.offset + info.size);
        decoder.releaseOutputBuffer(outIndex, false);
        break;
    }
    decoder.flush();
}
On start, MediaCodec is correctly initialized:
MediaCodec: (0xa5040280) start
MediaCodec: (0xa5040280) input buffers allocated
MediaCodec: (0xa5040280) numBuffers (4)
MediaCodec: (0xa5040280) output buffers allocated
MediaCodec: (0xa5040280) numBuffers (4)
The problem
I actually hear no sound. MediaCodec is working, but it doesn't look like it's decoding anything into its output buffers, since decoded_chunk.length = 0 and outBuffer.limit() = 0.
Questions
Should I fill the MediaCodec input buffers asynchronously? Unfortunately, none of the examples I found cover this problem: immediate decode and playback.
I've followed these examples:
Decode and play back an AAC file, extracting media information (link)
The same, but with a different way of implementing MediaCodec, with the steps spelled out (link)

I've solved this using MediaCodec in async mode with MediaCodec.Callback, as described in the official docs here; async mode is only available from Android API level 21 (minSdkVersion 21).
Basically, I push every RTP audio chunk I receive onto a queue, and I'm then notified every time the state of the MediaCodec buffers changes. This makes the decoder flow much easier to handle. (A sketch of the queue side follows the callback code below.)
decoder.setCallback(new MediaCodec.Callback() {
    @Override
    public void onInputBufferAvailable(@NonNull MediaCodec mediaCodec, int i) {
        //One InputBuffer is available to decode
        while (true) {
            if (queue.size() > 0) {
                byte[] data = queue.removeFirst();
                ByteBuffer buffer = mediaCodec.getInputBuffer(i);
                buffer.put(data, 0, data.length);
                mediaCodec.queueInputBuffer(i, 0, data.length, 0, 0);
                break;
            }
        }
    }

    @Override
    public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec, int i, @NonNull MediaCodec.BufferInfo info) {
        //DECODING PACKET ENDED
        ByteBuffer outBuffer = mediaCodec.getOutputBuffer(i);
        byte[] chunk = new byte[info.size];
        outBuffer.get(chunk); // Read the buffer all at once
        outBuffer.clear();
        audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data
        mediaCodec.releaseOutputBuffer(i, false);
    }

    @Override
    public void onError(@NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {}

    @Override
    public void onOutputFormatChanged(@NonNull MediaCodec mediaCodec, @NonNull MediaFormat mediaFormat) {}
});
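For completeness, here is a minimal sketch of the queue side, which the code above assumes but does not show. The field name queue and the choice of ConcurrentLinkedDeque are my assumptions, not from the original post:

//Sketch (assumed, not from the original post): a thread-safe queue shared
//between the RTSP listener thread and the MediaCodec callback thread.
private final ConcurrentLinkedDeque<byte[]> queue = new ConcurrentLinkedDeque<>();

//RTSP listener: just enqueue; the codec pulls chunks in onInputBufferAvailable
@Override
public void onRTSPPacketReceived(RTPpacket packet) {
    queue.addLast(packet.getpayload());
}

Note that the busy-wait in onInputBufferAvailable spins on the codec's callback thread while the queue is empty; a blocking queue such as LinkedBlockingDeque with takeFirst() would avoid that spin.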

Related

Android MediaCodec OutputBuffer approach method

I am using MediaCodec on Android to decode an H.264 stream.
The raw data stream consists of a series of NAL units.
Every frame (640*480) in the video is divided into four parts in the stream.
Each time, I send one buffer (one NAL unit) into the MediaCodec decoder and wait for the output buffer.
It turns out that the output buffer capacity is as big as a YUV frame, but the number of output buffers that come out is nearly four times the frame count. I guess each output buffer only returns 1/4 of the decode result. If that is so, how can I get continuous YUV frame data?
public void setDataToDecoder(byte[] videoBuffer, int size, int id) {
    Log.e("Decoder", "-----------" + id);
    inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
    //Log.e("Decoder", "InputIndex " + inputBufferIndex);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
        inputBuffer.clear();
        inputBuffer.put(videoBuffer, 0, size);
        mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timeStamp, 0);
        timeStamp++;
    }
    BufferInfo bufferInfo = new BufferInfo();
    outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
    while (outputBufferIndex >= 0) {
        ByteBuffer outputData = outputBuffers[outputBufferIndex];
        if (bufferInfo.size != 0) {
            outputData.position(bufferInfo.offset);
            outputData.limit(bufferInfo.offset + bufferInfo.size);
            Log.e("P", "oooo" + outputData.position());
            Log.e("P", "oooo" + outputData.limit());
        }
        int tmp;
        if (FrameCt <= 20) {
            try {
                tmp = fc.write(outputData);
                Log.e("P", "" + tmp);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        FrameCt++;
        mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
        outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
    }
    Log.e("Decoder", "-----------" + id);
}

How to configure the encoder input buffer size created by MediaCodec?

I'm working on decoding a video file and then re-encoding it to a smaller size / lower bit rate video file. I have finished the decoding and I get the raw video output buffers, but when I queue a raw output buffer into the input buffer of the encoder, it throws an overflow exception, because the capacity of the input buffer is too small to hold the raw output buffer.
I find that if I configure the width and height of the encoder's output format to be bigger, the capacity of both the input and output buffers of the encoder grows too, and they stay very close in value. And when I configure the width and height to the original video size, the input buffer is big enough to hold the raw output buffer of the decoder, and I get the output video file. But I want a video with a smaller frame size and a lower bit rate.
The key code is as follows.
MediaCodecInfo codecInfo = selectCodec("video/avc");
MediaFormat outformat = MediaFormat.createVideoFormat("video/avc", 1280, 720);
int colorfmt = selectColorFormat(codecInfo, "video/avc");
outformat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorfmt);//2141391876);
outformat.setInteger(MediaFormat.KEY_BIT_RATE, 178*1024*8);
outformat.setInteger(MediaFormat.KEY_FRAME_RATE, 24);
outformat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
//outformat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 3400000);;
MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
encoder.configure(outformat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
encoder.start();
Extract video and decode
int inIndex = decoder.dequeueInputBuffer(10000);
if (inIndex >= 0) {
    ByteBuffer buffer = inputBuffers[inIndex];
    int sampleSize = extractor.readSampleData(buffer, 0);
    if (sampleSize < 0) {
        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        isEOS = true;
    } else {
        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
        extractor.advance();
    }
}
Encode and mux to MP4 file
int outIndex = decoder.dequeueOutputBuffer(info, 10000);
switch (outIndex) {
    case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
        Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
        outputBuffers = decoder.getOutputBuffers();
        break;
    case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
        MediaFormat infmt = decoder.getOutputFormat();
        break;
    case MediaCodec.INFO_TRY_AGAIN_LATER:
        Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
        break;
    default:
        ByteBuffer buffer = outputBuffers[outIndex];
        buffer.position(info.offset);
        buffer.limit(info.offset + info.size);
        int encInputIndex = encoder.dequeueInputBuffer(10000);
        if (encInputIndex >= 0) {
            ByteBuffer encBuffer = encInputBuf[encInputIndex];
            encBuffer.clear();
            encBuffer.put(buffer);
            encoder.queueInputBuffer(encInputIndex, 0, info.size, info.presentationTimeUs, 0);
        }
        ByteBuffer[] encOutputBuf = encoder.getOutputBuffers();
        int trackindex = 0;
        while (true) {
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, 10000);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                break;
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encOutputBuf = encoder.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = encoder.getOutputFormat();
                // now that we have the Magic Goodies, start the muxer
                trackindex = muxer.addTrack(newFormat);
                muxer.start();
            } else if (encoderStatus < 0) {
                // let's ignore it
            } else {
                ByteBuffer buf = encOutputBuf[encoderStatus];
                muxer.writeSampleData(trackindex, buf, bufferInfo);
                encoder.releaseOutputBuffer(encoderStatus, false);
            }
        }
@fadden I wish to discuss this with you and get your help. Thanks!
The decoder in MediaCodec is not able to resize automatically; it always outputs frames at their real sizes.
The encoder likewise encodes frames using the real sizes as input and cannot resize internally. So if you want to feed the encoder a frame whose size differs from the decoded frame, you need to do the resizing yourself (see the sketch below).
Also, you can check INDE Media Pack: https://software.intel.com/en-us/intel-inde/media-pack. It has a MediaComposer class that lets you easily resize video.
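For the buffer-to-buffer path, here is a minimal sketch of such a resize, assuming the decoder outputs planar YUV420 (I420) and using nearest-neighbor sampling. Real decoder output formats vary by device (check KEY_COLOR_FORMAT in the output format), so treat this as illustrative only:

//Nearest-neighbor downscale of a planar YUV420 (I420) frame.
//Assumption: the decoder actually outputs I420; many devices use other
//(sometimes proprietary) layouts, so check KEY_COLOR_FORMAT first.
public static byte[] scaleI420(byte[] src, int srcW, int srcH, int dstW, int dstH) {
    byte[] dst = new byte[dstW * dstH * 3 / 2];
    // Y plane
    for (int y = 0; y < dstH; y++) {
        int sy = y * srcH / dstH;
        for (int x = 0; x < dstW; x++) {
            dst[y * dstW + x] = src[sy * srcW + (x * srcW / dstW)];
        }
    }
    // U and V planes (each quarter-resolution)
    int srcUOff = srcW * srcH, srcVOff = srcUOff + srcUOff / 4;
    int dstUOff = dstW * dstH, dstVOff = dstUOff + dstUOff / 4;
    for (int y = 0; y < dstH / 2; y++) {
        int sy = y * (srcH / 2) / (dstH / 2);
        for (int x = 0; x < dstW / 2; x++) {
            int sx = x * (srcW / 2) / (dstW / 2);
            dst[dstUOff + y * (dstW / 2) + x] = src[srcUOff + sy * (srcW / 2) + sx];
            dst[dstVOff + y * (dstW / 2) + x] = src[srcVOff + sy * (srcW / 2) + sx];
        }
    }
    return dst;
}

In practice, a Surface-based pipeline (decode to a SurfaceTexture, render scaled with OpenGL ES into the encoder's input Surface, available since API 18) gives better quality and performance than scaling buffers in Java.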

How to get bitmap (frames) from video using MediaCodec

I'm trying to get all the frames from a video file using MediaCodec. If I display the video on a SurfaceView, everything is OK. But if the surface is null, and I try to get a Bitmap from the byte array, I always get null or a runtime exception.
This is my code:
private class PlayerThread extends Thread {
    private MediaExtractor extractor;
    private MediaCodec decoder;
    private Surface surface;

    public PlayerThread(Surface surface) {
        this.surface = surface;
    }

    @Override
    public void run() {
        extractor = new MediaExtractor();
        extractor.setDataSource(videoPath);
        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("video/")) {
                extractor.selectTrack(i);
                decoder = MediaCodec.createDecoderByType(mime);
                decoder.configure(format, /*surface*/ null, null, 0);
                break;
            }
        }
        if (decoder == null) {
            Log.e("DecodeActivity", "Can't find video info!");
            return;
        }
        decoder.start();

        ByteBuffer[] inputBuffers = decoder.getInputBuffers();
        ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
        BufferInfo info = new BufferInfo();
        boolean isEOS = false;

        while (!Thread.interrupted()) {
            ++numFrames;
            if (!isEOS) {
                int inIndex = decoder.dequeueInputBuffer(10000);
                if (inIndex >= 0) {
                    ByteBuffer buffer = inputBuffers[inIndex];
                    int sampleSize = extractor.readSampleData(buffer, 0);
                    if (sampleSize < 0) {
                        // We shouldn't stop the playback at this point, just pass the EOS
                        // flag to the decoder; we will get it again from dequeueOutputBuffer
                        Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isEOS = true;
                    } else {
                        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }

            int outIndex = decoder.dequeueOutputBuffer(info, 10000);
            switch (outIndex) {
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                    outputBuffers = decoder.getOutputBuffers();
                    break;
                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                    break;
                case MediaCodec.INFO_TRY_AGAIN_LATER:
                    Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                    break;
                default:
                    // I tried to get a Bitmap in a few ways
                    //1.
                    //ByteBuffer buffer = outputBuffers[outIndex];
                    //byte[] ba = new byte[buffer.remaining()];
                    //buffer.get(ba);
                    //final Bitmap bmp = BitmapFactory.decodeByteArray(ba, 0, ba.length); // this returns null
                    //2.
                    //ByteBuffer buffer = outputBuffers[outIndex];
                    //final Bitmap bmp = Bitmap.createBitmap(1920, 1080, Config.ARGB_8888); // using the MediaFormat object I know width and height
                    //int a = bmp.getByteCount(); //8294400
                    //buffer.rewind();
                    //int b = buffer.capacity(); //3137536
                    //int c = buffer.remaining(); //3137536
                    //bmp.copyPixelsFromBuffer(buffer); // java.lang.RuntimeException: Buffer not large enough for pixels
                    //I know what the exception means, but I don't know why it occurs
                    //In this place I need the bitmap

                    // We use a very simple clock to keep the video FPS, or the video
                    // playback will be too fast
                    while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                        try {
                            sleep(10);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                            break;
                        }
                    }
                    decoder.releaseOutputBuffer(outIndex, true);
                    break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }

        decoder.stop();
        decoder.release();
        extractor.release();
    }
}
I have no idea how to solve it.
There are a couple of problems with your code (or, arguably, with MediaCodec).
First, the ByteBuffer handling in MediaCodec is poor, so you have to manually set the buffer parameters from the values in the BufferInfo object that is filled out by dequeueOutputBuffer().
Second, the values that come out of the MediaCodec are in YUV format, not RGB. As of Android 4.4, the Android framework does not provide a function that will convert the output to Bitmap. You will need to provide your own YUV-to-RGB converters (plural -- there are multiple formats). Some devices use proprietary, undocumented color formats.
You can see an example of extracting and examining MediaCodec decoder buffer contents in the CTS EncodeDecodeTest buffer-to-buffer methods (e.g. checkFrame()).
A more reliable way to go about this is to go back to decoding to a Surface, but extract the pixels with OpenGL ES. The bigflake ExtractMpegFramesTest shows how to do this.
The short answer is:
In the default section of your switch statement, you need to reset the ByteBuffer position, so instead of:
ByteBuffer buffer = outputBuffers[outIndex];
byte[] ba = new byte[buffer.remaining()];
buffer.get(ba);
you should have something like
ByteBuffer buffer = outputBuffers[outIndex];
buffer.position(info.offset);
buffer.limit(info.offset + info.size);
byte[] ba = new byte[buffer.remaining()];
buffer.get(ba);
In your original code, you will find that your ByteBuffer has a remaining() of 0.
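If you do need a Bitmap from the raw bytes, one common workaround is a JPEG round-trip through YuvImage. This is a sketch that assumes the decoder's output happens to be NV21-compatible, which, per the caveats above about device-specific YUV formats, is not guaranteed:

//Sketch only: convert an NV21 frame to a Bitmap via a JPEG round-trip.
//Assumption: ba really is NV21 data of the given width/height; decoders
//often emit NV12 or proprietary tiled layouts instead.
YuvImage yuv = new YuvImage(ba, ImageFormat.NV21, width, height, null);
ByteArrayOutputStream out = new ByteArrayOutputStream();
yuv.compressToJpeg(new Rect(0, 0, width, height), 90, out);
byte[] jpeg = out.toByteArray();
Bitmap bmp = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);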

Connect MediaCodec with Camera (setPreviewCallbackWithBuffers)

As @Aegonis asked in this post,
I'm trying to connect the camera on Android to encode the video frames.
The problem is that when I try to do the same as he did, it crashes in this part, just as it did for two other users who posted in his topic:
// called from Camera.setPreviewCallbackWithBuffer(...) in another class
public void encoder(byte[] input) {
    try {
        ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
        ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
        int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
        if (inputBufferIndex >= 0) {
            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            inputBuffer.put(input);
            mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
        }
        ...
The call I make with setPreviewCallbackWithBuffer is like this:
int bufSize = 460800;
camera.addCallbackBuffer(new byte[bufSize]);
camera.setPreviewCallbackWithBuffer(new PreviewCallback() {
    public void onPreviewFrame(byte[] data, Camera arg1) {
        codecs.encoder(data);
    }
});
...
I get the following error messages:
03-04 20:30:46.762: E/AndroidRuntime(26196): java.nio.BufferOverflowException
03-04 20:30:46.762: E/AndroidRuntime(26196): at java.nio.Buffer.checkPutBounds(Buffer.java:189)
03-04 20:30:46.762: E/AndroidRuntime(26196): at java.nio.ReadWriteDirectByteBuffer.put(ReadWriteDirectByteBuffer.java:100)
03-04 20:30:46.762: E/AndroidRuntime(26196): at java.nio.ByteBuffer.put(ByteBuffer.java:712)
So the byte buffer is quite big, but setPreviewCallbackWithBuffer tells you to use at least one buffer of that size: 460800. So... how would you solve this problem?
Try this:
Parameters camParams = camera.getParameters();
int size = imageWidth * imageHeight;
size = size * ImageFormat.getBitsPerPixel(camParams.getPreviewFormat()) / 8;
mBuffer = new byte[size]; // class variable
camera.addCallbackBuffer(mBuffer);
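One detail worth adding (my note, not part of the original answer): with setPreviewCallbackWithBuffer, the buffer passed to onPreviewFrame must be handed back to the camera once you are done with it, or preview callbacks stop as soon as the buffer pool is exhausted:

camera.setPreviewCallbackWithBuffer(new PreviewCallback() {
    public void onPreviewFrame(byte[] data, Camera cam) {
        codecs.encoder(data);
        //Return the buffer so the camera can reuse it for the next frame
        cam.addCallbackBuffer(data);
    }
});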

Audio playing back at the wrong speed using FFmpeg on Android

The general problem is the following:
I decode the audio as follows:
ReSampleContext* rsc = av_audio_resample_init(
        1, aCodecCtx->channels,
        aCodecCtx->sample_rate, aCodecCtx->sample_rate,
        av_get_sample_fmt("u8"), aCodecCtx->sample_fmt,
        16, 10, 0, 1);

while (av_read_frame(pFormatCtx, &packet) >= 0) {
    if (aCodecCtx->codec_type == AVMEDIA_TYPE_AUDIO) {
        int data_size = AVCODEC_MAX_AUDIO_FRAME_SIZE * 2;
        int size = packet.size;
        int decoded = 0;
        while (size > 0) {
            int len = avcodec_decode_audio3(aCodecCtx, pAudioBuffer,
                    &data_size, &packet);
            // Convert audio to 8-bit samples
            out_size = audio_resample(rsc, outBuffer, pAudioBuffer, len);
            jbyte *bytes = (*env)->GetByteArrayElements(env, array, NULL);
            memcpy(bytes, outBuffer, out_size);
            (*env)->ReleaseByteArrayElements(env, array, bytes, 0);
            (*env)->CallStaticVoidMethod(env, cls, mid, array, out_size, number);
            size -= len;
            number++;
        }
    }
}
Then I play it back with AudioTrack. I hear the song I expected, but with noise and at twice the speed. What might the problem be?
UPDATE:
This is Java code:
public static AudioTrack track;
public static byte[] bytes;

protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);
    int bufSize = 2048;
    track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_8BIT, bufSize, AudioTrack.MODE_STREAM);
    bytes = new byte[bufSize];
    Thread mAudioThread = new Thread(new Runnable() {
        public void run() {
            int res = main(2, "/sdcard/muzika_iz_reklami_bmw_5_series_-_bmw_5_series.mp3", bytes);
            System.out.println(res);
        }
    });
    mAudioThread.setPriority(Thread.MAX_PRIORITY);
    mAudioThread.start();
}
private static void play(byte[] play, int length, int p) {
    if (p == 0) {
        track.play();
    }
    track.write(play, 0, length);
}
Perhaps your AudioTrack is expecting stereo data but you are sending it mono. You could try setting your AudioTrack channel configuration to CHANNEL_OUT_MONO.
This could be a sample rate mismatch. Maybe you are creating the AudioTrack with a lower sample rate than the actual rate.
It could also be a problem with the AudioTrack channel configuration, as Matthew mentioned.
Maybe you should try something like what I answered in this question.
Because of the way you are interacting with JNI from Java, I am not sure whether that will work; I have little knowledge of JNI. But the code works for me and is what I am currently using in my app.
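To make the mismatch concrete, here is a minimal sketch (mine, not from either answer) of deriving the AudioTrack configuration from the decoder's actual output instead of hard-coding it. The sampleRate and channels values stand in for what you would pass up from the native side (e.g. aCodecCtx->sample_rate and aCodecCtx->channels):

//Sketch: configure AudioTrack from the decoder's real parameters rather
//than hard-coded ones; values assumed to come from native code.
int sampleRate = 44100; // replace with the decoder's reported rate
int channels = 1;       // replace with the decoder's channel count
int channelConfig = (channels == 1)
        ? AudioFormat.CHANNEL_OUT_MONO
        : AudioFormat.CHANNEL_OUT_STEREO;
int minBuf = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
        AudioFormat.ENCODING_PCM_8BIT);
AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        channelConfig, AudioFormat.ENCODING_PCM_8BIT, minBuf,
        AudioTrack.MODE_STREAM);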
