Improper decoding of raw H264 video stream using Android MediaCodec

I am building a screen sharing app, receiving a raw H264 video stream from a socket and displaying it on a SurfaceView by decoding it with the MediaCodec class. I am able to receive the data and display it on the surface. But the problem I have been stuck on for two days is that the video is very laggy and jittery, with green bands and patches. You can see the issue in the YouTube video link. If I save this stream to the sdcard and play it with MX Player, it plays fine. I also tried with GStreamer and everything is fine.
This is my class, called from the Activity when the SurfaceView is created.
//New Edited Code
public class Server {
    static final int socketServerPORT = 53515;
    MainActivity activity;
    ServerSocket serverSocket;

    public Server(MainActivity activity, Surface surface) {
        Log.e("constructor()", "called");
        this.activity = activity;
        Thread socketServerThread = new Thread(new SocketServerThread(surface));
        socketServerThread.start();
    }

    private static MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (codecInfo.isEncoder()) {
                continue;
            }
            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    Log.e("codecinfo", codecInfo.getName());
                    return codecInfo;
                }
            }
        }
        return null;
    }

    private class SocketServerThread extends Thread {
        InputStream is;
        Socket socket;
        private MediaCodec codec;
        private Surface surface;

        public SocketServerThread(Surface surface) {
            this.surface = surface;
        }

        @Override
        public void run() {
            Log.e("socketthread", "called");
            try {
                selectCodec("video/avc");
                codec = MediaCodec.createByCodecName(selectCodec("video/avc").getName());
                MediaFormat format = MediaFormat.createVideoFormat("video/avc", 800, 480);
                // format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
                format.setInteger(MediaFormat.KEY_BIT_RATE, 1024000);
                format.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
                codec.configure(format, surface, null, 0);
                codec.start();
                serverSocket = new ServerSocket(socketServerPORT);
                while (true) {
                    socket = serverSocket.accept();
                    Log.e("connection", "accepted");
                    is = socket.getInputStream();
                    if (is != null) {
                        // File file = new File(Environment.getExternalStorageDirectory() + "/stream.mp4");
                        // OutputStream output = new FileOutputStream(file);
                        byte[] buff = new byte[4 * 1024]; // or other buffer size
                        int read;
                        while ((read = is.read(buff)) != -1) {
                            // output.write(buff, 0, read);
                            if (buff.length == 1)
                                continue;
                            int inIndex = codec.dequeueInputBuffer(10000);
                            if (inIndex >= 0) {
                                ByteBuffer inputBuffer = codec.getInputBuffer(inIndex);
                                inputBuffer.clear();
                                inputBuffer.put(buff);
                                codec.queueInputBuffer(inIndex, 0, buff.length, 16, 0);
                            }
                            MediaCodec.BufferInfo buffInfo = new MediaCodec.BufferInfo();
                            int outIndex = codec.dequeueOutputBuffer(buffInfo, 10000);
                            switch (outIndex) {
                                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                                    break;
                                case MediaCodec.INFO_TRY_AGAIN_LATER:
                                    break;
                                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: // -3
                                    break;
                                default:
                                    codec.releaseOutputBuffer(outIndex, true);
                            }
                        }
                    }
                }
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                if (codec != null) {
                    codec.release();
                }
                if (socket != null) {
                    try {
                        socket.close();
                        is.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }
}
I tried creating the MediaCodec the way you mentioned, as
MediaCodec.createByCodecName(selectCodec("video/avc").getName());
private static MediaCodecInfo selectCodec(String mimeType) {
    int numCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < numCodecs; i++) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
        if (!codecInfo.isEncoder()) {
            continue; // note: this skips decoders, so an encoder is returned
        }
        String[] types = codecInfo.getSupportedTypes();
        for (int j = 0; j < types.length; j++) {
            if (types[j].equalsIgnoreCase(mimeType)) {
                Log.e("codecinfo", codecInfo.getName());
                return codecInfo;
            }
        }
    }
    return null;
}
but it gives me this error:
I/OMXClient: MuxOMX ctor
I/MediaCodec: [OMX.qcom.video.encoder.avc] setting surface generation to 28345345
W/ACodec: [OMX.qcom.video.encoder.avc] Failed to set standard component role 'video_decoder.avc'.
E/ACodec: [OMX.qcom.video.encoder.avc] configureCodec returning error -1010
E/ACodec: signalError(omxError 0x80001001, internalError -1010)
E/MediaCodec: Codec reported err 0xfffffc0e, actionCode 0, while in state 3
E/MediaCodec: configure failed with err 0xfffffc0e, resetting...
I/OMXClient: MuxOMX ctor
E/AndroidRuntime: FATAL EXCEPTION: Thread-5
Process: com.androidsrc.server, PID: 27681
android.media.MediaCodec$CodecException: Error 0xfffffc0e
at android.media.MediaCodec.configure(MediaCodec.java:1884)
at com.androidsrc.server.Server$SocketServerThread.run(Server.java:70)
at java.lang.Thread.run(Thread.java:761)
I also tried to create the MediaCodec earlier with this method:
codec = MediaCodec.createDecoderByType("video/avc");
but the video quality remains the same as in the video shared on YouTube.

I'd rather post this as a comment, but I'm still missing the required reputation points :D
I think you are missing some important bits (or I'm not aware that it works otherwise). The video you provided shows signs of missing information on how to interpret the stream.
- Are you sending and using SPS and PPS? I'm not sure the MediaFormat you create contains all the needed information (see the sketch after this list).
- MediaCodec requires entire frames to be fed to it. Are you sure that you receive a full frame with this sort of logic?
- Some decoders expect the frames without a NAL header. Are you sending frames without NAL headers?
- I don't see you transfer any presentation time.
I suggest you look at the RTP/RTSP protocols for how to transfer media.
To help you further, I suggest you provide the producer logic as well.
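For reference, a minimal sketch of the SPS/PPS point, assuming spsNal and ppsNal (hypothetical names) hold the parameter-set NAL units parsed out of the stream, start codes included:

// Hedged sketch: hand SPS/PPS to the decoder before start().
MediaFormat format = MediaFormat.createVideoFormat("video/avc", 800, 480);
format.setByteBuffer("csd-0", ByteBuffer.wrap(spsNal)); // sequence parameter set
format.setByteBuffer("csd-1", ByteBuffer.wrap(ppsNal)); // picture parameter set
codec.configure(format, surface, null, 0);

Alternatively, the same bytes can be queued as the very first input buffer with the MediaCodec.BUFFER_FLAG_CODEC_CONFIG flag.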

OMX.google.h264.decoder is a software codec with limited functionality (at least on some devices and API levels).
Try to use createDecoderByType instead of createByCodecName, or choose another codec as in the example here:
https://developer.android.com/reference/android/media/MediaCodecInfo.html
(modify it to choose a decoder other than OMX.google.h264.decoder).
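A minimal sketch of that selection, assuming API 21+ and that software decoder names on the device start with "OMX.google." (vendor naming varies):

// Pick the first AVC decoder that is not Google's software codec.
private static String selectHardwareDecoder(String mimeType) {
    MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    for (MediaCodecInfo info : list.getCodecInfos()) {
        if (info.isEncoder()) continue;
        for (String type : info.getSupportedTypes()) {
            if (type.equalsIgnoreCase(mimeType)
                    && !info.getName().startsWith("OMX.google.")) {
                return info.getName();
            }
        }
    }
    return null; // caller can fall back to createDecoderByType(mimeType)
}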

Try to reduce the timeout on dequeue (codec.dequeueInputBuffer(10000);), unless you think decoding really does take that long.
Also set the following flags:
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, CodecCapabilities.COLOR_FormatSurface);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
You mentioned that you changed the code to select by type, but the original post had errors when the decoder was created. Were you able to fix those errors?
You do need to use a hardware decoder to play without a significant performance cost. The new video is definitely worse than the first attempt, which suggests you had a decoder selection issue. You can use ffplay with the -f h264 option to confirm that the format is correct.
Lastly, look into the video dimensions: you can read the buffer back from the decoder (by passing false to releaseOutputBuffer) and write it to disk to confirm your dimensions are correct. The raw video and the final video will have different dimensions. Read the width and height from the codec's output format to confirm it is able to decode correctly; see the sketch below.
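A sketch of those checks, reusing outIndex and buffInfo from the decode loop in the question and assuming API 21+:

// After INFO_OUTPUT_FORMAT_CHANGED, read back the real decoded dimensions.
MediaFormat outFormat = codec.getOutputFormat();
int width = outFormat.getInteger(MediaFormat.KEY_WIDTH);
int height = outFormat.getInteger(MediaFormat.KEY_HEIGHT);
Log.e("decoder", "decoded size " + width + "x" + height);

// To inspect a frame instead of rendering it, copy it out and pass false.
ByteBuffer out = codec.getOutputBuffer(outIndex);
byte[] raw = new byte[buffInfo.size];
out.position(buffInfo.offset);
out.get(raw);
codec.releaseOutputBuffer(outIndex, false); // false = do not render
// write `raw` to disk and check it, e.g. with ffplay or a YUV viewer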

Related

How to play raw NAL units in Android MediaCodec

I initially tried How to play raw NAL units in Android exoplayer?, but I noticed I'm going to have to do things at a low level.
I've found this simple MediaCodec example. As you can see, it's a thread that plays a file on a surface passed to it.
Notice the lines
mExtractor = new MediaExtractor();
mExtractor.setDataSource(filePath);
It looks like I should create my own MediaExtractor which, instead of extracting the video units from a file, will use the H264 NAL units from a buffer I provide.
I can then call mExtractor.setDataSource(MediaDataSource dataSource), see MediaDataSource
It has readAt(long position, byte[] buffer, int offset, int size)
This is where it reads the NAL units. However, how should I pass them? I have no information on the structure of the buffer that needs to be read.
Should I pass a byte[] buffer with the NAL units in it, and if so, in which format? What is the offset for? If it's a buffer, shouldn't I just erase the lines that were read, and thus have no offset or size?
By the way, the H264 NAL units are streaming ones; they come from RTP packets, not from files. I'm going to receive them through C++, store them in a buffer, and try to pass them to the MediaExtractor.
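For what it's worth, a hedged sketch of the readAt() contract (API 23+): copy up to size bytes, starting at the absolute stream position, into buffer beginning at offset, and return the number of bytes copied, or -1 at end of stream. Note that MediaExtractor still expects a real container format behind the data source, not bare NAL units; data below is a hypothetical in-memory copy of the media.

MediaDataSource source = new MediaDataSource() {
    private final byte[] data = new byte[0]; // hypothetical: filled from the RTP side

    @Override
    public int readAt(long position, byte[] buffer, int offset, int size) {
        if (position >= data.length) return -1;                   // end of stream
        int toCopy = (int) Math.min(size, data.length - position);
        System.arraycopy(data, (int) position, buffer, offset, toCopy);
        return toCopy;                                            // bytes actually delivered
    }

    @Override
    public long getSize() { return data.length; }                 // -1 if unknown

    @Override
    public void close() {}
};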
UPDATE:
I've been reading a lot about MediaCodec and I think I understand it better. According to https://developer.android.com/reference/android/media/MediaCodec, everything relies on something of this type:
MediaCodec codec = MediaCodec.createByCodecName(name);
MediaFormat mOutputFormat; // member variable
codec.setCallback(new MediaCodec.Callback() {
    @Override
    void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
        ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
        // fill inputBuffer with valid data
        …
        codec.queueInputBuffer(inputBufferId, …);
    }

    @Override
    void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, …) {
        ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
        MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
        // bufferFormat is equivalent to mOutputFormat
        // outputBuffer is ready to be processed or rendered.
        …
        codec.releaseOutputBuffer(outputBufferId, …);
    }

    @Override
    void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
        // Subsequent data will conform to new format.
        // Can ignore if using getOutputFormat(outputBufferId)
        mOutputFormat = format; // option B
    }

    @Override
    void onError(…) {
        …
    }
});
codec.configure(format, …);
mOutputFormat = codec.getOutputFormat(); // option B
codec.start();
// wait for processing to complete
codec.stop();
codec.release();
As you can see, I can pass input buffers and get decoded output buffers. The exact byte formats are still a mystery, but I think that's how it works. Also, according to the same article, the use of ByteBuffers is slow and Surfaces are preferred; they consume the output buffers automatically. Although there's no tutorial on how to do it, there's a section in the article that says it's almost identical, so I guess I just need to add these additional lines:
codec.setInputSurface(Surface inputSurface)
codec.setOutputSurface(Surface outputSurface)
where inputSurface and outputSurface are Surfaces which I pass to a MediaPlayer which I use (how?) to display the video in an activity. Then the output buffers will simply not arrive in onOutputBufferAvailable (because the surface consumes them first), nor will input buffers arrive in onInputBufferAvailable.
So the questions now are: how exactly do I construct a Surface which contains the video buffer, and how do I display a MediaPlayer in an activity?
For the output I can simply create a Surface and pass it to the MediaPlayer and the MediaCodec, but what about the input? Do I need a ByteBuffer for the input anyway, with the Surface just there for using other outputs as inputs?
You first need to remove the NAL headers and feed the raw H264 bytes into this method. However, in your case you are reading from a file, so there is no need to remove anything since you are not using packets; just feed the data bytes to this method:
private void initDecoder() {
    try {
        writeHeader = true;
        if (mDecodeMediaCodec != null) {
            try {
                mDecodeMediaCodec.stop();
            } catch (Exception e) {}
            try {
                mDecodeMediaCodec.release();
            } catch (Exception e) {}
        }
        mDecodeMediaCodec = MediaCodec.createDecoderByType(MIME_TYPE);
        // MIME_TYPE = video/avc in your case
        mDecodeMediaCodec.configure(format, mSurfaceView.getHolder().getSurface(),
                null,
                0);
        mDecodeMediaCodec.start();
        mDecodeInputBuffers = mDecodeMediaCodec.getInputBuffers();
    } catch (IOException e) {
        e.printStackTrace();
        mLatch.trigger();
    }
}
private void decode(byte[] data) {
    try {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int inputBufferIndex = mDecodeMediaCodec.dequeueInputBuffer(1000);
        if (inputBufferIndex >= 0) {
            ByteBuffer buffer = mDecodeInputBuffers[inputBufferIndex];
            buffer.clear();
            buffer.put(data);
            mDecodeMediaCodec.queueInputBuffer(inputBufferIndex,
                    0,
                    data.length,
                    packet.sequence / 1000,
                    0);
            data = null;
            // decodeDataBuffer.clear();
            // decodeDataBuffer = null;
        }
        int outputBufferIndex = mDecodeMediaCodec.dequeueOutputBuffer(info,
                1000);
        do {
            if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // encodeOutputBuffers = mDecodeMediaCodec.getOutputBuffers();
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                format = mDecodeMediaCodec.getOutputFormat();
                // mediaformat changed
            } else if (outputBufferIndex < 0) {
                // unexpected result from decoder.dequeueOutputBuffer
            } else {
                mDecodeMediaCodec.releaseOutputBuffer(outputBufferIndex,
                        true);
                outputBufferIndex = mDecodeMediaCodec.dequeueOutputBuffer(info,
                        0);
            }
        } while (outputBufferIndex > 0);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Please don't forget that the I-frame (the first frame's bytes) contains essential data and MUST be fed to the decoder first.
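As a hedged aside on telling those frames apart: the NAL unit type is the low five bits of the first byte after the Annex-B start code, so a stream parser can recognize SPS/PPS/IDR before queuing anything else. A minimal check, assuming a 4-byte 00 00 00 01 prefix:

int nalType = data[4] & 0x1F; // first byte after a 4-byte start code
switch (nalType) {
    case 7: /* SPS */             break;
    case 8: /* PPS */             break;
    case 5: /* IDR (key frame) */ break;
    case 1: /* non-IDR slice */   break;
    default:                      break;
}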

MediaExtractor for audio, getting unexpected audio

Using the MediaExtractor class, I am able to get encoded audio sample data from a saved mp4 video with the code below:
ByteBuffer byteBuffer = ByteBuffer.allocate(1024 * 256);
MediaExtractor audioExtractor = new MediaExtractor();
try {
    int trackIndex = -1;
    audioExtractor.setDataSource(originalMediaItem.getFilePath());
    for (int i = 0; i < audioExtractor.getTrackCount(); i++) {
        MediaFormat format = audioExtractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime.startsWith("audio/")) {
            trackIndex = i;
            break;
        }
    }
    audioExtractor.selectTrack(trackIndex);
    mAudioFormatMedia = audioExtractor.getTrackFormat(trackIndex);
    mAudioTrackIndex = mMediaMuxer.addTrack(mAudioFormatMedia);
    int size = audioExtractor.readSampleData(byteBuffer, 0);
    do {
        if (audioExtractor.getSampleTrackIndex() == 1) {
            long presentationTime = audioExtractor.getSampleTime();
            mInputBufferHashMap.put(presentationTime, byteBuffer);
            audioExtractor.advance();
            size = audioExtractor.readSampleData(byteBuffer, 0);
        }
    } while (size >= 0);
    audioExtractor.release();
    audioExtractor = null;
} catch (IOException e) {
    e.printStackTrace();
}
I have a video source coming from a GlSurface and want to use a MediaMuxer to mux this video with the audio extracted above. Audio is interleaved into the muxer from the hashmap as the video is being processed. I succeed in muxing both the video and the audio into a playable mp4 video; however, the audio sounds nothing like the audio of the original mp4.
I do see the expected bufferInfo.size and bufferInfo.presentationTimeUs when I write to the muxer:
mMediaMuxer.writeSampleData(mAudioTrackIndex, buffer, mAudioBufferInfo);
Log.d(TAG, String.format("Wrote %d audio bytes at %d", mAudioBufferInfo.size, mAudioBufferInfo.presentationTimeUs));
I've tried to use the standard inputBuffer/outputBuffer approach with MediaCodec, like this: https://gist.github.com/a-m-s/1991ab18fbcb0fcc2cf9, but this produces the same audio, and to my understanding MediaExtractor already yields encoded audio data, so it should be possible to pipe the data directly.
What is also interesting is that when I check the flags during the initial extraction:
if( (audioExtractor.getSampleFlags() & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
Log.d(TAG, "BUFFER_FLAG_END_OF_STREAM")
Neither of the above gets printed for the original mp4 video. I am now questioning the original mp4 video itself, wondering whether it is possible for an mp4 to have a non-extractable audio track and how I could confirm this.
I believe I've looked at most if not all of the MediaExtractor questions on Stack Overflow, and at a lot of the singleton solutions for MediaExtractor on GitHub. Does anyone know of another way to extract audio, i.e. using ExoPlayer (preferably not ffmpeg, because it adds a ton of overhead to the Android project)? Any insights would help if there are any errors in my current implementation!
EDIT 1: This is what audioExtractor.getTrackFormat(trackIndex) returns:
{max-bitrate=512000, sample-rate=48000, track-id=2, durationUs=22373187, mime=audio/mp4a-latm, profile=2, channel-count=4, language=```, aac-profile=2, bitrate=512000, max-input-size=1764, csd-0=java.nio.HeapByteBuffer[pos=0 lim=2 cap=2]}
The problem was attempting to create a Map for the audio data; the audio data stored that way was not correct. I was able to solve this by batching audio sample data while writing the video data, using a method like the one below:
private void writeAudioSampleData(
        MediaExtractor audioExtractor, MediaMuxer muxer, int filterStart, int filterEnd) {
    mFilterStart = filterEnd;
    MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();
    boolean audioExtractorDone = false;
    audioExtractor.seekTo(filterStart, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
    synchronized (mAudioLockObject) {
        while (!audioExtractorDone) {
            try {
                audioBufferInfo.size =
                        audioExtractor.readSampleData(audioInputBuffer, 0);
            } catch (Exception e) {
                e.printStackTrace();
            }
            if (DEBUG) {
                Log.d(TAG, "audioBufferInfo.size: " + audioBufferInfo.size);
            }
            if (audioBufferInfo.size < 0) {
                audioBufferInfo.size = 0;
                audioExtractorDone = true;
            } else {
                audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
                if (audioBufferInfo.presentationTimeUs > filterEnd) {
                    break; // out of while
                }
                if (audioBufferInfo.presentationTimeUs >= filterStart &&
                        audioBufferInfo.presentationTimeUs <= filterEnd) {
                    audioBufferInfo.presentationTimeUs -= mOriginalMediaItem.mRecordingStartTs;
                    audioBufferInfo.flags = audioExtractor.getSampleFlags();
                    try {
                        muxer.writeSampleData(mAudioTrackIndex, audioInputBuffer,
                                audioBufferInfo);
                        if (DEBUG) Log.d(TAG, String.format("Wrote %d audio bytes at %d",
                                audioBufferInfo.size, audioBufferInfo.presentationTimeUs));
                    } catch (IllegalArgumentException | IllegalStateException |
                            NullPointerException ignore) {}
                }
                audioExtractor.advance();
            }
        }
    }
}

MediaCodec failing on S7

I have set up a series of classes to decode H264 streaming video from a server and render it on a SurfaceView. This code works perfectly on every device I've tried, including the emulator, but then I bought myself an S7, and on this device it no longer works properly.
The weird thing is that sometimes it will work perfectly, and some other times it will throw this error:
06-15 16:41:40.249 13300-24605/cm.myapp E/ACodec: [OMX.Exynos.avc.dec] ERROR(0x90000012)
06-15 16:41:40.249 13300-24605/cm.myapp E/ACodec: signalError(omxError 0x90000012, internalError -2147483648)
06-15 16:41:40.249 13300-24604/cm.myapp E/MediaCodec: Codec reported err 0x90000012, actionCode 0, while in state 6
06-15 16:41:40.249 13300-24578/cm.myapp W/MediaStreamerThread: Failed to draw media.
Sometimes it will crash on the dequeueOutputBuffer call:
java.lang.IllegalStateException
at android.media.MediaCodec.native_dequeueOutputBuffer(Native Method)
at android.media.MediaCodec.dequeueOutputBuffer(MediaCodec.java:2379)
And then again, some other times it will throw this very different error:
06-15 16:34:57.239 13300-16625/cm.myapp W/System.err: java.lang.IllegalArgumentException: The surface has been released
06-15 16:34:57.239 13300-16625/cm.myapp W/System.err: at android.media.MediaCodec.native_configure(Native Method)
06-15 16:34:57.239 13300-16625/cm.myapp W/System.err: at android.media.MediaCodec.configure(MediaCodec.java:1778)
java.lang.RuntimeException: Could not create h264 decoder
These errors by themselves are not very descriptive, so I cannot figure out where the problem might be.
Again, my code works perfectly on most devices but is failing on this one. How is this possible? Any ideas?
This is my decoding code:
public class H264Decoder
{
    static private final long TIMEOUT_US = 10000L;
    private MediaCodec mDecoder;
    private Surface mSurface;
    private static final List<byte[]> EMPTY_ENCODE_RESULT = new ArrayList<>();

    public void init()
    {
        try
        {
            mDecoder = MediaCodec.createDecoderByType("video/avc");
            MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 640, 480);
            mDecoder.configure(mediaFormat, mSurface, null, 0);
            mDecoder.start();
        }
        catch (IOException | NoClassDefFoundError ex) {
            ex.printStackTrace();
            throw new RuntimeException("Could not create h264 decoder", ex);
        }
    }

    public List<byte[]> offer(byte[] data)
    {
        List<byte[]> returnValue = new ArrayList<>();
        returnValue.add(decode(data, true));
        return returnValue;
    }

    public void release()
    {
        assert mSurface != null;
        assert mDecoder != null;
        mDecoder.stop();
        mDecoder.release();
        mDecoder = null;
    }

    public H264Decoder(Surface surface)
    {
        mSurface = surface;
    }

    public byte[] decode(byte[] data, boolean updateRender)
    {
        if (mSurface == null)
        {
            return null;
        }
        // INPUT ---------------------------------------------------------------------------------
        int inputBufferIndex = mDecoder.dequeueInputBuffer(TIMEOUT_US);
        if (inputBufferIndex >= 0)
        {
            // Get an input buffer from the codec, fill it with data, give it back
            ByteBuffer inputBuffer = mDecoder.getInputBuffers()[inputBufferIndex];
            inputBuffer.put(data);
            mDecoder.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0);
        }
        // OUTPUT --------------------------------------------------------------------------------
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outputBufferIndex = mDecoder.dequeueOutputBuffer(info, TIMEOUT_US);
        if (outputBufferIndex >= 0)
        {
            final ByteBuffer[] outputBuffer = mDecoder.getOutputBuffers();
            mDecoder.releaseOutputBuffer(outputBufferIndex, updateRender);
        }
        else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
        {
            // outputBuffers = codec.getOutputBuffers();
        }
        else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)
        {
            MediaFormat format = mDecoder.getOutputFormat();
        }
        return null;
    }
}
It is hard to say what can go wrong. One thing, though:
mDecoder.releaseOutputBuffer(outputBufferIndex, updateRender);
if (!updateRender) {
    return outputBuffer[outputBufferIndex].array();
}
I would not recommend returning an array from the output buffer. The documentation states:
Once an output buffer is released to the codec, it MUST NOT be used.
If you truly need the sample, it would be better to create a copy of the output buffer, call release, and then return the copy (see the sketch below).
If the returned byte array is to be processed somewhere else, I would recommend extracting this decode step into a loop, processing the output ByteBuffer there, and only then calling releaseOutputBuffer. For more reference, look into MediaCodec Synchronous Processing using Buffers.
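A sketch of that copy-then-release pattern, reusing the names from the code above (mDecoder, info, updateRender):

// Copy the decoded bytes out while the buffer is still ours,
// then hand the buffer back to the codec and return the copy.
ByteBuffer outputBuffer = mDecoder.getOutputBuffers()[outputBufferIndex];
byte[] copy = new byte[info.size];
outputBuffer.position(info.offset);
outputBuffer.get(copy);
mDecoder.releaseOutputBuffer(outputBufferIndex, updateRender);
return copy;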
For the Samsung S7, you have to make sure that the first H264 frame inserted into the decoder is an I-frame.
On a Samsung Galaxy S8, I would get ERROR(0x90000012) asynchronously after calling MediaCodec.configure(), before feeding any stream input.
One sure way of getting ERROR(0x90000012) is to use streams in AVCC format (like Apple QuickTime movies).
In fact, it seems that across many Android devices we have more success using H264 in Annex-B format.
Annex-B also means not needing to pass out-of-band extradata using MediaCodec.BUFFER_FLAG_CODEC_CONFIG or SPS/PPS configured via MediaFormat (the csd-0 and csd-1 keys).
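For illustration, a hedged sketch of converting one AVCC access unit to Annex-B: each NAL unit's 4-byte big-endian length prefix is replaced by a 00 00 00 01 start code (this assumes the stream uses 4-byte length fields, which is the common case):

import java.io.ByteArrayOutputStream;

static byte[] avccToAnnexB(byte[] avcc) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    int pos = 0;
    while (pos + 4 <= avcc.length) {
        // Read the 4-byte big-endian NAL length...
        int nalLen = ((avcc[pos] & 0xFF) << 24) | ((avcc[pos + 1] & 0xFF) << 16)
                   | ((avcc[pos + 2] & 0xFF) << 8) | (avcc[pos + 3] & 0xFF);
        pos += 4;
        if (nalLen < 0 || pos + nalLen > avcc.length) break; // malformed input
        // ...and emit a start code followed by the NAL payload instead.
        out.write(new byte[]{0, 0, 0, 1}, 0, 4);
        out.write(avcc, pos, nalLen);
        pos += nalLen;
    }
    return out.toByteArray();
}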

How to encode to AAC with MediaCodec in Android?

I want to convert a file from .mp3 to AAC, and I am referencing the source code of the Spydroid app.
It converts audio data from the microphone audio source to AAC.
I tried to modify the code and change the audio source from the microphone to a local mp3 file,
but I don't know how to push the raw audio into the codec...
Can somebody help me and give me some suggestions?
The encoding code is the following:
protected void encodeWithMediaCodec() throws IOException {
    final int bufferSize = AudioRecord.getMinBufferSize(mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) * 2;
    ((AACLATMPacketizer) mPacketizer).setSamplingRate(mQuality.samplingRate);
    // The music file on my phone that I want to push to the encoder
    long file_size = 0;
    String path = "/storage/sdcard1/music/test.mp3";
    File audio = new File(path);
    mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, mBuffer_Size);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();
    final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    mThread = new Thread(new Runnable() {
        @Override
        public void run() {
            int len = 0, bufferIndex = 0;
            try {
                while (!Thread.interrupted()) {
                    bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
                    if (bufferIndex >= 0) {
                        inputBuffers[bufferIndex].clear();
                        len = (int) audio.length();
                        if (len == AudioRecord.ERROR_INVALID_OPERATION || len == AudioRecord.ERROR_BAD_VALUE) {
                            Log.e(TAG, "An error occured with the AudioRecord API !");
                        } else {
                            Log.v(TAG, "Pushing raw audio to the decoder: len=" + len + " bs: " + inputBuffers[bufferIndex].capacity());
                            mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime() / 1000, 0);
                        }
                    }
                }
            } catch (RuntimeException e) {
                e.printStackTrace();
            }
        }
    });
    mThread.start();
    // The packetizer encapsulates this stream in an RTP stream and sends it over the network
    mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();
    mStreaming = true;
}
It has error log like the following:
V/AACStream(25015): Pushing raw audio to the decoder: len=11952883 bs: 4096
W/System.err(25015): java.lang.IllegalStateException
W/System.err(25015): at android.media.MediaCodec.queueInputBuffer(Native Method)
W/System.err(25015): at net.majorkernelpanic.streaming.audio.AACStream$1.run(AACStream.java:258)
W/System.err(25015): at java.lang.Thread.run(Thread.java:856)
W/InputMethodManagerService( 574): Window already focused, ignoring focus gain of: com.android.internal.view.IInputMethodClient$Stub$Proxy#42afeb98 attribute=null
The error occurs at
mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime()/1000, 0);
I think the problem is the len, but I don't know how to fix it...
It also seems that the music file is never fed to the encoder... I have two questions:
Question 1:
How do I push the audio file to the MediaCodec?
Am I missing something in the code?
Question 2:
How can I make sure the data has been encoded to AAC?
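Not an authoritative answer, but one visible problem in the snippet: len is the whole file size (11952883 bytes in the log) while the input buffer capacity is only 4096, and queueInputBuffer is called without any data having been put into the buffer. A hedged sketch of feeding the file in buffer-sized chunks instead, reusing the question's fields (mMediaCodec, inputBuffers, path); note that for real AAC output the mp3 would first have to be decoded to PCM, e.g. with MediaExtractor plus a MediaCodec decoder:

FileInputStream fis = new FileInputStream(path);
byte[] chunk = new byte[4096];
int read;
while ((read = fis.read(chunk)) != -1) {
    int bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
    if (bufferIndex >= 0) {
        inputBuffers[bufferIndex].clear();
        inputBuffers[bufferIndex].put(chunk, 0, read);     // actually fill the input buffer
        mMediaCodec.queueInputBuffer(bufferIndex, 0, read, // queue only what was read
                System.nanoTime() / 1000, 0);
    }
}
fis.close();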

Android, use MediaCodec with libstreaming

I have a problem with this library:
https://github.com/fyhertz/libstreaming
It allows streaming the camera over the network, and it uses three methods: two with MediaCodec and one with MediaRecorder.
I would like to modify it so that it uses only MediaCodec. First of all, however, I tried the code of example 2 of the library, but I always hit the same error:
the log tells me that the device can use MediaCodec; it sets up the encoder, but when it tests the decoder it falls over and the buffer index is always -1.
This is the method in the EncoderDebugger class where the exception occurs. Can some kind soul help me, please?
private long decode(boolean withPrefix) {
    int n = 3, i = 0, j = 0;
    long elapsed = 0, now = timestamp();
    int decInputIndex = 0, decOutputIndex = 0;
    ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers();
    ByteBuffer[] decOutputBuffers = mDecoder.getOutputBuffers();
    BufferInfo info = new BufferInfo();
    while (elapsed < 3000000) {
        // Feeds the decoder with a NAL unit
        if (i < NB_ENCODED) {
            decInputIndex = mDecoder.dequeueInputBuffer(1000000 / FRAMERATE);
            if (decInputIndex >= 0) {
                int l1 = decInputBuffers[decInputIndex].capacity();
                int l2 = mVideo[i].length;
                decInputBuffers[decInputIndex].clear();
                if ((withPrefix && hasPrefix(mVideo[i])) || (!withPrefix && !hasPrefix(mVideo[i]))) {
                    check(l1 >= l2, "The decoder input buffer is not big enough (nal=" + l2 + ", capacity=" + l1 + ").");
                    decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length);
                } else if (withPrefix && !hasPrefix(mVideo[i])) {
                    check(l1 >= l2 + 4, "The decoder input buffer is not big enough (nal=" + (l2 + 4) + ", capacity=" + l1 + ").");
                    decInputBuffers[decInputIndex].put(new byte[] {0, 0, 0, 1});
                    decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length);
                } else if (!withPrefix && hasPrefix(mVideo[i])) {
                    check(l1 >= l2 - 4, "The decoder input buffer is not big enough (nal=" + (l2 - 4) + ", capacity=" + l1 + ").");
                    decInputBuffers[decInputIndex].put(mVideo[i], 4, mVideo[i].length - 4);
                }
                mDecoder.queueInputBuffer(decInputIndex, 0, l2, timestamp(), 0);
                i++;
            } else {
                if (VERBOSE) Log.d(TAG, "No buffer available !7");
            }
        }
        // Tries to get a decoded image
        decOutputIndex = mDecoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE);
        if (decOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            decOutputBuffers = mDecoder.getOutputBuffers();
        } else if (decOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            mDecOutputFormat = mDecoder.getOutputFormat();
        } else if (decOutputIndex >= 0) {
            if (n > 2) {
                // We have successfully encoded and decoded an image !
                int length = info.size;
                mDecodedVideo[j] = new byte[length];
                decOutputBuffers[decOutputIndex].clear();
                decOutputBuffers[decOutputIndex].get(mDecodedVideo[j], 0, length);
                // Converts the decoded frame to NV21
                convertToNV21(j);
                if (j >= NB_DECODED - 1) {
                    flushMediaCodec(mDecoder);
                    if (VERBOSE) Log.v(TAG, "Decoding " + n + " frames took " + elapsed / 1000 + " ms");
                    return elapsed;
                }
                j++;
            }
            mDecoder.releaseOutputBuffer(decOutputIndex, false);
            n++;
        }
        elapsed = timestamp() - now;
    }
    throw new RuntimeException("The decoder did not decode anything.");
}
Here are my suggestions:
(1) Check the settings of the encoder and the decoder, and make sure that they match. For example, the resolution and the color format must be the same.
(2) Make sure the very first packet generated by the encoder has been sent and pushed into the decoder. This packet defines the basic settings of the video stream.
(3) The decoder usually buffers 5-10 frames, so data only starts coming out of it after a few hundred ms.
(4) While initializing the decoder, set the surface to null; otherwise the output buffer will be read by the surface and probably released automatically. See the sketch below.
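A sketch of point (4), reusing the names from the decode() method above (mediaFormat is a stand-in for whatever format the debugger configures the decoder with):

// Configure with a null surface so decoded frames stay in the output
// ByteBuffers instead of being consumed and released by a surface.
mDecoder.configure(mediaFormat, null, null, 0);
mDecoder.start();

// Later, once dequeueOutputBuffer() returns a valid index:
ByteBuffer frame = decOutputBuffers[decOutputIndex];
// ... copy or convert the frame here (e.g. convertToNV21) ...
mDecoder.releaseOutputBuffer(decOutputIndex, false); // false: nothing to render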
