How to encode to AAC with MediaCodec in Android?

I want to convert a file from .mp3 to AAC, and I am referencing the source code of the Spydroid app.
It encodes the audio data from the microphone audio source to AAC.
I tried to modify the code and change the audio source from the microphone to a local mp3 file,
but I don't know how to push the raw audio into the codec...
Can somebody help me and give me some suggestions?
The encoding code is the following:
protected void encodeWithMediaCodec() throws IOException {
    final int bufferSize = AudioRecord.getMinBufferSize(mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) * 2;
    ((AACLATMPacketizer) mPacketizer).setSamplingRate(mQuality.samplingRate);

    // The music file on my phone that I want to push to the encoder
    long file_size = 0;
    String path = "/storage/sdcard1/music/test.mp3";
    File audio = new File(path);

    mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, mBuffer_Size);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();

    final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();

    mThread = new Thread(new Runnable() {
        @Override
        public void run() {
            int len = 0, bufferIndex = 0;
            try {
                while (!Thread.interrupted()) {
                    bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
                    if (bufferIndex >= 0) {
                        inputBuffers[bufferIndex].clear();
                        len = (int) audio.length();
                        if (len == AudioRecord.ERROR_INVALID_OPERATION || len == AudioRecord.ERROR_BAD_VALUE) {
                            Log.e(TAG, "An error occured with the AudioRecord API !");
                        } else {
                            Log.v(TAG, "Pushing raw audio to the decoder: len=" + len + " bs: " + inputBuffers[bufferIndex].capacity());
                            mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime() / 1000, 0);
                        }
                    }
                }
            } catch (RuntimeException e) {
                e.printStackTrace();
            }
        }
    });
    mThread.start();

    // The packetizer encapsulates this stream in an RTP stream and sends it over the network
    mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();

    mStreaming = true;
}
The error log looks like the following:
V/AACStream(25015): Pushing raw audio to the decoder: len=11952883 bs: 4096
W/System.err(25015): java.lang.IllegalStateException
W/System.err(25015): at android.media.MediaCodec.queueInputBuffer(Native Method)
W/System.err(25015): at net.majorkernelpanic.streaming.audio.AACStream$1.run(AACStream.java:258)
W/System.err(25015): at java.lang.Thread.run(Thread.java:856)
W/InputMethodManagerService( 574): Window already focused, ignoring focus gain of: com.android.internal.view.IInputMethodClient$Stub$Proxy#42afeb98 attribute=null
The error occurs at the line:
mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime()/1000, 0);
I think the problem is the len value, but I don't know how to fix it...
It also seems that the music file never gets put into the encoder. I have two questions:
Question 1:
How do I push the audio file to the MediaCodec?
Am I missing something in the code?
Question 2:
How can I make sure the data has been encoded to AAC?
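For reference, the usual pattern for feeding raw PCM into a MediaCodec encoder input buffer looks roughly like the sketch below. This is only an illustration, not code from Spydroid: the pcmChunk array and presentationTimeUs value are assumptions here, standing in for whatever decodes the mp3 to PCM (for example a MediaExtractor plus an audio decoder).
// Sketch only: pcmChunk is assumed to hold raw 16-bit PCM that fits into one input buffer
int bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
if (bufferIndex >= 0) {
    ByteBuffer inputBuffer = inputBuffers[bufferIndex];
    inputBuffer.clear();
    int len = Math.min(pcmChunk.length, inputBuffer.remaining());
    inputBuffer.put(pcmChunk, 0, len);            // copy the audio bytes into the codec's buffer
    // len must be the number of bytes actually written, not the size of the whole file
    mMediaCodec.queueInputBuffer(bufferIndex, 0, len, presentationTimeUs, 0);
}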

Related

Audio getting from ByteArray in android

I am trying to get audio from an NV21 byte array. When I run the code below I get a "java.nio.BufferOverflowException" at the line inputBuffer.put(input); how can I get audio from the byte array?
I guess the error comes from the ByteBuffer, but I cannot solve it; I think I should increase the inputBuffer size, but I can't find out how. Please help me.
public void init() {
    // initialize Audio Encoder
    File audio_file = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/", "audio_encoded.aac");
    try {
        outputStream = new BufferedOutputStream(new FileOutputStream(audio_file));
        Log.e("AudioEncoder", "outputStream initialized");
    } catch (Exception e) {
        e.printStackTrace();
    }
    try {
        audioCodec = MediaCodec.createEncoderByType(audioType);
    } catch (IOException e) {
        e.printStackTrace();
    }
    final int kSampleRates[] = { 8000, 11025, 22050, 44100, 48000 };
    final int kBitRates[] = { 64000, 128000 };
    MediaFormat audioFormat = MediaFormat.createAudioFormat(audioType, kSampleRates[3], 2);
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, kBitRates[1]);
    audioCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    audioCodec.start();
}
// called AudioRecord's read
public synchronized void audioEncoder(byte[] input) {
    Log.e("AudioEncoder", input.length + " is coming");
    try {
        ByteBuffer[] inputBuffers = audioCodec.getInputBuffers();
        ByteBuffer[] outputBuffers = audioCodec.getOutputBuffers();
        int inputBufferIndex = audioCodec.dequeueInputBuffer(-1);
        if (inputBufferIndex >= 0) {
            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            inputBuffer.put(input);
            audioCodec.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
        }
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = audioCodec.dequeueOutputBuffer(bufferInfo, 0);
        // Without ADTS header
        while (outputBufferIndex >= 0) {
            ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
            byte[] outData = new byte[bufferInfo.size];
            outputBuffer.get(outData);
            outputStream.write(outData, 0, outData.length);
            Log.e("AudioEncoder", outData.length + " bytes written");
            audioCodec.releaseOutputBuffer(outputBufferIndex, false);
            outputBufferIndex = audioCodec.dequeueOutputBuffer(bufferInfo, 0);
        }
    } catch (Throwable t) {
        t.printStackTrace();
    }
}
private CameraProxy.CameraDataCallBack callBack = new CameraProxy.CameraDataCallBack() {
    @Override
    public void onDataBack(byte[] data, long length) {
        // TODO Auto-generated method stub
        Log.i(TAG, "length . " + length);
        // audio play
        int min_buffer_size = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormats);
        audioRecord = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormats, min_buffer_size);
        audioRecord.read(data, 0, data.length);
        audioEncoder(data);
    }
};
You can try:
audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, inputSize);
inputSize should be set according to the size of the input you feed in. Then the capacity of the input buffers will be big enough.
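For example (my own assumption, not part of the answer above), you could size it from AudioRecord's minimum buffer size and set it before calling configure():
// Hypothetical sizing: make the encoder's input buffers at least as large as one AudioRecord read
int inputSize = AudioRecord.getMinBufferSize(kSampleRates[3], AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT);
audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, inputSize);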
Audio encoding offers a little more flexibility than video, in that it's easier to change the size of the input data.
In this case, I recommend checking the size of inputBuffer (inputBuffer.remaining()) and supplying exactly that amount of audio data. That means if data is too big, only put into the inputBuffer what will fit, and save the rest for the next input buffer. And if data is too small, buffer it someplace temporarily, until you get more audio data (enough to fill the entire inputBuffer). That's the way this codec is intended to be used.
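A rough sketch of that approach (my own illustration of how the audioEncoder() above could be adapted, not the answerer's code; the pcmQueue field is an assumed name):
// Sketch: accumulate incoming PCM and hand the codec exactly as much as the input buffer can hold
private final java.io.ByteArrayOutputStream pcmQueue = new java.io.ByteArrayOutputStream();

public synchronized void audioEncoder(byte[] input) {
    pcmQueue.write(input, 0, input.length);                           // buffer whatever arrives
    byte[] queued = pcmQueue.toByteArray();
    int consumed = 0;
    int inputBufferIndex = audioCodec.dequeueInputBuffer(-1);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = audioCodec.getInputBuffers()[inputBufferIndex];
        inputBuffer.clear();
        int len = Math.min(queued.length, inputBuffer.remaining());   // never overflow the buffer
        inputBuffer.put(queued, 0, len);
        audioCodec.queueInputBuffer(inputBufferIndex, 0, len, 0, 0);
        consumed = len;
    }
    pcmQueue.reset();
    pcmQueue.write(queued, consumed, queued.length - consumed);       // save the rest for next time
    // ...then drain the output buffers as in the original code...
}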
As an aside: Your code appears to have some problems with confusing video data and audio data, the lifecycle of the AudioRecord object, and the proper threading arrangement for capturing video and audio simultaneously. Here are some hints:
NV21 is a picture format, not audio.
onDataBack() is giving you a picture -- then you're overwriting it with a bit of audio
In onDataBack(), data is going to be HUGE -- b/c it contains a picture. And you're trying to read in the whole thing with audio data. Depending on how the AudioRecord is configured, it may only read a few bytes. You should check the return value. From the docs:
Data should be read from the audio hardware in chunks of sizes inferior to the total recording buffer size.
If you are in need of a better piece of sample code, this project looks pretty decent.
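To illustrate those hints, here is a minimal sketch I am adding (an assumption, not from the project linked above): audio capture runs on its own thread, the AudioRecord is created once, and only the bytes actually returned by read() are handed to the encoder.
// Sketch: dedicated audio capture thread, separate from the camera callback
Thread audioThread = new Thread(new Runnable() {
    @Override
    public void run() {
        int minBuf = AudioRecord.getMinBufferSize(44100,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBuf);
        recorder.startRecording();
        byte[] pcm = new byte[minBuf];
        while (!Thread.interrupted()) {
            int read = recorder.read(pcm, 0, pcm.length);       // check the return value
            if (read > 0) {
                audioEncoder(java.util.Arrays.copyOf(pcm, read)); // pass exactly 'read' bytes
            }
        }
        recorder.stop();
        recorder.release();
    }
});
audioThread.start();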

MediaExtractor for audio, getting unexpected audio

Using the MediaExtractor class, I am able to get encoded audio sample data from a saved mp4 video with the code below:
ByteBuffer byteBuffer = ByteBuffer.allocate(1024 * 256);
MediaExtractor audioExtractor = new MediaExtractor();
try {
    int trackIndex = -1;
    audioExtractor.setDataSource(originalMediaItem.getFilePath());
    for (int i = 0; i < audioExtractor.getTrackCount(); i++) {
        MediaFormat format = audioExtractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime.startsWith("audio/")) {
            trackIndex = i;
            break;
        }
    }
    audioExtractor.selectTrack(trackIndex);
    mAudioFormatMedia = audioExtractor.getTrackFormat(trackIndex);
    mAudioTrackIndex = mMediaMuxer.addTrack(mAudioFormatMedia);
    int size = audioExtractor.readSampleData(byteBuffer, 0);
    do {
        if (audioExtractor.getSampleTrackIndex() == 1) {
            long presentationTime = audioExtractor.getSampleTime();
            mInputBufferHashMap.put(presentationTime, byteBuffer);
            audioExtractor.advance();
            size = audioExtractor.readSampleData(byteBuffer, 0);
        }
    } while (size >= 0);
    audioExtractor.release();
    audioExtractor = null;
} catch (IOException e) {
    e.printStackTrace();
}
I have a video source coming from a GLSurface and want to use a MediaMuxer to mux this video with the audio extracted above. The audio is interleaved into the muxer from the hashmap as the video is being processed. I am successful in muxing both video and audio and creating a playable mp4, however the audio does not sound anything like the original audio of the original mp4.
I do see the expected bufferInfo.size and bufferInfo.presentationTimeUs when I write to the muxer:
mMediaMuxer.writeSampleData(mAudioTrackIndex, buffer, mAudioBufferInfo);
Log.d(TAG, String.format("Wrote %d audio bytes at %d", mAudioBufferInfo.size, mAudioBufferInfo.presentationTimeUs));
I've tried to use the standard inputBuffer/outputBuffer approach with MediaCodec, like this https://gist.github.com/a-m-s/1991ab18fbcb0fcc2cf9, but this produces the same audio, and from my understanding the data coming out of MediaExtractor should already be encoded audio, so it should be possible to pipe it directly to the muxer.
What is also interesting is that when I check for the flags while initially extracting:
if ((audioExtractor.getSampleFlags() & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
    Log.d(TAG, "BUFFER_FLAG_END_OF_STREAM");
the above never gets printed for the original mp4 video. I am now questioning the original mp4 video, whether it is possible for an mp4 to have a non-extractable audio track, and how I could confirm this.
I believe I've looked at most if not all of the MediaExtractor questions on Stack Overflow and a lot of the singleton solutions for MediaExtractor on GitHub. Does anyone know of another way to extract audio, e.g. using ExoPlayer (preferably not ffmpeg, because it adds a ton of overhead to the Android project)? Any insights would help if there are any errors in my current implementation!
EDIT 1: This is what audioExtractor.getTrackFormat(trackIndex) returns:
{max-bitrate=512000, sample-rate=48000, track-id=2, durationUs=22373187, mime=audio/mp4a-latm, profile=2, channel-count=4, language=```, aac-profile=2, bitrate=512000, max-input-size=1764, csd-0=java.nio.HeapByteBuffer[pos=0 lim=2 cap=2]}
The problem was attempting to create a Map of the audio data; the audio data stored that way was not correct. I was able to solve this by batching audio sample data while writing the video data, using a method like the one below:
private void writeAudioSampleData(
        MediaExtractor audioExtractor, MediaMuxer muxer, int filterStart, int filterEnd) {
    mFilterStart = filterEnd;
    MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();
    boolean audioExtractorDone = false;
    audioExtractor.seekTo(filterStart, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
    synchronized (mAudioLockObject) {
        while (!audioExtractorDone) {
            try {
                audioBufferInfo.size =
                        audioExtractor.readSampleData(audioInputBuffer, 0);
            } catch (Exception e) {
                e.printStackTrace();
            }
            if (DEBUG) {
                Log.d(TAG, "audioBufferInfo.size: " + audioBufferInfo.size);
            }
            if (audioBufferInfo.size < 0) {
                audioBufferInfo.size = 0;
                audioExtractorDone = true;
            } else {
                audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
                if (audioBufferInfo.presentationTimeUs > filterEnd) {
                    break; // out of while
                }
                if (audioBufferInfo.presentationTimeUs >= filterStart &&
                        audioBufferInfo.presentationTimeUs <= filterEnd) {
                    audioBufferInfo.presentationTimeUs -= mOriginalMediaItem.mRecordingStartTs;
                    audioBufferInfo.flags = audioExtractor.getSampleFlags();
                    try {
                        muxer.writeSampleData(mAudioTrackIndex, audioInputBuffer,
                                audioBufferInfo);
                        if (DEBUG) Log.d(TAG, String.format("Wrote %d audio bytes at %d",
                                audioBufferInfo.size, audioBufferInfo.presentationTimeUs));
                    } catch (IllegalArgumentException | IllegalStateException |
                            NullPointerException ignore) {}
                }
                audioExtractor.advance();
            }
        }
    }
}
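For context, a caller would presumably invoke this once per batch of video frames, passing the presentation-time window that was just written to the muxer. The names below are hypothetical, not from the answer:
// Hypothetical call site: write all audio that falls inside the video batch just muxed
writeAudioSampleData(mAudioExtractor, mMediaMuxer, (int) batchStartUs, (int) batchEndUs);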

Improper decoding of raw H264 video stream using android MediaCodec

I am building a screen-sharing app and trying to receive a raw H264 video stream from a socket and display it on a SurfaceView by decoding it with the MediaCodec class. I am able to receive the data and display it on the surface, but the problem I have been stuck on for two days is that the video is very laggy and jittery, with green bands and patches. You can see the issue in the YouTube video link. If I save the stream to the SD card and play it with MX Player it plays fine, and with GStreamer everything is also fine.
This is my class, called from the Activity when the SurfaceView is created.
//New Edited Code
public class Server {
    static final int socketServerPORT = 53515;
    MainActivity activity;
    ServerSocket serverSocket;

    public Server(MainActivity activity, Surface surface) {
        Log.e("constructor()", "called");
        this.activity = activity;
        Thread socketServerThread = new Thread(new SocketServerThread(surface));
        socketServerThread.start();
    }

    private static MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (codecInfo.isEncoder()) {
                continue;
            }
            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    Log.e("codecinfo", codecInfo.getName());
                    return codecInfo;
                }
            }
        }
        return null;
    }

    private class SocketServerThread extends Thread {
        InputStream is;
        Socket socket;
        private MediaCodec codec;
        private Surface surface;

        public SocketServerThread(Surface surface) {
            this.surface = surface;
        }

        @Override
        public void run() {
            Log.e("socketthread", "called");
            try {
                selectCodec("video/avc");
                codec = MediaCodec.createByCodecName(selectCodec("video/avc").getName());
                MediaFormat format = MediaFormat.createVideoFormat("video/avc", 800, 480);
                // format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
                format.setInteger(MediaFormat.KEY_BIT_RATE, 1024000);
                format.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
                codec.configure(format, surface, null, 0);
                codec.start();
                serverSocket = new ServerSocket(socketServerPORT);
                while (true) {
                    socket = serverSocket.accept();
                    Log.e("connection", "accepted");
                    is = socket.getInputStream();
                    if (is != null) {
                        // File file = new File(Environment.getExternalStorageDirectory() + "/stream.mp4");
                        // OutputStream output = new FileOutputStream(file);
                        byte[] buff = new byte[4 * 1024]; // or other buffer size
                        int read;
                        while ((read = is.read(buff)) != -1) {
                            // output.write(buff, 0, read);
                            if (buff.length == 1)
                                continue;
                            int inIndex = codec.dequeueInputBuffer(10000);
                            if (inIndex >= 0) {
                                ByteBuffer inputBuffer = codec.getInputBuffer(inIndex);
                                inputBuffer.clear();
                                inputBuffer.put(buff);
                                codec.queueInputBuffer(inIndex, 0, buff.length, 16, 0);
                            }
                            MediaCodec.BufferInfo buffInfo = new MediaCodec.BufferInfo();
                            int outIndex = codec.dequeueOutputBuffer(buffInfo, 10000);
                            switch (outIndex) {
                                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                                    break;
                                case MediaCodec.INFO_TRY_AGAIN_LATER:
                                    break;
                                case -3: // MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
                                    break;
                                default:
                                    codec.releaseOutputBuffer(outIndex, true);
                            }
                        }
                    }
                }
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                if (codec != null) {
                    codec.release();
                }
                if (socket != null) {
                    try {
                        socket.close();
                        is.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }
}
I tried the way you mentioned to create the MediaCodec:
MediaCodec.createByCodecName(selectCodec("video/avc").getName());

private static MediaCodecInfo selectCodec(String mimeType) {
    int numCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < numCodecs; i++) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
        if (!codecInfo.isEncoder()) {
            continue;
        }
        String[] types = codecInfo.getSupportedTypes();
        for (int j = 0; j < types.length; j++) {
            if (types[j].equalsIgnoreCase(mimeType)) {
                Log.e("codecinfo", codecInfo.getName());
                return codecInfo;
            }
        }
    }
    return null;
}
but it gives me this error:
I/OMXClient: MuxOMX ctor
I/MediaCodec: [OMX.qcom.video.encoder.avc] setting surface generation to 28345345
W/ACodec: [OMX.qcom.video.encoder.avc] Failed to set standard component role 'video_decoder.avc'.
E/ACodec: [OMX.qcom.video.encoder.avc] configureCodec returning error -1010
E/ACodec: signalError(omxError 0x80001001, internalError -1010)
E/MediaCodec: Codec reported err 0xfffffc0e, actionCode 0, while in state 3
E/MediaCodec: configure failed with err 0xfffffc0e, resetting...
I/OMXClient: MuxOMX ctor
E/AndroidRuntime: FATAL EXCEPTION: Thread-5
Process: com.androidsrc.server, PID: 27681
android.media.MediaCodec$CodecException: Error 0xfffffc0e
at android.media.MediaCodec.configure(MediaCodec.java:1884)
at com.androidsrc.server.Server$SocketServerThread.run(Server.java:70)
at java.lang.Thread.run(Thread.java:761)
Earlier, I also tried to create the MediaCodec with this method:
codec = MediaCodec.createDecoderByType("video/avc");
but the video quality remains the same as in the video shared on YouTube.
I'd rather comment this, but I'm still missing the required reputation points :D
I think you are missing some important bits (or I'm not aware that it works otherwise). The video you provided shows signs of missing information about how to interpret the stream.
Are you sending and using SPS and PPS? I'm not sure the MediaFormat you create contains all the needed information.
MediaCodec requires entire frames to be fed to it. Are you sure that you receive a full frame using this sort of logic? (See the sketch after this list.)
Some decoders expect the frames without a NAL header. Are you sending frames without NAL headers?
I don't see you transfer any presentation time.
I suggest you look at the RTP/RTSP protocols for how to transfer media.
To further help you, I suggest you provide the producer logic as well.
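On the "entire frames" point, here is a minimal sketch of what I mean (my own illustration, not code from the app in question; feedDecoder() is a hypothetical stand-in for the existing dequeueInputBuffer/queueInputBuffer logic):
// Accumulate socket bytes and split on Annex-B start codes (00 00 00 01)
// so the decoder is fed whole NAL units instead of arbitrary 4 KB reads.
private final java.io.ByteArrayOutputStream pending = new java.io.ByteArrayOutputStream();

private void onBytesReceived(byte[] buff, int read) {
    pending.write(buff, 0, read);
    byte[] data = pending.toByteArray();
    int start = findStartCode(data, 0);
    int next = (start < 0) ? -1 : findStartCode(data, start + 4);
    while (start >= 0 && next >= 0) {
        feedDecoder(data, start, next - start);    // one complete NAL unit
        start = next;
        next = findStartCode(data, start + 4);
    }
    pending.reset();
    if (start >= 0) {
        pending.write(data, start, data.length - start);   // keep the incomplete tail
    } else {
        pending.write(data, 0, data.length);               // no start code seen yet
    }
}

private static int findStartCode(byte[] d, int from) {
    for (int i = from; i + 3 < d.length; i++) {
        if (d[i] == 0 && d[i + 1] == 0 && d[i + 2] == 0 && d[i + 3] == 1) return i;
    }
    return -1;
}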
OMX.google.h264.decoder is a software codec with limited functionality (at least on some devices and API levels).
Try to use createDecoderByType instead of createByCodecName, or choose another codec as in the example here:
https://developer.android.com/reference/android/media/MediaCodecInfo.html
(modify it to choose a decoder other than OMX.google.h264.decoder)
Try to reduce the timeout on dequeue (codec.dequeueInputBuffer(10000)), unless you think decoding really takes that long.
Also set the following format keys:
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, CodecCapabilities.COLOR_FormatSurface);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
You mentioned that you changed the code to select by type, but the original post had errors when the decoder was created. Were you able to fix those errors?
You do need to use a hardware decoder to ensure playback without a significant performance cost. The new video is definitely worse than the first attempt, which suggests a decoder-selection issue. You can use ffplay with the -f h264 option to confirm that the format is correct.
Lastly, look into the video dimensions: you can read the buffer back from the decoder (by passing false to releaseOutputBuffer) and write it to disk to confirm your dimensions are correct. The raw video and the final video will have different dimensions. Read the width and height from the codec's output format to confirm it is able to decode correctly.
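As a sketch of that read-back check (my own illustration, meant to sit inside the output-buffer loop above; it assumes the codec was configured without an output Surface, since otherwise the output ByteBuffer is not accessible, and the file path is just an example):
// Write one decoded output buffer to disk instead of rendering it (getOutputBuffer is API 21+)
try {
    ByteBuffer out = codec.getOutputBuffer(outIndex);
    byte[] frame = new byte[buffInfo.size];
    out.position(buffInfo.offset);
    out.get(frame);
    FileOutputStream fos = new FileOutputStream("/sdcard/frame.yuv", true); // append raw frames
    fos.write(frame);
    fos.close();
} catch (IOException e) {
    e.printStackTrace();
}
codec.releaseOutputBuffer(outIndex, false);   // false: do not render to the Surface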

Unable to mux both audio and video

I'm writing an app that records screen capture and audio using MediaCodec. I use MediaMuxer to mux the video and audio into an mp4 file. I successfully managed to write video and audio separately, however when I try muxing them together live, the result is unexpected: either audio is played without video, or video is played right after the audio. My guess is that I'm doing something wrong with timestamps, but I can't figure out what exactly. I already looked at these examples: https://github.com/OnlyInAmerica/HWEncoderExperiments/tree/audiotest/HWEncoderExperiments/src/main/java/net/openwatch/hwencoderexperiments and the ones on bigflake.com, and was not able to find the answer.
Here's my media formats configurations:
mVideoFormat = createVideoFormat();

private static MediaFormat createVideoFormat() {
    MediaFormat format = MediaFormat.createVideoFormat(
            Preferences.MIME_TYPE, mScreenWidth, mScreenHeight);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, Preferences.BIT_RATE);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, Preferences.FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
            Preferences.IFRAME_INTERVAL);
    return format;
}

mAudioFormat = createAudioFormat();

private static MediaFormat createAudioFormat() {
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 64000);
    return format;
}
Audio and video encoders, muxer:
mVideoEncoder = MediaCodec.createEncoderByType(Preferences.MIME_TYPE);
mVideoEncoder.configure(mVideoFormat, null, null,
        MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = new InputSurface(mVideoEncoder.createInputSurface(),
        mSavedEglContext);
mVideoEncoder.start();

if (recordAudio) {
    audioBufferSize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_CONFIGURATION_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    mAudioRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100,
            AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, audioBufferSize);
    mAudioRecorder.startRecording();
    mAudioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
    mAudioEncoder.configure(mAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mAudioEncoder.start();
}

try {
    String fileId = String.valueOf(System.currentTimeMillis());
    mMuxer = new MediaMuxer(dir.getPath() + "/Video"
            + fileId + ".mp4",
            MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException ioe) {
    throw new RuntimeException("MediaMuxer creation failed", ioe);
}

mVideoTrackIndex = -1;
mAudioTrackIndex = -1;
mMuxerStarted = false;
I use this to set up video timestamps:
mInputSurface.setPresentationTime(mSurfaceTexture.getTimestamp());
drainVideoEncoder(false);
And this to set up audio time stamps:
lastQueuedPresentationTimeStampUs = getNextQueuedPresentationTimeStampUs();
if (endOfStream)
    mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, audioBuffer.length, lastQueuedPresentationTimeStampUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
else
    mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, audioBuffer.length, lastQueuedPresentationTimeStampUs, 0);

mAudioBufferInfo.presentationTimeUs = getNextDeQueuedPresentationTimeStampUs();
mMuxer.writeSampleData(mAudioTrackIndex, encodedData,
        mAudioBufferInfo);
lastDequeuedPresentationTimeStampUs = mAudioBufferInfo.presentationTimeUs;

private static long getNextQueuedPresentationTimeStampUs() {
    long nextQueuedPresentationTimeStampUs = (lastQueuedPresentationTimeStampUs > lastDequeuedPresentationTimeStampUs)
            ? (lastQueuedPresentationTimeStampUs + 1) : (lastDequeuedPresentationTimeStampUs + 1);
    Log.i(TAG, "nextQueuedPresentationTimeStampUs: " + nextQueuedPresentationTimeStampUs);
    return nextQueuedPresentationTimeStampUs;
}

private static long getNextDeQueuedPresentationTimeStampUs() {
    Log.i(TAG, "nextDequeuedPresentationTimeStampUs: " + (lastDequeuedPresentationTimeStampUs + 1));
    lastDequeuedPresentationTimeStampUs++;
    return lastDequeuedPresentationTimeStampUs;
}
I took it from this example https://github.com/OnlyInAmerica/HWEncoderExperiments/blob/audiotest/HWEncoderExperiments/src/main/java/net/openwatch/hwencoderexperiments/AudioEncodingTest.java in order to avoid the "timestampUs XXX < lastTimestampUs XXX" error.
Can someone help me figure out the problem, please?
It looks like you're using system-provided time stamps for video, but a simple counter for audio. Unless somehow the video timestamp is being used to seed the audio every frame and it's just not shown above.
For audio and video to play in sync, you need to have the same presentation time stamp on audio and video frames that are expected to be presented at the same time.
See also this related question.
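One common way to keep both streams on a shared time base (a sketch of my own, not from either answer; the field and method names are made up) is to derive the audio PTS from the number of PCM frames captured so far, instead of a plain counter. You would still need to anchor it to the same starting point as the video timestamps.
// Audio PTS derived from the amount of PCM captured so far (16-bit mono: 2 bytes per frame)
private long mTotalPcmFrames = 0;
private static final int SAMPLE_RATE = 44100;

private long nextAudioPtsUs(int bytesRead) {
    long ptsUs = mTotalPcmFrames * 1_000_000L / SAMPLE_RATE;
    mTotalPcmFrames += bytesRead / 2;   // advance by the PCM frames in this buffer
    return ptsUs;
}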
I think the solution might be to just repeatedly read audio samples. You could check if a new video frame is available every N audio samples, and pass it to the muxer with the same timestamp as soon as a new video frame arrives.
int __buffer_offset = 0;
final int CHUNK_SIZE = 100; /* record 100 samples each iteration */
while (!__new_video_frame_available) {
    this._audio_recorder.read(__recorded_data, __buffer_offset, CHUNK_SIZE);
    __buffer_offset += CHUNK_SIZE;
}
I think that should work.
Kindest regards,
Wolfram

What does the error "OMX_GetExtensionIndex failed" mean in the context of a MediaCodec and how can I resolve it?

I am trying to use the low-level media APIs to decode files in mp3 and other formats so I can process them. I am following the tutorial here to take an encoded file and play it back with an AudioTrack; my code is largely the same, but I am getting an error.
Here is my code:
public void playSongMono(View view)
{
    // create extractor and point it to file
    AssetFileDescriptor fd = getResources().openRawResourceFd(R.raw.song);
    MediaExtractor extractor = new MediaExtractor();
    extractor.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getLength());

    // get MIME type of file
    MediaFormat inputFormat = extractor.getTrackFormat(0);
    Log.d("MonoPlayer", "TRACKS #: " + extractor.getTrackCount());
    String mime = inputFormat.getString(MediaFormat.KEY_MIME);
    Log.d("MonoPlayer", "FORMAT: " + mime);
    extractor.selectTrack(0);

    // create codec
    MediaCodec codec = MediaCodec.createDecoderByType(mime);
    Log.d("MonoPlayer", "1");
    codec.configure(inputFormat, null, null, 0);
    Log.d("MonoPlayer", "2");
    codec.start();
    Log.d("MonoPlayer", "3");
    ByteBuffer[] inputBuffers = codec.getInputBuffers();
    ByteBuffer[] outputBuffers = codec.getOutputBuffers();

    // get a buffer and fill it
    int inputBufferIndex = codec.dequeueInputBuffer(1000000);
    if (inputBufferIndex >= 0)
    {
        Log.d("MonoPlayer", "4");
        // fill the buffer
        int sampleSize = extractor.readSampleData(inputBuffers[inputBufferIndex], 0);
        long presentationTimeUs = 0;
        boolean sawInputEOS = false;
        if (sampleSize < 0) {
            sawInputEOS = true;
            sampleSize = 0;
        } else {
            presentationTimeUs = extractor.getSampleTime();
        }
        codec.queueInputBuffer(inputBufferIndex,
                0,
                sampleSize,
                presentationTimeUs,
                sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
        if (!sawInputEOS) {
            extractor.advance();
        }
    }

    BufferInfo info = new BufferInfo(); // this will be populated with data by dequeueOutputBuffer
    // get the index of a finished buffer. since we only enqueued one we should only wait for one
    int resultBufferIndex = codec.dequeueOutputBuffer(info, 1000000);
    if (resultBufferIndex >= 0)
    {
        Log.d("MonoPlayer", "5");
        // we now have a buffer of pcm data
        byte[] chunk = new byte[info.size];
        outputBuffers[resultBufferIndex].get(chunk);
        outputBuffers[resultBufferIndex].clear();
        codec.releaseOutputBuffer(resultBufferIndex, false);

        // create audiotrack to play sound
        audiotrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                44100,
                AudioFormat.CHANNEL_OUT_STEREO,
                AudioFormat.ENCODING_PCM_16BIT,
                chunk.length,
                AudioTrack.MODE_STATIC);
        audiotrack.play();
        audiotrack.write(chunk, 0, chunk.length);
    }

    extractor.release();
    extractor = null;
    codec.stop();
    codec.release();
    codec = null;
}
And upon execution of this code I get the following logcat output
D MonoPlayer TRACKS #: 1
D MonoPlayer FORMAT: audio/mpeg
I OMXClient Using client-side OMX mux.
D MonoPlayer 1
E OMXNodeInstance OMX_GetExtensionIndex failed
D MonoPlayer 2
D MonoPlayer 3
D MonoPlayer 4
Above is the error I mentioned. I'm not exactly sure why this error occurs or what it means, but I have tried to gather some information. The log output suggests that the error happens at the line codec.configure(inputFormat, null, null, 0);. I have tried removing that line, which predictably results in an illegal state error but removes the error in question. Additionally, in the code I posted, Log.d("MonoPlayer","5"); is never reached, even if the timeout on the dequeue call is set to indefinite, so I assume the decoder is not properly configured.
If anyone has any information on why I might be getting this error and what I could do to fix it, that would be great. Thanks in advance.
That message is probably harmless. I see it in tests that succeed.
It appears to be coming from line 288 of OMXNodeInstance.cpp. The OMX.google.android.index.enableAndroidNativeBuffers extension lookup fails, which just means the extension wasn't defined on that device.
