Android MediaMuxer readSampleData IllegalStateException

I am using the following code to generate a video. However, about one in every five attempts, readSampleData raises either an IllegalArgumentException or an IllegalStateException. What can I do to stop the exceptions? Can I check the state of the extractor before calling readSampleData, and if so, how?
MediaExtractor videoExtractor = new MediaExtractor();
videoExtractor.setDataSource(SOURCE);
Log.d(TAG, "Video Extractor Track Count " + videoExtractor.getTrackCount());

MediaMuxer muxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

videoExtractor.selectTrack(0);
MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
int videoTrack = muxer.addTrack(videoFormat);
Log.d(TAG, "Video Format " + videoFormat.toString());

boolean videoEnded = false;
long videoTimeStamp = 0;
int frameCount = 0;
int offset = 100;

ByteBuffer videoBuf = ByteBuffer.allocate(MAX_SAMPLE_SIZE);
BufferInfo videoBufferInfo = new BufferInfo();

muxer.start();

while (!videoEnded) {
    videoBufferInfo.offset = offset;
    videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);

    if (videoBufferInfo.size < 0) {
        Log.d(TAG, "video ended " + videoBufferInfo.size);
        videoEnded = true;
        videoBufferInfo.size = 0;
    } else {
        videoTimeStamp = videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
        videoBufferInfo.flags = videoExtractor.getSampleFlags();
        muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
        videoExtractor.advance();
        frameCount++;
        Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + (videoBufferInfo.presentationTimeUs / 1000000.0f) + " Flags:" + videoBufferInfo.flags + " Size " + videoBufferInfo.size);
    }
}

muxer.stop();
muxer.release();
The actual code adds audio to the video after the video extractor loop.
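As far as I know, MediaExtractor exposes no public API for querying its state before a read. The IllegalArgumentException case, though, is usually an undersized sample buffer (see the buffer-size answer further down this page), so one option is to size the buffer from the track format instead of a fixed constant. A minimal sketch, assuming the container reports KEY_MAX_INPUT_SIZE (the format dumps later on this page show that it does); the 2 MB fallback is an arbitrary choice, not a documented value:

MediaFormat videoFormat = videoExtractor.getTrackFormat(0);

// Prefer the container's declared maximum sample size when present;
// otherwise fall back to a generous default. This is a defensive sketch,
// not a guaranteed fix for the IllegalStateException case.
int maxSampleSize = videoFormat.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)
        ? videoFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE)
        : 2 * 1024 * 1024; // 2 MB fallback (assumption)
ByteBuffer videoBuf = ByteBuffer.allocate(maxSampleSize);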

Related

How to merge audio and video using MediaMuxer?

I'm developing an Android app with an mp3 audio file and an mp4 video file (no sound).
Now I want to mix them and create a new mp4 video file (with sound). Since Android 4.3, Google suggests using the MediaMuxer class to mux audio and video streams. I have tried many times without success.
Here is the code:
private void muxing() {
    String outputFile = "";
    try {
        File file = new File(Environment.getExternalStorageDirectory() + "/final2.mp4");
        file.createNewFile();
        outputFile = file.getAbsolutePath();

        MediaExtractor videoExtractor = new MediaExtractor();
        videoExtractor.setDataSource(Environment.getExternalStorageDirectory().toString() + "/vd1.h264");

        MediaExtractor audioExtractor = new MediaExtractor();
        audioExtractor.setDataSource(Environment.getExternalStorageDirectory() + "/audioVideoDir/audio.m4a");

        MediaMuxer muxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        videoExtractor.selectTrack(0);
        MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
        int videoTrack = muxer.addTrack(videoFormat);

        audioExtractor.selectTrack(0);
        MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
        int audioTrack = muxer.addTrack(audioFormat);

        boolean sawEOS = false;
        int frameCount = 0;
        int offset = 100;
        int sampleSize = 256 * 1024;

        ByteBuffer videoBuf = ByteBuffer.allocate(sampleSize);
        ByteBuffer audioBuf = ByteBuffer.allocate(sampleSize);

        MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
        MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();

        videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

        muxer.start();

        while (!sawEOS) {
            videoBufferInfo.offset = offset;
            videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);

            if (videoBufferInfo.size < 0 || audioBufferInfo.size < 0) {
                // Log.d(TAG, "saw input EOS.");
                sawEOS = true;
                videoBufferInfo.size = 0;
            } else {
                videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
                videoBufferInfo.flags = videoExtractor.getSampleFlags();
                muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
                videoExtractor.advance();
                frameCount++;
            }
        }

        Toast.makeText(getApplicationContext(), "frame:" + frameCount, Toast.LENGTH_SHORT).show();

        boolean sawEOS2 = false;
        int frameCount2 = 0;
        while (!sawEOS2) {
            frameCount2++;
            audioBufferInfo.offset = offset;
            audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset);

            if (videoBufferInfo.size < 0 || audioBufferInfo.size < 0) {
                // Log.d(TAG, "saw input EOS.");
                sawEOS2 = true;
                audioBufferInfo.size = 0;
            } else {
                audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
                audioBufferInfo.flags = audioExtractor.getSampleFlags();
                muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
                audioExtractor.advance();
            }
        }

        Toast.makeText(getApplicationContext(), "frame:" + frameCount2, Toast.LENGTH_SHORT).show();

        muxer.stop();
        muxer.release();
    } catch (IOException e) {
    } catch (Exception e) {
    }
}
I get the below error:
Unknown mime type 'audio/mpeg'.
12-09 11:58:33.569: A/MPEG4Writer(332): frameworks/av/media/libstagefright/MPEG4Writer.cpp:2699 CHECK(!"should not be here, unknown mime type.")
Any solution to resolve my issue with the MediaMuxer API from Google? I even tried ffmpeg; after merging the audio and video, the result doesn't play in the Android device's default video player, although it does load in VLC player.
FFMPEG followed from here: https://github.com/WritingMinds/ffmpeg-android-java
Try adding an mp4 video file instead.
It's working for me, but the output file has the audio's duration.
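The MPEG4Writer assertion above suggests the muxer was handed a track with a mime type it cannot write into an MP4; "audio/mpeg" (mp3) is rejected, which is why an mp4/m4a (AAC) source works. A hedged sketch of a guard you could add before addTrack; the single accepted type checked here is illustrative, not an exhaustive list of what MediaMuxer supports:

MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
String audioMime = audioFormat.getString(MediaFormat.KEY_MIME);

// MediaMuxer's MP4 writer rejects "audio/mpeg" (mp3) tracks, which matches
// the CHECK failure above. Fail early with a readable message instead of
// crashing inside MPEG4Writer.
if (!"audio/mp4a-latm".equals(audioMime)) {
    throw new IllegalArgumentException(
            "Track mime not muxable into MP4: " + audioMime);
}
int audioTrack = muxer.addTrack(audioFormat);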

Android MediaExtractor readSampleData IllegalArgumentException

I tried to follow this question, Concatenate multiple mp4 audio files using Android's MediaMuxer, and concatenate video files.
But for some reason, MediaExtractor throws an IllegalArgumentException when I call readSampleData.
According to the official API, this function shouldn't throw any exceptions at all! What's going on?
I found an old question suggesting that the size of the ByteBuffer might be responsible: Android MediaMuxer readSampleData IllegalStateException.
I have tried tons of values for the size and none of them made the issue go away. Is there a standard size I should know about?
Any hints could help!
boolean VERBOSE = true;

private boolean concatenateFiles(File dst, List<File> sources) {
    if ((sources == null) || (sources.size() == 0)) {
        return false;
    }

    boolean result;
    MediaExtractor extractor = null;
    MediaMuxer muxer = null;
    try {
        // Set up MediaMuxer for the destination.
        muxer = new MediaMuxer(dst.getPath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        // Copy the samples from MediaExtractor to MediaMuxer.
        boolean sawEOS = false;
        int frameCount = 0;
        int offset = 100;
        ByteBuffer dstBuf = ByteBuffer.allocate(256 * 1024);
        BufferInfo bufferInfo = new BufferInfo();
        long timeOffsetUs = 0;
        int dstTrackIndex = -1;

        for (int fileIndex = 0; fileIndex < sources.size(); fileIndex++) {
            int numberOfSamplesInSource = getNumberOfSamples(sources.get(fileIndex));
            if (VERBOSE) {
                Log.d(TAG, String.format("Source file: %s", sources.get(fileIndex).getPath()));
            }
            if (!sources.get(fileIndex).canRead()) {
                throw new FileNotFoundException("Unable to read " + sources.get(fileIndex));
            }

            // Set up MediaExtractor to read from the source.
            extractor = new MediaExtractor();
            extractor.setDataSource(sources.get(fileIndex).getPath());

            // Set up the tracks.
            SparseIntArray indexMap = new SparseIntArray(extractor.getTrackCount());
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                extractor.selectTrack(i);
                MediaFormat format = extractor.getTrackFormat(i);
                if (dstTrackIndex < 0) {
                    dstTrackIndex = muxer.addTrack(format);
                    muxer.start();
                }
                indexMap.put(i, dstTrackIndex);
            }

            long lastPresentationTimeUs = 0;
            int currentSample = 0;

            while (!sawEOS) {
                bufferInfo.offset = offset;
                bufferInfo.size = extractor.readSampleData(dstBuf, offset);

                if (bufferInfo.size < 0) {
                    sawEOS = true;
                    bufferInfo.size = 0;
                    timeOffsetUs += (lastPresentationTimeUs);
                } else {
                    lastPresentationTimeUs = extractor.getSampleTime();
                    bufferInfo.presentationTimeUs = extractor.getSampleTime() + timeOffsetUs;
                    bufferInfo.flags = extractor.getSampleFlags();
                    int trackIndex = extractor.getSampleTrackIndex();

                    if ((currentSample < numberOfSamplesInSource) || (fileIndex == sources.size() - 1)) {
                        muxer.writeSampleData(indexMap.get(trackIndex), dstBuf, bufferInfo);
                    }
                    extractor.advance();

                    frameCount++;
                    currentSample++;

                    if (VERBOSE) {
                        Log.d(TAG, "Frame (" + frameCount + ") " +
                                "PresentationTimeUs:" + bufferInfo.presentationTimeUs +
                                " Flags:" + bufferInfo.flags +
                                " TrackIndex:" + trackIndex +
                                " Size(KB) " + bufferInfo.size / 1024);
                    }
                }
            }
            extractor.release();
            extractor = null;
        }

        result = true;
    } catch (Exception e) {
        e.printStackTrace();
        result = false;
    } finally {
        if (extractor != null) {
            extractor.release();
        }
        if (muxer != null) {
            muxer.stop();
            muxer.release();
        }
    }
    return result;
}
I just had this error when decoding a 4K movie. Others (including other very high resolution movies) had been working fine, but about halfway through, coincidentally near a more complex scene, I got this error.
My buffer size was 1 MB (1024 * 1024); I increased it to 2 MB and the problem is gone.
So the END_OF_STREAM you find in the source is presumably "end of the buffer".
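Building on that observation, a defensive pattern is to grow the buffer and retry whenever readSampleData throws. A minimal sketch, assuming (as the answer above does) that the exception really signals an undersized buffer; the helper class and the 16 MB cap are illustrative choices of mine, not part of any official API contract:

import java.nio.ByteBuffer;

import android.media.MediaExtractor;

final class GrowingSampleReader {
    private ByteBuffer buf = ByteBuffer.allocate(256 * 1024);

    // Read the current sample, doubling the buffer and retrying when the
    // extractor throws. Gives up at 16 MB, since at that point the failure
    // is presumably something other than buffer size.
    int read(MediaExtractor extractor) {
        while (true) {
            try {
                return extractor.readSampleData(buf, 0);
            } catch (IllegalArgumentException e) {
                if (buf.capacity() >= 16 * 1024 * 1024) {
                    throw e;
                }
                buf = ByteBuffer.allocate(buf.capacity() * 2);
            }
        }
    }

    // Expose the buffer so the caller can hand it to MediaMuxer.writeSampleData.
    ByteBuffer buffer() {
        return buf;
    }
}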

MediaCodec hasn't any available input buffer

I am using the MediaCodec API to encode video and audio into an mp4 file. The data is encoded in separate threads. Sometimes, on some devices, the audio encoder stops returning any available input buffer, and as a result MediaMuxer crashes when trying to stop it. Here is my code:
Configuring the media codec:
public static final String MIME_TYPE_AUDIO = "audio/mp4a-latm";
public static final int SAMPLE_RATE = 44100;
public static final int CHANNEL_COUNT = 1;
public static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO;
public static final int BIT_RATE_AUDIO = 128000;
public static final int SAMPLES_PER_FRAME = 1024 * 2;
public static final int FRAMES_PER_BUFFER = 24;
public static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
public static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;
public static final int MAX_INPUT_SIZE = 16384 * 4;
public static final int MAX_SAMPLE_SIZE = 256 * 1024;

private AudioRecord audioRecord;
private ByteBuffer[] inputBuffers;
private ByteBuffer inputBuffer;
private MediaExtractor mediaExtractor;
private boolean audioSended = false;
private boolean completed = false;
private int sampleCount;
private int iBufferSize;

public AudioEncoderCore(MovieMuxer muxer) throws IOException {
    this.muxer = muxer;
    bufferInfo = new MediaCodec.BufferInfo();

    MediaFormat mediaFormat = MediaFormat.createAudioFormat(MIME_TYPE_AUDIO, SAMPLE_RATE, CHANNEL_COUNT);
    mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, MAX_INPUT_SIZE);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE_AUDIO);

    encoder = MediaCodec.createEncoderByType(MIME_TYPE_AUDIO);
    encoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    encoder.start();

    iBufferSize = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;

    // Ensure buffer is adequately sized for the AudioRecord
    // object to initialize
    int iMinBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT);
    if (iBufferSize < iMinBufferSize)
        iBufferSize = ((iMinBufferSize / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;

    audioRecord = new AudioRecord(AUDIO_SOURCE, SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT, iBufferSize);
    audioRecord.startRecording();
}
Sending data to the encoder:
public void sendDataFromMic(boolean endOfStream) {
    if (endOfStream)
        Log.d(TAG, "sendDataFromMic end of stream");

    long audioPresentationTimeNs;
    byte[] mTempBuffer = new byte[SAMPLES_PER_FRAME];
    audioPresentationTimeNs = System.nanoTime();
    int iReadResult = audioRecord.read(mTempBuffer, 0, mTempBuffer.length);
    if (iReadResult == AudioRecord.ERROR_BAD_VALUE || iReadResult == AudioRecord.ERROR_INVALID_OPERATION || iReadResult == 0) {
        Log.e(TAG, "audio buffer read error");
    } else {
        // send current frame data to encoder
        try {
            if (inputBuffers == null)
                inputBuffers = encoder.getInputBuffers();

            // Sometimes can't get any available input buffer
            int inputBufferIndex = encoder.dequeueInputBuffer(100000);
            Log.d(TAG, "inputBufferIndex = " + inputBufferIndex);
            if (inputBufferIndex >= 0) {
                inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(mTempBuffer, 0, iReadResult);
                Log.d(TAG, "sending frame to audio encoder " + iReadResult + " bytes");
                encoder.queueInputBuffer(inputBufferIndex, 0, iReadResult, audioPresentationTimeNs / 1000,
                        endOfStream ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
            }
        } catch (Throwable t) {
            Log.e(TAG, "sendFrameToAudioEncoder exception");
            t.printStackTrace();
        }
    }
}
Draining the encoder:
public void drainEncoder() {
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    while (true) {
        int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_NSECS);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d(TAG, "no output available, spinning to await EOS");
            break;
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            encoderOutputBuffers = encoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = encoder.getOutputFormat();
            Log.d(TAG, "encoder format changed: " + newFormat);
            trackIndex = muxer.addTrack(newFormat);
        } else if (muxer.isMuxerStarted()) {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null)
                throw new RuntimeException("encoded buffer " + encoderStatus + " was null");

            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                bufferInfo.size = 0;
            }

            if (bufferInfo.size != 0) {
                encodedData.position(bufferInfo.offset);
                encodedData.limit(bufferInfo.offset + bufferInfo.size);
                muxer.writeSampleData(trackIndex, encodedData, bufferInfo);
                Log.d(TAG, "sent " + bufferInfo.size + " bytes to muxer, ts=" +
                        bufferInfo.presentationTimeUs + " track index=" + trackIndex);
            }

            encoder.releaseOutputBuffer(encoderStatus, false);

            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "end of stream reached");
                completed = true;
                break; // out of while
            }
        }
    }
}
The bug is stably reproduced on HTC One and Galaxy S3, but everything works fine on Huawei Honor 3C.
After investigating the source code I found a solution. When I dequeue an output buffer, the muxer may not be started yet, and in that case the buffer was never released; the un-released output buffers presumably exhaust the codec's buffer pool, which is why dequeueInputBuffer eventually stops returning a buffer. So here's the working code for draining the encoder:
public void drainEncoder() {
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    while (true) {
        int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_NSECS);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d(TAG, "no output available, spinning to await EOS");
            break;
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            encoderOutputBuffers = encoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = encoder.getOutputFormat();
            Log.d(TAG, "encoder format changed: " + newFormat);
            trackIndex = muxer.addTrack(newFormat);
        } else if (muxer.isMuxerStarted()) {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null)
                throw new RuntimeException("encoded buffer " + encoderStatus + " was null");

            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                bufferInfo.size = 0;
            }

            if (bufferInfo.size != 0) {
                encodedData.position(bufferInfo.offset);
                encodedData.limit(bufferInfo.offset + bufferInfo.size);
                muxer.writeSampleData(trackIndex, encodedData, bufferInfo);
                Log.d(TAG, "sent " + bufferInfo.size + " bytes to muxer, ts=" +
                        bufferInfo.presentationTimeUs + " track index=" + trackIndex);
            }

            encoder.releaseOutputBuffer(encoderStatus, false);

            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "end of stream reached");
                completed = true;
                break; // out of while
            }
        } else {
            // Muxer not ready, release buffer
            encoder.releaseOutputBuffer(encoderStatus, false);
            Log.d(TAG, "muxer not ready, skip data");
        }
    }
}

Android MediaMuxer Audio Issue

I am trying to use MediaMuxer to add an audio track to a video. The following code works, but the audio stops halfway through the video. Both the video and audio files have only one track each. The playback speed of the audio and video seems fine. The audio file is longer than the video, so I don't think that is the issue. I have been at this for a while now and can't figure it out.
private void createFinalVideo() {
    String outputFile = "";
    try {
        File file = new File(Environment.getExternalStorageDirectory() + File.separator + "final.mp4");
        file.createNewFile();
        outputFile = file.getAbsolutePath();

        MediaExtractor videoExtractor = new MediaExtractor();
        videoExtractor.setDataSource(OUTPUT);

        MediaExtractor audioExtractor = new MediaExtractor();
        final AssetFileDescriptor afd = context.getAssets().openFd("audio.m4a");
        audioExtractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());

        Log.d(TAG, "Video Extractor Track Count " + videoExtractor.getTrackCount());
        Log.d(TAG, "Audio Extractor Track Count " + audioExtractor.getTrackCount());

        MediaMuxer muxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        videoExtractor.selectTrack(0);
        MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
        int videoTrack = muxer.addTrack(videoFormat);

        audioExtractor.selectTrack(0);
        MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
        int audioTrack = muxer.addTrack(audioFormat);

        Log.d(TAG, "Video Format " + videoFormat.toString());
        Log.d(TAG, "Audio Format " + audioFormat.toString());

        boolean sawEOS = false;
        int frameCount = 0;
        int offset = 100;
        int sampleSize = 256 * 1024;

        ByteBuffer videoBuf = ByteBuffer.allocate(sampleSize);
        ByteBuffer audioBuf = ByteBuffer.allocate(sampleSize);

        BufferInfo videoBufferInfo = new BufferInfo();
        BufferInfo audioBufferInfo = new BufferInfo();

        videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

        muxer.start();

        while (!sawEOS) {
            videoBufferInfo.offset = offset;
            audioBufferInfo.offset = offset;
            videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);
            audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset);

            if (videoBufferInfo.size < 0 || audioBufferInfo.size < 0) {
                Log.d(TAG, "saw input EOS.");
                sawEOS = true;
                videoBufferInfo.size = 0;
                audioBufferInfo.size = 0;
            } else {
                videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
                videoBufferInfo.flags = videoExtractor.getSampleFlags();
                muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
                videoExtractor.advance();

                audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
                audioBufferInfo.flags = audioExtractor.getSampleFlags();
                muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
                audioExtractor.advance();

                frameCount++;
                Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs + " Flags:" + videoBufferInfo.flags + " Size(KB) " + videoBufferInfo.size / 1024);
                Log.d(TAG, "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs + " Flags:" + audioBufferInfo.flags + " Size(KB) " + audioBufferInfo.size / 1024);
            }
        }

        muxer.stop();
        muxer.release();
    } catch (IOException e) {
        Log.d(TAG, "Mixer Error 1 " + e.getMessage());
    } catch (Exception e) {
        Log.d(TAG, "Mixer Error 2 " + e.getMessage());
    }
    return;
}
The media format printout is the following:
Video Format {max-input-size=1572864, frame-rate=28, height=1920, csd-0=java.nio.ByteArrayBuffer[position=0,limit=18,capacity=18], width=1072, durationUs=2968688, csd-1=java.nio.ByteArrayBuffer[position=0,limit=8,capacity=8], mime=video/avc, isDMCMMExtractor=1}
Audio Format {max-input-size=1572864, encoder-padding=708, aac-profile=2, csd-0=java.nio.ByteArrayBuffer[position=0,limit=2,capacity=2], sample-rate=44100, durationUs=19783401, channel-count=2, encoder-delay=2112, mime=audio/mp4a-latm, isDMCMMExtractor=1}
Any guidance would be highly appreciated.
Thanks
user346443: The issue was that the video extractor was pulling data out quicker than the audio extractor. Splitting the audio and video into separate loops fixed the issue.
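For reference, a minimal sketch of that two-loop structure, reusing the variable names from the question's code above; this illustrates the accepted fix, it is not user346443's exact code:

// Drain the video track completely in its own loop...
boolean videoDone = false;
while (!videoDone) {
    videoBufferInfo.offset = 0;
    videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, 0);
    if (videoBufferInfo.size < 0) {
        videoDone = true;
        videoBufferInfo.size = 0;
    } else {
        videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
        videoBufferInfo.flags = videoExtractor.getSampleFlags();
        muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
        videoExtractor.advance();
    }
}

// ...then drain the audio track in a second loop, so the shorter track can
// no longer terminate the longer one early.
boolean audioDone = false;
while (!audioDone) {
    audioBufferInfo.offset = 0;
    audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, 0);
    if (audioBufferInfo.size < 0) {
        audioDone = true;
        audioBufferInfo.size = 0;
    } else {
        audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
        audioBufferInfo.flags = audioExtractor.getSampleFlags();
        muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
        audioExtractor.advance();
    }
}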

What are lib_k3_generic_audio_extractor.so, lib_k3_mov_extractor.so, and so on?

I'm experimenting with Android MediaCodec, with the code I got from here: https://code.google.com/p/android-source-browsing.platform--cts
When I tried to run the code "android-source-browsing.platform--cts/tests/tests/media/src/android/media/cts/DecoderTest.java", I got these errors:
D/android.media.cts.DecoderTest(6615): decode(): master = 2131034134
D/android.media.cts.DecoderTest(6615): decode(): masterFd = {AssetFileDescriptor: {ParcelFileDescriptor: FileDescriptor[50]} start=1781867 len=529200}
D/android.media.cts.DecoderTest(6615): decode(): masterLength = 529200
D/android.media.cts.DecoderTest(6615): decode(): is = android.content.res.AssetFileDescriptor$AutoCloseInputStream@417592a0
D/android.media.cts.DecoderTest(6615): decode(): bis = java.io.BufferedInputStream@41759368
I/ExtractorLoader(6615): Register extractor from [lib_k3_avi_extractor.so]
E/ExtractorLoader(6615): Open [lib_k3_mov_extractor.so] failed!
E/ExtractorLoader(6615): Open [lib_k3_flv_extractor.so] failed!
I/ExtractorLoader(6615): Register extractor from [lib_k3_m2ts_extractor.so]
E/ExtractorLoader(6615): Open [lib_k3_asf_extractor.so] failed!
E/ExtractorLoader(6615): Open [lib_k3_generic_audio_extractor.so] failed!
D/android.media.cts.DecoderTest(6615): decode(): testAssetFd = {AssetFileDescriptor: {ParcelFileDescriptor: FileDescriptor[50]} start=1591735 len=49318}
D/android.media.cts.DecoderTest(6615): decode(): testFd = FileDescriptor[50]
D/android.media.cts.DecoderTest(6615): decode(): trackCount = 1
D/android.media.cts.DecoderTest(6615): decode(): fileLength = 49318
D/android.media.cts.DecoderTest(6615): decode(): format = {durationUs=3056326, encoder-padding=1908, channel-count=2, encoder-delay=576, mime=audio/mpeg, sample-rate=44100}
D/android.media.cts.DecoderTest(6615): decode(): mime = audio/mpeg
Apparently the libraries lib_k3_avi_extractor.so, lib_k3_mov_extractor.so, etc., are not on my device. Here are my questions: What are these libraries? What are they for? Why are they not on the device? I googled these libraries, but strangely the search turned up absolutely nothing.
The Java code that generated the errors is listed below, with my minor changes to add the log information:
private void decode(int testinput, int master, float maxerror) throws IOException {
    // read master file into memory
    AssetFileDescriptor masterFd = mResources.openRawResourceFd(master);
    long masterLength = masterFd.getLength(); // in bytes?
    short[] masterBuffer = new short[(int) (masterLength / 2)];
    InputStream is = masterFd.createInputStream();
    BufferedInputStream bis = new BufferedInputStream(is);

    Log.d(TAG, "decode(): master = " + master);
    Log.d(TAG, "decode(): masterFd = " + masterFd);
    Log.d(TAG, "decode(): masterLength = " + masterLength);
    Log.d(TAG, "decode(): is = " + is);
    Log.d(TAG, "decode(): bis = " + bis);

    for (int i = 0; i < masterBuffer.length; i++) {
        int lo = bis.read();
        int hi = bis.read();
        if (hi >= 128) {
            hi -= 256;
        }
        int sample = hi * 256 + lo;
        masterBuffer[i] = (short) sample;
    }
    bis.close();

    MediaExtractor extractor;
    MediaCodec codec;
    ByteBuffer[] codecInputBuffers;
    ByteBuffer[] codecOutputBuffers;

    AssetFileDescriptor testAssetFd = mResources.openRawResourceFd(testinput);
    FileDescriptor testFd = testAssetFd.getFileDescriptor();

    extractor = new MediaExtractor();
    extractor.setDataSource(testFd, testAssetFd.getStartOffset(), testAssetFd.getLength());
    //assertEquals("wrong number of tracks", 1, extractor.getTrackCount());

    MediaFormat format = extractor.getTrackFormat(0);
    String mime = format.getString(MediaFormat.KEY_MIME);
    //assertTrue("not an audio file", mime.startsWith("audio/"));

    Log.d(TAG, "decode(): testAssetFd = " + testAssetFd);
    Log.d(TAG, "decode(): testFd = " + testFd);
    Log.d(TAG, "decode(): trackCount = " + extractor.getTrackCount());
    Log.d(TAG, "decode(): fileLength = " + testAssetFd.getLength());
    Log.d(TAG, "decode(): format = " + format);
    Log.d(TAG, "decode(): mime = " + mime);

    codec = MediaCodec.createDecoderByType(mime);
    codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
    codec.start();

    codecInputBuffers = codec.getInputBuffers();
    codecOutputBuffers = codec.getOutputBuffers();

    extractor.selectTrack(0);

    // start decoding
    int numBytesDecoded = 0;
    int maxdelta = 0;
    long totalErrorSquared = 0;
    final long kTimeOutUs = 5000;
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;

    while (!sawOutputEOS) {
        if (!sawInputEOS) {
            // transfer control of input buffer from codec to Java code
            int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
            if (inputBufIndex >= 0) {
                ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];

                // fill input buffer with data
                int sampleSize = extractor.readSampleData(dstBuf, 0 /* offset */);
                long presentationTimeUs = 0;
                if (sampleSize < 0) {
                    Log.d(TAG, "saw input EOS.");
                    sawInputEOS = true;
                    sampleSize = 0;
                } else {
                    presentationTimeUs = extractor.getSampleTime();
                }

                // let the codec control input buffer again
                codec.queueInputBuffer(
                        inputBufIndex,
                        0 /* offset */,
                        sampleSize,
                        presentationTimeUs,
                        sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);

                if (!sawInputEOS) {
                    extractor.advance();
                }
            }
        }

        int res = codec.dequeueOutputBuffer(info, kTimeOutUs);

        if (res >= 0) {
            int outputBufIndex = res;
            ByteBuffer buf = codecOutputBuffers[outputBufIndex];

            // check the waveform matches within the specified max error
            for (int i = 0; i < info.size; i += 2) {
                short sample = buf.getShort(i);
                int idx = (numBytesDecoded + i) / 2;
                //assertTrue("decoder returned too much data", idx < masterBuffer.length);
                short mastersample = masterBuffer[idx];
                int d = sample - mastersample;
                totalErrorSquared += d * d;
            }

            numBytesDecoded += info.size;

            codec.releaseOutputBuffer(outputBufIndex, false /* render */);

            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "saw output EOS.");
                sawOutputEOS = true;
            }
        } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers = codec.getOutputBuffers();
            Log.d(TAG, "output buffers have changed.");
        } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat oformat = codec.getOutputFormat();
            Log.d(TAG, "output format has changed to " + oformat);
        }
    }

    codec.stop();
    codec.release();
    testAssetFd.close();
    masterFd.close();

    //assertEquals("wrong data size", masterLength, numBytesDecoded);
    long avgErrorSquared = (totalErrorSquared / (numBytesDecoded / 2));
    double rmse = Math.sqrt(avgErrorSquared);
    //assertTrue("decoding error too big: " + rmse, rmse <= maxerror);
}
