Video trimming on Android using MediaCodec, MediaExtractor, and MediaMuxer - android

In my Android app I need a solution where the user can trim a video from internal storage. I am trying to achieve this without using any third-party library. I was referencing Google's Gallery app source code here. But I am getting the following error:
timestampUs 66733 < lastTimestampUs 133467 for Video track
There are a few SO questions that talk about this timestamp issue, but I really can't figure it out, as I am new to using MediaCodec, MediaMuxer, etc. It would be great if anyone could help.
Here's my code:
// Set up MediaExtractor to read from the source.
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(srcPath);
int trackCount = extractor.getTrackCount();
System.out.println("trackCount = " + trackCount);

// Set up MediaMuxer for the destination.
MediaMuxer muxer;
muxer = new MediaMuxer(dstPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

// Set up the tracks and retrieve the max buffer size for selected tracks.
HashMap<Integer, Integer> indexMap = new HashMap<>(trackCount);
int bufferSize = -1;
for (int i = 0; i < trackCount; i++) {
    MediaFormat format = extractor.getTrackFormat(i);
    String mime = format.getString(MediaFormat.KEY_MIME);
    boolean selectCurrentTrack = false;
    if (mime.startsWith("audio/") && useAudio) {
        selectCurrentTrack = true;
    } else if (mime.startsWith("video/") && useVideo) {
        selectCurrentTrack = true;
    }
    if (selectCurrentTrack) {
        extractor.selectTrack(i);
        int dstIndex = muxer.addTrack(format);
        System.out.println(format);
        System.out.println("dstIndex = " + dstIndex);
        indexMap.put(i, dstIndex);
        if (format.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
            int newSize = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
            bufferSize = newSize > bufferSize ? newSize : bufferSize;
        }
    }
}
System.out.println(indexMap);
if (bufferSize < 0) {
    bufferSize = DEFAULT_BUFFER_SIZE;
}

// Set up the orientation and starting time for extractor.
MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
retrieverSrc.setDataSource(srcPath);
String degreesString = retrieverSrc.extractMetadata(
        MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
if (degreesString != null) {
    int degrees = Integer.parseInt(degreesString);
    if (degrees >= 0) {
        muxer.setOrientationHint(degrees);
    }
}
if (startMs > 0) {
    extractor.seekTo(startMs * 1000, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
}

// Copy the samples from MediaExtractor to MediaMuxer. We will loop
// for copying each sample and stop when we get to the end of the source
// file or exceed the end time of the trimming.
int offset = 0;
int trackIndex = -1;
ByteBuffer dstBuf = ByteBuffer.allocate(bufferSize);
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
try {
    muxer.start();
    while (true) {
        System.out.println("copying");
        bufferInfo.offset = offset;
        bufferInfo.size = extractor.readSampleData(dstBuf, offset);
        if (bufferInfo.size < 0) {
            System.out.println("Saw input EOS.");
            bufferInfo.size = 0;
            break;
        } else {
            // getSampleTime() returns the current sample's presentation
            // time in microseconds, or -1 if no more samples are available.
            bufferInfo.presentationTimeUs = extractor.getSampleTime();
            // 1 second = 1,000,000 microseconds
            if (endMs > 0 && bufferInfo.presentationTimeUs > (endMs * 1000)) {
                break;
            } else {
                bufferInfo.flags = extractor.getSampleFlags();
                trackIndex = extractor.getSampleTrackIndex();
                muxer.writeSampleData(indexMap.get(trackIndex), dstBuf, bufferInfo);
                extractor.advance();
            }
        }
    }
    muxer.stop();
    File file = new File(srcPath);
    file.delete();
} catch (IllegalStateException e) {
    System.out.println("The source video file is malformed");
} finally {
    muxer.release();
}
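
A workaround suggested in several of those SO discussions is to guard each destination track against non-monotonic timestamps, since MPEG4Writer rejects a sample whose timestamp is not greater than the last one written to the same track. A minimal sketch of such a guard (a hypothetical helper, not from the Gallery source; lastPtsUs maps destination track index to the last written timestamp):

import java.nio.ByteBuffer;
import java.util.HashMap;
import android.media.MediaCodec;
import android.media.MediaMuxer;

// Writes one sample only if its timestamp is strictly greater than the last
// one written for the same destination track; returns true if written.
static boolean writeMonotonic(MediaMuxer muxer, int dstIndex, ByteBuffer buf,
                              MediaCodec.BufferInfo info,
                              HashMap<Integer, Long> lastPtsUs) {
    Long last = lastPtsUs.get(dstIndex);
    if (last != null && info.presentationTimeUs <= last) {
        return false; // would trigger "timestampUs < lastTimestampUs"
    }
    muxer.writeSampleData(dstIndex, buf, info);
    lastPtsUs.put(dstIndex, info.presentationTimeUs);
    return true;
}

In the copy loop above, the direct muxer.writeSampleData(...) call would be replaced with writeMonotonic(muxer, indexMap.get(trackIndex), dstBuf, bufferInfo, lastPtsUs); whether dropping such samples is acceptable depends on the stream (it can happen after a seek, or with B-frames).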

Related

PCM to MP3 using MediaCodec API Android

I am getting PCM data from the microphone using the AudioRecord class; how do I encode it to MP3 using MediaCodec in Android?
Yes, it is possible. See the MediaCodec reference.
The code below will convert PCM to M4A; you will have to modify the muxer output format for MP3.
private void convertAudio(String filename) throws IOException {
    String outputpath = Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_MUSIC).getPath() + "/converted.m4a";

    // Set up MediaExtractor to read from the source.
    MediaExtractor extractor = new MediaExtractor();
    extractor.setDataSource(filename);
    int trackCount = extractor.getTrackCount();

    // Set up MediaMuxer for the destination.
    MediaMuxer muxer;
    muxer = new MediaMuxer(outputpath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

    // Set up the tracks.
    HashMap<Integer, Integer> indexMap = new HashMap<Integer, Integer>(trackCount);
    for (int i = 0; i < trackCount; i++) {
        extractor.selectTrack(i);
        MediaFormat format = extractor.getTrackFormat(i);
        format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_AMR_NB);
        int dstIndex = muxer.addTrack(format);
        indexMap.put(i, dstIndex);
    }

    // Copy the samples from MediaExtractor to MediaMuxer.
    boolean sawEOS = false;
    int bufferSize = 32000;
    int frameCount = 0;
    int offset = 100;
    ByteBuffer dstBuf = ByteBuffer.allocate(bufferSize);
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    muxer.start();
    while (!sawEOS) {
        bufferInfo.offset = offset;
        bufferInfo.size = extractor.readSampleData(dstBuf, offset);
        if (bufferInfo.size < 0) {
            sawEOS = true;
            bufferInfo.size = 0;
        } else {
            bufferInfo.presentationTimeUs = extractor.getSampleTime();
            bufferInfo.flags = extractor.getSampleFlags();
            int trackIndex = extractor.getSampleTrackIndex();
            muxer.writeSampleData(indexMap.get(trackIndex), dstBuf, bufferInfo);
            extractor.advance();
            frameCount++;
        }
    }
    muxer.stop();
    muxer.release();
}

Android MediaExtractor readSampleData IllegalArgumentException

I tried to follow this question, Concatenate multiple mp4 audio files using android's MediaMuxer, to concatenate video files.
But for some reason, the MediaExtractor throws an IllegalArgumentException when I call readSampleData.
According to the official API, this function shouldn't throw any exceptions at all! What's going on?
I have found an old question suggesting that the size of the ByteBuffer might be responsible: Android MediaMuxer readSampleData IllegalStateException.
I have tried tons of values for the size, and none of them made the issue go away. Is there a standard size I should know about?
Any hints could help!
boolean VERBOSE = true;

private boolean concatenateFiles(File dst, List<File> sources) {
    if ((sources == null) || (sources.size() == 0)) {
        return false;
    }
    boolean result;
    MediaExtractor extractor = null;
    MediaMuxer muxer = null;
    try {
        // Set up MediaMuxer for the destination.
        muxer = new MediaMuxer(dst.getPath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        // Copy the samples from MediaExtractor to MediaMuxer.
        boolean sawEOS = false;
        int frameCount = 0;
        int offset = 100;
        ByteBuffer dstBuf = ByteBuffer.allocate(256 * 1024);
        BufferInfo bufferInfo = new BufferInfo();
        long timeOffsetUs = 0;
        int dstTrackIndex = -1;

        for (int fileIndex = 0; fileIndex < sources.size(); fileIndex++) {
            int numberOfSamplesInSource = getNumberOfSamples(sources.get(fileIndex));
            if (VERBOSE) {
                Log.d(TAG, String.format("Source file: %s", sources.get(fileIndex).getPath()));
            }
            if (!sources.get(fileIndex).canRead()) {
                throw new FileNotFoundException("Unable to read " + sources.get(fileIndex));
            }

            // Set up MediaExtractor to read from the source.
            extractor = new MediaExtractor();
            extractor.setDataSource(sources.get(fileIndex).getPath());

            // Set up the tracks.
            SparseIntArray indexMap = new SparseIntArray(extractor.getTrackCount());
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                extractor.selectTrack(i);
                MediaFormat format = extractor.getTrackFormat(i);
                if (dstTrackIndex < 0) {
                    dstTrackIndex = muxer.addTrack(format);
                    muxer.start();
                }
                indexMap.put(i, dstTrackIndex);
            }

            long lastPresentationTimeUs = 0;
            int currentSample = 0;
            while (!sawEOS) {
                bufferInfo.offset = offset;
                bufferInfo.size = extractor.readSampleData(dstBuf, offset);
                if (bufferInfo.size < 0) {
                    sawEOS = true;
                    bufferInfo.size = 0;
                    timeOffsetUs += lastPresentationTimeUs;
                } else {
                    lastPresentationTimeUs = extractor.getSampleTime();
                    bufferInfo.presentationTimeUs = extractor.getSampleTime() + timeOffsetUs;
                    bufferInfo.flags = extractor.getSampleFlags();
                    int trackIndex = extractor.getSampleTrackIndex();
                    if ((currentSample < numberOfSamplesInSource) || (fileIndex == sources.size() - 1)) {
                        muxer.writeSampleData(indexMap.get(trackIndex), dstBuf, bufferInfo);
                    }
                    extractor.advance();
                    frameCount++;
                    currentSample++;
                    if (VERBOSE) {
                        Log.d(TAG, "Frame (" + frameCount + ") " +
                                "PresentationTimeUs:" + bufferInfo.presentationTimeUs +
                                " Flags:" + bufferInfo.flags +
                                " TrackIndex:" + trackIndex +
                                " Size(KB) " + bufferInfo.size / 1024);
                    }
                }
            }
            extractor.release();
            extractor = null;
        }
        result = true;
    } catch (Exception e) {
        e.printStackTrace();
        result = false;
    } finally {
        if (extractor != null) {
            extractor.release();
        }
        if (muxer != null) {
            muxer.stop();
            muxer.release();
        }
    }
    return result;
}
I just had this error when decoding a 4K movie. Others (including other very high-resolution movies) had been working fine, but about halfway through (coincidentally near a more complex scene...) I got this error.
My buffer size was 1 MB (1024 * 1024); I increased it to 2 MB and the problem was gone.
So the END_OF_STREAM you find in the source is presumably "end of the buffer".
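
Building on that observation, rather than trying tons of values you can usually size the buffer from the track formats themselves, which often carry KEY_MAX_INPUT_SIZE for the largest sample in the track. A minimal sketch (the 2 MB fallback for formats that do not report a size is my assumption):

import java.nio.ByteBuffer;
import android.media.MediaExtractor;
import android.media.MediaFormat;

// Pick a buffer large enough for the biggest sample the extractor will return.
static ByteBuffer allocateSampleBuffer(MediaExtractor extractor) {
    int maxSize = -1;
    for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        if (format.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
            maxSize = Math.max(maxSize, format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE));
        }
    }
    // Fall back to 2 MB (an assumption) when no track reports a max input size.
    return ByteBuffer.allocate(maxSize > 0 ? maxSize : 2 * 1024 * 1024);
}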

Create MP4 container for .ts segment

I want to play static HLS content (not live video content) in my app on Android. What I currently do is download all the segments from the .m3u8 file and merge them into one file. When I play this file, I can see the video being played, but it is not seekable. As per this link, .ts files are not seekable on Android.
I cannot risk running ffmpeg on the phone to convert the file to MP4 format. I have studied the MP4 format and its atom structure. What I want to know is whether there is an easy way to create an MP4 container (atom hierarchy) that would simply refer to the .ts segment (the merged segment that was created from sub-segments) in its data atom (mdat).
I would really appreciate any help/suggestions.
Not possible without a copy. TS uses 188-byte packets with headers. These headers create breaks in the middle of frames. In MP4, frames must be contiguous.
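
Since a copy is unavoidable, the cheapest copy is a remux rather than a re-encode. A sketch of that approach using the same APIs discussed elsewhere on this page (not from the original answer; it assumes the device's MediaExtractor can parse the merged .ts and that its codecs, e.g. H.264/AAC, are accepted by MediaMuxer; the 2 MB buffer size is an assumption):

import java.nio.ByteBuffer;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaMuxer;

// Rewrite the samples of a .ts file into an .mp4 without re-encoding.
static void remuxTsToMp4(String tsPath, String mp4Path) throws Exception {
    MediaExtractor extractor = new MediaExtractor();
    extractor.setDataSource(tsPath);
    MediaMuxer muxer = new MediaMuxer(mp4Path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

    int[] dstIndex = new int[extractor.getTrackCount()];
    for (int i = 0; i < extractor.getTrackCount(); i++) {
        extractor.selectTrack(i);
        dstIndex[i] = muxer.addTrack(extractor.getTrackFormat(i));
    }
    muxer.start();

    ByteBuffer buf = ByteBuffer.allocate(2 * 1024 * 1024); // assumed max sample size
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    while (true) {
        info.offset = 0;
        info.size = extractor.readSampleData(buf, 0);
        if (info.size < 0) break; // end of stream
        info.presentationTimeUs = extractor.getSampleTime();
        info.flags = extractor.getSampleFlags();
        muxer.writeSampleData(dstIndex[extractor.getSampleTrackIndex()], buf, info);
        extractor.advance();
    }
    muxer.stop();
    muxer.release();
    extractor.release();
}

The resulting MP4 gets a proper sample table (moov), which is what makes it seekable.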
Android provides APIs such as MediaCodec and MediaExtractor that give access to low-level media encoding/decoding. It is fast and efficient as it uses hardware acceleration.
Here's how I believe one is supposed to do it on Android, unless you are OK with using ffmpeg, which of course is a resource-intensive operation.
1) Use MediaExtractor to extract data from the file.
2) Pass the extracted data to MediaCodec.
3) Use MediaCodec to render output to a surface (in the case of video) or an AudioTrack (in the case of audio).
4) This is the most difficult step: synchronize audio/video. I haven't implemented this yet. But this would require keeping track of the time sync between audio and video. Audio would be played normally, and you might have to drop some frames in the case of video to keep it in sync with audio playback (a rough sketch of one approach appears after the decode code below).
Here's code for decoding audio and video and playing them using an AudioTrack and a Surface, respectively.
In the case of video decoding, there's a sleep to slow down the frame rendering.
public void decodeVideo(Surface surface) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    MediaCodec codec;
    ByteBuffer[] codecInputBuffers;
    ByteBuffer[] codecOutputBuffers;
    extractor.setDataSource(file);
    Log.d(TAG, "No of tracks = " + extractor.getTrackCount());
    MediaFormat format = extractor.getTrackFormat(0);
    String mime = format.getString(MediaFormat.KEY_MIME);
    Log.d(TAG, "mime = " + mime);
    Log.d(TAG, "format = " + format);
    codec = MediaCodec.createDecoderByType(mime);
    codec.configure(format, surface, null, 0);
    codec.start();
    codecInputBuffers = codec.getInputBuffers();
    codecOutputBuffers = codec.getOutputBuffers();
    extractor.selectTrack(0);

    final long timeout_in_Us = 5000;
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;
    int noOutputCounter = 0;
    long startMs = System.currentTimeMillis();

    while (!sawOutputEOS && noOutputCounter < 50) {
        noOutputCounter++;
        if (!sawInputEOS) {
            int inputBufIndex = codec.dequeueInputBuffer(timeout_in_Us);
            if (inputBufIndex >= 0) {
                ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                int sampleSize = extractor.readSampleData(dstBuf, 0);
                long presentationTimeUs = 0;
                if (sampleSize < 0) {
                    Log.d(TAG, "saw input EOS.");
                    sawInputEOS = true;
                    sampleSize = 0;
                } else {
                    presentationTimeUs = extractor.getSampleTime();
                }
                codec.queueInputBuffer(inputBufIndex, 0, sampleSize, presentationTimeUs,
                        sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                if (!sawInputEOS) {
                    extractor.advance();
                }
            }
        }
        int res = codec.dequeueOutputBuffer(info, timeout_in_Us);
        if (res >= 0) {
            if (info.size > 0) {
                noOutputCounter = 0;
            }
            int outputBufIndex = res;
            // Sleep until the frame's presentation time to slow down rendering.
            while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                try {
                    Thread.sleep(5);
                } catch (Exception e) {
                    break;
                }
            }
            codec.releaseOutputBuffer(outputBufIndex, true);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                Log.d(TAG, "saw output EOS.");
                sawOutputEOS = true;
            }
        } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers = codec.getOutputBuffers();
            Log.d(TAG, "output buffers have changed.");
        } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat format1 = codec.getOutputFormat();
            Log.d(TAG, "output format has changed to " + format1);
        } else if (res == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d(TAG, "Codec try again returned " + res);
        }
    }
    codec.stop();
    codec.release();
}
private int audioSessionId = -1;

private AudioTrack createAudioTrack(MediaFormat format) {
    int channelConfiguration = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT) == 1
            ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;
    int bufferSize = AudioTrack.getMinBufferSize(format.getInteger(MediaFormat.KEY_SAMPLE_RATE),
            channelConfiguration, AudioFormat.ENCODING_PCM_16BIT) * 8;
    AudioTrack audioTrack;
    if (audioSessionId == -1) {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                format.getInteger(MediaFormat.KEY_SAMPLE_RATE), channelConfiguration,
                AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM);
    } else {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                format.getInteger(MediaFormat.KEY_SAMPLE_RATE), channelConfiguration,
                AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM, audioSessionId);
    }
    audioTrack.play();
    audioSessionId = audioTrack.getAudioSessionId();
    return audioTrack;
}

public void decodeAudio() throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    MediaCodec codec;
    ByteBuffer[] codecInputBuffers;
    ByteBuffer[] codecOutputBuffers;
    extractor.setDataSource(file);
    Log.d(TAG, "No of tracks = " + extractor.getTrackCount());
    MediaFormat format = extractor.getTrackFormat(1);
    String mime = format.getString(MediaFormat.KEY_MIME);
    Log.d(TAG, "mime = " + mime);
    Log.d(TAG, "format = " + format);
    codec = MediaCodec.createDecoderByType(mime);
    codec.configure(format, null, null, 0);
    codec.start();
    codecInputBuffers = codec.getInputBuffers();
    codecOutputBuffers = codec.getOutputBuffers();
    extractor.selectTrack(1);
    AudioTrack audioTrack = createAudioTrack(format);

    final long timeout_in_Us = 5000;
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;
    int noOutputCounter = 0;

    while (!sawOutputEOS && noOutputCounter < 50) {
        noOutputCounter++;
        if (!sawInputEOS) {
            int inputBufIndex = codec.dequeueInputBuffer(timeout_in_Us);
            if (inputBufIndex >= 0) {
                ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                int sampleSize = extractor.readSampleData(dstBuf, 0);
                long presentationTimeUs = 0;
                if (sampleSize < 0) {
                    Log.d(TAG, "saw input EOS.");
                    sawInputEOS = true;
                    sampleSize = 0;
                } else {
                    presentationTimeUs = extractor.getSampleTime();
                }
                codec.queueInputBuffer(inputBufIndex, 0, sampleSize, presentationTimeUs,
                        sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                if (!sawInputEOS) {
                    extractor.advance();
                }
            }
        }
        int res = codec.dequeueOutputBuffer(info, timeout_in_Us);
        if (res >= 0) {
            if (info.size > 0) {
                noOutputCounter = 0;
            }
            int outputBufIndex = res;
            // Possibly store the decoded buffer
            ByteBuffer buf = codecOutputBuffers[outputBufIndex];
            final byte[] chunk = new byte[info.size];
            buf.get(chunk);
            buf.clear();
            if (chunk.length > 0) {
                audioTrack.write(chunk, 0, chunk.length);
            }
            codec.releaseOutputBuffer(outputBufIndex, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                Log.d(TAG, "saw output EOS.");
                sawOutputEOS = true;
            }
        } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers = codec.getOutputBuffers();
            Log.d(TAG, "output buffers have changed.");
        } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat format1 = codec.getOutputFormat();
            Log.d(TAG, "output format has changed to " + format1);
            audioTrack.stop();
            audioTrack = createAudioTrack(codec.getOutputFormat());
        } else if (res == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d(TAG, "Codec try again returned " + res);
        }
    }
    codec.stop();
    codec.release();
}
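
Step 4 above (A/V sync) is not implemented in the code; a rough sketch of one approach, using the audio playback position as the master clock (the names and thresholds here are hypothetical, and it assumes the audio and video decode loops run on separate threads sharing one AudioClock):

import android.media.AudioTrack;
import android.media.MediaCodec;

// The audio playback head is the master clock.
class AudioClock {
    private final AudioTrack track;
    private final int sampleRate;

    AudioClock(AudioTrack track, int sampleRate) {
        this.track = track;
        this.sampleRate = sampleRate;
    }

    // Microseconds of audio actually played so far.
    long nowUs() {
        return track.getPlaybackHeadPosition() * 1_000_000L / sampleRate;
    }
}

// On the video thread, release each output buffer against the clock instead
// of System.currentTimeMillis(); frames that are very late get dropped.
class VideoSync {
    static void syncRelease(MediaCodec codec, int outputBufIndex,
                            long ptsUs, AudioClock clock) throws InterruptedException {
        long aheadUs = ptsUs - clock.nowUs();
        while (aheadUs > 10_000) { // early: wait for the clock to catch up
            Thread.sleep(Math.min(aheadUs / 1000, 10));
            aheadUs = ptsUs - clock.nowUs();
        }
        boolean render = aheadUs > -50_000; // more than 50 ms late: drop the frame
        codec.releaseOutputBuffer(outputBufIndex, render);
    }
}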

What are lib_k3_generic_audio_extractor.so, lib_k3_mov_extractor.so, and so on?

I'm experimenting with Android MediaCodec, with the code I got from here: https://code.google.com/p/android-source-browsing.platform--cts
When I tried to run the code "android-source-browsing.platform--cts/tests/tests/media/src/android/media/cts/DecoderTest.java", I got these errors:
D/android.media.cts.DecoderTest(6615): decode(): master = 2131034134
D/android.media.cts.DecoderTest(6615): decode(): masterFd = {AssetFileDescriptor: {ParcelFileDescriptor: FileDescriptor[50]} start=1781867 len=529200}
D/android.media.cts.DecoderTest(6615): decode(): masterLength = 529200
D/android.media.cts.DecoderTest(6615): decode(): is = android.content.res.AssetFileDescriptor$AutoCloseInputStream@417592a0
D/android.media.cts.DecoderTest(6615): decode(): bis = java.io.BufferedInputStream@41759368
I/ExtractorLoader(6615): Register extractor from [lib_k3_avi_extractor.so]
E/ExtractorLoader(6615): Open [lib_k3_mov_extractor.so] failed!
E/ExtractorLoader(6615): Open [lib_k3_flv_extractor.so] failed!
I/ExtractorLoader(6615): Register extractor from [lib_k3_m2ts_extractor.so]
E/ExtractorLoader(6615): Open [lib_k3_asf_extractor.so] failed!
E/ExtractorLoader(6615): Open [lib_k3_generic_audio_extractor.so] failed!
D/android.media.cts.DecoderTest(6615): decode(): testAssetFd = {AssetFileDescriptor: {ParcelFileDescriptor: FileDescriptor[50]} start=1591735 len=49318}
D/android.media.cts.DecoderTest(6615): decode(): testFd = FileDescriptor[50]
D/android.media.cts.DecoderTest(6615): decode(): trackCount = 1
D/android.media.cts.DecoderTest(6615): decode(): fileLength = 49318
D/android.media.cts.DecoderTest(6615): decode(): format = {durationUs=3056326, encoder-padding=1908, channel-count=2, encoder-delay=576, mime=audio/mpeg, sample-rate=44100}
D/android.media.cts.DecoderTest(6615): decode(): mime = audio/mpeg
Apparently the libraries lib_k3_avi_extractor.so, lib_k3_mov_extractor.so, etc., are not on my device. Here are my questions: What are these libraries? What are they for? Why are they not on the device? I googled these libraries, but strangely the search came up with absolutely nothing.
The Java code that generated the errors is listed below, with my minor changes to add the log information:
private void decode(int testinput, int master, float maxerror) throws IOException {
    // read master file into memory
    AssetFileDescriptor masterFd = mResources.openRawResourceFd(master);
    long masterLength = masterFd.getLength(); // in bytes?
    short[] masterBuffer = new short[(int) (masterLength / 2)];
    InputStream is = masterFd.createInputStream();
    BufferedInputStream bis = new BufferedInputStream(is);
    Log.d(TAG, "decode(): master = " + master);
    Log.d(TAG, "decode(): masterFd = " + masterFd);
    Log.d(TAG, "decode(): masterLength = " + masterLength);
    Log.d(TAG, "decode(): is = " + is);
    Log.d(TAG, "decode(): bis = " + bis);
    // reassemble little-endian byte pairs into 16-bit samples
    for (int i = 0; i < masterBuffer.length; i++) {
        int lo = bis.read();
        int hi = bis.read();
        if (hi >= 128) {
            hi -= 256;
        }
        int sample = hi * 256 + lo;
        masterBuffer[i] = (short) sample;
    }
    bis.close();

    MediaExtractor extractor;
    MediaCodec codec;
    ByteBuffer[] codecInputBuffers;
    ByteBuffer[] codecOutputBuffers;

    AssetFileDescriptor testAssetFd = mResources.openRawResourceFd(testinput);
    FileDescriptor testFd = testAssetFd.getFileDescriptor();
    extractor = new MediaExtractor();
    extractor.setDataSource(testFd, testAssetFd.getStartOffset(), testAssetFd.getLength());
    //assertEquals("wrong number of tracks", 1, extractor.getTrackCount());
    MediaFormat format = extractor.getTrackFormat(0);
    String mime = format.getString(MediaFormat.KEY_MIME);
    //assertTrue("not an audio file", mime.startsWith("audio/"));
    Log.d(TAG, "decode(): testAssetFd = " + testAssetFd);
    Log.d(TAG, "decode(): testFd = " + testFd);
    Log.d(TAG, "decode(): trackCount = " + extractor.getTrackCount());
    Log.d(TAG, "decode(): fileLength = " + testAssetFd.getLength());
    Log.d(TAG, "decode(): format = " + format);
    Log.d(TAG, "decode(): mime = " + mime);

    codec = MediaCodec.createDecoderByType(mime);
    codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
    codec.start();
    codecInputBuffers = codec.getInputBuffers();
    codecOutputBuffers = codec.getOutputBuffers();
    extractor.selectTrack(0);

    // start decoding
    int numBytesDecoded = 0;
    int maxdelta = 0;
    long totalErrorSquared = 0;
    final long kTimeOutUs = 5000;
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;
    while (!sawOutputEOS) {
        if (!sawInputEOS) {
            // transfer control of input buffer from codec to Java code
            int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
            if (inputBufIndex >= 0) {
                ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                // fill input buffer with data
                int sampleSize = extractor.readSampleData(dstBuf, 0 /* offset */);
                long presentationTimeUs = 0;
                if (sampleSize < 0) {
                    Log.d(TAG, "saw input EOS.");
                    sawInputEOS = true;
                    sampleSize = 0;
                } else {
                    presentationTimeUs = extractor.getSampleTime();
                }
                // let the codec control input buffer again
                codec.queueInputBuffer(
                        inputBufIndex,
                        0 /* offset */,
                        sampleSize,
                        presentationTimeUs,
                        sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                if (!sawInputEOS) {
                    extractor.advance();
                }
            }
        }
        int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
        if (res >= 0) {
            int outputBufIndex = res;
            ByteBuffer buf = codecOutputBuffers[outputBufIndex];
            // check the waveform matches within the specified max error
            for (int i = 0; i < info.size; i += 2) {
                short sample = buf.getShort(i);
                int idx = (numBytesDecoded + i) / 2;
                //assertTrue("decoder returned too much data", idx < masterBuffer.length);
                short mastersample = masterBuffer[idx];
                int d = sample - mastersample;
                totalErrorSquared += d * d;
            }
            numBytesDecoded += info.size;
            codec.releaseOutputBuffer(outputBufIndex, false /* render */);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "saw output EOS.");
                sawOutputEOS = true;
            }
        } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers = codec.getOutputBuffers();
            Log.d(TAG, "output buffers have changed.");
        } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat oformat = codec.getOutputFormat();
            Log.d(TAG, "output format has changed to " + oformat);
        }
    }
    codec.stop();
    codec.release();
    testAssetFd.close();
    masterFd.close();

    //assertEquals("wrong data size", masterLength, numBytesDecoded);
    long avgErrorSquared = (totalErrorSquared / (numBytesDecoded / 2));
    double rmse = Math.sqrt(avgErrorSquared);
    //assertTrue("decoding error too big: " + rmse, rmse <= maxerror);
}

Encode wav to AAC on Android

You can use MediaRecorder to record a stream directly to AAC, but there doesn't seem to be a way to encode an existing PCM/WAV file to AAC. The ability to encode to AAC exists natively in Android, and I'd like to use that. Is there no way to do it with a pre-existing audio file?
Look at this beautiful (and perfectly working) example:
Mp4ParserSample
Look at the final part of the class (rows 335-442); the convert Runnable object just does the job! You have to shape that code to your needs: adjust the input and output file paths and the conversion parameters (sampling rate, bit rate, etc.).
public static final String AUDIO_RECORDING_FILE_NAME = "audio_Capturing-190814-034638.422.wav"; // Input PCM file
public static final String COMPRESSED_AUDIO_FILE_NAME = "convertedmp4.m4a"; // Output MP4/M4A file
public static final String COMPRESSED_AUDIO_FILE_MIME_TYPE = "audio/mp4a-latm";
public static final int COMPRESSED_AUDIO_FILE_BIT_RATE = 64000; // 64 kbps
public static final int SAMPLING_RATE = 48000;
public static final int BUFFER_SIZE = 48000;
public static final int CODEC_TIMEOUT_IN_MS = 5000;
String LOGTAG = "CONVERT AUDIO";

Runnable convert = new Runnable() {
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
    @Override
    public void run() {
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_BACKGROUND);
        try {
            String filePath = Environment.getExternalStorageDirectory().getPath() + "/" + AUDIO_RECORDING_FILE_NAME;
            File inputFile = new File(filePath);
            FileInputStream fis = new FileInputStream(inputFile);

            File outputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/" + COMPRESSED_AUDIO_FILE_NAME);
            if (outputFile.exists()) outputFile.delete();

            MediaMuxer mux = new MediaMuxer(outputFile.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

            MediaFormat outputFormat = MediaFormat.createAudioFormat(COMPRESSED_AUDIO_FILE_MIME_TYPE, SAMPLING_RATE, 1);
            outputFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
            outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, COMPRESSED_AUDIO_FILE_BIT_RATE);
            outputFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 16384);

            MediaCodec codec = MediaCodec.createEncoderByType(COMPRESSED_AUDIO_FILE_MIME_TYPE);
            codec.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            codec.start();

            ByteBuffer[] codecInputBuffers = codec.getInputBuffers(); // Note: Array of buffers
            ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();

            MediaCodec.BufferInfo outBuffInfo = new MediaCodec.BufferInfo();
            byte[] tempBuffer = new byte[BUFFER_SIZE];
            boolean hasMoreData = true;
            double presentationTimeUs = 0;
            int audioTrackIdx = 0;
            int totalBytesRead = 0;
            int percentComplete = 0;
            do {
                int inputBufIndex = 0;
                while (inputBufIndex != -1 && hasMoreData) {
                    inputBufIndex = codec.dequeueInputBuffer(CODEC_TIMEOUT_IN_MS);
                    if (inputBufIndex >= 0) {
                        ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                        dstBuf.clear();
                        int bytesRead = fis.read(tempBuffer, 0, dstBuf.limit());
                        Log.e("bytesRead", "Read " + bytesRead);
                        if (bytesRead == -1) { // -1 implies EOS
                            hasMoreData = false;
                            codec.queueInputBuffer(inputBufIndex, 0, 0, (long) presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        } else {
                            totalBytesRead += bytesRead;
                            dstBuf.put(tempBuffer, 0, bytesRead);
                            codec.queueInputBuffer(inputBufIndex, 0, bytesRead, (long) presentationTimeUs, 0);
                            presentationTimeUs = 1000000L * (totalBytesRead / 2) / SAMPLING_RATE;
                        }
                    }
                }
                // Drain audio
                int outputBufIndex = 0;
                while (outputBufIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    outputBufIndex = codec.dequeueOutputBuffer(outBuffInfo, CODEC_TIMEOUT_IN_MS);
                    if (outputBufIndex >= 0) {
                        ByteBuffer encodedData = codecOutputBuffers[outputBufIndex];
                        encodedData.position(outBuffInfo.offset);
                        encodedData.limit(outBuffInfo.offset + outBuffInfo.size);
                        if ((outBuffInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 && outBuffInfo.size != 0) {
                            codec.releaseOutputBuffer(outputBufIndex, false);
                        } else {
                            mux.writeSampleData(audioTrackIdx, codecOutputBuffers[outputBufIndex], outBuffInfo);
                            codec.releaseOutputBuffer(outputBufIndex, false);
                        }
                    } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        outputFormat = codec.getOutputFormat();
                        Log.v(LOGTAG, "Output format changed - " + outputFormat);
                        audioTrackIdx = mux.addTrack(outputFormat);
                        mux.start();
                    } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        Log.e(LOGTAG, "Output buffers changed during encode!");
                    } else if (outputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // NO OP
                    } else {
                        Log.e(LOGTAG, "Unknown return code from dequeueOutputBuffer - " + outputBufIndex);
                    }
                }
                percentComplete = (int) Math.round(((float) totalBytesRead / (float) inputFile.length()) * 100.0);
                Log.v(LOGTAG, "Conversion % - " + percentComplete);
            } while (outBuffInfo.flags != MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            fis.close();
            mux.stop();
            mux.release();
            Log.v(LOGTAG, "Compression done ...");
        } catch (FileNotFoundException e) {
            Log.e(LOGTAG, "File not found!", e);
        } catch (IOException e) {
            Log.e(LOGTAG, "IO exception!", e);
        }
        //mStop = false;
        // Notify UI thread...
    }
};
You can get your hands dirty with the native code and use the IOMX C++ interface to the decoders in the framework. But this is build-sensitive and will not work on other phones and Android flavours.
Another option is to port an open-source AAC encoder like ffmpeg and write an app over it via JNI. It will at least work on phones with the same architecture (ARM9, Cortex-A8, ...).
JB has MediaCodec just to fulfill your wishes. But the problem is that the install base for devices with JB will be lean for some more time.
http://developer.android.com/about/versions/android-4.1.html#Multimedia
I think you can use this library.
https://github.com/timsu/android-aac-enc
http://betaful.com/post/82668810035/encoding-aac-audio-in-android
