I want to get pcm samples from audio file using MediaCodec. I successfully got them, but there are NaN values in decoded data.
What do those mean? How to eliminate them?
Here is my code:
/**
 * Decodes an audio file to normalized PCM samples.
 *
 * MediaCodec audio decoders emit 16-bit signed PCM, not IEEE floats.
 * Reinterpreting the decoded bytes through a FloatBuffer produces garbage
 * (including NaN), so the bytes are read as shorts and scaled to [-1.0, 1.0).
 *
 * @param audioPath path to any audio file the device can decode
 * @return channel-interleaved samples normalized to [-1.0, 1.0)
 * @throws IOException if the file cannot be read or contains no audio track
 */
public float[] getPCMSamples(String audioPath) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    MediaCodec decoder = null;
    int byteNum = 0;
    extractor.setDataSource(audioPath);
    int numTracks = extractor.getTrackCount();
    for (int i = 0; i < numTracks; ++i) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime != null && mime.startsWith("audio/")) {
            extractor.selectTrack(i);
            decoder = MediaCodec.createDecoderByType(mime);
            decoder.configure(format, null, null, 0);
            int rate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
            int channels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
            // KEY_DURATION is in microseconds; round up to whole seconds.
            long duration = format.getLong(MediaFormat.KEY_DURATION);
            duration = (duration / 1000000) + 1;
            int bitsPerSample = 16; // MediaCodec decodes to 16-bit PCM
            // Upper-bound estimate of the decoded size for the whole track.
            byteNum = (rate * channels * (int) duration * bitsPerSample) / 8;
            break; // decode the first audio track only
        }
    }
    if (decoder == null) {
        // Original code would NPE on decoder.start(); fail explicitly instead.
        extractor.release();
        throw new IOException("No audio track found in " + audioPath);
    }
    decoder.start();
    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean isEOS = false; // set once the extractor runs out of samples
    byte[] decodedBytes = new byte[byteNum];
    int decodedIdx = 0; // number of decoded bytes accumulated so far
    while (true) {
        // Feed one chunk of encoded data per iteration until input EOS.
        if (!isEOS) {
            int inputBufferIndex = decoder.dequeueInputBuffer(10000);
            if (inputBufferIndex >= 0) {
                int sampleSize = extractor.readSampleData(inputBuffers[inputBufferIndex], 0);
                if (sampleSize < 0) {
                    decoder.queueInputBuffer(inputBufferIndex, 0, 0, 0,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    isEOS = true;
                } else {
                    decoder.queueInputBuffer(inputBufferIndex, 0, sampleSize,
                            extractor.getSampleTime(), 0);
                    extractor.advance();
                }
            }
        }
        int outputBufIndex = decoder.dequeueOutputBuffer(info, 10000);
        if (outputBufIndex >= 0) {
            Log.d(TAG, "got frame, size " + info.size + "/" + info.presentationTimeUs);
            ByteBuffer buffer = outputBuffers[outputBufIndex];
            // Honor the codec-reported offset/size rather than remaining().
            buffer.position(info.offset);
            buffer.limit(info.offset + info.size);
            // Guard against the duration-based size estimate being too small.
            int toCopy = Math.min(info.size, decodedBytes.length - decodedIdx);
            buffer.get(decodedBytes, decodedIdx, toCopy);
            decodedIdx += toCopy;
            decoder.releaseOutputBuffer(outputBufIndex, false /* render */);
            // BufferInfo.flags is a bit mask: test the EOS bit, don't compare
            // for equality (other flag bits may be set simultaneously).
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "saw output EOS.");
                break;
            }
        } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = decoder.getOutputBuffers();
            Log.d(TAG, "output buffers have changed.");
        } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat oformat = decoder.getOutputFormat();
            Log.d(TAG, "output format has changed to " + oformat);
        } else {
            Log.d(TAG, "dequeueOutputBuffer returned " + outputBufIndex);
        }
    }
    decoder.stop();
    decoder.release();
    extractor.release();
    // The decoded bytes are 16-bit native-order shorts, NOT floats: read them
    // as shorts and normalize. Interpreting them as floats caused the NaNs.
    java.nio.ShortBuffer shortBuffer = ByteBuffer.wrap(decodedBytes, 0, decodedIdx)
            .order(java.nio.ByteOrder.nativeOrder())
            .asShortBuffer();
    float[] decoded = new float[shortBuffer.remaining()];
    for (int i = 0; i < decoded.length; i++) {
        decoded[i] = shortBuffer.get(i) / 32768f; // scale to [-1.0, 1.0)
    }
    return decoded;
}
Raw, decoded audio on Android is normally 16-bit signed integers (PCM), not floats — which is exactly why reinterpreting the decoded bytes as floats yields NaN values. So instead of a FloatBuffer, read the bytes through a ShortBuffer (and, if you need floats, divide each short by 32768).
Related
I am facing an issue while encoding M4A stereo file from PCM. My code works perfectly for MONO encoding (means Channel count =1) but for stereo(channel count =2) it's not working.
The problem is that the encoder doubles the duration of audio.
I have logged the sample rate and channel, both are perfect i.e., 48.100kHz sample rate and 2 channel count.
Below is my encoding method code:
/**
 * Pulls recorded PCM chunks off the {@code data} queue, AAC-encodes them and
 * muxes the result into {@code outFilePath}/{@code outFileName}.m4a.
 *
 * Fix: 16-bit PCM has (2 * channel) bytes per sample frame. The original
 * timestamp formula divided totalBytesRead by 2 only, so for stereo input
 * every frame was counted twice and the encoded audio's duration doubled.
 *
 * @return true when encoding finished; false on file-not-found or I/O errors
 */
public boolean mediaMux(){
    try {
        File outputDirectory = new File(outFilePath);
        if (!outputDirectory.exists()){
            outputDirectory.mkdir();
        }
        File outputFile = new File(outputDirectory.getPath(), outFileName + ".m4a");
        if (outputFile.exists()) outputFile.delete();
        MediaMuxer mux = new MediaMuxer(outputFile.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        // AAC-LC output with the recording's sample rate and channel count.
        MediaFormat outputFormat = MediaFormat.createAudioFormat(COMPRESSED_AUDIO_FILE_MIME_TYPE,
                sampleRate, channel);
        outputFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        MediaCodec codec = MediaCodec.createEncoderByType(COMPRESSED_AUDIO_FILE_MIME_TYPE);
        codec.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        codec.start();
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        MediaCodec.BufferInfo outBuffInfo = new MediaCodec.BufferInfo();
        byte[] tempBuffer = new byte[bufferSize];
        double presentationTimeUs = 0;
        int audioTrackIdx = 0;
        int totalBytesRead = 0;
        int percentComplete;
        do {
            // Feed every queued PCM chunk the encoder will currently accept.
            int inputBufIndex = 0;
            while (inputBufIndex != -1 && data.size() > 0) {
                try {
                    Log.w("Read Log","Reading Data");
                    inputBufIndex = codec.dequeueInputBuffer(CODEC_TIMEOUT_IN_MS);
                    if (inputBufIndex >= 0) {
                        ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                        dstBuf.clear();
                        byte[] a = data.remove();
                        int bytesRead = a.length;
                        if (!hasMoreData) { // recording finished: signal EOS
                            codec.queueInputBuffer(inputBufIndex, 0, 0, (long) presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        } else {
                            totalBytesRead += bytesRead;
                            dstBuf.put(a, 0, bytesRead);
                            codec.queueInputBuffer(inputBufIndex, 0, bytesRead, (long) presentationTimeUs, 0);
                            // 2 bytes per sample PER CHANNEL (16-bit PCM), so a
                            // stereo frame is 4 bytes. Dividing by 2 alone was
                            // what doubled the duration for stereo input.
                            presentationTimeUs = 1000000L * (totalBytesRead / (2 * channel)) / sampleRate;
                        }
                    }
                } catch (NoSuchElementException ex){
                    // Queue drained between size() check and remove(); harmless.
                    ex.printStackTrace();
                }
            }
            // Drain all encoded output currently available.
            int outputBufIndex = 0;
            while (outputBufIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.w("Write Log","Writing Data");
                outputBufIndex = codec.dequeueOutputBuffer(outBuffInfo, CODEC_TIMEOUT_IN_MS);
                if (outputBufIndex >= 0) {
                    ByteBuffer encodedData = codecOutputBuffers[outputBufIndex];
                    encodedData.position(outBuffInfo.offset);
                    encodedData.limit(outBuffInfo.offset + outBuffInfo.size);
                    // Codec-config data is carried by the track format added on
                    // INFO_OUTPUT_FORMAT_CHANGED; never write it as a sample.
                    if ((outBuffInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 && outBuffInfo.size != 0) {
                        codec.releaseOutputBuffer(outputBufIndex, false);
                    } else {
                        mux.writeSampleData(audioTrackIdx, codecOutputBuffers[outputBufIndex], outBuffInfo);
                        codec.releaseOutputBuffer(outputBufIndex, false);
                    }
                } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    outputFormat = codec.getOutputFormat();
                    Log.v(LOGTAG, "Output format changed - " + outputFormat);
                    audioTrackIdx = mux.addTrack(outputFormat);
                    mux.start();
                } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    Log.e(LOGTAG, "Output buffers changed during encode!");
                } else if (outputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // NO OP
                } else {
                    Log.e(LOGTAG, "Unknown return code from dequeueOutputBuffer - " + outputBufIndex);
                }
            }
            Log.v(LOGTAG, "Conversion % - " );
            // Flags are a bit mask: test the EOS bit instead of an equality
            // compare, which fails whenever another flag bit is also set.
        } while ((outBuffInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0 && !mStop);
        mux.stop();
        mux.release();
        Log.v(LOGTAG, "Compression done ...");
    } catch (FileNotFoundException e) {
        Log.e(LOGTAG, "File not found!", e);
        return false;
    } catch (IOException e) {
        Log.e(LOGTAG, "IO exception!", e);
        return false;
    } catch (Exception e){
        e.printStackTrace();
    }
    return true;
}
Update
Recording audio code chuck
recorder = new AudioRecord(
MediaRecorder.AudioSource.MIC, recordingSampleRate,
recordingChannels,
AudioFormat.ENCODING_PCM_16BIT, minBufferSize * 2);
if User selects mono, recordingChannels = AudioFormat.CHANNEL_IN_MONO and for stereo recording channels = AudioFormat.CHANNEL_IN_STEREO
Thanks in advance
I think
presentationTimeUs = 1000000l * (totalBytesRead / 2) / sampleRate;
should actually be
presentationTimeUs = 1000000l * (totalBytesRead / (2 * channel)) / sampleRate;
as you have 2 bytes per sample per channel, i.e. 4 bytes per sample for stereo.
I want to extract the PCM data from an MP3 file. I tried MediaCodec but it failed. Is there a way to do this? My approach was to set up a raw ("audio/raw") MediaFormat and decode with MediaCodec, writing the output to a raw file; the resulting file ends up the same size as the MP3, so I doubt it was created correctly.
// NOTE(review): this is a flat fragment (no enclosing method visible); it
// references fields declared elsewhere: extractor, format, mime, codec,
// sample_rate, little_endian, timeoutUs, TAG, COMPRESSED_AUDIO_FILE_BIT_RATE.
// Output file for the "decoded" data, despite the ".raw" name stored in 'aac'.
File aac = new File(Environment.getExternalStorageDirectory()+"/audio.raw");
if(!aac.exists())
aac.createNewFile();
// Read the source file's raw bytes directly.
FileInputStream fis = new FileInputStream(audioPath);
BufferedInputStream bis = new BufferedInputStream(fis);
extractor.setDataSource(audioPath);
// Locate the first audio track in the container.
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; ++i) {
format = extractor.getTrackFormat(i);
mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("audio/")) {
extractor.selectTrack(i);
break;
}
}
// Mux the "decoded" output into an MP4 container.
MediaMuxer muxerd = new MediaMuxer(aac.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
// NOTE(review): "audio/raw" is uncompressed PCM, so a decoder created for it
// cannot decode MP3 data — to get PCM from an MP3 you must create the decoder
// from the extractor's track format (its MIME type) and feed it via
// extractor.readSampleData(), not raw file bytes. AAC_PROFILE / BIT_RATE keys
// are encoder settings and look out of place on a raw decoder format.
format = MediaFormat.createAudioFormat("audio/raw", sample_rate, 1);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
format.setInteger(MediaFormat.KEY_BIT_RATE, COMPRESSED_AUDIO_FILE_BIT_RATE);
codec = MediaCodec.createDecoderByType("audio/raw");
codec.configure(format, null, null, 0);
codec.start();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int audioTrackIdx = 0;
int totalBytesRead = 0;
boolean inputEos = false;  // set when the input file is exhausted
boolean outputEos = false; // set when the codec emits its EOS buffer
int inputBufIndex, outputBufIndex;
int sampleSize;
ByteBuffer readBuffer, writeBuffer;
double presentationTimeUs = 0;
while (!outputEos) {
// Fill one input buffer per loop iteration until input EOS.
if (!inputEos) {
inputBufIndex = codec.dequeueInputBuffer(timeoutUs);
if (inputBufIndex >= 0)
{
readBuffer = codec.getInputBuffer(inputBufIndex);
readBuffer.clear();
int i = readBuffer.remaining();
byte[] tempBuffer = new byte[i];
// NOTE(review): reads raw file bytes (MP3 frames), not extracted samples.
sampleSize = bis.read(tempBuffer);
if (sampleSize < 0) {
inputEos = true;
codec.queueInputBuffer(inputBufIndex, 0, 0, (long) presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
totalBytesRead += sampleSize;
if (little_endian)
readBuffer.put(tempBuffer, 0, sampleSize);
else {
// Byte-swap: reinterpret the bytes as big-endian shorts and
// write them back in the buffer's (native) order.
ByteBuffer bb = ByteBuffer.wrap(tempBuffer);
ShortBuffer sb = bb.order(ByteOrder.BIG_ENDIAN).asShortBuffer();
short[] shorts = new short[i / 2];
sb.get(shorts);
for (short s : shorts) {
readBuffer.putShort(s);
}
}
codec.queueInputBuffer(inputBufIndex, 0, sampleSize, (long) presentationTimeUs, 0);
// Timestamp assumes 16-bit mono PCM (2 bytes per sample).
presentationTimeUs = 1000000L * (totalBytesRead / 2) / sample_rate;
}
}
}
Log.e("line 3", "pass");
// Drain one output buffer per loop iteration.
outputBufIndex = codec.dequeueOutputBuffer(info, timeoutUs);
if (outputBufIndex >= 0) {
writeBuffer = codec.getOutputBuffer(outputBufIndex);
writeBuffer.position(info.offset);
writeBuffer.limit(info.offset + info.size);
// Write only plain sample buffers (no config/EOS flag bits set).
if (info.flags == 0 && info.size > 0)
muxerd.writeSampleData(audioTrackIdx, writeBuffer, info);
codec.releaseOutputBuffer(outputBufIndex, false);
writeBuffer.clear();
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "saw output EOS.");
outputEos = true;
}
} else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Muxer track must be added (and muxer started) before any samples.
MediaFormat oformat = codec.getOutputFormat();
audioTrackIdx = muxerd.addTrack(oformat);
muxerd.start();
format = oformat;
Log.e(TAG, "output format has changed to " + oformat);
Log.e("oformat key mime= ", String.valueOf(oformat.KEY_MIME));
} else {
Log.d(TAG, "dequeueOutputBuffer returned " + outputBufIndex);
}
}
// Release everything; note bis is left to be closed via the wrapped fis.
fis.close();
codec.stop();
codec.release();
muxerd.stop();
muxerd.release();
I record a video from byte array using MediaCodec and MediaMuxer. But the result is that the video cannot be recorded. The system says: "File incorrect". You can see the code example below. I need to get a video file in mp4 format. Please tell me what the problem it is.
//init - this function is called once in the beginning.
// H.264/AVC elementary stream — the video format MediaMuxer writes into MP4.
private static final String MIME_TYPE = "video/avc";
// Encoded frame dimensions (QVGA).
private final static int MAX_WIDTH = 320;
private final static int MAX_HEIGHT = 240;
// Target encoder bitrate in bits per second (2 Mbps).
private final static int VIDEO_BITRATE = 2000000;
// Nominal frames per second fed to the encoder.
private static final int FRAME_RATE = 20;
// Seconds between requested key frames.
private final static int VIDEO_IFRAME_INTERVAL = 10;
/**
 * Creates the H.264 encoder and an MP4 muxer for the next output file.
 * Called once before encoding starts.
 *
 * Fix: if MediaCodec.createByCodecName() throws IOException, the original
 * swallowed it and fell through to a NullPointerException on configure();
 * now the failure is reported by returning false.
 *
 * @return true when codec and muxer are ready, false otherwise
 */
public boolean initCodec() {
    bufferInfo = new MediaCodec.BufferInfo();
    countFile++;
    String savePath = "uonmap_video_" + countFile + ".mp4";
    File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), savePath);
    MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
    if (codecInfo == null) {
        // No codec on this device can encode the requested MIME type.
        return false;
    }
    colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_BITRATE);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VIDEO_IFRAME_INTERVAL);
    try {
        mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
    } catch (IOException e) {
        e.printStackTrace();
        return false; // continuing would NPE on configure() below
    }
    mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mediaCodec.start();
    try {
        mMuxer = new MediaMuxer(file.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException ioe) {
        throw new RuntimeException("MediaMuxer creation failed", ioe);
    }
    isStart = true;
    mTrackIndex = -1; // assigned when the encoder reports its output format
    return true;
}
//encode function - the function is called every time when a new byte array comes
public synchronized void encode(byte[] data, boolean endOfStream) {
final int TIMEOUT_USEC = 50;
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
int inputBufferIndex = mediaCodec.dequeueInputBuffer(0);
if (inputBufferIndex >= 0) {
try {
if (isPlanar) {
data = YV12toYUV420Planar(data);
} else {
data = YV12toYUV420PackedSemiPlanar(data);
}
} catch (IndexOutOfBoundsException ex) {
}
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(data);
mediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, 50, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
} else {
return;
}
ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
while (true) {
int encoderStatus = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mediaCodec.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = mediaCodec.getOutputFormat();
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
} else if (encoderStatus < 0) {
} else {
ByteBuffer encodedData = outputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
bufferInfo.size = 0;
}
if (bufferInfo.size != 0) {
if (!mMuxerStarted) {
throw new RuntimeException("muxer hasn't started");
}
bufferInfo.flags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
bufferInfo.presentationTimeUs = 50;
mMuxer.writeSampleData(mTrackIndex, encodedData, bufferInfo);
}
mediaCodec.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
}
/**
 * Converts a YV12 frame (Y plane, then V plane, then U plane) to
 * YUV420PackedSemiPlanar / NV12 (Y plane, then interleaved U/V bytes).
 *
 * Fix: the original contained a first chroma loop using hard-coded
 * "- 32 - 320" offsets plus a duplicated Y-plane arraycopy; every byte it
 * wrote was immediately overwritten by the second (correct) loop, so that
 * dead code — and its out-of-bounds-prone offsets — is removed.
 *
 * @param input YV12 frame of width*height*3/2 bytes
 * @return new NV12-ordered frame of the same length
 */
public byte[] YV12toYUV420PackedSemiPlanar(final byte[] input) {
    final int frameSize = width * height;
    final int qFrameSize = frameSize / 4;
    byte[] output = new byte[input.length];
    // Luma plane is identical in both layouts.
    System.arraycopy(input, 0, output, 0, frameSize);
    // YV12 stores the V (Cr) plane first, then U (Cb); NV12 interleaves U,V.
    for (int i = 0; i < qFrameSize; i++) {
        output[frameSize + i * 2] = input[frameSize + qFrameSize + i]; // Cb (U)
        output[frameSize + i * 2 + 1] = input[frameSize + i];          // Cr (V)
    }
    return output;
}
Log:
05-27 18:13:56.897 32196-32320/com.example.sasha.myrtc D/dalvikvm: GC_FOR_ALLOC freed 227K, 10% free 5333K/5876K, paused 24ms, total 24ms
05-27 18:13:56.907 32196-32434/com.example.sasha.myrtc V/MPEG4Writer: decoding time: 0 and ctts offset time: 0
05-27 18:13:56.907 32196-32434/com.example.sasha.myrtc V/MPEG4Writer: Video media time stamp: 0 and previous paused duration 50
I have working code to convert WAV to M4A. Is there any way to convert MP3 to M4A by modifying my existing code? I don't want to use ffmpeg or native code for this. The function below works for WAV-to-M4A conversion but not for MP3-to-M4A.
/**
 * Encodes a 16-bit mono raw-PCM stream to AAC-LC inside an .m4a container.
 *
 * NOTE(review): the AAC encoder consumes RAW PCM only. Feeding the bytes of
 * an MP3 file straight into it cannot work — an MP3 source must first be
 * decoded to PCM (MediaExtractor + a MediaCodec decoder) and the decoded
 * samples fed here. That is why this method works for WAV/PCM input but not
 * for MP3.
 *
 * @param filename path of the raw PCM (or headerless WAV-data) input file
 */
public void convertAudio(final String filename) {
    final String AUDIO_RECORDING_FILE_NAME = Config.mp3Audio; // Input PCM file
    final String COMPRESSED_AUDIO_FILE_NAME = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC).getPath()
            + "/convertedmp4.m4a"; // Output MP4/M4A file
    final String COMPRESSED_AUDIO_FILE_MIME_TYPE = "audio/mp4a-latm";
    final int COMPRESSED_AUDIO_FILE_BIT_RATE = 320000; // 320 kbps (comment previously said 64kbps)
    final int SAMPLING_RATE = 22050;
    final int BUFFER_SIZE = 22050;
    final int CODEC_TIMEOUT_IN_MS = 5000;
    String LOGTAG = "CONVERT AUDIO";
    try {
        File inputFile = new File(filename);
        FileInputStream fis = new FileInputStream(inputFile);
        File outputFile = new File(COMPRESSED_AUDIO_FILE_NAME);
        if (outputFile.exists()) outputFile.delete();
        MediaMuxer mux = new MediaMuxer(outputFile.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        // AAC-LC, mono, matching the raw input's sample rate.
        MediaFormat outputFormat = MediaFormat.createAudioFormat(COMPRESSED_AUDIO_FILE_MIME_TYPE, SAMPLING_RATE, 1);
        outputFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, COMPRESSED_AUDIO_FILE_BIT_RATE);
        outputFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 16384);
        MediaCodec codec = MediaCodec.createEncoderByType(COMPRESSED_AUDIO_FILE_MIME_TYPE);
        codec.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        codec.start();
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        MediaCodec.BufferInfo outBuffInfo = new MediaCodec.BufferInfo();
        byte[] tempBuffer = new byte[BUFFER_SIZE];
        boolean hasMoreData = true;
        double presentationTimeUs = 0;
        int audioTrackIdx = 0;
        int totalBytesRead = 0;
        int percentComplete = 0;
        do {
            // Feed PCM until the encoder stops accepting or input is exhausted.
            int inputBufIndex = 0;
            while (inputBufIndex != -1 && hasMoreData) {
                inputBufIndex = codec.dequeueInputBuffer(CODEC_TIMEOUT_IN_MS);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                    dstBuf.clear();
                    // Clamp to tempBuffer's length: the codec input buffer may
                    // be larger than BUFFER_SIZE, which previously risked an
                    // IndexOutOfBoundsException in read().
                    int bytesRead = fis.read(tempBuffer, 0, Math.min(tempBuffer.length, dstBuf.limit()));
                    if (bytesRead == -1) { // -1 implies EOS
                        hasMoreData = false;
                        codec.queueInputBuffer(inputBufIndex, 0, 0, (long) presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    } else {
                        totalBytesRead += bytesRead;
                        dstBuf.put(tempBuffer, 0, bytesRead);
                        codec.queueInputBuffer(inputBufIndex, 0, bytesRead, (long) presentationTimeUs, 0);
                        // 16-bit mono PCM: 2 bytes per sample.
                        presentationTimeUs = 1000000L * (totalBytesRead / 2) / SAMPLING_RATE;
                    }
                }
            }
            // Drain all encoded output currently available.
            int outputBufIndex = 0;
            while (outputBufIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
                outputBufIndex = codec.dequeueOutputBuffer(outBuffInfo, CODEC_TIMEOUT_IN_MS);
                if (outputBufIndex >= 0) {
                    ByteBuffer encodedData = codecOutputBuffers[outputBufIndex];
                    encodedData.position(outBuffInfo.offset);
                    encodedData.limit(outBuffInfo.offset + outBuffInfo.size);
                    // Codec-config data travels in the track format added on
                    // INFO_OUTPUT_FORMAT_CHANGED; never write it as a sample.
                    if ((outBuffInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 && outBuffInfo.size != 0) {
                        codec.releaseOutputBuffer(outputBufIndex, false);
                    } else {
                        mux.writeSampleData(audioTrackIdx, codecOutputBuffers[outputBufIndex], outBuffInfo);
                        codec.releaseOutputBuffer(outputBufIndex, false);
                    }
                } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    outputFormat = codec.getOutputFormat();
                    audioTrackIdx = mux.addTrack(outputFormat);
                    mux.start();
                } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // Legacy buffer-array refresh point; nothing to do here.
                } else if (outputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // NO OP
                } else {
                    // Unknown status; ignore and keep draining.
                }
            }
            percentComplete = (int) Math.round(((float) totalBytesRead / (float) inputFile.length()) * 100.0);
            // Flags are a bit mask: test the EOS bit rather than comparing the
            // whole field, which fails when other flag bits are set.
        } while ((outBuffInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0);
        fis.close();
        mux.stop();
        mux.release();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
I want to use Android MediaCodec and MediaMuxer APIS to encode a video with new format and save the result after encoding to a new .mp4 file.
This is my code:
// NOTE(review): this fragment is the body of a method whose signature is not
// visible here (it ends with "return suc;"). It references fields/locals
// declared elsewhere: filePath, dstMediaPath, TAG.
final String MIME_TYPE = "video/avc";
final int BIT_RATE = 128000; // 128kbps
final int SAMPLING_RATE = 44100;
final int CODEC_TIMEOUT_IN_MS = 5000;
boolean suc=false;
try {
File inputFile = new File(filePath);
FileInputStream fis = new FileInputStream(inputFile);
File outputFile = new File(dstMediaPath);
if (outputFile.exists())
outputFile.delete();
MediaMuxer mux = new MediaMuxer(outputFile.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
// NOTE(review): this audio format is created and immediately discarded by
// the reassignment on the next statement — dead code worth removing.
MediaFormat outputFormat = MediaFormat.createAudioFormat(MIME_TYPE,SAMPLING_RATE, 1);
outputFormat = MediaFormat.createVideoFormat("video/avc",
320, 240);
outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, 125000);
outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
MediaCodec codec = MediaCodec.createEncoderByType(MIME_TYPE);
codec.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
codec.start();
ByteBuffer[] codecInputBuffers = codec.getInputBuffers(); // Note: Array of buffers
ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
MediaCodec.BufferInfo outBuffInfo = new MediaCodec.BufferInfo();
byte[] tempBuffer = new byte[4096];
boolean hasMoreData = true;
double presentationTimeUs = 0;
int audioTrackIdx = 0;
int totalBytesRead = 0;
int percentComplete;
do {
// Feed input until the encoder stops accepting or the file is exhausted.
int inputBufIndex = 0;
while (inputBufIndex != -1 && hasMoreData) {
inputBufIndex = codec.dequeueInputBuffer(CODEC_TIMEOUT_IN_MS);
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
dstBuf.clear();
// NOTE(review): an AVC encoder expects one uncompressed YUV frame per
// input buffer (width*height*3/2 bytes in the configured color format).
// Reading arbitrary 4096-byte chunks of a (likely compressed) file and
// queueing them as frames produces garbage — the probable cause of the
// "wrong results" in the played output.
int bytesRead = fis.read(tempBuffer, 0, tempBuffer.length);
if (bytesRead == -1) { // -1 implies EOS
hasMoreData = false;
codec.queueInputBuffer(inputBufIndex, 0, 0, (long) presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
totalBytesRead += bytesRead;
dstBuf.put(tempBuffer, 0, bytesRead);
codec.queueInputBuffer(inputBufIndex, 0, bytesRead, (long) presentationTimeUs, 0);
// NOTE(review): this is the AUDIO timestamp formula (bytes/2/sampleRate);
// for video the timestamp should advance by 1e6/frameRate per frame.
presentationTimeUs = 1000000l * (totalBytesRead / 2) / SAMPLING_RATE;
}
}
}
// Drain all encoded output currently available.
int outputBufIndex = 0;
while (outputBufIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
outputBufIndex = codec.dequeueOutputBuffer(outBuffInfo, CODEC_TIMEOUT_IN_MS);
if (outputBufIndex >= 0) {
ByteBuffer encodedData = codecOutputBuffers[outputBufIndex];
encodedData.position(outBuffInfo.offset);
encodedData.limit(outBuffInfo.offset + outBuffInfo.size);
// Skip codec-config buffers; they are delivered via the track format.
if ((outBuffInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 && outBuffInfo.size != 0) {
codec.releaseOutputBuffer(outputBufIndex, false);
outBuffInfo.size=0;
} else {
mux.writeSampleData(audioTrackIdx, codecOutputBuffers[outputBufIndex], outBuffInfo);
codec.releaseOutputBuffer(outputBufIndex, false);
}
} else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Track must be added and muxer started before writing samples.
outputFormat = codec.getOutputFormat();
Log.v(TAG, "Output format changed - " + outputFormat);
audioTrackIdx = mux.addTrack(outputFormat);
mux.start();
} else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
Log.e(TAG, "Output buffers changed during encode!");
} else if (outputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// NO OP
} else {
Log.e(TAG, "Unknown return code from dequeueOutputBuffer - " + outputBufIndex);
}
}
percentComplete = (int) Math.round(((float) totalBytesRead / (float) inputFile.length()) * 100.0);
// NOTE(review): flags is a bit mask; "!=" equality comparison against
// BUFFER_FLAG_END_OF_STREAM can loop forever if other bits are also set.
} while (outBuffInfo.flags != MediaCodec.BUFFER_FLAG_END_OF_STREAM);
fis.close();
mux.stop();
mux.release();
Log.v(TAG, "Compression done ...");
suc=true;
} catch (FileNotFoundException e) {
Log.e(TAG, "File not found!", e);
suc=false;
} catch (IOException e) {
Log.e(TAG, "IO exception!", e);
suc=false;
}
return suc;
When open the file output, it can be played, but it shows the wrong results, an example of which follows:
Can anyone help me understand what I am doing wrong?