AudioTrack not working, there is no sound - Android

Can you check why my AudioTrack is not working? I do feed a buffer in and out of the AudioTrack, so it should be able to work.
public class MainActivity extends AppCompatActivity {
private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
private byte[] b;
AudioManager audioManager;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Button button = (Button)findViewById(R.id.button);
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
run();
}
});
}
public void run() {
extractor = new MediaExtractor();
AssetFileDescriptor sampleFD = getResources().openRawResourceFd(R.raw.pinkfloyd);
try {
extractor.setDataSource(sampleFD.getFileDescriptor(), sampleFD.getStartOffset(), sampleFD.getLength());
} catch (IOException e) {
e.printStackTrace();
}
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("audio/")) {
extractor.selectTrack(i);
try {
decoder = MediaCodec.createDecoderByType(mime);
} catch (IOException e) {
e.printStackTrace();
}
decoder.configure(format, surface, null, 0);
break;
}
}
if (decoder == null) {
Log.e("DecodeActivity", "Can't find video info!");
return;
}
decoder.start();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
audioManager.setMode(AudioManager.MODE_CURRENT);
audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, audioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0);
int lengthOfAudioClip = outputBuffers.length;
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, lengthOfAudioClip, AudioTrack.MODE_STREAM);
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
audioTrack.play();
boolean isEOS = false;
while (!Thread.interrupted()) {
if (!isEOS) {
int inIndex = decoder.dequeueInputBuffer(10000);
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
decoder.getInputBuffer(inIndex);
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEOS = true;
} else {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
}
}
int outIndex = decoder.dequeueOutputBuffer(info, 10000);
switch (outIndex)
{
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
break;
default:
ByteBuffer buffer = outputBuffers[outIndex];
Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);
b = new byte[info.size-info.offset];
Log.d("LOGGING FOR B", b + "");
audioTrack.write(b, 0, outputBuffers.length);
decoder.releaseOutputBuffer(outIndex, true);
Log.d("LOGGING FOREST KEEP OUT", outIndex + "");
Log.d("LOG STATE", audioTrack.getState() + "");
Log.d("LOG STREAMTYPE", audioTrack.getStreamType() + "");
break;
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
audioTrack.flush();
audioTrack.release();
break;
}
}
Log.d("LOGGING FOR INPUT", inputBuffers + "");
Log.d("LOGGING FOR OUTPUT", outputBuffers + "");
Log.d("OUTLENGTH", outputBuffers.length + "");
Log.d("SIZE OF B", b.length + "");
// AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, 44100, AudioTrack.MODE_STREAM);
// audioTrack.getSampleRate();
decoder.stop();
decoder.release();
extractor.release();
}
}

You can try this instead, which is much easier:
MediaPlayer mPlayer = MediaPlayer.create(ThisActivity.this, R.raw.mysoundfile);
mPlayer.start();
not forgetting to stop it when the activity is destroyed:
public void onDestroy() {
mPlayer.stop();
super.onDestroy();
}
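If you won't use the player again, it is also worth releasing it so the native resources are freed (a small, optional addition to the snippet above; mPlayer is the same field):
public void onDestroy() {
    if (mPlayer != null) {
        mPlayer.stop();     // stop playback if it is still running
        mPlayer.release();  // free the native MediaPlayer resources
        mPlayer = null;
    }
    super.onDestroy();
}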
For more details, look at this older post:
How do I play an mp3 in the res/raw folder of my android app?

byte[] b seems to be empty when you write it to the AudioTrack. You can fill byte[] b like this:
buffer.get(b, 0, info.size - info.offset);
before writing it to the AudioTrack.
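Putting it together, the default branch of the switch could look like this (a sketch; note that the write length should be the number of bytes actually copied, not outputBuffers.length):
default:
    ByteBuffer buffer = outputBuffers[outIndex];
    b = new byte[info.size - info.offset];
    buffer.position(info.offset);
    buffer.get(b, 0, info.size - info.offset); // copy the decoded PCM out of the codec buffer
    buffer.clear();
    audioTrack.write(b, 0, b.length);          // write the bytes you copied
    decoder.releaseOutputBuffer(outIndex, false); // false: no surface attached, nothing to render
    break;
Also consider sizing the AudioTrack with AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT) instead of outputBuffers.length, which is only the number of codec buffers, not a byte count.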

Related

Video and Audio Synchronization issue

I am saving frames in H264 format and audio in AAC format, then concatenating them into an MP4 using ffmpeg on Android. But when I concatenate the audio and video, the audio falls behind the video and they don't play in sync. How can I play video and audio synchronously? The H264 video is 6 seconds long and the audio is 8 seconds; after concatenating them I get 8 seconds, the audio runs longer, and playback is out of sync.
Recording Audio to AAC format
recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
recorder.setAudioEncodingBitRate(48000);//48000
recorder.setAudioSamplingRate(720);//16000
recorder.setOutputFile(path2);
try {
recorder.prepare();
} catch (IOException e) {
e.printStackTrace();
}
recorder.start();
Saving Video to H264 format
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc",
1280,
720);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 6000000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 720); //video second
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
try {
mMediaCodec = MediaCodec.createEncoderByType("video/avc");
} catch (IOException e) {
e.printStackTrace();
}
mMediaCodec.configure(mediaFormat,
null,
null,
MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
//Video format H264
private synchronized void encode(byte[] data) {
ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
int inputBufferIndex = mMediaCodec.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.capacity();
inputBuffer.clear();
inputBuffer.put(data);
mMediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0);
} else {
return;
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
Log.i(TAG, "outputBufferIndex-->" + outputBufferIndex);
do {
if (outputBufferIndex >= 0) {
ByteBuffer outBuffer = outputBuffers[outputBufferIndex];
System.out.println("buffer info-->" + bufferInfo.offset + "--"
+ bufferInfo.size + "--" + bufferInfo.flags + "--"
+ bufferInfo.presentationTimeUs);
byte[] outData = new byte[bufferInfo.size];
outBuffer.get(outData);
try {
if (bufferInfo.offset != 0) {
fos.write(outData, bufferInfo.offset, outData.length
- bufferInfo.offset);
} else {
fos.write(outData, 0, outData.length);
}
fos.flush();
Log.i(TAG, "out data -- > " + outData.length);
mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo,
0);
} catch (IOException e) {
e.printStackTrace();
}
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mMediaCodec.getOutputBuffers();
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat format = mMediaCodec.getOutputFormat();
}
} while (outputBufferIndex >= 0);
}
Concatenating Video and Audio using ffmpeg
String[] cmd = {"-i", h264_video_path, "-i", aac_audio_path, "-c", "copy", "-map","0:v:0","-map","1:a:0", outpath_mp4};
try {
//FFMPEG execute command
executeCommand(cmd);
} catch (FFmpegCommandAlreadyRunningException e) {
e.printStackTrace();
}
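A hint for the duration mismatch: with -c copy, ffmpeg keeps muxing the longer (8 second) audio stream after the 6 second video ends. ffmpeg's -shortest flag trims the output to the shorter input; the same command with one extra flag:
String[] cmd = {"-i", h264_video_path, "-i", aac_audio_path,
        "-c", "copy", "-map", "0:v:0", "-map", "1:a:0",
        "-shortest", outpath_mp4};
This only trims the tail, though; if the streams drift within the overlapping 6 seconds, double-check the capture parameters above, where setAudioSamplingRate(720) looks like a typo for 16000 and KEY_FRAME_RATE is set to 720.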

Picking a video, decoding it, changing its fps, encoding and saving it using MediaCodec

I would like to pick a video from the device and decode it in order to change its frame rate, then encode it and save it back to the device. How is this possible using MediaCodec? I went through a lot of documentation but couldn't find a method. I have the following code for decoding. Will it be of any use for my purpose? If yes, how do I use the decoded data to save it with a changed fps?
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 1080, 720);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2500000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
try {
decoder = MediaCodec.createDecoderByType("video/avc");
} catch (IOException e) {
Log.d("Error", "Fail to create MediaCodec: " + e.toString());
}
///Commenting for testing...
/*
// Pass the decoded data to the surface to display
decoder.configure(mediaFormat, null, null, 0);
//decoder.configure(mediaFormat, null, null, 0);
decoder.start();
*/
///Commenting for testing...
// new BufferInfo();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
if (null == inputBuffers) {
Log.d("Error", "null == inputBuffers");
}
if (null == outputBuffers) {
Log.d("Error", "null == outbputBuffers 111");
}
FileInputStream file = null;
try {
file = new FileInputStream(data.getData().getPath().toString());
} catch (FileNotFoundException e) {
Log.d("Error", "open file error: " + e.toString());
return;
}
int read_size = -1;
int mCount = 0;
for (; ; ) {
byte[] h264 = null;
try {
byte[] length_bytes = new byte[4];
read_size = file.read(length_bytes);
if (read_size < 0) {
Log.d("Error", "read_size<0 pos1");
break;
}
int byteCount = bytesToInt(length_bytes, 0);
//Changed to .length
//int byteCount=length_bytes.length;
Log.d("Error", "byteCount: " + byteCount);
h264 = new byte[byteCount];
read_size = file.read(h264, 0, byteCount);
// Log.d("Error", "read_size: " + read_size);
if (read_size < 0) {
Log.d("Error", "read_size<0 pos2");
break;
}
// Log.d("Error", "pos: " + file.)
} catch (IOException e) {
Log.d("Error", "read_size 2: " + read_size);
Log.d("Error", "e.toStrinig(): " + e.toString());
break;
}
int inputBufferIndex = decoder.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(h264);
// long sample_time = ;
decoder.queueInputBuffer(inputBufferIndex, 0, h264.length, mCount * 1000000 / 20, 0);
++mCount;
} else {
Log.d("Error", "dequeueInputBuffer error");
}
ByteBuffer outputBuffer = null;
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 0);
while (outputBufferIndex >= 0) {
outputBuffer = outputBuffers[outputBufferIndex];
decoder.releaseOutputBuffer(outputBufferIndex, true);
outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 0);
}
// Pass the decoded data to the surface to display
decoder.configure(mediaFormat,mPreview.getHolder().getSurface() , null, 0);
//decoder.configure(mediaFormat, null, null, 0);
decoder.start();
if (outputBufferIndex >= 0) {
decoder.releaseOutputBuffer(outputBufferIndex, false);
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = decoder.getOutputBuffers();
Log.d("Error", "outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED");
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Subsequent data will conform to new format.
Log.d("Error", "outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED");
}
try {
Thread.sleep(1000/20);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
public int bytesToInt(byte[] src, int offset) {
int value;
value = (int) ((src[offset] & 0xFF)
| ((src[offset+1] & 0xFF)<<8)
| ((src[offset+2] & 0xFF)<<16)
| ((src[offset+3] & 0xFF)<<24));
return value;
}
You can take a look at DecodeEditEncode, a great starting point for decoding and re-encoding using surfaces (output surface for decoder -> input surface for encoder).
Take a look especially at this method
private void editVideoData(VideoChunks inputData, MediaCodec decoder,
OutputSurface outputSurface, InputSurface inputSurface, MediaCodec encoder,
VideoChunks outputData)
The workflow you have to follow is similar to the one below (a minimal skeleton is sketched after the list):
Extract the video track (MediaExtractor)
Feed the decoder input buffers
Render the decoded frame to the surface
When rendered, the encoder will get the frame (you have to set the timestamp too)
Use MediaMuxer to mux the encoded frames with the audio track.
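A bare-bones sketch of that loop, assuming the OutputSurface and InputSurface helper classes from the bigflake samples (a skeleton under those assumptions, not a drop-in implementation):
final int TIMEOUT_US = 10000;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
boolean inputDone = false, outputDone = false;
while (!outputDone) {
    if (!inputDone) {
        // 1. feed the decoder from the extractor
        int inIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
        if (inIndex >= 0) {
            ByteBuffer buf = decoder.getInputBuffers()[inIndex];
            int size = extractor.readSampleData(buf, 0);
            if (size < 0) {
                decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                inputDone = true;
            } else {
                decoder.queueInputBuffer(inIndex, 0, size, extractor.getSampleTime(), 0);
                extractor.advance();
            }
        }
    }
    // 2. drain the decoder; rendered frames land on the decoder's SurfaceTexture
    int outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
    if (outIndex >= 0) {
        boolean render = info.size != 0;
        decoder.releaseOutputBuffer(outIndex, render);
        if (render) {
            // 3./4. draw the frame into the encoder's input surface; re-time it here
            outputSurface.awaitNewImage();
            outputSurface.drawImage();
            inputSurface.setPresentationTime(info.presentationTimeUs * 1000); // ns
            inputSurface.swapBuffers(); // hands the frame to the encoder
        }
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            encoder.signalEndOfInputStream();
            outputDone = true;
        }
    }
    // 5. drain the encoder here and write its output with MediaMuxer (omitted)
}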
Extra links: some examples
ExtractDecodeEditEncodeMuxTest
VideoResample.java (very interesting)

Using MediaCodec to encode PCM to AMR-NB

I need to send voice messages, but PCM audio is too large, so I'm trying to convert PCM to AMR-NB using MediaCodec. I've searched everywhere, but I can't implement it properly. I need your help.
private void sendAudioFile() {
initEncoder();
translaterThread = new Thread(translaterTask);
translaterThread.start();
RLog.d("AudioRecordManager", "sendAudioFile path = " + this.mAudioPath);
RLog.d("AudioRecordManager", "sendAmrFile path = " + this.mAmrPath);
if(this.mAmrPath != null) {
File file = new File(this.mAmr.getPath());
if(!file.exists() || file.length() == 0L) {
RLog.e("AudioRecordManager", "sendAudioFile fail cause of file length 0 or audio permission denied");
return;
}
CustomizeMessage customizeMessage = CustomizeMessage.obtain(this.mAmrPath);
sendMessage(mTargetId, customizeMessage, (String) null, (String) null);
}
}
private boolean initEncoder() {
try {
encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AMR_NB);
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_AMR_NB);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mSampleRate);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
return true;
} catch (IOException e) {
Log.e(TAG, "init encoder failed.");
e.printStackTrace();
}
return false;
}
private Runnable translaterTask = new Runnable() {
@Override
public void run() {
FileInputStream in = null;
FileOutputStream out = null;
byte[] data = new byte[minBufferSize];
ByteBuffer[] inputBuffers;
ByteBuffer[] outputBuffers;
ByteBuffer inputBuffer;
ByteBuffer outputBuffer;
MediaCodec.BufferInfo bufferInfo;
int inputBufferIndex;
int outputBufferIndex;
byte[] outData;
encoder.start();
try {
in = new FileInputStream(mAudioPath.getPath());
out = new FileOutputStream(mAmrPath.getPath());
while (in.read(data) != -1) {
inputBuffers = encoder.getInputBuffers();
outputBuffers = encoder.getOutputBuffers();
inputBufferIndex = encoder.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(data);
encoder.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0);
}
bufferInfo = new MediaCodec.BufferInfo();
outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0);
while (outputBufferIndex >= 0) {
try {
outputBuffer = outputBuffers[outputBufferIndex];
outputBuffer.position(bufferInfo.offset);
outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
outData = new byte[bufferInfo.size];
outputBuffer.get(outData);
out.write(outData);
Log.d("AudioEncoder", outData.length + " bytes encoded");
encoder.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0);
} catch (Exception e) {
e.printStackTrace();
}
}
}
} catch (IOException e) {
e.printStackTrace();
} finally {
if (encoder != null) {
encoder.stop();
encoder.release();
encoder = null;
}
}
}
};
When I click the send button, I get a thread exception: java.nio.BufferOverflowException
10-22 22:43:22.280 25633-25819/com.jike.hat E/AndroidRuntime: FATAL EXCEPTION: Thread-594
Process: com.jikexueyuan.cicada_chat, PID: 25633
java.nio.BufferOverflowException
at java.nio.Buffer.checkPutBounds(Buffer.java:183)
at java.nio.DirectByteBuffer.put(DirectByteBuffer.java:356)
at java.nio.ByteBuffer.put(ByteBuffer.java:721)
at com.jikexueyuan.cicada_chat.MyAudioRecordManager$4.run(MyAudioRecordManager.java:385)
at java.lang.Thread.run(Thread.java:831)
The parameters are set as follows:
private int mSampleRate = 16000;
private int mChannelConfig = AudioFormat.CHANNEL_IN_MONO;
private int mAudioEncodingBitRate = AudioFormat.ENCODING_PCM_16BIT;
private int BIT_RATE = 15850;
So, is there something wrong with the encoder or the parameter set? Why does it throw BufferOverflowException?
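A likely cause, judging from the stack trace: inputBuffer.put(data) throws BufferOverflowException whenever data.length (minBufferSize here) is larger than the codec input buffer's capacity(). A sketch of a safer loop, which also only queues the bytes actually read from the file:
int read;
while ((read = in.read(data)) != -1) {
    int inputBufferIndex = encoder.dequeueInputBuffer(-1);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = encoder.getInputBuffers()[inputBufferIndex];
        inputBuffer.clear();
        // never put more bytes than the codec buffer can hold
        int chunk = Math.min(read, inputBuffer.capacity());
        inputBuffer.put(data, 0, chunk);
        encoder.queueInputBuffer(inputBufferIndex, 0, chunk, 0, 0);
    }
    // ... drain the encoder's output buffers as in the original code ...
}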

Decoding raw AAC with MediaCodec without using MediaExtractor

I successfully decoded and played an mp4 (AAC) file using MediaExtractor and MediaCodec with the code below. I want to decode raw AAC (in another file, with the same encoding format) to PCM. The problem is that I don't know how to set the sample size and presentationTimeUs without MediaExtractor. How can I set those parameters without using MediaExtractor?
//songwav.mp4 file is created from PCM with this format
MediaFormat outputFormat = MediaFormat.createAudioFormat(
"audio/mp4a-latm", 44100, 2);
outputFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,
MediaCodecInfo.CodecProfileLevel.AACObjectLC);
outputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
128000);
//decoding
String inputfilePath = Environment.getExternalStorageDirectory()
.getPath() + "/" + "songwav.mp4";
String outputFilePath = Environment.getExternalStorageDirectory()
.getPath() + "/" + "songwavmp4.pcm";
OutputStream outputStream = new FileOutputStream(outputFilePath);
MediaCodec codec;
AudioTrack audioTrack;
// extractor gets information about the stream
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(inputfilePath);
MediaFormat format = extractor.getTrackFormat(0);
String mime = format.getString(MediaFormat.KEY_MIME);
// the actual decoder
codec = MediaCodec.createDecoderByType(mime);
codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
codec.start();
ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
// get the sample rate to configure AudioTrack
int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
// create our AudioTrack instance
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
AudioTrack.getMinBufferSize(sampleRate,
AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT), AudioTrack.MODE_STREAM);
// start playing, we will feed you later
audioTrack.play();
extractor.selectTrack(0);
// start decoding
final long kTimeOutUs = 10000;
MediaCodec.BufferInfo BufInfo = new MediaCodec.BufferInfo();
boolean sawInputEOS = false;
boolean sawOutputEOS = false;
int inputBufIndex;
int counter=0;
while (!sawOutputEOS) {
counter++;
if (!sawInputEOS) {
inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
// Log.d(LOG_TAG, " bufIndexCheck " + bufIndexCheck);
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
int sampleSize = extractor
.readSampleData(dstBuf, 0 /* offset */);
long presentationTimeUs = 0;
if (sampleSize < 0) {
sawInputEOS = true;
sampleSize = 0;
} else {
presentationTimeUs = extractor.getSampleTime();
}
// can throw illegal state exception (???)
codec.queueInputBuffer(inputBufIndex, 0 /* offset */,
sampleSize, presentationTimeUs,
sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM
: 0);
if (!sawInputEOS) {
extractor.advance();
}
} else {
Log.e("sohail", "inputBufIndex " + inputBufIndex);
}
}
int res = codec.dequeueOutputBuffer(BufInfo, kTimeOutUs);
if (res >= 0) {
Log.i("sohail","decoding: deqOutputBuffer >=0, counter="+counter);
// Log.d(LOG_TAG, "got frame, size " + info.size + "/" +
// info.presentationTimeUs);
if (BufInfo.size > 0) {
// noOutputCounter = 0;
}
int outputBufIndex = res;
ByteBuffer buf = codecOutputBuffers[outputBufIndex];
final byte[] chunk = new byte[BufInfo.size];
buf.get(chunk);
buf.clear();
if (chunk.length > 0) {
// play
audioTrack.write(chunk, 0, chunk.length);
// write to file
outputStream.write(chunk);
}
codec.releaseOutputBuffer(outputBufIndex, false /* render */);
if ((BufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.i("sohail", "saw output EOS.");
sawOutputEOS = true;
}
} else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = codec.getOutputBuffers();
Log.i("sohail", "output buffers have changed.");
} else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat oformat = codec.getOutputFormat();
Log.i("sohail", "output format has changed to " + oformat);
} else {
Log.i("sohail", "dequeueOutputBuffer returned " + res);
}
}
Log.d(LOG_TAG, "stopping...");
// ////////closing
if (audioTrack != null) {
audioTrack.flush();
audioTrack.release();
audioTrack = null;
}
outputStream.flush();
outputStream.close();
codec.stop();
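Without MediaExtractor you have to recover both values yourself. If the raw AAC is an ADTS stream, every frame starts with a 7-byte header (assuming no CRC) that carries the frame length, and since each AAC frame decodes to 1024 PCM samples, the timestamp follows from the frame index. A sketch under those assumptions (rawAacPath is a hypothetical path to the raw file; you must also supply csd-0 in the MediaFormat yourself, since no extractor provides it):
FileInputStream in = new FileInputStream(rawAacPath); // rawAacPath: hypothetical
byte[] header = new byte[7];
long frameIndex = 0;
while (in.read(header) == 7) {
    // ADTS frame length: 13 bits spread over header bytes 3, 4 and 5
    int frameLength = ((header[3] & 0x03) << 11)
            | ((header[4] & 0xFF) << 3)
            | ((header[5] & 0xE0) >>> 5);
    byte[] frame = new byte[frameLength - 7]; // payload after the header
    in.read(frame);
    int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
    if (inputBufIndex >= 0) {
        ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
        dstBuf.clear();
        dstBuf.put(frame);
        // one AAC frame = 1024 PCM samples, so the PTS advances by 1024/sampleRate per frame
        long presentationTimeUs = frameIndex * 1024L * 1000000L / 44100L;
        codec.queueInputBuffer(inputBufIndex, 0, frame.length, presentationTimeUs, 0);
        frameIndex++;
    }
}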

MediaCodec get all frames from video

I'm trying to use MediaCodec to retrieve all the frames from a video for image processing. I render the video and try to capture the frames from the output buffers,
but I can't create a Bitmap instance from the received bytes.
I've tried rendering to a surface and to nothing (null), because I've noticed that when you render to null, the output buffers receive the bytes of the rendered frames.
This is the code:
private static final String SAMPLE = Environment.getExternalStorageDirectory() + "/test_videos/sample2.mp4";
private PlayerThread mPlayer = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
SurfaceView sv = new SurfaceView(this);
sv.getHolder().addCallback(this);
setContentView(sv);
}
protected void onDestroy() {
super.onDestroy();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if (mPlayer == null) {
mPlayer = new PlayerThread(holder.getSurface());
mPlayer.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if (mPlayer != null) {
mPlayer.interrupt();
}
}
private void writeFrameToSDCard(byte[] bytes, int i, int sampleSize) {
try {
Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, sampleSize);
File file = new File(Environment.getExternalStorageDirectory() + "/test_videos/sample" + i + ".png");
if (file.exists())
file.delete();
file.createNewFile();
FileOutputStream out = new FileOutputStream(file.getAbsoluteFile());
bmp.compress(Bitmap.CompressFormat.PNG, 90, out);
out.close();
} catch (Exception e) {
e.printStackTrace();
}
}
private class PlayerThread extends Thread {
private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
public PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
extractor = new MediaExtractor();
extractor.setDataSource(SAMPLE);
int index = extractor.getTrackCount();
Log.d("MediaCodecTag", "Track count: " + index);
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
extractor.selectTrack(i);
decoder = MediaCodec.createDecoderByType(mime);
decoder.configure(format, surface, null, 0);
break;
}
}
if (decoder == null) {
Log.e("DecodeActivity", "Can't find video info!");
return;
}
decoder.start();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
BufferInfo info = new BufferInfo();
boolean isEOS = false;
long startMs = System.currentTimeMillis();
int i = 0;
while (!Thread.interrupted()) {
if (!isEOS) {
int inIndex = decoder.dequeueInputBuffer(10000);
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEOS = true;
} else {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
}
}
/* saves frame to sdcard */
int outIndex = decoder.dequeueOutputBuffer(info, 10000); // outIndex most of the times null
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
break;
default:
ByteBuffer buffer = outputBuffers[outIndex];
Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);
// We use a very simple clock to keep the video FPS, or the video
// playback will be too fast
while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
try {
sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
break;
}
}
decoder.releaseOutputBuffer(outIndex, true);
try {
byte[] dst = new byte[outputBuffers[outIndex].capacity()];
outputBuffers[outIndex].get(dst);
writeFrameToSDCard(dst, i, dst.length);
i++;
} catch (Exception e) {
Log.d("iDecodeActivity", "Error while creating bitmap with: " + e.getMessage());
}
break;
}
// All decoded frames have been rendered, we can stop playing now
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
break;
}
}
decoder.stop();
decoder.release();
extractor.release();
}
}
Any help would be much appreciated.
You can decode to a Surface or to a ByteBuffer, but not both. Because you are configuring a Surface, there will always be zero bytes of data in the output buffer.
If you configure for ByteBuffer decoding, the data format will vary, but to my knowledge will never be an ARGB format that Bitmap understands. You can see examples of two YUV formats being examined in the buffer-to-buffer tests in the CTS EncodeDecodeTest in method checkFrame(). Note, however, that the first thing it does is check the format and return immediately if it's not recognized.
At present (Android 4.4), the only reliable way to do this is to decode to a SurfaceTexture, render that with GLES, and extract RGB data with glReadPixels(). Sample code is available on bigflake -- see ExtractMpegFramesTest (requires API 16+).
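The read-back step of that approach looks roughly like this (a sketch, assuming the frame has already been rendered into a width x height off-screen EGL surface, as ExtractMpegFramesTest does):
// read the rendered RGBA pixels back from GL and wrap them in a Bitmap
ByteBuffer pixelBuf = ByteBuffer.allocateDirect(width * height * 4);
pixelBuf.order(ByteOrder.LITTLE_ENDIAN);
GLES20.glReadPixels(0, 0, width, height,
        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuf);
pixelBuf.rewind();
Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bmp.copyPixelsFromBuffer(pixelBuf);
// note: GL's origin is bottom-left, so the bitmap comes out vertically flipped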
