Raw H.264 stream output by MediaCodec not playable - Android

I am creating a raw H.264 stream as output from MediaCodec. The problem is that the output file is not playable in the Android default player (API 16). How can it be that Android can export a file that is not playable in its own player, only in VLC on the PC? Maybe something is wrong with my code? My video is 384x288.
public class AvcEncoder {

    private MediaCodec mediaCodec;
    private BufferedOutputStream outputStream;
    private File f;

    public AvcEncoder(int w, int h, String file_name) {
        // Note: the file gets a .mp4 extension, but what is written below is a
        // raw H.264 elementary stream, not an MP4 container.
        f = new File(file_name + ".mp4");
        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(f));
        } catch (Exception e) {
            e.printStackTrace();
        }

        String key_mime = "video/avc"; // video/mp4v-es, video/3gpp, video/avc
        mediaCodec = MediaCodec.createEncoderByType(key_mime);
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(key_mime, w, h);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, (w * h) << 3);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
        // Note: KEY_AAC_PROFILE is an audio key and has no effect on a video encoder.
        mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,
                MediaCodecInfo.CodecProfileLevel.MPEG4ProfileMain);
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    public void close() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
            outputStream.flush();
            outputStream.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void offerEncoder(byte[] input) {
        try {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(0);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(input);
                // presentationTimeUs is always 0 here, so the stream carries no timing.
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);
                outputStream.write(outData, 0, outData.length);
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }
}

The Android MediaPlayer doesn't handle raw H.264 streams.
One difficulty with such streams is that the H.264 NAL units don't have timestamp information, so unless the video frames are at a known fixed frame rate the player wouldn't know when to present them.
You can either create your own player with MediaCodec (see e.g. "Play video (TextureView)" in Grafika), or convert the raw stream to a .mp4 file. The latter requires MediaMuxer, available in API 18, or the use of a 3rd-party library like ffmpeg.
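For concreteness, here is a minimal sketch of the MediaMuxer route (API 18+). Assumptions: the encoder is queued with real presentationTimeUs values, and the class name and drain structure are illustrative rather than taken from the question's code.

import android.media.MediaCodec;
import android.media.MediaMuxer;

import java.io.IOException;
import java.nio.ByteBuffer;

// Minimal sketch: mux MediaCodec's H.264 output into an .mp4 (API 18+).
public final class Mp4Writer {

    private final MediaMuxer muxer;
    private int track = -1;
    private boolean started = false;

    public Mp4Writer(String outPath) throws IOException {
        muxer = new MediaMuxer(outPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    }

    // Call this in place of writing raw NAL units to a FileOutputStream.
    public void drain(MediaCodec encoder) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        ByteBuffer[] outputBuffers = encoder.getOutputBuffers();
        while (true) {
            int index = encoder.dequeueOutputBuffer(info, 0);
            if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // The output format now carries csd-0/csd-1 (SPS/PPS); add the track once.
                track = muxer.addTrack(encoder.getOutputFormat());
                muxer.start();
                started = true;
            } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                outputBuffers = encoder.getOutputBuffers();
            } else if (index >= 0) {
                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    info.size = 0; // SPS/PPS were already delivered via the track format
                }
                if (info.size > 0 && started) {
                    ByteBuffer buf = outputBuffers[index];
                    buf.position(info.offset);
                    buf.limit(info.offset + info.size);
                    // Requires a meaningful info.presentationTimeUs on each sample.
                    muxer.writeSampleData(track, buf, info);
                }
                encoder.releaseOutputBuffer(index, false);
            } else {
                break; // INFO_TRY_AGAIN_LATER: nothing more to drain right now
            }
        }
    }

    public void release() {
        if (started) muxer.stop();
        muxer.release();
    }
}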

Related

Video and Audio Synchronization issue

I am saving video frames in H.264 format and audio in AAC format. Then I combine the two into an MP4 using ffmpeg on Android, but when I concatenate the audio and video, the audio lags behind the video and they don't play in sync. How can I play the video and audio synchronously? The H.264 video is 6 seconds long and the audio is 8 seconds; when I combine them the result is 8 seconds, so the audio runs longer and goes out of sync.
Recording Audio to AAC format
recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
recorder.setAudioEncodingBitRate(48000); // 48000
recorder.setAudioSamplingRate(720);      // 16000 -- note: 720 Hz is not a sensible sample rate; 16000 was probably intended
recorder.setOutputFile(path2);
try {
    recorder.prepare();
} catch (IOException e) {
    e.printStackTrace();
}
recorder.start();
Saving Video to H264 format
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 1280, 720);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 6000000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 720); // video second -- note: 720 fps is almost certainly not intended
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
try {
    mMediaCodec = MediaCodec.createEncoderByType("video/avc");
} catch (IOException e) {
    e.printStackTrace();
}
mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
// Video format H264
private synchronized void encode(byte[] data) {
    ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
    int inputBufferIndex = mMediaCodec.dequeueInputBuffer(-1);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
        inputBuffer.capacity();
        inputBuffer.clear();
        inputBuffer.put(data);
        // presentationTimeUs is always 0, so the encoded stream has no timing info.
        mMediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0);
    } else {
        return;
    }
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
    Log.i(TAG, "outputBufferIndex-->" + outputBufferIndex);
    do {
        if (outputBufferIndex >= 0) {
            ByteBuffer outBuffer = outputBuffers[outputBufferIndex];
            System.out.println("buffer info-->" + bufferInfo.offset + "--"
                    + bufferInfo.size + "--" + bufferInfo.flags + "--"
                    + bufferInfo.presentationTimeUs);
            byte[] outData = new byte[bufferInfo.size];
            outBuffer.get(outData);
            try {
                if (bufferInfo.offset != 0) {
                    fos.write(outData, bufferInfo.offset, outData.length - bufferInfo.offset);
                } else {
                    fos.write(outData, 0, outData.length);
                }
                fos.flush();
                Log.i(TAG, "out data -- > " + outData.length);
                mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mMediaCodec.getOutputBuffers();
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat format = mMediaCodec.getOutputFormat();
        }
    } while (outputBufferIndex >= 0);
}
Concatenating Video and Audio using ffmpeg
String[] cmd = {"-i", h264_video_path, "-i", aac_audio_path,
        "-c", "copy", "-map", "0:v:0", "-map", "1:a:0", outpath_mp4};
try {
    // FFmpeg execute command
    executeCommand(cmd);
} catch (FFmpegCommandAlreadyRunningException e) {
    e.printStackTrace();
}

Android: how to extract audio samples from a downloading audio file with MediaExtractor

I'm making a very simple music app, and I'm trying to figure out how to do this:
download an audio file and save the file locally
at any moment, extract the audio frames from the file (during or after the download)
1) For the downloading part, I use Retrofit, following this example. To make it short, it allows me to download a file and save it locally while it's downloading (so I don't have to wait for the end of the download to access the file's data).
2) For the frame extracting part, I use MediaExtractor and MediaCodec like this:
MediaCodec codec;
MediaExtractor extractor;
MediaFormat format;
ByteBuffer[] codecInputBuffers;
ByteBuffer[] codecOutputBuffers;
Boolean sawInputEOS = false;
Boolean sawOutputEOS = false;
AudioTrack mAudioTrack;
MediaCodec.BufferInfo info;
File outputFile = null;
FileDescriptor fileDescriptor = null;

@Override
protected void onCreate(Bundle savedInstanceState) {
    ...
    // the file being downloaded:
    outputFile = new File(directory, "test.mp3");
    try {
        FileInputStream fileInputStream = new FileInputStream(outputFile);
        fileDescriptor = fileInputStream.getFD();
    } catch (Exception e) {}
}

// Called once there is enough data to extract.
private void onAudioFileReady() {
    Thread thread = new Thread(new Runnable() {
        @Override
        public void run() {
            // thread:
            Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
            // audio:
            extractor = new MediaExtractor();
            // the extractor only extracts the already downloaded part of the file:
            try {
                // extractor.setDataSource(url);
                // extractor.setDataSource(outputFile.getAbsolutePath());
                // extractor.setDataSource(MainActivity.this, Uri.parse(outputFile.getAbsolutePath()), null);
                extractor.setDataSource(fileDescriptor);
            } catch (IOException e) {}
            format = extractor.getTrackFormat(0);
            String mime = format.getString(MediaFormat.KEY_MIME);
            int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
            try {
                codec = MediaCodec.createDecoderByType(mime);
            } catch (IOException e) {}
            codec.configure(format, null, null, 0);
            codec.start();
            codecInputBuffers = codec.getInputBuffers();
            codecOutputBuffers = codec.getOutputBuffers();
            extractor.selectTrack(0);
            int minBufferSize = AudioTrack.getMinBufferSize(
                    sampleRate,
                    AudioFormat.CHANNEL_OUT_STEREO,
                    AudioFormat.ENCODING_PCM_16BIT);
            mAudioTrack = new AudioTrack(
                    AudioManager.STREAM_MUSIC,
                    sampleRate,
                    AudioFormat.CHANNEL_OUT_STEREO,
                    AudioFormat.ENCODING_PCM_16BIT,
                    minBufferSize,
                    AudioTrack.MODE_STREAM);
            info = new MediaCodec.BufferInfo();
            mAudioTrack.play();
            do {
                input();
                output();
            } while (!sawInputEOS);
        }
    });
    thread.start();
}

private void input() {
    int inputBufferIndex = codec.dequeueInputBuffer(-1);
    if (inputBufferIndex >= 0) {
        ByteBuffer byteBuffer = codecInputBuffers[inputBufferIndex];
        int sampleSize = extractor.readSampleData(byteBuffer, 0);
        long presentationTimeUs = 0;
        if (sampleSize < 0) {
            Log.w(LOG_TAG, "Saw input end of stream!");
            sampleSize = 0;
        } else {
            presentationTimeUs = extractor.getSampleTime();
        }
        codec.queueInputBuffer(inputBufferIndex,
                0,
                sampleSize,
                presentationTimeUs,
                sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
        // doesn't seem to work:
        extractor.advance();
    }
}

private void output() {
    final int res = codec.dequeueOutputBuffer(info, -1);
    if (res >= 0) {
        ByteBuffer buf = codecOutputBuffers[res];
        final byte[] chunk = new byte[info.size];
        buf.get(chunk);
        buf.clear();
        if (chunk.length > 0) {
            mAudioTrack.write(chunk, 0, chunk.length);
        }
        codec.releaseOutputBuffer(res, false);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            sawOutputEOS = true;
        }
    } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        codecOutputBuffers = codec.getOutputBuffers();
    } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        final MediaFormat oformat = codec.getOutputFormat();
        mAudioTrack.setPlaybackRate(oformat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
    }
}
What it does:
When onAudioFileReady() is called, this code extracts and plays the audio samples of the file, but only the ones that have already been downloaded. When it reaches the end of the already-downloaded part, the MediaExtractor stops (it looks like extractor.advance() refuses to continue the extraction), even though more data has become available in the meantime.
What I want to achieve:
I want to be able to continue extracting the file's audio samples, as long as there is enough data for it, of course.
IMPORTANT:
At this point, you may ask why I don't just use extractor.setDataSource(url). Here are the reasons:
I want to save the audio file locally, so I can play it later
I want to be able to play the song, even long after the beginning of the download
Does anyone know how to achieve that? Thanks in advance for your help.
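One possible direction, offered as a sketch rather than a confirmed answer: on API 23+ you can back the extractor with a MediaDataSource whose reported size is re-read from the growing file, instead of a FileDescriptor whose length was fixed when setDataSource() was called. Whether the platform extractor re-queries the size mid-stream is device-dependent, so treat this as an assumption to test:

import android.media.MediaDataSource;

import java.io.IOException;
import java.io.RandomAccessFile;

// Sketch: a data source over a file that is still being downloaded (API 23+).
public class GrowingFileSource extends MediaDataSource {

    private final RandomAccessFile file;

    public GrowingFileSource(String path) throws IOException {
        file = new RandomAccessFile(path, "r");
    }

    @Override
    public int readAt(long position, byte[] buffer, int offset, int size) throws IOException {
        if (position >= file.length()) return -1; // past the downloaded part
        file.seek(position);
        return file.read(buffer, offset, size);
    }

    @Override
    public long getSize() throws IOException {
        return file.length(); // grows as the download proceeds
    }

    @Override
    public void close() throws IOException {
        file.close();
    }
}

// usage: extractor.setDataSource(new GrowingFileSource(outputFile.getAbsolutePath()));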

Android, Writing video frames in real-time results in pauses

I am trying to capture video frames, encode them with MediaCodec, and save them to a file. The code I am using is:
public class AvcEncoder {

    private static String TAG = AvcEncoder.class.getSimpleName();
    private MediaCodec mediaCodec;
    private BufferedOutputStream outputStream;

    public AvcEncoder(String fileDir) {
        Log.d(TAG, "Thread Id: " + Thread.currentThread().getId());
        File f = new File(Environment.getExternalStorageDirectory(), "Download/LiveCamera/video_encoded.h264");
        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(f));
            Log.i("AvcEncoder", "outputStream initialized");
        } catch (Exception e) {
            e.printStackTrace();
        }
        mediaCodec = MediaCodec.createEncoderByType("video/avc");
        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 960, 720);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
        //mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    public void close() throws IOException {
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
        //outputStream.flush();
        outputStream.close();
    }

    public void byteWriteTest(byte[] input) {
        try {
            outputStream.write(input, 0, input.length);
        } catch (Exception e) {
            Log.d("AvcEncoder", "Outputstream write failed");
            e.printStackTrace();
        }
        Log.i("AvcEncoder", input.length + " bytes written");
    }

    // called from Camera.setPreviewCallbackWithBuffer(...) in another class
    public void offerEncoder(byte[] input) {
        try {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(input);
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);
                try {
                    outputStream.write(outData, 0, outData.length);
                } catch (Exception e) {
                    Log.d("AvcEncoder", "Outputstream write failed");
                    e.printStackTrace();
                }
                //Log.i("AvcEncoder", outData.length + " bytes written");
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }
}
For each frame that arrives in the SurfaceView's onPreviewFrame, AvcEncoder's offerEncoder() method is called, as follows:
public class CameraView extends SurfaceView implements Camera.PreviewCallback,
        SurfaceHolder.Callback {
    ...

    @Override
    public void onPreviewFrame(byte[] pData, Camera pCamera) {
        if (VIDEO_ENCODE) {
            avcEncoder.offerEncoder(pData);
        }
    }
}
The Problem
Now, the problem I am having is with writing the encoded frames to the file. It seems that after every N frames (roughly; not exactly the same every time), the statement outputStream.write(outData, 0, outData.length) in AvcEncoder's offerEncoder method takes much longer (a few hundred times longer than at other iterations). I assume this happens when the BufferedOutputStream flushes its buffer, i.e., actually writes to the file. (Please correct me if this assumption is wrong.)
This results in dropping the frames that arrive during that time (again, I assume, based on the resulting video), which in turn produces a pause in the recorded video after every N frames.
When I comment that statement out, the iterations of the offerEncoder method take roughly equal time.
The Question
How can I solve this issue so that writing to the file is smooth? Has anyone else encountered this problem? I see that many people use this code, but no one has complained about this issue so far (or at least I did not find anyone who has).
Thanks.
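One common remedy, sketched here under the assumption that the pauses really do come from blocking disk writes on the camera callback thread: hand each encoded chunk to a dedicated writer thread through a queue, so offerEncoder() returns immediately. The class and method names below are illustrative:

import java.io.OutputStream;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

// Sketch: decouple file I/O from the preview callback with a writer thread.
class AsyncStreamWriter extends Thread {

    private final BlockingQueue<byte[]> queue = new LinkedBlockingQueue<>();
    private final OutputStream out;
    private volatile boolean running = true;

    AsyncStreamWriter(OutputStream out) {
        this.out = out;
    }

    // Called from offerEncoder() instead of outputStream.write(...); never blocks.
    void offer(byte[] chunk) {
        queue.offer(chunk);
    }

    @Override
    public void run() {
        try {
            while (running || !queue.isEmpty()) {
                byte[] chunk = queue.poll(100, TimeUnit.MILLISECONDS);
                if (chunk != null) out.write(chunk);
            }
            out.flush();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    void shutdown() throws InterruptedException {
        running = false;
        join(); // let the remaining chunks drain before the caller closes the stream
    }
}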

Encoding AAC Audio using AudioRecord and MediaCodec on Android

I am trying to encode AAC audio using Android's AudioRecord and MediaCodec. I have created an encoder class very similar to the one in (Encoding H.264 from camera with Android MediaCodec). With this class, I created an instance of AudioRecord and tell it to read off its byte[] data to the AudioEncoder (audioEncoder.offerEncoder(Data)).
while (isRecording) {
    audioRecord.read(Data, 0, Data.length);
    audioEncoder.offerEncoder(Data);
}
Here are the settings for my AudioRecord:
int audioSource = MediaRecorder.AudioSource.MIC;
int sampleRateInHz = 44100;
int channelConfig = AudioFormat.CHANNEL_IN_MONO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
I successfully collected some byte[] data and wrote it to a local file. Unfortunately, the file is not playable. I did some more searching online and found a related post (How to generate the AAC ADTS elementary stream with Android MediaCodec). Others with a similar problem say the main issue is that "the MediaCodec encoder generates the raw AAC stream. The raw AAC stream needs to be converted into a playable format, such as the ADTS stream". So I tried to add the ADTS header. Nevertheless, after I added the ADTS header (commented out in the code below), my AudioEncoder wouldn't even write the output audio file.
Is there anything I'm missing? Is my setup correct?
Any suggestions, comments, and opinions are welcome and very much appreciated. Thanks, guys!
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;
import android.util.Log;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

public class AudioEncoder {

    private MediaCodec mediaCodec;
    private BufferedOutputStream outputStream;
    private String mediaType = "audio/mp4a-latm";

    public AudioEncoder() {
        File f = new File(Environment.getExternalStorageDirectory(), "Download/audio_encoded.aac");
        touch(f);
        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(f));
            Log.e("AudioEncoder", "outputStream initialized");
        } catch (Exception e) {
            e.printStackTrace();
        }
        mediaCodec = MediaCodec.createEncoderByType(mediaType);
        final int kSampleRates[] = { 8000, 11025, 22050, 44100, 48000 };
        final int kBitRates[] = { 64000, 128000 };
        MediaFormat mediaFormat = MediaFormat.createAudioFormat(mediaType, kSampleRates[3], 1);
        mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, kBitRates[1]);
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    public void close() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
            outputStream.flush();
            outputStream.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // called from AudioRecord's read
    public synchronized void offerEncoder(byte[] input) {
        Log.e("AudioEncoder", input.length + " is coming");
        try {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(input);
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);

            //// trying to add an ADTS header
            // while (outputBufferIndex >= 0) {
            //     int outBitsSize = bufferInfo.size;
            //     int outPacketSize = outBitsSize + 7; // 7 is the ADTS header size
            //     ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
            //
            //     outputBuffer.position(bufferInfo.offset);
            //     outputBuffer.limit(bufferInfo.offset + outBitsSize);
            //
            //     byte[] outData = new byte[outPacketSize];
            //     addADTStoPacket(outData, outPacketSize);
            //
            //     outputBuffer.get(outData, 7, outBitsSize);
            //     outputBuffer.position(bufferInfo.offset);
            //
            //     // byte[] outData = new byte[bufferInfo.size];
            //     outputStream.write(outData, 0, outData.length);
            //     Log.e("AudioEncoder", outData.length + " bytes written");
            //
            //     mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
            //     outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            // }

            // Without the ADTS header
            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);
                outputStream.write(outData, 0, outData.length);
                Log.e("AudioEncoder", outData.length + " bytes written");
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }

    /**
     * Add an ADTS header at the beginning of each and every AAC packet.
     * This is needed because the MediaCodec encoder generates packets of
     * raw AAC data.
     *
     * Note that packetLen must count the ADTS header itself.
     **/
    private void addADTStoPacket(byte[] packet, int packetLen) {
        int profile = 2; // AAC LC
        // 39 = MediaCodecInfo.CodecProfileLevel.AACObjectELD
        int freqIdx = 4; // 44.1 kHz
        int chanCfg = 2; // CPE
        // fill in the ADTS header
        packet[0] = (byte) 0xFF;
        packet[1] = (byte) 0xF9;
        packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
        packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11));
        packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
        packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
        packet[6] = (byte) 0xFC;
    }

    public void touch(File f) {
        try {
            if (!f.exists())
                f.createNewFile();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
You can use Android's MediaMuxer to package the raw streams created by MediaCodec into a .mp4 file. Bonus: AAC packets contained in a .mp4 don't require the ADTS header.
I've got a working example of this technique on Github.
Check "testEncoder" method here for how to use MediaCodec as Encoder properly.
after that
In your code,
your input(audio recorder) is configured for single audio channel while your output(ADTS packet header) is set for two channels(chanCfg = 2).
also if you change your input samplerate (currently 44.1khz) you also have to change freqIdx flag in ADTS packet header. check this link for valid values.
And ADTS header profile flag is set to "AAC LC", you can also found this under
MediaCodecInfo.CodecProfileLevel.
you have set profile = 2 that is MediaCodecInfo.CodecProfileLevel.AACObjectLC
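Putting the answer's corrections together, here is a hedged sketch of writing one encoded packet with an ADTS header that matches the mono 44.1 kHz recorder above (chanCfg = 1 for mono, freqIdx = 4 for 44100 Hz, per the MPEG-4 audio tables); the method name is illustrative, not from the original code:

// Sketch: drain one AAC packet and prepend a corrected ADTS header (mono, 44.1 kHz, AAC LC).
private void writePacketWithAdts(ByteBuffer outputBuffer, MediaCodec.BufferInfo info,
                                 OutputStream out) throws IOException {
    int packetLen = info.size + 7;   // AAC payload plus the 7-byte ADTS header
    byte[] packet = new byte[packetLen];
    int profile = 2;                 // AAC LC (AACObjectLC)
    int freqIdx = 4;                 // 44100 Hz
    int chanCfg = 1;                 // mono, matching CHANNEL_IN_MONO above
    packet[0] = (byte) 0xFF;
    packet[1] = (byte) 0xF9;
    packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
    packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11));
    packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
    packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
    packet[6] = (byte) 0xFC;
    outputBuffer.position(info.offset);
    outputBuffer.limit(info.offset + info.size);
    outputBuffer.get(packet, 7, info.size); // copy the AAC payload after the header
    out.write(packet, 0, packetLen);
}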

How to use Android MediaCodec to encode Camera data (YUV420sp)

Thank you for your attention!
I want to use the Android MediaCodec APIs to encode video frames acquired from the Camera; unfortunately, I have not succeeded in doing that! I am still not familiar with the MediaCodec API.
The following is my code; I need your help to figure out what I should do.
1. The Camera settings:
Parameters parameters = mCamera.getParameters();
parameters.setPreviewFormat(ImageFormat.NV21);
parameters.setPreviewSize(320, 240);
mCamera.setParameters(parameters);
2. The encoder setup:
private void initCodec() {
    try {
        fos = new FileOutputStream(mVideoFile, false);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
    mMediaCodec = MediaCodec.createEncoderByType("video/avc");
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 320, 240);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 125000);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();
    inputBuffers = mMediaCodec.getInputBuffers();
    outputBuffers = mMediaCodec.getOutputBuffers();
}
private void encode(byte[] data) {
    int inputBufferIndex = mMediaCodec.dequeueInputBuffer(0);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
        inputBuffer.clear();
        inputBuffer.put(data);
        mMediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0);
    } else {
        return;
    }
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
    Log.i(TAG, "outputBufferIndex-->" + outputBufferIndex);
    do {
        if (outputBufferIndex >= 0) {
            ByteBuffer outBuffer = outputBuffers[outputBufferIndex];
            System.out.println("buffer info-->" + bufferInfo.offset + "--"
                    + bufferInfo.size + "--" + bufferInfo.flags + "--"
                    + bufferInfo.presentationTimeUs);
            byte[] outData = new byte[bufferInfo.size];
            outBuffer.get(outData);
            try {
                if (bufferInfo.offset != 0) {
                    fos.write(outData, bufferInfo.offset, outData.length - bufferInfo.offset);
                } else {
                    fos.write(outData, 0, outData.length);
                }
                fos.flush();
                Log.i(TAG, "out data -- > " + outData.length);
                mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mMediaCodec.getOutputBuffers();
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat format = mMediaCodec.getOutputFormat();
        }
    } while (outputBufferIndex >= 0);
}
I guess the problem is in the encode method, which is used in the camera preview callback, like this:
initCodec();
//mCamera.setPreviewCallback(new MyPreviewCallback());
mCamera.setPreviewCallback(new PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        encode(data);
    }
});
I just have no idea how to do it correctly with the MediaCodec API. Can you give me some advice or links about it?
Thank you!
I have solved the problem, as follows:
private synchronized void encode(byte[] data) {
    inputBuffers = mMediaCodec.getInputBuffers(); // here changes
    outputBuffers = mMediaCodec.getOutputBuffers();
    int inputBufferIndex = mMediaCodec.dequeueInputBuffer(-1);
    Log.i(TAG, "inputBufferIndex-->" + inputBufferIndex);
    //......
Next, you will find that the encoded video's color is not right; for more information, please see MediaCodec and Camera: colorspaces don't match.
The YUV420 formats output by the camera are incompatible with the formats accepted by the MediaCodec AVC encoder. In the best case, it's essentially NV12 vs. NV21 (U and V planes are reversed), requiring a manual reordering. In the worst case, as of Android 4.2, the encoder input format may be device-specific.
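As an illustration of that reordering, a minimal sketch of an NV21-to-NV12 swap (assuming the encoder on a given device actually accepts NV12-style semi-planar input, which is not guaranteed):

// Sketch: NV21 (camera) stores interleaved chroma as VU; NV12-style
// semi-planar input expects UV, so swap each chroma byte pair.
static byte[] nv21ToNv12(byte[] nv21, int width, int height) {
    byte[] nv12 = new byte[nv21.length];
    int ySize = width * height;
    System.arraycopy(nv21, 0, nv12, 0, ySize); // the Y plane is identical
    for (int i = ySize; i + 1 < nv21.length; i += 2) {
        nv12[i] = nv21[i + 1];     // U comes first in NV12
        nv12[i + 1] = nv21[i];     // V comes second
    }
    return nv12;
}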
You're better off using MediaRecorder to connect the camera hardware to the encoder.
Update:
It's now possible to pass the camera's Surface preview to MediaCodec, instead of using the YUV data in the ByteBuffer. This is faster and more portable. See the CameraToMpegTest sample here.
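For reference, the Surface route looks roughly like this (API 18+); a sketch, not a drop-in replacement for the code above:

// Sketch: request Surface input so no YUV buffers (and no color conversion) are needed.
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Surface inputSurface = mediaCodec.createInputSurface(); // must be called between configure() and start()
mediaCodec.start();
// Render camera frames into inputSurface (e.g., via OpenGL ES, as CameraToMpegTest does).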
