When I use Android MediaCodec to encode video to H.264, I found that encoding one frame of 1280x720 YUV420P raw data takes about 10 ms, while one frame of 640x360 YUV420P raw data takes about 100 ms.
I have tried this on several phones with MTK or Qualcomm video encoders, and all of the devices behave like this.
Can anyone tell me how to change the following code to speed it up, or explain why the lower resolution costs more time?
private boolean openH264Encoder(int width, int height, int bitrate) throws TranscodeNativeException {
    try {
        mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
            Log.w(TAG, "codec name " + mediaCodec.getName());
        }
        for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (codecInfo.isEncoder()) {
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
                    if (mediaCodec.getName().equalsIgnoreCase(codecInfo.getName())) {
                        MediaCodecInfo.CodecCapabilities cc = codecInfo.getCapabilitiesForType(MediaFormat.MIMETYPE_VIDEO_AVC);
                        for (int c : cc.colorFormats) {
                            Log.i(TAG, String.format("color format 0x%x", c));
                            if (MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar == c) {
                                videoColorFormat = c;
                                Log.i(TAG, String.format("final color format 0x%x", videoColorFormat));
                                break;
                            } else if (MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar == c) {
                                videoColorFormat = c;
                                Log.i(TAG, String.format("find color format 0x%x", videoColorFormat));
                            }
                        }
                        break;
                    }
                } else {
                    return false;
                }
            }
        }
        if (videoColorFormat == 0) {
            Log.e(TAG, "can't find supported color format");
            return false;
        }
        Log.i(TAG, String.format("use color format 0x%x", videoColorFormat));
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, videoColorFormat);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 25);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
        mediaFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
        encoderOutputBuffers = mediaCodec.getOutputBuffers();
    } catch (Exception e) {
        Log.w(TAG, "open h264 encoder failed");
        throw new TranscodeNativeException("open h264 encoder failed");
    }
    return true;
}
public byte[] encodeH264(byte[] data, long ms) {
    if (mediaCodec == null)
        return null;
    // If the encoder wants semi-planar input, interleave the planar U and V
    // planes of the incoming YUV420P frame into a single UV plane (NV12).
    if (MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar == videoColorFormat) {
        byte[] newData = new byte[data.length];
        System.arraycopy(data, 0, newData, 0, data.length * 2 / 3);
        for (int i = 0; i < data.length / 6; i++) {
            newData[data.length * 2 / 3 + i * 2] = data[data.length * 2 / 3 + i];
            newData[data.length * 2 / 3 + i * 2 + 1] = data[data.length * 5 / 6 + i];
        }
        data = newData;
    }
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    try {
        ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
        int inputBufferIndex = mediaCodec.dequeueInputBuffer(20 * 1000);
        if (inputBufferIndex >= 0) {
            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            inputBuffer.put(data);
            mediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, ms * 1000, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
            while (true) {
                int encoderStatus = mediaCodec.dequeueOutputBuffer(info, -1);
                Log.i(TAG, "encoder status " + encoderStatus);
                switch (encoderStatus) {
                    case MediaCodec.INFO_TRY_AGAIN_LATER:
                        Log.i(TAG, "why tell me try again later?");
                        return null;
                    case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: {
                        MediaFormat encformat = mediaCodec.getOutputFormat();
                        Log.i(TAG, "output format changed");
                        return new byte[0];
                    }
                    case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                        encoderOutputBuffers = mediaCodec.getOutputBuffers();
                        Log.i(TAG, "output buffer changed");
                        continue;
                    default: {
                        ByteBuffer encoderOutputBuffer = encoderOutputBuffers[encoderStatus];
                        if (encoderOutputBuffer == null) {
                            Log.w(TAG, "output buffer is null");
                            return null;
                        }
                        byte[] outData = new byte[info.size];
                        encoderOutputBuffer.get(outData);
                        ByteBuffer byteBuffer = ByteBuffer.wrap(outData);
                        if (spsppsBuffer == null && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            // save sps and pps
                            if (byteBuffer.getInt() == 0x00000001) {
                                spsppsBuffer = outData;
                            }
                            String d = "";
                            for (byte i : outData)
                                d += String.format("0x%02x ", i);
                            Log.i(TAG, "got sps pps " + d);
                            mediaCodec.releaseOutputBuffer(encoderStatus, false);
                            continue;
                        } else {
                            // key frame: prepend the saved SPS/PPS
                            if ((outData[4] & 0x1f) == 5) {
                                byte[] buffer = new byte[outData.length + spsppsBuffer.length];
                                System.arraycopy(spsppsBuffer, 0, buffer, 0, spsppsBuffer.length);
                                System.arraycopy(outData, 0, buffer, spsppsBuffer.length, outData.length);
                                Log.i(TAG, "got key frame");
                                mediaCodec.releaseOutputBuffer(encoderStatus, false);
                                return buffer;
                            } else {
                                Log.i(TAG, "got non key frame");
                                mediaCodec.releaseOutputBuffer(encoderStatus, false);
                                return outData;
                            }
                        }
                    }
                }
            }
        } else {
            Log.w(TAG, "dequeue input buffer failed, skip one frame");
            return new byte[0];
        }
    } catch (Exception e) {
        e.printStackTrace();
        Log.w(TAG, "encode h264 failed");
        return null;
    }
}
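To get the numbers quoted above, I wrap a simple wall-clock probe around encodeH264 (a sketch; `transcoder` and `frames` are placeholder names, not part of the class above):
// Sketch: average encode latency per frame, assuming `transcoder` exposes
// encodeH264(byte[], long) as defined above and `frames` holds raw YUV420P frames.
long totalNs = 0;
long ptsMs = 0;
for (byte[] frame : frames) {
    long t0 = System.nanoTime();
    transcoder.encodeH264(frame, ptsMs);
    totalNs += System.nanoTime() - t0;
    ptsMs += 40; // 25 fps
}
Log.i(TAG, String.format("avg encode time %.2f ms/frame", totalNs / 1e6 / frames.size()));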
I need to create videos with data hidden in them. I managed to extract video frames as NV21 buffers using a MediaCodec decoder and save them; I then create an MP4 file from the frames using a MediaCodec encoder.
The class below is responsible for saving the frame files during the encode process, or for checking the embedded value when we want to extract data from the stego video.
public class ExtractMpegFramesBufferDecoder {
    private static final String TAG = "ExtractMpegFramesDec";
    private static final boolean VERBOSE = true; // lots of logging
    // where to find files (note: requires WRITE_EXTERNAL_STORAGE permission)
    private File STORE_FRAME_DIRECTORY;
    private String INPUT_FILE;
    private int frameRate; // stop extracting after this many
    private int saveWidth;
    private int saveHeight;
    private int decodeCount;
    private Handler _progressBarHandler;
    private int duration;
    //
    private int MAX_FRAMES;
    private boolean fromDecode;
    //
    public ExtractMpegFramesBufferDecoder(File storeFrameDirectory, String inputVideoPath, int frameRate
            , int saveWidth, int saveHeight
            , double duration, int rotation
            , Handler _progressBarHandler) {
        this.STORE_FRAME_DIRECTORY = storeFrameDirectory;
        this.INPUT_FILE = inputVideoPath;
        this.frameRate = frameRate;
        this.saveWidth = saveWidth;
        this.saveHeight = saveHeight;
        this._progressBarHandler = _progressBarHandler;
        this.duration = (int) duration;
    }

    /**
     * Extracts frames from an MP4 file using a buffer-based decoder (no Surface).
     */
    public void extractMpegFrames(int maxFrame, boolean fromDecode) throws IOException {
        MediaCodec decoder = null;
        MediaExtractor extractor = null;
        MAX_FRAMES = maxFrame;
        this.fromDecode = fromDecode;
        try {
            File inputFile = new File(INPUT_FILE); // must be an absolute path
            // The MediaExtractor error messages aren't very useful. Check to see if the input
            // file exists so we can throw a better one if it's not there.
            if (!inputFile.canRead()) {
                throw new FileNotFoundException("Unable to read " + inputFile);
            }
            extractor = new MediaExtractor();
            extractor.setDataSource(inputFile.toString());
            int trackIndex = selectTrack(extractor);
            if (trackIndex < 0) {
                throw new RuntimeException("No video track found in " + inputFile);
            }
            extractor.selectTrack(trackIndex);
            MediaFormat format = extractor.getTrackFormat(trackIndex);
            if (VERBOSE) {
                Log.d(TAG, "Video size is " + format.getInteger(MediaFormat.KEY_WIDTH) + "x" +
                        format.getInteger(MediaFormat.KEY_HEIGHT));
            }
            // Create a MediaCodec decoder, and configure it with the MediaFormat from the
            // extractor. It's very important to use the format from the extractor because
            // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
            String mime = format.getString(MediaFormat.KEY_MIME);
            decoder = MediaCodec.createDecoderByType(mime);
            decoder.configure(format, null, null, 0);
            decoder.start();
            doExtract(extractor, trackIndex, decoder);
        } finally {
            if (decoder != null) {
                decoder.stop();
                decoder.release();
                decoder = null;
            }
            if (extractor != null) {
                extractor.release();
                extractor = null;
            }
        }
    }

    /**
     * Selects the video track, if any.
     *
     * @return the track index, or -1 if no video track is found.
     */
    private int selectTrack(MediaExtractor extractor) {
        // Select the first video track we find, ignore the rest.
        int numTracks = extractor.getTrackCount();
        for (int i = 0; i < numTracks; i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("video/")) {
                if (VERBOSE) {
                    Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
                }
                return i;
            }
        }
        return -1;
    }

    /**
     * Work loop.
     */
    public void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder) throws IOException {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int inputChunk = 0;
        decodeCount = 0;
        long frameSaveTime = 0;
        boolean outputDone = false;
        boolean inputDone = false;
        ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers();
        MediaFormat decoderOutputFormat = null;
        long rawSize = 0;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "loop");
            // Feed more data to the decoder.
            if (!inputDone) {
                int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    // Read the sample data into the ByteBuffer. This neither respects nor
                    // updates inputBuf's position, limit, etc.
                    int chunkSize = extractor.readSampleData(inputBuf, 0);
                    if (chunkSize < 0) {
                        // End of stream -- send empty frame with EOS flag set.
                        decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS");
                    } else {
                        if (extractor.getSampleTrackIndex() != trackIndex) {
                            Log.w(TAG, "WEIRD: got sample from track " +
                                    extractor.getSampleTrackIndex() + ", expected " + trackIndex);
                        }
                        long presentationTimeUs = extractor.getSampleTime();
                        decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
                                presentationTimeUs, 0 /*flags*/);
                        if (VERBOSE) {
                            Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                    chunkSize);
                        }
                        inputChunk++;
                        extractor.advance();
                    }
                } else {
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }
            if (!outputDone) {
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // refresh our copy of the output buffer array
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
                    decoderOutputBuffers = decoder.getOutputBuffers();
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    decoderOutputFormat = newFormat;
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
                } else if (decoderStatus < 0) {
                    Log.e(TAG, "unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0
                    if (VERBOSE) Log.d(TAG, "decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }
                    ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];
                    outputFrame.position(info.offset);
                    outputFrame.limit(info.offset + info.size);
                    rawSize += info.size;
                    if (info.size == 0) {
                        if (VERBOSE) Log.d(TAG, "got empty frame");
                    } else {
                        // if it's decode then check the altered value
                        // else save the frames
                        if (fromDecode) {
                            outputFrame.rewind();
                            byte[] data = new byte[outputFrame.remaining()];
                            outputFrame.get(data);
                            int size = saveWidth * saveHeight;
                            int offset = size;
                            int[] pixels = new int[size];
                            int u, v, y1, y2, y3, y4;
                            int uvIndex = 0;
                            if (decodeCount == 1) {
                                // i walks the Y samples (and the final pixels)
                                // k walks the interleaved U/V samples
                                for (int i = 0, k = 0; i < size; i += 2, k += 2) {
                                    y1 = data[i] & 0xff;
                                    y2 = data[i + 1] & 0xff;
                                    y3 = data[saveWidth + i] & 0xff;
                                    y4 = data[saveWidth + i + 1] & 0xff;
                                    u = data[offset + k] & 0xff;
                                    v = data[offset + k + 1] & 0xff;
                                    // getting size
                                    if (uvIndex == 0) {
                                        int specialByte1P1 = u & 15;
                                        int specialByte1P2 = v & 15;
                                        int specialCharacter1 = (specialByte1P1 << 4) | specialByte1P2;
                                        if (specialCharacter1 != 17) {
                                            throw new IllegalArgumentException("value has changed");
                                        }
                                    }
                                    uvIndex++;
                                    if (i != 0 && (i + 2) % saveWidth == 0)
                                        i += saveWidth;
                                }
                            }
                        } else {
                            outputFrame.rewind();
                            byte[] data = new byte[outputFrame.remaining()];
                            outputFrame.get(data);
                            try {
                                File outputFile = new File(STORE_FRAME_DIRECTORY,
                                        String.format(Locale.US, "frame_%d.frame", decodeCount));
                                FileOutputStream stream = new FileOutputStream(outputFile.getAbsoluteFile());
                                stream.write(data);
                            } catch (FileNotFoundException e1) {
                                e1.printStackTrace();
                            }
                        }
                        decodeCount++;
                    }
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }
                    decoder.releaseOutputBuffer(decoderStatus, false);
                }
            }
        }
        int numSaved = (frameRate < decodeCount) ? frameRate : decodeCount;
        Log.d(TAG, "Saving " + numSaved + " frames took " +
                (frameSaveTime / numSaved / 1000) + " us per frame");
    }

    public int getDecodeCount() {
        return decodeCount;
    }
}
In the class below I encode the frames and build the MP4 with a MediaCodec encoder. As a test, I alter one U/V pair of frame 1, storing the number 17 in the low nibbles of the first U and V samples; the bit packing is sketched just below.
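To make the bit manipulation concrete, here is the nibble packing the class performs, as a standalone sketch (the chroma sample values are made up):
// Hide the marker byte 17 (binary 0001 0001) in the low nibbles of one U/V
// pair, then recover it the way the decoder class above does.
int marker = 17;
int u = 0xAB, v = 0xCD;                               // example chroma samples
int uNew = (u & 0xF0) | (marker >> 4);                // high nibble of marker -> U
int vNew = (v & 0xF0) | (marker & 0x0F);              // low nibble of marker -> V
int recovered = ((uNew & 0x0F) << 4) | (vNew & 0x0F); // == 17 again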
public class YUVFrameBufferToVideoEncoder {
    private static final String TAG = BitmapToVideoEncoder.class.getSimpleName();
    private static final int ERROR_IN_PROCESS = 0;
    private IBitmapToVideoEncoderCallback mCallback;
    private File mOutputFile;
    private Queue<File> mEncodeQueue = new ConcurrentLinkedQueue();
    private MediaCodec mediaCodec;
    private MediaMuxer mediaMuxer;
    private Object mFrameSync = new Object();
    private CountDownLatch mNewFrameLatch;
    private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
    private static int mWidth;
    private static int mHeight;
    private static int BIT_RATE;
    private static int FRAME_RATE; // Frames per second
    private int frameCount;
    private Handler _progressBarHandler;
    private Handler _processHandler;
    private static final int I_FRAME_INTERVAL = 1;
    private int mGenerateIndex = 0;
    private int mTrackIndex;
    private boolean mNoMoreFrames = false;
    private boolean mAbort = false;
    //
    private byte[] dataToHide;

    public interface IBitmapToVideoEncoderCallback {
        void onEncodingComplete(File outputFile);
    }

    public YUVFrameBufferToVideoEncoder(IBitmapToVideoEncoderCallback callback) {
        mCallback = callback;
    }

    public boolean isEncodingStarted() {
        return (mediaCodec != null) && (mediaMuxer != null) && !mNoMoreFrames && !mAbort;
    }

    public int getActiveBitmaps() {
        return mEncodeQueue.size();
    }

    public boolean startEncoding(int width, int height, int fps, int bitrate, int frameCount
            , byte[] dataToHide, Handler _progressBarHandler, Handler _processHandler
            , File outputFile) {
        mWidth = width;
        mHeight = height;
        FRAME_RATE = fps;
        BIT_RATE = bitrate;
        this.frameCount = frameCount;
        this._progressBarHandler = _progressBarHandler;
        this._processHandler = _processHandler;
        mOutputFile = outputFile;
        this.dataToHide = dataToHide;
        String outputFileString;
        try {
            outputFileString = outputFile.getCanonicalPath();
        } catch (IOException e) {
            Log.e(TAG, "Unable to get path for " + outputFile);
            ErrorManager.getInstance().addErrorMessage("Unable to get path for " + outputFile);
            return false;
        }
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            ErrorManager.getInstance().addErrorMessage("Unable to find an appropriate codec for " + MIME_TYPE);
            return false;
        }
        Log.d(TAG, "found codec: " + codecInfo.getName());
        int colorFormat;
        try {
            colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
        } catch (Exception e) {
            colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
        }
        try {
            mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
        } catch (IOException e) {
            Log.e(TAG, "Unable to create MediaCodec " + e.getMessage());
            ErrorManager.getInstance().addErrorMessage("Unable to create MediaCodec " + e.getMessage());
            return false;
        }
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
        try {
            mediaMuxer = new MediaMuxer(outputFileString, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException e) {
            Log.e(TAG, "MediaMuxer creation failed. " + e.getMessage());
            ErrorManager.getInstance().addErrorMessage("MediaMuxer creation failed. " + e.getMessage());
            return false;
        }
        Log.d(TAG, "Initialization complete. Starting encoder...");
        Completable.fromAction(this::encode)
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe();
        return true;
    }

    public void stopEncoding() {
        if (mediaCodec == null || mediaMuxer == null) {
            Log.d(TAG, "Failed to stop encoding since it never started");
            return;
        }
        Log.d(TAG, "Stopping encoding");
        mNoMoreFrames = true;
        synchronized (mFrameSync) {
            if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
                mNewFrameLatch.countDown();
            }
        }
    }

    public void abortEncoding() {
        if (mediaCodec == null || mediaMuxer == null) {
            Log.d(TAG, "Failed to abort encoding since it never started");
            return;
        }
        Log.d(TAG, "Aborting encoding");
        mNoMoreFrames = true;
        mAbort = true;
        mEncodeQueue = new ConcurrentLinkedQueue(); // Drop all frames
        synchronized (mFrameSync) {
            if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
                mNewFrameLatch.countDown();
            }
        }
    }

    public void queueFrame(File frame) {
        if (mediaCodec == null || mediaMuxer == null) {
            Log.d(TAG, "Failed to queue frame. Encoding not started");
            return;
        }
        Log.d(TAG, "Queueing frame");
        mEncodeQueue.add(frame);
        synchronized (mFrameSync) {
            if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
                mNewFrameLatch.countDown();
            }
        }
    }

    private void encode() {
        Log.d(TAG, "Encoder started");
        while (true) {
            if (mNoMoreFrames && (mEncodeQueue.size() == 0)) break;
            File frame = mEncodeQueue.poll();
            if (frame == null) {
                synchronized (mFrameSync) {
                    mNewFrameLatch = new CountDownLatch(1);
                }
                try {
                    mNewFrameLatch.await();
                } catch (InterruptedException e) {
                }
                frame = mEncodeQueue.poll();
            }
            if (frame == null) continue;
            int size = (int) frame.length();
            byte[] bytesNV21 = new byte[size];
            try {
                BufferedInputStream buf = new BufferedInputStream(new FileInputStream(frame));
                buf.read(bytesNV21, 0, bytesNV21.length);
                buf.close();
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
            int offsetSize = mWidth * mHeight;
            int byteNV21Offset = offsetSize;
            int u, v, y1, y2, y3, y4;
            //
            int dataToHideLength = 0;
            if (dataToHide != null)
                dataToHideLength = dataToHide.length;
            boolean isLastIndexInserted1 = false;
            boolean isLastIndexInserted2 = false;
            boolean isLastIndexInserted3 = false;
            int uvIndex = 0;
            int frameByteCapacity = ((mWidth * mHeight) / 4) / 20;
            Log.e(TAG, "encode: dataToHideLength: " + dataToHideLength);
            Log.e(TAG, "encode: frameByteCapacity: " + frameByteCapacity);
            //
            // i walks the Y samples (and the final pixels)
            // k walks the interleaved U/V samples
            for (int i = 0, k = 0; i < offsetSize; i += 2, k += 2) {
                y1 = bytesNV21[i] & 0xff;
                y2 = bytesNV21[i + 1] & 0xff;
                y3 = bytesNV21[mWidth + i] & 0xff;
                y4 = bytesNV21[mWidth + i + 1] & 0xff;
                u = bytesNV21[byteNV21Offset + k] & 0xff;
                v = bytesNV21[byteNV21Offset + k + 1] & 0xff;
                // frame 1
                // altering u and v for test
                if (mGenerateIndex == 1) {
                    int Unew = u & 240;
                    int Vnew = v & 240;
                    if (uvIndex == 0) {
                        // used in start and end of stego bytes
                        int specialByte1Integer = 17;
                        int specialByte1P1 = specialByte1Integer & 240;
                        int specialByte1P2 = specialByte1Integer & 15;
                        // shift p1 right 4 positions
                        specialByte1P1 = specialByte1P1 >> 4;
                        u = Unew | specialByte1P1;
                        v = Vnew | specialByte1P2;
                    }
                    bytesNV21[byteNV21Offset + k] = (byte) u;
                    bytesNV21[byteNV21Offset + k + 1] = (byte) v;
                }
                uvIndex++;
                if (i != 0 && (i + 2) % mWidth == 0)
                    i += mWidth;
            }
            long TIMEOUT_USEC = 500000;
            int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
            long ptsUsec = computePresentationTime(mGenerateIndex, FRAME_RATE);
            if (inputBufIndex >= 0) {
                final ByteBuffer inputBuffer = mediaCodec.getInputBuffers()[inputBufIndex];
                inputBuffer.clear();
                inputBuffer.put(bytesNV21);
                mediaCodec.queueInputBuffer(inputBufIndex, 0, bytesNV21.length, ptsUsec, 0);
                mGenerateIndex++;
                int percentComplete = 70 + (int) ((((double) mGenerateIndex) / (frameCount)) * 30);
                if (_progressBarHandler != null) {
                    _progressBarHandler.sendMessage(_progressBarHandler.obtainMessage(percentComplete));
                }
                Log.w("creatingVideo: ", "is:" + percentComplete);
            }
            MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
            int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                Log.e(TAG, "No output from encoder available");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // expected once, before the first output buffer
                MediaFormat newFormat = mediaCodec.getOutputFormat();
                mTrackIndex = mediaMuxer.addTrack(newFormat);
                mediaMuxer.start();
            } else if (encoderStatus < 0) {
                Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            } else if (mBufferInfo.size != 0) {
                ByteBuffer encodedData = mediaCodec.getOutputBuffers()[encoderStatus];
                if (encodedData == null) {
                    Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
                } else {
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                    mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    mediaCodec.releaseOutputBuffer(encoderStatus, false);
                }
            }
        }
        release();
        if (mAbort) {
            mOutputFile.delete();
        } else {
            mCallback.onEncodingComplete(mOutputFile);
        }
    }

    private void release() {
        try {
            if (mediaCodec != null) {
                mediaCodec.stop();
                mediaCodec.release();
                mediaCodec = null;
                Log.d(TAG, "RELEASE CODEC");
            }
            if (mediaMuxer != null) {
                mediaMuxer.stop();
                mediaMuxer.release();
                mediaMuxer = null;
                Log.d(TAG, "RELEASE MUXER");
            }
        } catch (Exception ignored) {
            ErrorManager.getInstance().addErrorMessage("unsupported video file");
            Message res = _processHandler.obtainMessage(ERROR_IN_PROCESS);
            _processHandler.sendMessage(res);
        }
    }

    private static MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) {
                continue;
            }
            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }

    private static int selectColorFormat(MediaCodecInfo codecInfo,
                                         String mimeType) {
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo
                .getCapabilitiesForType(mimeType);
        for (int i = 0; i < capabilities.colorFormats.length; i++) {
            int colorFormat = capabilities.colorFormats[i];
            if (isRecognizedFormat(colorFormat)) {
                return colorFormat;
            }
        }
        return 0; // not reached
    }

    private static boolean isRecognizedFormat(int colorFormat) {
        switch (colorFormat) {
            // these are the formats we know how to handle
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                return false;
        }
    }

    private long computePresentationTime(long frameIndex, int framerate) {
        return 132 + frameIndex * 1000000 / framerate;
    }
}
The output video is created successfully without any problem, but MediaCodec has changed the altered test value and I cannot retrieve it.
So here is my question: is this the right approach to video steganography on Android? If it is not, can you please make a suggestion?
Steganography comes with a prerequisite: lossless encoding.
None of the codecs available on Android supports lossless video encoding as of now, so I'm afraid your LSBs will never survive the encode/decode round trip.
Suggestion: if you don't have too many frames, use a lossless format instead; you could encode your frames as a sequence of PNG images, as sketched below.
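For instance, an NV21 frame buffer can be converted to RGB and written out losslessly as PNG. This is a minimal sketch using the usual full-range YUV-to-RGB coefficients; error handling and rotation are elided:
import android.graphics.Bitmap;
import java.io.FileOutputStream;
import java.io.IOException;

public final class PngFrameWriter {
    // Convert one NV21 frame to ARGB and compress it as PNG (lossless).
    public static void saveNv21AsPng(byte[] nv21, int width, int height, String path)
            throws IOException {
        int frameSize = width * height;
        int[] argb = new int[frameSize];
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                int y = nv21[j * width + i] & 0xff;
                int uvBase = frameSize + (j >> 1) * width + (i & ~1);
                int v = (nv21[uvBase] & 0xff) - 128;     // NV21 stores V first
                int u = (nv21[uvBase + 1] & 0xff) - 128;
                int r = clamp((int) (y + 1.370705f * v));
                int g = clamp((int) (y - 0.337633f * u - 0.698001f * v));
                int b = clamp((int) (y + 1.732446f * u));
                argb[j * width + i] = 0xff000000 | (r << 16) | (g << 8) | b;
            }
        }
        Bitmap bmp = Bitmap.createBitmap(argb, width, height, Bitmap.Config.ARGB_8888);
        try (FileOutputStream out = new FileOutputStream(path)) {
            bmp.compress(Bitmap.CompressFormat.PNG, 100, out); // quality is ignored for PNG
        }
        bmp.recycle();
    }

    private static int clamp(int x) {
        return x < 0 ? 0 : (x > 255 ? 255 : x);
    }
}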
I am using the MediaCodec API to encode video and audio into an MP4 file. The data is encoded in separate threads. Sometimes, on some devices, the audio encoder stops returning any available input buffer, and as a result MediaMuxer crashes when I try to stop it. Here is my code:
configuring media codec:
public static final String MIME_TYPE_AUDIO = "audio/mp4a-latm";
public static final int SAMPLE_RATE = 44100;
public static final int CHANNEL_COUNT = 1;
public static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO;
public static final int BIT_RATE_AUDIO = 128000;
public static final int SAMPLES_PER_FRAME = 1024 * 2;
public static final int FRAMES_PER_BUFFER = 24;
public static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
public static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;
public static final int MAX_INPUT_SIZE = 16384 * 4;
public static final int MAX_SAMPLE_SIZE = 256 * 1024;

private AudioRecord audioRecord;
private ByteBuffer[] inputBuffers;
private ByteBuffer inputBuffer;
private MediaExtractor mediaExtractor;
private boolean audioSended = false;
private boolean completed = false;
private int sampleCount;
private int iBufferSize;

public AudioEncoderCore(MovieMuxer muxer) throws IOException {
    this.muxer = muxer;
    bufferInfo = new MediaCodec.BufferInfo();
    MediaFormat mediaFormat = MediaFormat.createAudioFormat(MIME_TYPE_AUDIO, SAMPLE_RATE, CHANNEL_COUNT);
    mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, MAX_INPUT_SIZE);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE_AUDIO);
    encoder = MediaCodec.createEncoderByType(MIME_TYPE_AUDIO);
    encoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    encoder.start();
    iBufferSize = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
    // Ensure buffer is adequately sized for the AudioRecord
    // object to initialize
    int iMinBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT);
    if (iBufferSize < iMinBufferSize)
        iBufferSize = ((iMinBufferSize / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
    audioRecord = new AudioRecord(AUDIO_SOURCE, SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT, iBufferSize);
    audioRecord.startRecording();
}
sending data to encoder:
public void sendDataFromMic(boolean endOfStream) {
    if (endOfStream)
        Log.d(TAG, "sendDataFromMic end of stream");
    long audioPresentationTimeNs;
    byte[] mTempBuffer = new byte[SAMPLES_PER_FRAME];
    audioPresentationTimeNs = System.nanoTime();
    int iReadResult = audioRecord.read(mTempBuffer, 0, mTempBuffer.length);
    if (iReadResult == AudioRecord.ERROR_BAD_VALUE || iReadResult == AudioRecord.ERROR_INVALID_OPERATION || iReadResult == 0) {
        Log.e(TAG, "audio buffer read error");
    } else {
        // send current frame data to encoder
        try {
            if (inputBuffers == null)
                inputBuffers = encoder.getInputBuffers();
            // Sometimes can't get any available input buffer
            int inputBufferIndex = encoder.dequeueInputBuffer(100000);
            Log.d(TAG, "inputBufferIndex = " + inputBufferIndex);
            if (inputBufferIndex >= 0) {
                inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(mTempBuffer, 0, iReadResult);
                Log.d(TAG, "sending frame to audio encoder " + iReadResult + " bytes");
                encoder.queueInputBuffer(inputBufferIndex, 0, iReadResult, audioPresentationTimeNs / 1000,
                        endOfStream ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
            }
        } catch (Throwable t) {
            Log.e(TAG, "sendFrameToAudioEncoder exception");
            t.printStackTrace();
        }
    }
}
drain encoder:
public void drainEncoder() {
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    while (true) {
        int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_NSECS);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d(TAG, "no output available, spinning to await EOS");
            break;
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
            encoderOutputBuffers = encoder.getOutputBuffers();
        else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = encoder.getOutputFormat();
            Log.d(TAG, "encoder format changed: " + newFormat);
            trackIndex = muxer.addTrack(newFormat);
        } else if (muxer.isMuxerStarted()) {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null)
                throw new RuntimeException("encoded buffer " + encoderStatus + " was null");
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                bufferInfo.size = 0;
            }
            if (bufferInfo.size != 0) {
                encodedData.position(bufferInfo.offset);
                encodedData.limit(bufferInfo.offset + bufferInfo.size);
                muxer.writeSampleData(trackIndex, encodedData, bufferInfo);
                Log.d(TAG, "sent " + bufferInfo.size + " bytes to muxer, ts=" +
                        bufferInfo.presentationTimeUs + " track index=" + trackIndex);
            }
            encoder.releaseOutputBuffer(encoderStatus, false);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "end of stream reached");
                completed = true;
                break; // out of while
            }
        }
    }
}
The bug reproduces consistently on HTC One and Galaxy S3, but everything works fine on Huawei Honor 3C.
After investigating the source code I found the solution. When I dequeue an output buffer, the muxer may not be started yet; in that case the buffer was never released, so the codec eventually runs out of buffers. Here is the working code for draining the encoder:
public void drainEncoder() {
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    while (true) {
        int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_NSECS);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d(TAG, "no output available, spinning to await EOS");
            break;
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
            encoderOutputBuffers = encoder.getOutputBuffers();
        else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = encoder.getOutputFormat();
            Log.d(TAG, "encoder format changed: " + newFormat);
            trackIndex = muxer.addTrack(newFormat);
        } else if (muxer.isMuxerStarted()) {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null)
                throw new RuntimeException("encoded buffer " + encoderStatus + " was null");
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                bufferInfo.size = 0;
            }
            if (bufferInfo.size != 0) {
                encodedData.position(bufferInfo.offset);
                encodedData.limit(bufferInfo.offset + bufferInfo.size);
                muxer.writeSampleData(trackIndex, encodedData, bufferInfo);
                Log.d(TAG, "sent " + bufferInfo.size + " bytes to muxer, ts=" +
                        bufferInfo.presentationTimeUs + " track index=" + trackIndex);
            }
            encoder.releaseOutputBuffer(encoderStatus, false);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "end of stream reached");
                completed = true;
                break; // out of while
            }
        } else {
            // Muxer not ready, release the buffer anyway
            encoder.releaseOutputBuffer(encoderStatus, false);
            Log.d(TAG, "muxer not ready, skip data");
        }
    }
}
The question is simple, but I have no clue how to solve it.
I wrote a single line of code to create an AAC encoder on my Nexus 4 (Android 4.4.2):
MediaCodec codec = MediaCodec.createEncoderByType("audio/mp4a-latm");
The return value saved in "codec" is not null, but I get a red error message in Logcat:
03-20 15:25:08.985: E/OMXMaster(24517): A component of name 'OMX.qcom.audio.decoder.aac' already exists, ignoring this one.
I have also tried another line:
MediaCodec codec = MediaCodec.createByCodecName("OMX.google.aac.encoder");
And I got the same error.
Did I miss any initialization steps before using MediaCodec? I did not find any information about this in the official documentation.
Has anyone run into this problem?
Actually, I am trying to encode PCM to an AAC file, and I have read this post #hubeir. It seems the author made it work. I did the same thing: (1) set up the MediaCodec and feed in PCM data to get encoded frames (to do that, I read the CTS code; each encoded frame is about 371-379 bytes long); (2) add an ADTS header to each frame, then save to file. I have checked the header bit by bit, and it is correct, but the file is still not playable. So I think maybe the error log is the problem.
The following is my whole code, for reference:
MediaCodec codec = MediaCodec.createByCodecName("OMX.google.aac.encoder");
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_AAC_PROFILE,
        MediaCodecInfo.CodecProfileLevel.AACObjectELD);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, nSamplerate);
format.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, nChannels);
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
codec.start();
ByteBuffer[] inputBuffers = codec.getInputBuffers();
ByteBuffer[] outputBuffers = codec.getOutputBuffers();
boolean bEndInput = false;
boolean bEndOutput = false;
while (true) {
    if (!bEndInput) {
        int inputBufferIndex = codec.dequeueInputBuffer(0);
        if (inputBufferIndex >= 0) {
            int nLen = app.readPCM(nHandle, inputBuffers[inputBufferIndex]); //This line read PCM, return 0 if end of data.
            int nBufLen = inputBuffers[inputBufferIndex].capacity();
            if (nLen == nBufLen)
                codec.queueInputBuffer(inputBufferIndex, 0, nLen, 0, MediaCodec.BUFFER_FLAG_SYNC_FRAME);
            else if (nLen < nBufLen) {
                codec.queueInputBuffer(inputBufferIndex, 0, nLen, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                bEndInput = true;
                break;
            }
        }
    }
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    if (!bEndOutput) {
        int outputBufferIndex = codec.dequeueOutputBuffer(info, 0);
        if (outputBufferIndex >= 0) {
            int outBitsSize = info.size;
            Log.d("test", "Offset:" + info.offset);
            Log.d("test", "Size:" + info.size);
            Log.d("test", "Time:" + info.presentationTimeUs);
            Log.d("test", "Flags:" + info.flags);
            if (outBitsSize <= 10) {
                codec.releaseOutputBuffer(outputBufferIndex, false /* render */);
                continue;
            }
            int outPacketSize = outBitsSize + 7; // 7 is ADTS size
            ByteBuffer outBuf = outputBuffers[outputBufferIndex];
            outBuf.position(info.offset);
            outBuf.limit(info.offset + outBitsSize);
            try {
                byte[] data = new byte[outPacketSize]; // space for ADTS header included
                addADTStoPacket(data, outPacketSize);
                outBuf.get(data, 7, outBitsSize);
                outBuf.position(info.offset);
                outputStream.write(data, 0, outPacketSize); // open FileOutputStream beforehand
            } catch (IOException e) {
                Log.e("test", "failed writing bitstream data to file");
                e.printStackTrace();
            }
            outBuf.clear();
            codec.releaseOutputBuffer(outputBufferIndex, false /* render */);
            Log.d("test", "dequeued " + outBitsSize + " bytes of output data.");
            Log.d("test", "wrote " + outPacketSize + " bytes into output file.");
            if (info.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                bEndOutput = true;
                //break;
            }
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = codec.getOutputBuffers();
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        }
    }
    if (bEndInput && bEndOutput)
        break;
}
I figured it out.
(1) The wrong setting in the media format:
format.setInteger(MediaFormat.KEY_AAC_PROFILE,
        MediaCodecInfo.CodecProfileLevel.AACObjectELD);
should be
format.setInteger(MediaFormat.KEY_AAC_PROFILE,
        MediaCodecInfo.CodecProfileLevel.AACObjectLC);
(2) An encoded frame should be written to the file (with its ADTS header) only when (info.flags == 0), so that codec-config and end-of-stream buffers are skipped.
(3) The output file name suffix should be "aac"; "mp4" or "m4a" may not work for some applications.
MediaCodec codec = MediaCodec.createByCodecName("OMX.google.aac.encoder");
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_AAC_PROFILE,
        MediaCodecInfo.CodecProfileLevel.AACObjectLC); // fixed version
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, nSamplerate);
format.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, nChannels);
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
codec.start();
ByteBuffer[] inputBuffers = codec.getInputBuffers();
ByteBuffer[] outputBuffers = codec.getOutputBuffers();
boolean bEndInput = false;
boolean bEndOutput = false;
while (true) {
    if (!bEndInput) {
        int inputBufferIndex = codec.dequeueInputBuffer(0);
        if (inputBufferIndex >= 0) {
            int nLen = app.readPCM(nHandle, inputBuffers[inputBufferIndex]); //This line read PCM, return 0 if end of data.
            int nBufLen = inputBuffers[inputBufferIndex].capacity();
            if (nLen == nBufLen)
                codec.queueInputBuffer(inputBufferIndex, 0, nLen, 0, MediaCodec.BUFFER_FLAG_SYNC_FRAME);
            else if (nLen < nBufLen) {
                codec.queueInputBuffer(inputBufferIndex, 0, nLen, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                bEndInput = true;
                break;
            }
        }
    }
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    if (!bEndOutput) {
        int outputBufferIndex = codec.dequeueOutputBuffer(info, 0);
        if (outputBufferIndex >= 0) {
            int outBitsSize = info.size;
            Log.d("test", "Offset:" + info.offset);
            Log.d("test", "Size:" + info.size);
            Log.d("test", "Time:" + info.presentationTimeUs);
            Log.d("test", "Flags:" + info.flags);
            if (info.flags != 0) { // fixed version
                codec.releaseOutputBuffer(outputBufferIndex, false /* render */);
                continue;
            }
            int outPacketSize = outBitsSize + 7; // 7 is ADTS size
            ByteBuffer outBuf = outputBuffers[outputBufferIndex];
            outBuf.position(info.offset);
            outBuf.limit(info.offset + outBitsSize);
            try {
                byte[] data = new byte[outPacketSize]; // space for ADTS header included
                addADTStoPacket(data, outPacketSize);
                outBuf.get(data, 7, outBitsSize);
                outBuf.position(info.offset);
                outputStream.write(data, 0, outPacketSize); // open FileOutputStream beforehand
            } catch (IOException e) {
                Log.e("test", "failed writing bitstream data to file");
                e.printStackTrace();
            }
            outBuf.clear();
            codec.releaseOutputBuffer(outputBufferIndex, false /* render */);
            Log.d("test", "dequeued " + outBitsSize + " bytes of output data.");
            Log.d("test", "wrote " + outPacketSize + " bytes into output file.");
            if (info.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                bEndOutput = true;
                //break;
            }
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = codec.getOutputBuffers();
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        }
    }
    if (bEndInput && bEndOutput)
        break;
}
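For completeness: addADTStoPacket is referenced but not shown in the post. A common implementation looks like the sketch below; it hard-codes AAC-LC, the 44.1 kHz sample-rate index, and 2 channels, so profile, freqIdx and chanCfg must be adapted to match nSamplerate and nChannels:
private void addADTStoPacket(byte[] packet, int packetLen) {
    int profile = 2;  // AAC LC
    int freqIdx = 4;  // 44100 Hz
    int chanCfg = 2;  // 2 channels
    // Fill in the 7-byte ADTS header (no CRC); packetLen includes the header.
    packet[0] = (byte) 0xFF;
    packet[1] = (byte) 0xF9;
    packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
    packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11));
    packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
    packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
    packet[6] = (byte) 0xFC;
}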
Can you please check media_codecs_xxxx.xml, found under the device folder (note: xxxx normally depends on your HW platform)? There you may have both OMX.qcom.audio.decoder.aac and OMX.google.aac.encoder; please use one of them and comment out the other.
Please refer to https://source.android.com/devices/media.html (Exposing Codecs to the Framework); this will help you.
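If you just want to see which components are registered without touching system files, you can also dump every codec that claims the AAC MIME type from Java; a small diagnostic sketch using the same legacy MediaCodecList API as the rest of the code on this page:
// List every registered codec that handles audio/mp4a-latm.
for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
    MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
    for (String type : info.getSupportedTypes()) {
        if (type.equalsIgnoreCase("audio/mp4a-latm")) {
            Log.i("CodecDump", (info.isEncoder() ? "encoder: " : "decoder: ") + info.getName());
        }
    }
}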
Interesting. Some time ago I hit a similar problem on some devices: an AAC decoder was created instead of an encoder when MediaCodec.createEncoderByType() was used. To work around this I used:
String codecName = selectEncoder(mime);
mediaCodec = MediaCodec.createByCodecName(codecName);
and
private String selectEncoder(String mime) {
    for (int index = 0; index < MediaCodecList.getCodecCount(); index++) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(index);
        if (!codecInfo.isEncoder()) {
            continue;
        }
        for (String type : codecInfo.getSupportedTypes()) {
            if (type.equalsIgnoreCase(mime)) {
                return codecInfo.getName();
            }
        }
    }
    return null;
}
Something like that.
I am using MediaCodec to convert a .wav file to .amr. I used the following code to feed the input and fetch the encoded buffers. I get an encoded file, but it does not play; my input is correct, as I am able to play that file in Audacity. I am using EncodeDecodeTest.java from the Android API tests. Any pointers to the potential problem are appreciated.
MediaCodec codec = MediaCodec.createByCodecName(componentName);
try {
    codec.configure(
            format,
            null /* surface */,
            null /* crypto */,
            MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (IllegalStateException e) {
    AppLog.logString("codec '" + componentName + "' failed configuration.");
    // assertTrue("codec '" + componentName + "' failed configuration.", false);
}
codec.start();
ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
int numBytesSubmitted = 0;
boolean doneSubmittingInput = false;
int numBytesDequeued = 0;
while (true) {
    int index = 0;
    if (!doneSubmittingInput) {
        index = codec.dequeueInputBuffer(kTimeoutUs /* timeoutUs */);
        if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
            if (numBytesSubmitted >= filesize) {
                codec.queueInputBuffer(
                        index,
                        0 /* offset */,
                        0 /* size */,
                        0 /* timeUs */,
                        MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                if (VERBOSE) {
                    AppLog.logString("queued input EOS.");
                }
                doneSubmittingInput = true;
            } else {
                int inputsize = 0;
                int size = 0;
                if ((filesize - numBytesSubmitted) > codecInputBuffers[index].limit()) {
                    inputsize = codecInputBuffers[index].limit();
                } else
                    inputsize = (int) (filesize - numBytesSubmitted);
                size = queueInputBuffer(codec, codecInputBuffers, index, in, inputsize);
                numBytesSubmitted += size;
                /*if (VERBOSE) {
                    AppLog.logString("queued " + size + " bytes of input data.");
                }*/
            }
        } else
            AppLog.logString("MediaCodec.INFO_TRY_AGAIN_LATER.");
    }
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    index = codec.dequeueOutputBuffer(info, kTimeoutUs /* timeoutUs */);
    if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
    } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        AppLog.logString("INFO_OUTPUT_FORMAT_CHANGED");
        MediaFormat fmt = codec.getOutputFormat();
        AppLog.logString(fmt.getString(MediaFormat.KEY_MIME));
    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        codecOutputBuffers = codec.getOutputBuffers();
    } else {
        dequeueOutputBuffer(codec, codecOutputBuffers, index, info, fos);
        numBytesDequeued += info.size;
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            if (VERBOSE) {
                AppLog.logString("dequeued output EOS.");
            }
            break;
        }
        if (VERBOSE) {
            AppLog.logString("dequeued " + info.size + " bytes of output data.");
        }
    }
}
if (VERBOSE) {
    AppLog.logString("queued a total of " + numBytesSubmitted + " bytes, "
            + "dequeued " + numBytesDequeued + " bytes.");
}
codec.release();
codec = null;
queueInputBuffer and dequeueOutputBuffer are implemented as follows:
private int queueInputBuffer(
        MediaCodec codec, ByteBuffer[] inputBuffers, int index, FileInputStream in, int size) {
    ByteBuffer buffer = inputBuffers[index];
    buffer.clear();
    // int size = buffer.limit();
    AppLog.logString("Size is :" + size);
    byte[] data = new byte[size];
    try {
        in.read(data, 0, size);
    } catch (IOException e) {
        e.printStackTrace();
    }
    buffer.put(data);
    buffer.flip();
    codec.queueInputBuffer(index, 0 /* offset */, size, 0 /* timeUs */, MediaCodec.BUFFER_FLAG_SYNC_FRAME);
    return size;
}

private void dequeueOutputBuffer(
        MediaCodec codec, ByteBuffer[] outputBuffers,
        int index, MediaCodec.BufferInfo info, FileOutputStream fos) {
    byte[] data = new byte[info.size];
    ByteBuffer out = outputBuffers[index];
    out.get(data);
    try {
        fos.write(data);
    } catch (IOException e) {
        e.printStackTrace();
    }
    codec.releaseOutputBuffer(index, false /* render */);
}
You can use MediaRecorder to record a stream directly to AAC but there doesn't seem to be a way to encode an existing PCM/WAV file to AAC. The ability to encode to AAC exists natively in Android and I'd like to use that. Is there no way to do it with a pre-existing audio file?
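(For context, the MediaRecorder path mentioned above looks roughly like the sketch below; it only encodes a live capture source such as the microphone, which is why it cannot transcode a pre-existing file:)
// Sketch: MediaRecorder encodes a live source straight to AAC-in-MP4.
MediaRecorder recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
recorder.setOutputFile("/sdcard/recorded.m4a"); // example path
recorder.prepare();                             // throws IOException
recorder.start();
// ... record, then:
recorder.stop();
recorder.release();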
Look at this beautiful (and perfectly working) example:
Mp4ParserSample
Look at the final part of the class (rows 335-442): the convert Runnable object does the job. You have to shape that code to your needs, adjusting the input and output file paths and the conversion parameters (sampling rate, bit rate, etc.).
public static final String AUDIO_RECORDING_FILE_NAME = "audio_Capturing-190814-034638.422.wav"; // Input PCM file
public static final String COMPRESSED_AUDIO_FILE_NAME = "convertedmp4.m4a"; // Output MP4/M4A file
public static final String COMPRESSED_AUDIO_FILE_MIME_TYPE = "audio/mp4a-latm";
public static final int COMPRESSED_AUDIO_FILE_BIT_RATE = 64000; // 64kbps
public static final int SAMPLING_RATE = 48000;
public static final int BUFFER_SIZE = 48000;
public static final int CODEC_TIMEOUT_IN_MS = 5000;
String LOGTAG = "CONVERT AUDIO";

Runnable convert = new Runnable() {
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
    @Override
    public void run() {
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_BACKGROUND);
        try {
            String filePath = Environment.getExternalStorageDirectory().getPath() + "/" + AUDIO_RECORDING_FILE_NAME;
            File inputFile = new File(filePath);
            FileInputStream fis = new FileInputStream(inputFile);
            File outputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/" + COMPRESSED_AUDIO_FILE_NAME);
            if (outputFile.exists()) outputFile.delete();
            MediaMuxer mux = new MediaMuxer(outputFile.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            MediaFormat outputFormat = MediaFormat.createAudioFormat(COMPRESSED_AUDIO_FILE_MIME_TYPE, SAMPLING_RATE, 1);
            outputFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
            outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, COMPRESSED_AUDIO_FILE_BIT_RATE);
            outputFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 16384);
            MediaCodec codec = MediaCodec.createEncoderByType(COMPRESSED_AUDIO_FILE_MIME_TYPE);
            codec.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            codec.start();
            ByteBuffer[] codecInputBuffers = codec.getInputBuffers(); // Note: Array of buffers
            ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
            MediaCodec.BufferInfo outBuffInfo = new MediaCodec.BufferInfo();
            byte[] tempBuffer = new byte[BUFFER_SIZE];
            boolean hasMoreData = true;
            double presentationTimeUs = 0;
            int audioTrackIdx = 0;
            int totalBytesRead = 0;
            int percentComplete = 0;
            do {
                int inputBufIndex = 0;
                while (inputBufIndex != -1 && hasMoreData) {
                    inputBufIndex = codec.dequeueInputBuffer(CODEC_TIMEOUT_IN_MS);
                    if (inputBufIndex >= 0) {
                        ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                        dstBuf.clear();
                        int bytesRead = fis.read(tempBuffer, 0, dstBuf.limit());
                        Log.e("bytesRead", "Read " + bytesRead);
                        if (bytesRead == -1) { // -1 implies EOS
                            hasMoreData = false;
                            codec.queueInputBuffer(inputBufIndex, 0, 0, (long) presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        } else {
                            totalBytesRead += bytesRead;
                            dstBuf.put(tempBuffer, 0, bytesRead);
                            codec.queueInputBuffer(inputBufIndex, 0, bytesRead, (long) presentationTimeUs, 0);
                            presentationTimeUs = 1000000L * (totalBytesRead / 2) / SAMPLING_RATE;
                        }
                    }
                }
                // Drain audio
                int outputBufIndex = 0;
                while (outputBufIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    outputBufIndex = codec.dequeueOutputBuffer(outBuffInfo, CODEC_TIMEOUT_IN_MS);
                    if (outputBufIndex >= 0) {
                        ByteBuffer encodedData = codecOutputBuffers[outputBufIndex];
                        encodedData.position(outBuffInfo.offset);
                        encodedData.limit(outBuffInfo.offset + outBuffInfo.size);
                        if ((outBuffInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 && outBuffInfo.size != 0) {
                            codec.releaseOutputBuffer(outputBufIndex, false);
                        } else {
                            mux.writeSampleData(audioTrackIdx, codecOutputBuffers[outputBufIndex], outBuffInfo);
                            codec.releaseOutputBuffer(outputBufIndex, false);
                        }
                    } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        outputFormat = codec.getOutputFormat();
                        Log.v(LOGTAG, "Output format changed - " + outputFormat);
                        audioTrackIdx = mux.addTrack(outputFormat);
                        mux.start();
                    } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        Log.e(LOGTAG, "Output buffers changed during encode!");
                    } else if (outputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // NO OP
                    } else {
                        Log.e(LOGTAG, "Unknown return code from dequeueOutputBuffer - " + outputBufIndex);
                    }
                }
                percentComplete = (int) Math.round(((float) totalBytesRead / (float) inputFile.length()) * 100.0);
                Log.v(LOGTAG, "Conversion % - " + percentComplete);
            } while (outBuffInfo.flags != MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            fis.close();
            mux.stop();
            mux.release();
            Log.v(LOGTAG, "Compression done ...");
        } catch (FileNotFoundException e) {
            Log.e(LOGTAG, "File not found!", e);
        } catch (IOException e) {
            Log.e(LOGTAG, "IO exception!", e);
        }
        //mStop = false;
        // Notify UI thread...
    }
};
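One design note on the sample above: the presentationTimeUs formula assumes 16-bit mono PCM, i.e. two bytes per sample. A quick sanity check of the arithmetic:
// 96000 bytes of 16-bit mono PCM at 48 kHz = 48000 samples = exactly 1 second.
long totalBytesRead = 96000;
long presentationTimeUs = 1000000L * (totalBytesRead / 2) / 48000; // 1000000 us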
You can get your hands dirty with native code and use the IOMX C++ interface to the decoders in the framework, but this is build-sensitive and will not work across other phones and Android flavours.
Another option is to port an open-source AAC encoder such as FFmpeg and write an app over it via JNI. That will at least work on phones with the same architecture (ARM9, Cortex-A8, ...).
Jelly Bean has MediaCodec just to fulfill your wishes, but the install base of devices with JB will stay lean for some more time.
http://developer.android.com/about/versions/android-4.1.html#Multimedia
I think you can use this library.
https://github.com/timsu/android-aac-enc
http://betaful.com/post/82668810035/encoding-aac-audio-in-android