I'm working on a screen recorder app. On Android 11, when the app is in the back stack and screen recording starts, only the screen-casting indicator is shown; the "app is using microphone" message never appears. On stopping the recording, the result only shows a thumbnail, and the video will not play because the file size is too small. This error occurs only on Android 11; other OS versions work fine. This is my recording code:
public void startRecording() {
synchronized (sSync) {
Log.i(TAG, "startRecording: inside synchronized");
if (mMuxer == null) {
Log.i(TAG, "startRecording: muxer is null");
getScreenSize();
mMediaProjection = mMediaProjectionManager.getMediaProjection(mScreenCaptureResultCode, mScreenCaptureIntent);
DisplayManager dm = (DisplayManager) Objects.requireNonNull(MyApplication.Companion.getApplication()).getSystemService(Context.DISPLAY_SERVICE);
Display defaultDisplay;
if (dm != null) {
Log.i(TAG, "startRecording: dim is not null");
defaultDisplay = dm.getDisplay(Display.DEFAULT_DISPLAY);
} else {
throw new IllegalStateException("Cannot display manager?!?");
}
if (defaultDisplay == null) {
throw new RuntimeException("No display found.");
}
try {
mMuxer = new MediaMuxerWrapper(context, ".mp4"); // if you record audio only, ".m4a" is also OK.
if (true) {
VideoSetting videoSetting = SettingManager.getVideoProfile(context);
Log.i(TAG, "Video config: " + videoSetting);
mCurrentVideoSetting = videoSetting;
List<RenderUtil.CustomDecorator> decors = createDecorators();
// new TestMediaScreenEncoderHard(mMuxer, mMediaEncoderListener, mMediaProjection, mCurrentVideoSetting, mScreenDensity, decors);
if (MyUtils.isRunningOnEmulator()) {
new MediaScreenEncoder(mMuxer, mMediaEncoderListener, mMediaProjection, mCurrentVideoSetting, mScreenDensity, decors);
} else {
new MediaScreenEncoderHard(mMuxer, mMediaEncoderListener, mMediaProjection, mCurrentVideoSetting, mScreenDensity, decors);
}
}
if (true) {
// for audio capturing
new MediaAudioEncoder(mMuxer, mMediaEncoderListener);
Log.i(TAG, "startRecording: audio capture");
}
mMuxer.prepare();
mMuxer.startRecording();
} catch (final IOException e) {
Log.e(TAG, "startScreenRecord:", e);
}
}
}
}
This is my MediaAudioEncoder class:
public class MediaAudioEncoder extends MediaEncoder {
public static final int SAMPLES_PER_FRAME = 1024; // AAC, bytes/frame/channel
public static final int FRAMES_PER_BUFFER = 25; // AAC, frame/buffer/sec
private static final String TAG = MediaAudioEncoder.class.getSimpleName();
private static final String MIME_TYPE = "audio/mp4a-latm";
private static final int SAMPLE_RATE = 44100; // 44.1[KHz] is only setting guaranteed to be available on all devices.
private static final int BIT_RATE = 64000;
private static final int[] AUDIO_SOURCES = new int[]{
MediaRecorder.AudioSource.CAMCORDER,
MediaRecorder.AudioSource.MIC,
MediaRecorder.AudioSource.DEFAULT,
MediaRecorder.AudioSource.VOICE_COMMUNICATION,
MediaRecorder.AudioSource.VOICE_RECOGNITION,
};
private AudioThread mAudioThread = null;
public MediaAudioEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
super(muxer, listener);
}
/**
 * Select the first codec that matches a specific MIME type.
 *
 * @param mimeType the MIME type to match
 * @return the first matching encoder's MediaCodecInfo, or null if none is found
 */
private static final MediaCodecInfo selectAudioCodec(final String mimeType) {
// if (DEBUG) Log.i(TAG, "selectAudioCodec:");
MediaCodecInfo result = null;
// get the list of available codecs
final int numCodecs = MediaCodecList.getCodecCount();
LOOP:
for (int i = 0; i < numCodecs; i++) {
final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) { // skip decoders
continue;
}
final String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
// if (DEBUG) Log.i(TAG, "supportedType:" + codecInfo.getName() + ",MIME=" + types[j]);
if (types[j].equalsIgnoreCase(mimeType)) {
if (result == null) {
result = codecInfo;
break LOOP;
}
}
}
}
return result;
}
@Override
public void prepare() throws IOException {
// if (DEBUG) Log.i(TAG, "prepare:");
mTrackIndex = -1;
mMuxerStarted = mIsEOS = false;
// prepare MediaCodec for AAC encoding of audio data from the internal mic.
final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
if (audioCodecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
return;
}
// if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());
final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
// if (DEBUG) Log.i(TAG, "format: " + audioFormat);
mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
// if (DEBUG) Log.i(TAG, "prepare finishing");
if (mListener != null) {
try {
mListener.onPrepared(this);
} catch (final Exception e) {
Log.e(TAG, "prepare:", e);
}
}
}
@Override
public void startRecording() {
super.startRecording();
// create and execute audio capturing thread using internal mic
if (mAudioThread == null) {
mAudioThread = new AudioThread();
mAudioThread.start();
}
}
@Override
protected void release() {
mAudioThread = null;
super.release();
}
/**
* Thread to capture audio data from internal mic as uncompressed 16bit PCM data
* and write them to the MediaCodec encoder
*/
private class AudioThread extends Thread {
@SuppressLint("MissingPermission")
@Override
public void run() {
try {
final int min_buffer_size = AudioRecord.getMinBufferSize(
SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
if (buffer_size < min_buffer_size)
buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
AudioRecord audioRecord = null;
for (final int source : AUDIO_SOURCES) {
try {
audioRecord = new AudioRecord(
source, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
if (audioRecord != null) {
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
audioRecord.release();
audioRecord = null;
}
}
} catch (final Exception e) {
audioRecord = null;
}
if (audioRecord != null) break;
}
if (audioRecord != null) {
try {
if (mIsEncoding) {
// if (DEBUG) Log.i(TAG, "AudioThread:start audio recording");
final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
int readBytes;
audioRecord.startRecording();
try {
for (; mIsEncoding && !mRequestStop && !mIsEOS; ) {
// read audio data from internal mic
buf.clear();
readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
if (readBytes > 0) {
// set audio data to encoder
buf.position(readBytes);
buf.flip();
encode(buf, readBytes, getPTSUs());
frameAvailableSoon();
}
}
frameAvailableSoon();
} finally {
audioRecord.stop();
}
}
} finally {
audioRecord.release();
}
} else {
Log.e(TAG, "failed to initialize AudioRecord");
}
} catch (final Exception e) {
Log.e(TAG, "AudioThread#run", e);
}
// if (DEBUG) Log.i(TAG, "AudioThread:finished");
}
}
}
These are my error logs:
2022-06-09 09:40:56.785 21415-21616/com.example.recorderscreen E/IAudioFlinger: createRecord returned error -1
2022-06-09 09:40:56.785 21415-21616/com.example.recorderscreen E/AudioRecord: createRecord_l(436738312): AudioFlinger could not create record track, status: -1
2022-06-09 09:40:56.785 21415-21616/com.example.recorderscreen E/AudioRecord-JNI: Error creating AudioRecord instance: initialization check failed with status -1.
2022-06-09 09:40:56.785 21415-21616/com.example.recorderscreen E/android.media.AudioRecord: Error code -20 when initializing native AudioRecord object.
2022-06-09 09:40:56.785 21415-21616/com.example.recorderscreen E/MediaAudioEncoder: failed to initialize AudioRecord
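For context: one plausible (unconfirmed) cause of this AudioFlinger failure is Android 11's restriction on microphone access for apps that are not in the foreground; both screen capture and mic capture are expected to run inside a foreground service that declares the matching service types. A minimal sketch of the service-side call, assuming the recorder is hosted in such a service and the manifest declares android:foregroundServiceType="mediaProjection|microphone":

// In the recording service, before creating the MediaProjection/AudioRecord.
// NOTIFICATION_ID and notification are placeholders; FOREGROUND_SERVICE_TYPE_MICROPHONE requires API 30.
startForeground(NOTIFICATION_ID, notification,
        ServiceInfo.FOREGROUND_SERVICE_TYPE_MEDIA_PROJECTION
                | ServiceInfo.FOREGROUND_SERVICE_TYPE_MICROPHONE);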
I need to create videos with data hidden in them. I managed to extract video frames as NV21 buffers using a MediaCodec decoder and save them; then I create an mp4 file from the frames using a MediaCodec encoder.
The class below is responsible for saving the frame files during the encode process, or for checking the hidden value when we want to extract data from the stego video.
public class ExtractMpegFramesBufferDecoder {
private static final String TAG = "ExtractMpegFramesDec";
private static final boolean VERBOSE = true; // lots of logging
// where to find files (note: requires WRITE_EXTERNAL_STORAGE permission)
private File STORE_FRAME_DIRECTORY;
private String INPUT_FILE;
private int frameRate; // stop extracting after this many
private int saveWidth;
private int saveHeight;
private int decodeCount;
private Handler _progressBarHandler;
private int duration;
//
private int MAX_FRAMES;
private boolean fromDecode;
//
public ExtractMpegFramesBufferDecoder(File storeFrameDirectory, String inputVideoPath, int frameRate
, int saveWidth, int saveHeight
, double duration, int rotation
, Handler _progressBarHandler) {
this.STORE_FRAME_DIRECTORY = storeFrameDirectory;
this.INPUT_FILE = inputVideoPath;
this.frameRate = frameRate;
this.saveWidth = saveWidth;
this.saveHeight = saveHeight;
this._progressBarHandler = _progressBarHandler;
this.duration = (int) duration;
}
/**
 * Extracts frames from an MP4 file as raw NV21 buffers.
 * <p>
 * Depending on the fromDecode flag, frames are either written to disk
 * (encode path) or inspected for the hidden marker value (decode path).
 */
public void extractMpegFrames(int maxFrame, boolean fromDecode) throws IOException {
MediaCodec decoder = null;
MediaExtractor extractor = null;
MAX_FRAMES = maxFrame;
this.fromDecode = fromDecode;
try {
File inputFile = new File(INPUT_FILE); // must be an absolute path
// The MediaExtractor error messages aren't very useful. Check to see if the input
// file exists so we can throw a better one if it's not there.
if (!inputFile.canRead()) {
throw new FileNotFoundException("Unable to read " + inputFile);
}
extractor = new MediaExtractor();
extractor.setDataSource(inputFile.toString());
int trackIndex = selectTrack(extractor);
if (trackIndex < 0) {
throw new RuntimeException("No video track found in " + inputFile);
}
extractor.selectTrack(trackIndex);
MediaFormat format = extractor.getTrackFormat(trackIndex);
if (VERBOSE) {
Log.d(TAG, "Video size is " + format.getInteger(MediaFormat.KEY_WIDTH) + "x" +
format.getInteger(MediaFormat.KEY_HEIGHT));
}
// Create a MediaCodec decoder, and configure it with the MediaFormat from the
// extractor. It's very important to use the format from the extractor because
// it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
String mime = format.getString(MediaFormat.KEY_MIME);
decoder = MediaCodec.createDecoderByType(mime);
decoder.configure(format, null, null, 0);
decoder.start();
doExtract(extractor, trackIndex, decoder);
} finally {
if (decoder != null) {
decoder.stop();
decoder.release();
decoder = null;
}
if (extractor != null) {
extractor.release();
extractor = null;
}
}
}
/**
* Selects the video track, if any.
*
* @return the track index, or -1 if no video track is found.
*/
private int selectTrack(MediaExtractor extractor) {
// Select the first video track we find, ignore the rest.
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
if (VERBOSE) {
Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
}
return i;
}
}
return -1;
}
/**
* Work loop.
*/
public void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder) throws IOException {
final int TIMEOUT_USEC = 10000;
ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int inputChunk = 0;
decodeCount = 0;
long frameSaveTime = 0;
boolean outputDone = false;
boolean inputDone = false;
ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers();
MediaFormat decoderOutputFormat = null;
long rawSize = 0;
while (!outputDone) {
if (VERBOSE) Log.d(TAG, "loop");
// Feed more data to the decoder.
if (!inputDone) {
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
// Read the sample data into the ByteBuffer. This neither respects nor
// updates inputBuf's position, limit, etc.
int chunkSize = extractor.readSampleData(inputBuf, 0);
if (chunkSize < 0) {
// End of stream -- send empty frame with EOS flag set.
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
if (VERBOSE) Log.d(TAG, "sent input EOS");
} else {
if (extractor.getSampleTrackIndex() != trackIndex) {
Log.w(TAG, "WEIRD: got sample from track " +
extractor.getSampleTrackIndex() + ", expected " + trackIndex);
}
long presentationTimeUs = extractor.getSampleTime();
decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
presentationTimeUs, 0 /*flags*/);
if (VERBOSE) {
Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
chunkSize);
}
inputChunk++;
extractor.advance();
}
} else {
if (VERBOSE) Log.d(TAG, "input buffer not available");
}
}
if (!outputDone) {
int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (VERBOSE) Log.d(TAG, "no output from decoder available");
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// output buffers changed; refresh our reference since we read from them directly
if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
decoderOutputBuffers = decoder.getOutputBuffers();
} else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = decoder.getOutputFormat();
decoderOutputFormat = newFormat;
if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
} else if (decoderStatus < 0) {
Log.e(TAG, "unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
} else { // decoderStatus >= 0
if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
" (size=" + info.size + ")");
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "output EOS");
outputDone = true;
}
ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];
outputFrame.position(info.offset);
outputFrame.limit(info.offset + info.size);
rawSize += info.size;
if (info.size == 0) {
if (VERBOSE) Log.d(TAG, "got empty frame");
} else {
// if it's decode then check the altered value
// else save the frames
if (fromDecode) {
outputFrame.rewind();
byte[] data = new byte[outputFrame.remaining()];
outputFrame.get(data);
int size = saveWidth * saveHeight;
int offset = size;
int[] pixels = new int[size];
int u, v, y1, y2, y3, y4;
int uvIndex = 0;
if (decodeCount == 1) {
// i iterates over the Y samples (and the final pixels)
// k iterates over the interleaved U and V samples
for (int i = 0, k = 0; i < size; i += 2, k += 2) {
y1 = data[i] & 0xff;
y2 = data[i + 1] & 0xff;
y3 = data[saveWidth + i] & 0xff;
y4 = data[saveWidth + i + 1] & 0xff;
u = data[offset + k] & 0xff;
v = data[offset + k + 1] & 0xff;
// getting size
if (uvIndex == 0) {
int specialByte1P1 = u & 15;
int specialByte1P2 = v & 15;
int specialCharacter1 = (specialByte1P1 << 4) | specialByte1P2;
if (specialCharacter1 != 17) {
throw new IllegalArgumentException("value has changed");
}
}
uvIndex++;
if (i != 0 && (i + 2) % saveWidth == 0)
i += saveWidth;
}
}
} else {
outputFrame.rewind();
byte[] data = new byte[outputFrame.remaining()];
outputFrame.get(data);
try {
File outputFile = new File(STORE_FRAME_DIRECTORY,
String.format(Locale.US, "frame_%d.frame", decodeCount));
FileOutputStream stream = new FileOutputStream(outputFile.getAbsoluteFile());
stream.write(data);
stream.close();
} catch (IOException e1) {
e1.printStackTrace();
}
}
decodeCount++;
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "output EOS");
outputDone = true;
}
decoder.releaseOutputBuffer(decoderStatus, false);
}
}
}
int numSaved = Math.min(frameRate, decodeCount);
if (numSaved > 0) {
Log.d(TAG, "Saving " + numSaved + " frames took " +
(frameSaveTime / numSaved / 1000) + " us per frame");
}
}
public int getDecodeCount() {
return decodeCount;
}
}
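A hypothetical invocation of this class (the directory, file path, sizes, and handler names are placeholders; both calls throw IOException):

ExtractMpegFramesBufferDecoder dec = new ExtractMpegFramesBufferDecoder(
        framesDir, "/sdcard/input.mp4", 30, 640, 480, durationSec, 0, progressHandler);
dec.extractMpegFrames(120, false); // encode path: dump up to 120 raw NV21 .frame files
dec.extractMpegFrames(2, true);    // decode path: throws if the hidden marker was altered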
In the class below I encode the frames, alter one U/V pair of frame 1 (storing the number 17 in the LSBs of the first U and V values), and build the mp4 using a MediaCodec encoder.
public class YUVFrameBufferToVideoEncoder {
private static final String TAG = YUVFrameBufferToVideoEncoder.class.getSimpleName();
private static final int ERROR_IN_PROCESS = 0;
private IBitmapToVideoEncoderCallback mCallback;
private File mOutputFile;
private Queue<File> mEncodeQueue = new ConcurrentLinkedQueue();
private MediaCodec mediaCodec;
private MediaMuxer mediaMuxer;
private Object mFrameSync = new Object();
private CountDownLatch mNewFrameLatch;
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static int mWidth;
private static int mHeight;
private static int BIT_RATE;
private static int FRAME_RATE; // Frames per second
private int frameCount;
private Handler _progressBarHandler;
private Handler _processHandler;
private static final int I_FRAME_INTERVAL = 1;
private int mGenerateIndex = 0;
private int mTrackIndex;
private boolean mNoMoreFrames = false;
private boolean mAbort = false;
//
private byte[] dataToHide;
public interface IBitmapToVideoEncoderCallback {
void onEncodingComplete(File outputFile);
}
public YUVFrameBufferToVideoEncoder(IBitmapToVideoEncoderCallback callback) {
mCallback = callback;
}
public boolean isEncodingStarted() {
return (mediaCodec != null) && (mediaMuxer != null) && !mNoMoreFrames && !mAbort;
}
public int getActiveBitmaps() {
return mEncodeQueue.size();
}
public boolean startEncoding(int width, int height, int fps, int bitrate, int frameCount
, byte[] dataToHide, Handler _progressBarHandler, Handler _processHandler
, File outputFile) {
mWidth = width;
mHeight = height;
FRAME_RATE = fps;
BIT_RATE = bitrate;
this.frameCount = frameCount;
this._progressBarHandler = _progressBarHandler;
this._processHandler = _processHandler;
mOutputFile = outputFile;
this.dataToHide = dataToHide;
String outputFileString;
try {
outputFileString = outputFile.getCanonicalPath();
} catch (IOException e) {
Log.e(TAG, "Unable to get path for " + outputFile);
ErrorManager.getInstance().addErrorMessage("Unable to get path for " + outputFile);
return false;
}
MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
if (codecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
ErrorManager.getInstance().addErrorMessage("Unable to find an appropriate codec for " + MIME_TYPE);
return false;
}
Log.d(TAG, "found codec: " + codecInfo.getName());
int colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
try {
mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
} catch (IOException e) {
Log.e(TAG, "Unable to create MediaCodec " + e.getMessage());
ErrorManager.getInstance().addErrorMessage("Unable to create MediaCodec " + e.getMessage());
return false;
}
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
try {
mediaMuxer = new MediaMuxer(outputFileString, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
Log.e(TAG, "MediaMuxer creation failed. " + e.getMessage());
ErrorManager.getInstance().addErrorMessage("MediaMuxer creation failed. " + e.getMessage());
return false;
}
Log.d(TAG, "Initialization complete. Starting encoder...");
Completable.fromAction(this::encode)
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe();
return true;
}
public void stopEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to stop encoding since it never started");
return;
}
Log.d(TAG, "Stopping encoding");
mNoMoreFrames = true;
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void abortEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to abort encoding since it never started");
return;
}
Log.d(TAG, "Aborting encoding");
mNoMoreFrames = true;
mAbort = true;
mEncodeQueue = new ConcurrentLinkedQueue(); // Drop all frames
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
public void queueFrame(File frame) {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to queue frame. Encoding not started");
return;
}
Log.d(TAG, "Queueing frame");
mEncodeQueue.add(frame);
synchronized (mFrameSync) {
if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
mNewFrameLatch.countDown();
}
}
}
private void encode() {
Log.d(TAG, "Encoder started");
while (true) {
if (mNoMoreFrames && (mEncodeQueue.size() == 0)) break;
File frame = mEncodeQueue.poll();
if (frame == null) {
synchronized (mFrameSync) {
mNewFrameLatch = new CountDownLatch(1);
}
try {
mNewFrameLatch.await();
} catch (InterruptedException e) {
}
frame = mEncodeQueue.poll();
}
if (frame == null) continue;
int size = (int) frame.length();
byte[] bytesNV21 = new byte[size];
try {
BufferedInputStream buf = new BufferedInputStream(new FileInputStream(frame));
buf.read(bytesNV21, 0, bytesNV21.length);
buf.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
int offsetSize = mWidth * mHeight;
int byteNV21Offset = offsetSize;
int u, v, y1, y2, y3, y4;
//
int dataToHideLength = 0;
if (dataToHide != null)
dataToHideLength = dataToHide.length;
boolean isLastIndexInserted1 = false;
boolean isLastIndexInserted2 = false;
boolean isLastIndexInserted3 = false;
int uvIndex = 0;
int frameByteCapacity = ((mWidth * mHeight) / 4) / 20;
Log.e(TAG, "encode: dataToHideLength: " + dataToHideLength);
Log.e(TAG, "encode: frameByteCapacity: " + dataToHideLength);
//
// i iterates over the Y samples (and the final pixels)
// k iterates over the interleaved U and V samples
for (int i = 0, k = 0; i < offsetSize; i += 2, k += 2) {
y1 = bytesNV21[i] & 0xff;
y2 = bytesNV21[i + 1] & 0xff;
y3 = bytesNV21[mWidth + i] & 0xff;
y4 = bytesNV21[mWidth + i + 1] & 0xff;
u = bytesNV21[byteNV21Offset + k] & 0xff;
v = bytesNV21[byteNV21Offset + k + 1] & 0xff;
// frame 1
// altering u and v for test
if (mGenerateIndex == 1) {
int Unew = u & 240;
int Vnew = v & 240;
if (uvIndex == 0) {
// used in start and end of stego bytes
int specialByte1Integer = 17;
int specialByte1P1 = specialByte1Integer & 240;
int specialByte1P2 = specialByte1Integer & 15;
// shift p1 right 4 position
specialByte1P1 = specialByte1P1 >> 4;
u = Unew | specialByte1P1;
v = Vnew | specialByte1P2;
}
bytesNV21[byteNV21Offset + k] = (byte) u;
bytesNV21[byteNV21Offset + k + 1] = (byte) v;
}
uvIndex++;
if (i != 0 && (i + 2) % mWidth == 0)
i += mWidth;
}
long TIMEOUT_USEC = 500000;
int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
long ptsUsec = computePresentationTime(mGenerateIndex, FRAME_RATE);
if (inputBufIndex >= 0) {
final ByteBuffer inputBuffer = mediaCodec.getInputBuffers()[inputBufIndex];
inputBuffer.clear();
inputBuffer.put(bytesNV21);
mediaCodec.queueInputBuffer(inputBufIndex, 0, bytesNV21.length, ptsUsec, 0);
mGenerateIndex++;
int percentComplete = 70 + (int) ((((double) mGenerateIndex) / (frameCount)) * 30);
if (_progressBarHandler != null) {
_progressBarHandler.sendMessage(_progressBarHandler.obtainMessage(percentComplete));
}
Log.w("creatingVideo: ", "is:" + percentComplete);
}
MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
Log.e(TAG, "No output from encoder available");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = mediaCodec.getOutputFormat();
mTrackIndex = mediaMuxer.addTrack(newFormat);
mediaMuxer.start();
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else if (mBufferInfo.size != 0) {
ByteBuffer encodedData = mediaCodec.getOutputBuffers()[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
} else {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
mediaCodec.releaseOutputBuffer(encoderStatus, false);
}
}
}
release();
if (mAbort) {
mOutputFile.delete();
} else {
mCallback.onEncodingComplete(mOutputFile);
}
}
private void release() {
try {
if (mediaCodec != null) {
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
Log.d(TAG, "RELEASE CODEC");
}
if (mediaMuxer != null) {
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;
Log.d(TAG, "RELEASE MUXER");
}
} catch (Exception ignored) {
ErrorManager.getInstance().addErrorMessage("unsupported video file");
Message res = _processHandler.obtainMessage(ERROR_IN_PROCESS);
_processHandler.sendMessage(res);
}
}
private static MediaCodecInfo selectCodec(String mimeType) {
int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
private static int selectColorFormat(MediaCodecInfo codecInfo,
String mimeType) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo
.getCapabilitiesForType(mimeType);
for (int i = 0; i < capabilities.colorFormats.length; i++) {
int colorFormat = capabilities.colorFormats[i];
if (isRecognizedFormat(colorFormat)) {
return colorFormat;
}
}
return 0; // not reached
}
private static boolean isRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
return true;
default:
return false;
}
}
private long computePresentationTime(long frameIndex, int framerate) {
return 132 + frameIndex * 1000000 / framerate;
}
}
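A hypothetical call site for the encoder above (dimensions, bitrate, and file names are assumptions):

YUVFrameBufferToVideoEncoder encoder = new YUVFrameBufferToVideoEncoder(
        outFile -> Log.d("Stego", "encoding complete: " + outFile));
encoder.startEncoding(640, 480, 30, 2000000, frameFiles.size(), dataToHide,
        progressHandler, processHandler, new File(outDir, "stego.mp4"));
for (File frame : frameFiles) encoder.queueFrame(frame); // the .frame files saved by the decoder
encoder.stopEncoding(); // drains the queue and finalizes the mp4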
The output video is created successfully without any problem, but MediaCodec has changed the altered test value and I cannot retrieve it.
So here is my question: is this the right approach for doing video steganography on Android? If it is not, can you please suggest an alternative?
Steganography comes with a prerequisite: lossless encoding.
None of the codecs available on Android support lossless video encoding, as of now.
So I'm afraid your LSBs would never remain the same after encoding/decoding.
Suggestion: if you don't have too many frames, I would suggest you use a lossless format. You may encode your frames into a sequence of PNG images, as sketched below.
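A minimal sketch of that lossless route, assuming each frame is already available as an ARGB pixel array (the NV21-to-RGB conversion is omitted here); PNG compression in the Android SDK is lossless, so the LSBs survive the round trip:

import android.graphics.Bitmap;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

public final class PngFrameWriter {
    private PngFrameWriter() {}

    // Writes one ARGB frame to disk as a PNG, with no loss.
    public static void save(int[] argb, int width, int height, File out) throws IOException {
        Bitmap bmp = Bitmap.createBitmap(argb, width, height, Bitmap.Config.ARGB_8888);
        try (FileOutputStream fos = new FileOutputStream(out)) {
            bmp.compress(Bitmap.CompressFormat.PNG, 100, fos); // quality arg is ignored for PNG
        } finally {
            bmp.recycle();
        }
    }
}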
I am working on WebRTC. My task is to record audio and video before the call connects. I have recorded only video so far, using the VideoFileRenderer class. How can I record audio and mux it with the video using MediaMuxer?
This is the VideoFileRender class which I am using for recording and muxing.
package org.webrtc;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
import java.io.IOException;
import java.nio.ByteBuffer;
public class VideoFileRender implements VideoSink {
private static final String TAG = "VideoFileRender";
private final HandlerThread renderThread;
private final Handler renderThreadHandler;
private final HandlerThread audioHandlerThread;
private final Handler audioThreadHandler;
private int outputFileWidth = -1;
private int outputFileHeight = -1;
private ByteBuffer[] encoderOutputBuffers;
private ByteBuffer[] audioInputBuffers;
private ByteBuffer[] audioOutputBuffers;
private EglBase eglBase;
private EglBase.Context sharedContext;
private VideoFrameDrawer frameDrawer;
// TODO: these ought to be configurable as well
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 30; // 30fps
private static final int FRAME_INTERVAL = 5; // 5 seconds between I-frames
private MediaMuxer mediaMuxer;
private MediaCodec encoder;
private MediaCodec.BufferInfo bufferInfo, audioBufferInfo;
private int trackIndex = -1;
private int audioTrackIndex;
private boolean isRunning = true;
private GlRectDrawer drawer;
private Surface surface;
private MediaCodec audioEncoder;
private AudioThread audioThread;
// private AudioThread audioThread;
private static final int SAMPLE_RATE = 44100; // 44.1[KHz] is only setting guaranteed to be available on all devices.
private static final int BIT_RATE = 64000;
public static final int SAMPLES_PER_FRAME = 1024; // AAC, bytes/frame/channel
public static final int FRAMES_PER_BUFFER = 25; // AAC, frame/buffer/sec
public VideoFileRender(String outputFile, final EglBase.Context sharedContext, boolean withAudio) throws IOException {
renderThread = new HandlerThread(TAG + "RenderThread");
renderThread.start();
renderThreadHandler = new Handler(renderThread.getLooper());
// if (withAudio) {
audioHandlerThread = new HandlerThread(TAG + "AudioThread");
audioHandlerThread.start();
audioThreadHandler = new Handler(audioHandlerThread.getLooper());
/*} else {
audioThread = null;
audioThreadHandler = null;
}*/
bufferInfo = new MediaCodec.BufferInfo();
this.sharedContext = sharedContext;
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
mediaMuxer = new MediaMuxer(outputFile,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
audioTrackIndex = withAudio ? -1 : 0;
audioThread = new AudioThread();
}
private void initVideoEncoder() {
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, outputFileWidth, outputFileHeight);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, FRAME_INTERVAL);
// Create a MediaCodec encoder and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
try {
encoder = MediaCodec.createEncoderByType(MIME_TYPE);
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
renderThreadHandler.post(() -> {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE);
surface = encoder.createInputSurface();
eglBase.createSurface(surface);
eglBase.makeCurrent();
drawer = new GlRectDrawer();
});
} catch (Exception e) {
Log.wtf(TAG, e);
}
audioThread.start();
}
@Override
public void onFrame(VideoFrame frame) {
frame.retain();
if (outputFileWidth == -1) {
outputFileWidth = frame.getRotatedWidth();
outputFileHeight = frame.getRotatedHeight();
Log.d(TAG, "onFrame: " + outputFileWidth + " " + outputFileHeight);
initVideoEncoder();
}
renderThreadHandler.post(() -> renderFrameOnRenderThread(frame));
}
private void renderFrameOnRenderThread(VideoFrame frame) {
if (frameDrawer == null) {
frameDrawer = new VideoFrameDrawer();
}
frameDrawer.drawFrame(frame, drawer, null, 0, 0, outputFileWidth, outputFileHeight);
frame.release();
drainEncoder();
eglBase.swapBuffers();
}
/**
* Release all resources. All already posted frames will be rendered first.
*/
public void release() {
isRunning = false;
if (audioThreadHandler != null)
audioThreadHandler.post(() -> {
if (audioEncoder != null) {
audioEncoder.stop();
audioEncoder.release();
}
audioHandlerThread.quit();
audioThread = null;
});
renderThreadHandler.post(() -> {
muxerStarted = false;
if (encoder != null) {
encoder.stop();
encoder.release();
}
eglBase.release();
mediaMuxer.stop();
mediaMuxer.release();
renderThread.quit();
});
}
private boolean encoderStarted = false;
private volatile boolean muxerStarted = false;
private long videoFrameStart = 0;
private void drainEncoder() {
if (!encoderStarted) {
encoder.start();
encoderOutputBuffers = encoder.getOutputBuffers();
encoderStarted = true;
return;
}
while (true) {
int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, 10000);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = encoder.getOutputBuffers();
Log.e(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = encoder.getOutputFormat();
Log.e(TAG, "encoder output format changed: " + newFormat);
trackIndex = mediaMuxer.addTrack(newFormat);
if (trackIndex != -1 && !muxerStarted) {
mediaMuxer.start();
muxerStarted = true;
}
if (!muxerStarted)
break;
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
try {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
break;
}
// It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
encodedData.position(bufferInfo.offset);
encodedData.limit(bufferInfo.offset + bufferInfo.size);
if (videoFrameStart == 0 && bufferInfo.presentationTimeUs != 0) {
videoFrameStart = bufferInfo.presentationTimeUs;
}
bufferInfo.presentationTimeUs -= videoFrameStart;
if (muxerStarted)
mediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
isRunning = isRunning && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
encoder.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
} catch (Exception e) {
Log.wtf(TAG, e);
break;
}
}
}
}
private long presTime = 0L;
private class AudioThread extends Thread {
@Override
public void run() {
if (audioEncoder == null) {
try {
final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
audioEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
audioEncoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
audioEncoder.start();
audioInputBuffers = audioEncoder.getInputBuffers();
audioOutputBuffers = audioEncoder.getOutputBuffers();
} catch (Exception e) {
e.printStackTrace();
}
}
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
try {
final int min_buffer_size = AudioRecord.getMinBufferSize(
SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
if (buffer_size < min_buffer_size)
buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
AudioRecord audioRecord = new AudioRecord(
MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED)
audioRecord = null;
if (audioRecord != null) {
final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
int readBytes;
audioRecord.startRecording();
for (; isRunning; ) {
// read audio data from internal mic
buf.clear();
readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
if (readBytes > 0) {
// set audio data to encoder
buf.position(readBytes);
buf.flip();
encode(buf, readBytes, getPTSUs());
}
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
private void encode(ByteBuffer buffer, int length, long presentationTimeUs) {
final ByteBuffer[] inputBuffers = audioEncoder.getInputBuffers();
final int inputBufferIndex = audioEncoder.dequeueInputBuffer(10000);
if (inputBufferIndex >= 0) {
final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
if (buffer != null) {
inputBuffer.put(buffer);
}
// if (DEBUG) Log.v(TAG, "encode:queueInputBuffer");
if (length <= 0) {
audioEncoder.queueInputBuffer(inputBufferIndex, 0, 0,
presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
audioEncoder.queueInputBuffer(inputBufferIndex, 0, length,
presentationTimeUs, 0);
}
} else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// wait for MediaCodec encoder is ready to encode
// nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
// will wait for maximum TIMEOUT_USEC(10msec) on each call
}
}
private long prevOutputPTSUs = 0;
protected long getPTSUs() {
long result = System.nanoTime() / 1000L;
if (result < prevOutputPTSUs)
result = (prevOutputPTSUs - result) + result;
return result;
}
}
I expected that in this scenario an mp4 file would be created with both audio and video, but it creates an mp4 file with only video.
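For what it's worth, the class as shown never drains audioEncoder's output or registers an audio track with the muxer, which would explain the video-only file. Below is a hedged sketch of the missing step, mirroring drainEncoder() and reusing the fields above; it is an assumption about the intended design, not tested code:

private void drainAudioEncoder() {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    while (true) {
        int status = audioEncoder.dequeueOutputBuffer(info, 0);
        if (status == MediaCodec.INFO_TRY_AGAIN_LATER) {
            break; // no encoded audio pending
        } else if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            audioTrackIndex = mediaMuxer.addTrack(audioEncoder.getOutputFormat());
            // The muxer may only start once BOTH tracks are added, so the
            // video path would need the same guard before mediaMuxer.start().
            if (trackIndex != -1 && !muxerStarted) {
                mediaMuxer.start();
                muxerStarted = true;
            }
        } else if (status >= 0) {
            ByteBuffer encoded = audioEncoder.getOutputBuffers()[status];
            encoded.position(info.offset);
            encoded.limit(info.offset + info.size);
            if (muxerStarted && audioTrackIndex != -1) {
                mediaMuxer.writeSampleData(audioTrackIndex, encoded, info);
            }
            audioEncoder.releaseOutputBuffer(status, false);
        }
    }
}

Calling this periodically from the audio thread, after each encode() call, would be one way to wire it in.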
I have tried everything I can think of to improve the latency in decoding an h.264 stream from an Ethernet camera. The camera's manufacturer states that it has a minimum latency of 50ms when using their hardware to display the stream, so I know it's possible. I was also able to play the stream on my computer without any delays.
I am developing in Android, where I am receiving the UDP packets via DatagramSocket, parsing the RTP packet, assembling the NAL units, passing them to MediaCodec where its decoding the h.264 stream via hardware decoder, and finally displaying the stream on a SurfaceView.
The stream plays perfect without any problems, except that there is a delay of approximately 610ms between what is being recorded and what is displayed. This camera will be used on a vehicle, so a 610ms delay is unacceptable.
Any advice on how to improve on this latency will be greatly appreciated.
Here is my code I have adapted from various public sources:
public class RtpMediaDecoder implements SurfaceHolder.Callback {
// configuration constants
private static final int SURFACE_WIDTH = 640;
private static final int SURFACE_HEIGHT = 480;
public static final String CSD_0 = "csd-0";
public static final String CSD_1 = "csd-1";
public static final String DURATION_US = "durationUs";
public static boolean DEBUGGING = false;
private final SurfaceView surfaceView;
private PlayerThread playerThread;
private RTPClientThread rtpSessionThread;
private ByteBuffer inputBuffer;
private MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
private MediaCodec decoder;
private Log log = LogFactory.getLog(RtpMediaDecoder.class);
private final byte[] byteStreamStartCodePrefix = {(byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x01};
private boolean useByteStreamFormat = true;
private int lastSequenceNumber = 0;
private boolean lastSequenceNumberIsValid = false;
private boolean sequenceError = false;
private boolean currentFrameHasError = false;
private BufferedSample currentFrame;
private ExecutorService executorService;
private enum NalType {
FULL,
STAPA,
STAPB,
MTAP16,
MTAP24,
FUA,
FUB,
UNKNOWN
}
public RtpMediaDecoder(SurfaceView surfaceView) {
this.surfaceView = surfaceView;
surfaceView.getHolder().addCallback(this);
}
public void start() {
rtpStartClient();
}
public void restart() {
rtpStopClient();
try {
sleep(500);
} catch (InterruptedException e) {
}
rtpStartClient();
}
public void release() {
rtpStopClient();
if (decoder != null) {
try {
decoder.stop();
} catch (Exception e) {
log.error("Encountered error while trying to stop decoder", e);
}
decoder.release();
decoder = null;
}
}
private void rtpStartClient() {
rtpSessionThread = new RTPClientThread();
executorService = Executors.newFixedThreadPool(1);
rtpSessionThread.start();
}
private void rtpStopClient() {
rtpSessionThread.interrupt();
executorService.shutdown();
}
public BufferedSample getSampleBuffer() throws Exception {
int inIndex = decoder.dequeueInputBuffer(-1);
if (inIndex < 0) {
throw new Exception("Didn't get a buffer from the MediaCodec");
}
inputBuffer = decoder.getInputBuffer(inIndex);
return new BufferedSample(inputBuffer, inIndex);
}
public void decodeFrame(BufferedSample decodeBuffer) throws Exception {
if (DEBUGGING) {
log.info(decodeBuffer.toString());
}
decoder.queueInputBuffer(decodeBuffer.getIndex(), 0,
decodeBuffer.getSampleSize(), 0, 0);
int outIndex = decoder.dequeueOutputBuffer(info, 0);
if (outIndex >= 0) {
// outputBuffer = decoder.getOutputBuffer(outIndex);
decoder.releaseOutputBuffer(outIndex,true);
}
// log.error("Completed frame decode: " + decodeBuffer.getRtpTimestamp() + " System Time: " + System.currentTimeMillis());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
android.view.ViewGroup.LayoutParams layoutParams = surfaceView.getLayoutParams();
layoutParams.width = SURFACE_WIDTH; // required width
layoutParams.height = SURFACE_HEIGHT; // required height
surfaceView.setLayoutParams(layoutParams);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
log.debug("Starting player thread.");
if (playerThread == null) {
playerThread = new PlayerThread(holder.getSurface());
playerThread.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
public MediaFormat getMediaFormat() {
String mimeType = "video/avc";
int width = 640;
int height = 480;
MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height);
// from avconv, when streaming sample.h264.mp4 from disk
// byte[] header_sps = {0, 0, 0, 1, // header
// 0x67, 0x64, (byte) 0x00, 0x1e, (byte) 0xac, (byte) 0xd9, 0x40, (byte) 0xa0, 0x3d,
// (byte) 0xa1, 0x00, 0x00, (byte) 0x03, 0x00, 0x01, 0x00, 0x00, 0x03, 0x00, 0x3C, 0x0F, 0x16, 0x2D, (byte) 0x96}; // sps
// byte[] header_pps = {0, 0, 0, 1, // header
// 0x68, (byte) 0xeb, (byte) 0xec, (byte) 0xb2, 0x2C}; // pps
byte[] header_sps = {0x00, 0x00, 0x00, 0x01, 0x67, 0x42, 0x00, 0x0a, (byte) 0xf8, 0x41, (byte) 0xa2};
byte[] header_pps = {0x00, 0x00, 0x00, 0x01, 0x68, (byte) 0xce, 0x38, (byte) 0x80};
format.setByteBuffer(CSD_0, ByteBuffer.wrap(header_sps));
format.setByteBuffer(CSD_1, ByteBuffer.wrap(header_pps));
//format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width * height);
format.setInteger(DURATION_US, 12600000);
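// Aside (not in the original code): on Android 11+ some decoders expose a
// low-latency mode that can shave buffering delay; whether the target device
// supports it is an assumption and should be checked before relying on it.
// if (android.os.Build.VERSION.SDK_INT >= 30) {
//     format.setInteger(MediaFormat.KEY_LOW_LATENCY, 1);
// }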
return format;
}
private class PlayerThread extends Thread {
private Surface surface;
public PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
// MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", SURFACE_WIDTH, SURFACE_HEIGHT);
MediaFormat mediaFormat = getMediaFormat();
try {
String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
decoder = MediaCodec.createDecoderByType(mime);
}
// decoder = MediaCodec.createByCodecName("OMX.Intel.hw_vd.h264");
// decoder = MediaCodec.createDecoderByType("video/avc");
} catch (IOException e) {
e.printStackTrace();
}
if (decoder == null) {
log.info("Can't find video info!");
return;
}
decoder.configure(mediaFormat, surface, null, 0);
// log.error("Decoder Started, System Time: " + System.currentTimeMillis());
decoder.start();
}
}
private class RTPClientThread extends Thread {
private DatagramSocket mDataGramSocket;
@Override
public void run() {
try {
sleep(200);
} catch (InterruptedException e) {
}
try {
mDataGramSocket = new DatagramSocket(50004);
mDataGramSocket.setReuseAddress(true);
mDataGramSocket.setSoTimeout(1000);
} catch (Exception e) {
e.printStackTrace();
}
byte[] recvPacket = {0};
int seqNum = 0, prevSeqNum = 0, length = 0;
byte[] message = new byte[1450];
DatagramPacket p = new DatagramPacket(message, message.length);
try {
while (!Thread.interrupted()) {
try {
mDataGramSocket.receive(p);
length = p.getLength();
recvPacket = new byte[length];
System.arraycopy(message,0,recvPacket,0,length);
seqNum = ((message[2] & 0xff) << 8) | (message[3] & 0xff);
if(seqNum != prevSeqNum) {
prevSeqNum = seqNum;
if (!executorService.isTerminated() && !executorService.isShutdown()) {
executorService.execute(new PacketRunnable(recvPacket, seqNum));
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
log.error("We Stopped");
mDataGramSocket.close();
} catch (Exception e) {
e.printStackTrace();
}
}
public class PacketRunnable implements Runnable {
private byte[] data;
private int localSeqNum;
private PacketRunnable(byte[] _data, int _seqNum) {
this.data = _data;
this.localSeqNum = _seqNum;
}
public void run() {
DataPacket packet = DataPacket.decode(data);
String debugging = "RTP data. ";
debugging += packet.getDataSize() + "b ";
debugging += "#" + packet.getSequenceNumber();
debugging += " " + packet.getTimestamp();
if (lastSequenceNumberIsValid && ((lastSequenceNumber + 1) != localSeqNum) && (localSeqNum != 0)) {
sequenceError = true;
log.error("Seq#: "+ localSeqNum + " PrevSeq#: " + lastSequenceNumber + " SKIPPED (" + (localSeqNum - lastSequenceNumber - 1) + ")");
debugging += " SKIPPED (" + (localSeqNum - lastSequenceNumber - 1) + ")";
} else {
sequenceError = false;
}
if (RtpMediaDecoder.DEBUGGING) {
log.error(debugging);
}
H264Packet h264Packet = new H264Packet(packet);
if (h264Packet.getNRIBits() > 0) {
switch (h264Packet.h264NalType) {
case FULL:
if (RtpMediaDecoder.DEBUGGING) {
log.info("NAL: full packet");
}
startFrame(packet.getTimestamp());
if (currentFrame != null) {
if (useByteStreamFormat) {
currentFrame.getBuffer().put(byteStreamStartCodePrefix);
}
currentFrame.getBuffer().put(packet.getData().toByteBuffer());
sendFrame();
}
break;
case FUA:
if (h264Packet.isStart()) {
if (RtpMediaDecoder.DEBUGGING) {
log.info("FU-A start found. Starting new frame");
}
startFrame(packet.getTimestamp());
if (currentFrame != null) {
// Add stream header
if (useByteStreamFormat) {
currentFrame.getBuffer().put(byteStreamStartCodePrefix);
}
byte reconstructedNalTypeOctet = h264Packet.getNalTypeOctet();
currentFrame.getBuffer().put(reconstructedNalTypeOctet);
}
}
if (currentFrame != null) {
if (packet.getTimestamp() != currentFrame.getRtpTimestamp()) {
if (RtpMediaDecoder.DEBUGGING) {
log.warn("Non-consecutive timestamp found");
}
currentFrameHasError = true;
}
if (sequenceError) {
currentFrameHasError = true;
}
// If we survived possible errors, collect data to the current frame buffer
if (!currentFrameHasError) {
currentFrame.getBuffer().put(packet.getData().toByteBuffer(2, packet.getDataSize() - 2));
} else {
if (RtpMediaDecoder.DEBUGGING) {
log.info("Dropping frame");
}
}
if (h264Packet.isEnd()) {
if (RtpMediaDecoder.DEBUGGING) {
log.info("FU-A end found. Sending frame!");
}
try {
sendFrame();
} catch (Throwable t) {
log.error("Error sending frame.", t);
}
}
}
break;
case STAPA:
if (RtpMediaDecoder.DEBUGGING) {
log.info("NAL: STAP-A");
}
ChannelBuffer buffer = packet.getData();
buffer.readByte();
while (buffer.readable()) {
short nalUnitSize = buffer.readShort();
byte[] nalUnitData = new byte[nalUnitSize];
buffer.readBytes(nalUnitData);
startFrame(packet.getTimestamp());
if (currentFrame != null) {
if (useByteStreamFormat) {
currentFrame.getBuffer().put(byteStreamStartCodePrefix);
}
currentFrame.getBuffer().put(nalUnitData);
sendFrame();
}
}
break;
case STAPB:
case MTAP16:
case MTAP24:
case FUB:
case UNKNOWN:
log.warn("NAL: Unimplemented unit type: " + h264Packet.getNalType());
break;
}
} else {
log.warn("Useless packet received.");
}
lastSequenceNumber = localSeqNum;
lastSequenceNumberIsValid = true;
}
}
}
private void startFrame(long rtpTimestamp) {
// Reset error bit
currentFrameHasError = false;
// Deal with potentially non-returned buffer due to error
if (currentFrame != null) {
currentFrame.getBuffer().clear();
// Otherwise, get a fresh buffer from the codec
} else {
try {
// Get buffer from decoder
currentFrame = getSampleBuffer();
currentFrame.getBuffer().clear();
} catch (Exception e) {
currentFrameHasError = true;
e.printStackTrace();
}
}
if (!currentFrameHasError) {
// Set the sample timestamp
currentFrame.setRtpTimestamp(rtpTimestamp);
}
}
private void sendFrame() {
currentFrame.setSampleSize(currentFrame.getBuffer().position());
currentFrame.getBuffer().flip();
// log.error("Sending Frame: " + currentFrame.getRtpTimestamp() + " System Time: " + System.currentTimeMillis());
try {
decodeFrame(currentFrame);
} catch (Exception e) {
log.error("Exception sending frame to decoder", e);
}
// Always make currentFrame null to indicate we have returned the buffer to the codec
currentFrame = null;
}
private class H264Packet {
private final byte nalFBits;
private final byte nalNriBits;
private final byte nalType;
private boolean fuStart = false;
private boolean fuEnd = false;
private byte fuNalType;
private NalType h264NalType = NalType.UNKNOWN;
public H264Packet(DataPacket packet) {
// Parsing the RTP Packet - http://www.ietf.org/rfc/rfc3984.txt section 5.3
byte nalUnitOctet = packet.getData().getByte(0);
nalFBits = (byte) (nalUnitOctet & 0x80);
nalNriBits = (byte) (nalUnitOctet & 0x60);
nalType = (byte) (nalUnitOctet & 0x1F);
// If it's a single NAL packet then the entire payload is here
if (nalType > 0 && nalType < 24) {
h264NalType = NalType.FULL;
} else if (nalType == 24) {
h264NalType = NalType.STAPA;
} else if (nalType == 25) {
h264NalType = NalType.STAPB;
} else if (nalType == 26) {
h264NalType = NalType.MTAP16;
} else if (nalType == 27) {
h264NalType = NalType.MTAP24;
} else if (nalType == 28) {
h264NalType = NalType.FUA;
} else if (nalType == 29) {
h264NalType = NalType.FUB;
}
byte fuHeader = packet.getData().getByte(1);
fuStart = ((fuHeader & 0x80) != 0);
fuEnd = ((fuHeader & 0x40) != 0);
fuNalType = (byte) (fuHeader & 0x1F);
}
public byte getNalTypeOctet() {
// Excerpt from the spec:
/* "The NAL unit type octet of the fragmented
NAL unit is not included as such in the fragmentation unit payload,
but rather the information of the NAL unit type octet of the
fragmented NAL unit is conveyed in F and NRI fields of the FU
indicator octet of the fragmentation unit and in the type field of
the FU header" */
return (byte) (fuNalType | nalFBits | nalNriBits);
}
public boolean isStart() {
return fuStart;
}
public boolean isEnd() {
return fuEnd;
}
public byte getNalType() {
return nalType;
}
public byte getNRIBits() {
return nalNriBits;
}
}
}
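A hypothetical call site for the decoder above, e.g. from the Activity that owns the SurfaceView (the lifecycle placement is an assumption):

RtpMediaDecoder rtpDecoder = new RtpMediaDecoder(surfaceView); // registers the surface callback
rtpDecoder.start();   // opens UDP port 50004 and starts feeding the hardware decoder
// ... the stream renders on the SurfaceView ...
rtpDecoder.release(); // stops the RTP thread and releases the codec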
I have read a lot of pages about Android's AudioRecord; you can see a list of them below the question.
I'm trying to record audio with AudioRecord, but it's not working well.
public class MainActivity extends Activity {
AudioRecord ar = null;
int buffsize = 0;
int blockSize = 256;
boolean isRecording = false;
private Thread recordingThread = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
public void baslat(View v)
{
// when click to START
buffsize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
ar = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffsize);
ar.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
public void durdur(View v)
{
// When click to STOP
ar.stop();
isRecording = false;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.wav";
short sData[] = new short[buffsize/2];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
ar.read(sData, 0, buffsize/2);
Log.d("eray","Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, buffsize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
}
It creates a .wav file, but when I try to listen to it, it won't open; I get a "file not supported" error. I've tried to play the file with quite a few media player applications.
NOTE: I have to use AudioRecord instead of MediaRecorder because my app does other work while recording (displaying an equalizer).
Here is the list of pages that I've read about this subject:
http://developer.android.com/reference/android/media/AudioRecord.html#read(short[],%20int,%20int)
Android AudioRecord example
http://audiorecordandroid.blogspot.in
AudioRecord object not initializing
Recording a wav file from the mic in Android - problems
http://i-liger.com/article/android-wav-audio-recording
Creating a WAV file from raw PCM data using the Android SDK
Capturing Sound for Analysis and Visualizing Frequencies in Android
There are a lot of different ways to go about this. I've tried lots of them but nothing works for me. I've been working on this problem for about 6 hours now so I would appreciate a definitive answer, ideally some sample code.
I wrote a simple class to do this yesterday (by which you should read: not written to professional standards), and it works.
private class Wave {
private final int LONGINT = 4;
private final int SMALLINT = 2;
private final int INTEGER = 4;
private final int ID_STRING_SIZE = 4;
private final int WAV_RIFF_SIZE = LONGINT + ID_STRING_SIZE;
private final int WAV_FMT_SIZE = (4 * SMALLINT) + (INTEGER * 2) + LONGINT + ID_STRING_SIZE;
private final int WAV_DATA_SIZE = ID_STRING_SIZE + LONGINT;
private final int WAV_HDR_SIZE = WAV_RIFF_SIZE + ID_STRING_SIZE + WAV_FMT_SIZE + WAV_DATA_SIZE;
private final short PCM = 1;
private final int SAMPLE_SIZE = 2;
int cursor, nSamples;
byte[] output;
public Wave(int sampleRate, short nChannels, short[] data, int start, int end) {
nSamples = end - start + 1;
cursor = 0;
output = new byte[nSamples * SMALLINT + WAV_HDR_SIZE];
buildHeader(sampleRate, nChannels);
writeData(data, start, end);
}
// ------------------------------------------------------------
private void buildHeader(int sampleRate, short nChannels) {
write("RIFF");
write(output.length);
write("WAVE");
writeFormat(sampleRate, nChannels);
}
// ------------------------------------------------------------
public void writeFormat(int sampleRate, short nChannels) {
write("fmt ");
write(WAV_FMT_SIZE - WAV_DATA_SIZE);
write(PCM);
write(nChannels);
write(sampleRate);
write(nChannels * sampleRate * SAMPLE_SIZE);
write((short) (nChannels * SAMPLE_SIZE));
write((short) 16);
}
// ------------------------------------------------------------
public void writeData(short[] data, int start, int end) {
write("data");
write(nSamples * SMALLINT);
for (int i = start; i <= end; write(data[i++])) ;
}
// ------------------------------------------------------------
private void write(byte b) {
output[cursor++] = b;
}
// ------------------------------------------------------------
private void write(String id) {
if (id.length() != ID_STRING_SIZE)
Utils.logError("String " + id + " must have four characters.");
else {
for (int i = 0; i < ID_STRING_SIZE; ++i) write((byte) id.charAt(i));
}
}
// ------------------------------------------------------------
private void write(int i) { // 32-bit little-endian
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
private void write(short i) { // 16-bit little-endian
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
public boolean wroteToFile(String filename) {
boolean ok = false;
try {
File path = new File(getFilesDir(), filename);
FileOutputStream outFile = new FileOutputStream(path);
outFile.write(output);
outFile.close();
ok = true;
} catch (FileNotFoundException e) {
e.printStackTrace();
ok = false;
} catch (IOException e) {
ok = false;
e.printStackTrace();
}
return ok;
}
}
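For reference, here's a minimal usage sketch (the sample data is assumed; the class has to live inside a Context such as an Activity, since wroteToFile() calls getFilesDir()):
// Hypothetical usage: wrap one second of mono 44.1 kHz PCM in a WAV container.
short[] pcm = new short[44100]; // normally filled by AudioRecord.read(short[], int, int)
Wave wave = new Wave(44100, (short) 1, pcm, 0, pcm.length - 1);
boolean saved = wave.wroteToFile("recording.wav"); // lands under getFilesDir()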
Hope this helps
PCMAudioHelper solved my problem. I'll update this answer and explain it, but first I have to run some tests on this class.
You might find this OMRECORDER helpful for recording in .wav format.
If .aac works for you, check out this WhatsappAudioRecorder:
On startRecording button click:
Initialise a new thread.
Create a file with the .aac extension.
Create an output stream to the file.
Set the output.
Set the listener and execute the thread.
On stop click:
Interrupt the thread; the audio will be saved to the file (a minimal wiring sketch follows).
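A rough sketch of that wiring (context, listener, and the file name here are assumptions; exception handling is omitted):
// Hypothetical start/stop wiring for AudioRecordThread:
File output = new File(context.getFilesDir(), "recording.aac");
OutputStream stream = new FileOutputStream(output);
Thread recorder = new Thread(new AudioRecordThread(stream, listener));
recorder.start(); // startRecording click
// ... later, on the stop click:
recorder.interrupt(); // run()'s finally block stops the codec and closes the stream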
Here is the full gist for reference:
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.util.Log;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
public class AudioRecordThread implements Runnable {
private static final String TAG = AudioRecordThread.class.getSimpleName();
private static final int SAMPLE_RATE = 44100;
private static final int SAMPLE_RATE_INDEX = 4;
private static final int CHANNELS = 1;
private static final int BIT_RATE = 32000;
private final int bufferSize;
private final MediaCodec mediaCodec;
private final AudioRecord audioRecord;
private final OutputStream outputStream;
private OnRecorderFailedListener onRecorderFailedListener;
AudioRecordThread(OutputStream outputStream, OnRecorderFailedListener onRecorderFailedListener) throws IOException {
this.bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
this.audioRecord = createAudioRecord(this.bufferSize);
this.mediaCodec = createMediaCodec(this.bufferSize);
this.outputStream = outputStream;
this.onRecorderFailedListener = onRecorderFailedListener;
this.mediaCodec.start();
try {
audioRecord.startRecording();
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
}
@Override
public void run() {
if (onRecorderFailedListener != null) {
Log.d(TAG, "onRecorderStarted");
onRecorderFailedListener.onRecorderStarted();
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
ByteBuffer[] codecInputBuffers = mediaCodec.getInputBuffers();
ByteBuffer[] codecOutputBuffers = mediaCodec.getOutputBuffers();
try {
while (!Thread.interrupted()) {
boolean success = handleCodecInput(audioRecord, mediaCodec, codecInputBuffers, Thread.currentThread().isAlive());
if (success)
handleCodecOutput(mediaCodec, codecOutputBuffers, bufferInfo, outputStream);
}
} catch (IOException e) {
Log.w(TAG, e);
} finally {
mediaCodec.stop();
audioRecord.stop();
mediaCodec.release();
audioRecord.release();
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private boolean handleCodecInput(AudioRecord audioRecord,
MediaCodec mediaCodec, ByteBuffer[] codecInputBuffers,
boolean running) throws IOException {
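// Read one buffer of raw PCM from the AudioRecord and queue it into the AAC encoder.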
byte[] audioRecordData = new byte[bufferSize];
int length = audioRecord.read(audioRecordData, 0, audioRecordData.length);
if (length == AudioRecord.ERROR_BAD_VALUE ||
length == AudioRecord.ERROR_INVALID_OPERATION ||
length != bufferSize) {
if (length != bufferSize) {
if (onRecorderFailedListener != null) {
Log.d(TAG, "length != BufferSize calling onRecordFailed");
onRecorderFailedListener.onRecorderFailed();
}
return false;
}
}
int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
if (codecInputBufferIndex >= 0) {
ByteBuffer codecBuffer = codecInputBuffers[codecInputBufferIndex];
codecBuffer.clear();
codecBuffer.put(audioRecordData);
mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
return true;
}
private void handleCodecOutput(MediaCodec mediaCodec,
ByteBuffer[] codecOutputBuffers,
MediaCodec.BufferInfo bufferInfo,
OutputStream outputStream)
throws IOException {
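// Drain all pending encoder output, writing each AAC frame prefixed with a 7-byte ADTS header.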
int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
if (codecOutputBufferIndex >= 0) {
ByteBuffer encoderOutputBuffer = codecOutputBuffers[codecOutputBufferIndex];
encoderOutputBuffer.position(bufferInfo.offset);
encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
outputStream.write(header);
byte[] data = new byte[encoderOutputBuffer.remaining()];
encoderOutputBuffer.get(data);
outputStream.write(data);
}
encoderOutputBuffer.clear();
mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
} else if (codecOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = mediaCodec.getOutputBuffers();
}
codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
private byte[] createAdtsHeader(int length) {
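// ADTS wraps each raw AAC frame so that a plain .aac stream is parseable by players.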
int frameLength = length + 7;
byte[] adtsHeader = new byte[7];
adtsHeader[0] = (byte) 0xFF; // Sync Word
adtsHeader[1] = (byte) 0xF1; // MPEG-4, Layer (0), No CRC
adtsHeader[2] = (byte) ((MediaCodecInfo.CodecProfileLevel.AACObjectLC - 1) << 6);
adtsHeader[2] |= (((byte) SAMPLE_RATE_INDEX) << 2);
adtsHeader[2] |= (((byte) CHANNELS) >> 2);
adtsHeader[3] = (byte) (((CHANNELS & 3) << 6) | ((frameLength >> 11) & 0x03));
adtsHeader[4] = (byte) ((frameLength >> 3) & 0xFF);
adtsHeader[5] = (byte) (((frameLength & 0x07) << 5) | 0x1f);
adtsHeader[6] = (byte) 0xFC;
return adtsHeader;
}
private AudioRecord createAudioRecord(int bufferSize) {
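// A buffer 10x the minimum size gives the encoder headroom before PCM data is overwritten.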
AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize * 10);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
Log.d(TAG, "Unable to initialize AudioRecord");
throw new RuntimeException("Unable to initialize AudioRecord");
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.NoiseSuppressor.isAvailable()) {
android.media.audiofx.NoiseSuppressor noiseSuppressor = android.media.audiofx.NoiseSuppressor
.create(audioRecord.getAudioSessionId());
if (noiseSuppressor != null) {
noiseSuppressor.setEnabled(true);
}
}
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.AutomaticGainControl.isAvailable()) {
android.media.audiofx.AutomaticGainControl automaticGainControl = android.media.audiofx.AutomaticGainControl
.create(audioRecord.getAudioSessionId());
if (automaticGainControl != null) {
automaticGainControl.setEnabled(true);
}
}
}
return audioRecord;
}
private MediaCodec createMediaCodec(int bufferSize) throws IOException {
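// Configures an AAC-LC encoder: 44.1 kHz, mono, 32 kbps.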
MediaCodec mediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat mediaFormat = new MediaFormat();
mediaFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
try {
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
return mediaCodec;
}
interface OnRecorderFailedListener {
void onRecorderFailed();
void onRecorderStarted();
}
}
I would add this as a comment, but I don't yet have enough Stack Overflow rep points...
Opiatefuchs's link takes you to sample code that shows the exact header formatting necessary to create a .wav file. I've been all over that code myself. Very helpful.
First, you need to know that a WAV file has a defined format: it starts with a header, so you can't just write raw PCM data straight into a .wav file.
Second, the WAV header includes the length of the file, so you need to write the header after recording.
My solution is to use AudioRecord to record into a raw PCM file first:
byte[] audiodata = new byte[bufferSizeInBytes];
FileOutputStream fos = null;
int readsize = 0;
try {
fos = new FileOutputStream(pcmFileName, true);
} catch (FileNotFoundException e) {
Log.e("AudioRecorder", e.getMessage());
}
status = Status.STATUS_START;
while (status == Status.STATUS_START && audioRecord != null) {
readsize = audioRecord.read(audiodata, 0, bufferSizeInBytes);
if (AudioRecord.ERROR_INVALID_OPERATION != readsize && fos != null) {
try {
if (readsize > 0 && readsize <= audiodata.length)
fos.write(audiodata, 0, readsize);
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
}
}
try {
if (fos != null) {
fos.close();
}
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
Then convert it to a WAV file:
byte[] buffer = null;
int TOTAL_SIZE = 0;
File file = new File(pcmPath);
if (!file.exists()) {
return false;
}
TOTAL_SIZE = (int) file.length();
WaveHeader header = new WaveHeader();
header.fileLength = TOTAL_SIZE + (44 - 8);
header.FmtHdrLeth = 16;
header.BitsPerSample = 16;
header.Channels = 1;
header.FormatTag = 0x0001;
header.SamplesPerSec = 8000; // must match the sample rate the PCM was recorded at
header.BlockAlign = (short) (header.Channels * header.BitsPerSample / 8);
header.AvgBytesPerSec = header.BlockAlign * header.SamplesPerSec;
header.DataHdrLeth = TOTAL_SIZE;
byte[] h = null;
try {
h = header.getHeader();
} catch (IOException e1) {
Log.e("PcmToWav", e1.getMessage());
return false;
}
if (h.length != 44)
return false;
File destfile = new File(destinationPath);
if (destfile.exists())
destfile.delete();
try {
buffer = new byte[1024 * 4]; // 4 KB copy buffer
InputStream inStream = null;
OutputStream ouStream = null;
ouStream = new BufferedOutputStream(new FileOutputStream(
destinationPath));
ouStream.write(h, 0, h.length);
inStream = new BufferedInputStream(new FileInputStream(file));
int size = inStream.read(buffer);
while (size != -1) {
ouStream.write(buffer, 0, size); // write only the bytes actually read
size = inStream.read(buffer);
}
inStream.close();
ouStream.close();
} catch (FileNotFoundException e) {
Log.e("PcmToWav", e.getMessage());
return false;
} catch (IOException ioe) {
Log.e("PcmToWav", ioe.getMessage());
return false;
}
if (deletePcmFile) {
file.delete();
}
Log.i("PcmToWav", "makePCMFileToWAVFile success!" + new SimpleDateFormat("yyyy-MM-dd hh:mm").format(new Date()));
return true;
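The WaveHeader class isn't shown above. As a rough sketch (an assumed implementation, not the original class), getHeader() just has to emit the canonical 44-byte PCM WAV header, little-endian, using the fields set above:
import java.io.ByteArrayOutputStream;
import java.io.IOException;
// Sketch of a WaveHeader compatible with the usage above (assumed, not the original).
class WaveHeader {
int fileLength; // RIFF chunk size: total file size - 8
int FmtHdrLeth; // fmt chunk body size, 16 for PCM
short BitsPerSample;
short Channels;
short FormatTag; // 0x0001 = linear PCM
int SamplesPerSec;
short BlockAlign; // Channels * BitsPerSample / 8
int AvgBytesPerSec; // BlockAlign * SamplesPerSec
int DataHdrLeth; // raw PCM payload size in bytes
byte[] getHeader() throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream(44);
writeId(out, "RIFF");
writeInt(out, fileLength);
writeId(out, "WAVE");
writeId(out, "fmt ");
writeInt(out, FmtHdrLeth);
writeShort(out, FormatTag);
writeShort(out, Channels);
writeInt(out, SamplesPerSec);
writeInt(out, AvgBytesPerSec);
writeShort(out, BlockAlign);
writeShort(out, BitsPerSample);
writeId(out, "data");
writeInt(out, DataHdrLeth);
return out.toByteArray(); // exactly 44 bytes, matching the h.length check above
}
private void writeId(ByteArrayOutputStream out, String id) {
for (int i = 0; i < id.length(); i++) out.write(id.charAt(i));
}
private void writeInt(ByteArrayOutputStream out, int v) { // 32-bit little-endian
out.write(v & 0xFF);
out.write((v >> 8) & 0xFF);
out.write((v >> 16) & 0xFF);
out.write((v >> 24) & 0xFF);
}
private void writeShort(ByteArrayOutputStream out, short v) { // 16-bit little-endian
out.write(v & 0xFF);
out.write((v >> 8) & 0xFF);
}
}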