I need to send a voice message, but the PCM audio is too large, so I'm trying to convert PCM to AMR-NB using MediaCodec. I have searched everywhere, but I can't implement it properly. I need your help.
private void sendAudioFile() {
initEncoder();
translaterThread = new Thread(translaterTask);
translaterThread.start();
RLog.d("AudioRecordManager", "sendAudioFile path = " + this.mAudioPath);
RLog.d("AudioRecordManager", "sendAmrFile path = " + this.mAmrPath);
if(this.mAmrPath != null) {
File file = new File(this.mAmrPath.getPath());
if(!file.exists() || file.length() == 0L) {
RLog.e("AudioRecordManager", "sendAudioFile fail cause of file length 0 or audio permission denied");
return;
}
CustomizeMessage customizeMessage = CustomizeMessage.obtain(this.mAmrPath);
sendMessage(mTargetId, customizeMessage, (String)null, (String)null);
}
}
private boolean initEncoder() {
try {
encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AMR_NB);
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_AMR_NB);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mSampleRate);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
return true;
} catch (IOException e) {
Log.e(TAG, "init encoder failed.");
e.printStackTrace();
}
return false;
}
private Runnable translaterTask = new Runnable() {
@Override
public void run() {
FileInputStream in = null;
FileOutputStream out = null;
byte[] data = new byte[minBufferSize];
ByteBuffer[] inputBuffers;
ByteBuffer[] outputBuffers;
ByteBuffer inputBuffer;
ByteBuffer outputBuffer;
MediaCodec.BufferInfo bufferInfo;
int inputBufferIndex;
int outputBufferIndex;
byte[] outData;
encoder.start();
try {
in = new FileInputStream(mAudioPath.getPath());
out = new FileOutputStream(mAmrPath.getPath());
while (in.read(data) != -1) {
inputBuffers = encoder.getInputBuffers();
outputBuffers = encoder.getOutputBuffers();
inputBufferIndex = encoder.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(data);
encoder.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0);
}
bufferInfo = new MediaCodec.BufferInfo();
outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0);
while (outputBufferIndex >= 0) {
try {
outputBuffer = outputBuffers[outputBufferIndex];
outputBuffer.position(bufferInfo.offset);
outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
outData = new byte[bufferInfo.size];
outputBuffer.get(outData);
out.write(outData);
Log.d("AudioEncoder", outData.length + " bytes encoded");
encoder.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0);
} catch (Exception e) {
e.printStackTrace();
}
}
}
} catch (IOException e) {
e.printStackTrace();
} finally {
if (encoder != null) {
encoder.stop();
encoder.release();
encoder = null;
}
}
}
};
When I click the send button, I get a thread exception: java.nio.BufferOverflowException
10-22 22:43:22.280 25633-25819/com.jike.hat E/AndroidRuntime: FATAL EXCEPTION: Thread-594
Process: com.jikexueyuan.cicada_chat, PID: 25633
java.nio.BufferOverflowException
at java.nio.Buffer.checkPutBounds(Buffer.java:183)
at java.nio.DirectByteBuffer.put(DirectByteBuffer.java:356)
at java.nio.ByteBuffer.put(ByteBuffer.java:721)
at com.jikexueyuan.cicada_chat.MyAudioRecordManager$4.run(MyAudioRecordManager.java:385)
at java.lang.Thread.run(Thread.java:831)
The parameters are set as follows:
private int mSampleRate = 16000;
private int mChannelConfig = AudioFormat.CHANNEL_IN_MONO;
private int mAudioEncodingBitRate = AudioFormat.ENCODING_PCM_16BIT;
private int BIT_RATE = 15850;
So, is there something wrong with the encoder, or with the parameter setup? Why does it throw BufferOverflowException?
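For reference, BufferOverflowException from ByteBuffer.put() means the array being written is larger than the buffer's remaining capacity; the codec's input buffers are not guaranteed to be as large as the AudioRecord buffer. A minimal, untested sketch of a more defensive inner loop (reusing the encoder, in and data variables from the code above) would cap each write to the buffer's capacity and use the actual read count:
int bytesRead = in.read(data);
if (bytesRead > 0) {
    int inputBufferIndex = encoder.dequeueInputBuffer(-1);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = encoder.getInputBuffers()[inputBufferIndex];
        inputBuffer.clear();
        // Never put more bytes than the codec buffer can hold, and only the bytes actually read.
        int length = Math.min(bytesRead, inputBuffer.remaining());
        inputBuffer.put(data, 0, length);
        encoder.queueInputBuffer(inputBufferIndex, 0, length, 0, 0);
    }
}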
Related
I use the following sample to record video from a buffer (from onPreviewFrame(byte[] data, ...)), but it saves the video using an OutputStream. I would like to change it to use MediaMuxer.
Also, when using this sample, the final video plays back at very high speed in the video player. I'm just not sure what time to set for encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec, MediaCodec.BUFFER_FLAG_END_OF_STREAM); I use long ptsUsec = (System.currentTimeMillis() * 1000) / FRAME_RATE;
private void encodeVideoFrameFromBuffer(byte[] frameData) {
if (encoder == null) return;
final int TIMEOUT_USEC = 10000;
ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers();
ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
if (!outputDone && outputStream == null) {
String fileName = Environment.getExternalStorageDirectory() + File.separator + "test" + 1280 + "x" + 720 + ".mp4";
File file = new File(fileName);
if (file.exists()) {
file.delete();
}
try {
outputStream = new FileOutputStream(fileName);
Log.d(TAG, "encoded output will be saved as " + fileName);
} catch (IOException ioe) {
Log.w(TAG, "Unable to create debug output file " + fileName);
throw new RuntimeException(ioe);
}
}
if (outputStream != null) {
int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
long ptsUsec = (System.currentTimeMillis() * 1000) / FRAME_RATE;
if (outputDone) {
encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
ByteBuffer inputBuf = encoderInputBuffers[inputBufIndex];
inputBuf.clear();
inputBuf.put(frameData);
encoder.queueInputBuffer(inputBufIndex, 0, frameData.length, ptsUsec, 0);
}
generateIndex++;
}
int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = encoder.getOutputBuffers();
Log.d(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = encoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
encodedData.position(info.offset);
encodedData.limit(info.offset + info.size);
byte[] data = new byte[info.size];
encodedData.get(data);
encodedData.position(info.offset);
try {
outputStream.write(data);
} catch (IOException ioe) {
Log.w(TAG, "failed writing debug data to file");
throw new RuntimeException(ioe);
}
encoder.releaseOutputBuffer(encoderStatus, false);
}
}
if (outputDone) {
if (outputStream != null) {
try {
outputStream.close();
} catch (IOException ioe) {
Log.w(TAG, "failed closing debug file");
throw new RuntimeException(ioe);
}
outputStream = null;
stopEncoder();
}
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
doEncodeDecodeVideoFromBuffer(data);
}
About the fast playback: try using System.nanoTime() / 1000L instead of (System.currentTimeMillis() * 1000) / FRAME_RATE.
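For example (a sketch; generateIndex and FRAME_RATE are the fields already used in the code above):
long ptsUsec = System.nanoTime() / 1000L;            // monotonic wall-clock time in microseconds
// or, to tie playback speed exactly to FRAME_RATE:
// long ptsUsec = generateIndex * 1000000L / FRAME_RATE;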
To use the muxer you have to initialize it outside of your encode/decode loop and feed it with sample data in the output-handling part. Change your
try {
outputStream.write(data);
} catch (IOException ioe) {
Log.w(TAG, "failed writing debug data to file");
throw new RuntimeException(ioe);
}
to
//somewhere outside encode/decode part
MediaCodec.BufferInfo videoInfo = new MediaCodec.BufferInfo();
muxer = new MediaMuxer(/*your file*/,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
int videoTrack = muxer.addTrack(encoder.getOutputFormat());
muxer.start();
//try-catch block replacement
muxer.writeSampleData(videoTrack, encodedData, info); // writeSampleData takes the ByteBuffer and its BufferInfo, not the byte[]
Don't forget to stop() and release() your muxer when you are finished. That should work.
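One caveat worth adding: in practice the track is usually added with the format reported via INFO_OUTPUT_FORMAT_CHANGED, and muxer.start() is only called after that, before the first writeSampleData(). A rough, untested sketch of such a drain loop (muxer, videoTrack and muxerStarted are assumed fields, not from the code above):
private void drainEncoderToMuxer(MediaCodec encoder, MediaCodec.BufferInfo info) {
    while (true) {
        int status = encoder.dequeueOutputBuffer(info, 10000);
        if (status == MediaCodec.INFO_TRY_AGAIN_LATER) {
            break;                                   // no output available yet
        } else if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            videoTrack = muxer.addTrack(encoder.getOutputFormat());
            muxer.start();                           // start only after the track is added
            muxerStarted = true;
        } else if (status >= 0) {
            ByteBuffer encodedData = encoder.getOutputBuffers()[status];
            if (muxerStarted && info.size > 0) {
                encodedData.position(info.offset);
                encodedData.limit(info.offset + info.size);
                muxer.writeSampleData(videoTrack, encodedData, info);
            }
            encoder.releaseOutputBuffer(status, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                muxer.stop();
                muxer.release();
                break;
            }
        }
    }
}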
I would like to pick a video from the device and decode it in order to change its frame rate, then encode it and save it back to the device. How is this possible using MediaCodec? I went through a lot of documentation but couldn't find a method. I have the following code for decoding. Will it be of any use for my purpose? If yes, how do I use the decoded data to save it with a changed fps?
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 1080, 720);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2500000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
try {
decoder = MediaCodec.createDecoderByType("video/avc");
} catch (IOException e) {
Log.d("Error", "Fail to create MediaCodec: " + e.toString());
}
///Commenting for testing...
/*
// Pass the decoded data to the surface to display
decoder.configure(mediaFormat, null, null, 0);
//decoder.configure(mediaFormat, null, null, 0);
decoder.start();
*/
///Commenting for testing...
// new BufferInfo();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
if (null == inputBuffers) {
Log.d("Error", "null == inputBuffers");
}
if (null == outputBuffers) {
Log.d("Error", "null == outbputBuffers 111");
}
FileInputStream file = null;
try {
file = new FileInputStream(data.getData().getPath().toString());
} catch (FileNotFoundException e) {
Log.d("Error", "open file error: " + e.toString());
return;
}
int read_size = -1;
int mCount = 0;
for (; ; ) {
byte[] h264 = null;
try {
byte[] length_bytes = new byte[4];
read_size = file.read(length_bytes);
if (read_size < 0) {
Log.d("Error", "read_size<0 pos1");
break;
}
int byteCount = bytesToInt(length_bytes, 0);
//Changed to .length
//int byteCount=length_bytes.length;
Log.d("Error", "byteCount: " + byteCount);
h264 = new byte[byteCount];
read_size = file.read(h264, 0, byteCount);
// Log.d("Error", "read_size: " + read_size);
if (read_size < 0) {
Log.d("Error", "read_size<0 pos2");
break;
}
// Log.d("Error", "pos: " + file.)
} catch (IOException e) {
Log.d("Error", "read_size 2: " + read_size);
Log.d("Error", "e.toStrinig(): " + e.toString());
break;
}
int inputBufferIndex = decoder.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(h264);
// long sample_time = ;
decoder.queueInputBuffer(inputBufferIndex, 0, h264.length, mCount * 1000000 / 20, 0);
++mCount;
} else {
Log.d("Error", "dequeueInputBuffer error");
}
ByteBuffer outputBuffer = null;
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 0);
while (outputBufferIndex >= 0) {
outputBuffer = outputBuffers[outputBufferIndex];
decoder.releaseOutputBuffer(outputBufferIndex, true);
outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 0);
}
// Pass the decoded data to the surface to display
decoder.configure(mediaFormat,mPreview.getHolder().getSurface() , null, 0);
//decoder.configure(mediaFormat, null, null, 0);
decoder.start();
if (outputBufferIndex >= 0) {
decoder.releaseOutputBuffer(outputBufferIndex, false);
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = decoder.getOutputBuffers();
Log.d("Error", "outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED");
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Subsequent data will conform to new format.
Log.d("Error", "outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED");
}
try {
Thread.sleep(1000/20);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
public int bytesToInt(byte[] src, int offset) {
int value;
value = (int) ((src[offset] & 0xFF)
| ((src[offset+1] & 0xFF)<<8)
| ((src[offset+2] & 0xFF)<<16)
| ((src[offset+3] & 0xFF)<<24));
return value;
}
You can take a look at DecodeEditEncode, a great starting point for decoding and re-encoding using surfaces (output surface for decoder -> input surface for encoder).
Take a look especially at this method
private void editVideoData(VideoChunks inputData, MediaCodec decoder,
OutputSurface outputSurface, InputSurface inputSurface, MediaCodec encoder,
VideoChunks outputData)
The workflow you have to follow is similar to the one below:
Extract the video track (MediaExtractor)
Feed the decoder input buffers
Render the decoded frame to the surface
When rendered, the encoder will get the frame (you have to set the timestamp too)
Use MediaMuxer to mux the encoded frames with the audio track.
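As an illustration of the first two steps (my own sketch, not taken from DecodeEditEncode; inputPath and renderSurface are placeholder names):
private MediaCodec startVideoDecoder(String inputPath, Surface renderSurface,
        MediaExtractor extractor) throws IOException {
    extractor.setDataSource(inputPath);
    for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime.startsWith("video/")) {
            extractor.selectTrack(i);
            MediaCodec decoder = MediaCodec.createDecoderByType(mime);
            // Render to a Surface (in DecodeEditEncode this is the SurfaceTexture-backed OutputSurface).
            decoder.configure(format, renderSurface, null, 0);
            decoder.start();
            return decoder;
        }
    }
    return null; // no video track found
}

// Step 2: feed one compressed sample into the decoder; returns false at end of stream.
private boolean feedOneSample(MediaExtractor extractor, MediaCodec decoder) {
    int inIndex = decoder.dequeueInputBuffer(10000);
    if (inIndex < 0) return true;                    // no input buffer free, try again later
    ByteBuffer buffer = decoder.getInputBuffers()[inIndex];
    int sampleSize = extractor.readSampleData(buffer, 0);
    if (sampleSize < 0) {
        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        return false;
    }
    // The sample time carries through and determines the encoder's output timestamps.
    decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
    extractor.advance();
    return true;
}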
Extra links : some examples
ExtractDecodeEditEncodeMuxTest
VideoResample.java (very interesting)
I am developing a recording service for a custom Android platform. When the application starts it will start recording a video in the background. Unfortunately this application runs on hardware that prevents me from using video recording.
My solution to this problem is to take images and hold them in a circular buffer, when an event happens it will stop feeding images to the buffer and place them together in a video.
The problem I am encountering is that when I save the images to video I just get a noisy green screen.
I based my code on this example: Using MediaCodec to save series of images as Video
Note: I cannot use MediaMuxer either; I am developing for API level <18.
I will guide you through the steps I take. On creation of the service I simply open the camera, set the preview on a SurfaceTexture, and add images to my buffer when the PreviewCallback is called.
private Camera mCamera;
private String mTimeStamp;
SurfaceTexture mSurfaceTexture;
private CircularBuffer<ByteArrayOutputStream> mCircularBuffer;
private static final int MAX_BUFFER_SIZE = 200;
private int mWidth = 720;
private int mHeight = 480;
@Override
public void onCreate() {
try {
mCircularBuffer = new CircularBuffer(MAX_BUFFER_SIZE);
mTimeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
mSurfaceTexture = new SurfaceTexture(10);
mCamera = getCameraInstance();
Parameters parameters = mCamera.getParameters();
parameters.setJpegQuality(20);
parameters.setPictureSize(mWidth, mHeight);
mCamera.setParameters(parameters);
mCamera.setPreviewTexture(mSurfaceTexture);
mCamera.startPreview();
mCamera.setPreviewCallback(mPreviewCallback);
} catch (IOException e) {
Log.d(TAG, "IOException: " + e.getMessage());
} catch (Exception e) {
Log.d(TAG, "Exception: " + e.getMessage());
}
}
private PreviewCallback mPreviewCallback = new PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
Rect rectangle = new Rect(0, 0, mWidth, mHeight);
yuvImage.compressToJpeg(rectangle, 20, out);
mCircularBuffer.add(out);
}
};
All of this works; when I convert the byte arrays to JPEG at this point, they are all valid image files.
Now, when an event happens, the service is destroyed and the last 200 images need to be placed one after another and converted to MP4. I do this by first saving them to H.264, based on the code provided in the link above, and then converting that file to MP4 using mp4parser.
@Override
public void onDestroy() {
super.onDestroy();
mCamera.stopPreview();
saveFileToH264("video/avc");
convertH264ToMP4();
}
private void saveFileToH264(String MIMETYPE) {
MediaCodec codec = MediaCodec.createEncoderByType(MIMETYPE);
MediaFormat mediaFormat = null;
int height = mCamera.getParameters().getPictureSize().height;
int width = mCamera.getParameters().getPictureSize().width;
Log.d(TAG, height + ", " + width);
mediaFormat = MediaFormat.createVideoFormat(MIMETYPE, width, height);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 1000000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
codec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
codec.start();
ByteBuffer[] inputBuffers = codec.getInputBuffers();
ByteBuffer[] outputBuffers = codec.getOutputBuffers();
boolean sawInputEOS = false;
int inputBufferIndex = -1, outputBufferIndex = -1;
BufferInfo info = null;
try {
File file = new File("/sdcard/output.h264");
FileOutputStream fstream2 = new FileOutputStream(file);
DataOutputStream dos = new DataOutputStream(fstream2);
// loop through buffer and get image output streams
for (int i = 0; i < MAX_BUFFER_SIZE; i++) {
ByteArrayOutputStream out = mCircularBuffer.getData(i);
byte[] dat = out.toByteArray();
long WAITTIME = 50;
inputBufferIndex = codec.dequeueInputBuffer(WAITTIME);
int bytesread = MAX_BUFFER_SIZE - 1 - i;
int presentationTime = 0;
if (bytesread <= 0)
sawInputEOS = true;
if (inputBufferIndex >= 0) {
if (!sawInputEOS) {
int samplesiz = dat.length;
inputBuffers[inputBufferIndex].put(dat);
codec.queueInputBuffer(inputBufferIndex, 0, samplesiz, presentationTime, 0);
presentationTime += 100;
info = new BufferInfo();
outputBufferIndex = codec.dequeueOutputBuffer(info, WAITTIME);
Log.i("BATA", "outputBufferIndex=" + outputBufferIndex);
if (outputBufferIndex >= 0) {
byte[] array = new byte[info.size];
outputBuffers[outputBufferIndex].get(array);
if (array != null) {
try {
dos.write(array);
} catch (IOException e) {
e.printStackTrace();
}
}
codec.releaseOutputBuffer(outputBufferIndex, false);
inputBuffers[inputBufferIndex].clear();
outputBuffers[outputBufferIndex].clear();
if (sawInputEOS)
break;
}
} else {
codec.queueInputBuffer(inputBufferIndex, 0, 0, presentationTime,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
info = new BufferInfo();
outputBufferIndex = codec.dequeueOutputBuffer(info, WAITTIME);
if (outputBufferIndex >= 0) {
byte[] array = new byte[info.size];
outputBuffers[outputBufferIndex].get(array);
if (array != null) {
try {
dos.write(array);
} catch (IOException e) {
e.printStackTrace();
}
}
codec.releaseOutputBuffer(outputBufferIndex, false);
inputBuffers[inputBufferIndex].clear();
outputBuffers[outputBufferIndex].clear();
break;
}
}
}
}
codec.flush();
try {
fstream2.close();
dos.flush();
dos.close();
} catch (IOException e) {
e.printStackTrace();
}
codec.stop();
codec.release();
codec = null;
} catch (FileNotFoundException e) {
Log.d(TAG, "File not found: " + e.getMessage());
} catch (Exception e) {
Log.d(TAG, "Exception: " + e.getMessage());
}
}
private void convertH264ToMP4() {
try {
DataSource videoFile = new FileDataSourceImpl("/sdcard/output.h264");
H264TrackImpl h264Track = new H264TrackImpl(videoFile, "eng", 5, 1);
// 5fps. you can play with timescale and timetick to get non integer fps, 23.967 is
// 24000/1001
Movie movie = new Movie();
movie.addTrack(h264Track);
Container out = new DefaultMp4Builder().build(movie);
FileOutputStream fos = new FileOutputStream(new File("/sdcard/output.mp4"));
out.writeContainer(fos.getChannel());
fos.flush();
fos.close();
Log.d(TAG, "Video saved to sdcard");
} catch (Exception e) {
Log.d(TAG, "No file was saved");
}
}
I'm pretty sure the problem is in the saveFileToH264 code. I've read in a post, at the link provided above, that this is probably a stride and/or alignment issue(?). However, I have no experience with encoding/decoding, so I'm not sure how to solve this. If anyone could help, that would be greatly appreciated!
Note: I know the code is not optimal and I still need to add more checks and whatnot, but I first want to get a working video out of this.
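For reference, an encoder configured with COLOR_FormatYUV420SemiPlanar expects raw NV12-style frames rather than JPEG-compressed data from compressToJpeg(). A minimal, untested helper (a hypothetical name, not part of the code above) that converts the camera's NV21 preview bytes into that layout would look roughly like this:
// Convert an NV21 preview frame (Y plane + interleaved V,U) to NV12
// (Y plane + interleaved U,V), the layout YUV420SemiPlanar encoders commonly expect.
private static byte[] nv21ToNv12(byte[] nv21, int width, int height) {
    byte[] nv12 = new byte[nv21.length];
    int ySize = width * height;
    // The luma plane is identical in both layouts.
    System.arraycopy(nv21, 0, nv12, 0, ySize);
    // Chroma plane: NV21 stores V,U pairs; NV12 stores U,V pairs.
    for (int i = ySize; i < nv21.length - 1; i += 2) {
        nv12[i] = nv21[i + 1];     // U
        nv12[i + 1] = nv21[i];     // V
    }
    return nv12;
}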
I am developing an Android app that has a feature to record the user's speech. For this I have used the AudioRecord API.
Currently the PCM file (the recorded audio file, recordedAudio.pcm) is generated successfully on the SD card, but I am not able to play that file. I also tried on a PC with Windows Media Player and some other players, but nothing helps.
Following is my code snippet.
private int minBufSize;
private AudioRecord recorder;
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private boolean status;
minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig,
audioFormat);
status = true;
startStreaming();
public void startStreaming() {
Thread streamThread = new Thread(new Runnable() {
@Override
public void run() {
try {
String filePath = Environment.getExternalStorageDirectory()
.getPath() + "/audioRecord.pcm";
FileOutputStream fileOutputStreamObj = null;
try {
fileOutputStreamObj = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
// short[] sData = new short[minBufSize];
byte[] buffer = new byte[minBufSize];
// recorder = findAudioRecord();
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, minBufSize);
Log.d(TAG, "Recorder initialized");
recorder.startRecording();
while (status) {
// reading data from MIC into buffer
minBufSize = recorder.read(buffer, 0, buffer.length);
try {
// writes the data to file from buffer
// stores the voice buffer
// byte bData[] = short2byte(sData);
fileOutputStreamObj.write(buffer, 0, buffer.length);
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
// mConnection.sendBinaryMessage(buffer);
System.out.println("MinBufferSize: " + minBufSize);
}
} catch (Exception e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
}
});
streamThread.start();
}
Please help me on this. Thanks in advance.
You don't have to convert it into WAV to play it.
AudioTrack can play the recorded audio directly.
Following is a code snippet that records audio into a file using AudioRecord and plays it back using the AudioTrack API.
The operation is controlled by the user with buttons.
Code
private int BufferSize;
byte[] buffer = new byte[BufferSize];
/* AudioRecord and AudioTrack Object */
private AudioRecord record = null;
private AudioTrack track = null;
/* Audio Configuration */
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private boolean isRecording = true;
private Thread recordingThread = null;
The audio configuration can vary from device to device.
Refer to this question.
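For example, a common probing approach (a rough sketch of my own, not taken from the linked question) is to try candidate sample rates with AudioRecord.getMinBufferSize() until the device accepts one:
// Returns an AudioRecord for the first configuration the device accepts, or null if none work.
private AudioRecord findAudioRecord() {
    int[] sampleRates = {44100, 22050, 16000, 11025, 8000};
    for (int rate : sampleRates) {
        int bufSize = AudioRecord.getMinBufferSize(rate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        if (bufSize == AudioRecord.ERROR_BAD_VALUE) {
            continue; // this configuration is not supported
        }
        AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                rate, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, bufSize);
        if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
            return recorder;
        }
        recorder.release();
    }
    return null;
}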
The GUI has three buttons: Record, Stop, and Play.
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setButtonHandlers();
/* Set Button Visibility */
enableButton(R.id.btnStartRec,true);
enableButton(R.id.btnStopRec,false);
enableButton(R.id.btnStartPlay,false);
BufferSize = AudioRecord.getMinBufferSize(sampleRate,
channelConfig, audioFormat);
}
/* Function to Enable/Disable Buttons */
private void enableButton(int id,boolean isEnable){
((Button)findViewById(id)).setEnabled(isEnable);
}
/* Assign OnClickListener to Buttons */
private void setButtonHandlers() {
((Button)findViewById(R.id.btnStartRec)).setOnClickListener(btnClick);
((Button)findViewById(R.id.btnStopRec)).setOnClickListener(btnClick);
((Button)findViewById(R.id.btnStartPlay)).setOnClickListener(btnClick);
}
Handling Button click:
private View.OnClickListener btnClick = new View.OnClickListener() {
@Override
public void onClick(View v) {
switch(v.getId()){
case R.id.btnStartRec:{
Log.d(TAG, "Start Recording");
enableButton(R.id.btnStartRec,false);
enableButton(R.id.btnStopRec,true);
startRecording();
break;
}
case R.id.btnStopRec:{
Log.d(TAG, "Stop Recording");
enableButton(R.id.btnStartRec,true);
enableButton(R.id.btnStopRec,false);
stopRecording();
enableButton(R.id.btnStartPlay,true);
break;
}
case R.id.btnStartPlay:{
Log.d(TAG, "Play Recording");
enableButton(R.id.btnStartRec,false);
enableButton(R.id.btnStopRec,false);
startPlaying();
break;
}
}
}
};
Code for Start Recording
private void startRecording()
{
record = new AudioRecord(AudioSource.DEFAULT, sampleRate,
channelConfig, audioFormat, BufferSize);
if (AudioRecord.STATE_INITIALIZED == record.getState())
record.startRecording();
isRecording = true;
/* Run a thread for Recording */
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
writeAudioDataToFile();
}
},"AudioRecorder Thread");
recordingThread.start();
}
private void writeAudioDataToFile()
{
byte data[] = new byte[BufferSize];
/* Record audio to following file */
String filename = "/sdcard/audiofile.pcm";
FileOutputStream os = null;
try {
os = new FileOutputStream(filename);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
int read_bytes = 0;
if(null != os){
while(isRecording)
{
read_bytes = record.read(data, 0, BufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read_bytes){
try {
os.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
Code for Stop Recording
private void stopRecording()
{
if(null != record)
{
isRecording = false;
if (AudioRecord.STATE_INITIALIZED == record.getState())
{
record.stop();
record.release();
Log.d(TAG, "===== Recording Audio Completed ===== ");
}
record = null;
recordingThread = null;
}
}
Code for Playing the Audio file:
public void startPlaying()
{
enableButton(R.id.btnStartPlay,false);
int minBufferSize = AudioTrack.getMinBufferSize(sampleRate,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT);
track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
AudioTrack.MODE_STREAM);
int i = 0;
byte[] temp = new byte[minBufferSize];
try {
FileInputStream fin = new FileInputStream("/sdcard/audiofile.pcm");
Log.d(TAG, "===== Opening File for Playing : /sdcard/audiofile.pcm ===== ");
DataInputStream dis = new DataInputStream(fin);
track.play();
while((i = dis.read(temp, 0, minBufferSize)) > -1)
{
track.write(temp, 0, i);
}
Log.d(TAG, "===== Playing Audio Completed ===== ");
track.stop();
track.release();
dis.close();
fin.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
enableButton(R.id.btnStartRec,true);
}
Please include the following in AndroidManifest.xml
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" > </uses-permission>
<uses-permission android:name="android.permission.RECORD_AUDIO" > </uses-permission>
The activity_main.xml looks like this.
The string.xml looks like this.
The above code is working and tested.
You can also do the same without a file, using an intermediate buffer.
See: Audio Recording and Streaming in Android
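As a rough, untested sketch of that file-less variant (reusing the record, BufferSize, sampleRate and isRecording fields from the code above):
// Capture PCM into an in-memory buffer instead of a file.
ByteArrayOutputStream pcmBuffer = new ByteArrayOutputStream();
byte[] chunk = new byte[BufferSize];
while (isRecording) {
    int read = record.read(chunk, 0, chunk.length);
    if (read > 0) {
        pcmBuffer.write(chunk, 0, read);             // keep only the bytes actually read
    }
}
// Playback: write the captured PCM straight to an AudioTrack.
byte[] pcm = pcmBuffer.toByteArray();
AudioTrack player = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT),
        AudioTrack.MODE_STREAM);
player.play();
player.write(pcm, 0, pcm.length);
player.stop();
player.release();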
Yes, finally I found the answer with the clue from Michael's comment above.
I am posting the working code here.
The client-side code is as follows.
From the client side I am streaming the audio data to the WebSocket server.
private int minBufSize;
private AudioRecord recorder;
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig,
audioFormat);
startStreaming();
public void startStreaming() {
Thread streamThread = new Thread(new Runnable() {
@Override
public void run() {
try {
byte[] buffer = new byte[minBufSize];
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig, audioFormat, minBufSize);
Log.d(TAG, "Recorder initialized");
recorder.startRecording();
while (status) {
// reading data from MIC into buffer
minBufSize = recorder.read(buffer, 0, buffer.length);
mConnection.sendBinaryMessage(buffer);
System.out.println("MinBufferSize: " + minBufSize);
}
} catch (Exception e) {
e.printStackTrace();
Log.e(TAG, "Exception" + e.getMessage());
}
}
});
streamThread.start();
}
The server-side implementation is as follows.
First the server creates the .pcm file from the streamed data. Then, from that PCM file, it creates the WAV file by adding a header.
@OnMessage
public void onMessage(byte[] data, boolean arg1)
{
if ((!this.currentCommand.equals("stop")) &&
(this.currentCommand.equals("start")))
try {
System.out.println("Starting new recording.");
FileOutputStream fOut = new FileOutputStream(this.f2, true);
fOut.write(data);
fOut.close();
properWAV(this.f2, 111133.0F);
}
catch (Exception e) {
e.printStackTrace();
}
}
private void properWAV(File fileToConvert, float newRecordingID)
{
try {
long mySubChunk1Size = 16L;
int myBitsPerSample = 16;
int myFormat = 1;
long myChannels = 1L;
long mySampleRate = 44100L;
long myByteRate = mySampleRate * myChannels * myBitsPerSample / 8L;
int myBlockAlign = (int)(myChannels * myBitsPerSample / 8L);
byte[] clipData = getBytesFromFile(fileToConvert);
long myDataSize = clipData.length;
long myChunk2Size = myDataSize; // the data chunk size is already a byte count
long myChunkSize = 36L + myChunk2Size;
OutputStream os = new FileOutputStream(new File("D:/audio/" + newRecordingID + ".wav"));
BufferedOutputStream bos = new BufferedOutputStream(os);
DataOutputStream outFile = new DataOutputStream(bos);
outFile.writeBytes("RIFF");
outFile.write(intToByteArray((int)myChunkSize), 0, 4);
outFile.writeBytes("WAVE");
outFile.writeBytes("fmt ");
outFile.write(intToByteArray((int)mySubChunk1Size), 0, 4);
outFile.write(shortToByteArray((short)myFormat), 0, 2);
outFile.write(shortToByteArray((short)(int)myChannels), 0, 2);
outFile.write(intToByteArray((int)mySampleRate), 0, 4);
outFile.write(intToByteArray((int)myByteRate), 0, 4);
outFile.write(shortToByteArray((short)myBlockAlign), 0, 2);
outFile.write(shortToByteArray((short)myBitsPerSample), 0, 2);
outFile.writeBytes("data");
outFile.write(intToByteArray((int)myDataSize), 0, 4);
outFile.write(clipData);
outFile.flush();
outFile.close();
}
catch (IOException e) {
e.printStackTrace();
}
}
private static byte[] intToByteArray(int i)
{
byte[] b = new byte[4];
b[0] = (byte)(i & 0xFF);
b[1] = (byte)(i >> 8 & 0xFF);
b[2] = (byte)(i >> 16 & 0xFF);
b[3] = (byte)(i >> 24 & 0xFF);
return b;
}
public static byte[] shortToByteArray(short data)
{
return new byte[] { (byte)(data & 0xFF), (byte)(data >>> 8 & 0xFF) };
}
public byte[] getBytesFromFile(File file)
throws IOException
{
byte[] buffer = new byte[(int)file.length()];
InputStream ios = null;
try {
ios = new FileInputStream(file);
if (ios.read(buffer) == -1)
throw new IOException("EOF reached while trying to read the whole file");
}
finally {
try {
if (ios != null)
ios.close();
}
catch (IOException localIOException)
{
}
}
try
{
if (ios != null)
ios.close();
}
catch (IOException localIOException1)
{
}
return buffer;
}
Hope this one saves many developers some time.
I'm trying to use MediaCodec to retrieve all the frames from a video for image-processing purposes. I'm trying to render the video and capture the frames from the output buffers,
but I can't create a Bitmap instance from the received bytes.
I've tried rendering to a surface and to nothing (null), because I've noticed that when you render to null, the output buffers do receive the bytes of the rendered frames.
This is the code:
private static final String SAMPLE = Environment.getExternalStorageDirectory() + "/test_videos/sample2.mp4";
private PlayerThread mPlayer = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
SurfaceView sv = new SurfaceView(this);
sv.getHolder().addCallback(this);
setContentView(sv);
}
protected void onDestroy() {
super.onDestroy();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if (mPlayer == null) {
mPlayer = new PlayerThread(holder.getSurface());
mPlayer.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if (mPlayer != null) {
mPlayer.interrupt();
}
}
private void writeFrameToSDCard(byte[] bytes, int i, int sampleSize) {
try {
Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, sampleSize);
File file = new File(Environment.getExternalStorageDirectory() + "/test_videos/sample" + i + ".png");
if (file.exists())
file.delete();
file.createNewFile();
FileOutputStream out = new FileOutputStream(file.getAbsoluteFile());
bmp.compress(Bitmap.CompressFormat.PNG, 90, out);
out.close();
} catch (Exception e) {
e.printStackTrace();
}
}
private class PlayerThread extends Thread {
private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
public PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
extractor = new MediaExtractor();
extractor.setDataSource(SAMPLE);
int index = extractor.getTrackCount();
Log.d("MediaCodecTag", "Track count: " + index);
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
extractor.selectTrack(i);
decoder = MediaCodec.createDecoderByType(mime);
decoder.configure(format, surface, null, 0);
break;
}
}
if (decoder == null) {
Log.e("DecodeActivity", "Can't find video info!");
return;
}
decoder.start();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
BufferInfo info = new BufferInfo();
boolean isEOS = false;
long startMs = System.currentTimeMillis();
int i = 0;
while (!Thread.interrupted()) {
if (!isEOS) {
int inIndex = decoder.dequeueInputBuffer(10000);
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEOS = true;
} else {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
}
}
/* saves frame to sdcard */
int outIndex = decoder.dequeueOutputBuffer(info, 10000); // outIndex most of the times null
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
break;
default:
ByteBuffer buffer = outputBuffers[outIndex];
Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);
// We use a very simple clock to keep the video FPS, or the video
// playback will be too fast
while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
try {
sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
break;
}
}
decoder.releaseOutputBuffer(outIndex, true);
try {
byte[] dst = new byte[outputBuffers[outIndex].capacity()];
outputBuffers[outIndex].get(dst);
writeFrameToSDCard(dst, i, dst.length);
i++;
} catch (Exception e) {
Log.d("iDecodeActivity", "Error while creating bitmap with: " + e.getMessage());
}
break;
}
// All decoded frames have been rendered, we can stop playing now
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
break;
}
}
decoder.stop();
decoder.release();
extractor.release();
}
}
Any help would be much appreciated.
You can decode to a Surface or to a ByteBuffer, but not both. Because you are configuring a Surface, there will always be zero bytes of data in the output buffer.
If you configure for ByteBuffer decoding, the data format will vary, but to my knowledge will never be an ARGB format that Bitmap understands. You can see examples of two YUV formats being examined in the buffer-to-buffer tests in the CTS EncodeDecodeTest in method checkFrame(). Note, however, that the first thing it does is check the format and return immediately if it's not recognized.
At present (Android 4.4), the only reliable way to do this is to decode to a SurfaceTexture, render that with GLES, and extract RGB data with glReadPixels(). Sample code is available on bigflake -- see ExtractMpegFramesTest (requires API 16+).
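For completeness, a rough sketch (mine, not from the answer) of configuring for ByteBuffer output and checking the reported color format before trying to interpret the bytes; the variable names follow the question's code:
// Pass a null Surface so decoded frames land in the output ByteBuffers instead of being rendered.
decoder.configure(format, null, null, 0);
decoder.start();

int outIndex = decoder.dequeueOutputBuffer(info, 10000);
if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    MediaFormat outFormat = decoder.getOutputFormat();
    int colorFormat = outFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
    // Typically a YUV420 variant (planar or semi-planar), never ARGB, so the bytes
    // cannot be handed to BitmapFactory directly; they must be converted to RGB first.
    Log.d("DecodeActivity", "decoder output color format: " + colorFormat);
} else if (outIndex >= 0) {
    ByteBuffer yuv = outputBuffers[outIndex];        // raw YUV frame data, laid out per colorFormat
    // convert or inspect the data according to colorFormat here
    decoder.releaseOutputBuffer(outIndex, false);    // false: nothing to render
}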