Convert Video to MP3 - Android

I want to convert a video (MP4) to MP3 format. If anyone knows how, please explain it to me.
I have tried the code below, but it's not working.
I implemented it from this link, but it won't work for me:
How do I extract audio to MP3 from MP4 using Java in Android?
public class AudioFromVideo {
private String audio, video;
private MediaCodec amc;
private MediaExtractor ame;
private MediaFormat amf;
private String amime;
public AudioFromVideo(String srcVideo, String destAudio) {
this.audio = destAudio;
this.video = srcVideo;
ame = new MediaExtractor();
init();
}
public void init() {
try {
ame.setDataSource(video);
amf = ame.getTrackFormat(1);
ame.selectTrack(1);
amime = amf.getString(MediaFormat.KEY_MIME);
amc = MediaCodec.createDecoderByType(amime);
amc.configure(amf, null, null, 0);
amc.start();
} catch (IOException e) {
e.printStackTrace();
}
}
public void start() {
new AudioService(amc, ame, audio).start();
}
private class AudioService extends Thread {
private MediaCodec amc;
private MediaExtractor ame;
private ByteBuffer[] aInputBuffers, aOutputBuffers;
private String destFile;
@SuppressWarnings("deprecation")
AudioService(MediaCodec amc, MediaExtractor ame, String destFile) {
this.amc = amc;
this.ame = ame;
this.destFile = destFile;
aInputBuffers = amc.getInputBuffers();
aOutputBuffers = amc.getOutputBuffers();
}
@SuppressWarnings("deprecation")
public void run() {
try {
OutputStream os = new FileOutputStream(new File(destFile));
long count = 0;
while (true) {
int inputIndex = amc.dequeueInputBuffer(0);
if (inputIndex == -1) {
continue;
}
int sampleSize = ame.readSampleData(aInputBuffers[inputIndex], 0);
if (sampleSize == -1) break;
long presentationTime = ame.getSampleTime();
int flag = ame.getSampleFlags();
ame.advance();
amc.queueInputBuffer(inputIndex, 0, sampleSize, presentationTime, flag);
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int outputIndex = amc.dequeueOutputBuffer(info, 0);
if (outputIndex >= 0) {
byte[] data = new byte[info.size];
aOutputBuffers[outputIndex].get(data, 0, data.length);
aOutputBuffers[outputIndex].clear();
os.write(data);
count += data.length;
Log.e("write", "" + count);
amc.releaseOutputBuffer(outputIndex, false);
} else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
aOutputBuffers = amc.getOutputBuffers();
} else if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
}
}
os.flush();
os.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
new AudioFromVideo(videopath,audioPath).start();

You can extract audio from MP4 videos using an Android FFmpeg library:
ffmpeg -i my_video.mp4 -ab 128k -ac 2 -ar 44100 -vn my_audio.mp3
Here are links to the library that does this:
http://writingminds.github.io/ffmpeg-android-java/
http://www.android-arsenal.com/details/1/931
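For reference, the same command can be run from Java with the writingminds wrapper roughly as follows. This is a hedged sketch: the class and handler names are from memory of that library, so verify them against the version you depend on, and the paths are placeholders.
import android.content.Context;
import com.github.hiteshsondhi88.libffmpeg.ExecuteBinaryResponseHandler;
import com.github.hiteshsondhi88.libffmpeg.FFmpeg;

void extractMp3(Context context, String videoPath, String audioPath) throws Exception {
    // -vn drops the video stream; -ab/-ac/-ar mirror the command line above.
    // (The library also requires a one-time loadBinary() call at startup.)
    String[] cmd = {"-i", videoPath, "-ab", "128k", "-ac", "2",
            "-ar", "44100", "-vn", audioPath};
    FFmpeg ffmpeg = FFmpeg.getInstance(context);
    ffmpeg.execute(cmd, new ExecuteBinaryResponseHandler() {
        @Override
        public void onSuccess(String message) { /* audioPath is ready */ }
        @Override
        public void onFailure(String message) { /* inspect the ffmpeg log */ }
    });
}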

It is possible to extract audio from a video file in Android programmatically, without using any external library, by using the MediaMuxer class.
new AudioExtractor().genVideoUsingMuxer("Video source Path", "Audio destination path", -1, -1, true, false);
See this post for a better understanding and the complete code:
https://androidprogrammatically425516919.wordpress.com/2020/04/21/how-to-convert-video-to-audio-in-android-programmatically/
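The core of that approach is copying the audio track's samples from a MediaExtractor straight into a MediaMuxer, without decoding or re-encoding. A minimal sketch of the idea is below; note the result keeps the source's audio codec (usually AAC in an MP4/M4A container) rather than literal MP3, and the buffer size is an arbitrary assumption.
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import java.io.IOException;
import java.nio.ByteBuffer;

// Hedged sketch: copy the first audio track of srcPath into dstPath unchanged.
void extractAudio(String srcPath, String dstPath) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    extractor.setDataSource(srcPath);
    MediaMuxer muxer = new MediaMuxer(dstPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

    int dstTrack = -1;
    for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        if (format.getString(MediaFormat.KEY_MIME).startsWith("audio/")) {
            extractor.selectTrack(i);
            dstTrack = muxer.addTrack(format); // the format carries csd, so no re-encode
            break;
        }
    }
    if (dstTrack < 0) throw new IOException("no audio track found");

    muxer.start();
    ByteBuffer buffer = ByteBuffer.allocate(256 * 1024); // assumed max sample size
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    while (true) {
        info.size = extractor.readSampleData(buffer, 0);
        if (info.size < 0) break; // end of stream
        info.offset = 0;
        info.presentationTimeUs = extractor.getSampleTime();
        info.flags = extractor.getSampleFlags();
        muxer.writeSampleData(dstTrack, buffer, info);
        extractor.advance();
    }
    muxer.stop();
    muxer.release();
    extractor.release();
}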

Related

Android: MediaCodec+MediaMuxer encoded audio MP4 won't play

I have the following function which takes a WAV (PCM) file and encodes it to an AAC-encoded MP4 file using Android's MediaCodec and MediaMuxer classes. This is audio only. The function runs successfully and outputs an .mp4 file of reasonable size that is recognized as AAC-encoded. But it doesn't play on Android, web, or iOS players, and it crashes Audacity. Is there something I am missing? The code is shown below.
public void encode(final String from, final String to, final Callback callback) {
new Thread(new Runnable() {
@Override
public void run() {
try {
extractor.setDataSource(from);
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; ++i) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
Log.d(TAG, "Track " + i + " mime-type: " + mime);
if (true) {
extractor.selectTrack(i);
}
}
MediaCodec codec = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_BIT_RATE, 128 * 1024);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 2);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
final MediaMuxer muxer = new MediaMuxer(to, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
final ByteBuffer byteBuffer = ByteBuffer.allocate(65536);
codec.setCallback(new MediaCodec.Callback() {
@Override
public void onInputBufferAvailable(MediaCodec codec, int bufferIndex) {
ByteBuffer inputBuffer = codec.getInputBuffer(bufferIndex);
if (isEndOfStream) {
return;
}
int sampleCapacity = inputBuffer.capacity() / 8;
if (numAvailable == 0) {
numAvailable = extractor.readSampleData(byteBuffer, 0);
if (numAvailable <= 0) {
codec.queueInputBuffer(bufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEndOfStream = true;
return;
}
long presentationTimeUs = extractor.getSampleTime();
extractor.advance();
}
if (numAvailable < sampleCapacity) {
codec.queueInputBuffer(bufferIndex, 0, numAvailable * 8, 0, 0);
numAvailable = 0;
} else {
codec.queueInputBuffer(bufferIndex, 0, sampleCapacity * 8, 0, 0);
numAvailable -= sampleCapacity;
}
}
@Override
public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) {
ByteBuffer outputBuffer = codec.getOutputBuffer(index);
muxer.writeSampleData(audioTrackIndex,outputBuffer,info);
codec.releaseOutputBuffer(index, true);
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "end of encoding!");
codec.stop();
codec.release();
extractor.release();
extractor = null;
muxer.stop();
muxer.release();
callback.run(true);
}
}
@Override
public void onError(MediaCodec codec, MediaCodec.CodecException e) {
Log.e(TAG, "codec error", e);
}
@Override
public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
audioTrackIndex = muxer.addTrack(format);
muxer.start();
}
});
codec.start();
} catch (IOException e) {
Log.e(TAG,"Unable to encode",e);
callback.run(false);
}
}
}).run();
}
You need to:
Add timestamp information correctly, since the media muxer needs it to tag each packet with time information.
Add logic to copy the data from the extractor's buffer (PCM) into the MediaCodec input buffer; referring only to the buffer index will just encode an uninitialized, random data buffer.
Add code to apply the input source's properties, such as channel count and sample rate, to the MediaCodec (unless you intend to encode with a different channel count and sample rate!).
Example code is below:
MediaExtractor extractor = null;
int numAvailable = 0;
boolean isEndOfStream = false;
int audioTrackIndex = 0;
long totalen = 0;
int channels = 0;
int sampleRate = 0;
public void encode(final String from, final String to) {
new Thread(new Runnable() {
@Override
public void run() {
try {
extractor = new MediaExtractor();
extractor.setDataSource(from);
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; ++i) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
Log.d(TAG, "Track " + i + " mime-type: " + mime);
if (true) {
extractor.selectTrack(i);
channels = extractor.getTrackFormat(i).getInteger(MediaFormat.KEY_CHANNEL_COUNT);
sampleRate = extractor.getTrackFormat(i).getInteger(MediaFormat.KEY_SAMPLE_RATE);
Log.e(TAG,"sampleRate:" + sampleRate + " channels:" + channels);
}
}
String mimeType = "audio/mp4a-latm";
MediaCodec codec = MediaCodec.createEncoderByType(mimeType);
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, mimeType);
format.setInteger(MediaFormat.KEY_BIT_RATE, 128 * 1024);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, channels);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, sampleRate);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
final MediaMuxer muxer = new MediaMuxer(to, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
final ByteBuffer byteBuffer = ByteBuffer.allocate(65536);
codec.setCallback(new MediaCodec.Callback() {
@Override
public void onInputBufferAvailable(MediaCodec codec, int bufferIndex) {
ByteBuffer inputBuffer = codec.getInputBuffer(bufferIndex);
inputBuffer.clear();
if (isEndOfStream) {
return;
}
int sampleCapacity = inputBuffer.capacity();
if (numAvailable == 0) {
numAvailable = extractor.readSampleData(byteBuffer, 0);
if (numAvailable <= 0) {
codec.queueInputBuffer(bufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEndOfStream = true;
return;
}
extractor.advance();
}
long timestampUs = 1000000L * totalen / (2 * channels * sampleRate); // 2 bytes per sample per channel
if (numAvailable < sampleCapacity) {
byte[] byteArray = new byte[numAvailable];
byteBuffer.get(byteArray);
inputBuffer.put(byteArray, 0, (int)numAvailable);
totalen += numAvailable;
codec.queueInputBuffer(bufferIndex, 0, numAvailable, timestampUs, 0);
numAvailable = 0;
} else {
byte[] byteArray = new byte[sampleCapacity];
byteBuffer.get(byteArray);
inputBuffer.put(byteArray, 0, (int)sampleCapacity);
totalen += sampleCapacity;
codec.queueInputBuffer(bufferIndex, 0, sampleCapacity, timestampUs, 0);
numAvailable -= sampleCapacity;
}
}
@Override
public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) {
ByteBuffer outputBuffer = codec.getOutputBuffer(index);
muxer.writeSampleData(audioTrackIndex,outputBuffer,info);
codec.releaseOutputBuffer(index, true);
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "end of encoding!");
codec.stop();
codec.release();
extractor.release();
extractor = null;
muxer.stop();
muxer.release();
//callback.run(true);
}
}
@Override
public void onError(MediaCodec codec, MediaCodec.CodecException e) {
Log.e(TAG, "codec error", e);
}
@Override
public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
audioTrackIndex = muxer.addTrack(format);
muxer.start();
}
});
codec.start();
} catch (IOException e) {
Log.e(TAG,"Unable to encode",e);
//callback.run(false);
}
}
}).run();
}
BTW, why do you need to divide the buffer capacity by 8? And what is the Callback class? Please share the imports! I almost could not get the build to pass with the callback parameter, so I commented it out.
You seem to be encoding your AAC into LATM format, which isn't very popular. Maybe that's the reason players won't play it. Try using some other media type, such as audio/mp4 or audio/3gpp.
See AAC container formats.

MediaMuxer and MediaExtractor to extract video from mp4

I am trying to extract the video track from an MP4 and mix it with other audio. Here is the video part:
MediaFormat videoFormat = null;
MediaMuxer mp4Muxer = createMediaMuxer();
MediaExtractor mp4Extractor = createMediaExtractor();
for (int i = 0; i < mp4Extractor.getTrackCount(); i++) {
MediaFormat format = mp4Extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
mp4Extractor.selectTrack(i);
videoFormat = format;
break;
}
}
int videoTrackIndex = mp4Muxer.addTrack(videoFormat);
mp4Muxer.start();
boolean videoMuxDone = false;
ByteBuffer videoSampleBuffer = ByteBuffer.allocateDirect(176 * 144);
BufferInfo videoBufferInfo = new BufferInfo();
int sampleSize;
while (!videoMuxDone) {
videoSampleBuffer.clear();
sampleSize = mp4Extractor.readSampleData(videoSampleBuffer, 0);
if (sampleSize < 0) {
videoMuxDone = true;
} else {
videoBufferInfo.presentationTimeUs = mp4Extractor.getSampleTime();
videoBufferInfo.flags = mp4Extractor.getSampleFlags();
videoBufferInfo.size = sampleSize;
mp4Muxer.writeSampleData(videoTrackIndex, videoSampleBuffer,videoBufferInfo);
mp4Extractor.advance();
}
}
mp4Extractor.release();
mp4Muxer.stop();
mp4Muxer.release();
creations:
private MediaExtractor createMediaExtractor() {
MediaExtractor extractor = new MediaExtractor();
try {
extractor.setDataSource(mp4VideoFile.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
}
return extractor;
}
private MediaMuxer createMediaMuxer() {
MediaMuxer mediaMuxer = null;
try {
mediaMuxer = new MediaMuxer(OUTPUT_MP4,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
e.printStackTrace();
}
return mediaMuxer;
}
The video format:
{max-input-size=6561, durationUs=3331377, height=144, mime=video/mp4v-es, csd-0=java.nio.ByteArrayBuffer[position=0,limit=30,capacity=30], width=176}
When it runs to the line mp4Muxer.stop(), this exception happens:
MPEG4Writer Missing codec specific data
java.lang.IllegalStateException: Failed to stop the muxer
at android.media.MediaMuxer.nativeStop(Native Method)
at android.media.MediaMuxer.stop(MediaMuxer.java:226)
at com.darcye.media.Mp4Muxer.startMix(Mp4Muxer.java:155)
I have read Extract Decode Encode Mux Audio, but I still can't find the reason. Please help me.

Slow H264 1080P@60fps Decoding on Android Lollipop 5.0.2

I'm developing a Java RTP streaming app for a company project, which should be capable of joining the multicast server and receiving the RTP packets. Later I use the H264 depacketizer to recreate a complete frame from the NAL FUs (keep appending the data until the End Bit & Marker Bit are set).
I want to decode and display a raw h264 video byte stream in Android and therefore I'm currently using the MediaCodec classes with Hardware Decoder configured.
The application is up and running on Jelly Bean (API 17). The various resolutions I need to decode are:
480P at 30/60 FPS
720P/I at 30/60 FPS
1080P/I at 30/60 FPS
Recently, due to a system upgrade, we are porting the app to Android L version 5.0.2. My app is not capable of playing high-resolution videos like 720p@60fps and 1080p@60fps.
For debugging purposes I started feeding elementary H264 frames, with their sizes, from a dump file to MediaCodec, and found that the video is lagging.
There are timestamps on the sample video I used, and it seems the actual time taken to advance by 1 second in the rendered video is longer.
Below are my sample code and links to a sample video:
h264 video https://www.dropbox.com/s/cocjhhovihm8q25/dump60fps.h264?dl=0
h264 framesize https://www.dropbox.com/s/r146d5zederrne1/dump60fps.size?dl=0
Also, as this is my question on Stack Overflow, please bear with me regarding the bad code formatting and direct references.
public class MainActivity extends Activity {
static final String TAG = "MainActivity";
private PlayerThread mPlayer = null;
private static final String MIME_TYPE = "video/avc";
private byte[] mSPSPPSFrame = new byte [3000];
private byte[] sps = new byte[37];
File videoFile = null;
File videoFile1 = null;
TextView tv ;
FileInputStream videoFileStream = null;
FileInputStream videoFileStream1 = null;
int[] tall = null ;
SpeedControlCallback mspeed = new SpeedControlCallback();
int mStreamLen = 0;
FrameLayout game;
RelativeLayout rl ;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//mVideoSurfaceView = (SurfaceView)findViewById(R.id.videoSurfaceView);
setContentView(R.layout.activity_main);
SurfaceView first = (SurfaceView) findViewById(R.id.firstSurface);
first.getHolder().addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
Log.d(TAG, "First surface created!");
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i2, int i3) {
Log.d(TAG, "surfaceChanged()");
surfaceHolder.getSurface();
if (mPlayer == null) {
mPlayer = new PlayerThread(surfaceHolder.getSurface());
mPlayer.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
Log.d(TAG, "First surface destroyed!");
}
});
tv = (TextView) findViewById(R.id.textview);
videoFile = new File("/data/local/tmp/dump60fps.h264");
videoFile1 = new File("/data/local/tmp/dump60fps.size");
}
private class PlayerThread extends Thread {
private Surface surface;
public PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
try {
decodeVideo(0, 1920,1080, 50, surface);
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (Throwable e) {
e.printStackTrace();
}
}
}
private void decodeVideo(int testinput, int width, int height,
int threshold, Surface surface) throws Throwable {
MediaCodec codec = null;
MediaFormat mFormat;
final long kTimeOutUs = 10000;
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
boolean sawInputEOS = false;
boolean sawOutputEOS = false;
MediaFormat oformat = null;
int errors = -1;
long presentationTimeUs = 0L;
boolean mVideoStart = false;
byte[] byteArray = new byte[65525*5*3];
int i;
int sizeInBytes = 0, index, sampleSize = 0;
try {
byte[] bytes = new byte[(int) videoFile1.length()];
FileInputStream fis = new FileInputStream(videoFile1);
fis.read(bytes);
fis.close();
String[] valueStr = new String(bytes).trim().split("\\s+");
tall = new int[valueStr.length];
mStreamLen = valueStr.length;
Log.e(TAG, "++++++ Total Frames ++++++"+mStreamLen);
for ( i = 0; i < valueStr.length; i++) {
tall[i] = Integer.parseInt(valueStr[i]);
}
} catch (IOException e1) {
e1.printStackTrace();
}
index =1;
try {
videoFileStream = new FileInputStream(videoFile);
} catch (FileNotFoundException e1) {
e1.printStackTrace();
}
System.currentTimeMillis();
if (mVideoStart == false) {
try {
sizeInBytes = videoFileStream.read(mSPSPPSFrame, 0,37);
Log.e(TAG, "VideoEngine configure ."+sizeInBytes);
//for (i = 0 ; i < sizeInBytes; i++){
// Log.e(TAG, "VideoEngine ."+mSPSPPSFrame[i]);}
} catch (IOException e1) {
e1.printStackTrace();
}
sampleSize = sizeInBytes;
index++;
index++;
mFormat = MediaFormat.createVideoFormat(MIME_TYPE, 1920,1080);
mFormat.setByteBuffer("csd-0", ByteBuffer.wrap( mSPSPPSFrame,0, sizeInBytes));
codec = MediaCodec.createDecoderByType(MIME_TYPE);
codec.configure(mFormat, surface /*surface*/ , null /* crypto */, 0 /* flags */);
codec.start();
codec.getInputBuffers();
codec.getOutputBuffers();
}
// index = 0;
while (!sawOutputEOS && errors < 0) {
if (!sawInputEOS) {
int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
//Log.d(TAG, String.format("Archana Dqing the input buffer with BufIndex #: %d",inputBufIndex));
if (inputBufIndex >= 0) {
ByteBuffer dstBuf = codec.getInputBuffers()[inputBufIndex];
/*
* Read data from file and copy to the input ByteBuffer
*/
try {
sizeInBytes = videoFileStream.read(byteArray, 0,
tall[index] /*+ 4*/);
sampleSize = tall[index]/*+ 4*/;
index++;
} catch (IOException e) {
e.printStackTrace();
}
if (sizeInBytes <= 0) {
codec.queueInputBuffer(
inputBufIndex,
0 /* offset */,
0,
presentationTimeUs,
MediaCodec.BUFFER_FLAG_END_OF_STREAM );
sawInputEOS = true;
}
else {
dstBuf.put(byteArray, 0, sizeInBytes);
if (mVideoStart == false) mVideoStart = true;
codec.queueInputBuffer(
inputBufIndex,
0 /* offset */,
sampleSize,
presentationTimeUs,
mVideoStart ? 0:MediaCodec.BUFFER_FLAG_CODEC_CONFIG );
//Log.d(TAG, String.format(" After queueing the buffer to decoder with inputbufindex and samplesize #: %d ,%d ind %d",inputBufIndex,sampleSize,index));
}
}
}
int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
//Log.d(TAG, String.format(" Getting the information about decoded output buffer flags,offset,PT,size #: %d %d %d %d",info.flags,info.offset,info.presentationTimeUs,info.size));
//Log.d(TAG, String.format(" Getting the output of decoder in res #: %d",res));
if (res >= 0) {
int outputBufIndex = res;
//Log.d(TAG, "Output PTS "+info.presentationTimeUs);
//mspeed.preRender(info.presentationTimeUs);
//mspeed.setFixedPlaybackRate(25);
codec.releaseOutputBuffer(outputBufIndex, true /* render */);
//Log.d(TAG, String.format(" releaseoutputbuffer index= #: %d",outputBufIndex));
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "saw output EOS.");
sawOutputEOS = true;
}
} else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codec.getOutputBuffers();
Log.d(TAG, "output buffers have changed.");
} else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
oformat = codec.getOutputFormat();
Log.d(TAG, "output format has changed to " + oformat);
}
}
codec.stop();
codec.release();
this.finish();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.activity_main, menu);
return true;
}
}
There are a couple of workarounds to the problem with the above sample test.
Instead of feeding one full frame to the decoder input, I fed a single NAL unit at a time. But the playback was still slow and could not match 60fps.
Google has changed the implementation of the Surface BufferQueue from asynchronous to synchronous. Hence, when we call MediaCodec.dequeueBuffer to get decoded data, the server side (SurfaceTexture::dequeueBuffer) will wait for a buffer to be queued, and the client side waits for that, so SurfaceTextureClient::dequeueBuffer will not return until a buffer has actually been queued on the server side. Whereas in asynchronous mode, a new GraphicBuffer is allocated.
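One mitigation that may help on API 21+ is sketched below, under the assumption of a constant 60 fps stream and reusing the codec/info/kTimeOutUs names from the code above: pass an explicit render timestamp to releaseOutputBuffer so frames are scheduled for presentation instead of rendered immediately, which keeps the dequeue loop from being throttled by the display.
// Hedged sketch: schedule output frames against System.nanoTime() so the
// decode loop can keep draining the codec (releaseOutputBuffer(int, long)
// is available from API 21). The pacing values are illustrative.
long startNs = System.nanoTime();
long frameIntervalNs = 1_000_000_000L / 60; // assumed 60 fps source
long frameIndex = 0;
int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
if (res >= 0) {
    long renderTimeNs = startNs + frameIndex * frameIntervalNs;
    codec.releaseOutputBuffer(res, renderTimeNs); // scheduled, not immediate
    frameIndex++;
}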

Playing javacv-ffmpeg decoded audio in Android with Audiotrack

I am developing an Android application in which I need to play an AAC live audio stream coming from a Red5 server.
I have successfully decoded the audio stream by using javacv-ffmpeg.
But my problem is how to play the audio from the decoded samples.
I have tried the following way:
int len = avcodec.avcodec_decode_audio4( audio_c, samples_frame, got_frame, pkt2);
if (len <= 0){
this.pkt2.size(0);
} else {
if (this.got_frame[0] != 0) {
long pts = avutil.av_frame_get_best_effort_timestamp(samples_frame);
int sample_format = samples_frame.format();
int planes = avutil.av_sample_fmt_is_planar(sample_format) != 0 ? samples_frame.channels() : 1;
int data_size = avutil.av_samples_get_buffer_size((IntPointer)null, audio_c.channels(), samples_frame.nb_samples(), audio_c.sample_fmt(), 1) / planes;
if ((samples_buf == null) || (samples_buf.length != planes)) {
samples_ptr = new BytePointer[planes];
samples_buf = new Buffer[planes];
}
BytePointer ptemp = samples_frame.data(0);
BytePointer[] temp_ptr = new BytePointer[1];
temp_ptr[0] = ptemp.capacity(sample_size);
ByteBuffer btemp = ptemp.asBuffer();
byte[] buftemp = new byte[sample_size];
btemp.get(buftemp, 0, buftemp.length);
// play buftemp[] with AudioTrack ...
}
But only noise is heard from the speakers. Is there any processing that needs to be done on the AVFrame we get from decode_audio4(...)?
The incoming audio stream is correctly encoded with the AAC codec.
Any help or suggestions appreciated.
Thanks in advance.
You can use the FFmpegFrameGrabber class to capture the stream and extract the audio using a FloatBuffer. Here is a Java example:
public class PlayVideoAndAudio extends Application
{
private static final Logger LOG = Logger.getLogger(PlayVideoAndAudio.class.getName());
private static final double SC16 = (double) 0x7FFF + 0.4999999999999999;
private static volatile Thread playThread;
public static void main(String[] args)
{
launch(args);
}
@Override
public void start(Stage primaryStage) throws Exception
{
String source = "rtsp://184.72.239.149/vod/mp4:BigBuckBunny_115k.mov";
StackPane root = new StackPane();
ImageView imageView = new ImageView();
root.getChildren().add(imageView);
imageView.fitWidthProperty().bind(primaryStage.widthProperty());
imageView.fitHeightProperty().bind(primaryStage.heightProperty());
Scene scene = new Scene(root, 640, 480);
primaryStage.setTitle("Video + audio");
primaryStage.setScene(scene);
primaryStage.show();
playThread = new Thread(() -> {
try {
FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(source);
grabber.start();
primaryStage.setWidth(grabber.getImageWidth());
primaryStage.setHeight(grabber.getImageHeight());
AudioFormat audioFormat = new AudioFormat(grabber.getSampleRate(), 16, grabber.getAudioChannels(), true, true);
DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
SourceDataLine soundLine = (SourceDataLine) AudioSystem.getLine(info);
soundLine.open(audioFormat);
soundLine.start();
Java2DFrameConverter converter = new Java2DFrameConverter();
ExecutorService executor = Executors.newSingleThreadExecutor();
while (!Thread.interrupted()) {
Frame frame = grabber.grab();
if (frame == null) {
break;
}
if (frame.image != null) {
Image image = SwingFXUtils.toFXImage(converter.convert(frame), null);
Platform.runLater(() -> {
imageView.setImage(image);
});
} else if (frame.samples != null) {
FloatBuffer channelSamplesFloatBuffer = (FloatBuffer) frame.samples[0];
channelSamplesFloatBuffer.rewind();
ByteBuffer outBuffer = ByteBuffer.allocate(channelSamplesFloatBuffer.capacity() * 2);
for (int i = 0; i < channelSamplesFloatBuffer.capacity(); i++) {
short val = (short)((double) channelSamplesFloatBuffer.get(i) * SC16);
outBuffer.putShort(val);
}
/**
* We need this because soundLine.write ignores
* interruptions during writing.
*/
try {
executor.submit(() -> {
soundLine.write(outBuffer.array(), 0, outBuffer.capacity());
outBuffer.clear();
}).get();
} catch (InterruptedException interruptedException) {
Thread.currentThread().interrupt();
}
}
}
executor.shutdownNow();
executor.awaitTermination(10, TimeUnit.SECONDS);
soundLine.stop();
grabber.stop();
grabber.release();
Platform.exit();
} catch (Exception exception) {
LOG.log(Level.SEVERE, null, exception);
System.exit(1);
}
});
playThread.start();
}
@Override
public void stop() throws Exception
{
playThread.interrupt();
}
}
Because the data you are getting in buftemp[] is in the AV_SAMPLE_FMT_FLTP (planar float) format, you have to convert it to the AV_SAMPLE_FMT_S16 format using SwrContext, and then your problem will be solved.
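To see what that conversion amounts to, here is a hedged, hand-rolled sketch for the common stereo case (method and variable names are illustrative, not part of javacv): interleave the planar float samples and scale them to signed 16-bit PCM before handing them to AudioTrack.
// Convert two planes of AV_SAMPLE_FMT_FLTP samples into interleaved
// 16-bit signed PCM (the layout AudioTrack expects for ENCODING_PCM_16BIT).
short[] toInterleavedS16(float[] left, float[] right) {
    short[] out = new short[left.length * 2];
    for (int i = 0; i < left.length; i++) {
        // Clamp to [-1, 1] before scaling to avoid wrap-around distortion.
        float l = Math.max(-1f, Math.min(1f, left[i]));
        float r = Math.max(-1f, Math.min(1f, right[i]));
        out[2 * i] = (short) (l * 32767f);
        out[2 * i + 1] = (short) (r * 32767f);
    }
    return out;
}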

Recording .Wav with Android AudioRecorder

I have read a lot of pages about Android's AudioRecorder. You can see a list of them below the question.
I'm trying to record audio with AudioRecorder, but it's not working well.
public class MainActivity extends Activity {
AudioRecord ar = null;
int buffsize = 0;
int blockSize = 256;
boolean isRecording = false;
private Thread recordingThread = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
public void baslat(View v)
{
// when click to START
buffsize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
ar = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffsize);
ar.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
writeAudioDataToFile();
}
}, "AudioRecorder Thread");
recordingThread.start();
}
public void durdur(View v)
{
// When click to STOP
ar.stop();
isRecording = false;
}
private void writeAudioDataToFile() {
// Write the output audio in byte
String filePath = "/sdcard/voice8K16bitmono.wav";
short sData[] = new short[buffsize/2];
FileOutputStream os = null;
try {
os = new FileOutputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
while (isRecording) {
// gets the voice output from microphone to byte format
ar.read(sData, 0, buffsize/2);
Log.d("eray","Short wirting to file" + sData.toString());
try {
// // writes the data to file from buffer
// // stores the voice buffer
byte bData[] = short2byte(sData);
os.write(bData, 0, buffsize);
} catch (IOException e) {
e.printStackTrace();
}
}
try {
os.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private byte[] short2byte(short[] sData) {
int shortArrsize = sData.length;
byte[] bytes = new byte[shortArrsize * 2];
for (int i = 0; i < shortArrsize; i++) {
bytes[i * 2] = (byte) (sData[i] & 0x00FF);
bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
sData[i] = 0;
}
return bytes;
}
}
It's creating a .wav file, but when I try to listen to it, it won't open. I'm getting a "file not supported" error. I've tried to play the file with quite a few media player applications.
NOTE: I have to use AudioRecord instead of MediaRecorder because my app will be doing another process while recording (displaying an equalizer).
Here is the list of pages that I've read about this subject:
http://developer.android.com/reference/android/media/AudioRecord.html#read(short[],%20int,%20int)
Android AudioRecord example
http://audiorecordandroid.blogspot.in
AudioRecord object not initializing
Recording a wav file from the mic in Android - problems
http://i-liger.com/article/android-wav-audio-recording
Creating a WAV file from raw PCM data using the Android SDK
Capturing Sound for Analysis and Visualizing Frequencies in Android
There are a lot of different ways to go about this. I've tried lots of them but nothing works for me. I've been working on this problem for about 6 hours now so I would appreciate a definitive answer, ideally some sample code.
I wrote a simple (by which you should read, not to professional standards) class to do this yesterday, and it works.
private class Wave {
private final int LONGINT = 4;
private final int SMALLINT = 2;
private final int INTEGER = 4;
private final int ID_STRING_SIZE = 4;
private final int WAV_RIFF_SIZE = LONGINT + ID_STRING_SIZE;
private final int WAV_FMT_SIZE = (4 * SMALLINT) + (INTEGER * 2) + LONGINT + ID_STRING_SIZE;
private final int WAV_DATA_SIZE = ID_STRING_SIZE + LONGINT;
private final int WAV_HDR_SIZE = WAV_RIFF_SIZE + ID_STRING_SIZE + WAV_FMT_SIZE + WAV_DATA_SIZE;
private final short PCM = 1;
private final int SAMPLE_SIZE = 2;
int cursor, nSamples;
byte[] output;
public Wave(int sampleRate, short nChannels, short[] data, int start, int end) {
nSamples = end - start + 1;
cursor = 0;
output = new byte[nSamples * SMALLINT + WAV_HDR_SIZE];
buildHeader(sampleRate, nChannels);
writeData(data, start, end);
}
// ------------------------------------------------------------
private void buildHeader(int sampleRate, short nChannels) {
write("RIFF");
write(output.length);
write("WAVE");
writeFormat(sampleRate, nChannels);
}
// ------------------------------------------------------------
public void writeFormat(int sampleRate, short nChannels) {
write("fmt ");
write(WAV_FMT_SIZE - WAV_DATA_SIZE);
write(PCM);
write(nChannels);
write(sampleRate);
write(nChannels * sampleRate * SAMPLE_SIZE);
write((short) (nChannels * SAMPLE_SIZE));
write((short) 16);
}
// ------------------------------------------------------------
public void writeData(short[] data, int start, int end) {
write("data");
write(nSamples * SMALLINT);
for (int i = start; i <= end; write(data[i++])) ;
}
// ------------------------------------------------------------
private void write(byte b) {
output[cursor++] = b;
}
// ------------------------------------------------------------
private void write(String id) {
if (id.length() != ID_STRING_SIZE)
Utils.logError("String " + id + " must have four characters.");
else {
for (int i = 0; i < ID_STRING_SIZE; ++i) write((byte) id.charAt(i));
}
}
// ------------------------------------------------------------
private void write(int i) {
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
private void write(short i) {
write((byte) (i & 0xFF));
i >>= 8;
write((byte) (i & 0xFF));
}
// ------------------------------------------------------------
public boolean wroteToFile(String filename) {
boolean ok = false;
try {
File path = new File(getFilesDir(), filename);
FileOutputStream outFile = new FileOutputStream(path);
outFile.write(output);
outFile.close();
ok = true;
} catch (FileNotFoundException e) {
e.printStackTrace();
ok = false;
} catch (IOException e) {
ok = false;
e.printStackTrace();
}
return ok;
}
}
Hope this helps
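For completeness, a hedged usage sketch of the class above; the sample buffer and file name are placeholder assumptions:
// 'samples' would normally come from AudioRecord.read(short[], int, int).
short[] samples = new short[44100]; // one second of 44.1 kHz mono as a placeholder
Wave wave = new Wave(44100, (short) 1, samples, 0, samples.length - 1);
boolean ok = wave.wroteToFile("recording.wav"); // written under getFilesDir()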
PCMAudioHelper solved my problem. I'll modify this answer and explain it, but first I have to do some tests on this class.
You might find this OMRECORDER helpful for recording .WAV format.
In case .aac works for you, check out this WhatsappAudioRecorder:
On startRecording button click:
Initialise a new thread.
Create a file with the .aac extension.
Create an output stream for the file.
Set the output stream on the recorder thread.
Set the listener and execute the thread.
On stop click:
Interrupt the thread, and the audio will be saved in the file.
Here is the full gist for reference:
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.util.Log;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
public class AudioRecordThread implements Runnable {
private static final String TAG = AudioRecordThread.class.getSimpleName();
private static final int SAMPLE_RATE = 44100;
private static final int SAMPLE_RATE_INDEX = 4;
private static final int CHANNELS = 1;
private static final int BIT_RATE = 32000;
private final int bufferSize;
private final MediaCodec mediaCodec;
private final AudioRecord audioRecord;
private final OutputStream outputStream;
private OnRecorderFailedListener onRecorderFailedListener;
AudioRecordThread(OutputStream outputStream, OnRecorderFailedListener onRecorderFailedListener) throws IOException {
this.bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
this.audioRecord = createAudioRecord(this.bufferSize);
this.mediaCodec = createMediaCodec(this.bufferSize);
this.outputStream = outputStream;
this.onRecorderFailedListener = onRecorderFailedListener;
this.mediaCodec.start();
try {
audioRecord.startRecording();
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
}
@Override
public void run() {
if (onRecorderFailedListener != null) {
Log.d(TAG, "onRecorderStarted");
onRecorderFailedListener.onRecorderStarted();
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
ByteBuffer[] codecInputBuffers = mediaCodec.getInputBuffers();
ByteBuffer[] codecOutputBuffers = mediaCodec.getOutputBuffers();
try {
while (!Thread.interrupted()) {
boolean success = handleCodecInput(audioRecord, mediaCodec, codecInputBuffers, Thread.currentThread().isAlive());
if (success)
handleCodecOutput(mediaCodec, codecOutputBuffers, bufferInfo, outputStream);
}
} catch (IOException e) {
Log.w(TAG, e);
} finally {
mediaCodec.stop();
audioRecord.stop();
mediaCodec.release();
audioRecord.release();
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private boolean handleCodecInput(AudioRecord audioRecord,
MediaCodec mediaCodec, ByteBuffer[] codecInputBuffers,
boolean running) throws IOException {
byte[] audioRecordData = new byte[bufferSize];
int length = audioRecord.read(audioRecordData, 0, audioRecordData.length);
if (length == AudioRecord.ERROR_BAD_VALUE ||
length == AudioRecord.ERROR_INVALID_OPERATION ||
length != bufferSize) {
if (length != bufferSize) {
if (onRecorderFailedListener != null) {
Log.d(TAG, "length != BufferSize calling onRecordFailed");
onRecorderFailedListener.onRecorderFailed();
}
return false;
}
}
int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
if (codecInputBufferIndex >= 0) {
ByteBuffer codecBuffer = codecInputBuffers[codecInputBufferIndex];
codecBuffer.clear();
codecBuffer.put(audioRecordData);
mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
return true;
}
private void handleCodecOutput(MediaCodec mediaCodec,
ByteBuffer[] codecOutputBuffers,
MediaCodec.BufferInfo bufferInfo,
OutputStream outputStream)
throws IOException {
int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
if (codecOutputBufferIndex >= 0) {
ByteBuffer encoderOutputBuffer = codecOutputBuffers[codecOutputBufferIndex];
encoderOutputBuffer.position(bufferInfo.offset);
encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
outputStream.write(header);
byte[] data = new byte[encoderOutputBuffer.remaining()];
encoderOutputBuffer.get(data);
outputStream.write(data);
}
encoderOutputBuffer.clear();
mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
} else if (codecOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = mediaCodec.getOutputBuffers();
}
codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
private byte[] createAdtsHeader(int length) {
int frameLength = length + 7;
byte[] adtsHeader = new byte[7];
adtsHeader[0] = (byte) 0xFF; // Sync Word
adtsHeader[1] = (byte) 0xF1; // MPEG-4, Layer (0), No CRC
adtsHeader[2] = (byte) ((MediaCodecInfo.CodecProfileLevel.AACObjectLC - 1) << 6);
adtsHeader[2] |= (((byte) SAMPLE_RATE_INDEX) << 2);
adtsHeader[2] |= (((byte) CHANNELS) >> 2);
adtsHeader[3] = (byte) (((CHANNELS & 3) << 6) | ((frameLength >> 11) & 0x03));
adtsHeader[4] = (byte) ((frameLength >> 3) & 0xFF);
adtsHeader[5] = (byte) (((frameLength & 0x07) << 5) | 0x1f);
adtsHeader[6] = (byte) 0xFC;
return adtsHeader;
}
private AudioRecord createAudioRecord(int bufferSize) {
AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize * 10);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
Log.d(TAG, "Unable to initialize AudioRecord");
throw new RuntimeException("Unable to initialize AudioRecord");
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.NoiseSuppressor.isAvailable()) {
android.media.audiofx.NoiseSuppressor noiseSuppressor = android.media.audiofx.NoiseSuppressor
.create(audioRecord.getAudioSessionId());
if (noiseSuppressor != null) {
noiseSuppressor.setEnabled(true);
}
}
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.AutomaticGainControl.isAvailable()) {
android.media.audiofx.AutomaticGainControl automaticGainControl = android.media.audiofx.AutomaticGainControl
.create(audioRecord.getAudioSessionId());
if (automaticGainControl != null) {
automaticGainControl.setEnabled(true);
}
}
}
return audioRecord;
}
private MediaCodec createMediaCodec(int bufferSize) throws IOException {
MediaCodec mediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat mediaFormat = new MediaFormat();
mediaFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
try {
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
return mediaCodec;
}
interface OnRecorderFailedListener {
void onRecorderFailed();
void onRecorderStarted();
}
}
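A hedged wiring sketch for the steps listed above (the file name and log tags are assumptions, and the constructor throws IOException, so handle it as appropriate for your app):
// Record ADTS/AAC to a file until the thread is interrupted.
File out = new File(getFilesDir(), "recording.aac"); // assumed path
OutputStream os = new FileOutputStream(out);
Thread recorder = new Thread(new AudioRecordThread(os,
        new AudioRecordThread.OnRecorderFailedListener() {
            @Override
            public void onRecorderStarted() { Log.d("Recorder", "started"); }
            @Override
            public void onRecorderFailed() { Log.e("Recorder", "failed"); }
        }));
recorder.start();
// ... later, on the stop click:
recorder.interrupt(); // run() stops the codec and closes the stream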
I would add this as a comment but I don't yet have enough Stack Overflow rep points...
Opiatefuchs's link takes you to sample code that shows you the exact header formatting necessary to create a .wav file. I've been all over that code myself. Very helpful.
First, you need to know that a WAV file has its own format: a header. So you can't just write the raw data to a .wav file.
Second, the WAV file header includes the length of the file, so you need to write the header after recording.
My solution is to use AudioRecord to record a PCM file:
byte[] audiodata = new byte[bufferSizeInBytes];
FileOutputStream fos = null;
int readsize = 0;
try {
fos = new FileOutputStream(pcmFileName, true);
} catch (FileNotFoundException e) {
Log.e("AudioRecorder", e.getMessage());
}
status = Status.STATUS_START;
while (status == Status.STATUS_START && audioRecord != null) {
readsize = audioRecord.read(audiodata, 0, bufferSizeInBytes);
if (AudioRecord.ERROR_INVALID_OPERATION != readsize && fos != null) {
try {
if (readsize > 0 && readsize <= audiodata.length)
fos.write(audiodata, 0, readsize);
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
}
}
try {
if (fos != null) {
fos.close();
}
} catch (IOException e) {
Log.e("AudioRecorder", e.getMessage());
}
Then convert it to a WAV file:
byte buffer[] = null;
int TOTAL_SIZE = 0;
File file = new File(pcmPath);
if (!file.exists()) {
return false;
}
TOTAL_SIZE = (int) file.length();
WaveHeader header = new WaveHeader();
header.fileLength = TOTAL_SIZE + (44 - 8);
header.FmtHdrLeth = 16;
header.BitsPerSample = 16;
header.Channels = 1;
header.FormatTag = 0x0001;
header.SamplesPerSec = 8000;
header.BlockAlign = (short) (header.Channels * header.BitsPerSample / 8);
header.AvgBytesPerSec = header.BlockAlign * header.SamplesPerSec;
header.DataHdrLeth = TOTAL_SIZE;
byte[] h = null;
try {
h = header.getHeader();
} catch (IOException e1) {
Log.e("PcmToWav", e1.getMessage());
return false;
}
if (h.length != 44)
return false;
File destfile = new File(destinationPath);
if (destfile.exists())
destfile.delete();
try {
buffer = new byte[1024 * 4]; // 4 KB copy buffer
InputStream inStream = null;
OutputStream ouStream = null;
ouStream = new BufferedOutputStream(new FileOutputStream(
destinationPath));
ouStream.write(h, 0, h.length);
inStream = new BufferedInputStream(new FileInputStream(file));
int size = inStream.read(buffer);
while (size != -1) {
ouStream.write(buffer, 0, size); // write only the bytes actually read
size = inStream.read(buffer);
}
inStream.close();
ouStream.close();
} catch (FileNotFoundException e) {
Log.e("PcmToWav", e.getMessage());
return false;
} catch (IOException ioe) {
Log.e("PcmToWav", ioe.getMessage());
return false;
}
if (deletePcmFile) {
file.delete();
}
Log.i("PcmToWav", "makePCMFileToWAVFile success!" + new SimpleDateFormat("yyyy-MM-dd hh:mm").format(new Date()));
return true;
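The conversion above relies on a WaveHeader helper that is not shown in the post. As a hedged sketch, getHeader() just has to produce the standard 44-byte little-endian RIFF/fmt/data header; the field names below mirror the usage above, but the class itself is my reconstruction, not the original:
import java.io.ByteArrayOutputStream;
import java.io.IOException;

// Hedged reconstruction of the WaveHeader helper assumed above: it emits the
// standard 44-byte RIFF/fmt/data header, all multi-byte fields little-endian.
class WaveHeader {
    int fileLength;        // RIFF chunk size: total file size minus 8
    int FmtHdrLeth;        // fmt chunk size, 16 for plain PCM
    short BitsPerSample, Channels, FormatTag, BlockAlign;
    int SamplesPerSec, AvgBytesPerSec, DataHdrLeth;

    byte[] getHeader() throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream(44);
        out.write(new byte[]{'R', 'I', 'F', 'F'});
        writeInt(out, fileLength);
        out.write(new byte[]{'W', 'A', 'V', 'E'});
        out.write(new byte[]{'f', 'm', 't', ' '});
        writeInt(out, FmtHdrLeth);
        writeShort(out, FormatTag);
        writeShort(out, Channels);
        writeInt(out, SamplesPerSec);
        writeInt(out, AvgBytesPerSec);
        writeShort(out, BlockAlign);
        writeShort(out, BitsPerSample);
        out.write(new byte[]{'d', 'a', 't', 'a'});
        writeInt(out, DataHdrLeth);
        return out.toByteArray();
    }

    private void writeInt(ByteArrayOutputStream out, int v) {
        out.write(v & 0xFF);
        out.write((v >> 8) & 0xFF);
        out.write((v >> 16) & 0xFF);
        out.write((v >> 24) & 0xFF);
    }

    private void writeShort(ByteArrayOutputStream out, short v) {
        out.write(v & 0xFF);
        out.write((v >> 8) & 0xFF);
    }
}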
