How to record android audio playing in headset - android

The MediaRecorder class in Android is used to record audio from the mic. Can anyone tell me how we can record the audio that is actually played on the headset? It sounds techy, but yes, that is what I am exploring. I was told the "Visualizer" class can record system audio, but as per the documentation it can only be used to visualize audio, and we cannot attach a recorder interface to it.
Read more : http://developer.android.com/reference/android/media/audiofx/Visualizer.html
Will any of the following serve the purpose? (A sketch of how one of these would be wired up follows the list.)
int CAMCORDER
int DEFAULT
int MIC
int REMOTE_SUBMIX
int VOICE_CALL
int VOICE_COMMUNICATION
int VOICE_DOWNLINK
int VOICE_RECOGNITION
int VOICE_UPLINK
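For illustration only (not from the original post): REMOTE_SUBMIX is the source that captures the device's output mix, but it is guarded by the system-level android.permission.CAPTURE_AUDIO_OUTPUT permission, so a sketch like the one below will normally fail on a stock, non-rooted device:
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

// Minimal sketch: attempt to capture the output mix via REMOTE_SUBMIX (API 19+).
int sampleRate = 44100;
int channelConfig = AudioFormat.CHANNEL_IN_MONO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int bufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.REMOTE_SUBMIX,
        sampleRate, channelConfig, audioFormat, bufferSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
    recorder.startRecording();
    byte[] chunk = new byte[bufferSize];
    int read = recorder.read(chunk, 0, chunk.length); // PCM bytes of the output mix
    // ... write the chunk to a file or stream ...
    recorder.stop();
}
recorder.release();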
Has anyone worked with OpenSL ES? I have heard that it, too, can serve this purpose.
If you have come across any Android APIs or third-party APIs for this, please feel free to share. A few blogs also say this can be done at the NDK level. If anyone has worked on it or has code examples, kindly let me know.
Thanks
Example code to show Michael:
public class VisualizerView extends View {
private static final String TAG = "VisualizerView";
private byte[] mBytes;
private byte[] mFFTBytes;
private Rect mRect = new Rect();
private Visualizer mVisualizer;
private Set<Renderer> mRenderers;
private Paint mFlashPaint = new Paint();
private Paint mFadePaint = new Paint();
private ByteArrayOutputStream buffer;
public VisualizerView(Context context, AttributeSet attrs, int defStyle)
{
super(context, attrs);
init();
}
public VisualizerView(Context context, AttributeSet attrs)
{
this(context, attrs, 0);
}
public VisualizerView(Context context)
{
this(context, null, 0);
}
private void init() {
mBytes = null;
mFFTBytes = null;
mFlashPaint.setColor(Color.argb(122, 255, 255, 255));
mFadePaint.setColor(Color.argb(238, 255, 255, 255)); // Adjust alpha to change how quickly the image fades
mFadePaint.setXfermode(new PorterDuffXfermode(Mode.MULTIPLY));
mRenderers = new HashSet<Renderer>();
buffer = new ByteArrayOutputStream(); // was never initialized, which would NPE in the capture listener
}
/**
* Links the visualizer to a player
* @param player - MediaPlayer instance to link to
*/
public void link(MediaPlayer player)
{
if(player == null)
{
throw new NullPointerException("Cannot link to null MediaPlayer");
}
// Create the Visualizer object and attach it to our media player.
mVisualizer = new Visualizer(player.getAudioSessionId());
mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
// Pass through Visualizer data to VisualizerView
Visualizer.OnDataCaptureListener captureListener = new Visualizer.OnDataCaptureListener()
{
@Override
public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes,
int samplingRate)
{
updateVisualizer(bytes);
//Record
if (bytes.length > 0)
buffer.write(bytes, 0, bytes.length);
//Record ends
}
@Override
public void onFftDataCapture(Visualizer visualizer, byte[] bytes,
int samplingRate)
{
updateVisualizerFFT(bytes);
}
};
mVisualizer.setDataCaptureListener(captureListener,
Visualizer.getMaxCaptureRate() / 2, true, true);
// Enabled Visualizer and disable when we're done with the stream
mVisualizer.setEnabled(true);
player.setOnCompletionListener(new MediaPlayer.OnCompletionListener()
{
@Override
public void onCompletion(MediaPlayer mediaPlayer)
{
mVisualizer.setEnabled(false);
//Save File
try {
buffer.flush();
} catch (IOException e) {
e.printStackTrace();
}
mBytes = buffer.toByteArray();
try {
buffer.close();
} catch (IOException e) {
e.printStackTrace();
}
mVisualizer.release();
File file = new File(Environment.getExternalStorageDirectory(), "music1.wav");
FileOutputStream fos;
try {
fos = new FileOutputStream(file);
fos.write(mBytes);
fos.flush();
fos.close();
} catch (FileNotFoundException e) {
// handle exception
} catch (IOException e) {
// handle exception
}
//Save File ends
}
});
}
public void addRenderer(Renderer renderer)
{
if(renderer != null)
{
mRenderers.add(renderer);
}
}
public void clearRenderers()
{
mRenderers.clear();
}
/**
* Call to release the resources used by VisualizerView. Like with the
* MediaPlayer it is good practice to call this method
*/
public void release()
{
mVisualizer.release();
}
/**
* Pass data to the visualizer. Typically this will be obtained from the
* Android Visualizer.OnDataCaptureListener call back. See
* {@link Visualizer.OnDataCaptureListener#onWaveFormDataCapture }
* @param bytes
*/
public void updateVisualizer(byte[] bytes) {
mBytes = bytes;
invalidate();
}
/**
* Pass FFT data to the visualizer. Typically this will be obtained from the
* Android Visualizer.OnDataCaptureListener call back. See
* {@link Visualizer.OnDataCaptureListener#onFftDataCapture }
* @param bytes
*/
public void updateVisualizerFFT(byte[] bytes) {
mFFTBytes = bytes;
invalidate();
}
boolean mFlash = false;
/**
* Call this to make the visualizer flash. Useful for flashing at the start
* of a song/loop etc...
*/
public void flash() {
mFlash = true;
invalidate();
}
Bitmap mCanvasBitmap;
Canvas mCanvas;
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
// Create canvas once we're ready to draw
mRect.set(0, 0, getWidth(), getHeight());
if(mCanvasBitmap == null)
{
mCanvasBitmap = Bitmap.createBitmap(canvas.getWidth(), canvas.getHeight(), Config.ARGB_8888);
}
if(mCanvas == null)
{
mCanvas = new Canvas(mCanvasBitmap);
}
if (mBytes != null) {
// Render all audio renderers
AudioData audioData = new AudioData(mBytes);
for(Renderer r : mRenderers)
{
r.render(mCanvas, audioData, mRect);
}
}
if (mFFTBytes != null) {
// Render all FFT renderers
FFTData fftData = new FFTData(mFFTBytes);
for(Renderer r : mRenderers)
{
r.render(mCanvas, fftData, mRect);
}
}
// Fade out old contents
mCanvas.drawPaint(mFadePaint);
if(mFlash)
{
mFlash = false;
mCanvas.drawPaint(mFlashPaint);
}
canvas.drawBitmap(mCanvasBitmap, new Matrix(), null);
}
}

Can anyone tell me how we can record the audio that is actually played on the headset?
You can't, as there's no official support in the Android APIs to do that. Doesn't matter if you use the Java APIs, or the native APIs included in the NDK.
There may be hacks that work on specific devices, if you've got root access, etc, but I'm not going to cover those. If you're interested you can try searching and see what you can come up with.
I was told "Visualizer" class can record system audio but as per documentation it can only be used to visualize audio and we cannot put recorder interface there.
The Visualizer has this method:
public int getWaveForm (byte[] waveform)
Returns a waveform capture of currently playing audio content. The capture consists
in a number of consecutive 8-bit (unsigned) mono PCM samples equal to the capture size
returned by getCaptureSize().
So you can record the currently playing audio using the Visualizer. But as is mentioned in the description above, you'll only get low-quality audio data, because the purpose of this method is to get audio data that you can use for visualization purposes, not for general recording purposes.
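As a rough illustration (mine, not part of the original answer), polling getWaveForm() could look like the sketch below. It assumes the RECORD_AUDIO permission has been granted; session 0 attaches the Visualizer to the output mix, and the data is 8-bit unsigned mono PCM, i.e. visualization quality only:
import android.media.audiofx.Visualizer;
import java.io.ByteArrayOutputStream;

// Sketch: capture low-quality waveform data from the output mix.
Visualizer visualizer = new Visualizer(0); // session 0 = output mix
visualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
visualizer.setEnabled(true);

ByteArrayOutputStream captured = new ByteArrayOutputStream();
byte[] waveform = new byte[visualizer.getCaptureSize()];
// Call this periodically (e.g. from a Handler), not in a tight loop:
if (visualizer.getWaveForm(waveform) == Visualizer.SUCCESS) {
    captured.write(waveform, 0, waveform.length); // 8-bit unsigned mono samples
}
// When done:
visualizer.setEnabled(false);
visualizer.release();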

Related

Android camera2 output to ImageReader format YUV_420_888 still slow

I'm trying to get the Android camera2 API running in a background service and then process the frames in the ImageReader.OnImageAvailableListener callback. I already use the suggested raw format YUV_420_888 to get max fps; however, I only get around 7fps at 640x480 resolution. This is even slower than what I got using the old Camera interface (I want to upgrade to Camera2 to get higher fps) or with the OpenCV JavaCameraView (which I can't use because I need to run the processing in a background service).
Below is my service class. What am I missing?
My phone is Redmi Note 3 running Android 5.0.2
public class Camera2ServiceYUV extends Service {
protected static final String TAG = "VideoProcessing";
protected static final int CAMERACHOICE = CameraCharacteristics.LENS_FACING_BACK;
protected CameraDevice cameraDevice;
protected CameraCaptureSession captureSession;
protected ImageReader imageReader;
// A semaphore to prevent the app from exiting before closing the camera.
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
public static final String RESULT_RECEIVER = "resultReceiver";
private static final int JPEG_COMPRESSION = 90;
public static final int RESULT_OK = 0;
public static final int RESULT_DEVICE_NO_CAMERA= 1;
public static final int RESULT_GET_CAMERA_FAILED = 2;
public static final int RESULT_ALREADY_RUNNING = 3;
public static final int RESULT_NOT_RUNNING = 4;
private static final String START_SERVICE_COMMAND = "startServiceCommands";
private static final int COMMAND_NONE = -1;
private static final int COMMAND_START = 0;
private static final int COMMAND_STOP = 1;
private boolean mRunning = false;
public Camera2ServiceYUV() {
}
public static void startToStart(Context context, ResultReceiver resultReceiver) {
Intent intent = new Intent(context, Camera2ServiceYUV.class);
intent.putExtra(START_SERVICE_COMMAND, COMMAND_START);
intent.putExtra(RESULT_RECEIVER, resultReceiver);
context.startService(intent);
}
public static void startToStop(Context context, ResultReceiver resultReceiver) {
Intent intent = new Intent(context, Camera2ServiceYUV.class);
intent.putExtra(START_SERVICE_COMMAND, COMMAND_STOP);
intent.putExtra(RESULT_RECEIVER, resultReceiver);
context.startService(intent);
}
// SERVICE INTERFACE
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
switch (intent.getIntExtra(START_SERVICE_COMMAND, COMMAND_NONE)) {
case COMMAND_START:
startCamera(intent);
break;
case COMMAND_STOP:
stopCamera(intent);
break;
default:
throw new UnsupportedOperationException("Cannot start the camera service with an illegal command.");
}
return START_STICKY;
}
@Override
public void onDestroy() {
try {
captureSession.abortCaptures();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
captureSession.close();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
// CAMERA2 INTERFACE
/**
* 1. The android CameraManager class is used to manage all the camera devices in our android device
* Each camera device has a range of properties and settings that describe the device.
* It can be obtained through the camera characteristics.
*/
public void startCamera(Intent intent) {
final ResultReceiver resultReceiver = intent.getParcelableExtra(RESULT_RECEIVER);
if (mRunning) {
resultReceiver.send(RESULT_ALREADY_RUNNING, null);
return;
}
mRunning = true;
CameraManager manager = (CameraManager) getSystemService(CAMERA_SERVICE);
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
String pickedCamera = getCamera(manager);
Log.e(TAG,"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA " + pickedCamera);
manager.openCamera(pickedCamera, cameraStateCallback, null);
CameraCharacteristics characteristics = manager.getCameraCharacteristics(pickedCamera);
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.YUV_420_888);
}
int width = 640;
int height = 480;
// if (jpegSizes != null && 0 < jpegSizes.length) {
// width = jpegSizes[jpegSizes.length -1].getWidth();
// height = jpegSizes[jpegSizes.length - 1].getHeight();
// }
// for(Size s : jpegSizes)
// {
// Log.e(TAG,"Size = " + s.toString());
// }
// DEBUG
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
return;
}
Log.e(TAG,"Width = " + width + ", Height = " + height);
Log.e(TAG,"output stall duration = " + map.getOutputStallDuration(ImageFormat.YUV_420_888, new Size(width,height)) );
Log.e(TAG,"Min output stall duration = " + map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, new Size(width,height)) );
// Size[] sizeList = map.getInputSizes(ImageFormat.YUV_420_888);
// for(Size s : sizeList)
// {
// Log.e(TAG,"Size = " + s.toString());
// }
imageReader = ImageReader.newInstance(width, height, ImageFormat.YUV_420_888, 2 /* images buffered */);
imageReader.setOnImageAvailableListener(onImageAvailableListener, null);
Log.i(TAG, "imageReader created");
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
resultReceiver.send(RESULT_DEVICE_NO_CAMERA, null);
}catch (InterruptedException e) {
resultReceiver.send(RESULT_GET_CAMERA_FAILED, null);
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
catch(SecurityException se)
{
resultReceiver.send(RESULT_GET_CAMERA_FAILED, null);
throw new RuntimeException("Security permission exception while trying to open the camera.", se);
}
resultReceiver.send(RESULT_OK, null);
}
// We can pick the camera being used, i.e. rear camera in this case.
private String getCamera(CameraManager manager) {
try {
for (String cameraId : manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
int cOrientation = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cOrientation == CAMERACHOICE) {
return cameraId;
}
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
return null;
}
/**
* 1.1 Callbacks when the camera changes its state - opened, disconnected, or error.
*/
protected CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
Log.i(TAG, "CameraDevice.StateCallback onOpened");
mCameraOpenCloseLock.release();
cameraDevice = camera;
createCaptureSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
Log.w(TAG, "CameraDevice.StateCallback onDisconnected");
mCameraOpenCloseLock.release();
camera.close();
cameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
Log.e(TAG, "CameraDevice.StateCallback onError " + error);
mCameraOpenCloseLock.release();
camera.close();
cameraDevice = null;
}
};
/**
* 2. To capture or stream images from a camera device, the application must first create
* a camera capture captureSession.
* The camera capture needs a surface to output what has been captured, in this case
* we use ImageReader in order to access the frame data.
*/
public void createCaptureSession() {
try {
cameraDevice.createCaptureSession(Arrays.asList(imageReader.getSurface()), sessionStateCallback, null);
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
protected CameraCaptureSession.StateCallback sessionStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
Log.i(TAG, "CameraCaptureSession.StateCallback onConfigured");
// The camera is already closed
if (null == cameraDevice) {
return;
}
// When the captureSession is ready, we start to grab the frame.
Camera2ServiceYUV.this.captureSession = session;
try {
session.setRepeatingRequest(createCaptureRequest(), null, null);
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.e(TAG, "CameraCaptureSession.StateCallback onConfigureFailed");
}
};
/**
* 3. The application then needs to construct a CaptureRequest, which defines all the capture parameters
* needed by a camera device to capture a single image.
*/
private CaptureRequest createCaptureRequest() {
try {
/**
* Check other templates for further details.
* TEMPLATE_MANUAL = 6
* TEMPLATE_PREVIEW = 1
* TEMPLATE_RECORD = 3
* TEMPLATE_STILL_CAPTURE = 2
* TEMPLATE_VIDEO_SNAPSHOT = 4
* TEMPLATE_ZERO_SHUTTER_LAG = 5
*
* TODO: can set camera features like auto focus, auto flash here
* captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
*/
CaptureRequest.Builder captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
// captureRequestBuilder.set(CaptureRequest.EDGE_MODE,
// CaptureRequest.EDGE_MODE_OFF);
// captureRequestBuilder.set(
// CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
// CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
// captureRequestBuilder.set(
// CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE,
// CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE_OFF);
// captureRequestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE,
// CaptureRequest.NOISE_REDUCTION_MODE_OFF);
// captureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
// CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
//
// captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
// captureRequestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
captureRequestBuilder.addTarget(imageReader.getSurface());
return captureRequestBuilder.build();
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
return null;
}
}
/**
* ImageReader provides a surface for the camera to output what has been captured.
* Upon the image available, call processImage() to process the image as desired.
*/
private long frameTime = 0;
private ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Log.i(TAG, "called ImageReader.OnImageAvailable");
Image img = reader.acquireLatestImage();
if (img != null) {
if( frameTime != 0 )
{
Log.e(TAG, "fps = " + (float)(1000.0 / (float)(SystemClock.elapsedRealtime() - frameTime)) + " fps");
}
frameTime = SystemClock.elapsedRealtime();
img.close();
}
}
};
private void processImage(Image image) {
Mat outputImage = imageToMat(image);
Bitmap bmp = Bitmap.createBitmap(outputImage.cols(), outputImage.rows(), Bitmap.Config.ARGB_8888);
Utils.bitmapToMat(bmp, outputImage);
Point mid = new Point(0, 0);
Point inEnd = new Point(outputImage.cols(), outputImage.rows());
Imgproc.line(outputImage, mid, inEnd, new Scalar(255, 0, 0), 2, Core.LINE_AA, 0);
Utils.matToBitmap(outputImage, bmp);
Intent broadcast = new Intent();
broadcast.setAction("your_load_photo_action");
broadcast.putExtra("BitmapImage", bmp);
sendBroadcast(broadcast);
}
private Mat imageToMat(Image image) {
ByteBuffer buffer;
int rowStride;
int pixelStride;
int width = image.getWidth();
int height = image.getHeight();
int offset = 0;
Image.Plane[] planes = image.getPlanes();
byte[] data = new byte[image.getWidth() * image.getHeight() * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
byte[] rowData = new byte[planes[0].getRowStride()];
for (int i = 0; i < planes.length; i++) {
buffer = planes[i].getBuffer();
rowStride = planes[i].getRowStride();
pixelStride = planes[i].getPixelStride();
int w = (i == 0) ? width : width / 2;
int h = (i == 0) ? height : height / 2;
for (int row = 0; row < h; row++) {
int bytesPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8;
if (pixelStride == bytesPerPixel) {
int length = w * bytesPerPixel;
buffer.get(data, offset, length);
// Advance buffer the remainder of the row stride, unless on the last row.
// Otherwise, this will throw an IllegalArgumentException because the buffer
// doesn't include the last padding.
if (h - row != 1) {
buffer.position(buffer.position() + rowStride - length);
}
offset += length;
} else {
// On the last row only read the width of the image minus the pixel stride
// plus one. Otherwise, this will throw a BufferUnderflowException because the
// buffer doesn't include the last padding.
if (h - row == 1) {
buffer.get(rowData, 0, width - pixelStride + 1);
} else {
buffer.get(rowData, 0, rowStride);
}
for (int col = 0; col < w; col++) {
data[offset++] = rowData[col * pixelStride];
}
}
}
}
// Finally, create the Mat.
Mat mat = new Mat(height + height / 2, width, CV_8UC1);
mat.put(0, 0, data);
return mat;
}
private void stopCamera(Intent intent) {
ResultReceiver resultReceiver = intent.getParcelableExtra(RESULT_RECEIVER);
if (!mRunning) {
resultReceiver.send(RESULT_NOT_RUNNING, null);
return;
}
closeCamera();
resultReceiver.send(RESULT_OK, null);
mRunning = false;
Log.d(TAG, "Service is finished.");
}
/**
* Closes the current {@link CameraDevice}.
*/
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != captureSession) {
captureSession.close();
captureSession = null;
}
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
}
I bumped into this problem recently when I tried to upgrade my AR app from the camera1 to the camera2 API. I used a mid-range device for testing (Meizu S6), which has an Exynos 7872 CPU and a Mali-G71 GPU. What I want to achieve is a steady 30fps AR experience.
But through the migration I found that it's quite tricky to get a decent preview frame rate using the Camera2 API.
I configured my capture request using TEMPLATE_PREVIEW
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
Then I put two surfaces: one for preview, which is a SurfaceTexture at 1280x720, and another ImageReader at 1280x720 for image processing.
mImageReader = ImageReader.newInstance(
mVideoSize.getWidth(),
mVideoSize.getHeight(),
ImageFormat.YUV_420_888,
2);
List<Surface> surfaces =new ArrayList<>();
Surface previewSurface = new Surface(mSurfaceTexture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
Surface frameCaptureSurface = mImageReader.getSurface();
surfaces.add(frameCaptureSurface);
mPreviewBuilder.addTarget(frameCaptureSurface);
mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), captureCallback, mBackgroundHandler);
Everything works as expected: my TextureView gets updated and the frame callback gets called too. Except... the frame rate is about 10 fps, and I haven't even done any image processing yet.
I have experimented with many Camera2 API settings, including SENSOR_FRAME_DURATION and different ImageFormat and size combinations, but none of them improve the frame rate. Yet if I just remove the ImageReader from the output surfaces, the preview gets 30 fps easily!
So I guess the problem is that adding an ImageReader as a Camera2 output surface drastically decreases the preview frame rate, at least in my case. So what is the solution?
My solution is glReadPixels.
I know glReadPixels is one of those evil things, because it copies bytes from the GPU to main memory and also causes OpenGL to flush draw commands, so for the sake of performance we'd better avoid it. But surprisingly, glReadPixels is actually pretty fast and provides a much better frame rate than ImageReader's YUV_420_888 output.
In addition, to reduce the memory overhead, I make another draw call with a smaller framebuffer, like 360x640 instead of the preview's 720p, dedicated to feature detection.
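As a rough sketch of that read-back (my illustration, not the answerer's code; the 360x640 size and the assumption that the downscaled framebuffer is already bound and drawn are mine):
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import android.opengl.GLES20;

// Sketch: read the currently bound (downscaled) framebuffer back to CPU memory.
// Must run on the GL thread, after the draw call has been issued.
int w = 360, h = 640; // assumed feature-detection framebuffer size
ByteBuffer pixels = ByteBuffer.allocateDirect(w * h * 4).order(ByteOrder.nativeOrder());
GLES20.glReadPixels(0, 0, w, h, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
// pixels now holds tightly packed RGBA rows, bottom-up in GL convention.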
Based on the implementation of camera2 in the OpenCV library: I had the same problem, then I noticed this piece of code in the OpenCV source for JavaCamera2View. You need to change the settings of the CaptureRequest.Builder this way:
CaptureRequest.Builder captureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
It changed the fps from 10fps to around 28-30fps for me. It worked with two target surfaces: one for the preview TextureView, the second for the ImageReader:
Surface readerSurface = imageReader.getSurface();
Surface surface = new Surface(surfaceTexture);
captureBuilder.addTarget(surface);
captureBuilder.addTarget(readerSurface);
I cannot post a comment (not enough rep), but I am running into the same issue with a Redmi 6.
If I use a TextureView for previewing the camera output I get around 30 fps, but replacing it with an ImageReader it drops to 8-9 fps. All the camera configs are the same in either case.
Interestingly enough, trying out CameraXBasic showed the same issue: the updates from the camera were sluggish. But android-Camera2Basic (using TextureView) ran without any issues.
Update 1: Tested with lowering the preview size from 1280x720 to 640x480, and as expected saw better performance.
This is what I know after tweaking with it a little: the problem lies in ImageReader's maxImages param. I changed it from 2 to 3 to 56, and it changed the fps quite a lot. What I think happens is that the ImageReader surface we hand to camera2 tends to block the camera from saving frames to its buffers while an Image from ImageReader.OnImageAvailableListener is being processed or hasn't been released. Put differently, the camera wants a buffer but doesn't have enough of them, so by increasing ImageReader's maxImages we give camera2 room to save images.
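To make that concrete, here is a small sketch (mine, not the poster's; backgroundHandler is an assumed background-thread Handler) of an ImageReader with a deeper buffer queue, where each Image is closed as soon as possible so the camera never starves for buffers:
import android.graphics.ImageFormat;
import android.media.Image;
import android.media.ImageReader;

// Sketch: give camera2 more output buffers and release them promptly.
ImageReader reader = ImageReader.newInstance(
        640, 480, ImageFormat.YUV_420_888, 4 /* maxImages: deeper queue */);
reader.setOnImageAvailableListener(r -> {
    Image img = r.acquireLatestImage(); // drops stale frames instead of queueing them
    if (img == null) {
        return;
    }
    try {
        // Copy out only what you need here; keep this block short.
    } finally {
        img.close(); // hand the buffer back to the camera immediately
    }
}, backgroundHandler); // deliver callbacks off the main thread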

mute media player but play graph in android

I am using two media players: the first plays the song with music, and the other plays the vocal track of that song. The vocal player should be muted but still drive the wave graph of the vocal audio. How can I achieve this? Please help me out!
I need to mute the media player while generating the waveform graph from the audio.
Here is my code.
public class MainFinalAllActivity extends Activity {
private Button btnPlay;
// Media Player
private MediaPlayer mp;
private MediaPlayer mSilentPlayer; /* to avoid tunnel player issue */
private MediaPlayer vocalMediaPlayer;
private VisualizerView mVisualizerView;
// Handler to update UI timer, progress bar etc,.
private Handler mHandler = new Handler();
private int currentSongIndex = 0;
private ArrayList<HashMap<String, String>> songsList = new ArrayList<HashMap<String, String>>();
private MediaRecorder myAudioRecorder;
private String outputFile = null;
String vocalPath = "/sdcard/test_v.mp3";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main_final_layout);
// All player buttons
btnPlay = (Button) findViewById(R.id.btnPlay);
songTitleLabel = (TextView) findViewById(R.id.songTitle);
// Mediaplayer
mp = new MediaPlayer();
vocalMediaPlayer = new MediaPlayer();
songsList = SelectedAlbumPlayList.goToFinalPageSongsList;
int songIndex = SelectedAlbumPlayList.songIndex;
mp.setLooping(true);
playSong(songIndex);
String pathOfSelectedSong = songsList.get(songIndex).get("songPath");
// We need to link the visualizer view to the media player so that
// it displays something
mVisualizerView = (VisualizerView) findViewById(R.id.visualizerView);
mVisualizerView.link(vocalMediaPlayer);
//start the line renderer
addLineRenderer();
/**
* Play button click event
* plays a song and changes button to pause image
* pauses a song and changes button to play image
* */
btnPlay.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg0) {
// check for already playing
if (mp.isPlaying()) {
if (mp != null) {
mp.pause();
vocalMediaPlayer.pause();
// Changing button image to play button
btnPlay.setText("Play");
}
} else {
// Resume song
if (mp != null) {
mp.start();
vocalMediaPlayer.start();
// Changing button image to pause button
btnPlay.setText("Pause");
}
}
}
});
}
/**
* Receiving song index from playlist view
* and play the song
*/
@Override
protected void onActivityResult(int requestCode,
int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == 100) {
currentSongIndex = data.getExtras().getInt("songIndex");
// play selected song
playSong(currentSongIndex);
}
}
/**
* Function to play a song
*
* @param songIndex - index of song
*/
public void playSong(int songIndex) {
// Play song
try {
mp.reset();
vocalMediaPlayer.reset();
//vocalMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
mp.setDataSource(songsList.get(songIndex).get("songPath"));
vocalMediaPlayer.setDataSource(vocalPath);
mp.prepare();
vocalMediaPlayer.prepare();
mp.start();
vocalMediaPlayer.start();
//vocalMediaPlayer.setVolume(0,0);
// Displaying Song title
String songTitle = songsList.get(songIndex).get("songTitle");
songTitleLabel.setText(songTitle);
//playVocalSong(vocalPath);
// Changing Button Image to pause image
btnPlay.setText("Pause");
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Update timer on seekbar
* */
public void updateProgressBar() {
mHandler.postDelayed(mUpdateTimeTask, 100);
}
int currentAmplitude;
String TAG = null;
byte[] bytes;
/**
* Background Runnable thread
* */
private Runnable mUpdateTimeTask = new Runnable() {
public void run() {
Bundle b = new Bundle();
Message msg = mHandler.obtainMessage();
if (myAudioRecorder != null) {
int previousValue = currentAmplitude;
currentAmplitude = myAudioRecorder.getMaxAmplitude();
//bytes = currentAmplitude.toByteArray();
//amplitude = mRecorder.getMaxAmplitude();
b.putLong("currentTime", currentAmplitude);
//Log.i("AMPLITUDE", new Integer(currentAmplitude).toString());
} else {
b.putLong("currentTime", 0);
}
msg.setData(b);
mHandler.sendMessage(msg);
mHandler.postDelayed(this, 100);
}
};
private void addLineRenderer()
{
Paint linePaint = new Paint();
linePaint.setStrokeWidth(1f);
linePaint.setAntiAlias(true);
linePaint.setColor(Color.argb(88, 0, 128, 255));
Paint lineFlashPaint = new Paint();
lineFlashPaint.setStrokeWidth(5f);
lineFlashPaint.setAntiAlias(true);
//lineFlashPaint.setColor(Color.argb(188, 255, 255, 255));
lineFlashPaint.setColor(Color.rgb(255,69,0));
//LineRenderer lineRenderer = new LineRenderer(linePaint, lineFlashPaint, true);
LineRenderer lineRenderer = new LineRenderer(linePaint, lineFlashPaint, true);
mVisualizerView.addRenderer(lineRenderer);
}
@Override
public void onDestroy(){
super.onDestroy();
mp.stop();
vocalMediaPlayer.stop();
}
}
//VisualizerView class
/**
* A class that draws visualizations of data received from a
* {@link android.media.audiofx.Visualizer.OnDataCaptureListener#onWaveFormDataCapture } and
* {@link android.media.audiofx.Visualizer.OnDataCaptureListener#onFftDataCapture }
*/
public class VisualizerView extends View {
private static final String TAG = "VisualizerView";
private Handler mHandler = new Handler();
private byte[] mBytes;
private byte[] mFFTBytes;
private Rect mRect = new Rect();
private Visualizer mVisualizer;
private Set<Renderer> mRenderers;
private Paint mFlashPaint = new Paint();
private Paint mFadePaint = new Paint();
public VisualizerView(Context context, AttributeSet attrs, int defStyle)
{
super(context, attrs);
init();
}
public VisualizerView(Context context, AttributeSet attrs)
{
this(context, attrs, 0);
}
public VisualizerView(Context context)
{
this(context, null, 0);
}
private void init() {
mBytes = null;
mFFTBytes = null;
mFlashPaint.setColor(Color.argb(122, 255, 255, 255));
mFadePaint.setColor(Color.argb(238, 255, 255, 255)); // Adjust alpha to change how quickly the image fades
mFadePaint.setXfermode(new PorterDuffXfermode(Mode.MULTIPLY));
mRenderers = new HashSet<Renderer>();
}
/**
* Links the visualizer to a player
* @param player - MediaPlayer instance to link to
*/
public void link(final MediaPlayer player)
{
if(player == null)
{
throw new NullPointerException("Cannot link to null MediaPlayer");
}
// Create the Visualizer object and attach it to our media player.
mVisualizer = new Visualizer(player.getAudioSessionId());
mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
//mVisualizer.setMeasurementMode(Visualizer.MEASUREMENT_MODE_PEAK_RMS);
// Pass through Visualizer data to VisualizerView
Visualizer.OnDataCaptureListener captureListener = new Visualizer.OnDataCaptureListener()
{
@Override
public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes,
int samplingRate)
{
updateVisualizer(bytes);
getDisplay();
player.setVolume(0,0);
}
@Override
public void onFftDataCapture(Visualizer visualizer, byte[] bytes,
int samplingRate)
{
//updateVisualizerFFT(bytes);
}
};
mVisualizer.setDataCaptureListener(captureListener,
Visualizer.getMaxCaptureRate() / 2, true, true);
// Enabled Visualizer and disable when we're done with the stream
mVisualizer.setEnabled(true);
player.setOnCompletionListener(new MediaPlayer.OnCompletionListener()
{
@Override
public void onCompletion(MediaPlayer mediaPlayer)
{
mVisualizer.setEnabled(false);
}
});
}
public void addRenderer(Renderer renderer)
{
if(renderer != null)
{
mRenderers.add(renderer);
}
}
public void clearRenderers()
{
mRenderers.clear();
}
/**
* Call to release the resources used by VisualizerView. Like with the
* MediaPlayer it is good practice to call this method
*/
public void release()
{
mVisualizer.release();
}
//calculating RMS Value from byte array
public int calculateRMSLevel(byte[] audioData) {
// audioData might be buffered data read from a data line
long lSum = 0;
for (int i = 0; i < audioData.length; i++) {
lSum = lSum + audioData[i];
}
double dAvg = lSum / audioData.length;
double sumMeanSquare = 0d;
for (int j = 0; j < audioData.length; j++) {
sumMeanSquare = sumMeanSquare + Math.pow(audioData[j] - dAvg, 2d);
}
double averageMeanSquare = sumMeanSquare / audioData.length;
return (int) (Math.pow(averageMeanSquare, 0.5d) + 0.5);
}
/**
* Pass data to the visualizer. Typically this will be obtained from the
* Android Visualizer.OnDataCaptureListener call back. See
* {@link android.media.audiofx.Visualizer.OnDataCaptureListener#onWaveFormDataCapture }
* @param bytes
*/
public void updateVisualizer(byte[] bytes) {
int t = calculateRMSLevel(bytes);
Visualizer.MeasurementPeakRms measurementPeakRms = new Visualizer.MeasurementPeakRms();
int x = mVisualizer.getMeasurementPeakRms(measurementPeakRms);
mBytes = bytes;
invalidate();
}
/**
* Pass FFT data to the visualizer. Typically this will be obtained from the
* Android Visualizer.OnDataCaptureListener call back. See
* {@link android.media.audiofx.Visualizer.OnDataCaptureListener#onFftDataCapture }
* @param bytes
*/
public void updateVisualizerFFT(byte[] bytes) {
int t = calculateRMSLevel(bytes);
//System.out.println("Amplitude:"+t);
mFFTBytes = bytes;
invalidate();
}
boolean mFlash = false;
/**
* Call this to make the visualizer flash. Useful for flashing at the start
* of a song/loop etc...
*/
public void flash() {
mFlash = true;
invalidate();
}
Bitmap mCanvasBitmap;
Canvas mCanvas;
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
// Create canvas once we're ready to draw
mRect.set(0, 0, getWidth(), getHeight());
if(mCanvasBitmap == null)
{
mCanvasBitmap = Bitmap.createBitmap(canvas.getWidth(), canvas.getHeight(), Config.ARGB_8888);
}
if(mCanvas == null)
{
mCanvas = new Canvas(mCanvasBitmap);
}
if (mBytes != null) {
// Render all audio renderers
AudioData audioData = new AudioData(mBytes);
for(Renderer r : mRenderers)
{
r.render(mCanvas, audioData, mRect);
}
}
if (mFFTBytes != null) {
// Render all FFT renderers
FFTData fftData = new FFTData(mFFTBytes);
for(Renderer r : mRenderers)
{
r.render(mCanvas, fftData, mRect);
}
}
// Fade out old contents
mCanvas.drawPaint(mFadePaint);
if(mFlash)
{
mFlash = false;
mCanvas.drawPaint(mFlashPaint);
}
canvas.drawBitmap(mCanvasBitmap, new Matrix(), null);
}
}
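On the muting itself: instead of calling setVolume(0, 0) on every waveform callback, you can usually mute the vocal player once, right after preparing it (a minimal sketch, not from the original post; note that on some devices the Visualizer may deliver silent data when the session's volume is zero, which may be why the code above keeps re-applying it inside the callback):
// Sketch: mute the vocal player once; the Visualizer stays linked to its
// audio session, so the waveform can keep drawing while nothing is heard.
vocalMediaPlayer.setDataSource(vocalPath);
vocalMediaPlayer.prepare();
vocalMediaPlayer.setVolume(0f, 0f); // mute both channels
vocalMediaPlayer.start();
mVisualizerView.link(vocalMediaPlayer);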

How to play raw h264 produced by MediaCodec encoder?

I'm a bit new when it comes to MediaCodec (and video encoding/decoding in general), so correct me if anything I say here is wrong.
I want to play the raw H264 output of MediaCodec with VLC/ffplay. I need this to play because my end goal is to stream some live video to a computer, and MediaMuxer only produces a file on disk rather than something I can stream with (very) low latency to a desktop. (I'm open to other solutions, but I have not found anything else that fits the latency requirement.)
Here is the code I'm using to encode the video and write it to a file (it's based on the MediaCodec example found here, only with the MediaMuxer part removed):
package com.jackos2500.droidtop;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.os.Environment;
import android.util.Log;
import android.view.Surface;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
public class StreamH264 {
private static final String TAG = "StreamH264";
private static final boolean VERBOSE = true; // lots of logging
// where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission)
private static final File OUTPUT_DIR = Environment.getExternalStorageDirectory();
public static int MEGABIT = 1000 * 1000;
private static final int IFRAME_INTERVAL = 10;
private static final int TEST_R0 = 0;
private static final int TEST_G0 = 136;
private static final int TEST_B0 = 0;
private static final int TEST_R1 = 236;
private static final int TEST_G1 = 50;
private static final int TEST_B1 = 186;
private MediaCodec codec;
private CodecInputSurface inputSurface;
private BufferedOutputStream out;
private MediaCodec.BufferInfo bufferInfo;
public StreamH264() {
}
private void prepareEncoder() throws IOException {
bufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
format.setInteger(MediaFormat.KEY_BIT_RATE, 2 * MEGABIT);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
codec = MediaCodec.createEncoderByType("video/avc");
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
inputSurface = new CodecInputSurface(codec.createInputSurface());
codec.start();
File dst = new File(OUTPUT_DIR, "test.264");
out = new BufferedOutputStream(new FileOutputStream(dst));
}
private void releaseEncoder() throws IOException {
if (VERBOSE) Log.d(TAG, "releasing encoder objects");
if (codec != null) {
codec.stop();
codec.release();
codec = null;
}
if (inputSurface != null) {
inputSurface.release();
inputSurface = null;
}
if (out != null) {
out.flush();
out.close();
out = null;
}
}
public void stream() throws IOException {
try {
prepareEncoder();
inputSurface.makeCurrent();
for (int i = 0; i < (30 * 5); i++) {
// Feed any pending encoder output into the file.
drainEncoder(false);
// Generate a new frame of input.
generateSurfaceFrame(i);
inputSurface.setPresentationTime(computePresentationTimeNsec(i, 30));
// Submit it to the encoder. The eglSwapBuffers call will block if the input
// is full, which would be bad if it stayed full until we dequeued an output
// buffer (which we can't do, since we're stuck here). So long as we fully drain
// the encoder before supplying additional input, the system guarantees that we
// can supply another frame without blocking.
if (VERBOSE) Log.d(TAG, "sending frame " + i + " to encoder");
inputSurface.swapBuffers();
}
// send end-of-stream to encoder, and drain remaining output
drainEncoder(true);
} finally {
// release encoder, muxer, and input Surface
releaseEncoder();
}
}
private void drainEncoder(boolean endOfStream) throws IOException {
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream) {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
codec.signalEndOfInputStream();
}
ByteBuffer[] outputBuffers = codec.getOutputBuffers();
while (true) {
int encoderStatus = codec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
outputBuffers = codec.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
MediaFormat newFormat = codec.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = outputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
bufferInfo.size = 0;
}
if (bufferInfo.size != 0) {
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(bufferInfo.offset);
encodedData.limit(bufferInfo.offset + bufferInfo.size);
byte[] data = new byte[bufferInfo.size];
encodedData.get(data);
out.write(data);
if (VERBOSE) Log.d(TAG, "sent " + bufferInfo.size + " bytes to file");
}
codec.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
break; // out of while
}
}
}
}
private void generateSurfaceFrame(int frameIndex) {
frameIndex %= 8;
int startX, startY;
if (frameIndex < 4) {
// (0,0) is bottom-left in GL
startX = frameIndex * (1280 / 4);
startY = 720 / 2;
} else {
startX = (7 - frameIndex) * (1280 / 4);
startY = 0;
}
GLES20.glClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
GLES20.glScissor(startX, startY, 1280 / 4, 720 / 2);
GLES20.glClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
}
private static long computePresentationTimeNsec(int frameIndex, int frameRate) {
final long ONE_BILLION = 1000000000;
return frameIndex * ONE_BILLION / frameRate;
}
/**
* Holds state associated with a Surface used for MediaCodec encoder input.
* <p>
* The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
* to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
* to the video encoder.
* <p>
* This object owns the Surface -- releasing this will release the Surface too.
*/
private static class CodecInputSurface {
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
private Surface mSurface;
/**
* Creates a CodecInputSurface from a Surface.
*/
public CodecInputSurface(Surface surface) {
if (surface == null) {
throw new NullPointerException();
}
mSurface = surface;
eglSetup();
}
/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
*/
private void eglSetup() {
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
throw new RuntimeException("unable to initialize EGL14");
}
// Configure EGL for recording and OpenGL ES 2.0.
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL_RECORDABLE_ANDROID, 1,
EGL14.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0);
checkEglError("eglCreateContext RGB888+recordable ES2");
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
attrib_list, 0);
checkEglError("eglCreateContext");
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
surfaceAttribs, 0);
checkEglError("eglCreateWindowSurface");
}
/**
* Discards all resources held by this class, notably the EGL context. Also releases the
* Surface that was passed to our constructor.
*/
public void release() {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
}
mSurface.release();
mEGLDisplay = EGL14.EGL_NO_DISPLAY;
mEGLContext = EGL14.EGL_NO_CONTEXT;
mEGLSurface = EGL14.EGL_NO_SURFACE;
mSurface = null;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
checkEglError("eglMakeCurrent");
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*/
public boolean swapBuffers() {
boolean result = EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
checkEglError("eglSwapBuffers");
return result;
}
/**
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
*/
public void setPresentationTime(long nsecs) {
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
checkEglError("eglPresentationTimeANDROID");
}
/**
* Checks for EGL errors. Throws an exception if one is found.
*/
private void checkEglError(String msg) {
int error;
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
}
}
}
}
However, the file produced from this code does not play with VLC or ffplay. Can anyone tell me what I'm doing wrong? I believe it is due to an incorrect format (or total lack) of headers required for the playing of raw h264, as I have had success playing .264 files downloaded from the internet with ffplay. Also, I'm not sure exactly how I'm going to stream this video to a computer, so if somebody could give me some suggestions as to how I might do that, I would be very grateful! Thanks!
You should be able to play back a raw H264 stream (as you wrote, other raw .264 files play back just fine with VLC or ffplay), but you are missing the parameter sets. These are passed in two different ways, and you happen to be missing both. First they are returned in MediaFormat when you get MediaCodec.INFO_OUTPUT_FORMAT_CHANGED (which you don't handle, you just log a message about it), secondly they are returned in a buffer with MediaCodec.BUFFER_FLAG_CODEC_CONFIG set (which you ignore by setting the size to 0). The simplest solution here is to remove the special case handling of MediaCodec.BUFFER_FLAG_CODEC_CONFIG, and it should all work just fine.
The code you've based it on does things this way in order to test all the different ways of doing things - where you copied it from, the parameter sets were carried in the MediaFormat from MediaCodec.INFO_OUTPUT_FORMAT_CHANGED. If you wanted to use that in your case with a raw H264 bytestream, you could write the byte buffers with keys csd-0 and csd-1 from the MediaFormat and keep ignoring the buffers with MediaCodec.BUFFER_FLAG_CODEC_CONFIG set.
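A sketch of that variant against the drainEncoder() above (my code, using the csd-0/csd-1 keys the answer mentions): when INFO_OUTPUT_FORMAT_CHANGED fires, write the SPS/PPS parameter sets to the output once, and keep skipping the BUFFER_FLAG_CODEC_CONFIG buffers:
// Sketch: replacement for the INFO_OUTPUT_FORMAT_CHANGED branch in drainEncoder().
else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    MediaFormat newFormat = codec.getOutputFormat();
    ByteBuffer sps = newFormat.getByteBuffer("csd-0"); // SPS, Annex-B start code included
    ByteBuffer pps = newFormat.getByteBuffer("csd-1"); // PPS, Annex-B start code included
    int spsLen = sps.remaining();
    byte[] config = new byte[spsLen + pps.remaining()];
    sps.get(config, 0, spsLen);
    pps.get(config, spsLen, config.length - spsLen);
    out.write(config); // the raw .264 stream now begins with the parameter sets
}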
You cannot play just raw h264; it does not have any information about the format. You can also find several great examples here. In order to stream, you need to implement some streaming protocol, such as RTSP (in the case of real-time streaming) or the more flexible HLS (if real time is not required).

Playing javacv-ffmpeg decoded audio in Android with Audiotrack

I am developing an Android application in which I need to play an AAC live audio stream coming from a Red5 server.
I have successfully decoded the audio stream using javacv-ffmpeg.
But my problem is how to play the audio from the decoded samples.
I have tried it the following way:
int len = avcodec.avcodec_decode_audio4( audio_c, samples_frame, got_frame, pkt2);
if (len <= 0){
this.pkt2.size(0);
} else {
if (this.got_frame[0] != 0) {
long pts = avutil.av_frame_get_best_effort_timestamp(samples_frame);
int sample_format = samples_frame.format();
int planes = avutil.av_sample_fmt_is_planar(sample_format) != 0 ? samples_frame.channels() : 1;
int data_size = avutil.av_samples_get_buffer_size((IntPointer)null, audio_c.channels(), samples_frame.nb_samples(), audio_c.sample_fmt(), 1) / planes;
if ((samples_buf == null) || (samples_buf.length != planes)) {
samples_ptr = new BytePointer[planes];
samples_buf = new Buffer[planes];
}
BytePointer ptemp = samples_frame.data(0);
BytePointer[] temp_ptr = new BytePointer[1];
temp_ptr[0] = ptemp.capacity(sample_size);
ByteBuffer btemp = ptemp.asBuffer();
byte[] buftemp = new byte[sample_size];
btemp.get(buftemp, 0, buftemp.length);
// play buftemp[] with AudioTrack ...
}
But only noise is heard from the speakers. Does any processing need to be done on the AVFrame we get from decode_audio4(...)?
The incoming audio stream is correctly encoded with the AAC codec.
Any help or suggestions appreciated.
Thanks in advance.
You can use the FFmpegFrameGrabber class to capture the stream, and extract the audio using a FloatBuffer. This is a Java example:
public class PlayVideoAndAudio extends Application
{
private static final Logger LOG = Logger.getLogger(PlayVideoAndAudio.class.getName());
private static final double SC16 = (double) 0x7FFF + 0.4999999999999999;
private static volatile Thread playThread;
public static void main(String[] args)
{
launch(args);
}
@Override
public void start(Stage primaryStage) throws Exception
{
String source = "rtsp://184.72.239.149/vod/mp4:BigBuckBunny_115k.mov";
StackPane root = new StackPane();
ImageView imageView = new ImageView();
root.getChildren().add(imageView);
imageView.fitWidthProperty().bind(primaryStage.widthProperty());
imageView.fitHeightProperty().bind(primaryStage.heightProperty());
Scene scene = new Scene(root, 640, 480);
primaryStage.setTitle("Video + audio");
primaryStage.setScene(scene);
primaryStage.show();
playThread = new Thread(() -> {
try {
FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(source);
grabber.start();
primaryStage.setWidth(grabber.getImageWidth());
primaryStage.setHeight(grabber.getImageHeight());
AudioFormat audioFormat = new AudioFormat(grabber.getSampleRate(), 16, grabber.getAudioChannels(), true, true);
DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
SourceDataLine soundLine = (SourceDataLine) AudioSystem.getLine(info);
soundLine.open(audioFormat);
soundLine.start();
Java2DFrameConverter converter = new Java2DFrameConverter();
ExecutorService executor = Executors.newSingleThreadExecutor();
while (!Thread.interrupted()) {
Frame frame = grabber.grab();
if (frame == null) {
break;
}
if (frame.image != null) {
Image image = SwingFXUtils.toFXImage(converter.convert(frame), null);
Platform.runLater(() -> {
imageView.setImage(image);
});
} else if (frame.samples != null) {
FloatBuffer channelSamplesFloatBuffer = (FloatBuffer) frame.samples[0];
channelSamplesFloatBuffer.rewind();
ByteBuffer outBuffer = ByteBuffer.allocate(channelSamplesFloatBuffer.capacity() * 2);
for (int i = 0; i < channelSamplesFloatBuffer.capacity(); i++) {
short val = (short)((double) channelSamplesFloatBuffer.get(i) * SC16);
outBuffer.putShort(val);
}
/**
* We need this because soundLine.write ignores
* interruptions during writing.
*/
try {
executor.submit(() -> {
soundLine.write(outBuffer.array(), 0, outBuffer.capacity());
outBuffer.clear();
}).get();
} catch (InterruptedException interruptedException) {
Thread.currentThread().interrupt();
}
}
}
executor.shutdownNow();
executor.awaitTermination(10, TimeUnit.SECONDS);
soundLine.stop();
grabber.stop();
grabber.release();
Platform.exit();
} catch (Exception exception) {
LOG.log(Level.SEVERE, null, exception);
System.exit(1);
}
});
playThread.start();
}
@Override
public void stop() throws Exception
{
playThread.interrupt();
}
}
Because the data you are getting in buftemp[] is in the AV_SAMPLE_FMT_FLTP format, you have to convert it to the AV_SAMPLE_FMT_S16 format using SwrContext, and then your problem will be solved.
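If you'd rather stay in Java than set up SwrContext, the same conversion can be done by hand, similar to what the grabber example above does. A minimal sketch (my code; planeL/planeR are assumed FloatBuffer views of the two AV_SAMPLE_FMT_FLTP planes, numSamples is samples_frame.nb_samples(), and sampleRate is the decoder's output rate):
// Sketch: interleave two planar float channels into signed 16-bit PCM
// and feed it to an AudioTrack in streaming mode.
int bufSize = AudioTrack.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
        bufSize, AudioTrack.MODE_STREAM);
track.play();

short[] pcm = new short[numSamples * 2];
for (int i = 0; i < numSamples; i++) {
    float l = Math.max(-1f, Math.min(1f, planeL.get(i))); // clamp to [-1, 1]
    float r = Math.max(-1f, Math.min(1f, planeR.get(i)));
    pcm[2 * i] = (short) (l * 32767);
    pcm[2 * i + 1] = (short) (r * 32767);
}
track.write(pcm, 0, pcm.length);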

Error recording voice at 16 kHz, 16-bit, mono, little-endian WAV file on an Android device [duplicate]

This question already exists:
Closed 10 years ago.
Possible Duplicate:
how to convert or record .wav file in 16khz 16bit mono little-endian?
I want to implement audio recording on an Android device as a 16 kHz, 16-bit, mono, little-endian WAV file.
I have implemented the logic in Android like this, using a class named ExtAudioRecorder.
public class ExtAudioRecorder {
private final static int[] sampleRates = {44100, 22050, 11025, 16000};
public static ExtAudioRecorder getInstanse(Boolean recordingCompressed) {
ExtAudioRecorder result = null;
if(recordingCompressed) {
result = new ExtAudioRecorder(false, AudioSource.MIC, sampleRates[3],
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
}
else {
int i=0;
do {
result = new ExtAudioRecorder(true, AudioSource.MIC, sampleRates[i],
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
} while((++i<sampleRates.length) &
!(result.getState() == ExtAudioRecorder.State.INITIALIZING));
}
return result;
}
/**
* INITIALIZING : recorder is initializing;
* READY : recorder has been initialized, recorder not yet started
* RECORDING : recording
* ERROR : reconstruction needed
* STOPPED: reset needed
*/
public enum State {INITIALIZING, READY, RECORDING, ERROR, STOPPED};
public static final boolean RECORDING_UNCOMPRESSED = true;
public static final boolean RECORDING_COMPRESSED = false;
// The interval in which the recorded samples are output to the file
// Used only in uncompressed mode
private static final int TIMER_INTERVAL = 120;
// Toggles uncompressed recording on/off;
// RECORDING_UNCOMPRESSED / RECORDING_COMPRESSED
private boolean rUncompressed;
// Recorder used for uncompressed recording
private AudioRecord audioRecorder = null;
// Recorder used for compressed recording
private MediaRecorder mediaRecorder = null;
// Stores current amplitude (only in uncompressed mode)
private int cAmplitude = 0;
// Output file path
private String filePath = null;
// Recorder state; see State
private State state;
// File writer (only in uncompressed mode)
private RandomAccessFile randomAccessWriter;
// Number of channels, sample rate, sample size(size in bits), buffer size, audio source, sample size(see AudioFormat)
private short nChannels;
private int sRate;
private short bSamples;
private int bufferSize;
private int aSource;
private int aFormat;
// Number of frames written to file on each output(only in uncompressed mode)
private int framePeriod;
// Buffer for output(only in uncompressed mode)
private byte[] buffer;
// Number of bytes written to file after header(only in uncompressed mode)
// after stop() is called, this size is written to the header/data chunk in the wave file
private int payloadSize;
/**
* Returns the state of the recorder in a RehearsalAudioRecord.State typed object.
* Useful, as no exceptions are thrown.
* @return recorder state
*/
public State getState() { return state; }
/**
* Method used for recording.
*/
private AudioRecord.OnRecordPositionUpdateListener updateListener = new AudioRecord.OnRecordPositionUpdateListener() {
public void onPeriodicNotification(AudioRecord recorder) {
audioRecorder.read(buffer, 0, buffer.length); // Fill buffer
try {
randomAccessWriter.write(buffer); // Write buffer to file
payloadSize += buffer.length;
if (bSamples == 16) {
for (int i=0; i<buffer.length/2; i++) { // 16bit sample size
short curSample = getShort(buffer[i*2], buffer[i*2+1]);
if (curSample > cAmplitude) { // Check amplitude
cAmplitude = curSample;
}
}
}
else { // 8bit sample size
for (int i=0; i<buffer.length; i++) {
if (buffer[i] > cAmplitude) { // Check amplitude
cAmplitude = buffer[i];
}
}
}
}
catch (IOException e) {
Log.e(ExtAudioRecorder.class.getName(),
"Error occured in updateListener, recording is aborted");
//stop();
}
}
public void onMarkerReached(AudioRecord recorder) {
// NOT USED
}
};
/**
 * Constructor. Instantiates a new recorder; in case of compressed recording, the audio parameters can be left as 0.
 * In case of errors, no exception is thrown, but the state is set to ERROR.
 */
public ExtAudioRecorder(boolean uncompressed, int audioSource, int sampleRate,
int channelConfig, int audioFormat) {
try {
rUncompressed = uncompressed;
if (rUncompressed) { // RECORDING_UNCOMPRESSED
if (audioFormat == AudioFormat.ENCODING_PCM_16BIT) {
bSamples = 16;
}
else { bSamples = 8; }
if (channelConfig == AudioFormat.CHANNEL_CONFIGURATION_MONO) {
nChannels = 1;
}
else { nChannels = 2; }
aSource = audioSource;
sRate = sampleRate;
aFormat = audioFormat;
framePeriod = sampleRate * TIMER_INTERVAL / 1000;
bufferSize = framePeriod * 2 * bSamples * nChannels / 8;
if (bufferSize < AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat))
{ // Check to make sure buffer size is not smaller than the smallest allowed one
bufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
// Set frame period and timer interval accordingly
framePeriod = bufferSize / ( 2 * bSamples * nChannels / 8 );
Log.w(ExtAudioRecorder.class.getName(), "Increasing buffer size to " + Integer.toString(bufferSize));
}
audioRecorder = new AudioRecord(audioSource, sampleRate, channelConfig, audioFormat, bufferSize);
if (audioRecorder.getState() != AudioRecord.STATE_INITIALIZED)
throw new Exception("AudioRecord initialization failed");
audioRecorder.setRecordPositionUpdateListener(updateListener);
audioRecorder.setPositionNotificationPeriod(framePeriod);
} else
{ // RECORDING_COMPRESSED
mediaRecorder = new MediaRecorder();
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
}
cAmplitude = 0;
filePath = null;
state = State.INITIALIZING;
} catch (Exception e)
{
if (e.getMessage() != null)
{
Log.e(ExtAudioRecorder.class.getName(), e.getMessage());
}
else
{
Log.e(ExtAudioRecorder.class.getName(), "Unknown error occured while initializing recording");
}
state = State.ERROR;
}
}
/**
 * Sets the output file path; call directly after construction/reset.
 * @param argPath output file path
 */
public void setOutputFile(String argPath)
{
try
{
if (state == State.INITIALIZING)
{
filePath = argPath;
if (!rUncompressed)
{
mediaRecorder.setOutputFile(filePath);
}
}
}
catch (Exception e)
{
if (e.getMessage() != null)
{
Log.e(ExtAudioRecorder.class.getName(), e.getMessage());
}
else
{
Log.e(ExtAudioRecorder.class.getName(), "Unknown error occured while setting output path");
}
state = State.ERROR;
}
}
/**
 * Returns the largest amplitude sampled since the last call to this method.
 * @return the largest amplitude since the last call, or 0 when not in the RECORDING state
 */
public int getMaxAmplitude()
{
if (state == State.RECORDING)
{
if (rUncompressed)
{
int result = cAmplitude;
cAmplitude = 0;
return result;
}
else
{
try
{
return mediaRecorder.getMaxAmplitude();
}
catch (IllegalStateException e)
{
return 0;
}
}
}
else
{
return 0;
}
}
/**
 * Prepares the recorder for recording. If the recorder is not in the INITIALIZING state, or the file path was not set,
 * the recorder is set to the ERROR state, which makes a reconstruction necessary.
 * If uncompressed recording is toggled, the header of the WAVE file is written.
 * In case of an exception, the state is changed to ERROR.
 */
public void prepare()
{
try
{
if (state == State.INITIALIZING)
{
if (rUncompressed)
{
if ((audioRecorder.getState() == AudioRecord.STATE_INITIALIZED) && (filePath != null))
{
// write file header
randomAccessWriter = new RandomAccessFile(filePath, "rw");
randomAccessWriter.setLength(0); // Set file length to 0, to prevent unexpected behavior in case the file already existed
randomAccessWriter.writeBytes("RIFF");
randomAccessWriter.writeInt(0); // Final file size not known yet, write 0
randomAccessWriter.writeBytes("WAVE");
randomAccessWriter.writeBytes("fmt ");
randomAccessWriter.writeInt(Integer.reverseBytes(16)); // Sub-chunk size, 16 for PCM
randomAccessWriter.writeShort(Short.reverseBytes((short) 1)); // AudioFormat, 1 for PCM
randomAccessWriter.writeShort(Short.reverseBytes(nChannels));// Number of channels, 1 for mono, 2 for stereo
randomAccessWriter.writeInt(Integer.reverseBytes(sRate)); // Sample rate
randomAccessWriter.writeInt(Integer.reverseBytes(sRate*bSamples*nChannels/8)); // Byte rate, SampleRate*NumberOfChannels*BitsPerSample/8
randomAccessWriter.writeShort(Short.reverseBytes((short)(nChannels*bSamples/8))); // Block align, NumberOfChannels*BitsPerSample/8
randomAccessWriter.writeShort(Short.reverseBytes(bSamples)); // Bits per sample
randomAccessWriter.writeBytes("data");
randomAccessWriter.writeInt(0); // Data chunk size not known yet, write 0
buffer = new byte[framePeriod*bSamples/8*nChannels];
state = State.READY;
}
else
{
Log.e(ExtAudioRecorder.class.getName(), "prepare() method called on uninitialized recorder");
state = State.ERROR;
}
}
else
{
mediaRecorder.prepare();
state = State.READY;
}
}
else
{
Log.e(ExtAudioRecorder.class.getName(), "prepare() method called on illegal state");
release();
state = State.ERROR;
}
}
catch(Exception e)
{
if (e.getMessage() != null)
{
Log.e(ExtAudioRecorder.class.getName(), e.getMessage());
}
else
{
Log.e(ExtAudioRecorder.class.getName(), "Unknown error occured in prepare()");
}
state = State.ERROR;
}
}
/**
 * Releases the resources associated with this class, and removes the now-unneeded output file when appropriate.
 */
public void release()
{
if (state == State.RECORDING)
{
stop();
}
else
{
if ((state == State.READY) && rUncompressed)
{
try
{
randomAccessWriter.close(); // Remove prepared file
}
catch (IOException e)
{
Log.e(ExtAudioRecorder.class.getName(), "I/O exception occured while closing output file");
}
(new File(filePath)).delete();
}
}
if (rUncompressed)
{
if (audioRecorder != null)
{
audioRecorder.release();
}
}
else
{
if (mediaRecorder != null)
{
mediaRecorder.release();
}
}
}
/**
 * Resets the recorder to the INITIALIZING state, as if it had just been created.
 * If the class was in the RECORDING state, the recording is stopped.
 * In case of exceptions, the class is set to the ERROR state.
 */
public void reset()
{
try
{
if (state != State.ERROR)
{
release();
filePath = null; // Reset file path
cAmplitude = 0; // Reset amplitude
if (rUncompressed)
{
    // nChannels+1 maps the channel count back to the deprecated
    // CHANNEL_CONFIGURATION_* constant (1 -> MONO, 2 -> STEREO)
    audioRecorder = new AudioRecord(aSource, sRate, nChannels+1, aFormat, bufferSize);
    // Re-register the callback; otherwise the recreated recorder never writes samples
    audioRecorder.setRecordPositionUpdateListener(updateListener);
    audioRecorder.setPositionNotificationPeriod(framePeriod);
}
else
{
mediaRecorder = new MediaRecorder();
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
}
state = State.INITIALIZING;
}
}
catch (Exception e)
{
Log.e(ExtAudioRecorder.class.getName(), e.getMessage());
state = State.ERROR;
}
}
/**
 * Starts the recording, and sets the state to RECORDING.
 * Call after prepare().
 */
public void start()
{
if (state == State.READY)
{
if (rUncompressed)
{
payloadSize = 0;
audioRecorder.startRecording();
audioRecorder.read(buffer, 0, buffer.length);
}
else
{
mediaRecorder.start();
}
state = State.RECORDING;
}
else
{
Log.e(ExtAudioRecorder.class.getName(), "start() called on illegal state");
state = State.ERROR;
}
}
/**
 * Stops the recording, and sets the state to STOPPED.
 * In case of further usage, a reset is needed.
 * Also finalizes the WAVE file in case of uncompressed recording.
 */
public void stop()
{
if (state == State.RECORDING)
{
if (rUncompressed)
{
audioRecorder.stop();
try
{
randomAccessWriter.seek(4); // Write size to RIFF header
randomAccessWriter.writeInt(Integer.reverseBytes(36+payloadSize));
randomAccessWriter.seek(40); // Write size to Subchunk2Size field
randomAccessWriter.writeInt(Integer.reverseBytes(payloadSize));
randomAccessWriter.close();
}
catch(IOException e)
{
Log.e(ExtAudioRecorder.class.getName(), "I/O exception occured while closing output file");
state = State.ERROR;
}
}
else
{
mediaRecorder.stop();
}
state = State.STOPPED;
}
else
{
Log.e(ExtAudioRecorder.class.getName(), "stop() called on illegal state");
state = State.ERROR;
}
}
/*
 * Converts a byte[2] to a short, in LITTLE_ENDIAN format
 */
private short getShort(byte argB1, byte argB2)
{
    // Mask the low byte to avoid sign extension when combining the two bytes
    return (short)((argB1 & 0xFF) | (argB2 << 8));
}
}
And I'm using that class in my main activity as shown here. I'm recording sound on one button click and stopping it on another button click.
package com.test.android_recorder;
import android.os.Bundle;
import android.os.Environment;
import android.app.Activity;
import android.view.Menu;
import android.widget.Button;
import android.widget.TextView;
import android.view.View;
import android.view.View.OnClickListener;
public class AndroidRecorder extends Activity {
ExtAudioRecorder extAudioRecorder;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(com.test.android_recorder.R.layout.activity_android_recorder);
Button button = (Button) findViewById(R.id.button1);
Button buttonstop = (Button) findViewById(R.id.buttonstop);
// Create the recorder: getInstanse(true) gives compressed recording (AMR)
extAudioRecorder = ExtAudioRecorder.getInstanse(true);
button.setOnClickListener(new OnClickListener()
{
public void onClick(View v)
{
TextView txt1=(TextView)findViewById(com.test.android_recorder.R.id.textView1);
txt1.setText("Button clciked");
// For uncompressed recording (WAV) use:
//extAudioRecorder = ExtAudioRecorder.getInstanse(false);
extAudioRecorder.setOutputFile(Environment.getExternalStorageDirectory().getAbsolutePath() + "/javarecorder");
extAudioRecorder.prepare();
extAudioRecorder.start();
}
});
buttonstop.setOnClickListener(new OnClickListener()
{
public void onClick(View v)
{
// Stop recording
extAudioRecorder.stop();
extAudioRecorder.release();
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(com.test.android_recorder.R.menu.activity_android_recorder, menu);
return true;
}
}
My problem is that the file now records, but the recorded audio is too noisy and unclear. I want to use this recorded file with Sphinx4 speech recognition, which requires 16 kHz, 16-bit, mono, big-endian audio. When I feed the recorded file to my Sphinx4 speech recognition, it gives me an "unsupported source" error. How can I improve the quality of my WAV file?
Please help me out.
private final static int[] sampleRates = {44100, 22050, 11025, 8000};
None of these sample rates is 16 kHz, so the recorder never captures at the rate Sphinx4 expects. Add 16000 to the array (note that getInstanse() keeps the first rate that initializes, so put it first), or construct the recorder directly, as sketched below.
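A minimal sketch of requesting the Sphinx4 format directly, assuming the ExtAudioRecorder class from the question (the constructor, State enum, and deprecated AudioFormat constants are taken from that code; the ".wav" output path is illustrative, and whether a given device supports 16 kHz capture varies by hardware):
ExtAudioRecorder recorder = new ExtAudioRecorder(
        true,                                    // uncompressed (WAV)
        MediaRecorder.AudioSource.MIC,           // audio source
        16000,                                   // the sample rate Sphinx4 expects
        AudioFormat.CHANNEL_CONFIGURATION_MONO,  // mono
        AudioFormat.ENCODING_PCM_16BIT);         // 16-bit samples
if (recorder.getState() == ExtAudioRecorder.State.INITIALIZING) {
    recorder.setOutputFile(Environment.getExternalStorageDirectory().getAbsolutePath() + "/javarecorder.wav");
    recorder.prepare();
    recorder.start();
} else {
    // 16 kHz capture failed to initialize on this device;
    // record at a supported rate and resample before feeding Sphinx4.
}
Also note that WAV stores PCM samples little-endian by definition, so if your Sphinx4 front end is configured for big-endian input you may need to adjust that configuration as well.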
Edit:
An "illegal state" error often indicates that you don't have the required permission declared in your AndroidManifest.xml.
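For reference, a sketch of the manifest declarations this code relies on (RECORD_AUDIO for capture, and WRITE_EXTERNAL_STORAGE because the file is saved to external storage):
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />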