Mute media player but still draw the waveform graph in Android

I am using two media players: the first plays the song with music, and the second plays the vocal track of that song. The vocal player should be muted, but the waveform graph of the vocal audio should still be drawn. How can I do this? Please help me out!
I need to mute the media player while still generating the waveform graph from its audio.
Here is my code.
public class MainFinalAllActivity extends Activity {
private Button btnPlay;
private TextView songTitleLabel;
// Media Player
private MediaPlayer mp;
private MediaPlayer mSilentPlayer; /* to avoid tunnel player issue */
private MediaPlayer vocalMediaPlayer;
private VisualizerView mVisualizerView;
// Handler to update UI timer, progress bar etc,.
private Handler mHandler = new Handler();
private int currentSongIndex = 0;
private ArrayList<HashMap<String, String>> songsList = new ArrayList<HashMap<String, String>>();
private MediaRecorder myAudioRecorder;
private String outputFile = null;
String vocalPath = "/sdcard/test_v.mp3";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main_final_layout);
// All player buttons
btnPlay = (Button) findViewById(R.id.btnPlay);
songTitleLabel = (TextView) findViewById(R.id.songTitle);
// Mediaplayer
mp = new MediaPlayer();
vocalMediaPlayer = new MediaPlayer();
songsList = SelectedAlbumPlayList.goToFinalPageSongsList;
int songIndex = SelectedAlbumPlayList.songIndex;
mp.setLooping(true);
playSong(songIndex);
String pathOfSelectedSong = songsList.get(songIndex).get("songPath");
// We need to link the visualizer view to the media player so that
// it displays something
mVisualizerView = (VisualizerView) findViewById(R.id.visualizerView);
mVisualizerView.link(vocalMediaPlayer);
//start the line renderer
addLineRenderer();
/**
* Play button click event
* plays a song and changes button to pause image
* pauses a song and changes button to play image
* */
btnPlay.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg0) {
// check for already playing
if (mp != null && mp.isPlaying()) {
mp.pause();
vocalMediaPlayer.pause();
// Changing button image to play button
btnPlay.setText("Play");
} else if (mp != null) {
// Resume song
mp.start();
vocalMediaPlayer.start();
// Changing button image to pause button
btnPlay.setText("Pause");
}
}
});
}
/**
* Receiving song index from playlist view
* and play the song
*/
@Override
protected void onActivityResult(int requestCode,
int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == 100) {
currentSongIndex = data.getExtras().getInt("songIndex");
// play selected song
playSong(currentSongIndex);
}
}
/**
* Function to play a song
*
* @param songIndex - index of song
*/
public void playSong(int songIndex) {
// Play song
try {
mp.reset();
vocalMediaPlayer.reset();
//vocalMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
mp.setDataSource(songsList.get(songIndex).get("songPath"));
vocalMediaPlayer.setDataSource(vocalPath);
mp.prepare();
vocalMediaPlayer.prepare();
mp.start();
vocalMediaPlayer.start();
//vocalMediaPlayer.setVolume(0,0);
// Displaying Song title
String songTitle = songsList.get(songIndex).get("songTitle");
songTitleLabel.setText(songTitle);
//playVocalSong(vocalPath);
// Changing Button Image to pause image
btnPlay.setText("Pause");
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Update timer on seekbar
* */
public void updateProgressBar() {
mHandler.postDelayed(mUpdateTimeTask, 100);
}
int currentAmplitude;
String TAG = null;
byte[] bytes;
/**
* Background Runnable thread
* */
private Runnable mUpdateTimeTask = new Runnable() {
public void run() {
Bundle b = new Bundle();
Message msg = mHandler.obtainMessage();
if (myAudioRecorder != null) {
int previousValue = currentAmplitude;
currentAmplitude = myAudioRecorder.getMaxAmplitude();
//bytes = currentAmplitude.toByteArray();
//amplitude = mRecorder.getMaxAmplitude();
b.putLong("currentTime", currentAmplitude);
//Log.i("AMPLITUDE", new Integer(currentAmplitude).toString());
} else {
b.putLong("currentTime", 0);
}
msg.setData(b);
mHandler.sendMessage(msg);
mHandler.postDelayed(this, 100);
}
};
private void addLineRenderer()
{
Paint linePaint = new Paint();
linePaint.setStrokeWidth(1f);
linePaint.setAntiAlias(true);
linePaint.setColor(Color.argb(88, 0, 128, 255));
Paint lineFlashPaint = new Paint();
lineFlashPaint.setStrokeWidth(5f);
lineFlashPaint.setAntiAlias(true);
//lineFlashPaint.setColor(Color.argb(188, 255, 255, 255));
lineFlashPaint.setColor(Color.rgb(255,69,0));
//LineRenderer lineRenderer = new LineRenderer(linePaint, lineFlashPaint, true);
LineRenderer lineRenderer = new LineRenderer(linePaint, lineFlashPaint, true);
mVisualizerView.addRenderer(lineRenderer);
}
@Override
public void onDestroy(){
super.onDestroy();
mp.stop();
vocalMediaPlayer.stop();
}
}
//VisualizerView class
/**
* A class that draws visualizations of data received from
* {@link android.media.audiofx.Visualizer.OnDataCaptureListener#onWaveFormDataCapture } and
* {@link android.media.audiofx.Visualizer.OnDataCaptureListener#onFftDataCapture }
*/
public class VisualizerView extends View {
private static final String TAG = "VisualizerView";
private Handler mHandler = new Handler();
private byte[] mBytes;
private byte[] mFFTBytes;
private Rect mRect = new Rect();
private Visualizer mVisualizer;
private Set<Renderer> mRenderers;
private Paint mFlashPaint = new Paint();
private Paint mFadePaint = new Paint();
public VisualizerView(Context context, AttributeSet attrs, int defStyle)
{
super(context, attrs, defStyle);
init();
}
public VisualizerView(Context context, AttributeSet attrs)
{
this(context, attrs, 0);
}
public VisualizerView(Context context)
{
this(context, null, 0);
}
private void init() {
mBytes = null;
mFFTBytes = null;
mFlashPaint.setColor(Color.argb(122, 255, 255, 255));
mFadePaint.setColor(Color.argb(238, 255, 255, 255)); // Adjust alpha to change how quickly the image fades
mFadePaint.setXfermode(new PorterDuffXfermode(Mode.MULTIPLY));
mRenderers = new HashSet<Renderer>();
}
/**
* Links the visualizer to a player
* @param player - MediaPlayer instance to link to
*/
public void link(final MediaPlayer player)
{
if(player == null)
{
throw new NullPointerException("Cannot link to null MediaPlayer");
}
// Create the Visualizer object and attach it to our media player.
mVisualizer = new Visualizer(player.getAudioSessionId());
mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
//mVisualizer.setMeasurementMode(Visualizer.MEASUREMENT_MODE_PEAK_RMS);
// Pass through Visualizer data to VisualizerView
Visualizer.OnDataCaptureListener captureListener = new Visualizer.OnDataCaptureListener()
{
@Override
public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes,
int samplingRate)
{
updateVisualizer(bytes);
getDisplay();
player.setVolume(0,0);
}
@Override
public void onFftDataCapture(Visualizer visualizer, byte[] bytes,
int samplingRate)
{
//updateVisualizerFFT(bytes);
}
};
mVisualizer.setDataCaptureListener(captureListener,
Visualizer.getMaxCaptureRate() / 2, true, true);
// Enable the Visualizer and disable it when we're done with the stream
mVisualizer.setEnabled(true);
player.setOnCompletionListener(new MediaPlayer.OnCompletionListener()
{
@Override
public void onCompletion(MediaPlayer mediaPlayer)
{
mVisualizer.setEnabled(false);
}
});
}
public void addRenderer(Renderer renderer)
{
if(renderer != null)
{
mRenderers.add(renderer);
}
}
public void clearRenderers()
{
mRenderers.clear();
}
/**
* Call to release the resources used by VisualizerView. Like with the
* MediaPlayer it is good practice to call this method
*/
public void release()
{
mVisualizer.release();
}
//calculating RMS Value from byte array
public int calculateRMSLevel(byte[] audioData) {
// audioData might be buffered data read from a data line
long lSum = 0;
for (int i = 0; i < audioData.length; i++) {
lSum = lSum + audioData[i];
}
double dAvg = (double) lSum / audioData.length;
double sumMeanSquare = 0d;
for (int j = 0; j < audioData.length; j++) {
sumMeanSquare = sumMeanSquare + Math.pow(audioData[j] - dAvg, 2d);
}
double averageMeanSquare = sumMeanSquare / audioData.length;
return (int) (Math.pow(averageMeanSquare, 0.5d) + 0.5);
}
/**
* Pass data to the visualizer. Typically this will be obtained from the
* Android Visualizer.OnDataCaptureListener call back. See
* {@link android.media.audiofx.Visualizer.OnDataCaptureListener#onWaveFormDataCapture }
* @param bytes
*/
public void updateVisualizer(byte[] bytes) {
int t = calculateRMSLevel(bytes);
Visualizer.MeasurementPeakRms measurementPeakRms = new Visualizer.MeasurementPeakRms();
int x = mVisualizer.getMeasurementPeakRms(measurementPeakRms);
mBytes = bytes;
invalidate();
}
/**
* Pass FFT data to the visualizer. Typically this will be obtained from the
* Android Visualizer.OnDataCaptureListener call back. See
* {@link android.media.audiofx.Visualizer.OnDataCaptureListener#onFftDataCapture }
* @param bytes
*/
public void updateVisualizerFFT(byte[] bytes) {
int t = calculateRMSLevel(bytes);
//System.out.println("Amplitude:"+t);
mFFTBytes = bytes;
invalidate();
}
boolean mFlash = false;
/**
* Call this to make the visualizer flash. Useful for flashing at the start
* of a song/loop etc...
*/
public void flash() {
mFlash = true;
invalidate();
}
Bitmap mCanvasBitmap;
Canvas mCanvas;
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
// Create canvas once we're ready to draw
mRect.set(0, 0, getWidth(), getHeight());
if(mCanvasBitmap == null)
{
mCanvasBitmap = Bitmap.createBitmap(canvas.getWidth(), canvas.getHeight(), Config.ARGB_8888);
}
if(mCanvas == null)
{
mCanvas = new Canvas(mCanvasBitmap);
}
if (mBytes != null) {
// Render all audio renderers
AudioData audioData = new AudioData(mBytes);
for(Renderer r : mRenderers)
{
r.render(mCanvas, audioData, mRect);
}
}
if (mFFTBytes != null) {
// Render all FFT renderers
FFTData fftData = new FFTData(mFFTBytes);
for(Renderer r : mRenderers)
{
r.render(mCanvas, fftData, mRect);
}
}
// Fade out old contents
mCanvas.drawPaint(mFadePaint);
if(mFlash)
{
mFlash = false;
mCanvas.drawPaint(mFlashPaint);
}
canvas.drawBitmap(mCanvasBitmap, new Matrix(), null);
}
}
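What usually solves this: mute the vocal player with setVolume(0f, 0f) right after prepare(), instead of trying to mute it inside the capture callback. The Visualizer is attached to the player's audio session, not to its output volume, so it keeps receiving waveform data. A minimal sketch, assuming the vocalPath and mVisualizerView fields from the code above (exception handling omitted); note that on some devices a fully muted player may capture only silence, in which case a very low non-zero volume such as 0.01f is a common fallback:
    // Sketch: silence the vocal player's output but keep visualizing it.
    MediaPlayer vocalPlayer = new MediaPlayer();
    vocalPlayer.setDataSource(vocalPath);
    vocalPlayer.prepare();
    mVisualizerView.link(vocalPlayer); // Visualizer taps the audio session
    vocalPlayer.setVolume(0f, 0f);     // mutes the output only
    vocalPlayer.start();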

Related

Setting an app to output ALL audio on a single channel

I want to be able to play all audio from one app on only one channel of a device, e.g. only on the left speakers.
I can do this with MediaPlayer playing an mp3 file, like this, to play only on the left speaker:
MediaPlayer mediaPlayer=MediaPlayer.create(this,R.raw.bitter_sweet);
mediaPlayer.setVolume(1.0f, 0f);
mediaPlayer.start();
But I want ALL audio from the app to play on the left speaker, i.e. if I add a WebView and load a YouTube URL, the audio should still play on one channel.
Using ExoPlayer I can achieve this by creating a custom AudioProcessor adapted from the ChannelMappingAudioProcessor:
public class StereoVolumeProcessor implements AudioProcessor {
private int channelCount;
private int sampleRateHz;
private int[] pendingOutputChannels;
private boolean active;
private int[] outputChannels;
private ByteBuffer buffer;
private ByteBuffer outputBuffer;
private boolean inputEnded;
private float[] volume;
private static final int LEFT_SPEAKER = 0;
private static final int RIGHT_SPEAKER = 1;
public StereoVolumeProcessor() {
buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
}
public void setChannelMap(int[] outputChannels) {
pendingOutputChannels = outputChannels;
}
@Override
public boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws UnhandledFormatException {
if(volume == null){
throw new IllegalStateException("volume has not been set! Call setVolume(float left,float right)");
}
boolean outputChannelsChanged = !Arrays.equals(pendingOutputChannels, outputChannels);
outputChannels = pendingOutputChannels;
if (outputChannels == null) {
active = false;
return outputChannelsChanged;
}
if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
}
if (!outputChannelsChanged && this.sampleRateHz == sampleRateHz
&& this.channelCount == channelCount) {
return false;
}
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
active = true;
return true;
}
@Override
public boolean isActive() {
return active;
}
@Override
public int getOutputChannelCount() {
return outputChannels == null ? channelCount : outputChannels.length;
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_16BIT;
}
/**
* Returns the sample rate of audio output by the processor, in hertz. The value may change as a
* result of calling {@link #configure(int, int, int)} and is undefined if the instance is not
* active.
*/
@Override
public int getOutputSampleRateHz() {
return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer inputBuffer) {
int position = inputBuffer.position();
int limit = inputBuffer.limit();
int size = limit - position;
if (buffer.capacity() < size) {
buffer = ByteBuffer.allocateDirect(size).order(ByteOrder.nativeOrder());
} else {
buffer.clear();
}
if(isActive()){
int ch = 0;
for(int i = position;i<limit;i+=2){
short sample = (short) (inputBuffer.getShort(i)* volume[ch++]);
buffer.putShort(sample);
ch%=channelCount;
}
}else{
throw new IllegalStateException();
}
inputBuffer.position(limit);
buffer.flip();
outputBuffer = buffer;
}
@Override
public void queueEndOfStream() {
inputEnded = true;
}
/**
* Sets the volume of right and left channels/speakers
* The values are between 0.0 and 1.0
*
* @param left
* @param right
*/
public void setVolume(float left,float right){
volume = new float[]{left,right};
}
public float getLeftVolume(){
return volume[LEFT_SPEAKER];
}
public float getRightVolume(){
return volume[RIGHT_SPEAKER];
}
@Override
public ByteBuffer getOutput() {
ByteBuffer outputBuffer = this.outputBuffer;
this.outputBuffer = EMPTY_BUFFER;
return outputBuffer;
}
@SuppressWarnings("ReferenceEquality")
@Override
public boolean isEnded() {
return inputEnded && outputBuffer == EMPTY_BUFFER;
}
@Override
public void flush() {
outputBuffer = EMPTY_BUFFER;
inputEnded = false;
}
@Override
public void reset() {
flush();
buffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
outputChannels = null;
active = false;
}
}
Volume can be set with a value between 0.0 and 1.0 on either left, right or both speakers.
Use the processor as follows:
stereoVolumeProcessor = new StereoVolumeProcessor();
stereoVolumeProcessor.setChannelMap(new int[]{0,1});
stereoVolumeProcessor.setVolume(1,0);
RenderersFactory factory = new DefaultRenderersFactory(this){
/**
* Builds an array of {@link AudioProcessor}s that will process PCM audio before output.
*/
@Override
protected AudioProcessor[] buildAudioProcessors() {
return new AudioProcessor[] {stereoVolumeProcessor};
}
};
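For completeness, the factory then needs to be passed in when the player instance is built. A hedged sketch, since the exact ExoPlayerFactory overload differs between ExoPlayer 2.x releases (the player and mediaSource names here are assumptions, not from the original post):
    // Build the player with the custom renderers factory so every audio
    // stream this player renders goes through StereoVolumeProcessor.
    SimpleExoPlayer player = ExoPlayerFactory.newSimpleInstance(
            this, factory, new DefaultTrackSelector());
    player.prepare(mediaSource); // mediaSource built elsewhere
    player.setPlayWhenReady(true);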

OpenCV4Android MediaRecorder.getSurface(): no View display

I want to use the OpenCV4Android sample Tutorial1 to record video.
I used this solution, but my smartphone does not display anything on screen, just a black view. Can anyone help me?
Here is my code:
Tutorial1Activity
public class Tutorial1Activity extends Activity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsJavaCamera = true;
private MenuItem mItemSwitchCamera = null;
//*****************writetoSD***********************//
public FileWriter fw; // = new FileWriter(folder_path, false);
public BufferedWriter bw;// = new BufferedWriter(fw);
boolean first_in = true;
String showTimefile = null;
String showTime = null;
String folder_path = Environment.getExternalStorageDirectory().getAbsolutePath();
String folder_name = "Face Detection Signal";
String folder_pathforfile = null;
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd HH_mm_ss");
SimpleDateFormat sdf_fileintxt = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
//*****************writetoSD***********************//
//---------------MediaRecorder-------------------//
public MediaRecorder mediaRecorder;
Button bt_Record;
boolean isRecord = false;
Handler mThreadHandler;
HandlerThread mHandlerThread;
//---------------MediaRecorder-------------------//
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Tutorial1Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial1_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
//---------------------------------------------------------//
bt_Record = (Button)findViewById(R.id.bt_recorder);
folder_pathforfile = folder_path + File.separator + folder_name
+ File.separator + "opencv" + "_";
CreateSDfolder();
ongetTime();
//---------------------------------------------------------//
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
private void CreateSDfolder() {
String filefolderpath = folder_path + File.separator + folder_name;
File dir = new File(filefolderpath);
if (!dir.exists()){
Log.e("folder", "not exist");
try{
//dir.createNewFile(true);
dir.mkdir();
Log.e("folder", "creat exist");
}catch(Exception e){
Log.e("folder", "creat not exist");
e.printStackTrace();
}
}
else{
Log.e("folder", "exist");
}
}
private void ongetTime() {
Date dt=new Date();
showTime=sdf_fileintxt.format(dt);
showTimefile =sdf.format(dt);
}
private void WritetoSD(String data) {
try {
fw = new FileWriter(folder_pathforfile + showTimefile+".txt", true);
bw = new BufferedWriter(fw);
if (first_in == true) {
first_in = false;
bw.append(showTime);
bw.newLine();
}
bw.append(data);
bw.newLine();
bw.flush();
bw.close();
} catch (IOException e) {
Log.e("WriteToSD", "Write To SD ERROR");
e.printStackTrace();
}
}
public void onRecordSignal (View v){
if(!isRecord){
isRecord = true;
Log.e(TAG, "button click " + isRecord);
bt_Record.setText("Stop");
//new MediaPrepareTask().execute(null, null, null);
if (prepareMediaRecorder()) {
// Camera is available and unlocked, MediaRecorder is prepared,
// now you can start recording
Log.e("debug_mediarecorder", "prepareMediaRecorder in if");
mOpenCvCameraView.setRecorder(mediaRecorder);
mediaRecorder.start();
} else {
// prepare didn't work, release the camera
Log.e("debug_mediarecorder", "prepareMediaRecorder in else");
// mediaRecorder.stop();
releaseMediaRecorder();
}
} else{
isRecord = false;
Log.e(TAG, "button click " + isRecord);
bt_Record.setText("Record");
try {
if(mediaRecorder != null)
mediaRecorder.stop(); // stop the recording
else
Log.e(TAG,"onRecordSignal mediaRecorder is null");
} catch (RuntimeException e) {
// RuntimeException is thrown when stop() is called immediately after start().
// In this case the output file is not properly constructed and should be deleted.
Log.d(TAG, "RuntimeException: stop() is called immediately after start()");
//noinspection ResultOfMethodCallIgnored
}
releaseMediaRecorder(); // release the MediaRecorder object
}
}
public void releaseMediaRecorder() {
Log.e("debug","releaseMediaRecorder");
if (mediaRecorder != null) {
mediaRecorder.reset(); // clear recorder configuration
mediaRecorder.release(); // release the recorder object
mediaRecorder = null;
JavaCameraView.mCamera.lock();
mOpenCvCameraView.releaseRecord();
}
}
private String recordfilepath() {
// TODO Auto-generated method stub
ongetTime();
File sddir = Environment.getExternalStorageDirectory();
File vrdir = new File(sddir, folder_name);
File file = new File(vrdir, showTimefile+"_.mp4");
String filepath = file.getAbsolutePath();
Log.e("debug mediarecorder", filepath);
return filepath;
}
public boolean prepareMediaRecorder() {
// TODO Auto-generated method stub
Log.e("debug mediarecorder", "in prepareMediaRecorder");
mediaRecorder = new MediaRecorder();
try {
JavaCameraView.mCamera.lock();
JavaCameraView.mCamera.unlock();
}catch (RuntimeException e){
Log.e("debug mediarecorder","JavaCameraView.mCamera.unlock() fail");
}
/*mediaRecorder.reset();
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
//mediaRecorder.setPreviewDisplay(CameraBridgeViewBase.mSurfaceHolder.getSurface());
mediaRecorder.setOutputFile(recordfilepath());
//mediaRecorder.setOnInfoListener((MediaRecorder.OnInfoListener) this);
//mediaRecorder.setOnErrorListener((MediaRecorder.OnErrorListener) this);*/
mediaRecorder.reset();
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
mediaRecorder.setProfile(cpHigh);
//mediaRecorder.setOutputFile("out.mp4");
mediaRecorder.setOutputFile(recordfilepath());
mediaRecorder.setVideoSize(mOpenCvCameraView.mFrameWidth, mOpenCvCameraView.mFrameHeight);
//mediaRecorder.setOnInfoListener(this);
//mediaRecorder.setOnErrorListener(this);
try {
mediaRecorder.prepare();
} catch (IllegalStateException e) {
Log.e("debug mediarecorder", "not prepare");
releaseMediaRecorder();
return false;
} catch (IOException e) {
Log.e("debug mediarecorder", "not prepare IOException");
//releaseMediaRecorder();
}
return true;
}
}
CameraBridgeViewBase
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraBridge";
private static final int MAX_UNSPECIFIED = -1;
private static final int STOPPED = 0;
private static final int STARTED = 1;
private int mState = STOPPED;
private Bitmap mCacheBitmap;
private CvCameraViewListener2 mListener;
private boolean mSurfaceExist;
private Object mSyncObject = new Object();
public int mFrameWidth;
public int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected float mScale = 0;
protected int mPreviewFormat = RGBA;
protected int mCameraIndex = CAMERA_ID_ANY;
protected boolean mEnabled;
protected FpsMeter mFpsMeter = null;
public static final int CAMERA_ID_ANY = -1;
public static final int CAMERA_ID_BACK = 99;
public static final int CAMERA_ID_FRONT = 98;
public static final int RGBA = 1;
public static final int GRAY = 2;
//-------------------------------------//
protected MediaRecorder mRecorder;
protected Surface mSurface = null;
public void setRecorder(MediaRecorder rec) {
mRecorder = rec;
//Log.e(TAG,mRecorder.toString());
if (mRecorder != null) {
mSurface = mRecorder.getSurface();
Log.e(TAG,"mRecorder is not null");
Log.e(TAG,"mSurface = "+mSurface.toString());
}
else{
Log.e(TAG,"mRecorder is null");
}
}
public void releaseRecord(){
mSurface.release();
}
//-------------------------------------//
public CameraBridgeViewBase(Context context, int cameraId) {
super(context);
mCameraIndex = cameraId;
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
}
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
int count = attrs.getAttributeCount();
Log.d(TAG, "Attr count: " + Integer.valueOf(count));
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
if (styledAttrs.getBoolean(R.styleable.CameraBridgeViewBase_show_fps, false))
enableFpsMeter();
mCameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
styledAttrs.recycle();
}
/**
* Sets the camera index
* @param cameraIndex new camera index
*/
public void setCameraIndex(int cameraIndex) {
this.mCameraIndex = cameraIndex;
}
public interface CvCameraViewListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned values - is a modified frame which needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(Mat inputFrame);
}
public interface CvCameraViewListener2 {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned values - is a modified frame which needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(CvCameraViewFrame inputFrame);
};
protected class CvCameraViewListenerAdapter implements CvCameraViewListener2 {
public CvCameraViewListenerAdapter(CvCameraViewListener oldStypeListener) {
mOldStyleListener = oldStypeListener;
}
public void onCameraViewStarted(int width, int height) {
mOldStyleListener.onCameraViewStarted(width, height);
}
public void onCameraViewStopped() {
mOldStyleListener.onCameraViewStopped();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat result = null;
switch (mPreviewFormat) {
case RGBA:
result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
break;
case GRAY:
result = mOldStyleListener.onCameraFrame(inputFrame.gray());
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
};
return result;
}
public void setFrameFormat(int format) {
mPreviewFormat = format;
}
private int mPreviewFormat = RGBA;
private CvCameraViewListener mOldStyleListener;
};
/**
* This class interface is abstract representation of single frame from camera for onCameraFrame callback
* Attention: Do not use objects, that represents this interface out of onCameraFrame callback!
*/
public interface CvCameraViewFrame {
/**
* This method returns RGBA Mat with frame
*/
public Mat rgba();
/**
* This method returns single channel gray scale Mat with frame
*/
public Mat gray();
};
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
Log.d(TAG, "call surfaceChanged event");
synchronized(mSyncObject) {
if (!mSurfaceExist) {
mSurfaceExist = true;
checkCurrentState();
} else {
/** Surface changed. We need to stop camera and restart with new parameters */
/* Pretend that old surface has been destroyed */
mSurfaceExist = false;
checkCurrentState();
/* Now use new surface. Say we have it now */
mSurfaceExist = true;
checkCurrentState();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
/* Do nothing. Wait until surfaceChanged delivered */
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(mSyncObject) {
mSurfaceExist = false;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can enable the camera connection.
* The actual onCameraViewStarted callback will be delivered only after both this method is called and surface is available
*/
public void enableView() {
synchronized(mSyncObject) {
mEnabled = true;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can disable camera connection and stop
* the delivery of frames even though the surface view itself is not destroyed and still stays on the scren
*/
public void disableView() {
synchronized(mSyncObject) {
mEnabled = false;
checkCurrentState();
}
}
/**
* This method enables label with fps value on the screen
*/
public void enableFpsMeter() {
if (mFpsMeter == null) {
mFpsMeter = new FpsMeter();
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
}
public void disableFpsMeter() {
mFpsMeter = null;
}
/**
*
* @param listener
*/
public void setCvCameraViewListener(CvCameraViewListener2 listener) {
mListener = listener;
}
public void setCvCameraViewListener(CvCameraViewListener listener) {
CvCameraViewListenerAdapter adapter = new CvCameraViewListenerAdapter(listener);
adapter.setFrameFormat(mPreviewFormat);
mListener = adapter;
}
/**
* This method sets the maximum size that camera frame is allowed to be. When selecting
* size - the biggest size which less or equal the size set will be selected.
* As an example - we set setMaxFrameSize(200,200) and we have 176x152 and 320x240 sizes. The
* preview frame will be selected with 176x152 size.
* This method is useful when need to restrict the size of preview frame for some reason (for example for video recording)
* @param maxWidth - the maximum width allowed for camera frame.
* @param maxHeight - the maximum height allowed for camera frame
*/
public void setMaxFrameSize(int maxWidth, int maxHeight) {
mMaxWidth = maxWidth;
mMaxHeight = maxHeight;
}
public void SetCaptureFormat(int format)
{
mPreviewFormat = format;
if (mListener instanceof CvCameraViewListenerAdapter) {
CvCameraViewListenerAdapter adapter = (CvCameraViewListenerAdapter) mListener;
adapter.setFrameFormat(mPreviewFormat);
}
}
/**
* Called when mSyncObject lock is held
*/
private void checkCurrentState() {
Log.d(TAG, "call checkCurrentState");
int targetState;
if (mEnabled && mSurfaceExist && getVisibility() == VISIBLE) {
targetState = STARTED;
} else {
targetState = STOPPED;
}
if (targetState != mState) {
/* The state change detected. Need to exit the current state and enter target state */
processExitState(mState);
mState = targetState;
processEnterState(mState);
}
}
private void processEnterState(int state) {
Log.d(TAG, "call processEnterState: " + state);
switch(state) {
case STARTED:
onEnterStartedState();
if (mListener != null) {
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
}
break;
case STOPPED:
onEnterStoppedState();
if (mListener != null) {
mListener.onCameraViewStopped();
}
break;
};
}
private void processExitState(int state) {
Log.d(TAG, "call processExitState: " + state);
switch(state) {
case STARTED:
onExitStartedState();
break;
case STOPPED:
onExitStoppedState();
break;
};
}
private void onEnterStoppedState() {
/* nothing to do */
}
private void onExitStoppedState() {
/* nothing to do */
}
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
Log.d(TAG, "call onEnterStartedState");
/* Connect camera */
if (!connectCamera(getWidth(), getHeight())) {
AlertDialog ad = new AlertDialog.Builder(getContext()).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
((Activity) getContext()).finish();
}
});
ad.show();
}
}
private void onExitStartedState() {
disconnectCamera();
if (mCacheBitmap != null) {
mCacheBitmap.recycle();
}
}
/**
* This method shall be called by the subclasses when they have valid
* object and want it to be delivered to external client (via callback) and
* then displayed on the screen.
* @param frame - the current frame to be delivered
*/
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
Canvas canvas;
if (mRecorder != null) {
canvas = mSurface.lockCanvas(null);
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
mSurface.unlockCanvasAndPost(canvas);
}
}
}
/**
* This method shall perform the concrete operation to initialize the camera.
* CONTRACT: as a result of this method variables mFrameWidth and mFrameHeight MUST be
* initialized with the size of the Camera frames that will be delivered to external processor.
* @param width - the width of this SurfaceView
* @param height - the height of this SurfaceView
*/
protected abstract boolean connectCamera(int width, int height);
/**
* Disconnects and release the particular camera object being connected to this surface view.
* Called when syncObject lock is held
*/
protected abstract void disconnectCamera();
// NOTE: On Android 4.1.x the function must be called before SurfaceTextre constructor!
protected void AllocateCache()
{
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}
public interface ListItemAccessor {
public int getWidth(Object obj);
public int getHeight(Object obj);
};
/**
* This helper method can be called by subclasses to select camera preview size.
* It goes over the list of the supported preview sizes and selects the maximum one which
* fits both values set via setMaxFrameSize() and surface frame allocated for this view
* @param supportedSizes
* @param surfaceWidth
* @param surfaceHeight
* @return optimal frame size
*/
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
int calcWidth = 0;
int calcHeight = 0;
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;
for (Object size : supportedSizes) {
int width = accessor.getWidth(size);
int height = accessor.getHeight(size);
if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
if (width >= calcWidth && height >= calcHeight) {
calcWidth = (int) width;
calcHeight = (int) height;
}
}
}
return new Size(calcWidth, calcHeight);
}
}
And I added these permissions in the Manifest:
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.RECORD_VIDEO" />
<uses-permission android:name="android.permission.RECORD_AUDIO"/>

How to record Android audio playing in a headset

The MediaRecorder class in Android is used to record audio from the mic; can anyone tell me how we can record audio that is actually played on the headset? Sounds techy, but yes, that is the thing I am exploring. I was told the "Visualizer" class can record system audio, but as per the documentation it can only be used to visualize audio, and we cannot put a recorder interface there.
Read more : http://developer.android.com/reference/android/media/audiofx/Visualizer.html
Will any of the audio sources below serve the purpose?
int CAMCORDER
int DEFAULT
int MIC
int REMOTE_SUBMIX
int VOICE_CALL
int VOICE_COMMUNICATION
int VOICE_DOWNLINK
int VOICE_RECOGNITION
int VOICE_UPLINK
Has anyone worked with OpenSL ES? I have heard that it too can serve this purpose.
If there are any Android APIs or third-party APIs you have come across, please feel free to share the info. A few blogs also say this can be done at the NDK level. If anyone has worked on it or has code examples, kindly let me know.
Thanks
Example Code to show Michael :
public class VisualizerView extends View {
private static final String TAG = "VisualizerView";
private byte[] mBytes;
private byte[] mFFTBytes;
private Rect mRect = new Rect();
private Visualizer mVisualizer;
private Set<Renderer> mRenderers;
private Paint mFlashPaint = new Paint();
private Paint mFadePaint = new Paint();
private ByteArrayOutputStream buffer = new ByteArrayOutputStream();
public VisualizerView(Context context, AttributeSet attrs, int defStyle)
{
super(context, attrs);
init();
}
public VisualizerView(Context context, AttributeSet attrs)
{
this(context, attrs, 0);
}
public VisualizerView(Context context)
{
this(context, null, 0);
}
private void init() {
mBytes = null;
mFFTBytes = null;
mFlashPaint.setColor(Color.argb(122, 255, 255, 255));
mFadePaint.setColor(Color.argb(238, 255, 255, 255)); // Adjust alpha to change how quickly the image fades
mFadePaint.setXfermode(new PorterDuffXfermode(Mode.MULTIPLY));
mRenderers = new HashSet<Renderer>();
}
/**
* Links the visualizer to a player
* @param player - MediaPlayer instance to link to
*/
public void link(MediaPlayer player)
{
if(player == null)
{
throw new NullPointerException("Cannot link to null MediaPlayer");
}
// Create the Visualizer object and attach it to our media player.
mVisualizer = new Visualizer(player.getAudioSessionId());
mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
// Pass through Visualizer data to VisualizerView
Visualizer.OnDataCaptureListener captureListener = new Visualizer.OnDataCaptureListener()
{
@Override
public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes,
int samplingRate)
{
updateVisualizer(bytes);
//Record
if (bytes.length>-1)
buffer.write(bytes, 0, bytes.length);
//Record ends
}
@Override
public void onFftDataCapture(Visualizer visualizer, byte[] bytes,
int samplingRate)
{
updateVisualizerFFT(bytes);
}
};
mVisualizer.setDataCaptureListener(captureListener,
Visualizer.getMaxCaptureRate() / 2, true, true);
// Enable the Visualizer and disable it when we're done with the stream
mVisualizer.setEnabled(true);
player.setOnCompletionListener(new MediaPlayer.OnCompletionListener()
{
@Override
public void onCompletion(MediaPlayer mediaPlayer)
{
mVisualizer.setEnabled(false);
//Save File
try {
buffer.flush();
} catch (IOException e) {
e.printStackTrace();
}
mBytes = buffer.toByteArray();
try {
buffer.close();
} catch (IOException e) {
e.printStackTrace();
}
mVisualizer.release();
File file = new File(Environment.getExternalStorageDirectory(), "music1.wav");
FileOutputStream fos;
try {
fos = new FileOutputStream(file);
fos.write(mBytes);
fos.flush();
fos.close();
} catch (FileNotFoundException e) {
// handle exception
} catch (IOException e) {
// handle exception
}
//Save File ends
}
});
}
public void addRenderer(Renderer renderer)
{
if(renderer != null)
{
mRenderers.add(renderer);
}
}
public void clearRenderers()
{
mRenderers.clear();
}
/**
* Call to release the resources used by VisualizerView. Like with the
* MediaPlayer it is good practice to call this method
*/
public void release()
{
mVisualizer.release();
}
/**
* Pass data to the visualizer. Typically this will be obtained from the
* Android Visualizer.OnDataCaptureListener call back. See
* {@link Visualizer.OnDataCaptureListener#onWaveFormDataCapture }
* @param bytes
*/
public void updateVisualizer(byte[] bytes) {
mBytes = bytes;
invalidate();
}
/**
* Pass FFT data to the visualizer. Typically this will be obtained from the
* Android Visualizer.OnDataCaptureListener call back. See
* {@link Visualizer.OnDataCaptureListener#onFftDataCapture }
* @param bytes
*/
public void updateVisualizerFFT(byte[] bytes) {
mFFTBytes = bytes;
invalidate();
}
boolean mFlash = false;
/**
* Call this to make the visualizer flash. Useful for flashing at the start
* of a song/loop etc...
*/
public void flash() {
mFlash = true;
invalidate();
}
Bitmap mCanvasBitmap;
Canvas mCanvas;
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
// Create canvas once we're ready to draw
mRect.set(0, 0, getWidth(), getHeight());
if(mCanvasBitmap == null)
{
mCanvasBitmap = Bitmap.createBitmap(canvas.getWidth(), canvas.getHeight(), Config.ARGB_8888);
}
if(mCanvas == null)
{
mCanvas = new Canvas(mCanvasBitmap);
}
if (mBytes != null) {
// Render all audio renderers
AudioData audioData = new AudioData(mBytes);
for(Renderer r : mRenderers)
{
r.render(mCanvas, audioData, mRect);
}
}
if (mFFTBytes != null) {
// Render all FFT renderers
FFTData fftData = new FFTData(mFFTBytes);
for(Renderer r : mRenderers)
{
r.render(mCanvas, fftData, mRect);
}
}
// Fade out old contents
mCanvas.drawPaint(mFadePaint);
if(mFlash)
{
mFlash = false;
mCanvas.drawPaint(mFlashPaint);
}
canvas.drawBitmap(mCanvasBitmap, new Matrix(), null);
}
}
can anyone tell me how can we record audio that is actually played on headset.
You can't, as there's no official support in the Android APIs to do that. Doesn't matter if you use the Java APIs, or the native APIs included in the NDK.
There may be hacks that work on specific devices, if you've got root access, etc, but I'm not going to cover those. If you're interested you can try searching and see what you can come up with.
I was told "Visualizer" class can record system audio but as per documentation it can only be used to visualize audio and we cannot put recorder interface there.
The Visualizer has this method:
public int getWaveForm (byte[] waveform)
Returns a waveform capture of currently playing audio content. The capture consists
in a number of consecutive 8-bit (unsigned) mono PCM samples equal to the capture size
returned by getCaptureSize().
So you can record the currently playing audio using the Visualizer. But as is mentioned in the description above, you'll only get low-quality audio data, because the purpose of this method is to get audio data that you can use for visualization purposes, not for general recording purposes.
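As a rough illustration of that method (and of why it is unsuitable for real recording), here is a hedged sketch that polls getWaveForm() while a MediaPlayer plays. Each call returns a snapshot of the most recent samples, so a polling loop will drop and duplicate data, the output is 8-bit unsigned mono PCM with no WAV header, and Visualizer requires the RECORD_AUDIO permission; the mediaPlayer variable is an assumption:
    // Sketch only: capture low-quality waveform snapshots via Visualizer.
    Visualizer visualizer = new Visualizer(mediaPlayer.getAudioSessionId());
    visualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
    visualizer.setEnabled(true);
    ByteArrayOutputStream capture = new ByteArrayOutputStream();
    byte[] waveform = new byte[visualizer.getCaptureSize()];
    while (mediaPlayer.isPlaying()) {
        if (visualizer.getWaveForm(waveform) == Visualizer.SUCCESS) {
            capture.write(waveform, 0, waveform.length); // 8-bit unsigned mono
        }
    }
    visualizer.setEnabled(false);
    visualizer.release();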

Get/modify frames while recording mp4 with MediaRecorder

I am recording mp4 with the following code. But while I am recording:
1. Is it possible to get the frames of the video in real time, as a byte array or in another format?
2. Is it possible to modify the frames? For example, to invert the colors and make the video look like a negative.
public class MainActivity extends Activity implements OnClickListener, SurfaceHolder.Callback {
MediaRecorder recorder;
SurfaceHolder holder;
boolean recording = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
recorder = new MediaRecorder();
initRecorder();
setContentView(R.layout.activity_main);
SurfaceView cameraView = (SurfaceView) findViewById(R.id.surfaceView1);
holder = cameraView.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
cameraView.setClickable(true);
cameraView.setOnClickListener(this);
}
private void initRecorder() {
recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
recorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);
CamcorderProfile cpHigh = CamcorderProfile
.get(CamcorderProfile.QUALITY_HIGH);
recorder.setProfile(cpHigh);
recorder.setOutputFile(Environment.getExternalStorageDirectory().getPath() + "/video.mp4");
recorder.setMaxDuration(50 * 1000); // 50 seconds
recorder.setMaxFileSize(5 * 1000000); // Approximately 5 megabytes
}
private void prepareRecorder() {
recorder.setPreviewDisplay(holder.getSurface());
try {
recorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
finish();
} catch (IOException e) {
e.printStackTrace();
finish();
}
}
public void onClick(View v) {
if (recording) {
recorder.stop();
recording = false;
// Let's initRecorder so we can record again
initRecorder();
prepareRecorder();
} else {
recording = true;
recorder.start();
}
}
public void surfaceCreated(SurfaceHolder holder) {
prepareRecorder();
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (recording) {
recorder.stop();
recording = false;
}
recorder.release();
finish();
}
}
I used the JavaCV library (which wraps FFmpeg and OpenCV) for editing frames: capture frames in onPreviewFrame and record them through the FFmpeg recorder.
Following is the RecordActivity.java sample.
public class RecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "/mnt/sdcard/stream.flv";
long startTime = 0;
boolean recording = false;
private volatile FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private IplImage yuvIplimage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.main);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
initRecorder();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.main, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG,"init recorder");
if (yuvIplimage == null) {
yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
Log.i(LOG_TAG, "create yuvIplimage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("flv");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
recording = false;
Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
short[] audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = new short[bufferSize];
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
if (bufferReadResult > 0) {
Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
try {
recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG,"audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera","camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
Camera.Parameters camParams = mCamera.getParameters();
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate());
camParams.setPreviewFrameRate(frameRate);
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
/* get video data */
if (yuvIplimage != null && recording) {
yuvIplimage.getByteBuffer().put(data);
Log.v(LOG_TAG,"Writing Frame");
try {
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
@Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
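On question 2 (inverting colors): with the onPreviewFrame approach above you can edit each frame's bytes before handing them to the recorder. A hedged sketch of a rough negative effect on the NV21 preview data; invertFrame is a hypothetical helper, not part of the sample above, meant to be added to CameraView and called on data at the top of onPreviewFrame, before the bytes are put into yuvIplimage. Inverting Y flips brightness, and inverting the interleaved V/U bytes mirrors chroma around its midpoint, which together approximates a color negative:
    // Rough "negative" effect on an NV21 preview frame, in place.
    // NV21 layout: width*height Y bytes, then interleaved V/U bytes.
    private void invertFrame(byte[] nv21) {
        for (int i = 0; i < nv21.length; i++) {
            nv21[i] = (byte) (255 - (nv21[i] & 0xFF));
        }
    }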
Please check the INDE Media Pack; it allows you to add recording, with effects applied, to your app. It has several sample effects like Grayscale, Sepia, and Text Overlay, and you can easily extend them: https://software.intel.com/en-us/articles/intel-inde-media-pack-for-android-tutorials
All effects are OpenGL based, so they do not affect performance and let you keep a good FPS in the final video.

Android MediaPlayer loop has gaps even with OGG format

In Android 4.x my audio looping app has 0.2 to 0.5 second gaps in the sound loops.
I'm using MediaPlayer as my sounds can be quite large (2-3mb each in some cases) and it can run multiple instances at the same time.
I have researched this quite a bit and I see there is a bug for Android 4.x... however, I have tried many workarounds and I can't seem to get any of them working:
Converted all the wavs to OGG using Audacity (quality level 2 to 10, it didn't matter)
Tried setNextMediaPlayer()
Tried to use seekTo(0) on stop and repeat
Tried SoundPool, which has its own bugs
Here's a sample of the code I'm using:
public class SoundPlayer implements OnCompletionListener {
private MediaPlayer mp = null;
public void initPlayer() {
if(mp == null) {
mp = new MediaPlayer();
}
}
public void prepare(Context context, int resource) {
initPlayer();
try{
mp.reset();
Uri uri = Uri.parse("android.resource://com.myapp.app/"+resource);
mp.setDataSource(context,uri);
mp.prepare();
isPrepared = true;
mp.setOnCompletionListener(this);
} catch(Exception e) {
e.printStackTrace();
}
}
// ... etc. (uses typical MediaPlayer methods such as stop(), start(), setLooping(true))
}
I'm not using anything special, so I'm just wondering if anyone knows of a workaround for the looping bug on Android.
Okay, old post, but this solution works. It is kind of a hack; hopefully it helps somebody who comes across this answer.
I am just using three media players: mp1, mp2, and mp3. mp1 gets played, and I set mp2 as its next player via setNextMediaPlayer(). When mp1 ends, I set mp3 as mp2's next player; when mp2 ends, I set mp1 as mp3's next player. Following is the complete code, which handles play, pause, stop, and setting the volume. Just create a BZMediaPlayer object and start it by providing a Uri or a resource id.
import android.content.Context;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnCompletionListener;
import android.net.Uri;
import android.util.Log;
public class BZMediaPlayer {
private Context context;
private Uri uri;
private int resourceId;
// which file is getting played
public static final int URI_PLAYING = 1;
public static final int RESOURCE_PLAYING = 2;
private int filePlaying;
// states of the media player
public static final int STATE_PLAYING = 1;
public static final int STATE_PAUSED = 2;
public static final int STATE_STOP = 3;
// current state
private int state = STATE_STOP;
// current mediaPlayer which is playing
private int mediaPlayerIndex = -1;
// 3 media players
private MediaPlayer mp[] = new MediaPlayer[3];
// current volume
private float vol;
public BZMediaPlayer(Context context) {
this.context = context;
}
/**
* plays the provided uri
* @param uri
*/
public void play(Uri uri) {
this.uri = uri;
// current playing file
filePlaying = URI_PLAYING;
// stop any playing session
stop();
// initialize and set listener to three mediaplayers
for (int i = 0; i < mp.length; i++) {
mp[i] = MediaPlayer.create(context, uri);
mp[i].setOnCompletionListener(completionListener);
}
// set nextMediaPlayers
mp[0].setNextMediaPlayer(mp[1]);
mp[1].setNextMediaPlayer(mp[2]);
// start the first MediaPlayer
mp[0].start();
// set media player index
mediaPlayerIndex = 0;
// set state
state = STATE_PLAYING;
}
/**
* play file from resource
* @param resourceId
*/
public void play(int resourceId) {
this.resourceId = resourceId;
filePlaying = RESOURCE_PLAYING;
stop();
for (int i = 0; i < mp.length; i++) {
mp[i] = MediaPlayer.create(context, resourceId);
mp[i].setOnCompletionListener(completionListener);
}
mp[0].setNextMediaPlayer(mp[1]);
mp[1].setNextMediaPlayer(mp[2]);
mp[0].start();
mediaPlayerIndex = 0;
state = STATE_PLAYING;
}
/**
* play if the mediaplayer is pause
*/
public void play() {
if (state == STATE_PAUSED) {
mp[mediaPlayerIndex].start();
Log.d("BZMediaPlayer", "playing");
state = STATE_PLAYING;
}
}
/**
* pause current playing session
*/
public void pause() {
if (state == STATE_PLAYING) {
mp[mediaPlayerIndex].pause();
Log.d("BZMediaPlayer", "pausing");
state = STATE_PAUSED;
}
}
/**
* get current state
* @return
*/
public int getState() {
return state;
}
/**
* stop every mediaplayer
*/
public void stop() {
for(int i = 0 ; i < mp.length ; i++) {
if (mp[i] != null) {
mp[i].stop();
if(mp[i].isPlaying()) {
mp[i].release();
}
}
}
state = STATE_STOP;
}
/**
* set vol for every mediaplayer
* @param vol
*/
public void setVol(float vol) {
this.vol = vol;
for(int i = 0 ; i < mp.length ; i++) {
if (mp[i] != null && mp[i].isPlaying()) {
mp[i].setVolume(vol, vol);
}
}
}
/**
* internal listener which handles looping thing
*/
private MediaPlayer.OnCompletionListener completionListener = new OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer curmp) {
int mpEnds = 0;
int mpPlaying = 0;
int mpNext = 0;
if(curmp == mp[0]) {
mpEnds = 0;
mpPlaying = 1;
mpNext = 2;
}
else if(curmp == mp[1]) {
mpEnds = 1;
mpPlaying = 2;
mpNext = 0; // corrected, else index out of range
}
else if(curmp == mp[2]) {
mpEnds = 2;
mpPlaying = 0; // corrected, else index out of range
mpNext = 1; // corrected, else index out of range
}
// the player that was set as "next" beforehand is now the one playing
mediaPlayerIndex = mpPlaying;
Log.d("BZMediaPlayer", "Media Player " + mpEnds);
try {
// mp3 is already playing release it
if (mp[mpNext] != null) {
mp[mpNext].release();
}
// if we are playing uri
if (filePlaying == URI_PLAYING) {
mp[mpNext] = MediaPlayer.create(context, uri);
} else {
mp[mpNext] = MediaPlayer.create(context, resourceId);
}
// attach listener to the newly created next player
mp[mpNext].setOnCompletionListener(this);
// set vol
mp[mpNext].setVolume(vol, vol);
// set nextMediaPlayer
mp[mpPlaying].setNextMediaPlayer(mp[mpNext]);
// set nextMediaPlayer vol
mp[mpPlaying].setVolume(vol, vol);
} catch (Exception e) {
e.printStackTrace();
}
}
};
}
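A minimal usage sketch of the class above (the raw resource name is hypothetical). Note that setVol() is worth calling before play(), because the completion listener reapplies the stored vol to each newly created player:
    BZMediaPlayer bzPlayer = new BZMediaPlayer(this);
    bzPlayer.setVol(1.0f);           // store volume before looping starts
    bzPlayer.play(R.raw.loop_sound); // hypothetical raw resource id
    // ... later ...
    bzPlayer.pause();
    bzPlayer.play();                 // resumes only if currently paused
    bzPlayer.stop();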
Hope it will help somebody
Edit: clean code
