Create Visualiser for audio recording in Android

So basically I have created a page with a visualiser and a button for recording audio. I want the visualiser to run for the audio while I am recording it. I guess I need to get the max amplitude of the recording and then dynamically plot the visualiser graph for every small period. Can someone please help me with that, or suggest a better way to do it? With the code I pasted below, the visualiser only runs for the recording after it has been recorded. (A polling sketch along those lines follows the code.)
public class MainActivity extends Activity {
private static final float VISUALIZER_HEIGHT_DIP = 200f;
MediaPlayer mp;
private Visualizer mVisualizer;
private LinearLayout mLinearLayout;
private VisualizerView mVisualizerView;
private TextView mStatusTextView;
private File metapath;
private static String[] META_PATH;
private Button play;
private boolean mStartRecording;
private static final String LOG_TAG = "AudioRecordTest";
private static String mFileName = null;
private MediaRecorder mRecorder = null;
private MediaPlayer mPlayer = null;
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mStatusTextView = (TextView)findViewById(R.id.recording);
if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)
|| Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED_READ_ONLY)) {
// braces added: metapath stays null when storage is unavailable, so both statements must be guarded
metapath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC);
META_PATH = metapath.list(new WavFilter());
}
mLinearLayout = (LinearLayout)findViewById(R.id.subscreen);
play = (Button)findViewById(R.id.startandstop);
mStartRecording=true;
play.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
onRecord(mStartRecording);
if (mStartRecording) {
play.setText("Stop recording");
} else {
play.setText("Start recording");
}
mStartRecording = !mStartRecording;
}
});
setupVisualizerFxAndUI();
mVisualizer.setEnabled(true);
mStatusTextView.setText("Playing audio...");
}
private void onRecord(boolean start) {
if (start) {
startRecording();
} else {
stopRecording();
}
}
private void startRecording() {
mRecorder = new MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mRecorder.setOutputFile(metapath.getPath()+"/myfile.3gp");
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
try {
mRecorder.prepare();
} catch (IOException e) {
Log.e(LOG_TAG, "prepare() failed");
}
mRecorder.start();
}
private void stopRecording() {
mRecorder.stop();
mRecorder.release();
mRecorder = null;
mPlayer = new MediaPlayer();
try {
mPlayer.setDataSource(metapath.getPath()+"/myfile.3gp");
mPlayer.prepare();
mPlayer.start();
} catch (IOException e) {
Log.e(LOG_TAG, "prepare() failed");
}
}
class WavFilter implements FilenameFilter {
public boolean accept(File dir, String name) {
return (name.endsWith(".wav"));
}
}
private void setupVisualizerFxAndUI() {
mVisualizerView = new VisualizerView(this);
mVisualizerView.setLayoutParams(new ViewGroup.LayoutParams(
600,
(int)(VISUALIZER_HEIGHT_DIP * getResources().getDisplayMetrics().density)));
mLinearLayout.addView(mVisualizerView);
mLinearLayout.setGravity(Gravity.CENTER_HORIZONTAL);
mVisualizer = new Visualizer(0);
mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
mVisualizer.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes,
int samplingRate) {
for(int i=0;i<bytes.length;i++)
bytes[i]+=2;
mVisualizerView.updateVisualizer(bytes);
}
public void onFftDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {}
}, Visualizer.getMaxCaptureRate() / 2, true, false);
}
@Override
protected void onPause() {
super.onPause();
if (isFinishing()) {
// release the objects actually in use; the mp field is never assigned in this activity
if (mVisualizer != null) mVisualizer.release();
if (mPlayer != null) { mPlayer.release(); mPlayer = null; }
}
}
class VisualizerView extends View {
private byte[] mBytes;
private float[] mPoints;
private Rect mRect = new Rect();
private Paint mForePaint = new Paint();
public VisualizerView(Context context) {
super(context);
init();
}
private void init() {
mBytes = null;
mForePaint.setStrokeWidth(6f);//set width of the moving wave
mForePaint.setAntiAlias(true);
mForePaint.setColor(Color.RED);//color of the moving wave
}
public void updateVisualizer(byte[] bytes) {
mBytes = bytes;
invalidate();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (mBytes == null) {
return;
}
if (mPoints == null || mPoints.length < mBytes.length * 4) {
mPoints = new float[mBytes.length * 4];
}
mRect.set(0, 0, getWidth(), getHeight());
for (int i = 0; i < mBytes.length - 1; i++) {
mPoints[i * 4] = mRect.width() * i / (mBytes.length - 1);
mPoints[i * 4 + 1] = mRect.height() / 2
+ ((byte) (mBytes[i] + 128)) * (mRect.height() / 2) / 128;
mPoints[i * 4 + 2] = mRect.width() * (i + 1) / (mBytes.length - 1);
mPoints[i * 4 + 3] = mRect.height() / 2
+ ((byte) (mBytes[i + 1] + 128)) * (mRect.height() / 2) / 128;
}
canvas.drawColor(Color.BLACK);
canvas.drawLines(mPoints, mForePaint);
}
}
}
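The Visualizer effect only attaches to audio playback sessions, which is why the view above only reacts once playback starts. To animate the view while recording, one option is exactly what the question suggests: poll MediaRecorder.getMaxAmplitude() on a Handler and push the scaled values into the existing VisualizerView. A minimal sketch, added to MainActivity; the 50 ms polling period and the 128-sample rolling buffer are assumptions for illustration, not values from the original code:

private final Handler mAmpHandler = new Handler();
private final byte[] mAmplitudes = new byte[128]; // rolling window fed to VisualizerView
private int mAmpIndex = 0;

private final Runnable mPollAmplitude = new Runnable() {
    @Override
    public void run() {
        if (mRecorder != null) {
            // getMaxAmplitude() returns the max absolute amplitude (0..32767) since the last call
            int amp = mRecorder.getMaxAmplitude();
            mAmplitudes[mAmpIndex++ % mAmplitudes.length] = (byte) (amp * 127 / 32767);
            mVisualizerView.updateVisualizer(mAmplitudes);
            mAmpHandler.postDelayed(this, 50); // assumed 50 ms period
        }
    }
};

Kick the loop off with mAmpHandler.post(mPollAmplitude) right after mRecorder.start(); it stops by itself once stopRecording() sets mRecorder to null.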

Related

How to Crop the visible portion of landscape video in android?

I am working on panning and cropping a landscape video using TextureView. I am halfway there: I can pan the landscape video from left to right and vice versa, using this example:
https://github.com/crust87/Android-VideoCropView.
FFmpeg can crop a particular portion of the video with this command:
ffmpeg -i /sdcard/videokit/in.mp4 -filter:v crop=720:1088:0:0 -c:a copy /sdcard/videokit/out.mp4
How can I crop only the video that is visible in the TextureView and save it to local storage in Android? crop=720:1088:0:0 uses a hard-coded width and height, and that crops fine. But how can I get the width and height of the portion visible in the TextureView so I can crop it and save the result to local storage?
public class MainActivity extends Activity {
// Layout Components
private FrameLayout top_frame;
// Attributes
private String originalPath;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.check);
top_frame = (FrameLayout)findViewById(R.id.top_frame);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == 1000 && resultCode == RESULT_OK) {
final VideoCropView mVideoCropView = new VideoCropView(this);
mVideoCropView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp) {
mVideoCropView.start();
}
});
top_frame.addView(mVideoCropView);
Uri selectedVideoUri = data.getData();
originalPath = getRealPathFromURI(selectedVideoUri);
mVideoCropView.setVideoURI(selectedVideoUri);
mVideoCropView.seekTo(1);
}
}
public void onButtonLoadClick(View v) {
top_frame.removeAllViews();
Intent lIntent = new Intent(Intent.ACTION_PICK);
lIntent.setType("video/*");
lIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivityForResult(lIntent, 1000);
}
public String getRealPathFromURI(Uri contentUri) { // getting image path from gallery.
Cursor cursor = null;
try {
String[] proj = { MediaStore.Images.Media.DATA };
cursor = getApplicationContext().getContentResolver().query(contentUri, proj, null, null, null);
int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
cursor.moveToFirst();
return cursor.getString(column_index);
} finally {
if (cursor != null) {
cursor.close();
}
}
}
}
VideoCropView:
public class VideoCropView extends TextureView implements MediaPlayerControl {
// Constants
private static final String LOG_TAG = "VideoCropView";
private static final int STATE_ERROR = -1;
private static final int STATE_IDLE = 0;
private static final int STATE_PREPARING = 1;
private static final int STATE_PREPARED = 2;
private static final int STATE_PLAYING = 3;
private static final int STATE_PAUSED = 4;
private static final int STATE_PLAYBACK_COMPLETED = 5;
// MediaPlayer Components
protected Context mContext;
private MediaPlayer mMediaPlayer;
private Surface mSurface;
private OnInfoListener mOnInfoListener;
private OnCompletionListener mOCompletionListener;
private OnErrorListener mOnErrorListener;
private OnPreparedListener mOnPreparedListener;
private OnTranslatePositionListener mOnTranslatePositionListener;
// CropView Components
private Matrix mMatrix;
// MediaPlayer Attributes
protected Uri mUri;
private int mCurrentBufferPercentage;
private int mSeekWhenPrepared;
protected int mVideoWidth;
protected int mVideoHeight;
// CropView Attributes
private float mRatioWidth;
private float mRatioHeight;
private float mPositionX;
private float mPositionY;
private float mBoundX;
private float mBoundY;
private int mRotate;
private float mScaleX;
private float mScaleY;
private float mScale;
// Working Variables
private int mCurrentState = STATE_IDLE;
private int mTargetState = STATE_IDLE;
// Touch Event
// past position x, y and move point
float mPastX;
float mPastY;
float mTouchDistance;
private Context context;
// Constructors
public VideoCropView(final Context context) {
super(context);
mContext = context;
initAttributes();
initVideoView();
}
public VideoCropView(final Context context, final AttributeSet attrs) {
super(context, attrs);
mContext = context;
initAttributes(context, attrs, 0);
initVideoView();
}
public VideoCropView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
mContext = context;
initAttributes(context, attrs, defStyleAttr);
initVideoView();
}
private void initAttributes() {
mRatioWidth = 1;
mRatioHeight = 1;
}
private void initAttributes(Context context, AttributeSet attrs, int defStyleAttr) {
TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.VideoCropView, defStyleAttr, 0);
mRatioWidth = typedArray.getInteger(R.styleable.VideoCropView_ratio_width, 3);
mRatioHeight = typedArray.getInteger(R.styleable.VideoCropView_ratio_height, 4);
}
@Override
protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
int heightLayout;
int widthLayout;
widthLayout = MeasureSpec.getSize(widthMeasureSpec);
heightLayout = MeasureSpec.getSize(heightMeasureSpec);
setMeasuredDimension(widthLayout, heightLayout);
/*if(widthMeasureSpec < heightMeasureSpec){
int width = MeasureSpec.getSize(widthMeasureSpec);
int height = (int) ((width / mRatioWidth) * mRatioHeight);
setMeasuredDimension(width, height);
}else{
int width = MeasureSpec.getSize(widthMeasureSpec);
int height =MeasureSpec.getSize(heightMeasureSpec);
setMeasuredDimension(width, height);
}
*/
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if(mCurrentState == STATE_ERROR || mCurrentState == STATE_IDLE || mCurrentState == STATE_PREPARING) {
return false;
}
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
mPastX = event.getX();
mPastY = event.getY();
mTouchDistance = 0;
case MotionEvent.ACTION_MOVE:
if(mBoundX!=0 || mBoundY!=0) {
float dx = event.getX() - mPastX;
float dy = event.getY() - mPastY;
updateViewPosition(dx, dy);
mPastX = event.getX();
mPastY = event.getY();
mTouchDistance += (Math.abs(dx) + Math.abs(dy));
}
break;
case MotionEvent.ACTION_UP:
if (mTouchDistance < 25) {
if (isPlaying()) {
pause();
} else {
start();
}
}
mTouchDistance = 0;
break;
}
return true;
}
@Override
public void onInitializeAccessibilityEvent(AccessibilityEvent event) {
super.onInitializeAccessibilityEvent(event);
event.setClassName(VideoView.class.getName());
}
@Override
public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) {
super.onInitializeAccessibilityNodeInfo(info);
info.setClassName(VideoView.class.getName());
}
public int resolveAdjustedSize(int desiredSize, int measureSpec) {
Log.d(LOG_TAG, "Resolve called.");
int result = desiredSize;
int specMode = MeasureSpec.getMode(measureSpec);
int specSize = MeasureSpec.getSize(measureSpec);
switch (specMode) {
case MeasureSpec.UNSPECIFIED:
/*
* Parent says we can be as big as we want. Just don't be larger
* than max size imposed on ourselves.
*/
result = desiredSize;
break;
case MeasureSpec.AT_MOST:
/*
* Parent says we can be as big as we want, up to specSize. Don't be
* larger than specSize, and don't be larger than the max size
* imposed on ourselves.
*/
result = Math.min(desiredSize, specSize);
break;
case MeasureSpec.EXACTLY:
// No choice. Do what we are told.
result = specSize;
break;
}
return result;
}
public void initVideoView() {
mVideoHeight = 0;
mVideoWidth = 0;
setFocusable(false);
setSurfaceTextureListener(mSurfaceTextureListener);
mCurrentState = STATE_IDLE;
mTargetState = STATE_IDLE;
}
public void setVideoPath(String path) {
if (path != null) {
setVideoURI(Uri.parse(path));
}
}
public void setVideoURI(Uri pVideoURI) {
mUri = pVideoURI;
mSeekWhenPrepared = 0;
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(mContext, pVideoURI);
// create thumbnail bitmap
if(android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1) {
String rotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
try {
mRotate = Integer.parseInt(rotation);
} catch(NumberFormatException e) {
mRotate = 0;
}
}
retriever.release();
openVideo();
requestLayout();
invalidate();
}
public void stopPlayback() {
if (mMediaPlayer != null) {
mMediaPlayer.stop();
mMediaPlayer.release();
mMediaPlayer = null;
mCurrentState = STATE_IDLE;
mTargetState = STATE_IDLE;
}
}
public void openVideo() {
if ((mUri == null) || (mSurface == null)) {
// not ready for playback just yet, will try again later
return;
}
// Tell the music playback service to pause
// TODO: these constants need to be published somewhere in the
// framework.
Intent intent = new Intent("com.android.music.musicservicecommand");
intent.putExtra("command", "pause");
mContext.sendBroadcast(intent);
// we shouldn't clear the target state, because somebody might have
// called start() previously
release(false);
try {
mMediaPlayer = new MediaPlayer();
// TODO: create SubtitleController in MediaPlayer, but we need
// a context for the subtitle renderers
mMediaPlayer.setOnPreparedListener(mPreparedListener);
mMediaPlayer.setOnVideoSizeChangedListener(mSizeChangedListener);
mMediaPlayer.setOnCompletionListener(mCompletionListener);
mMediaPlayer.setOnErrorListener(mErrorListener);
mMediaPlayer.setOnInfoListener(mInfoListener);
mMediaPlayer.setOnBufferingUpdateListener(mBufferingUpdateListener);
mCurrentBufferPercentage = 0;
mMediaPlayer.setDataSource(mContext, mUri);
mMediaPlayer.setSurface(mSurface);
mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
mMediaPlayer.setScreenOnWhilePlaying(true);
mMediaPlayer.prepareAsync();
mMediaPlayer.setLooping(true);
mCurrentState = STATE_PREPARING;
} catch (IllegalStateException e) {
mCurrentState = STATE_ERROR;
mTargetState = STATE_ERROR;
e.printStackTrace();
} catch (IOException e) {
mCurrentState = STATE_ERROR;
mTargetState = STATE_ERROR;
e.printStackTrace();
}
}
private OnVideoSizeChangedListener mSizeChangedListener = new OnVideoSizeChangedListener() {
@Override
public void onVideoSizeChanged(final MediaPlayer mp, final int width,
final int height) {
mVideoWidth = mp.getVideoWidth();
mVideoHeight = mp.getVideoHeight();
if (mVideoWidth != 0 && mVideoHeight != 0) {
requestLayout();
if(mVideoWidth >= mVideoHeight)
initVideo();
}
}
};
private OnPreparedListener mPreparedListener = new OnPreparedListener() {
@Override
public void onPrepared(final MediaPlayer mp) {
mCurrentState = STATE_PREPARED;
if (mOnPreparedListener != null) {
mOnPreparedListener.onPrepared(mp);
}
mVideoWidth = mp.getVideoWidth();
mVideoHeight = mp.getVideoHeight();
int seekToPosition = mSeekWhenPrepared; // mSeekWhenPrepared may be
// changed after seekTo()
if (seekToPosition != 0) {
seekTo(seekToPosition);
}
if ((mVideoWidth != 0) && (mVideoHeight != 0)) {
if(mVideoWidth >= mVideoHeight) initVideo();
if (mTargetState == STATE_PLAYING) {
start();
}
} else {
// We don't know the video size yet, but should start anyway.
// The video size might be reported to us later.
if (mTargetState == STATE_PLAYING) {
start();
}
}
}
};
private OnCompletionListener mCompletionListener = new OnCompletionListener() {
@Override
public void onCompletion(final MediaPlayer mp) {
mCurrentState = STATE_PLAYBACK_COMPLETED;
mTargetState = STATE_PLAYBACK_COMPLETED;
if (mOCompletionListener != null) {
mOCompletionListener.onCompletion(mMediaPlayer);
}
}
};
private OnInfoListener mInfoListener = new OnInfoListener() {
public boolean onInfo(MediaPlayer mp, int arg1, int arg2) {
if (mOnInfoListener != null) {
mOnInfoListener.onInfo(mp, arg1, arg2);
}
return true;
}
};
private OnErrorListener mErrorListener = new OnErrorListener() {
@Override
public boolean onError(MediaPlayer mp, int framework_err, int impl_err) {
Log.d(LOG_TAG, "Error: " + framework_err + "," + impl_err);
mCurrentState = STATE_ERROR;
mTargetState = STATE_ERROR;
/* If an error handler has been supplied, use it and finish. */
if (mOnErrorListener != null) {
if (mOnErrorListener.onError(mMediaPlayer, framework_err,
impl_err)) {
return true;
}
}
return true;
}
};
private OnBufferingUpdateListener mBufferingUpdateListener = new OnBufferingUpdateListener() {
@Override
public void onBufferingUpdate(final MediaPlayer mp, final int percent) {
mCurrentBufferPercentage = percent;
}
};
public void setOnPreparedListener(OnPreparedListener listener) {
mOnPreparedListener = listener;
}
public void setOnCompletionListener(OnCompletionListener listener) {
mOCompletionListener = listener;
}
public void setOnErrorListener(OnErrorListener listener) {
mOnErrorListener = listener;
}
public void setOnInfoListener(OnInfoListener listener) {
mOnInfoListener = listener;
}
private void release(boolean cleartargetstate) {
if (mMediaPlayer != null) {
mMediaPlayer.reset();
mMediaPlayer.release();
mMediaPlayer = null;
mCurrentState = STATE_IDLE;
if (cleartargetstate) {
mTargetState = STATE_IDLE;
}
}
}
@Override
public void start() {
if (isInPlaybackState()) {
mMediaPlayer.start();
mCurrentState = STATE_PLAYING;
}
mTargetState = STATE_PLAYING;
}
@Override
public void pause() {
if (isInPlaybackState()) {
if (mMediaPlayer.isPlaying()) {
mMediaPlayer.pause();
mCurrentState = STATE_PAUSED;
}
}
mTargetState = STATE_PAUSED;
}
@Override
public int getDuration() {
if (isInPlaybackState()) {
return mMediaPlayer.getDuration();
}
return -1;
}
@Override
public int getCurrentPosition() {
if (isInPlaybackState()) {
return mMediaPlayer.getCurrentPosition();
}
return 0;
}
@Override
public void seekTo(int msec) {
if (isInPlaybackState()) {
mMediaPlayer.seekTo(msec);
mSeekWhenPrepared = 0;
} else {
mSeekWhenPrepared = msec;
}
}
@Override
public boolean isPlaying() {
return isInPlaybackState() && mMediaPlayer.isPlaying();
}
@Override
public int getBufferPercentage() {
if (mMediaPlayer != null) {
return mCurrentBufferPercentage;
}
return 0;
}
private boolean isInPlaybackState() {
return (mMediaPlayer != null && mCurrentState != STATE_ERROR
&& mCurrentState != STATE_IDLE && mCurrentState != STATE_PREPARING);
}
@Override
public boolean canPause() {
return false;
}
@Override
public boolean canSeekBackward() {
return false;
}
@Override
public boolean canSeekForward() {
return false;
}
@Override
public int getAudioSessionId() {
return -1;
}
SurfaceTextureListener mSurfaceTextureListener = new SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
mSurface = new Surface(surface);
openVideo();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
boolean isValidState = (mTargetState == STATE_PLAYING);
boolean hasValidSize = (mVideoWidth == width && mVideoHeight == height);
if (mMediaPlayer != null && isValidState && hasValidSize) {
start();
}
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
if (mMediaPlayer != null) {
mMediaPlayer.reset();
mMediaPlayer.release();
mMediaPlayer = null;
}
if (mSurface != null) {
mSurface.release();
mSurface = null;
}
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture surface) {
}
};
@Override
protected void onVisibilityChanged(View changedView, int visibility) {
super.onVisibilityChanged(changedView, visibility);
if (visibility == View.INVISIBLE || visibility == View.GONE) {
if (isPlaying()) {
stopPlayback();
}
}
}
public float getScale() {
return mScale;
}
private void initVideo() {
try {
int width = getWidth();
int height = getHeight();
mScaleX = 1.0f;
mScaleY = 1.0f;
mPositionX = 0;
mPositionY = 0;
mBoundX = 0;
mBoundY = 0;
mMatrix = new Matrix();
mScaleX = (float) mVideoWidth / width;
mScaleY = (float) mVideoHeight / height;
mBoundX = width - mVideoWidth / mScaleY;
mBoundY = height - mVideoHeight / mScaleX;
if (mScaleX < mScaleY) {
mScale = mScaleX;
mScaleY = mScaleY * (1.0f / mScaleX);
mScaleX = 1.0f;
mBoundX = 0;
} else {
mScale = mScaleY;
mScaleX = mScaleX * (1.0f / mScaleY);
mScaleY = 1.0f;
mBoundY = 0;
}
mMatrix = new Matrix();
mMatrix.setScale(mScaleX, mScaleY);
setTransform(mMatrix);
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
public void updateViewPosition(float x, float y) {
float nextX = mPositionX + x;
float nextY = mPositionY + y;
if(mScaleX == 1.0f) {
x = 0;
} else {
if(nextX > 0) {
x = -mPositionX;
mPositionX = mPositionX + x;
} else if(nextX < mBoundX) {
x = mBoundX - mPositionX;
mPositionX = mPositionX + x;
} else {
mPositionX = nextX;
}
}
if(mScaleY == 1.0f) {
y = 0;
} else {
if(nextY > 0) {
y = -mPositionY;
mPositionY = mPositionY + y;
} else if(nextY < mBoundY) {
y = mBoundY - mPositionY;
mPositionY = mPositionY + y;
} else {
mPositionY = nextY;
}
}
if(mOnTranslatePositionListener != null) {
mOnTranslatePositionListener.onTranslatePosition(mPositionX, mPositionY, mPositionX * -mScale, mPositionY * -mScale);
}
mMatrix.postTranslate(x, y);
setTransform(mMatrix);
invalidate();
}
// public void setOriginalRatio() {
// if(mVideoWidth != 0 && mVideoHeight != 0) {
// int gcd = gcd(mVideoWidth, mVideoHeight);
// setRatio(mVideoWidth / gcd, mVideoHeight / gcd);
// }
// }
public int gcd(int n, int m) {
while (m != 0) {
int t = n % m;
n = m;
m = t;
}
return Math.abs(n);
}
// public void setRatio(float ratioWidth, float ratioHeight) {
// mRatioWidth = ratioWidth;
// mRatioHeight = ratioHeight;
//
// int seek = getCurrentPosition();
//
// requestLayout();
// invalidate();
// openVideo();
//
// seekTo(seek);
// }
public float getRatioWidth() {
return mRatioWidth;
}
public float getRatioHeight() {
return mRatioHeight;
}
public float getRealPositionX() {
return mPositionX * -mScale;
}
public float getRealPositionY() {
return mPositionY * -mScale;
}
public int getVideoWidth() {
return mVideoWidth;
}
public int getVideoHeight() {
return mVideoHeight;
}
public int getRotate() {
return mRotate;
}
public void setOnTranslatePositionListener(OnTranslatePositionListener pOnTranslatePositionListener) {
mOnTranslatePositionListener = pOnTranslatePositionListener;
}
public void setContext(Context context) {
this.context = context;
}
public interface OnTranslatePositionListener {
public abstract void onTranslatePosition(float x, float y, float rx, float ry);
}
}
FFmpeg command for cropping a particular portion:
ffmpeg -i /sdcard/videokit/in.mp4 -filter:v crop=720:1088:0:0 -c:a copy /sdcard/videokit/out.mp4
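The crop rectangle does not have to be hard-coded: the VideoCropView above already exposes the visible region through getRealPositionX()/getRealPositionY(), getScale(), getWidth() and getHeight(). A sketch of turning those into a crop filter string; the width/height arithmetic assumes the visible window covers (view size * scale) source pixels, so verify it against your layout before trusting it:

// Hypothetical helper: build the ffmpeg command for the portion visible in the crop view.
private String buildCropCommand(VideoCropView cropView, String inPath, String outPath) {
    int cropX = (int) cropView.getRealPositionX();
    int cropY = (int) cropView.getRealPositionY();
    // assumed mapping: the visible window corresponds to (view size * scale) pixels of the source
    int cropW = (int) (cropView.getWidth() * cropView.getScale());
    int cropH = (int) (cropView.getHeight() * cropView.getScale());
    return "ffmpeg -i " + inPath
            + " -filter:v crop=" + cropW + ":" + cropH + ":" + cropX + ":" + cropY
            + " -c:a copy " + outPath;
}

The resulting string could replace the hard-coded commandStr that the FFmpeg4Android sample below passes to utilConvertToComplex().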
public class SimpleExample extends Activity {
String workFolder = null;
String demoVideoFolder = null;
String demoVideoPath = null;
String vkLogPath = null;
private boolean commandValidationFailedFlag = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.ffmpeg_demo_client_1);
demoVideoFolder = Environment.getExternalStorageDirectory().getAbsolutePath() + "/videokit/";
demoVideoPath = demoVideoFolder + "in.mp4";
Log.i(Prefs.TAG, getString(R.string.app_name) + " version: " + GeneralUtils.getVersionName(getApplicationContext()) );
workFolder = getApplicationContext().getFilesDir().getAbsolutePath() + "/";
//Log.i(Prefs.TAG, "workFolder: " + workFolder);
vkLogPath = workFolder + "vk.log";
GeneralUtils.copyLicenseFromAssetsToSDIfNeeded(this, workFolder);
GeneralUtils.copyDemoVideoFromAssetsToSDIfNeeded(this, demoVideoFolder);
Button invoke = (Button)findViewById(R.id.invokeButton);
invoke.setOnClickListener(new OnClickListener() {
public void onClick(View v){
Log.i(Prefs.TAG, "run clicked.");
if (GeneralUtils.checkIfFileExistAndNotEmpty(demoVideoPath)) {
new TranscdingBackground(SimpleExample.this).execute();
}
else {
Toast.makeText(getApplicationContext(), demoVideoPath + " not found", Toast.LENGTH_LONG).show();
}
}
});
int rc = GeneralUtils.isLicenseValid(getApplicationContext(), workFolder);
Log.i(Prefs.TAG, "License check RC: " + rc);
}
public class TranscdingBackground extends AsyncTask<String, Integer, Integer>
{
ProgressDialog progressDialog;
Activity _act;
String commandStr;
public TranscdingBackground (Activity act) {
_act = act;
}
@Override
protected void onPreExecute() {
EditText commandText = (EditText)findViewById(R.id.CommandText);
commandStr = commandText.getText().toString();
progressDialog = new ProgressDialog(_act);
progressDialog.setMessage("FFmpeg4Android Transcoding in progress...");
progressDialog.show();
}
protected Integer doInBackground(String... paths) {
Log.i(Prefs.TAG, "doInBackground started...");
// delete previous log
boolean isDeleted = GeneralUtils.deleteFileUtil(workFolder + "/vk.log");
Log.i(Prefs.TAG, "vk deleted: " + isDeleted);
PowerManager powerManager = (PowerManager)_act.getSystemService(Activity.POWER_SERVICE);
WakeLock wakeLock = powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "VK_LOCK");
Log.d(Prefs.TAG, "Acquire wake lock");
wakeLock.acquire();
///////////// Set Command using code (overriding the UI EditText) /////
//commandStr = "ffmpeg -y -i /sdcard/videokit/in.mp4 -strict experimental -s 320x240 -r 30 -aspect 3:4 -ab 48000 -ac 2 -ar 22050 -vcodec mpeg4 -b 2097152 /sdcard/videokit/out.mp4";
//String[] complexCommand = {"ffmpeg", "-y" ,"-i", "/sdcard/videokit/in.mp4","-strict","experimental","-s", "160x120","-r","25", "-vcodec", "mpeg4", "-b", "150k", "-ab","48000", "-ac", "2", "-ar", "22050", "/sdcard/videokit/out.mp4"};
///////////////////////////////////////////////////////////////////////
LoadJNI vk = new LoadJNI();
try {
vk.run(GeneralUtils.utilConvertToComplex(commandStr), workFolder, getApplicationContext());
GeneralUtils.copyFileToFolder(vkLogPath, demoVideoFolder);
} catch (Throwable e) {
Log.e(Prefs.TAG, "vk run exeption.", e);
}
finally {
if (wakeLock.isHeld())
wakeLock.release();
else{
Log.i(Prefs.TAG, "Wake lock is already released, doing nothing");
}
}
Log.i(Prefs.TAG, "doInBackground finished");
return Integer.valueOf(0);
}
protected void onProgressUpdate(Integer... progress) {
}
@Override
protected void onCancelled() {
Log.i(Prefs.TAG, "onCancelled");
//progressDialog.dismiss();
super.onCancelled();
}
@Override
protected void onPostExecute(Integer result) {
Log.i(Prefs.TAG, "onPostExecute");
progressDialog.dismiss();
super.onPostExecute(result);
// finished Toast
String rc = null;
if (commandValidationFailedFlag) {
rc = "Command Validation Failed";
}
else {
rc = GeneralUtils.getReturnCodeFromLog(vkLogPath);
}
final String status = rc;
SimpleExample.this.runOnUiThread(new Runnable() {
public void run() {
Toast.makeText(SimpleExample.this, status, Toast.LENGTH_LONG).show();
if (status.equals("Transcoding Status: Failed")) {
Toast.makeText(SimpleExample.this, "Check: " + vkLogPath + " for more information.", Toast.LENGTH_LONG).show();
}
}
});
}
}
}
Try using getBitmap: TextureView.getBitmap on the VideoCropView returns a bitmap at the wanted resolution. Then you can crop it using Bitmap.createBitmap, like this:
resizedbitmap = Bitmap.createBitmap(bmp, 0, 0, yourwidth, yourheight);
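Expanded slightly, that suggestion looks like the sketch below. Note it captures a single displayed frame (useful for thumbnails or previews), not the whole video stream; desiredWidth and desiredHeight are hypothetical values for the region you want:

// Grab the frame currently shown by the TextureView-based crop view
Bitmap frame = mVideoCropView.getBitmap();
if (frame != null) { // getBitmap() returns null until the surface texture is available
    Bitmap cropped = Bitmap.createBitmap(frame, 0, 0, desiredWidth, desiredHeight);
}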
Alternatively, use this library to crop a video to its visible portion:
Video Trimmer Library

Android visualizer with SurfaceView

I followed the tutorial at http://android-er.blogspot.com/2015/02/create-audio-visualizer-for-mediaplayer.html, where the graphics are drawn by extending the View class. Now I am trying to do the same with a SurfaceView. Here's the code:
MainActivity:
public class MainActivity extends Activity {
VisualizerView mVisualizerView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mVisualizerView= new VisualizerView(this);
setContentView(mVisualizerView);
}
@Override
protected void onPause(){
super.onPause();
if(isFinishing()){
mVisualizerView.clean();
}
}}
SurfaceView class:
public class VisualizerView extends SurfaceView implements SurfaceHolder.Callback {
private byte[] mBytes;
private float[] mPoints;
private Rect mRect = new Rect();
private Paint mForePaint = new Paint();
SurfaceThread _thread;
private MediaPlayer player;
private Visualizer visualizer;
private Context context;
public VisualizerView(Context context) {
super(context);
init();
getHolder().addCallback(this);
_thread = new SurfaceThread(getHolder(), this);
this.context=context;
}
private void init(){
mBytes = null;
mForePaint.setStrokeWidth(1f);
mForePaint.setAntiAlias(true);
mForePaint.setColor(Color.rgb(0,0, 255));
}
public void updateVisualizer(byte[] bytes) {
mBytes = bytes;
// invalidate();
}
private void initAudio(){
player= MediaPlayer.create(context, R.raw.duck);
setupVisualizerFxAndUI();
visualizer.setEnabled(true);
player.setOnCompletionListener(new MediaPlayer.OnCompletionListener(){
public void onCompletion(MediaPlayer mediaPlayer){
visualizer.setEnabled(false);
}
}
);
player.start();
}
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (mBytes == null) {
return;
}
if (mPoints == null || mPoints.length < mBytes.length * 4) {
mPoints = new float[mBytes.length * 4];
}
mRect.set(0, 0, getWidth(), getHeight());
for (int i = 0; i < mBytes.length - 1; i++) {
mPoints[i * 4] = mRect.width() * i / (mBytes.length - 1);
mPoints[i * 4 + 1] = mRect.height() / 2
+ ((byte) (mBytes[i] + 128)) * (mRect.height() / 2) / 128;
mPoints[i * 4 + 2] = mRect.width() * (i + 1) / (mBytes.length - 1);
mPoints[i * 4 + 3] = mRect.height() / 2
+ ((byte) (mBytes[i + 1] + 128)) * (mRect.height() / 2)
/ 128;
}
canvas.drawColor(Color.WHITE);
canvas.drawLines(mPoints, mForePaint);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
_thread.setRunning(true);
_thread.start();
initAudio();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
// TODO Auto-generated method stub
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
boolean retry = true;
_thread.setRunning(false);
while (retry) {
try {
_thread.join();
retry = false;
} catch (InterruptedException e) {
// we will try it again and again...
}
}
}
public void clean(){
if(player!=null){
visualizer.release();
player.release();
player= null;
}
}
private void setupVisualizerFxAndUI(){
Equalizer mEqualizer = new Equalizer(0, player.getAudioSessionId());
mEqualizer.setEnabled(false);
visualizer =new Visualizer(player.getAudioSessionId());
visualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
visualizer.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
@Override
public void onWaveFormDataCapture(Visualizer visualizer, byte[] waveform,
int samplingRate) {
// TODO Auto-generated method stub
updateVisualizer(waveform);
}
@Override
public void onFftDataCapture(Visualizer visualizer, byte[] fft,
int samplingRate) {
// TODO Auto-generated method stub
}
},Visualizer.getMaxCaptureRate()/2,true,false) ;
// Log.d(Tag,String.valueOf(Visualizer.getMaxCaptureRate()));
}}
Thread class:
public class SurfaceThread extends Thread {
private SurfaceHolder surfaceHolder;
private VisualizerView panel;
private boolean run = false;
public SurfaceThread(SurfaceHolder surfaceHolder, VisualizerView panel) {
this.surfaceHolder = surfaceHolder;
this.panel = panel;
}
public void setRunning(boolean run) {
this.run = run;
}
@Override
public void run() {
Canvas c;
while (run) {
c = null;
try {
c = surfaceHolder.lockCanvas(null);
if (c != null) { // lockCanvas() can return null once the surface is destroyed
synchronized (surfaceHolder) {
panel.onDraw(c);
}
}
} finally {
// do this in a finally so that if an exception is thrown
// during the above, we don't leave the Surface in an
// inconsistent state
if (c != null) {
surfaceHolder.unlockCanvasAndPost(c);
}
}
}
}}
The problem is that the app crashes after running it twice. I know that I am not passing the data correctly from:
public void onWaveFormDataCapture(Visualizer visualizer, byte[] waveform,
int samplingRate) {
// TODO Auto-generated method stub
updateVisualizer(waveform);
}
So, how do I manage that?
Also, I am confused about which thread the visualizer/MediaPlayer runs on, and where it should go.
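Two notes on the threading question. OnDataCaptureListener callbacks are delivered on a main-thread handler, while SurfaceThread draws from its own thread, so mBytes is read and written from different threads with no synchronization. A defensive copy is one way to hand the buffer over safely; a sketch (requires import java.util.Arrays):

private final Object mLock = new Object();

public void updateVisualizer(byte[] bytes) {
    synchronized (mLock) {
        // copy so the draw thread never reads a buffer the capture callback is still reusing
        mBytes = Arrays.copyOf(bytes, bytes.length);
    }
}

// in onDraw, take a snapshot under the same lock before drawing
byte[] snapshot;
synchronized (mLock) {
    snapshot = mBytes;
}
if (snapshot == null) return;

As for the crash on restart, the most likely culprit is the null canvas from lockCanvas() in SurfaceThread (guarded in the loop above): once the surface is destroyed the lock fails and onDraw receives null. The MediaPlayer and Visualizer run on their own internal threads and can stay where they are.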

JavaCV video recorder orientation is not proper in portrait mode

Hi, I am using https://github.com/bytedeco/javacv/ for recording video.
In landscape mode the orientation is fine, but when I change the orientation to portrait mode the video is rotated -90 degrees.
Does anybody have an idea what I may be doing wrong? Here is the code.
package org.bytedeco.javacv_android_example.record;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacpp.opencv_imgproc;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.javacv_android_example.R;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class RecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
/* The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
long startTime = 0;
boolean recording = false;
volatile boolean runAudioThread = true;
Frame[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;
private PowerManager.WakeLock mWakeLock;
private File ffmpeg_link = new File(Environment.getExternalStorageDirectory(), "stream.mp4");
private FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private Frame yuvImage = null;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
OpenCVFrameConverter.ToIplImage converter = new OpenCVFrameConverter.ToIplImage();
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
setContentView(R.layout.activity_record);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
}
@Override
protected void onResume() {
super.onResume();
if(mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if(mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if(cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if(mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if(keyCode == KeyEvent.KEYCODE_BACK) {
if(recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.activity_record, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if(1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "camera open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "camera preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG, "init recorder");
if(RECORD_LENGTH > 0) {
imagesIndex = 0;
images = new Frame[RECORD_LENGTH * frameRate];
timestamps = new long[images.length];
for(int i = 0; i < images.length; i++) {
images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
timestamps[i] = -1;
}
} else if(yuvImage == null) {
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
Log.i(LOG_TAG, "create yuvImage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch(InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if(recorder != null && recording) {
if(RECORD_LENGTH > 0) {
Log.v(LOG_TAG, "Writing frames");
try {
int firstIndex = imagesIndex % samples.length;
int lastIndex = (imagesIndex - 1) % images.length;
if(imagesIndex <= images.length) {
firstIndex = 0;
lastIndex = imagesIndex - 1;
}
if((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
startTime = 0;
}
if(lastIndex < firstIndex) {
lastIndex += images.length;
}
for(int i = firstIndex; i <= lastIndex; i++) {
long t = timestamps[i % timestamps.length] - startTime;
if(t >= 0) {
if(t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(images[i % images.length]);
}
}
firstIndex = samplesIndex % samples.length;
lastIndex = (samplesIndex - 1) % samples.length;
if(samplesIndex <= samples.length) {
firstIndex = 0;
lastIndex = samplesIndex - 1;
}
if(lastIndex < firstIndex) {
lastIndex += samples.length;
}
for(int i = firstIndex; i <= lastIndex; i++) {
recorder.recordSamples(samples[i % samples.length]);
}
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
recording = false;
Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public void onClick(View v) {
if(!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
if(RECORD_LENGTH > 0) {
samplesIndex = 0;
samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
for(int i = 0; i < samples.length; i++) {
samples[i] = ShortBuffer.allocate(bufferSize);
}
} else {
audioData = ShortBuffer.allocate(bufferSize);
}
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while(runAudioThread) {
if(RECORD_LENGTH > 0) {
audioData = samples[samplesIndex++ % samples.length];
audioData.position(0).limit(0);
}
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if(bufferReadResult > 0) {
Log.v(LOG_TAG, "bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when this thread starts, it never gets set according to this if statement...!!!
// Why? Good question...
if(recording) {
if(RECORD_LENGTH <= 0) {
try {
recorder.recordSamples(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}
Log.v(LOG_TAG, "AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if(audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG, "audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera", "camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch(IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters camParams = mCamera.getParameters();
List<Camera.Size> sizes = camParams.getSupportedPreviewSizes();
// Sort the list in ascending order
Collections.sort(sizes, new Comparator<Camera.Size>() {
public int compare(final Camera.Size a, final Camera.Size b) {
return a.width * a.height - b.width * b.height;
}
});
// Pick the first preview size that is equal or bigger, or pick the last (biggest) option if we cannot
// reach the initial settings of imageWidth/imageHeight.
for(int i = 0; i < sizes.size(); i++) {
if((sizes.get(i).width >= imageWidth && sizes.get(i).height >= imageHeight) || i == sizes.size() - 1) {
imageWidth = sizes.get(i).width;
imageHeight = sizes.get(i).height;
Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth + "x" + imageHeight);
break;
}
}
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG, "Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
camParams.setPreviewFrameRate(frameRate);
Log.v(LOG_TAG, "Preview Framerate: " + camParams.getPreviewFrameRate());
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch(RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if(!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if(isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if(audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
if(RECORD_LENGTH > 0) {
int i = imagesIndex++ % images.length;
yuvImage = images[i];
timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
}
/* get video data */
if(yuvImage != null && recording) {
((ByteBuffer) yuvImage.image[0].position(0)).put(data);
if(RECORD_LENGTH <= 0) {
try {
Log.v(LOG_TAG, "Writing Frame");
long t = 1000 * (System.currentTimeMillis() - startTime);
if(t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
opencv_core.IplImage toBeRotated = converter.convert(yuvImage);
toBeRotated = rotate(toBeRotated, -90);
Frame newFrame = converter.convert(toBeRotated);
opencv_core.cvReleaseImage(toBeRotated);
recorder.record(newFrame);
// recorder.record(yuvImage);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}
public static opencv_core.IplImage rotate(opencv_core.IplImage image, double angle) {
opencv_core.IplImage copy = opencv_core.cvCloneImage(image);
double radians = Math.toRadians(angle);
double sin = Math.abs(Math.sin(radians));
double cos = Math.abs(Math.cos(radians));
int newWidth = (int) (copy.width() * cos + copy.height() * sin);
int newHeight = (int) (copy.width() * sin + copy.height() * cos);
int[] newWidthHeight = {newWidth, newHeight};
opencv_core.CvSize size = new opencv_core.CvSize(copy.height(), copy.width());
opencv_core.IplImage rotatedImage = opencv_core.cvCreateImage(size, copy.depth(), copy.nChannels());
opencv_core.CvMat mapMatrix = opencv_core.cvCreateMat( 2, 3, opencv_core.CV_32FC1 );
//Define Mid Point
opencv_core.CvPoint2D32f centerPoint = new opencv_core.CvPoint2D32f();
centerPoint.x(copy.width()/2);
centerPoint.y(copy.height()/2); // the rotation center's y coordinate should use the height; the original used width here
//Get Rotational Matrix
opencv_imgproc.cv2DRotationMatrix(centerPoint, angle, 1, mapMatrix);
opencv_imgproc.cvWarpAffine(copy, rotatedImage, mapMatrix, opencv_imgproc.CV_INTER_CUBIC + opencv_imgproc.CV_WARP_FILL_OUTLIERS, opencv_core.cvScalarAll(170));
opencv_core.cvReleaseImage(copy);
opencv_core.cvReleaseMat(mapMatrix);
return rotatedImage;
}
}
You could either do something like recorder.setVideoMetadata("rotate", "90") or transform the images with something like this:
FFmpegFrameFilter filter = new FFmpegFrameFilter("transpose=cclock_flip", imageWidth, imageHeight);
filter.push(frame);
Frame frame2;
while ((frame2 = filter.pull()) != null) {
recorder.record(frame2);
}
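If you take the filter route, an FFmpegFrameFilter generally has to be started before frames are pushed and stopped afterwards; a fuller sketch of the lifecycle, using the same JavaCV API as the snippet above but wrapped in the exception handling those calls require:

FFmpegFrameFilter filter = new FFmpegFrameFilter("transpose=cclock_flip", imageWidth, imageHeight);
try {
    filter.start();                 // open the filter graph once, before recording begins
    // per captured frame:
    filter.push(frame);
    Frame frame2;
    while ((frame2 = filter.pull()) != null) {
        recorder.record(frame2);
    }
    // when recording finishes:
    filter.stop();
    filter.release();
} catch (FrameFilter.Exception e) {
    e.printStackTrace();
}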

MediaPlayer freezes when playing video

I use MediaPlayer to play videos with GPUImage effects applied to them. On certain devices, after a short while (a few seconds or more, seemingly at random), the video stops playing (the image remains stuck) but the audio keeps playing most of the time. I render the MediaPlayer into a GLSurfaceView so I can apply GPUImage effects on top of the video.
Here is the custom view:
public class FilteredVideoView extends GLSurfaceView {
private MediaPlayer mMediaPlayer = null;
private long time = 0;
private MyCustomRenderer renderer;
private boolean mediaPlayerInitialized = false;
private String videoPath;
public FilteredVideoView(Context context) {
super(context);
setWillNotDraw(false);
init();
//startFilterChangeThread();
}
public void setFilter(MyGPUImageFilter filter) {
renderer.setFilter(filter);
}
public void stopVideo() {
mMediaPlayer.stop();
}
public void prepareVideo() {
try {
mMediaPlayer.prepare();
} catch (Exception e) {
}
}
public void playVideo() {
initMediaPlayer();
mMediaPlayer.seekTo(0);
mMediaPlayer.start();
}
private void init() {
setEGLContextClientVersion(2);
getHolder().setFormat(PixelFormat.TRANSLUCENT);
setEGLConfigChooser(8, 8, 8, 8, 16, 0);
mMediaPlayer = new MediaPlayer();
//initMediaPlayer();
renderer = new MyCustomRenderer(new MyNoFilterFilter(), mMediaPlayer);
setRenderer(renderer);
}
private void initMediaPlayer() {
if (mediaPlayerInitialized) {
return;
}
mediaPlayerInitialized = true;
try {
FileInputStream fi = new FileInputStream(new File(videoPath));
mMediaPlayer.setDataSource(fi.getFD());
mMediaPlayer.prepare();
mMediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
onVideoPlaybackFinished();
}
});
} catch (Exception e) {
e.printStackTrace();
}
}
void onVideoPlaybackFinished() {
}
@Override
protected void onDetachedFromWindow() {
super.onDetachedFromWindow();
if (mMediaPlayer != null) {
mMediaPlayer.stop();
mMediaPlayer.release();
}
}
}
And here is the renderer:
public class MyCustomRenderer implements Renderer, SurfaceTexture.OnFrameAvailableListener {
public static final int NO_IMAGE = -1;
public static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
private MyGPUImageFilter mFilter;
public final Object mSurfaceChangedWaiter = new Object();
private int mTextureID = NO_IMAGE;
private SurfaceTexture mSurfaceTexture = null;
private final FloatBuffer mGLCubeBuffer;
private final FloatBuffer mGLTextureBuffer;
private IntBuffer mGLRgbBuffer;
private int mOutputWidth;
private int mOutputHeight;
private int mImageWidth;
private int mImageHeight;
private int mAddedPadding;
private final Queue<Runnable> mRunOnDraw;
private final Queue<Runnable> mRunOnDrawEnd;
private Rotation mRotation;
private boolean mFlipHorizontal;
private boolean mFlipVertical;
private GPUImage.ScaleType mScaleType = GPUImage.ScaleType.CENTER_CROP;
public SurfaceTexture mSurface;
public boolean updateSurface = false;
private MediaPlayer mMediaPlayer;
private int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private float[] mSTMatrix = new float[16];
public MyCustomRenderer(final MyGPUImageFilter filter, MediaPlayer mediaPlayer) {
mMediaPlayer = mediaPlayer;
mFilter = filter;
mRunOnDraw = new LinkedList<Runnable>();
mRunOnDrawEnd = new LinkedList<Runnable>();
mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
setRotation(Rotation.NORMAL, false, false);
Matrix.setIdentityM(mSTMatrix, 0);
}
@Override
public void onSurfaceCreated(final GL10 unused, final EGLConfig config) {
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);
Surface surface = new Surface(mSurface);
mMediaPlayer.setSurface(surface);
surface.release();
synchronized (this) {
updateSurface = false;
}
GLES20.glClearColor(0, 0, 0, 1);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
mFilter.init();
//mMediaPlayer.start();
}
@Override
public void onSurfaceChanged(final GL10 gl, final int width, final int height) {
mOutputWidth = width;
mOutputHeight = height;
GLES20.glViewport(0, 0, width, height);
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(width, height);
adjustImageScaling();
synchronized (mSurfaceChangedWaiter) {
mSurfaceChangedWaiter.notifyAll();
}
}
@Override
public void onDrawFrame(final GL10 gl) {
synchronized (this) {
if (updateSurface) {
mSurface.updateTexImage();
mSurface.getTransformMatrix(mSTMatrix);
updateSurface = false;
} else {
//return;
}
}
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
runAll(mRunOnDraw);
mFilter.onDraw(mTextureID, mGLCubeBuffer, mGLTextureBuffer);
runAll(mRunOnDrawEnd);
/*if (mSurfaceTexture != null) {
mSurfaceTexture.updateTexImage();
}*/
}
private void runAll(Queue<Runnable> queue) {
synchronized (queue) {
while (!queue.isEmpty()) {
queue.poll().run();
}
}
}
public void setFilter(final MyGPUImageFilter filter) {
runOnDraw(new Runnable() {
@Override
public void run() {
final MyGPUImageFilter oldFilter = mFilter;
mFilter = filter;
if (oldFilter != null) {
oldFilter.destroy();
}
mFilter.init();
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(mOutputWidth, mOutputHeight);
}
});
}
public void deleteImage() {
runOnDraw(new Runnable() {
@Override
public void run() {
GLES20.glDeleteTextures(1, new int[]{
mTextureID
}, 0);
mTextureID = NO_IMAGE;
}
});
}
public void setScaleType(GPUImage.ScaleType scaleType) {
mScaleType = scaleType;
}
protected int getFrameWidth() {
return mOutputWidth;
}
protected int getFrameHeight() {
return mOutputHeight;
}
private void adjustImageScaling() {
float outputWidth = mOutputWidth;
float outputHeight = mOutputHeight;
if (mRotation == Rotation.ROTATION_270 || mRotation == Rotation.ROTATION_90) {
outputWidth = mOutputHeight;
outputHeight = mOutputWidth;
}
mImageWidth = App.screenW();
mImageHeight = App.screenH();
outputWidth = App.screenW();
outputHeight = App.screenH();
float ratio1 = outputWidth / mImageWidth;
float ratio2 = outputHeight / mImageHeight;
float ratioMax = Math.max(ratio1, ratio2);
int imageWidthNew = Math.round(mImageWidth * ratioMax);
int imageHeightNew = Math.round(mImageHeight * ratioMax);
float ratioWidth = imageWidthNew / outputWidth;
float ratioHeight = imageHeightNew / outputHeight;
float[] cube = CUBE;
float[] textureCords = TextureRotationUtil.getRotation(mRotation, mFlipHorizontal, mFlipVertical);
if (mScaleType == GPUImage.ScaleType.CENTER_CROP) {
float distHorizontal = (1 - 1 / ratioWidth) / 2;
float distVertical = (1 - 1 / ratioHeight) / 2;
textureCords = new float[]{
addDistance(textureCords[0], distHorizontal), addDistance(textureCords[1], distVertical),
addDistance(textureCords[2], distHorizontal), addDistance(textureCords[3], distVertical),
addDistance(textureCords[4], distHorizontal), addDistance(textureCords[5], distVertical),
addDistance(textureCords[6], distHorizontal), addDistance(textureCords[7], distVertical),
};
} else {
cube = new float[]{
CUBE[0] / ratioHeight, CUBE[1] / ratioWidth,
CUBE[2] / ratioHeight, CUBE[3] / ratioWidth,
CUBE[4] / ratioHeight, CUBE[5] / ratioWidth,
CUBE[6] / ratioHeight, CUBE[7] / ratioWidth,
};
}
mGLCubeBuffer.clear();
mGLCubeBuffer.put(cube).position(0);
mGLTextureBuffer.clear();
mGLTextureBuffer.put(textureCords).position(0);
}
private float addDistance(float coordinate, float distance) {
return coordinate == 0.0f ? distance : 1 - distance;
}
public void setRotationCamera(final Rotation rotation, final boolean flipHorizontal,
final boolean flipVertical) {
setRotation(rotation, flipVertical, flipHorizontal);
}
public void setRotation(final Rotation rotation) {
mRotation = rotation;
adjustImageScaling();
}
public void setRotation(final Rotation rotation,
final boolean flipHorizontal, final boolean flipVertical) {
mFlipHorizontal = flipHorizontal;
mFlipVertical = flipVertical;
setRotation(rotation);
}
public Rotation getRotation() {
return mRotation;
}
public boolean isFlippedHorizontally() {
return mFlipHorizontal;
}
public boolean isFlippedVertically() {
return mFlipVertical;
}
protected void runOnDraw(final Runnable runnable) {
synchronized (mRunOnDraw) {
mRunOnDraw.add(runnable);
}
}
protected void runOnDrawEnd(final Runnable runnable) {
synchronized (mRunOnDrawEnd) {
mRunOnDrawEnd.add(runnable);
}
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
updateSurface = true;
}
}
Any ideas what could be causing the issue? Maybe a memory leak?
Thank you for your help!
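One thing worth ruling out (an assumption about the cause, not a confirmed diagnosis): with the default continuous render mode, nothing ties redraws to frame arrival, and if an updateSurface flag is ever missed the picture silently stops advancing while audio continues. Tying rendering to onFrameAvailable makes the pipeline explicit:

// In FilteredVideoView.init(), after setRenderer(renderer):
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); // draw only when a frame is pending

// In MyCustomRenderer, assuming it is given a reference to the view (glView, an added field):
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    updateSurface = true;
    glView.requestRender(); // schedule exactly one draw for this frame
}

If the freeze persists with that change, it points away from the render loop and toward the decoder or the data source.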

How to retrieve data while recording and display it as a graph (Visualizer) on screen?

Record.java
public void onClick(View v) {
mRecorder = new MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mRecorder.setOutputFile(mFilename);
mVisualizer = new Visualizer(0);
mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
mVisualizer.setDataCaptureListener(datacaptureListener,Visualizer.getMaxCaptureRate() /2,false,true);
mVisualizer.setEnabled(true);
mRecorder.setOnErrorListener(errorListenerForRecorder);
try {
mRecorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
try {
System.out.println("****");
mRecorder.start();
} catch (Exception e) {
Toast.makeText(getApplicationContext(), "Error :: " + e.getMessage(), Toast.LENGTH_LONG).show();
}
}
}
OnDataCaptureListener datacaptureListener = new OnDataCaptureListener() {
@Override
public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes,int samplingRate) {
System.out.println("1--->");
mVisualizerView.updateVisualizer(bytes);
}
public void onFftDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {
System.out.println("2--->");
mVisualizerView.updateVisualizer(bytes);
}
};
VisualizerView.java
public class VisualizerView extends View {
private byte[] mBytes;
private float[] mPoints;
private Rect mRect = new Rect();
private Paint mForePaint = new Paint();
public VisualizerView(Context context) {
super(context);
init();
}
private void init() {
mBytes = null;
mForePaint.setStrokeWidth(1f);
mForePaint.setAntiAlias(true);
mForePaint.setColor(Color.rgb(0, 128, 255));
}
public void updateVisualizer(byte[] bytes) {
System.out.println("3--->");
mBytes = bytes;
System.out.println(mBytes);
invalidate();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (mBytes == null) {
return;
}
if (mPoints == null || mPoints.length < mBytes.length * 4) {
mPoints = new float[mBytes.length * 4];
}
mRect.set(0, 0, getWidth(), getHeight());
for (int i = 0; i < mBytes.length - 1; i++) {
mPoints[i * 4] = mRect.width() * i / (mBytes.length - 1);
mPoints[i * 4 + 1] = mRect.height() / 2
+ ((byte) (mBytes[i] + 128)) * (mRect.height() / 2) / 128;
mPoints[i * 4 + 2] = mRect.width() * (i + 1) / (mBytes.length - 1);
mPoints[i * 4 + 3] = mRect.height() / 2
+ ((byte) (mBytes[i + 1] + 128)) * (mRect.height() / 2) / 128;
}
canvas.drawLines(mPoints, mForePaint);
}
}
Above are my settings for MediaRecorder. I use datacaptureListener to get data while recording.
I pass 0 to the Visualizer constructor, which I understand captures the byte stream from the output mix. I can record sound from the microphone into a .3gp audio file perfectly, but I want to get the binary or decimal sample data while recording.
I created another file, VisualizerView.java, and use the Canvas class to draw the graph from the captured data.
The problem right now is that the system only outputs the address of the data (via updateVisualizer; I do not know whether that address contains the data I need), and then the program goes to onDraw. Is there anyone familiar with Visualizer who can help me?
Thanks!
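Two observations. First, System.out.println(mBytes) on a byte[] prints the array's object reference (something like [B@41d4e1a8), not its contents; use Arrays.toString(mBytes) to inspect the samples. Second, Visualizer attaches to audio output sessions, so a Visualizer created while only MediaRecorder is capturing the mic typically has nothing to capture. To get raw sample data while recording, reading PCM with AudioRecord is the usual route; a sketch follows (the sample rate and the isRecording flag are assumptions, and note that most devices will not let AudioRecord and MediaRecorder open the mic at the same time, so this replaces the MediaRecorder capture rather than running beside it):

int sampleRate = 8000; // assumed rate
int bufSize = AudioRecord.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufSize);
short[] pcm = new short[bufSize / 2];
record.startRecording();
while (isRecording) { // hypothetical stop flag; run this loop on a background thread
    int n = record.read(pcm, 0, pcm.length);
    byte[] wave = new byte[Math.max(n, 0)];
    for (int i = 0; i < wave.length; i++) {
        wave[i] = (byte) (pcm[i] >> 8); // keep the high byte as an 8-bit sample
    }
    mVisualizerView.updateVisualizer(wave); // post back to the UI thread in real code
}
record.stop();
record.release();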
