I use MediaPlayer to play videos with GPUImage effects applied to them. On certain devices, after a short while - a few seconds or more (it seems random) - the video stops playing (the image remains stuck), but the audio keeps playing (most of the time). I render the MediaPlayer output to a GLSurfaceView so that I can apply GPUImage effects on top of the video.
Here is the custom view:
public class FilteredVideoView extends GLSurfaceView {
private MediaPlayer mMediaPlayer = null;
private long time = 0;
private MyCustomRenderer renderer;
private boolean mediaPlayerInitialized = false;
private String videoPath;
public FilteredVideoView(Context context) {
super(context);
setWillNotDraw(false);
init();
//startFilterChangeThread();
}
public void setFilter(MyGPUImageFilter filter) {
renderer.setFilter(filter);
}
public void stopVideo() {
mMediaPlayer.stop();
}
public void prepareVideo() {
try {
mMediaPlayer.prepare();
} catch (Exception e) {
e.printStackTrace(); // don't swallow prepare failures silently
}
}
public void playVideo() {
initMediaPlayer();
mMediaPlayer.seekTo(0);
mMediaPlayer.start();
}
private void init() {
setEGLContextClientVersion(2);
getHolder().setFormat(PixelFormat.TRANSLUCENT);
setEGLConfigChooser(8, 8, 8, 8, 16, 0);
mMediaPlayer = new MediaPlayer();
//initMediaPlayer();
renderer = new MyCustomRenderer(new MyNoFilterFilter(), mMediaPlayer);
setRenderer(renderer);
}
private void initMediaPlayer() {
if (mediaPlayerInitialized) {
return;
}
mediaPlayerInitialized = true;
try {
FileInputStream fi = new FileInputStream(new File(videoPath));
mMediaPlayer.setDataSource(fi.getFD());
fi.close(); // setDataSource dups the FD, so the stream can be closed right away
mMediaPlayer.prepare();
mMediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
onVideoPlaybackFinished();
}
});
} catch (Exception e) {
e.printStackTrace();
}
}
void onVideoPlaybackFinished() {
}
@Override
protected void onDetachedFromWindow() {
super.onDetachedFromWindow();
if (mMediaPlayer != null) {
mMediaPlayer.stop();
mMediaPlayer.release();
}
}
}
And here is the renderer.
public class MyCustomRenderer implements Renderer, SurfaceTexture.OnFrameAvailableListener {
public static final int NO_IMAGE = -1;
public static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
private MyGPUImageFilter mFilter;
public final Object mSurfaceChangedWaiter = new Object();
private int mTextureID = NO_IMAGE;
private SurfaceTexture mSurfaceTexture = null;
private final FloatBuffer mGLCubeBuffer;
private final FloatBuffer mGLTextureBuffer;
private IntBuffer mGLRgbBuffer;
private int mOutputWidth;
private int mOutputHeight;
private int mImageWidth;
private int mImageHeight;
private int mAddedPadding;
private final Queue<Runnable> mRunOnDraw;
private final Queue<Runnable> mRunOnDrawEnd;
private Rotation mRotation;
private boolean mFlipHorizontal;
private boolean mFlipVertical;
private GPUImage.ScaleType mScaleType = GPUImage.ScaleType.CENTER_CROP;
public SurfaceTexture mSurface;
public boolean updateSurface = false;
private MediaPlayer mMediaPlayer;
private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65; // same value as GLES11Ext.GL_TEXTURE_EXTERNAL_OES
private float[] mSTMatrix = new float[16];
public MyCustomRenderer(final MyGPUImageFilter filter, MediaPlayer mediaPlayer) {
mMediaPlayer = mediaPlayer;
mFilter = filter;
mRunOnDraw = new LinkedList<Runnable>();
mRunOnDrawEnd = new LinkedList<Runnable>();
mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
setRotation(Rotation.NORMAL, false, false);
Matrix.setIdentityM(mSTMatrix, 0);
}
@Override
public void onSurfaceCreated(final GL10 unused, final EGLConfig config) {
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);
Surface surface = new Surface(mSurface);
mMediaPlayer.setSurface(surface);
surface.release();
synchronized (this) {
updateSurface = false;
}
GLES20.glClearColor(0, 0, 0, 1);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
mFilter.init();
//mMediaPlayer.start();
}
@Override
public void onSurfaceChanged(final GL10 gl, final int width, final int height) {
mOutputWidth = width;
mOutputHeight = height;
GLES20.glViewport(0, 0, width, height);
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(width, height);
adjustImageScaling();
synchronized (mSurfaceChangedWaiter) {
mSurfaceChangedWaiter.notifyAll();
}
}
@Override
public void onDrawFrame(final GL10 gl) {
synchronized (this) {
if (updateSurface) {
mSurface.updateTexImage();
mSurface.getTransformMatrix(mSTMatrix);
updateSurface = false;
} else {
//return;
}
}
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
runAll(mRunOnDraw);
mFilter.onDraw(mTextureID, mGLCubeBuffer, mGLTextureBuffer);
runAll(mRunOnDrawEnd);
/*if (mSurfaceTexture != null) {
mSurfaceTexture.updateTexImage();
}*/
}
private void runAll(Queue<Runnable> queue) {
synchronized (queue) {
while (!queue.isEmpty()) {
queue.poll().run();
}
}
}
public void setFilter(final MyGPUImageFilter filter) {
runOnDraw(new Runnable() {
@Override
public void run() {
final MyGPUImageFilter oldFilter = mFilter;
mFilter = filter;
if (oldFilter != null) {
oldFilter.destroy();
}
mFilter.init();
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(mOutputWidth, mOutputHeight);
}
});
}
public void deleteImage() {
runOnDraw(new Runnable() {
@Override
public void run() {
GLES20.glDeleteTextures(1, new int[]{
mTextureID
}, 0);
mTextureID = NO_IMAGE;
}
});
}
public void setScaleType(GPUImage.ScaleType scaleType) {
mScaleType = scaleType;
}
protected int getFrameWidth() {
return mOutputWidth;
}
protected int getFrameHeight() {
return mOutputHeight;
}
private void adjustImageScaling() {
float outputWidth = mOutputWidth;
float outputHeight = mOutputHeight;
if (mRotation == Rotation.ROTATION_270 || mRotation == Rotation.ROTATION_90) {
outputWidth = mOutputHeight;
outputHeight = mOutputWidth;
}
mImageWidth = App.screenW();
mImageHeight = App.screenH();
outputWidth = App.screenW();
outputHeight = App.screenH();
float ratio1 = outputWidth / mImageWidth;
float ratio2 = outputHeight / mImageHeight;
float ratioMax = Math.max(ratio1, ratio2);
int imageWidthNew = Math.round(mImageWidth * ratioMax);
int imageHeightNew = Math.round(mImageHeight * ratioMax);
float ratioWidth = imageWidthNew / outputWidth;
float ratioHeight = imageHeightNew / outputHeight;
float[] cube = CUBE;
float[] textureCords = TextureRotationUtil.getRotation(mRotation, mFlipHorizontal, mFlipVertical);
if (mScaleType == GPUImage.ScaleType.CENTER_CROP) {
float distHorizontal = (1 - 1 / ratioWidth) / 2;
float distVertical = (1 - 1 / ratioHeight) / 2;
textureCords = new float[]{
addDistance(textureCords[0], distHorizontal), addDistance(textureCords[1], distVertical),
addDistance(textureCords[2], distHorizontal), addDistance(textureCords[3], distVertical),
addDistance(textureCords[4], distHorizontal), addDistance(textureCords[5], distVertical),
addDistance(textureCords[6], distHorizontal), addDistance(textureCords[7], distVertical),
};
} else {
cube = new float[]{
CUBE[0] / ratioHeight, CUBE[1] / ratioWidth,
CUBE[2] / ratioHeight, CUBE[3] / ratioWidth,
CUBE[4] / ratioHeight, CUBE[5] / ratioWidth,
CUBE[6] / ratioHeight, CUBE[7] / ratioWidth,
};
}
mGLCubeBuffer.clear();
mGLCubeBuffer.put(cube).position(0);
mGLTextureBuffer.clear();
mGLTextureBuffer.put(textureCords).position(0);
}
private float addDistance(float coordinate, float distance) {
return coordinate == 0.0f ? distance : 1 - distance;
}
public void setRotationCamera(final Rotation rotation, final boolean flipHorizontal,
final boolean flipVertical) {
setRotation(rotation, flipVertical, flipHorizontal);
}
public void setRotation(final Rotation rotation) {
mRotation = rotation;
adjustImageScaling();
}
public void setRotation(final Rotation rotation,
final boolean flipHorizontal, final boolean flipVertical) {
mFlipHorizontal = flipHorizontal;
mFlipVertical = flipVertical;
setRotation(rotation);
}
public Rotation getRotation() {
return mRotation;
}
public boolean isFlippedHorizontally() {
return mFlipHorizontal;
}
public boolean isFlippedVertically() {
return mFlipVertical;
}
protected void runOnDraw(final Runnable runnable) {
synchronized (mRunOnDraw) {
mRunOnDraw.add(runnable);
}
}
protected void runOnDrawEnd(final Runnable runnable) {
synchronized (mRunOnDrawEnd) {
mRunOnDrawEnd.add(runnable);
}
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
updateSurface = true;
}
}
Any ideas what could be causing the issue? Maybe a memory leak?
Thank you for your help!
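One way to narrow this down (a hedged debugging sketch, not a confirmed fix): attach error and info listeners to the MediaPlayer, so logcat shows whether the decoder itself dies or only the GL side stops consuming frames. Both listeners are standard MediaPlayer API; the log tag is arbitrary:
mMediaPlayer.setOnErrorListener(new MediaPlayer.OnErrorListener() {
    @Override
    public boolean onError(MediaPlayer mp, int what, int extra) {
        Log.e("FilteredVideoView", "MediaPlayer error: what=" + what + " extra=" + extra);
        return false; // returning false still lets the completion listener fire
    }
});
mMediaPlayer.setOnInfoListener(new MediaPlayer.OnInfoListener() {
    @Override
    public boolean onInfo(MediaPlayer mp, int what, int extra) {
        Log.i("FilteredVideoView", "MediaPlayer info: what=" + what + " extra=" + extra);
        return false;
    }
});
If the info events keep arriving while the picture is frozen, the stall is on the SurfaceTexture/GL side rather than in the decoder.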
I am working on panning and cropping a landscape video using a TextureView. I am halfway there: I can pan the landscape video from left to right and vice versa, using this example:
https://github.com/crust87/Android-VideoCropView.
FFmpeg can crop a particular portion of the video using this command:
ffmpeg -i /sdcard/videokit/in.mp4 -filter:v crop=720:1088:0:0 -c:a copy /sdcard/videokit/out.mp4
How can I crop only the part of the video that is visible in the TextureView and save it to local storage on Android?
crop=720:1088:0:0 uses a hardcoded width and height, and that crops fine. But how can I get the width and height of the video visible in the TextureView, so that I can crop exactly that region and save it to local storage?
public class MainActivity extends Activity {
// Layout Components
private FrameLayout top_frame;
// Attributes
private String originalPath;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.check);
top_frame = (FrameLayout)findViewById(R.id.top_frame);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == 1000 && resultCode == RESULT_OK) {
final VideoCropView mVideoCropView = new VideoCropView(this);
mVideoCropView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp) {
mVideoCropView.start();
}
});
top_frame.addView(mVideoCropView);
Uri selectedVideoUri = data.getData();
originalPath = getRealPathFromURI(selectedVideoUri);
mVideoCropView.setVideoURI(selectedVideoUri);
mVideoCropView.seekTo(1);
}
}
public void onButtonLoadClick(View v) {
top_frame.removeAllViews();
Intent lIntent = new Intent(Intent.ACTION_PICK);
lIntent.setType("video/*");
lIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivityForResult(lIntent, 1000);
}
public String getRealPathFromURI(Uri contentUri) { // getting image path from gallery.
Cursor cursor = null;
try {
String[] proj = { MediaStore.Images.Media.DATA };
cursor = getApplicationContext().getContentResolver().query(contentUri, proj, null, null, null);
int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
cursor.moveToFirst();
return cursor.getString(column_index);
} finally {
if (cursor != null) {
cursor.close();
}
}
}
}
VideoCropView
public class VideoCropView extends TextureView implements MediaPlayerControl {
// Constants
private static final String LOG_TAG = "VideoCropView";
private static final int STATE_ERROR = -1;
private static final int STATE_IDLE = 0;
private static final int STATE_PREPARING = 1;
private static final int STATE_PREPARED = 2;
private static final int STATE_PLAYING = 3;
private static final int STATE_PAUSED = 4;
private static final int STATE_PLAYBACK_COMPLETED = 5;
// MediaPlayer Components
protected Context mContext;
private MediaPlayer mMediaPlayer;
private Surface mSurface;
private OnInfoListener mOnInfoListener;
private OnCompletionListener mOCompletionListener;
private OnErrorListener mOnErrorListener;
private OnPreparedListener mOnPreparedListener;
private OnTranslatePositionListener mOnTranslatePositionListener;
// CropView Components
private Matrix mMatrix;
// MediaPlayer Attributes
protected Uri mUri;
private int mCurrentBufferPercentage;
private int mSeekWhenPrepared;
protected int mVideoWidth;
protected int mVideoHeight;
// CropView Attributes
private float mRatioWidth;
private float mRatioHeight;
private float mPositionX;
private float mPositionY;
private float mBoundX;
private float mBoundY;
private int mRotate;
private float mScaleX;
private float mScaleY;
private float mScale;
// Working Variables
private int mCurrentState = STATE_IDLE;
private int mTargetState = STATE_IDLE;
// Touch Event
// past position x, y and move point
float mPastX;
float mPastY;
float mTouchDistance;
private Context context;
// Constructors
public VideoCropView(final Context context) {
super(context);
mContext = context;
initAttributes();
initVideoView();
}
public VideoCropView(final Context context, final AttributeSet attrs) {
super(context, attrs);
mContext = context;
initAttributes(context, attrs, 0);
initVideoView();
}
public VideoCropView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
mContext = context;
initAttributes(context, attrs, defStyleAttr);
initVideoView();
}
private void initAttributes() {
mRatioWidth = 1;
mRatioHeight = 1;
}
private void initAttributes(Context context, AttributeSet attrs, int defStyleAttr) {
TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.VideoCropView, defStyleAttr, 0);
mRatioWidth = typedArray.getInteger(R.styleable.VideoCropView_ratio_width, 3);
mRatioHeight = typedArray.getInteger(R.styleable.VideoCropView_ratio_height, 4);
typedArray.recycle(); // TypedArrays must be recycled after use
}
@Override
protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
int heightLayout;
int widthLayout;
widthLayout = MeasureSpec.getSize(widthMeasureSpec);
heightLayout = MeasureSpec.getSize(heightMeasureSpec);
setMeasuredDimension(widthLayout, heightLayout);
/*if(widthMeasureSpec < heightMeasureSpec){
int width = MeasureSpec.getSize(widthMeasureSpec);
int height = (int) ((width / mRatioWidth) * mRatioHeight);
setMeasuredDimension(width, height);
}else{
int width = MeasureSpec.getSize(widthMeasureSpec);
int height =MeasureSpec.getSize(heightMeasureSpec);
setMeasuredDimension(width, height);
}
*/
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if(mCurrentState == STATE_ERROR || mCurrentState == STATE_IDLE || mCurrentState == STATE_PREPARING) {
return false;
}
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
mPastX = event.getX();
mPastY = event.getY();
mTouchDistance = 0;
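// note: no break here, so ACTION_DOWN falls through to ACTION_MOVE (dx/dy are 0 on the first pass)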
case MotionEvent.ACTION_MOVE:
if(mBoundX!=0 || mBoundY!=0) {
float dx = event.getX() - mPastX;
float dy = event.getY() - mPastY;
updateViewPosition(dx, dy);
mPastX = event.getX();
mPastY = event.getY();
mTouchDistance += (Math.abs(dx) + Math.abs(dy));
}
break;
case MotionEvent.ACTION_UP:
if (mTouchDistance < 25) {
if (isPlaying()) {
pause();
} else {
start();
}
}
mTouchDistance = 0;
break;
}
return true;
}
@Override
public void onInitializeAccessibilityEvent(AccessibilityEvent event) {
super.onInitializeAccessibilityEvent(event);
event.setClassName(VideoView.class.getName());
}
@Override
public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) {
super.onInitializeAccessibilityNodeInfo(info);
info.setClassName(VideoView.class.getName());
}
public int resolveAdjustedSize(int desiredSize, int measureSpec) {
Log.d(LOG_TAG, "Resolve called.");
int result = desiredSize;
int specMode = MeasureSpec.getMode(measureSpec);
int specSize = MeasureSpec.getSize(measureSpec);
switch (specMode) {
case MeasureSpec.UNSPECIFIED:
/*
* Parent says we can be as big as we want. Just don't be larger
* than max size imposed on ourselves.
*/
result = desiredSize;
break;
case MeasureSpec.AT_MOST:
/*
* Parent says we can be as big as we want, up to specSize. Don't be
* larger than specSize, and don't be larger than the max size
* imposed on ourselves.
*/
result = Math.min(desiredSize, specSize);
break;
case MeasureSpec.EXACTLY:
// No choice. Do what we are told.
result = specSize;
break;
}
return result;
}
public void initVideoView() {
mVideoHeight = 0;
mVideoWidth = 0;
setFocusable(false);
setSurfaceTextureListener(mSurfaceTextureListener);
mCurrentState = STATE_IDLE;
mTargetState = STATE_IDLE;
}
public void setVideoPath(String path) {
if (path != null) {
setVideoURI(Uri.parse(path));
}
}
public void setVideoURI(Uri pVideoURI) {
mUri = pVideoURI;
mSeekWhenPrepared = 0;
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(mContext, pVideoURI);
// create thumbnail bitmap
if(android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1) {
String rotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
try {
mRotate = Integer.parseInt(rotation);
} catch(NumberFormatException e) {
mRotate = 0;
}
}
retriever.release();
openVideo();
requestLayout();
invalidate();
}
public void stopPlayback() {
if (mMediaPlayer != null) {
mMediaPlayer.stop();
mMediaPlayer.release();
mMediaPlayer = null;
mCurrentState = STATE_IDLE;
mTargetState = STATE_IDLE;
}
}
public void openVideo() {
if ((mUri == null) || (mSurface == null)) {
// not ready for playback just yet, will try again later
return;
}
// Tell the music playback service to pause
// TODO: these constants need to be published somewhere in the
// framework.
Intent intent = new Intent("com.android.music.musicservicecommand");
intent.putExtra("command", "pause");
mContext.sendBroadcast(intent);
// we shouldn't clear the target state, because somebody might have
// called start() previously
release(false);
try {
mMediaPlayer = new MediaPlayer();
// TODO: create SubtitleController in MediaPlayer, but we need
// a context for the subtitle renderers
mMediaPlayer.setOnPreparedListener(mPreparedListener);
mMediaPlayer.setOnVideoSizeChangedListener(mSizeChangedListener);
mMediaPlayer.setOnCompletionListener(mCompletionListener);
mMediaPlayer.setOnErrorListener(mErrorListener);
mMediaPlayer.setOnInfoListener(mInfoListener);
mMediaPlayer.setOnBufferingUpdateListener(mBufferingUpdateListener);
mCurrentBufferPercentage = 0;
mMediaPlayer.setDataSource(mContext, mUri);
mMediaPlayer.setSurface(mSurface);
mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
mMediaPlayer.setScreenOnWhilePlaying(true);
mMediaPlayer.prepareAsync();
mMediaPlayer.setLooping(true);
mCurrentState = STATE_PREPARING;
} catch (IllegalStateException e) {
mCurrentState = STATE_ERROR;
mTargetState = STATE_ERROR;
e.printStackTrace();
} catch (IOException e) {
mCurrentState = STATE_ERROR;
mTargetState = STATE_ERROR;
e.printStackTrace();
}
}
private OnVideoSizeChangedListener mSizeChangedListener = new OnVideoSizeChangedListener() {
@Override
public void onVideoSizeChanged(final MediaPlayer mp, final int width,
final int height) {
mVideoWidth = mp.getVideoWidth();
mVideoHeight = mp.getVideoHeight();
if (mVideoWidth != 0 && mVideoHeight != 0) {
requestLayout();
if(mVideoWidth >= mVideoHeight)
initVideo();
}
}
};
private OnPreparedListener mPreparedListener = new OnPreparedListener() {
@Override
public void onPrepared(final MediaPlayer mp) {
mCurrentState = STATE_PREPARED;
if (mOnPreparedListener != null) {
mOnPreparedListener.onPrepared(mp);
}
mVideoWidth = mp.getVideoWidth();
mVideoHeight = mp.getVideoHeight();
int seekToPosition = mSeekWhenPrepared; // mSeekWhenPrepared may be
// changed after seekTo()
if (seekToPosition != 0) {
seekTo(seekToPosition);
}
if ((mVideoWidth != 0) && (mVideoHeight != 0)) {
if(mVideoWidth >= mVideoHeight) initVideo();
if (mTargetState == STATE_PLAYING) {
start();
}
} else {
// We don't know the video size yet, but should start anyway.
// The video size might be reported to us later.
if (mTargetState == STATE_PLAYING) {
start();
}
}
}
};
private OnCompletionListener mCompletionListener = new OnCompletionListener() {
@Override
public void onCompletion(final MediaPlayer mp) {
mCurrentState = STATE_PLAYBACK_COMPLETED;
mTargetState = STATE_PLAYBACK_COMPLETED;
if (mOCompletionListener != null) {
mOCompletionListener.onCompletion(mMediaPlayer);
}
}
};
private OnInfoListener mInfoListener = new OnInfoListener() {
public boolean onInfo(MediaPlayer mp, int arg1, int arg2) {
if (mOnInfoListener != null) {
mOnInfoListener.onInfo(mp, arg1, arg2);
}
return true;
}
};
private OnErrorListener mErrorListener = new OnErrorListener() {
@Override
public boolean onError(MediaPlayer mp, int framework_err, int impl_err) {
Log.d(LOG_TAG, "Error: " + framework_err + "," + impl_err);
mCurrentState = STATE_ERROR;
mTargetState = STATE_ERROR;
/* If an error handler has been supplied, use it and finish. */
if (mOnErrorListener != null) {
if (mOnErrorListener.onError(mMediaPlayer, framework_err,
impl_err)) {
return true;
}
}
return true;
}
};
private OnBufferingUpdateListener mBufferingUpdateListener = new OnBufferingUpdateListener() {
@Override
public void onBufferingUpdate(final MediaPlayer mp, final int percent) {
mCurrentBufferPercentage = percent;
}
};
public void setOnPreparedListener(OnPreparedListener listener) {
mOnPreparedListener = listener;
}
public void setOnCompletionListener(OnCompletionListener listener) {
mOCompletionListener = listener;
}
public void setOnErrorListener(OnErrorListener listener) {
mOnErrorListener = listener;
}
public void setOnInfoListener(OnInfoListener listener) {
mOnInfoListener = listener;
}
private void release(boolean cleartargetstate) {
if (mMediaPlayer != null) {
mMediaPlayer.reset();
mMediaPlayer.release();
mMediaPlayer = null;
mCurrentState = STATE_IDLE;
if (cleartargetstate) {
mTargetState = STATE_IDLE;
}
}
}
@Override
public void start() {
if (isInPlaybackState()) {
mMediaPlayer.start();
mCurrentState = STATE_PLAYING;
}
mTargetState = STATE_PLAYING;
}
@Override
public void pause() {
if (isInPlaybackState()) {
if (mMediaPlayer.isPlaying()) {
mMediaPlayer.pause();
mCurrentState = STATE_PAUSED;
}
}
mTargetState = STATE_PAUSED;
}
@Override
public int getDuration() {
if (isInPlaybackState()) {
return mMediaPlayer.getDuration();
}
return -1;
}
@Override
public int getCurrentPosition() {
if (isInPlaybackState()) {
return mMediaPlayer.getCurrentPosition();
}
return 0;
}
@Override
public void seekTo(int msec) {
if (isInPlaybackState()) {
mMediaPlayer.seekTo(msec);
mSeekWhenPrepared = 0;
} else {
mSeekWhenPrepared = msec;
}
}
@Override
public boolean isPlaying() {
return isInPlaybackState() && mMediaPlayer.isPlaying();
}
@Override
public int getBufferPercentage() {
if (mMediaPlayer != null) {
return mCurrentBufferPercentage;
}
return 0;
}
private boolean isInPlaybackState() {
return (mMediaPlayer != null && mCurrentState != STATE_ERROR
&& mCurrentState != STATE_IDLE && mCurrentState != STATE_PREPARING);
}
@Override
public boolean canPause() {
return false;
}
@Override
public boolean canSeekBackward() {
return false;
}
@Override
public boolean canSeekForward() {
return false;
}
@Override
public int getAudioSessionId() {
return -1;
}
SurfaceTextureListener mSurfaceTextureListener = new SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
mSurface = new Surface(surface);
openVideo();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
boolean isValidState = (mTargetState == STATE_PLAYING);
boolean hasValidSize = (mVideoWidth == width && mVideoHeight == height);
if (mMediaPlayer != null && isValidState && hasValidSize) {
start();
}
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
if (mMediaPlayer != null) {
mMediaPlayer.reset();
mMediaPlayer.release();
mMediaPlayer = null;
}
if (mSurface != null) {
mSurface.release();
mSurface = null;
}
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture surface) {
}
};
@Override
protected void onVisibilityChanged(View changedView, int visibility) {
super.onVisibilityChanged(changedView, visibility);
if (visibility == View.INVISIBLE || visibility == View.GONE) {
if (isPlaying()) {
stopPlayback();
}
}
}
public float getScale() {
return mScale;
}
private void initVideo() {
try {
int width = getWidth();
int height = getHeight();
mScaleX = 1.0f;
mScaleY = 1.0f;
mPositionX = 0;
mPositionY = 0;
mBoundX = 0;
mBoundY = 0;
mMatrix = new Matrix();
mScaleX = (float) mVideoWidth / width;
mScaleY = (float) mVideoHeight / height;
mBoundX = width - mVideoWidth / mScaleY;
mBoundY = height - mVideoHeight / mScaleX;
if (mScaleX < mScaleY) {
mScale = mScaleX;
mScaleY = mScaleY * (1.0f / mScaleX);
mScaleX = 1.0f;
mBoundX = 0;
} else {
mScale = mScaleY;
mScaleX = mScaleX * (1.0f / mScaleY);
mScaleY = 1.0f;
mBoundY = 0;
}
mMatrix = new Matrix();
mMatrix.setScale(mScaleX, mScaleY);
setTransform(mMatrix);
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
public void updateViewPosition(float x, float y) {
float nextX = mPositionX + x;
float nextY = mPositionY + y;
if(mScaleX == 1.0f) {
x = 0;
} else {
if(nextX > 0) {
x = -mPositionX;
mPositionX = mPositionX + x;
} else if(nextX < mBoundX) {
x = mBoundX - mPositionX;
mPositionX = mPositionX + x;
} else {
mPositionX = nextX;
}
}
if(mScaleY == 1.0f) {
y = 0;
} else {
if(nextY > 0) {
y = -mPositionY;
mPositionY = mPositionY + y;
} else if(nextY < mBoundY) {
y = mBoundY - mPositionY;
mPositionY = mPositionY + y;
} else {
mPositionY = nextY;
}
}
if(mOnTranslatePositionListener != null) {
mOnTranslatePositionListener.onTranslatePosition(mPositionX, mPositionY, mPositionX * -mScale, mPositionY * -mScale);
}
mMatrix.postTranslate(x, y);
setTransform(mMatrix);
invalidate();
}
// public void setOriginalRatio() {
// if(mVideoWidth != 0 && mVideoHeight != 0) {
// int gcd = gcd(mVideoWidth, mVideoHeight);
// setRatio(mVideoWidth / gcd, mVideoHeight / gcd);
// }
// }
public int gcd(int n, int m) {
while (m != 0) {
int t = n % m;
n = m;
m = t;
}
return Math.abs(n);
}
// public void setRatio(float ratioWidth, float ratioHeight) {
// mRatioWidth = ratioWidth;
// mRatioHeight = ratioHeight;
//
// int seek = getCurrentPosition();
//
// requestLayout();
// invalidate();
// openVideo();
//
// seekTo(seek);
// }
public float getRatioWidth() {
return mRatioWidth;
}
public float getRatioHeight() {
return mRatioHeight;
}
public float getRealPositionX() {
return mPositionX * -mScale;
}
public float getRealPositionY() {
return mPositionY * -mScale;
}
public int getVideoWidth() {
return mVideoWidth;
}
public int getVideoHeight() {
return mVideoHeight;
}
public int getRotate() {
return mRotate;
}
public void setOnTranslatePositionListener(OnTranslatePositionListener pOnTranslatePositionListener) {
mOnTranslatePositionListener = pOnTranslatePositionListener;
}
public void setContext(Context context) {
this.context = context;
}
public interface OnTranslatePositionListener {
public abstract void onTranslatePosition(float x, float y, float rx, float ry);
}
}
FFmpeg command for cropping a particular portion:
ffmpeg -i /sdcard/videokit/in.mp4 -filter:v crop=720:1088:0:0 -c:a copy /sdcard/videokit/out.mp4
public class SimpleExample extends Activity {
String workFolder = null;
String demoVideoFolder = null;
String demoVideoPath = null;
String vkLogPath = null;
private boolean commandValidationFailedFlag = false;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.ffmpeg_demo_client_1);
demoVideoFolder = Environment.getExternalStorageDirectory().getAbsolutePath() + "/videokit/";
demoVideoPath = demoVideoFolder + "in.mp4";
Log.i(Prefs.TAG, getString(R.string.app_name) + " version: " + GeneralUtils.getVersionName(getApplicationContext()) );
workFolder = getApplicationContext().getFilesDir().getAbsolutePath() + "/";
//Log.i(Prefs.TAG, "workFolder: " + workFolder);
vkLogPath = workFolder + "vk.log";
GeneralUtils.copyLicenseFromAssetsToSDIfNeeded(this, workFolder);
GeneralUtils.copyDemoVideoFromAssetsToSDIfNeeded(this, demoVideoFolder);
Button invoke = (Button)findViewById(R.id.invokeButton);
invoke.setOnClickListener(new OnClickListener() {
public void onClick(View v){
Log.i(Prefs.TAG, "run clicked.");
if (GeneralUtils.checkIfFileExistAndNotEmpty(demoVideoPath)) {
new TranscdingBackground(SimpleExample.this).execute();
}
else {
Toast.makeText(getApplicationContext(), demoVideoPath + " not found", Toast.LENGTH_LONG).show();
}
}
});
int rc = GeneralUtils.isLicenseValid(getApplicationContext(), workFolder);
Log.i(Prefs.TAG, "License check RC: " + rc);
}
public class TranscdingBackground extends AsyncTask<String, Integer, Integer>
{
ProgressDialog progressDialog;
Activity _act;
String commandStr;
public TranscdingBackground (Activity act) {
_act = act;
}
@Override
protected void onPreExecute() {
EditText commandText = (EditText)findViewById(R.id.CommandText);
commandStr = commandText.getText().toString();
progressDialog = new ProgressDialog(_act);
progressDialog.setMessage("FFmpeg4Android Transcoding in progress...");
progressDialog.show();
}
protected Integer doInBackground(String... paths) {
Log.i(Prefs.TAG, "doInBackground started...");
// delete previous log
boolean isDeleted = GeneralUtils.deleteFileUtil(workFolder + "/vk.log");
Log.i(Prefs.TAG, "vk deleted: " + isDeleted);
PowerManager powerManager = (PowerManager)_act.getSystemService(Activity.POWER_SERVICE);
WakeLock wakeLock = powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "VK_LOCK");
Log.d(Prefs.TAG, "Acquire wake lock");
wakeLock.acquire();
///////////// Set Command using code (overriding the UI EditText) /////
//commandStr = "ffmpeg -y -i /sdcard/videokit/in.mp4 -strict experimental -s 320x240 -r 30 -aspect 3:4 -ab 48000 -ac 2 -ar 22050 -vcodec mpeg4 -b 2097152 /sdcard/videokit/out.mp4";
//String[] complexCommand = {"ffmpeg", "-y" ,"-i", "/sdcard/videokit/in.mp4","-strict","experimental","-s", "160x120","-r","25", "-vcodec", "mpeg4", "-b", "150k", "-ab","48000", "-ac", "2", "-ar", "22050", "/sdcard/videokit/out.mp4"};
///////////////////////////////////////////////////////////////////////
LoadJNI vk = new LoadJNI();
try {
vk.run(GeneralUtils.utilConvertToComplex(commandStr), workFolder, getApplicationContext());
GeneralUtils.copyFileToFolder(vkLogPath, demoVideoFolder);
} catch (Throwable e) {
Log.e(Prefs.TAG, "vk run exeption.", e);
}
finally {
if (wakeLock.isHeld())
wakeLock.release();
else{
Log.i(Prefs.TAG, "Wake lock is already released, doing nothing");
}
}
Log.i(Prefs.TAG, "doInBackground finished");
return Integer.valueOf(0);
}
protected void onProgressUpdate(Integer... progress) {
}
@Override
protected void onCancelled() {
Log.i(Prefs.TAG, "onCancelled");
//progressDialog.dismiss();
super.onCancelled();
}
@Override
protected void onPostExecute(Integer result) {
Log.i(Prefs.TAG, "onPostExecute");
progressDialog.dismiss();
super.onPostExecute(result);
// finished Toast
String rc = null;
if (commandValidationFailedFlag) {
rc = "Command Vaidation Failed";
}
else {
rc = GeneralUtils.getReturnCodeFromLog(vkLogPath);
}
final String status = rc;
SimpleExample.this.runOnUiThread(new Runnable() {
public void run() {
Toast.makeText(SimpleExample.this, status, Toast.LENGTH_LONG).show();
if (status.equals("Transcoding Status: Failed")) {
Toast.makeText(SimpleExample.this, "Check: " + vkLogPath + " for more information.", Toast.LENGTH_LONG).show();
}
}
});
}
}
}
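To tie this demo back to the crop question: the commented-out commandStr override in doInBackground() is the place where a computed crop command would go. A rough sketch, where cropWidth, cropHeight, cropX and cropY are hypothetical values computed from the crop view (see the answer below):
// Sketch: override the UI command with a computed crop (values are placeholders).
commandStr = "ffmpeg -y -i /sdcard/videokit/in.mp4"
        + " -filter:v crop=" + cropWidth + ":" + cropHeight + ":" + cropX + ":" + cropY
        + " -c:a copy /sdcard/videokit/out.mp4";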
Try to use getBitmap: TextureView.getBitmap(int width, int height) on the VideoCropView returns a bitmap at the wanted resolution. Then you can crop it using Bitmap.createBitmap, like this:
resizedBitmap = Bitmap.createBitmap(bmp, 0, 0, yourWidth, yourHeight);
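If you prefer the ffmpeg route over a bitmap snapshot, the view's own getters can supply the crop numbers. A minimal sketch, assuming getRealPositionX()/getRealPositionY() return the pan offset in video pixels and getScale() maps view pixels to video pixels (that is how initVideo() above computes them):
// Hypothetical helper: derive an ffmpeg crop filter from the visible region.
private String buildCropCommand(VideoCropView cropView, String inPath, String outPath) {
    float scale = cropView.getScale();
    int cropX = (int) cropView.getRealPositionX();         // pan offset, in video pixels
    int cropY = (int) cropView.getRealPositionY();
    int cropWidth = (int) (cropView.getWidth() * scale);   // visible width, in video pixels
    int cropHeight = (int) (cropView.getHeight() * scale); // visible height, in video pixels
    return "ffmpeg -y -i " + inPath
            + " -filter:v crop=" + cropWidth + ":" + cropHeight + ":" + cropX + ":" + cropY
            + " -c:a copy " + outPath;
}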
Use this library to crop the video to its visible portion:
Video Trimmer Library
Does anyone have an idea what would cause the error below?
queueBuffer: error queuing buffer to SurfaceTexture, -32
I'm using SurfaceTexture in my app. The error occurs when I try to set it as a live wallpaper.
Below is the code:
public class ClockWallpaperService extends WallpaperService {
private WallpaperEngine myEngine;
public void onCreate() {
super.onCreate();
}
@Override
public Engine onCreateEngine() {
System.out.println("Service: onCreateEngine");
this.myEngine = new WallpaperEngine();
return myEngine;
}
public void onDestroy() {
this.myEngine = null;
super.onDestroy();
}
private class WallpaperEngine extends Engine implements OnGestureListener,
OnSharedPreferenceChangeListener {
public Bitmap image1, backgroundImage;
private ArrayList<Leaf> leafList;
private Bitmap bitmap1;
private Bitmap bitmap2;
private Bitmap bitmap3;
private Bitmap currentBackgroundBitmap;
private Paint paint;
private int count;
private int heightOfCanvas;
private int widthOfCanvas;
private float touchX;
private float touchY;
private int interval;
private int amount;
private boolean fallingDown;
private float bgX = 0;
private String colorFlag;
private String backgroundFlag;
private Random rand;
private GestureDetector detector;
private static final int DRAW_MSG = 0;
private static final int MAX_SIZE = 101;
private Handler mHandler = new Handler() {
public void handleMessage(Message msg) {
super.handleMessage(msg);
switch (msg.what) {
case DRAW_MSG:
drawPaper();
break;
}
}
};
/** hands colors for hour, min, sec */
private int[] colors = { 0xFFFF0000, 0xFF0000FF, 0xFFA2BC13 };
// private int bgColor;
private int width;
private int height;
private boolean visible = true;
private boolean displayHandSec;
private AnalogClock clock;
private SharedPreferences prefs;
WallpaperEngine() {
// get the fish and background image references
backgroundImage = BitmapFactory.decodeResource(getResources(),
R.drawable.bg1);
SharedPreferences sp = getSharedPreferences("back_position",
Activity.MODE_PRIVATE);
int position = sp.getInt("back_position", 1);
Global.backgroundDial = BitmapFactory.decodeResource(
getResources(), Global.BackgroundId[position]);
prefs = PreferenceManager
.getDefaultSharedPreferences(ClockWallpaperService.this);
prefs.registerOnSharedPreferenceChangeListener(this);
displayHandSec = prefs.getBoolean(
SettingsActivity.DISPLAY_HAND_SEC_KEY, true);
paint = new Paint();
paint.setAntiAlias(true);
paint.setStyle(Paint.Style.STROKE);
paint.setStrokeWidth(5);
// bgColor = Color.parseColor("#C0C0C0");
clock = new AnalogClock(getApplicationContext());
}
@Override
public void onCreate(SurfaceHolder surfaceHolder) {
// TODO Auto-generated method stub
super.onCreate(surfaceHolder);
System.out.println("Engine: onCreate");
this.leafList = new ArrayList<Leaf>();
this.bitmap1 = BitmapFactory.decodeResource(getResources(),
R.drawable.flower1);
this.bitmap2 = BitmapFactory.decodeResource(getResources(),
R.drawable.flower2);
this.bitmap3 = BitmapFactory.decodeResource(getResources(),
R.drawable.flower3);
this.paint = new Paint();
this.paint.setAntiAlias(true);
this.count = -1;
this.rand = new Random();
this.detector = new GestureDetector(this);
this.touchX = -1.0f;
this.touchY = -1.0f;
SharedPreferences pref = PreferenceManager
.getDefaultSharedPreferences(ClockWallpaperService.this);
pref.registerOnSharedPreferenceChangeListener(this);
String speedStr = pref.getString("leaf_falling_speed", "20");
String amountStr = pref.getString("leaf_number", "50");
this.interval = Integer.parseInt(speedStr);
this.amount = Integer.parseInt(amountStr);
this.colorFlag = pref.getString("leaf_color", "0");
this.backgroundFlag = pref.getString("paper_background", "0");
String directionFlag = pref.getString("leaf_moving_direction", "0");
if (directionFlag.equals("0")) {
this.fallingDown = true;
} else {
this.fallingDown = false;
}
this.setTouchEventsEnabled(true);
}
@Override
public void onDestroy() {
// TODO Auto-generated method stub
System.out.println("Engine: onDestroy");
this.mHandler.removeMessages(DRAW_MSG);
PreferenceManager.getDefaultSharedPreferences(
ClockWallpaperService.this)
.unregisterOnSharedPreferenceChangeListener(this);
super.onDestroy();
}
@Override
public void onSurfaceChanged(SurfaceHolder holder, int format,
int width, int height) {
// TODO Auto-generated method stub
this.width = width;
this.height = height;
super.onSurfaceChanged(holder, format, width, height);
}
@Override
public void onSurfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
super.onSurfaceCreated(holder);
System.out.println("Engine: onSurfaceCreate");
Canvas canvas = holder.lockCanvas();
this.heightOfCanvas = canvas.getHeight();
this.widthOfCanvas = canvas.getWidth();
System.out.println("Width = " + widthOfCanvas + ", Height = "
+ heightOfCanvas);
holder.unlockCanvasAndPost(canvas);
this.mHandler.sendEmptyMessage(DRAW_MSG);
}
@Override
public void onSurfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
System.out.println("Engine: onSurfaceDestroyed");
this.mHandler.removeMessages(DRAW_MSG);
if (this.currentBackgroundBitmap != null) {
this.currentBackgroundBitmap.recycle();
this.currentBackgroundBitmap = null;
}
if (this.bitmap1 != null) {
this.bitmap1.recycle();
this.bitmap1 = null;
}
if (this.bitmap2 != null) {
this.bitmap2.recycle();
this.bitmap2 = null;
}
if (this.bitmap3 != null) {
this.bitmap3.recycle();
this.bitmap3 = null;
}
super.onSurfaceDestroyed(holder);
}
@Override
public void onOffsetsChanged(float xOffset, float yOffset,
float xOffsetStep, float yOffsetStep, int xPixelOffset,
int yPixelOffset) {
super.onOffsetsChanged(xOffset, yOffset, xOffsetStep, yOffsetStep,
xPixelOffset, yPixelOffset);
System.out.println("xPixelOffset: " + xPixelOffset
+ ", yPixelOffset: " + yPixelOffset);
this.bgX = xPixelOffset;
}
private void drawPaper() {
count++;
if (count >= 10000) {
count = 0;
}
if (count % 10 == 0) {
if (this.leafList.size() < MAX_SIZE) {
Leaf l = null;
Bitmap temp = bitmap1;
if (colorFlag.equals("0")) {
int index = rand.nextInt(3) + 1;
switch (index) {
case 1:
temp = bitmap1;
break;
case 2:
temp = bitmap2;
break;
case 3:
temp = bitmap3;
break;
default:
temp = bitmap1;
break;
}
} else if (colorFlag.equals("1")) {
temp = bitmap1;
} else if (colorFlag.equals("2")) {
temp = bitmap2;
} else if (colorFlag.equals("3")) {
temp = bitmap3;
}
l = new Leaf(temp, this.heightOfCanvas, this.widthOfCanvas);
this.leafList.add(l);
}
}
SurfaceHolder holder = this.getSurfaceHolder();
Canvas canvas = holder.lockCanvas();
drawBackground(canvas);
int size = Math.min(this.amount, this.leafList.size());
for (int i = 0; i < size; i++) {
Leaf l = this.leafList.get(i);
if (l.isTouched()) {
l.handleTouched(touchX, touchY);
} else {
l.handleFalling(this.fallingDown);
}
l.drawLeaf(canvas, paint);
}
holder.unlockCanvasAndPost(canvas);
this.mHandler.sendEmptyMessageDelayed(DRAW_MSG, this.interval);
}
private void drawBackground(Canvas c) {
c.drawBitmap(backgroundImage, 0, 0, null);
clock.config(width / 2, height / 2, (int) (width * 0.6f),
new Date(), paint, colors, displayHandSec);
clock.draw(c);
}
@Override
public void onTouchEvent(MotionEvent event) {
super.onTouchEvent(event);
this.detector.onTouchEvent(event);
}
public boolean onDown(MotionEvent e) {
touchX = e.getX();
touchY = e.getY();
int size = Math.min(this.amount, this.leafList.size());
for (int i = 0; i < size; i++) {
Leaf l = this.leafList.get(i);
float centerX = l.getX() + l.getBitmap().getWidth() / 2.0f;
float centerY = l.getY() + l.getBitmap().getHeight() / 2.0f;
if (!l.isTouched()) {
if (Math.abs(centerX - touchX) <= 80
&& Math.abs(centerY - touchY) <= 80
&& centerX != touchX) {
l.setTouched(true);
}
}
}
return true;
}
public void onShowPress(MotionEvent e) {
}
public boolean onSingleTapUp(MotionEvent e) {
return false;
}
public boolean onScroll(MotionEvent e1, MotionEvent e2,
float distanceX, float distanceY) {
return false;
}
public void onLongPress(MotionEvent e) {
}
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX,
float velocityY) {
return false;
}
public void onSharedPreferenceChanged(
SharedPreferences sharedPreferences, String key) {
if (key.equals("leaf_falling_speed")) {
String speedStr = sharedPreferences.getString(key, "20");
this.interval = Integer.parseInt(speedStr);
} else if (key.equals("leaf_number")) {
String amountStr = sharedPreferences.getString(key, "50");
this.amount = Integer.parseInt(amountStr);
} else if (key.equals("leaf_moving_direction")) {
String directionFlag = sharedPreferences.getString(key, "0");
if (directionFlag.equals("0")) {
this.fallingDown = true;
} else {
this.fallingDown = false;
}
} else if (key.equals("leaf_color")) {
this.colorFlag = sharedPreferences.getString(key, "0");
this.leafList.removeAll(leafList);
}
}
}
}
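A hedged suggestion (an assumption, not a confirmed diagnosis): the engine keeps posting DRAW_MSG even while the wallpaper is hidden or its surface is being torn down, so frames can get queued to a surface that is no longer consuming them; that is one plausible source of the -32 queueBuffer failure. Pausing the draw loop on visibility changes is the usual WallpaperService pattern:
// Add to WallpaperEngine: stop queuing frames while the wallpaper is not visible.
@Override
public void onVisibilityChanged(boolean visible) {
    super.onVisibilityChanged(visible);
    if (visible) {
        mHandler.sendEmptyMessage(DRAW_MSG);  // resume the draw loop
    } else {
        mHandler.removeMessages(DRAW_MSG);    // stop drawing to a hidden surface
    }
}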
So basically I have created a page with a visualiser and a button for recording audio. I want the visualiser to run for the audio while I am recording it. I guess I need to get the max amplitude of the recording and then dynamically plot the visualiser graph at short intervals. Can someone please help me with that, or suggest a better way to do it? Currently, with the code I pasted, the visualiser only runs for the recording after it has been recorded.
public class MainActivity extends Activity {
private static final float VISUALIZER_HEIGHT_DIP = 200f;
MediaPlayer mp;
private Visualizer mVisualizer;
private LinearLayout mLinearLayout;
private VisualizerView mVisualizerView;
private TextView mStatusTextView;
private File metapath;
private static String[] META_PATH;
private Button play;
private boolean mStartRecording;
private static final String LOG_TAG = "AudioRecordTest";
private static String mFileName = null;
private MediaRecorder mRecorder = null;
private MediaPlayer mPlayer = null;
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mStatusTextView = (TextView)findViewById(R.id.recording);
if(Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)
|| Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED_READ_ONLY))
metapath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC);
META_PATH = metapath.list(new WavFilter());
mLinearLayout = (LinearLayout)findViewById(R.id.subscreen);
play = (Button)findViewById(R.id.startandstop);
mStartRecording=true;
play.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
onRecord(mStartRecording);
if (mStartRecording) {
play.setText("Stop recording");
} else {
play.setText("Start recording");
}
mStartRecording = !mStartRecording;
}
});
setupVisualizerFxAndUI();
mVisualizer.setEnabled(true);
mStatusTextView.setText("Playing audio...");
}
private void onRecord(boolean start) {
if (start) {
startRecording();
} else {
stopRecording();
}
}
private void startRecording() {
mRecorder = new MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mRecorder.setOutputFile(metapath.getPath()+"/myfile.3gp");
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
try {
mRecorder.prepare();
} catch (IOException e) {
Log.e(LOG_TAG, "prepare() failed");
}
mRecorder.start();
}
private void stopRecording() {
mRecorder.stop();
mRecorder.release();
mRecorder = null;
mPlayer = new MediaPlayer();
try {
mPlayer.setDataSource(metapath.getPath()+"/myfile.3gp");
mPlayer.prepare();
mPlayer.start();
} catch (IOException e) {
Log.e(LOG_TAG, "prepare() failed");
}
}
class WavFilter implements FilenameFilter {
public boolean accept(File dir, String name) {
return (name.endsWith(".wav"));
}
}
private void setupVisualizerFxAndUI() {
mVisualizerView = new VisualizerView(this);
mVisualizerView.setLayoutParams(new ViewGroup.LayoutParams(
600,
(int)(VISUALIZER_HEIGHT_DIP * getResources().getDisplayMetrics().density)));
mLinearLayout.addView(mVisualizerView);
mLinearLayout.setGravity(Gravity.CENTER_HORIZONTAL);
mVisualizer = new Visualizer(0);
mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
mVisualizer.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes,
int samplingRate) {
for(int i=0;i<bytes.length;i++)
bytes[i]+=2;
mVisualizerView.updateVisualizer(bytes);
}
public void onFftDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {}
}, Visualizer.getMaxCaptureRate() / 2, true, false);
}
@Override
protected void onPause() {
super.onPause();
if (isFinishing() && mp != null) {
mVisualizer.release();
mp.release();
mp = null;
}
}
class VisualizerView extends View {
private byte[] mBytes;
private float[] mPoints;
private Rect mRect = new Rect();
private Paint mForePaint = new Paint();
public VisualizerView(Context context) {
super(context);
init();
}
private void init() {
mBytes = null;
mForePaint.setStrokeWidth(6f);//set width of the moving wave
mForePaint.setAntiAlias(true);
mForePaint.setColor(Color.RED);//color of the moving wave
}
public void updateVisualizer(byte[] bytes) {
mBytes = bytes;
invalidate();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (mBytes == null) {
return;
}
if (mPoints == null || mPoints.length < mBytes.length * 4) {
mPoints = new float[mBytes.length * 4];
}
mRect.set(0, 0, getWidth(), getHeight());
for (int i = 0; i < mBytes.length - 1; i++) {
mPoints[i * 4] = mRect.width() * i / (mBytes.length - 1);
mPoints[i * 4 + 1] = mRect.height() / 2
+ ((byte) (mBytes[i] + 128)) * (mRect.height() / 2) / 128;
mPoints[i * 4 + 2] = mRect.width() * (i + 1) / (mBytes.length - 1);
mPoints[i * 4 + 3] = mRect.height() / 2
+ ((byte) (mBytes[i + 1] + 128)) * (mRect.height() / 2) / 128;
}
canvas.drawColor(Color.BLACK);
canvas.drawLines(mPoints, mForePaint);
}
}
}
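For the live part of the question: Visualizer only taps audio that is being played back, so it cannot show the microphone input while recording. A minimal sketch of the max-amplitude approach suggested above, polling the recorder on a Handler (updateAmplitude() is a hypothetical method you would add to VisualizerView):
// Poll MediaRecorder.getMaxAmplitude() (the peak since the previous call)
// roughly 20 times a second and feed it to the view.
private final Handler mAmplitudeHandler = new Handler();
private final Runnable mPollAmplitude = new Runnable() {
    @Override
    public void run() {
        if (mRecorder != null) {
            int amplitude = mRecorder.getMaxAmplitude(); // 0..32767
            mVisualizerView.updateAmplitude(amplitude);  // hypothetical view method
            mAmplitudeHandler.postDelayed(this, 50);
        }
    }
};
Start the loop right after mRecorder.start() with mAmplitudeHandler.post(mPollAmplitude), and remove the callbacks in stopRecording().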
How can I use the GPUImage library without resizing the original image? I need to apply a filter such as Pixelate or Blur to an entire GPUImageView.
I thought of using GPUImage, but I don't know how to do it.
Change the GPUImage library's GPUImageRenderer class; the modified code is below:
/*
* Copyright (C) 2012 CyberAgent
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.co.cyberagent.android.gpuimage;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView.Renderer;
import jp.co.cyberagent.android.gpuimage.util.TextureRotationUtil;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.LinkedList;
import java.util.Queue;
import static jp.co.cyberagent.android.gpuimage.util.TextureRotationUtil.TEXTURE_NO_ROTATION;
@SuppressLint("WrongCall")
@TargetApi(11)
public class GPUImageRenderer implements Renderer, PreviewCallback {
public static final int NO_IMAGE = -1;
static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
private GPUImageFilter mFilter;
public final Object mSurfaceChangedWaiter = new Object();
private int mGLTextureId = NO_IMAGE;
private SurfaceTexture mSurfaceTexture = null;
private final FloatBuffer mGLCubeBuffer;
private final FloatBuffer mGLTextureBuffer;
private IntBuffer mGLRgbBuffer;
int mwidth, mheight, wheight;
private int mOutputWidth;
private int mOutputHeight;
private int mImageWidth;
private int mImageHeight;
private int mAddedPadding;
private final Queue<Runnable> mRunOnDraw;
private final Queue<Runnable> mRunOnDrawEnd;
private Rotation mRotation;
private boolean mFlipHorizontal;
private boolean mFlipVertical;
private GPUImage.ScaleType mScaleType = GPUImage.ScaleType.CENTER_CROP;
public GPUImageRenderer(final GPUImageFilter filter) {
mFilter = filter;
mRunOnDraw = new LinkedList<Runnable>();
mRunOnDrawEnd = new LinkedList<Runnable>();
mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
setRotation(Rotation.NORMAL, false, false);
}
@Override
public void onSurfaceCreated(final GL10 unused, final EGLConfig config) {
GLES20.glClearColor(0, 0, 0, 1);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
mFilter.init();
}
public void onSurfacSize(final int width, final int height) {
mwidth = width;
mheight = height;
}
@Override
public void onSurfaceChanged(final GL10 gl, final int width, final int height) {
if (height > mheight) {
mOutputWidth = mwidth;
mOutputHeight = mheight;
GLES20.glViewport(0, 0, mwidth, mheight);
// GLES20.glViewport(0+(mwidth/2), 0+(mheight/2), mwidth, mheight);
mFilter.onOutputSizeChanged(mwidth, mheight);
} else {
mOutputWidth = width;
mOutputHeight = height;
GLES20.glViewport(0, 0, width, height);
mFilter.onOutputSizeChanged(width, height);
}
// GLES20.glViewport(20,-20, width, height);
GLES20.glUseProgram(mFilter.getProgram());
adjustImageScaling();
synchronized (mSurfaceChangedWaiter) {
mSurfaceChangedWaiter.notifyAll();
}
}
@Override
public void onDrawFrame(final GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
runAll(mRunOnDraw);
mFilter.onDraw(mGLTextureId, mGLCubeBuffer, mGLTextureBuffer);
runAll(mRunOnDrawEnd);
if (mSurfaceTexture != null) {
mSurfaceTexture.updateTexImage();
}
}
private void runAll(Queue<Runnable> queue) {
synchronized (queue) {
while (!queue.isEmpty()) {
queue.poll().run();
}
}
}
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
final Size previewSize = camera.getParameters().getPreviewSize();
if (mGLRgbBuffer == null) {
mGLRgbBuffer = IntBuffer.allocate(previewSize.width * previewSize.height);
}
if (mRunOnDraw.isEmpty()) {
runOnDraw(new Runnable() {
@Override
public void run() {
GPUImageNativeLibrary.YUVtoRBGA(data, previewSize.width, previewSize.height,
mGLRgbBuffer.array());
mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer, previewSize, mGLTextureId);
camera.addCallbackBuffer(data);
if (mImageWidth != previewSize.width) {
mImageWidth = previewSize.width;
mImageHeight = previewSize.height;
adjustImageScaling();
}
}
});
}
}
public void setUpSurfaceTexture(final Camera camera) {
runOnDraw(new Runnable() {
@Override
public void run() {
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mSurfaceTexture = new SurfaceTexture(textures[0]);
try {
camera.setPreviewTexture(mSurfaceTexture);
camera.setPreviewCallback(GPUImageRenderer.this);
camera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
});
}
public void setFilter(final GPUImageFilter filter) {
runOnDraw(new Runnable() {
@Override
public void run() {
final GPUImageFilter oldFilter = mFilter;
mFilter = filter;
if (oldFilter != null) {
oldFilter.destroy();
}
mFilter.init();
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(mOutputWidth, mOutputHeight);
}
});
}
public void setFilter3(final GPUImageFilter filter) {
runOnDraw(new Runnable() {
@Override
public void run() {
final GPUImageFilter oldFilter = mFilter;
mFilter = filter;
// if (oldFilter != null) {
// oldFilter.destroy();
// }
mFilter.init();
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(mOutputWidth, mOutputHeight);
}
});
}
public void deleteImage() {
runOnDraw(new Runnable() {
@Override
public void run() {
GLES20.glDeleteTextures(1, new int[]{
mGLTextureId
}, 0);
mGLTextureId = NO_IMAGE;
}
});
}
public void setImageBitmap(final Bitmap bitmap) {
setImageBitmap(bitmap, true);
}
public void setImageBitmap(final Bitmap bitmap, final boolean recycle) {
if (bitmap == null) {
return;
}
runOnDraw(new Runnable() {
@Override
public void run() {
Bitmap resizedBitmap = null;
if (bitmap.getWidth() % 2 == 1) {
resizedBitmap = Bitmap.createBitmap(bitmap.getWidth() + 1, bitmap.getHeight(),
Bitmap.Config.ARGB_8888);
Canvas can = new Canvas(resizedBitmap);
can.drawARGB(0x00, 0x00, 0x00, 0x00);
can.drawBitmap(bitmap, 0, 0, null);
mAddedPadding = 1;
} else {
mAddedPadding = 0;
}
mGLTextureId = OpenGlUtils.loadTexture(
resizedBitmap != null ? resizedBitmap : bitmap, mGLTextureId, recycle);
if (resizedBitmap != null) {
resizedBitmap.recycle();
}
mImageWidth = bitmap.getWidth();
mImageHeight = bitmap.getHeight();
adjustImageScaling();
}
});
}
public void setScaleType(GPUImage.ScaleType scaleType) {
mScaleType = scaleType;
}
protected int getFrameWidth() {
return mOutputWidth;
}
protected int getFrameHeight() {
return mOutputHeight;
}
private void adjustImageScaling() {
float outputWidth = mOutputWidth;
float outputHeight = mOutputHeight;
if (mRotation == Rotation.ROTATION_270 || mRotation == Rotation.ROTATION_90) {
outputWidth = mOutputHeight;
outputHeight = mOutputWidth;
}
float ratio1 = outputWidth / mImageWidth;
float ratio2 = outputHeight / mImageHeight;
float ratioMax = Math.max(ratio1, ratio2);
int imageWidthNew = (Math.round(mImageWidth * ratioMax));
int imageHeightNew = (Math.round(mImageHeight * ratioMax));
float ratioWidth = imageWidthNew / (outputWidth);
float ratioHeight = imageHeightNew / (outputHeight);
float[] cube = CUBE;
float[] textureCords = TextureRotationUtil.getRotation(mRotation, mFlipHorizontal, mFlipVertical);
if (mScaleType == GPUImage.ScaleType.CENTER_CROP) {
float distHorizontal = (1 - 1 / ratioWidth) / 2;
float distVertical = (1 - 1 / ratioHeight) / 2;
textureCords = new float[]{
addDistance(textureCords[0], distHorizontal), addDistance(textureCords[1], distVertical),
addDistance(textureCords[2], distHorizontal), addDistance(textureCords[3], distVertical),
addDistance(textureCords[4], distHorizontal), addDistance(textureCords[5], distVertical),
addDistance(textureCords[6], distHorizontal), addDistance(textureCords[7], distVertical),
};
} else {
cube = new float[]{
CUBE[0] * ratioWidth, CUBE[1] * ratioHeight,
CUBE[2] * ratioWidth, CUBE[3] * ratioHeight,
CUBE[4] * ratioWidth, CUBE[5] * ratioHeight,
CUBE[6] * ratioWidth, CUBE[7] * ratioHeight,
};
}
mGLCubeBuffer.clear();
mGLCubeBuffer.put(cube).position(0);
mGLTextureBuffer.clear();
mGLTextureBuffer.put(textureCords).position(0);
}
private float addDistance(float coordinate, float distance) {
return coordinate == 0.0f ? distance : 1 - distance;
}
public void setRotationCamera(final Rotation rotation, final boolean flipHorizontal,
final boolean flipVertical) {
setRotation(rotation, flipVertical, flipHorizontal);
}
public void setRotation(final Rotation rotation, final boolean flipHorizontal,
final boolean flipVertical) {
mRotation = rotation;
mFlipHorizontal = flipHorizontal;
mFlipVertical = flipVertical;
adjustImageScaling();
}
public Rotation getRotation() {
return mRotation;
}
public boolean isFlippedHorizontally() {
return mFlipHorizontal;
}
public boolean isFlippedVertically() {
return mFlipVertical;
}
protected void runOnDraw(final Runnable runnable) {
synchronized (mRunOnDraw) {
mRunOnDraw.add(runnable);
}
}
protected void runOnDrawEnd(final Runnable runnable) {
synchronized (mRunOnDrawEnd) {
mRunOnDrawEnd.add(runnable);
}
}
}
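The key addition in this modified renderer is onSurfacSize(), which must be fed the on-screen size from outside before onSurfaceChanged() runs; otherwise mwidth/mheight are still zero and the first branch collapses the viewport to 0x0. A hypothetical call site (the original answer does not show one, and it assumes you can reach the renderer instance):
// Sketch: pass the view's laid-out size to the renderer once it is known.
gpuImageView.post(new Runnable() {
    @Override
    public void run() {
        mRenderer.onSurfacSize(gpuImageView.getWidth(), gpuImageView.getHeight());
    }
});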
I have a shake listener as follows:
public class ShakeListener implements SensorEventListener {
private String TAG = ShakeListener.class.getSimpleName();
private static final int FORCE_THRESHOLD = 800;
private static final int TIME_THRESHOLD = 100;
private static final int SHAKE_TIMEOUT = 500;
private static final int SHAKE_DURATION = 1000;
private static final int SHAKE_COUNT = 5;
private SensorManager mSensorMgr;
private float mLastX = -1.0f, mLastY = -1.0f, mLastZ = -1.0f;
private long mLastTime;
private OnShakeListener mShakeListener;
private Context mContext;
private int mShakeCount = 0;
private long mLastShake;
private long mLastForce;
public interface OnShakeListener {
public void onShake();
}
public ShakeListener(Context context) {
Log.d(TAG,"ShakeListener invoked---->");
mContext = context;
resume();
}
public void setOnShakeListener(OnShakeListener listener) {
mShakeListener = listener;
}
public void resume() {
mSensorMgr = (SensorManager) mContext
.getSystemService(Context.SENSOR_SERVICE);
if (mSensorMgr == null) {
throw new UnsupportedOperationException("Sensors not supported");
}
boolean supported = false;
try {
supported = mSensorMgr.registerListener(this,
mSensorMgr.getDefaultSensor(Sensor.TYPE_ACCELEROMETER),
SensorManager.SENSOR_DELAY_GAME);
} catch (Exception e) {
Toast.makeText(mContext, "Shaking not supported", Toast.LENGTH_LONG)
.show();
}
if ((!supported) && (mSensorMgr != null))
mSensorMgr.unregisterListener(this);
}
public void pause() {
if (mSensorMgr != null) {
mSensorMgr.unregisterListener(this);
mSensorMgr = null;
}
}
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
public void onSensorChanged(SensorEvent event) {
if (event.sensor.getType() != Sensor.TYPE_ACCELEROMETER)
return;
long now = System.currentTimeMillis();
if ((now - mLastForce) > SHAKE_TIMEOUT) {
mShakeCount = 0;
}
if ((now - mLastTime) > TIME_THRESHOLD) {
long diff = now - mLastTime;
float speed = Math.abs(event.values[SensorManager.DATA_X]
+ event.values[SensorManager.DATA_Y]
+ event.values[SensorManager.DATA_Z] - mLastX - mLastY
- mLastZ)
/ diff * 10000;
if (speed > FORCE_THRESHOLD) {
if ((++mShakeCount >= SHAKE_COUNT)
&& (now - mLastShake > SHAKE_DURATION)) {
mLastShake = now;
mShakeCount = 0;
Log.d(TAG,"ShakeListener mShakeListener---->"+mShakeListener);
if (mShakeListener != null) {
mShakeListener.onShake();
}
}
mLastForce = now;
}
mLastTime = now;
mLastX = event.values[SensorManager.DATA_X];
mLastY = event.values[SensorManager.DATA_Y];
mLastZ = event.values[SensorManager.DATA_Z];
}
}
}
I am using this listener in one of my activities as follows:
mShaker = new ShakeListener(this);
mShaker.setOnShakeListener(new ShakeListener.OnShakeListener () {
public void onShake()
{
Intent myIntent = new Intent(myActivity.this, loginActivity.class);
startActivity(myIntent);
}
});
This shake should trigger only when I am on that particular activity, but it is triggering in all activities. Worse, even when I close the app, shaking still triggers the event and makes the app open. Any help is appreciated.
Add these methods to your activity:
@Override
protected void finalize() throws Throwable {
try {
stop();
} catch (Exception e) {
e.printStackTrace();
}
super.finalize();
}
public void stop() {
try {
// unregister via the listener's own pause(); the activity has no sensor fields of its own
mShaker.pause();
} catch (Exception e) {
e.printStackTrace();
}
}
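A more robust alternative (my suggestion, not part of the answer above; it only uses the pause()/resume() methods ShakeListener already has): tie the listener to the activity lifecycle, so it is registered only while that screen is in the foreground. finalize() runs at the garbage collector's discretion, which is exactly why shakes can still fire long after the activity is gone.
@Override
protected void onResume() {
    super.onResume();
    mShaker.resume();  // register the accelerometer listener for this screen only
}

@Override
protected void onPause() {
    super.onPause();
    mShaker.pause();   // unregister so other screens (or a closed app) get no shake events
}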