SurfaceView height + width get ignored - Android

The following code works flawlessly, but the size of my video player gets messed up after rotating my SurfaceView from portrait to landscape. I have tried several options, such as setFixedSize(), setSizeFromLayout(), and removing everything that involves the width and height of my SurfaceView. Does anyone know what's wrong with the code below?
Or has anyone had the same problem in the past?
package com.list;
import io.vov.vitamio.MediaPlayer;
import io.vov.vitamio.MediaPlayer.OnBufferingUpdateListener;
import io.vov.vitamio.MediaPlayer.OnCompletionListener;
import io.vov.vitamio.MediaPlayer.OnPreparedListener;
import io.vov.vitamio.MediaPlayer.OnVideoSizeChangedListener;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class mediaPlayer extends Activity implements OnBufferingUpdateListener, OnCompletionListener, OnPreparedListener, OnVideoSizeChangedListener, SurfaceHolder.Callback {
private static final String TAG = "MediaPlayerDemo";
private int mVideoWidth;
private int mVideoHeight;
private MediaPlayer mMediaPlayer;
private SurfaceView mPreview;
private SurfaceHolder holder;
private boolean mIsVideoSizeKnown = false;
private boolean mIsVideoReadyToBePlayed = false;
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.mediaplayer);
mPreview = (SurfaceView) findViewById(R.id.surface);
holder = mPreview.getHolder();
holder.addCallback(this);
}
private void playVideo() {
doCleanUp();
try {
mMediaPlayer = new MediaPlayer(this);
Intent myIntent = this.getIntent();
String url = myIntent.getStringExtra("url");
mMediaPlayer.setDataSource(url);
mMediaPlayer.setDisplay(holder);
mMediaPlayer.prepareAsync();
mMediaPlayer.setScreenOnWhilePlaying(true);
mMediaPlayer.setOnBufferingUpdateListener(this);
mMediaPlayer.setOnCompletionListener(this);
mMediaPlayer.setOnPreparedListener(this);
mMediaPlayer.setOnVideoSizeChangedListener(this);
} catch (Exception e) {
Log.e(TAG, "error: " + e.getMessage(), e);
}
}
public void onBufferingUpdate(MediaPlayer arg0, int percent) {
Log.d(TAG, "onBufferingUpdate percent:" + percent);
}
public void onCompletion(MediaPlayer arg0) {
Log.d(TAG, "onCompletion called");
mMediaPlayer.release();
}
public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
Log.v(TAG, "onVideoSizeChanged called");
if (width == 0 || height == 0) {
Log.e(TAG, "invalid video width(" + width + ") or height(" + height + ")");
return;
}
Log.v("afmeting", "->" +width+"px bij "+height+"px");
mIsVideoSizeKnown = true;
if (mIsVideoReadyToBePlayed && mIsVideoSizeKnown) {
startVideoPlayback();
}
}
public void onPrepared(MediaPlayer mediaplayer) {
Log.d(TAG, "onPrepared called");
mIsVideoReadyToBePlayed = true;
if (mIsVideoReadyToBePlayed && mIsVideoSizeKnown) {
startVideoPlayback();
}
}
public void surfaceChanged(SurfaceHolder surfaceholder, int i, int j, int k) {
Log.d(TAG, "surfaceChanged called" + i + " " + j + " " + k);
}
public void surfaceDestroyed(SurfaceHolder surfaceholder) {
Log.d(TAG, "surfaceDestroyed called");
}
public void surfaceCreated(SurfaceHolder holder) {
Log.d(TAG, "surfaceCreated called");
playVideo();
}
@Override
protected void onPause() {
super.onPause();
releaseMediaPlayer();
doCleanUp();
}
@Override
protected void onDestroy() {
super.onDestroy();
releaseMediaPlayer();
doCleanUp();
}
private void releaseMediaPlayer() {
if (mMediaPlayer != null) {
mMediaPlayer.release();
mMediaPlayer = null;
}
}
private void doCleanUp() {
mVideoWidth = 0;
mVideoHeight = 0;
mIsVideoReadyToBePlayed = false;
mIsVideoSizeKnown = false;
}
private void startVideoPlayback() {
Log.v(TAG, "startVideoPlayback");
holder.setSizeFromLayout();
mMediaPlayer.start();
}
}

Same problem here:
I believe the problem is the Vitamio library, as I experienced a similar issue when dealing with the Vitamio SDK. Resizing didn't work for me when using a SurfaceView and invoking setLayoutParams(ViewGroup.LayoutParams params). The code below works fine with the standard Android media framework, but importing the Vitamio packages breaks it.
RelativeLayout.LayoutParams video_VIEWLAYOUT = (RelativeLayout.LayoutParams) videoView.getLayoutParams();
video_VIEWLAYOUT.width = screenWidth;
video_VIEWLAYOUT.height = (int) (((float)videoHeight / (float)videoWidth) *(float)screenWidth);
videoView.setLayoutParams(video_VIEWLAYOUT);
My recommendation would be to try io.vov.vitamio.widget.VideoView instead of android.view.SurfaceView.
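For illustration, a minimal sketch of the swap, assuming Vitamio's VideoView mirrors the android.widget.VideoView API (which it is designed to do); R.id.video_view is a hypothetical layout id:
// io.vov.vitamio.widget.VideoView is intended as a drop-in replacement
// that handles its own surface sizing across orientation changes.
io.vov.vitamio.widget.VideoView videoView =
        (io.vov.vitamio.widget.VideoView) findViewById(R.id.video_view);
videoView.setVideoPath(url); // same data source as with the SurfaceView approach
videoView.requestFocus();
videoView.start();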

Use this method and call it inside the onConfigurationChanged method:
private void setVideoSize() {
// Get the dimensions of the video.
// mVideoView is your VideoView object.
int videoWidth = mVideoView.getVideoWidth();
int videoHeight = mVideoView.getVideoHeight();
float videoProportion = (float) videoWidth / (float) videoHeight;
// Get the width of the screen
int screenWidth = getWindowManager().getDefaultDisplay().getWidth();
int screenHeight = getWindowManager().getDefaultDisplay().getHeight();
float screenProportion = (float) screenWidth / (float) screenHeight;
// Get the SurfaceView layout parameters
android.view.ViewGroup.LayoutParams lp = mVideoView.getLayoutParams();
if (videoProportion > screenProportion) {
lp.width = screenWidth;
lp.height = (int) ((float) screenWidth / videoProportion);
} else {
lp.width = (int) (videoProportion * (float) screenHeight);
lp.height = screenHeight;
}
// Commit the layout parameters
mVideoView.setLayoutParams(lp);
}
Now, on a change of orientation, your VideoView will be resized.
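For reference, a minimal sketch of the hookup; this assumes your activity declares android:configChanges="orientation|screenSize" in the manifest, otherwise onConfigurationChanged never fires and the activity is recreated instead:
// Requires: import android.content.res.Configuration;
@Override
public void onConfigurationChanged(Configuration newConfig) {
    super.onConfigurationChanged(newConfig);
    setVideoSize(); // recompute the view size for the new orientation
}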

Related

JavaCV video recorder orientation is not proper in portrait mode

Hi, I am using https://github.com/bytedeco/javacv/ for recording video.
In landscape mode the orientation is fine, but when I change the orientation to portrait mode, the video is rotated -90 degrees.
Does anybody have an idea what I may be doing wrong? Here is the code.
package org.bytedeco.javacv_android_example.record;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacpp.opencv_imgproc;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.javacv_android_example.R;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class RecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
/* The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
long startTime = 0;
boolean recording = false;
volatile boolean runAudioThread = true;
Frame[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;
private PowerManager.WakeLock mWakeLock;
private File ffmpeg_link = new File(Environment.getExternalStorageDirectory(), "stream.mp4");
private FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private Frame yuvImage = null;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
OpenCVFrameConverter.ToIplImage converter = new OpenCVFrameConverter.ToIplImage();
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
setContentView(R.layout.activity_record);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
}
@Override
protected void onResume() {
super.onResume();
if(mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if(mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if(cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if(mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if(keyCode == KeyEvent.KEYCODE_BACK) {
if(recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.activity_record, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if(1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG, "init recorder");
if(RECORD_LENGTH > 0) {
imagesIndex = 0;
images = new Frame[RECORD_LENGTH * frameRate];
timestamps = new long[images.length];
for(int i = 0; i < images.length; i++) {
images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
timestamps[i] = -1;
}
} else if(yuvImage == null) {
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
Log.i(LOG_TAG, "create yuvImage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch(InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if(recorder != null && recording) {
if(RECORD_LENGTH > 0) {
Log.v(LOG_TAG, "Writing frames");
try {
int firstIndex = imagesIndex % images.length;
int lastIndex = (imagesIndex - 1) % images.length;
if(imagesIndex <= images.length) {
firstIndex = 0;
lastIndex = imagesIndex - 1;
}
if((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
startTime = 0;
}
if(lastIndex < firstIndex) {
lastIndex += images.length;
}
for(int i = firstIndex; i <= lastIndex; i++) {
long t = timestamps[i % timestamps.length] - startTime;
if(t >= 0) {
if(t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(images[i % images.length]);
}
}
firstIndex = samplesIndex % samples.length;
lastIndex = (samplesIndex - 1) % samples.length;
if(samplesIndex <= samples.length) {
firstIndex = 0;
lastIndex = samplesIndex - 1;
}
if(lastIndex < firstIndex) {
lastIndex += samples.length;
}
for(int i = firstIndex; i <= lastIndex; i++) {
recorder.recordSamples(samples[i % samples.length]);
}
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
recording = false;
Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public void onClick(View v) {
if(!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
if(RECORD_LENGTH > 0) {
samplesIndex = 0;
samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
for(int i = 0; i < samples.length; i++) {
samples[i] = ShortBuffer.allocate(bufferSize);
}
} else {
audioData = ShortBuffer.allocate(bufferSize);
}
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while(runAudioThread) {
if(RECORD_LENGTH > 0) {
audioData = samples[samplesIndex++ % samples.length];
audioData.position(0).limit(0);
}
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if(bufferReadResult > 0) {
Log.v(LOG_TAG, "bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if(recording) {
if(RECORD_LENGTH <= 0) {
try {
recorder.recordSamples(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}
Log.v(LOG_TAG, "AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if(audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG, "audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera", "camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch(IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters camParams = mCamera.getParameters();
List<Camera.Size> sizes = camParams.getSupportedPreviewSizes();
// Sort the list in ascending order
Collections.sort(sizes, new Comparator<Camera.Size>() {
public int compare(final Camera.Size a, final Camera.Size b) {
return a.width * a.height - b.width * b.height;
}
});
// Pick the first preview size that is equal or bigger, or pick the last (biggest) option if we cannot
// reach the initial settings of imageWidth/imageHeight.
for(int i = 0; i < sizes.size(); i++) {
if((sizes.get(i).width >= imageWidth && sizes.get(i).height >= imageHeight) || i == sizes.size() - 1) {
imageWidth = sizes.get(i).width;
imageHeight = sizes.get(i).height;
Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth + "x" + imageHeight);
break;
}
}
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG, "Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
camParams.setPreviewFrameRate(frameRate);
Log.v(LOG_TAG, "Preview Framerate: " + camParams.getPreviewFrameRate());
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch(RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if(!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if(isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if(audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
if(RECORD_LENGTH > 0) {
int i = imagesIndex++ % images.length;
yuvImage = images[i];
timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
}
/* get video data */
if(yuvImage != null && recording) {
((ByteBuffer) yuvImage.image[0].position(0)).put(data);
if(RECORD_LENGTH <= 0) {
try {
Log.v(LOG_TAG, "Writing Frame");
long t = 1000 * (System.currentTimeMillis() - startTime);
if(t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
opencv_core.IplImage toBeRotated = converter.convert(yuvImage);
toBeRotated = rotate(toBeRotated, -90);
Frame newFrame = converter.convert(toBeRotated);
opencv_core.cvReleaseImage(toBeRotated);
recorder.record(newFrame);
// recorder.record(yuvImage);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}
public static opencv_core.IplImage rotate(opencv_core.IplImage image, double angle) {
opencv_core.IplImage copy = opencv_core.cvCloneImage(image);
double radians = Math.toRadians(angle);
double sin = Math.abs(Math.sin(radians));
double cos = Math.abs(Math.cos(radians));
int newWidth = (int) (copy.width() * cos + copy.height() * sin);
int newHeight = (int) (copy.width() * sin + copy.height() * cos);
int[] newWidthHeight = {newWidth, newHeight};
opencv_core.CvSize size = new opencv_core.CvSize(copy.height(), copy.width());
opencv_core.IplImage rotatedImage = opencv_core.cvCreateImage(size, copy.depth(), copy.nChannels());
opencv_core.CvMat mapMatrix = opencv_core.cvCreateMat( 2, 3, opencv_core.CV_32FC1 );
//Define Mid Point
opencv_core.CvPoint2D32f centerPoint = new opencv_core.CvPoint2D32f();
centerPoint.x(copy.width()/2);
centerPoint.y(copy.height()/2);
//Get Rotational Matrix
opencv_imgproc.cv2DRotationMatrix(centerPoint, angle, 1, mapMatrix);
opencv_imgproc.cvWarpAffine(copy, rotatedImage, mapMatrix, opencv_imgproc.CV_INTER_CUBIC + opencv_imgproc.CV_WARP_FILL_OUTLIERS, opencv_core.cvScalarAll(170));
opencv_core.cvReleaseImage(copy);
opencv_core.cvReleaseMat(mapMatrix);
return rotatedImage;
}
}
You could either do something like recorder.setVideoMetadata("rotate", "90") or transform the images with something like this:
FFmpegFrameFilter filter = new FFmpegFrameFilter("transpose=cclock_flip", imageWidth, imageHeight);
filter.start(); // the filter must be started before frames are pushed
filter.push(frame);
Frame frame2;
while ((frame2 = filter.pull()) != null) {
recorder.record(frame2);
}

SurfaceView playing video as stretched view

I am using the following mediaplayer.xml to play the video file:
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/MainView"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:background="#4d4d4d"
android:gravity="center"
android:keepScreenOn="true"
android:orientation="vertical" >
<SurfaceView
android:id="#+id/surfaceView"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_gravity="center"
/>
</LinearLayout>
The following MediaPlayerActivity class uses SurfaceView and MediaPlayer.
While playing video in portrait mode, the video appears stretched. I want the video to play vertically centered without occupying the full screen, and in landscape mode it should play full screen.
public class MediaPlayerActivity extends Activity implements OnCompletionListener,
OnErrorListener, OnInfoListener, OnBufferingUpdateListener,
OnPreparedListener, OnSeekCompleteListener, SurfaceHolder.Callback,
MediaController.MediaPlayerControl {
private MediaController controller;
Display currentDisplay;
private SurfaceView surfaceView;
private SurfaceHolder surfaceHolder;
private MediaPlayer mediaPlayer;
int videoWidth = 0;
int videoHeight = 0;
boolean readyToPlay = false;
private String streamingVideoUrl;
private ProgressDialog dialog;
public final String TAG = "VIDEO_PLAYER";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.d(TAG,"Media player activity called ");
setContentView(R.layout.mediaplayer);
if(getIntent() != null ){
streamingVideoUrl = getIntent().getStringExtra("stream_url");
}
surfaceView = (SurfaceView) findViewById(R.id.surfaceView);
surfaceView.setOnClickListener(surViewClickListener);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
/*setVideoSize();*/
try {
mediaPlayer = new MediaPlayer();
mediaPlayer.setOnCompletionListener(this);
mediaPlayer.setOnErrorListener(this);
mediaPlayer.setOnInfoListener(this);
mediaPlayer.setOnPreparedListener(this);
mediaPlayer.setOnSeekCompleteListener(this);
mediaPlayer.setOnBufferingUpdateListener(this);
mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
//Log.d(TAG,"Returned by getDatasource :"+getDataSource(streamingVideoUrl));
mediaPlayer.setDataSource(streamingVideoUrl);
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (SecurityException e) {
e.printStackTrace();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
catch(Exception e)
{
e.printStackTrace();
}
currentDisplay = getWindowManager().getDefaultDisplay();
controller = new MediaController(this);
dialog = new ProgressDialog(this);
//dialog.setMessage("Preparing File to Streaming");
dialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
dialog.setCanceledOnTouchOutside(false);
if(this!= null && !this.isFinishing()){
dialog.show();
}
handler.postDelayed(r, 100);
}
/*private void setVideoSize() {
try {
// // Get the dimensions of the video
int videoWidth = mediaPlayer.getVideoWidth();
int videoHeight = mediaPlayer.getVideoHeight();
float videoProportion = (float) videoWidth / (float) videoHeight;
// Get the width of the screen
int screenWidth = getWindowManager().getDefaultDisplay().getWidth();
int screenHeight = getWindowManager().getDefaultDisplay().getHeight();
float screenProportion = (float) screenWidth / (float) screenHeight;
// Get the SurfaceView layout parameters
android.view.ViewGroup.LayoutParams lp = surfaceView.getLayoutParams();
if (videoProportion > screenProportion) {
lp.width = screenWidth;
lp.height = (int) ((float) screenWidth / videoProportion);
} else {
lp.width = (int) (videoProportion * (float) screenHeight);
lp.height = screenHeight;
}
// Commit the layout parameters
surfaceView.setLayoutParams(lp);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}*/
/*//coded by Karthikeyan V
//converting url to byte
private String getDataSource(String path) throws IOException {
Log.d(TAG,"getDataSource called");
if (!URLUtil.isNetworkUrl(path)) {
return path;
} else {
URL url = new URL(path);
URLConnection cn = url.openConnection();
cn.connect();
InputStream stream = cn.getInputStream();
if (stream == null)
throw new RuntimeException("stream is null");
File temp = File.createTempFile("mediaplayertmp", "dat");
temp.deleteOnExit();
String tempPath = temp.getAbsolutePath();
FileOutputStream out = new FileOutputStream(temp);
byte buf[] = new byte[128];
do {
int numread = stream.read(buf);
if (numread <= 0)
break;
out.write(buf, 0, numread);
} while (true);
try {
stream.close();
} catch (IOException ex) {
Log.e(TAG, "error: " + ex.getMessage(), ex);
}
Log.d(TAG,"temp path :"+tempPath);
return tempPath;
}
}*/
@Override
public boolean canPause() {
return true;
}
@Override
public boolean canSeekBackward() {
return true;
}
@Override
public boolean canSeekForward() {
return true;
}
@Override
public int getBufferPercentage() {
return 0;
}
@Override
public int getCurrentPosition() {
if(mediaPlayer !=null)
return mediaPlayer.getCurrentPosition();
else
return 0;
}
@Override
public int getDuration() {
return mediaPlayer.getDuration();
}
@Override
public boolean isPlaying() {
return mediaPlayer.isPlaying();
}
@Override
public void pause() {
if (mediaPlayer != null) {
if (mediaPlayer.isPlaying()) {
mediaPlayer.pause();
}
}
}
@Override
public void seekTo(int pos) {
mediaPlayer.seekTo(pos);
}
@Override
public void start() {
mediaPlayer.start();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
if(Constants.DEBUG)
Log.v(TAG, "Media Flow surfaceChanged Called");
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if(Constants.DEBUG)
Log.v(TAG, "Media Flow surfaceCreated Called");
mediaPlayer.setDisplay(holder);
try {
mediaPlayer.prepareAsync();
} catch (IllegalStateException e) {
if(Constants.DEBUG)
Log.v(TAG, "Media Flow IllegalStateException " + e.getMessage());
finish();
}
if(Constants.DEBUG)
Log.d(TAG, "Media Flow MediaPlayer Preparing");
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if(Constants.DEBUG)
Log.v(TAG, "Media Flow surfaceDestroyed Called");
}
@Override
public void onSeekComplete(MediaPlayer mp) {
if(Constants.DEBUG)
Log.v(TAG, "Media Flow onSeekComplete Called");
}
@Override
public void onPrepared(MediaPlayer mp) {
if(Constants.DEBUG)
Log.v(TAG, "Media Flow onPrepared Called");
// dismissProgressLoading();
controller.setMediaPlayer(this);
controller.setAnchorView(this.findViewById(R.id.MainView));
controller.setEnabled(true);
controller.show();
mp.start();
}
@Override
public void onBufferingUpdate(MediaPlayer mp, int percent) {
if(Constants.DEBUG)
Log.v(TAG, "Media Flow MediaPlayer Buffering: " + percent + "%");
dismissProgressLoading();
}
@Override
public boolean onInfo(MediaPlayer mp, int whatInfo, int extra) {
//
dismissProgressLoading();
if(Constants.DEBUG)
{
if (whatInfo == MediaPlayer.MEDIA_INFO_BAD_INTERLEAVING) {
if(Constants.DEBUG)
Log.v(TAG, "Media Flow Media Info, Media Info Bad Interleaving " + extra);
} else if (whatInfo == MediaPlayer.MEDIA_INFO_NOT_SEEKABLE) {
if(Constants.DEBUG)
Log.v(TAG, "Media Flow Media Info, Media Info Not Seekable " + extra);
} else if (whatInfo == MediaPlayer.MEDIA_INFO_UNKNOWN) {
Log.v(TAG, "Media Flow Media Info, Media Info Unknown " + extra);
} else if (whatInfo == MediaPlayer.MEDIA_INFO_VIDEO_TRACK_LAGGING) {
Log.v(TAG, "Media Flow MediaInfo, Media Info Video Track Lagging " + extra);
} else if (whatInfo == MediaPlayer.MEDIA_INFO_METADATA_UPDATE) {
Log.v(TAG, "Media Flow MediaInfo, Media Info Metadata Update " + extra);
}
}
return false;
}
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
// TODO Auto-generated method stub
dismissProgressLoading();
StringBuilder sb = new StringBuilder();
sb.append("Media Player Error: ");
switch (what) {
case MediaPlayer.MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
sb.append("Not Valid for Progressive Playback");
break;
case MediaPlayer.MEDIA_ERROR_SERVER_DIED:
sb.append("Server Died");
break;
case MediaPlayer.MEDIA_ERROR_UNKNOWN:
sb.append("Media Error Unknown");
break;
default:
sb.append(" Non standard (");
sb.append(what);
sb.append(")");
}
//sb.append(" (" + what + ") ");
sb.append(extra);
showErrorDialog("Cannot play video", sb.toString());
return true;
}
void dismissProgressLoading() {
if (dialog != null && dialog.isShowing()) {
dialog.dismiss();
}
}
@Override
public void onCompletion(MediaPlayer mp) {
if(Constants.DEBUG)
Log.v(TAG, "Media Flow onCompletion Called");
dismissProgressLoading();
onBackPressed();
}
@Override
public void onBackPressed() {
// TODO Auto-generated method stub
super.onBackPressed();
}
final Runnable r = new Runnable() {
public void run() {
if (mediaPlayer != null) {
if (mediaPlayer.isPlaying()
&& mediaPlayer.getCurrentPosition() > 0) {
if(Constants.DEBUG)
Log.d(TAG, "isPlaying : " + mediaPlayer.isPlaying());
if(Constants.DEBUG)
Log.d(TAG,
"currentPosition : "
+ mediaPlayer.getCurrentPosition());
handler.sendEmptyMessage(0);
} else {
handler.postDelayed(this, 100);
}
}
}
};
Handler handler = new Handler() {
public void handleMessage(Message msg) {
dismissProgressLoading();
}
};
@Override
protected void onResume() {
if(Constants.SHOULD_FINISH_APPLICATION)
finish();
super.onResume();
}
@Override
protected void onRestart() {
super.onRestart();
}
protected void onPause() {
super.onPause();
pause();
}
@Override
protected void onDestroy() {
super.onDestroy();
dismissProgressLoading();
if (mediaPlayer != null) {
mediaPlayer.stop();
mediaPlayer.release();
mediaPlayer = null;
}
if(controller !=null)
{
controller.hide();
}
}
OnClickListener surViewClickListener = new OnClickListener() {
@Override
public void onClick(View v) {
if (controller != null) {
if (controller.isShowing()) {
controller.hide();
} else {
controller.show();
}
}
}
};
@SuppressWarnings("deprecation")
void showErrorDialog(String title, String message) {
AlertDialog alertDialog = new AlertDialog.Builder(this).create();
// Setting Dialog Title
alertDialog.setTitle(title);
// Setting Dialog Message
alertDialog.setMessage(message);
// Setting Icon to Dialog
alertDialog.setIcon(android.R.drawable.ic_dialog_alert);
// Setting OK Button
alertDialog.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
// Write your code here to execute after dialog closed
dialog.dismiss();
finish();
}
});
// Showing Alert Message
if(this!= null && !this.isFinishing()){
alertDialog.show();
}
}
}
private void handleAspectRatio() {
int surfaceView_Width = surfaceView.getWidth();
int surfaceView_Height = surfaceView.getHeight();
float video_Width = mediaPlayer.getVideoWidth();
float video_Height = mediaPlayer.getVideoHeight();
float ratio_width = surfaceView_Width/video_Width;
float ratio_height = surfaceView_Height/video_Height;
float aspectratio = video_Width/video_Height;
LayoutParams layoutParams = surfaceView.getLayoutParams();
if (ratio_width > ratio_height){
layoutParams.width = (int) (surfaceView_Height * aspectratio);
layoutParams.height = surfaceView_Height;
}else{
layoutParams.width = surfaceView_Width;
layoutParams.height = (int) (surfaceView_Width / aspectratio);
}
surfaceView.setLayoutParams(layoutParams);
}
Call the above function before calling mediaPlayer.start();
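For example, from the onPrepared callback (a minimal sketch; by that point the video dimensions are known, so handleAspectRatio() can do its math):
@Override
public void onPrepared(MediaPlayer mp) {
    handleAspectRatio(); // size the SurfaceView to the video's aspect ratio
    mp.start();
}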
I met the same problem. After looking into the VideoView source code in API 23, I found a good method. Here is my solution: create a custom SurfaceView and override onMeasure(), then use VideoSurfaceView to play the video.
public class VideoSurfaceView extends SurfaceView implements MediaPlayer.OnVideoSizeChangedListener {
private int mVideoWidth;
private int mVideoHeight;
public VideoSurfaceView(Context context) {
super(context);
}
public VideoSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public VideoSurfaceView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
/**
* Set video size.
*
* @see MediaPlayer#getVideoWidth()
* @see MediaPlayer#getVideoHeight()
*/
public void setVideoSize(int videoWidth, int videoHeight) {
mVideoWidth = videoWidth;
mVideoHeight = videoHeight;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
//Log.i("####", "onMeasure(" + MeasureSpec.toString(widthMeasureSpec) + ", "
// + MeasureSpec.toString(heightMeasureSpec) + ")");
int width = getDefaultSize(mVideoWidth, widthMeasureSpec);
int height = getDefaultSize(mVideoHeight, heightMeasureSpec);
if (mVideoWidth > 0 && mVideoHeight > 0) {
int widthSpecMode = MeasureSpec.getMode(widthMeasureSpec);
int widthSpecSize = MeasureSpec.getSize(widthMeasureSpec);
int heightSpecMode = MeasureSpec.getMode(heightMeasureSpec);
int heightSpecSize = MeasureSpec.getSize(heightMeasureSpec);
if (widthSpecMode == MeasureSpec.EXACTLY && heightSpecMode == MeasureSpec.EXACTLY) {
// the size is fixed
width = widthSpecSize;
height = heightSpecSize;
// for compatibility, we adjust size based on aspect ratio
if (mVideoWidth * height < width * mVideoHeight) {
//Log.i("###", "image too wide, correcting");
width = height * mVideoWidth / mVideoHeight;
} else if (mVideoWidth * height > width * mVideoHeight) {
//Log.i("###", "image too tall, correcting");
height = width * mVideoHeight / mVideoWidth;
}
} else if (widthSpecMode == MeasureSpec.EXACTLY) {
// only the width is fixed, adjust the height to match aspect ratio if possible
width = widthSpecSize;
height = width * mVideoHeight / mVideoWidth;
if (heightSpecMode == MeasureSpec.AT_MOST && height > heightSpecSize) {
// couldn't match aspect ratio within the constraints
height = heightSpecSize;
}
} else if (heightSpecMode == MeasureSpec.EXACTLY) {
// only the height is fixed, adjust the width to match aspect ratio if possible
height = heightSpecSize;
width = height * mVideoWidth / mVideoHeight;
if (widthSpecMode == MeasureSpec.AT_MOST && width > widthSpecSize) {
// couldn't match aspect ratio within the constraints
width = widthSpecSize;
}
} else {
// neither the width nor the height are fixed, try to use actual video size
width = mVideoWidth;
height = mVideoHeight;
if (heightSpecMode == MeasureSpec.AT_MOST && height > heightSpecSize) {
// too tall, decrease both width and height
height = heightSpecSize;
width = height * mVideoWidth / mVideoHeight;
}
if (widthSpecMode == MeasureSpec.AT_MOST && width > widthSpecSize) {
// too wide, decrease both width and height
width = widthSpecSize;
height = width * mVideoHeight / mVideoWidth;
}
}
} else {
// no size yet, just adopt the given spec sizes
}
setMeasuredDimension(width, height);
}
@Override
public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
mVideoWidth = mp.getVideoWidth();
mVideoHeight = mp.getVideoHeight();
if (mVideoWidth != 0 && mVideoHeight != 0) {
getHolder().setFixedSize(mVideoWidth, mVideoHeight);
requestLayout();
}
}
}
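A short usage sketch; the view id R.id.surface and the player wiring here are assumptions, not part of the original answer:
// The custom view implements MediaPlayer.OnVideoSizeChangedListener,
// so it can be registered on the player directly and will letterbox
// itself via onMeasure() once the video dimensions arrive.
VideoSurfaceView surfaceView = (VideoSurfaceView) findViewById(R.id.surface);
MediaPlayer player = new MediaPlayer();
player.setDisplay(surfaceView.getHolder());
player.setOnVideoSizeChangedListener(surfaceView);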

MediaPlayer playing sound but not video

I'm trying to play a video on Android using a SurfaceView.
I can load the video, and the sound plays correctly, but I can't see the video.
Here is my activity:
public class VideoTest extends Activity implements OnBufferingUpdateListener, OnCompletionListener, OnPreparedListener, OnVideoSizeChangedListener, SurfaceHolder.Callback, OnErrorListener {
private static final String TAG = "MediaPlayerDemo";
private int mVideoWidth;
private int mVideoHeight;
private MediaPlayer mMediaPlayer;
private SurfaceView mPreview;
private SurfaceHolder holder;
private String path;
private Bundle extras;
public static final String MEDIA = "media";
private static final int LOCAL_AUDIO = 1;
public static final int STREAM_AUDIO = 2;
private static final int RESOURCES_AUDIO = 3;
private static final int LOCAL_VIDEO = 4;
public static final int STREAM_VIDEO = 5;
private boolean mIsVideoSizeKnown = false;
private boolean mIsVideoReadyToBePlayed = false;
private RelativeLayout layout;
/**
*
* Called when the activity is first created.
*/
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
mPreview = new SurfaceView(this);
holder = mPreview.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
extras = getIntent().getExtras();
mPreview.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
layout = new RelativeLayout(this);
layout.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.FILL_PARENT, ViewGroup.LayoutParams.FILL_PARENT));
layout.addView(mPreview, 0);
setContentView(layout);
}
private void playVideo(Integer Media) {
doCleanUp();
try {
mMediaPlayer = MediaPlayer.create(this, R.raw.video);
mMediaPlayer.setDisplay(holder);
mMediaPlayer.setOnBufferingUpdateListener(this);
mMediaPlayer.setOnCompletionListener(this);
mMediaPlayer.setOnPreparedListener(this);
mMediaPlayer.setOnVideoSizeChangedListener(this);
mMediaPlayer.setOnErrorListener(this);
mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
} catch (Exception e) {
Log.e(TAG, "error: " + e.getMessage(), e);
}
}
public void onBufferingUpdate(MediaPlayer arg0, int percent) {
Log.d(TAG, "onBufferingUpdate percent:" + percent + " pos : " + mMediaPlayer.getCurrentPosition() + " / " + mMediaPlayer.getDuration());
if (mMediaPlayer.isPlaying()) {
Log.d(TAG, "Playing");
} else {
mMediaPlayer.start();
}
}
public void onCompletion(MediaPlayer arg0) {
Log.d(TAG, "onCompletion called");
}
public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
Log.v(TAG, "onVideoSizeChanged called");
if (width == 0 || height == 0) {
Log.e(TAG, "invalid video width(" + width + ") or height(" + height
+ ")");
return;
}
mIsVideoSizeKnown = true;
mVideoWidth = width;
mVideoHeight = height;
if (mIsVideoReadyToBePlayed && mIsVideoSizeKnown) {
startVideoPlayback();
}
}
public void onPrepared(MediaPlayer mediaplayer) {
Log.d(TAG, "onPrepared called");
mIsVideoReadyToBePlayed = true;
if (mIsVideoReadyToBePlayed && mIsVideoSizeKnown) {
startVideoPlayback();
}
}
public void surfaceChanged(SurfaceHolder surfaceholder, int i, int j, int k) {
Log.d(TAG, "surfaceChanged called. Width : " + j + ", height : " + k);
holder = surfaceholder;
mMediaPlayer.setDisplay(holder);
mIsVideoSizeKnown = true;
mVideoWidth = j;
mVideoHeight = k;
if (mIsVideoReadyToBePlayed && mIsVideoSizeKnown) {
startVideoPlayback();
}
}
public void surfaceDestroyed(SurfaceHolder surfaceholder) {
Log.d(TAG, "surfaceDestroyed called");
}
public void surfaceCreated(SurfaceHolder holder) {
Log.d(TAG, "surfaceCreated called");
playVideo(extras.getInt(MEDIA));
}
@Override
protected void onPause() {
super.onPause();
releaseMediaPlayer();
doCleanUp();
}
@Override
protected void onDestroy() {
super.onDestroy();
releaseMediaPlayer();
doCleanUp();
}
private void releaseMediaPlayer() {
if (mMediaPlayer != null) {
mMediaPlayer.release();
mMediaPlayer = null;
}
}
private void doCleanUp() {
mVideoWidth = 0;
mVideoHeight = 0;
mIsVideoReadyToBePlayed = false;
mIsVideoSizeKnown = false;
}
private void startVideoPlayback() {
Log.v(TAG, "startVideoPlayback " + mVideoWidth + "x" + mVideoHeight);
int width = mPreview.getWidth();
int height = mPreview.getHeight();
float boxWidth = width;
float boxHeight = height;
float videoWidth = mMediaPlayer.getVideoWidth();
float videoHeight = mMediaPlayer.getVideoHeight();
Log.i(TAG, String.format("startVideoPlayback # video %dx%d - box %dx%d", (int) videoWidth, (int) videoHeight, width, height));
float wr = boxWidth / videoWidth;
float hr = boxHeight / videoHeight;
float ar = videoWidth / videoHeight;
if (wr > hr) {
width = (int) (boxHeight * ar);
} else {
height = (int) (boxWidth / ar);
}
Log.i(TAG, String.format("Scaled to %dx%d", width, height));
holder.setFixedSize(width, height);
mMediaPlayer.start();
}
public boolean onError(MediaPlayer arg0, int arg1, int arg2) {
Log.e(TAG, "ERROR called : " + arg1 + ", " + arg2);
return false;
}
}
The onVideoSizeChanged function is never called, but onPrepared and surfaceChanged are called.
Thus the startVideoPlayback function is called, but the video width and height are 0.
My video is playing, as I can hear the sound, but nothing is displayed on the screen.
I also tried giving a raw width and height to the setFixedSize function of the SurfaceHolder object, but still nothing is displayed.
Can you help me?
I'm using Android 8.
EDIT
Here is the log I get when playing a video from the resources:
WARN info/warning (1, 35)
WARN info/warning (1, 44)
DEBUG Duration : 101248
DEBUG surfaceChanged called. Width : 480, height : 270
INFO Info (1,35)
INFO Info (1,44)
DEBUG onPrepared called
VERBOSE startVideoPlayback 480x270
INFO startVideoPlayback # video 0x0 - box 480x270
INFO Scaled to 480x0
DEBUG surfaceDestroyed called
EDIT 2
I tried with another video and it works.
Here are the specifications of the "not working" video:
Container : MP4 - QuickTime
Rate : 2 340 Kbps
Format : H.264/MPEG-4-AVC
Size : 1280*640
Aspect pixel : undefined
Image proportion : 2.000
Encoding profile : Baseline@L3.1
Here are the specifications for the working video:
Container : MP4 - QuickTime
Rate : 537 Kbps
Format : H.264/MPEG-4-AVC
Size : 640*360
Aspect pixel : undefined
Image proportion : 16:9
Encoding profile : Baseline@L3.0
Do you know what's wrong with the first video?
Some things to try:
1) Should you be calling mMediaPlayer.prepareAsync() in your playVideo method?
2) Try removing the if statement in onPrepared.
3) I would call holder.setSizeFromLayout() and then mMediaPlayer.start() from your onPrepared callback (rather than from onBufferingUpdate).
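Point 3 as a minimal sketch against the question's code (holder and TAG as declared there):
public void onPrepared(MediaPlayer mediaplayer) {
    Log.d(TAG, "onPrepared called");
    holder.setSizeFromLayout(); // let the layout drive the surface size
    mediaplayer.start();        // start here instead of in onBufferingUpdate
}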

Recording video on Android using JavaCV (Updated 2014 02 17)

I'm trying to record a video on Android using the JavaCV lib.
I need to record the video in 640x360.
I have installed everything as described in the README.txt file, and I followed the example below:
https://code.google.com/p/javacv/source/browse/samples/RecordActivity.java
In this example, the video size is:
private int imageWidth = 320;
private int imageHeight = 240;
In my case, I need to record the video in 640x360 H.264.
(UPDATE) I have reverted my code and kept it exactly like the example, just changing imageWidth and imageHeight to 640x360.
Now I'm getting the video like this image:
http://bergmann.net.br/img/screenshot_video_error.png
Here is my code:
import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
import java.io.IOException;
import java.nio.ShortBuffer;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import com.autosonvideo.helpers.Helpers;
import com.autosonvideo.logic.CameraHelpers;
import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
public class FFmpegRecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link;
long startTime = 0;
boolean recording = false;
private volatile FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 640;
private int imageHeight = 480;
private int finalImageWidth = 640;
private int finalImageHeight = 360;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private IplImage yuvIplimage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 1280;
private final int live_height = 960;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.main);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
CLASS_LABEL);
mWakeLock.acquire();
initLayout();
initRecorder();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE))
.getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(
R.layout.main, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width
/ live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
// ---------------------------------------
// initialize ffmpeg_recorder
// ---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG, "init recorder");
if (yuvIplimage == null) {
yuvIplimage = IplImage.create(finalImageWidth, finalImageHeight,
IPL_DEPTH_8U, 2);
Log.i(LOG_TAG, "create yuvIplimage");
}
ffmpeg_link = CameraHelpers.getOutputMediaFile(
CameraHelpers.MEDIA_TYPE_VIDEO).toString();
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, finalImageWidth,
finalImageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
}
public void startRecording() {
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
if (recorder != null && recording) {
recording = false;
Log.v(LOG_TAG,
"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
// ---------------------------------------------
// audio thread, gets and encodes audio data
// ---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process
.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
short[] audioData;
int bufferReadResult;
bufferSize = AudioRecord
.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleAudioRateInHz, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = new short[bufferSize];
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
// Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData, 0,
audioData.length);
if (bufferReadResult > 0) {
Log.v(LOG_TAG, "bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it
// never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
try {
recorder.record(ShortBuffer.wrap(audioData, 0,
bufferReadResult));
// Log.v(LOG_TAG,"recording " + 1024*i + " to " +
// 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG, "AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG, "audioRecord released");
}
}
}
// ---------------------------------------------
// camera thread, gets and encodes video data
// ---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback,
PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera", "camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.v(LOG_TAG, "Setting imageWidth: " + imageWidth
+ " imageHeight: " + imageHeight + " frameRate: "
+ frameRate);
Camera.Parameters camParams = mCamera.getParameters();
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,
"Preview Framerate: " + camParams.getPreviewFrameRate());
camParams.setPreviewFrameRate(frameRate);
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
/* get video data */
if (yuvIplimage != null && recording) {
// yuvIplimage.getByteBuffer().put(data);
final int startY = 640 * (480 - 360) / 2;
final int lenY = 640 * 360;
yuvIplimage.getByteBuffer().put(data, startY, lenY);
final int startVU = 640 * 480 + 320 * 2 * (240 - 180) / 2;
final int lenVU = 320 * 180 * 2;
yuvIplimage.getByteBuffer().put(data, startVU, lenVU);
Log.v(LOG_TAG, "Writing Frame");
try {
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
@Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set
// isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
Your camera, most likely, can provide 640x480 preview frames. The fix would be to clip this frame before it is recorded, like this:
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
/* get video data */
if (yuvIplimage != null && recording) {
ByteBuffer bb = yuvIplimage.getByteBuffer(); // resets the buffer
final int startY = imageWidth*(imageHeight-finalImageHeight)/2;
final int lenY = imageWidth*finalImageHeight;
bb.put(data, startY, lenY);
final int startVU = imageWidth*imageHeight + imageWidth*(imageHeight-finalImageHeight)/4;
final int lenVU = imageWidth* finalImageHeight/2;
bb.put(data, startVU, lenVU);
// Log.v(LOG_TAG, "Writing Frame");
try {
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.e(LOG_TAG, "problem with recorder():", e);
}
}
}
The preview frame has the semi-planar YVU (NV21) format: 640x480 luminance (Y) bytes, followed by 320x240 interleaved pairs of chroma (V and U) bytes. We first copy the relevant Y bytes into yuvIplimage, and after that the relevant VU pairs. Note that this is easy and fast because the width you want is the same as the native width.
Your camera and camera view should be initialized for 640x480, and the recorder for 640x360. Note that this efficient cropping is only possible when imageWidth == finalImageWidth.
FIX: it turns out that IplImage.getByteBuffer() resets the buffer position, so the fix is to obtain the buffer once and reuse it through the temporary bb object.
Note that you will probably want to overlay the preview with a frame that "hides" the margins cropped this way: these manipulations only change the recorded frames, not the CameraView.
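As a quick sanity check of the offset arithmetic above, here is a minimal standalone sketch that recomputes the hard-coded 640x480 -> 640x360 values from the original onPreviewFrame (the class name CropOffsets is just for illustration):
// Plain Java, no Android dependencies: recompute the NV21 crop offsets.
public final class CropOffsets {
    public static void main(String[] args) {
        int imageWidth = 640, imageHeight = 480, finalImageHeight = 360;
        // Y plane: skip (480 - 360) / 2 = 60 full rows at the top
        int startY = imageWidth * (imageHeight - finalImageHeight) / 2; // 38400
        int lenY = imageWidth * finalImageHeight;                       // 230400
        // VU plane starts after the Y plane; it has half as many rows,
        // so the skipped band is 30 chroma rows of 640 bytes each
        int startVU = imageWidth * imageHeight
                + imageWidth * (imageHeight - finalImageHeight) / 4;    // 326400
        int lenVU = imageWidth * finalImageHeight / 2;                  // 115200
        System.out.printf("startY=%d lenY=%d startVU=%d lenVU=%d%n",
                startY, lenY, startVU, lenVU);
    }
}
These values match the literal constants in the original code: startY = 640 * 60 = 38400, and startVU = 640*480 + 320*2*30 = 326400.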
@Fabio: Seeing that your code is from this open-source Android Touch-To-Record library, which I have also used, here is my modified version of the onPreviewFrame method, inside the CameraPreview class, to transpose and resize a captured frame, since the captured video played sideways (the app was locked to portrait) and with greenish output.
I defined yuvIplImage as follows in my setCameraParams() method.
IplImage yuvIplImage = IplImage.create(mPreviewSize.height, mPreviewSize.width, opencv_core.IPL_DEPTH_8U, 2);
Also initialize your videoRecorder object as follows, passing the width as the height and vice versa.
// Call initVideoRecorder() like this to initialize the videoRecorder object of the FFmpegFrameRecorder class.
initVideoRecorder(strVideoPath, mPreview.getPreviewSize().height, mPreview.getPreviewSize().width, recorderParameters);
//method implementation
public void initVideoRecorder(String videoPath, int width, int height, RecorderParameters recorderParameters)
{
Log.e(TAG, "initVideoRecorder");
videoRecorder = new FFmpegFrameRecorder(videoPath, width, height, 1);
videoRecorder.setFormat(recorderParameters.getVideoOutputFormat());
videoRecorder.setSampleRate(recorderParameters.getAudioSamplingRate());
videoRecorder.setFrameRate(recorderParameters.getVideoFrameRate());
videoRecorder.setVideoCodec(recorderParameters.getVideoCodec());
videoRecorder.setVideoQuality(recorderParameters.getVideoQuality());
videoRecorder.setAudioQuality(recorderParameters.getVideoQuality());
videoRecorder.setAudioCodec(recorderParameters.getAudioCodec());
videoRecorder.setVideoBitrate(1000000);
videoRecorder.setAudioBitrate(64000);
}
This is my onPreviewFrame() method:
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
long frameTimeStamp = 0L;
if(FragmentCamera.mAudioTimestamp == 0L && FragmentCamera.firstTime > 0L)
{
frameTimeStamp = 1000L * (System.currentTimeMillis() - FragmentCamera.firstTime);
}
else if(FragmentCamera.mLastAudioTimestamp == FragmentCamera.mAudioTimestamp)
{
frameTimeStamp = FragmentCamera.mAudioTimestamp + FragmentCamera.frameTime;
}
else
{
long l2 = (System.nanoTime() - FragmentCamera.mAudioTimeRecorded) / 1000L;
frameTimeStamp = l2 + FragmentCamera.mAudioTimestamp;
FragmentCamera.mLastAudioTimestamp = FragmentCamera.mAudioTimestamp;
}
synchronized(FragmentCamera.mVideoRecordLock)
{
if(FragmentCamera.recording && FragmentCamera.rec && lastSavedframe != null && lastSavedframe.getFrameBytesData() != null && yuvIplImage != null)
{
FragmentCamera.mVideoTimestamp += FragmentCamera.frameTime;
if(lastSavedframe.getTimeStamp() > FragmentCamera.mVideoTimestamp)
{
FragmentCamera.mVideoTimestamp = lastSavedframe.getTimeStamp();
}
try
{
yuvIplImage.getByteBuffer().put(lastSavedframe.getFrameBytesData());
IplImage bgrImage = IplImage.create(mPreviewSize.width, mPreviewSize.height, opencv_core.IPL_DEPTH_8U, 4);// In my case, mPreviewSize.width = 1280 and mPreviewSize.height = 720
IplImage transposed = IplImage.create(mPreviewSize.height, mPreviewSize.width, yuvIplImage.depth(), 4);
IplImage squared = IplImage.create(mPreviewSize.height, mPreviewSize.height, yuvIplImage.depth(), 4);
int[] _temp = new int[mPreviewSize.width * mPreviewSize.height];
Util.YUV_NV21_TO_BGR(_temp, data, mPreviewSize.width, mPreviewSize.height);
bgrImage.getIntBuffer().put(_temp);
opencv_core.cvTranspose(bgrImage, transposed);
opencv_core.cvFlip(transposed, transposed, 1);
opencv_core.cvSetImageROI(transposed, opencv_core.cvRect(0, 0, mPreviewSize.height, mPreviewSize.height));
opencv_core.cvCopy(transposed, squared, null);
opencv_core.cvResetImageROI(transposed);
videoRecorder.setTimestamp(lastSavedframe.getTimeStamp());
videoRecorder.record(squared);
}
catch(com.googlecode.javacv.FrameRecorder.Exception e)
{
e.printStackTrace();
}
}
lastSavedframe = new SavedFrames(data, frameTimeStamp);
}
}
This code uses a method YUV_NV21_TO_BGR, which I found at this link.
Basically, this method resolves what I call "the Green Devil problem on Android", just like yours. I had the same issue and wasted almost 3-4 days on it. Before adding the YUV_NV21_TO_BGR method, when I just took the transpose of the YuvIplImage, and more importantly a combination of transpose and flip (with or without resizing), the resulting video came out greenish. The YUV_NV21_TO_BGR method saved the day. Thanks to @David Han from the Google Groups thread linked above.
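For reference, here is a minimal sketch of what such an NV21-to-RGB conversion typically looks like, adapted from the well-known decodeYUV420SP sample code; the actual Util.YUV_NV21_TO_BGR from the linked thread may differ in channel order and rounding, and the method name here is only illustrative:
// Decodes an NV21 frame into packed 32-bit pixels (one int per pixel).
// NV21 = full-resolution Y plane followed by interleaved V/U at half resolution.
public static void yuvNv21ToArgb(int[] out, byte[] nv21, int width, int height) {
    int frameSize = width * height;
    for (int j = 0, yp = 0; j < height; j++) {
        int uvp = frameSize + (j >> 1) * width; // chroma row shared by two Y rows
        int u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & nv21[yp]) - 16;
            if (y < 0) y = 0;
            if ((i & 1) == 0) { // one VU pair per two horizontal pixels
                v = (0xff & nv21[uvp++]) - 128;
                u = (0xff & nv21[uvp++]) - 128;
            }
            int y1192 = 1192 * y;
            int r = Math.max(0, Math.min(262143, y1192 + 1634 * v));
            int g = Math.max(0, Math.min(262143, y1192 - 833 * v - 400 * u));
            int b = Math.max(0, Math.min(262143, y1192 + 2066 * u));
            // Pack as ARGB; swap r and b here if your IplImage expects BGR order.
            out[yp] = 0xff000000 | ((r << 6) & 0xff0000)
                    | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
}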
You can also use this link to resolve the issue. The problem is caused by the rotation of the image, and the YUV image handling is covered there.
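If you would rather handle the rotation yourself, one option is to rotate the raw NV21 bytes 90 degrees clockwise before any conversion. The following is only a sketch of that idea, not the code behind the link, and rotateNV21Cw is a hypothetical helper name:
// Rotates an NV21 frame 90 degrees clockwise; the result is height x width.
public static byte[] rotateNV21Cw(byte[] input, int width, int height) {
    byte[] output = new byte[input.length];
    int frameSize = width * height;
    // Rotate the full-resolution Y plane: (x, y) -> (height - 1 - y, x)
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            output[x * height + (height - 1 - y)] = input[y * width + x];
        }
    }
    // Rotate the half-resolution interleaved VU plane; each VU pair moves together
    for (int y = 0; y < height / 2; y++) {
        for (int x = 0; x < width / 2; x++) {
            int src = frameSize + y * width + x * 2;
            int dst = frameSize + x * height + (height / 2 - 1 - y) * 2;
            output[dst] = input[src];         // V
            output[dst + 1] = input[src + 1]; // U
        }
    }
    return output;
}
Remember to swap the width and height you pass to the recorder if you rotate frames this way.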

android mediaplayer does not work over 3g connection but works over wifi

I am working on a project that uses a VideoView to display an .mp4 video from a URL. My code works fine in the emulator, but on a physical device (Samsung Galaxy S III) it only works over WiFi and not over 3G.
YouTube videos work fine over 3G, and I tested the same video with another player (VPlayer) and it works fine there as well.
This is my code:
package com.video;
public class StreamingVideoPlayer extends Activity implements
OnCompletionListener, OnErrorListener, OnInfoListener,
OnBufferingUpdateListener, OnPreparedListener, OnSeekCompleteListener,
OnVideoSizeChangedListener, SurfaceHolder.Callback,
MediaController.MediaPlayerControl {
MediaController controller;
Display currentDisplay;
SurfaceView surfaceView;
SurfaceHolder surfaceHolder;
MediaPlayer mediaPlayer;
View mainView;
TextView statusView;
int videoWidth = 0;
int videoHeight = 0;
boolean readyToPlay = false;
public final static String LOGTAG = "STREAMING_VIDEO_PLAYER";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
mainView = this.findViewById(R.id.MainView);
statusView = (TextView) this.findViewById(R.id.StatusTextView);
surfaceView = (SurfaceView) this.findViewById(R.id.SurfaceView);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this);
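// setType() is deprecated and ignored from API 11 onward, but older devices
// still require SURFACE_TYPE_PUSH_BUFFERS for video playback surfaces.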
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mediaPlayer = new MediaPlayer();
statusView.setText("MediaPlayer Created");
mediaPlayer.setOnCompletionListener(this);
mediaPlayer.setOnErrorListener(this);
mediaPlayer.setOnInfoListener(this);
mediaPlayer.setOnPreparedListener(this);
mediaPlayer.setOnSeekCompleteListener(this);
mediaPlayer.setOnVideoSizeChangedListener(this);
mediaPlayer.setOnBufferingUpdateListener(this);
//String filePath = "http://sffsapps.s3.amazonaws.com/02-benefits-100dc.mp4";
// String filePath="http://100dcapps.s3.amazonaws.com/droidtest/index.droid.mp4";
String filePath="https://100dcapps.s3.amazonaws.com/droidtest/index.mp4";
try {
mediaPlayer.setDataSource(filePath);
} catch (IllegalArgumentException e) {
Log.v(LOGTAG, e.getMessage());
finish();
} catch (IllegalStateException e) {
Log.v(LOGTAG, e.getMessage());
finish();
} catch (IOException e) {
Log.v(LOGTAG, e.getMessage());
finish();
}
statusView.setText("MediaPlayer DataSource Set");
currentDisplay = getWindowManager().getDefaultDisplay();
controller = new MediaController(this);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.v(LOGTAG, "surfaceCreated Called");
mediaPlayer.setDisplay(holder);
statusView.setText("MediaPlayer Display Surface Set");
try {
mediaPlayer.prepareAsync();
} catch (IllegalStateException e) {
Log.v(LOGTAG, "IllegalStateException " + e.getMessage());
finish();
}
statusView.setText("MediaPlayer Preparing");
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.v(LOGTAG, "surfaceChanged Called");
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.v(LOGTAG, "surfaceDestroyed Called");
}
public void onCompletion(MediaPlayer mp) {
Log.v(LOGTAG, "onCompletion Called");
statusView.setText("MediaPlayer Playback Completed");
}
public boolean onError(MediaPlayer mp, int whatError, int extra) {
Log.v(LOGTAG, "onError Called");
statusView.setText("MediaPlayer Error");
if (whatError == MediaPlayer.MEDIA_ERROR_SERVER_DIED) {
Log.v(LOGTAG, "Media Error, Server Died " + extra);
} else if (whatError == MediaPlayer.MEDIA_ERROR_UNKNOWN) {
Log.v(LOGTAG, "Media Error, Error Unknown " + extra);
}
return false;
}
public boolean onInfo(MediaPlayer mp, int whatInfo, int extra) {
statusView.setText("MediaPlayer onInfo Called");
if (whatInfo == MediaPlayer.MEDIA_INFO_BAD_INTERLEAVING) {
Log.v(LOGTAG, "Media Info, Media Info Bad Interleaving " + extra);
} else if (whatInfo == MediaPlayer.MEDIA_INFO_NOT_SEEKABLE) {
Log.v(LOGTAG, "Media Info, Media Info Not Seekable " + extra);
} else if (whatInfo == MediaPlayer.MEDIA_INFO_UNKNOWN) {
Log.v(LOGTAG, "Media Info, Media Info Unknown " + extra);
} else if (whatInfo == MediaPlayer.MEDIA_INFO_VIDEO_TRACK_LAGGING) {
Log.v(LOGTAG, "MediaInfo, Media Info Video Track Lagging " + extra);
}
/*
 * Android version 2.0 or higher:
 * else if (whatInfo == MediaPlayer.MEDIA_INFO_METADATA_UPDATE) {
 *     Log.v(LOGTAG, "MediaInfo, Media Info Metadata Update " + extra);
 * }
 */
return false;
}
public void onPrepared(MediaPlayer mp) {
Log.v(LOGTAG, "onPrepared Called");
statusView.setText("MediaPlayer Prepared");
videoWidth = mp.getVideoWidth();
videoHeight = mp.getVideoHeight();
Log.v(LOGTAG, "Width: " + videoWidth);
Log.v(LOGTAG, "Height: " + videoHeight);
if (videoWidth > currentDisplay.getWidth()
|| videoHeight > currentDisplay.getHeight()) {
float heightRatio = (float) videoHeight
/ (float) currentDisplay.getHeight();
float widthRatio = (float) videoWidth
/ (float) currentDisplay.getWidth();
if (heightRatio > 1 || widthRatio > 1) {
if (heightRatio > widthRatio) {
videoHeight = (int) Math.ceil((float) videoHeight
/ (float) heightRatio);
videoWidth = (int) Math.ceil((float) videoWidth
/ (float) heightRatio);
} else {
videoHeight = (int) Math.ceil((float) videoHeight
/ (float) widthRatio);
videoWidth = (int) Math.ceil((float) videoWidth
/ (float) widthRatio);
}
}
}
surfaceView.setLayoutParams(new LinearLayout.LayoutParams(videoWidth,
videoHeight));
controller.setMediaPlayer(this);
controller.setAnchorView(this.findViewById(R.id.MainView));
controller.setEnabled(true);
controller.show();
mp.start();
statusView.setText("MediaPlayer Started");
}
public void onSeekComplete(MediaPlayer mp) {
Log.v(LOGTAG, "onSeekComplete Called");
}
public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
Log.v(LOGTAG, "onVideoSizeChanged Called");
videoWidth = mp.getVideoWidth();
videoHeight = mp.getVideoHeight();
Log.v(LOGTAG, "Width: " + videoWidth);
Log.v(LOGTAG, "Height: " + videoHeight);
if (videoWidth > currentDisplay.getWidth()
|| videoHeight > currentDisplay.getHeight()) {
float heightRatio = (float) videoHeight
/ (float) currentDisplay.getHeight();
float widthRatio = (float) videoWidth
/ (float) currentDisplay.getWidth();
if (heightRatio > 1 || widthRatio > 1) {
if (heightRatio > widthRatio) {
videoHeight = (int) Math.ceil((float) videoHeight
/ (float) heightRatio);
videoWidth = (int) Math.ceil((float) videoWidth
/ (float) heightRatio);
} else {
videoHeight = (int) Math.ceil((float) videoHeight
/ (float) widthRatio);
videoWidth = (int) Math.ceil((float) videoWidth
/ (float) widthRatio);
}
}
}
surfaceView.setLayoutParams(new LinearLayout.LayoutParams(videoWidth,
videoHeight));
}
public void onBufferingUpdate(MediaPlayer mp, int bufferedPercent) {
statusView.setText("MediaPlayer Buffering: " + bufferedPercent + "%");
Log.v(LOGTAG, "MediaPlayer Buffering: " + bufferedPercent + "%");
}
public boolean canPause() {
return true;
}
public boolean canSeekBackward() {
return true;
}
public boolean canSeekForward() {
return true;
}
public int getBufferPercentage() {
return 0;
}
public int getCurrentPosition() {
return mediaPlayer.getCurrentPosition();
}
public int getDuration() {
return mediaPlayer.getDuration();
}
public boolean isPlaying() {
return mediaPlayer.isPlaying();
}
public void pause() {
if (mediaPlayer.isPlaying()) {
mediaPlayer.pause();
}
}
public void seekTo(int pos) {
mediaPlayer.seekTo(pos);
}
public void start() {
mediaPlayer.start();
}
@Override
public boolean onTouchEvent(MotionEvent ev) {
/*if (controller.isShowing()) {
controller.hide();
} else {*/
controller.show();
//}
return false;
}
}
