Custom VLC code on Android - no video

I've almost got a working code sample of VLC for Android.
I've been using the VideoPlayerActivity as an example.
Currently I have the surface displayed through the surface handler, and I see a black video background box (the SurfaceView). I also have working audio.
However for some reason I cannot get any video.
Logcat says the following continuously:
yuv_rgb_neon: can't get video picture.
I think it's a very small issue in my code. I've tried virtually everything I can think of, but I can't get it to work.
Can anyone point me in the right direction perhaps?
This is the core libVLC code I use. I think something is wrong with the init context parameter, but I can't figure out what it is.
(I have the same surface handler as the original VideoPlayerActivity located in org.videolan.vlc.gui.video.)
mLibVLC = LibVLC.getInstance();
mLibVLC.setIomx(false);
mLibVLC.setSubtitlesEncoding("");
mLibVLC.setTimeStretching(false);
mLibVLC.setFrameSkip(true);
mLibVLC.setChroma("RV16");
mLibVLC.setVerboseMode(true);
mLibVLC.setAout(-1);
mLibVLC.setDeblocking(0);
mLibVLC.setNetworkCaching(0);
mLibVLC.init(this.getApplicationContext());
Full code snippet:
/* VideoPlayerActivity.java */
package com.example.mp2;
import java.lang.reflect.Method;
import org.videolan.libvlc.EventHandler;
import org.videolan.libvlc.IVideoPlayer;
import org.videolan.libvlc.LibVLC;
import org.videolan.libvlc.LibVlcException;
import org.videolan.vlc.Util;
import org.videolan.vlc.WeakHandler;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.View.OnSystemUiVisibilityChangeListener;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.FrameLayout;
public class VideoPlayerActivity extends Activity implements IVideoPlayer {
public final static String TAG = "VLC/VideoPlayerActivity";
// Internal intent identifier to distinguish between internal launch and
// external intent.
private SurfaceView mSurface;
private SurfaceHolder mSurfaceHolder;
private FrameLayout mSurfaceFrame;
private LibVLC mLibVLC;
private String mLocation;
private static final int SURFACE_BEST_FIT = 0;
private static final int SURFACE_FIT_HORIZONTAL = 1;
private static final int SURFACE_FIT_VERTICAL = 2;
private static final int SURFACE_FILL = 3;
private static final int SURFACE_16_9 = 4;
private static final int SURFACE_4_3 = 5;
private static final int SURFACE_ORIGINAL = 6;
private int mCurrentSize = SURFACE_BEST_FIT;
/** Overlay */
private static final int SURFACE_SIZE = 3;
// size of the video
private int mVideoHeight;
private int mVideoWidth;
private int mVideoVisibleHeight;
private int mVideoVisibleWidth;
private int mSarNum;
private int mSarDen;
private static VideoPlayerActivity context;
public static VideoPlayerActivity getContext() {
return context;
}
/**
* Used to store a selected subtitle; see onActivityResult. It is possible
* to have multiple custom subs in one session (just like desktop VLC allows
* you as well.)
*/
@Override
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.player);
if (Util.isICSOrLater()) getWindow().getDecorView().findViewById(android.R.id.content).setOnSystemUiVisibilityChangeListener(new OnSystemUiVisibilityChangeListener() {
@Override
public void onSystemUiVisibilityChange(int visibility) {
setSurfaceSize(mVideoWidth, mVideoHeight, mVideoVisibleWidth, mVideoVisibleHeight, mSarNum, mSarDen);
}
});
mSurface = (SurfaceView) findViewById(R.id.player_surface);
mSurfaceHolder = mSurface.getHolder();
mSurfaceFrame = (FrameLayout) findViewById(R.id.player_surface_frame);
String chroma = "RV16";
context = this;
if (Util.isGingerbreadOrLater() && chroma.equals("YV12")) {
mSurfaceHolder.setFormat(ImageFormat.YV12);
} else if (chroma.equals("RV16")) {
mSurfaceHolder.setFormat(PixelFormat.RGB_565);
} else {
mSurfaceHolder.setFormat(PixelFormat.RGBX_8888);
}
mSurfaceHolder.addCallback(mSurfaceCallback);
try {
mLibVLC = LibVLC.getInstance();
mLibVLC.setIomx(false);
mLibVLC.setSubtitlesEncoding("");
mLibVLC.setTimeStretching(false);
mLibVLC.setFrameSkip(true);
mLibVLC.setChroma("RV16");
mLibVLC.setVerboseMode(true);
mLibVLC.setAout(-1);
mLibVLC.setDeblocking(0);
mLibVLC.setNetworkCaching(0);
mLibVLC.init(this.getApplicationContext());
} catch (LibVlcException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
EventHandler em = EventHandler.getInstance();
em.addHandler(eventHandler);
}
@Override
protected void onStart() {
super.onStart();
}
@Override
protected void onPause() {
super.onPause();
mLibVLC.stop();
mSurface.setKeepScreenOn(false);
}
@Override
protected void onStop() {
super.onStop();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mLibVLC != null) {
mLibVLC.stop();
}
}
@Override
protected void onResume() {
super.onResume();
load();
}
private final Handler eventHandler = new VideoPlayerEventHandler(this);
private static class VideoPlayerEventHandler extends WeakHandler<VideoPlayerActivity> {
public VideoPlayerEventHandler(VideoPlayerActivity owner) {
super(owner);
}
@Override
public void handleMessage(Message msg) {
VideoPlayerActivity activity = getOwner();
if (activity == null) return;
switch (msg.getData().getInt("event")) {
case EventHandler.MediaPlayerPlaying:
Log.i(TAG, "MediaPlayerPlaying");
// activity.setESTracks();
// activity.setESTracks();
break;
case EventHandler.MediaPlayerPaused:
Log.i(TAG, "MediaPlayerPaused");
break;
case EventHandler.MediaPlayerStopped:
Log.i(TAG, "MediaPlayerStopped");
break;
case EventHandler.MediaPlayerEndReached:
Log.i(TAG, "MediaPlayerEndReached");
// activity.endReached();
break;
case EventHandler.MediaPlayerVout:
// activity.handleVout(msg);
break;
case EventHandler.MediaPlayerPositionChanged:
// don't spam the logs
break;
case EventHandler.MediaPlayerEncounteredError:
Log.i(TAG, "MediaPlayerEncounteredError");
// activity.encounteredError();
break;
default:
Log.e(TAG, String.format("Event not handled (0x%x)", msg.getData().getInt("event")));
break;
}
// activity.updateOverlayPausePlay();
}
};
private final Handler mHandler = new VideoPlayerHandler(this);
private static class VideoPlayerHandler extends WeakHandler<VideoPlayerActivity> {
public VideoPlayerHandler(VideoPlayerActivity owner) {
super(owner);
}
@Override
public void handleMessage(Message msg) {
VideoPlayerActivity activity = getOwner();
if (activity == null) // WeakReference could be GC'ed early
return;
switch (msg.what) {
case SURFACE_SIZE:
activity.changeSurfaceSize();
break;
}
}
};
@Override
public void setSurfaceSize(int width, int height, int visible_width, int visible_height, int sar_num, int sar_den) {
if (width * height == 0) return;
// store video size
mVideoHeight = height;
mVideoWidth = width;
mVideoVisibleHeight = visible_height;
mVideoVisibleWidth = visible_width;
mSarNum = sar_num;
mSarDen = sar_den;
Message msg = mHandler.obtainMessage(SURFACE_SIZE);
mHandler.sendMessage(msg);
}
private void changeSurfaceSize() {
// get screen size
int dw = getWindow().getDecorView().getWidth();
int dh = getWindow().getDecorView().getHeight();
// getWindow().getDecorView() doesn't always take orientation into
// account, we have to correct the values
boolean isPortrait = getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
if (dw > dh && isPortrait || dw < dh && !isPortrait) {
int d = dw;
dw = dh;
dh = d;
}
// sanity check
if (dw * dh == 0 || mVideoWidth * mVideoHeight == 0) {
Log.e(TAG, "Invalid surface size");
return;
}
// compute the aspect ratio
double ar, vw;
double density = (double) mSarNum / (double) mSarDen;
if (density == 1.0) {
/* No indication about the density, assuming 1:1 */
vw = mVideoVisibleWidth;
ar = (double) mVideoVisibleWidth / (double) mVideoVisibleHeight;
} else {
/* Use the specified aspect ratio */
vw = mVideoVisibleWidth * density;
ar = vw / mVideoVisibleHeight;
}
// compute the display aspect ratio
double dar = (double) dw / (double) dh;
switch (mCurrentSize) {
case SURFACE_BEST_FIT:
if (dar < ar) dh = (int)(dw / ar);
else dw = (int)(dh * ar);
break;
case SURFACE_FIT_HORIZONTAL:
dh = (int)(dw / ar);
break;
case SURFACE_FIT_VERTICAL:
dw = (int)(dh * ar);
break;
case SURFACE_FILL:
break;
case SURFACE_16_9:
ar = 16.0 / 9.0;
if (dar < ar) dh = (int)(dw / ar);
else dw = (int)(dh * ar);
break;
case SURFACE_4_3:
ar = 4.0 / 3.0;
if (dar < ar) dh = (int)(dw / ar);
else dw = (int)(dh * ar);
break;
case SURFACE_ORIGINAL:
dh = mVideoVisibleHeight;
dw = (int) vw;
break;
}
// force surface buffer size
// mSurfaceHolder.setFixedSize(mVideoWidth, mVideoHeight);
// set display size
LayoutParams lp = mSurface.getLayoutParams();
lp.width = dw * mVideoWidth / mVideoVisibleWidth;
lp.height = dh * mVideoHeight / mVideoVisibleHeight;
mSurface.setLayoutParams(lp);
// set frame size (crop if necessary)
lp = mSurfaceFrame.getLayoutParams();
lp.width = dw;
lp.height = dh;
mSurfaceFrame.setLayoutParams(lp);
mSurface.invalidate();
}
private final SurfaceHolder.Callback mSurfaceCallback = new Callback() {
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if (format == PixelFormat.RGBX_8888) Log.d(TAG, "Pixel format is RGBX_8888");
else if (format == PixelFormat.RGB_565) Log.d(TAG, "Pixel format is RGB_565");
else if (format == ImageFormat.YV12) Log.d(TAG, "Pixel format is YV12");
else Log.d(TAG, "Pixel format is other/unknown");
mLibVLC.attachSurface(holder.getSurface(), VideoPlayerActivity.this, width, height);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mLibVLC.detachSurface();
}
};
/**
* External extras: - position (long) - position of the video to start with
* (in ms)
*/
private void load() {
mLocation = "file:///sdcard/fam.mp4";
mSurface.setKeepScreenOn(true);
// MediaList mediaList = new MediaList(mLibVLC);
// mLibVLC.setMediaList();
mLibVLC.readMedia(mLocation, false);
mLibVLC.setTime(0);
mLibVLC.play();
}
@SuppressWarnings("deprecation")
private int getScreenRotation() {
WindowManager wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
Display display = wm.getDefaultDisplay();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO
/*
* Android 2.2
* has
* getRotation
*/
) {
try {
Method m = display.getClass().getDeclaredMethod("getRotation");
return (Integer) m.invoke(display);
} catch (Exception e) {
return Surface.ROTATION_0;
}
} else {
return display.getOrientation();
}
}
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
private int getScreenOrientation() {
WindowManager wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
Display display = wm.getDefaultDisplay();
int rot = getScreenRotation();
/*
* Since getRotation() returns the screen's "natural" orientation, which
* is not guaranteed to be SCREEN_ORIENTATION_PORTRAIT, we have to
* invert the SCREEN_ORIENTATION value if it is "naturally" landscape.
*/
@SuppressWarnings("deprecation")
boolean defaultWide = display.getWidth() > display.getHeight();
if (rot == Surface.ROTATION_90 || rot == Surface.ROTATION_270) defaultWide = !defaultWide;
if (defaultWide) {
switch (rot) {
case Surface.ROTATION_0:
return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
case Surface.ROTATION_90:
return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
case Surface.ROTATION_180:
// SCREEN_ORIENTATION_REVERSE_LANDSCAPE only available since API
// Level 9+
return (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO ? ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE : ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
case Surface.ROTATION_270:
// SCREEN_ORIENTATION_REVERSE_PORTRAIT only available since API
// Level 9+
return (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO ? ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT : ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
default:
return 0;
}
} else {
switch (rot) {
case Surface.ROTATION_0:
return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
case Surface.ROTATION_90:
return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
case Surface.ROTATION_180:
// SCREEN_ORIENTATION_REVERSE_PORTRAIT only available since API
// Level 9+
return (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO ? ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT : ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
case Surface.ROTATION_270:
// SCREEN_ORIENTATION_REVERSE_LANDSCAPE only available since API
// Level 9+
return (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO ? ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE : ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
default:
return 0;
}
}
}
}

When troubleshooting VLC for Android you'll find a lot of helpful information in their forums:
VLC for Android VideoLAN Forums
You are missing two options before initializing mLibVLC. They are:
mLibVLC.setHardwareAcceleration(LibVLC.HW_ACCELERATION_FULL);
mLibVLC.eventVideoPlayerActivityCreated(Boolean.TRUE);
Without these, your video will not show up, or you'll get a green screen in your SurfaceView. So in the onCreate of your VLC activity, to have video show properly, you can have something like the below (granted your SurfaceView is set to the correct size):
mSurfaceView = (SurfaceView) findViewById(R.id.player_surface);
mSurfaceHolder = mSurfaceView.getHolder();
mSurfaceFrame = (FrameLayout) findViewById(R.id.player_surface_frame);
mMediaUrl = getIntent().getExtras().getString("videoUrl");
try {
mLibVLC = LibVLC.getInstance();
mLibVLC.setAout(mLibVLC.AOUT_AUDIOTRACK);
mLibVLC.setVout(mLibVLC.VOUT_ANDROID_SURFACE);
mLibVLC.setHardwareAcceleration(LibVLC.HW_ACCELERATION_FULL);
mLibVLC.eventVideoPlayerActivityCreated(Boolean.TRUE);
mLibVLC.init(getApplicationContext());
} catch (LibVlcException e){
Log.e(TAG, e.toString());
}
mSurfaceHolder.addCallback(mSurfaceCallback);
mSurface = mSurfaceHolder.getSurface();
mLibVLC.attachSurface(mSurface, VideoVLCActivity.this);
mLibVLC.playMRL(mMediaUrl);
Side note: anyone can use the following in their build.gradle instead of compiling and including the VLC libraries themselves:
compile "de.mrmaffen:vlc-android-sdk:1.0.3"

Related

How do you Fullscreen RTSP Stream in Android LibVLC?

I'm using mrmaffen's VLC-ANDROID-SDK to develop an RTSP streaming app.
https://github.com/mrmaffen/vlc-android-sdk
I've had a lot of success getting it working and running quite well, but the problem I'm having that I can't seem to shake is getting it to display the video feed in fullscreen on the SurfaceView, or even just in the center of the SurfaceView.
This is what I get:
http://s1378.photobucket.com/user/Jo_Han_Solo/media/Screenshot_20171214-125504_zps437k1kw2.png.html?filters[user]=146993343&filters[recent]=1&sort=1&o=1
The black window is the total size of the screen, I want that video to fill the screen and hopefully always fill from center, but I can't figure out how to do it.
Anyone have any experience with anything like this and knows how to fix it?
I kind of solved the problem, though in a bit of a dodgy way. It's far from complete, but considering the lack of information on the topic, I thought this might help someone for the time being.
1. Find the size of your screen.
2. Set up your final IVLCVout to incorporate the screen size.
3. Adjust setScale to "fullscreen" the video stream.
To explain each task:
1. Set up your globals:
public class SingleStreamView extends AppCompatActivity implements
IVLCVout.Callback {
public int mHeight;
public int mWidth;
Then, in the onCreate task, find the screen size of your device:
DisplayMetrics displayMetrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);
mHeight = displayMetrics.heightPixels;
mWidth = displayMetrics.widthPixels;
2. Then go down to your "CreatePlayer" event, to where you set up your video output:
// Set up video output
final IVLCVout vout = mMediaPlayer.getVLCVout();
vout.setVideoView(mSurface);
vout.setWindowSize(mWidth,mHeight);
vout.addCallback(this);
vout.attachViews();
The winning line that made it center in my surface was the "vout.setWindowSize(mWidth,mHeight);"
3. Then I simply used the setScale option to "fullscreen" the video. That said, it's a bit of a hacky way of doing it, and I'd like to figure out a way to grab the codec information so I can set the scale dynamically and automatically fullscreen a stream of any size on a screen of any size. For now, though, this works for known video stream resolutions, and it automatically adjusts to the screen size of your phone.
Either way, I found that with a Samsung Galaxy S8, a good scaling factor for a 640x480 RTSP stream was 1.8. Coded like so:
Media m = new Media(libvlc, Uri.parse(RTSP_ADDRESS));
m.setHWDecoderEnabled(true,false);
m.addOption(":network-caching=100");
m.addOption(":clock-jitter=0");
m.addOption(":clock-synchro=0");
m.addOption(":fullscreen");
mMediaPlayer.setMedia(m);
mMediaPlayer.setAspectRatio("16:9");
mMediaPlayer.setScale(1.8f);
mMediaPlayer.play();
Note the "mMediaPlayer.setScale(1.8f);" line.
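For the dynamic approach mentioned above, a minimal sketch of deriving the scale from the codec information instead of hard-coding 1.8f; it assumes playback has already started (so getCurrentVideoTrack() returns non-null) and that mWidth/mHeight hold the screen size from step 1:
// Sketch: derive a fill-from-center scale from the track dimensions the codec reports.
Media.VideoTrack vtrack = mMediaPlayer.getCurrentVideoTrack();
if (vtrack != null) {
int videoW = vtrack.width;
int videoH = vtrack.height;
// account for non-square pixels if the stream reports a sample aspect ratio
if (vtrack.sarNum != vtrack.sarDen) {
videoW = videoW * vtrack.sarNum / vtrack.sarDen;
}
// take the larger factor so the video fills the surface (cropped, not letterboxed)
float scaleW = mWidth / (float) videoW;
float scaleH = mHeight / (float) videoH;
mMediaPlayer.setScale(Math.max(scaleW, scaleH));
}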
Hope this helps someone!
Your solution seems interesting; however, I'm facing the same issues, which I can't seem to solve (yet) with your approach.
Screenshots of what I have so far can be seen at:
https://photos.app.goo.gl/9nKo22Mkc2SZq4SK9
I also want to (vertically) center an RTSP video stream in both landscape and portrait mode on a Samsung XCover 4 (720x1280 pixels) and on a device with a minimum resolution of 320x480. The minimum Android SDK version I'd love to have it running on is API 22 (Android 5.1.1).
The libvlc code with which I got the (embedded) VLC player working is based on 'de.mrmaffen:libvlc-android:2.1.12@aar'.
Given the above requirements, you can see the following behavior in the screenshots. The first two screenshots are on a Samsung XCover 4 (720x1280), where you can see that device-orientation=landscape clips the video and doesn't scale it, whereas the 3rd and 4th screenshots show that the same video stream doesn't follow the SURFACE_BEST_FIT method (see the code below for an explanation) on a device with a small resolution.
I would love to see an updateVideoSurfaces to handle the change in device-orientation or at least to show the entire video on startup.
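One direction that might be worth trying, as a sketch: trigger updateVideoSurfaces yourself when the orientation changes. This assumes the activity declares android:configChanges="orientation|screenSize" in the manifest, so a rotation arrives as a configuration change instead of a restart:
// Sketch: re-run the video layout pass after a rotation.
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
mHandler.post(new Runnable() {
@Override
public void run() {
updateVideoSurfaces();
}
});
}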
The layout for my VLC-video-player (part of a vertical LinearLayout) is as follows:
<LinearLayout
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="0.3"
android:layout_marginBottom="8dp"
android:layout_marginEnd="8dp"
android:layout_marginStart="8dp"
android:layout_marginTop="8dp"
android:orientation="vertical">
<FrameLayout
android:id="#+id/video_surface_frame"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_gravity="center"
android:foregroundGravity="clip_horizontal|clip_vertical"
tools:ignore="true">
<ViewStub
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout="#layout/surface_view"
android:id="#+id/surface_stub" />
<ViewStub
android:layout_width="1dp"
android:layout_height="1dp"
android:layout="#layout/surface_view"
android:id="#+id/subtitles_surface_stub" />
<ViewStub
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout="#layout/texture_view"
android:id="#+id/texture_stub" />
</FrameLayout>
</LinearLayout>
The example code I got from de.mrmaffen uses an updateVideoSurfaces (see the Java code below) with a number of SURFACE_XX modes, which to me seem to cover all scenarios with different device orientations and resolutions.
For some reason this doesn't work, and I suspect that the layout I'm using for the player (the FrameLayout/ViewStubs) may be causing the issues.
I was wondering if you can shed some light on directions in order to make sure that the video stream will auto-scale/center on any device orientation/resolution.
The player-code I'm using is as follows:
package com.testing.vlc2player;
import ...
public class VLC2PlayerActivity extends AppCompatActivity implements IVLCVout.OnNewVideoLayoutListener,
IVLCVout.Callback {
private static final Logger log = LoggerFactory.getLogger(VLC2PlayerActivity.class);
private static final boolean USE_SURFACE_VIEW = true;
private static final boolean ENABLE_SUBTITLES = false;
private static final int SURFACE_BEST_FIT = 0;
private static final int SURFACE_FIT_SCREEN = 1;
private static final int SURFACE_FILL = 2;
private static final int SURFACE_16_9 = 3;
private static final int SURFACE_4_3 = 4;
private static final int SURFACE_ORIGINAL = 5;
private static final int CURRENT_SIZE = SURFACE_BEST_FIT;
private FrameLayout mVideoSurfaceFrame = null;
private SurfaceView mVideoSurface = null;
private SurfaceView mSubtitlesSurface = null;
private TextureView mVideoTexture = null;
private View mVideoView = null;
private final Handler mHandler = new Handler();
private View.OnLayoutChangeListener mOnLayoutChangeListener = null;
private LibVLC mLibVLC = null;
private MediaPlayer mMediaPlayer = null;
private int mVideoHeight = 0;
private int mVideoWidth = 0;
private int mVideoVisibleHeight = 0;
private int mVideoVisibleWidth = 0;
private int mVideoSarNum = 0;
private int mVideoSarDen = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_player);
setupVLCLayout();
}
private void setupVLCLayout() {
log.debug("...");
final ArrayList<String> args = new ArrayList<>();
args.add("-vvv");
mLibVLC = new LibVLC(this, args);
mMediaPlayer = new MediaPlayer(mLibVLC);
mVideoSurfaceFrame = findViewById(R.id.video_surface_frame);
if (USE_SURFACE_VIEW) {
ViewStub stub = findViewById(R.id.surface_stub);
mVideoSurface = (SurfaceView) stub.inflate();
if (ENABLE_SUBTITLES) {
stub = findViewById(R.id.subtitles_surface_stub);
mSubtitlesSurface = (SurfaceView) stub.inflate();
mSubtitlesSurface.setZOrderMediaOverlay(true);
mSubtitlesSurface.getHolder().setFormat(PixelFormat.TRANSLUCENT);
}
mVideoView = mVideoSurface;
} else {
ViewStub stub = findViewById(R.id.texture_stub);
mVideoTexture = (TextureView) stub.inflate();
mVideoView = mVideoTexture;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
mMediaPlayer.release();
mLibVLC.release();
}
@Override
protected void onStart() {
super.onStart();
final IVLCVout vlcVout = mMediaPlayer.getVLCVout();
if (mVideoSurface != null) {
vlcVout.setVideoView(mVideoSurface);
if (mSubtitlesSurface != null) {
vlcVout.setSubtitlesView(mSubtitlesSurface);
}
} else {
vlcVout.setVideoView(mVideoTexture);
}
vlcVout.attachViews(this);
String url = getString(R.string.videoURL);
Uri uri = Uri.parse(url);
final Media media = new Media(mLibVLC, uri);
mMediaPlayer.setMedia(media);
media.release();
mMediaPlayer.play();
if (mOnLayoutChangeListener == null) {
mOnLayoutChangeListener = new View.OnLayoutChangeListener() {
private final Runnable mRunnable = new Runnable() {
@Override
public void run() {
updateVideoSurfaces();
}
};
@Override
public void onLayoutChange(View v, int left, int top, int right,
int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) {
if (left != oldLeft || top != oldTop || right != oldRight || bottom != oldBottom) {
mHandler.removeCallbacks(mRunnable);
mHandler.post(mRunnable);
}
}
};
}
mVideoSurfaceFrame.addOnLayoutChangeListener(mOnLayoutChangeListener);
}
@Override
protected void onStop() {
super.onStop();
if (mOnLayoutChangeListener != null) {
mVideoSurfaceFrame.removeOnLayoutChangeListener(mOnLayoutChangeListener);
mOnLayoutChangeListener = null;
}
mMediaPlayer.stop();
mMediaPlayer.getVLCVout().detachViews();
}
private void changeMediaPlayerLayout(int displayW, int displayH) {
log.debug("displayW={}, displayH={}", displayW, displayH);
/* Change the video placement using the MediaPlayer API */
int dispWd = displayW;
int dispHt = displayH;
dispWd = mVideoSurface.getWidth(); //Note: we do NOT want to use the entire display!
dispHt = mVideoSurface.getHeight();
switch (CURRENT_SIZE) {
case SURFACE_BEST_FIT:
mMediaPlayer.setAspectRatio(null);
mMediaPlayer.setScale(0);
break;
case SURFACE_FIT_SCREEN:
case SURFACE_FILL: {
Media.VideoTrack vtrack = mMediaPlayer.getCurrentVideoTrack();
if (vtrack == null) {
return;
}
final boolean videoSwapped = vtrack.orientation == Media.VideoTrack.Orientation.LeftBottom
|| vtrack.orientation == Media.VideoTrack.Orientation.RightTop;
if (CURRENT_SIZE == SURFACE_FIT_SCREEN) {
int videoW = vtrack.width;
int videoH = vtrack.height;
if (videoSwapped) {
int swap = videoW;
videoW = videoH;
videoH = swap;
}
if (vtrack.sarNum != vtrack.sarDen) {
videoW = videoW * vtrack.sarNum / vtrack.sarDen;
}
float ar = videoW / (float) videoH;
float dar = dispWd / (float) dispHt;
//noinspection unused
float scale;
if (dar >= ar) {
scale = dispWd / (float) videoW; /* horizontal */
} else {
scale = dispHt / (float) videoH; /* vertical */
}
log.debug("scale={}", scale);
mMediaPlayer.setScale(scale);
mMediaPlayer.setAspectRatio(null);
} else {
mMediaPlayer.setScale(0);
mMediaPlayer.setAspectRatio(!videoSwapped ? ""+dispWd+":"+dispHt
: ""+dispHt+":"+dispWd);
}
break;
}
case SURFACE_16_9:
mMediaPlayer.setAspectRatio("16:9");
mMediaPlayer.setScale(0);
break;
case SURFACE_4_3:
mMediaPlayer.setAspectRatio("4:3");
mMediaPlayer.setScale(0);
break;
case SURFACE_ORIGINAL:
mMediaPlayer.setAspectRatio(null);
mMediaPlayer.setScale(1);
break;
}
}
private void updateVideoSurfaces() {
log.debug("...");
int sw = getWindow().getDecorView().getWidth();
int sh = getWindow().getDecorView().getHeight();
// sanity check
if (sw * sh == 0) {
log.error("Invalid surface size");
return;
}
mMediaPlayer.getVLCVout().setWindowSize(sw, sh);
ViewGroup.LayoutParams lp = mVideoView.getLayoutParams();
if (mVideoWidth * mVideoHeight == 0) {
/* Case of OpenGL vouts: handles the placement of the video using MediaPlayer API */
lp.width = ViewGroup.LayoutParams.MATCH_PARENT;
lp.height = ViewGroup.LayoutParams.MATCH_PARENT;
mVideoView.setLayoutParams(lp);
lp = mVideoSurfaceFrame.getLayoutParams();
lp.width = ViewGroup.LayoutParams.MATCH_PARENT;
lp.height = ViewGroup.LayoutParams.MATCH_PARENT;
mVideoSurfaceFrame.setLayoutParams(lp);
changeMediaPlayerLayout(sw, sh);
return;
}
if (lp.width == lp.height && lp.width == ViewGroup.LayoutParams.MATCH_PARENT) {
/* We handle the placement of the video using Android View LayoutParams */
mMediaPlayer.setAspectRatio(null);
mMediaPlayer.setScale(0);
}
double dw = sw, dh = sh;
final boolean isPortrait = getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
if (sw > sh && isPortrait || sw < sh && !isPortrait) {
dw = sh;
dh = sw;
}
// compute the aspect ratio
double ar, vw;
if (mVideoSarDen == mVideoSarNum) {
/* No indication about the density, assuming 1:1 */
vw = mVideoVisibleWidth;
ar = (double)mVideoVisibleWidth / (double)mVideoVisibleHeight;
} else {
/* Use the specified aspect ratio */
vw = mVideoVisibleWidth * (double)mVideoSarNum / mVideoSarDen;
ar = vw / mVideoVisibleHeight;
}
// compute the display aspect ratio
double dar = dw / dh;
switch (CURRENT_SIZE) {
case SURFACE_BEST_FIT:
if (dar < ar) {
dh = dw / ar;
} else {
dw = dh * ar;
}
break;
case SURFACE_FIT_SCREEN:
if (dar >= ar) {
dh = dw / ar; /* horizontal */
} else {
dw = dh * ar; /* vertical */
}
break;
case SURFACE_FILL:
break;
case SURFACE_16_9:
ar = 16.0 / 9.0;
if (dar < ar) {
dh = dw / ar;
} else {
dw = dh * ar;
}
break;
case SURFACE_4_3:
ar = 4.0 / 3.0;
if (dar < ar) {
dh = dw / ar;
} else {
dw = dh * ar;
}
break;
case SURFACE_ORIGINAL:
dh = mVideoVisibleHeight;
dw = vw;
break;
}
// set display size
lp.width = (int) Math.ceil(dw * mVideoWidth / mVideoVisibleWidth);
lp.height = (int) Math.ceil(dh * mVideoHeight / mVideoVisibleHeight);
mVideoView.setLayoutParams(lp);
if (mSubtitlesSurface != null) {
mSubtitlesSurface.setLayoutParams(lp);
}
// set frame size (crop if necessary)
lp = mVideoSurfaceFrame.getLayoutParams();
lp.width = (int) Math.floor(dw);
lp.height = (int) Math.floor(dh);
mVideoSurfaceFrame.setLayoutParams(lp);
mVideoView.invalidate();
if (mSubtitlesSurface != null) {
mSubtitlesSurface.invalidate();
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
@Override
public void onNewVideoLayout(IVLCVout vlcVout, int width, int height,
int visibleWidth, int visibleHeight,
int sarNum, int sarDen) {
log.debug("...");
mVideoWidth = width;
mVideoHeight = height;
mVideoVisibleWidth = visibleWidth;
mVideoVisibleHeight = visibleHeight;
mVideoSarNum = sarNum;
mVideoSarDen = sarDen;
updateVideoSurfaces();
}
@Override
public void onSurfacesCreated(IVLCVout vlcVout) {
log.debug("vlcVout={}", vlcVout);
}
/**
* This callback is called when surfaces are destroyed.
*/
@Override
public void onSurfacesDestroyed(IVLCVout vlcVout) {
log.debug("vlcVout={}", vlcVout);
}
public void onStopClientMonitoring(View view) {
// log.info("UI -> Stop monitoring clientId= ...");
// onBackPressed();
String androidSDKRelease = Build.VERSION.RELEASE;
int androidSDKInt = Build.VERSION.SDK_INT;
String androidInfo = String.format(Locale.getDefault(), "Android %s (Version %d)", androidSDKRelease, androidSDKInt);
String appVersionName = BuildConfig.VERSION_NAME;
String appName = getString(R.string.app_name);
String appInfoTitle = String.format(getString(R.string.app_info_title), appName);
String infoMsg = String.format(getString(R.string.app_info_message), appVersionName, androidInfo);
new AlertDialog.Builder(this).setTitle(appInfoTitle)
.setMessage(infoMsg)
.setPositiveButton(getString(R.string.button_ok), new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// Dismiss dialog
dialog.dismiss();
}
})
.create()
.show();
}
}

LibVLC for Android - Stretch Live Stream RTSP to desired aspect ratio on SurfaceView

Okay, so I'm using LibVLC for Android (with Android Studio) to receive the live RTSP stream of an IP camera, since VideoView doesn't quite support live streams. I'm using sample code from the VideoLAN people, which can be found here:
https://code.videolan.org/videolan/libvlc-android-samples
And I've done a lot of investigation into the code to achieve a 16:9 aspect ratio out of the 4:3 that my camera outputs. The reason I'm trying to break the aspect ratio is that this IP camera records 1280x720 pixels but outputs 640x480 through its second stream. The problem is that the width isn't cropped but stretched from the sides, so it looks kind of compressed.
I've tried setting the 4 alignParent options to true on the SurfaceView, but no results. I also tried multiplying some of the width variables I found in the JavaActivity class code by 1.33333, which should theoretically stretch the width, but nothing happened at all, not even an error or an exception. I also tried making a new class extending SurfaceView and tweaking the onMeasure method, but no dice. This is the JavaActivity code as-is from the example (of course I've adapted mine to work with my project, but with minor changes):
package org.videolan.javasample;
import android.annotation.TargetApi;
import android.content.res.Configuration;
import android.graphics.PixelFormat;
import android.net.Uri;
import android.os.Build;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewStub;
import android.widget.FrameLayout;
import org.videolan.libvlc.IVLCVout;
import org.videolan.libvlc.LibVLC;
import org.videolan.libvlc.Media;
import org.videolan.libvlc.MediaPlayer;
import java.util.ArrayList;
public class JavaActivity extends AppCompatActivity implements IVLCVout.OnNewVideoLayoutListener {
private static final boolean USE_SURFACE_VIEW = true;
private static final boolean ENABLE_SUBTITLES = true;
private static final String TAG = "JavaActivity";
private static final String SAMPLE_URL = "http://download.blender.org/peach/bigbuckbunny_movies/BigBuckBunny_640x360.m4v";
// Not the actual RTSP Live Stream link but you know...
private static final int SURFACE_BEST_FIT = 0;
private static final int SURFACE_FIT_SCREEN = 1;
private static final int SURFACE_FILL = 2;
private static final int SURFACE_16_9 = 3;
private static final int SURFACE_4_3 = 4;
private static final int SURFACE_ORIGINAL = 5;
private static int CURRENT_SIZE = SURFACE_BEST_FIT;
private FrameLayout mVideoSurfaceFrame = null;
private SurfaceView mVideoSurface = null;
private SurfaceView mSubtitlesSurface = null;
private TextureView mVideoTexture = null;
private View mVideoView = null;
private final Handler mHandler = new Handler();
private View.OnLayoutChangeListener mOnLayoutChangeListener = null;
private LibVLC mLibVLC = null;
private MediaPlayer mMediaPlayer = null;
private int mVideoHeight = 0;
private int mVideoWidth = 0;
private int mVideoVisibleHeight = 0;
private int mVideoVisibleWidth = 0;
private int mVideoSarNum = 0;
private int mVideoSarDen = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
final ArrayList<String> args = new ArrayList<>();
args.add("-vvv");
mLibVLC = new LibVLC(this, args);
mMediaPlayer = new MediaPlayer(mLibVLC);
mVideoSurfaceFrame = (FrameLayout) findViewById(R.id.video_surface_frame);
if (USE_SURFACE_VIEW) {
ViewStub stub = (ViewStub) findViewById(R.id.surface_stub);
mVideoSurface = (SurfaceView) stub.inflate();
if (ENABLE_SUBTITLES) {
stub = (ViewStub) findViewById(R.id.subtitles_surface_stub);
mSubtitlesSurface = (SurfaceView) stub.inflate();
mSubtitlesSurface.setZOrderMediaOverlay(true);
mSubtitlesSurface.getHolder().setFormat(PixelFormat.TRANSLUCENT);
}
mVideoView = mVideoSurface;
}
else
{
ViewStub stub = (ViewStub) findViewById(R.id.texture_stub);
mVideoTexture = (TextureView) stub.inflate();
mVideoView = mVideoTexture;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
mMediaPlayer.release();
mLibVLC.release();
}
@Override
protected void onStart() {
super.onStart();
final IVLCVout vlcVout = mMediaPlayer.getVLCVout();
if (mVideoSurface != null) {
vlcVout.setVideoView(mVideoSurface);
if (mSubtitlesSurface != null)
vlcVout.setSubtitlesView(mSubtitlesSurface);
}
else
vlcVout.setVideoView(mVideoTexture);
vlcVout.attachViews(this);
Media media = new Media(mLibVLC, Uri.parse(SAMPLE_URL));
mMediaPlayer.setMedia(media);
media.release();
mMediaPlayer.play();
if (mOnLayoutChangeListener == null) {
mOnLayoutChangeListener = new View.OnLayoutChangeListener() {
private final Runnable mRunnable = new Runnable() {
@Override
public void run() {
updateVideoSurfaces();
}
};
@Override
public void onLayoutChange(View v, int left, int top, int right,
int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) {
if (left != oldLeft || top != oldTop || right != oldRight || bottom != oldBottom) {
mHandler.removeCallbacks(mRunnable);
mHandler.post(mRunnable);
}
}
};
}
mVideoSurfaceFrame.addOnLayoutChangeListener(mOnLayoutChangeListener);
}
@Override
protected void onStop() {
super.onStop();
if (mOnLayoutChangeListener != null) {
mVideoSurfaceFrame.removeOnLayoutChangeListener(mOnLayoutChangeListener);
mOnLayoutChangeListener = null;
}
mMediaPlayer.stop();
mMediaPlayer.getVLCVout().detachViews();
}
private void changeMediaPlayerLayout(int displayW, int displayH) {
/* Change the video placement using the MediaPlayer API */
switch (CURRENT_SIZE) {
case SURFACE_BEST_FIT:
mMediaPlayer.setAspectRatio(null);
mMediaPlayer.setScale(0);
break;
case SURFACE_FIT_SCREEN:
case SURFACE_FILL: {
Media.VideoTrack vtrack = mMediaPlayer.getCurrentVideoTrack();
if (vtrack == null)
return;
final boolean videoSwapped = vtrack.orientation == Media.VideoTrack.Orientation.LeftBottom
|| vtrack.orientation == Media.VideoTrack.Orientation.RightTop;
if (CURRENT_SIZE == SURFACE_FIT_SCREEN) {
int videoW = vtrack.width;
int videoH = vtrack.height;
if (videoSwapped) {
int swap = videoW;
videoW = videoH;
videoH = swap;
}
if (vtrack.sarNum != vtrack.sarDen)
videoW = videoW * vtrack.sarNum / vtrack.sarDen;
float ar = videoW / (float) videoH;
float dar = displayW / (float) displayH;
float scale;
if (dar >= ar)
scale = displayW / (float) videoW; /* horizontal */
else
scale = displayH / (float) videoH; /* vertical */
mMediaPlayer.setScale(scale);
mMediaPlayer.setAspectRatio(null);
} else {
mMediaPlayer.setScale(0);
mMediaPlayer.setAspectRatio(!videoSwapped ? ""+displayW+":"+displayH
: ""+displayH+":"+displayW);
}
break;
}
case SURFACE_16_9:
mMediaPlayer.setAspectRatio("16:9");
mMediaPlayer.setScale(0);
break;
case SURFACE_4_3:
mMediaPlayer.setAspectRatio("4:3");
mMediaPlayer.setScale(0);
break;
case SURFACE_ORIGINAL:
mMediaPlayer.setAspectRatio(null);
mMediaPlayer.setScale(1);
break;
}
}
private void updateVideoSurfaces() {
int sw = getWindow().getDecorView().getWidth();
int sh = getWindow().getDecorView().getHeight();
// sanity check
if (sw * sh == 0) {
Log.e(TAG, "Invalid surface size");
return;
}
mMediaPlayer.getVLCVout().setWindowSize(sw, sh);
ViewGroup.LayoutParams lp = mVideoView.getLayoutParams();
if (mVideoWidth * mVideoHeight == 0) {
/* Case of OpenGL vouts: handles the placement of the video using MediaPlayer API */
lp.width = ViewGroup.LayoutParams.MATCH_PARENT;
lp.height = ViewGroup.LayoutParams.MATCH_PARENT;
mVideoView.setLayoutParams(lp);
lp = mVideoSurfaceFrame.getLayoutParams();
lp.width = ViewGroup.LayoutParams.MATCH_PARENT;
lp.height = ViewGroup.LayoutParams.MATCH_PARENT;
mVideoSurfaceFrame.setLayoutParams(lp);
changeMediaPlayerLayout(sw, sh);
return;
}
if (lp.width == lp.height && lp.width == ViewGroup.LayoutParams.MATCH_PARENT) {
/* We handle the placement of the video using Android View LayoutParams */
mMediaPlayer.setAspectRatio(null);
mMediaPlayer.setScale(0);
}
double dw = sw, dh = sh;
final boolean isPortrait = getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
if (sw > sh && isPortrait || sw < sh && !isPortrait) {
dw = sh;
dh = sw;
}
// compute the aspect ratio
double ar, vw;
if (mVideoSarDen == mVideoSarNum) {
/* No indication about the density, assuming 1:1 */
vw = mVideoVisibleWidth;
ar = (double)mVideoVisibleWidth / (double)mVideoVisibleHeight;
} else {
/* Use the specified aspect ratio */
vw = mVideoVisibleWidth * (double)mVideoSarNum / mVideoSarDen;
ar = vw / mVideoVisibleHeight;
}
// compute the display aspect ratio
double dar = dw / dh;
switch (CURRENT_SIZE) {
case SURFACE_BEST_FIT:
if (dar < ar)
dh = dw / ar;
else
dw = dh * ar;
break;
case SURFACE_FIT_SCREEN:
if (dar >= ar)
dh = dw / ar; /* horizontal */
else
dw = dh * ar; /* vertical */
break;
case SURFACE_FILL:
break;
case SURFACE_16_9:
ar = 16.0 / 9.0;
if (dar < ar)
dh = dw / ar;
else
dw = dh * ar;
break;
case SURFACE_4_3:
ar = 4.0 / 3.0;
if (dar < ar)
dh = dw / ar;
else
dw = dh * ar;
break;
case SURFACE_ORIGINAL:
dh = mVideoVisibleHeight;
dw = vw;
break;
}
// set display size
lp.width = (int) Math.ceil(dw * mVideoWidth / mVideoVisibleWidth);
lp.height = (int) Math.ceil(dh * mVideoHeight / mVideoVisibleHeight);
mVideoView.setLayoutParams(lp);
if (mSubtitlesSurface != null)
mSubtitlesSurface.setLayoutParams(lp);
// set frame size (crop if necessary)
lp = mVideoSurfaceFrame.getLayoutParams();
lp.width = (int) Math.floor(dw);
lp.height = (int) Math.floor(dh);
mVideoSurfaceFrame.setLayoutParams(lp);
mVideoView.invalidate();
if (mSubtitlesSurface != null)
mSubtitlesSurface.invalidate();
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
@Override
public void onNewVideoLayout(IVLCVout vlcVout, int width, int height, int visibleWidth, int visibleHeight, int sarNum, int sarDen) {
mVideoWidth = width;
mVideoHeight = height;
mVideoVisibleWidth = visibleWidth;
mVideoVisibleHeight = visibleHeight;
mVideoSarNum = sarNum;
mVideoSarDen = sarDen;
updateVideoSurfaces();
}
}
This is how it's looking right now:
And this is how I'd like it to show:
I've photoshopped the second one btw.
Any help is appreciated. If you need any more data, just let me know.
I followed along with the same sample and ran into the same issues. It turns out you need to add an argument when initializing VLC.
final ArrayList<String> args = new ArrayList<>();
args.add("--vout=android-display"); // Add this line!
args.add("-vvv");
mLibVLC = new LibVLC(this, args);
mMediaPlayer = new MediaPlayer(mLibVLC);
Credit goes to Alexander Ukhov in the videolan forums for pointing this out.
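If you then still want the stretched 16:9 look on the 4:3 stream, the MediaPlayer API calls that the sample's SURFACE_16_9 branch already uses should do it (a sketch):
// Force the 4:3 stream to render as 16:9 (stretches the width).
mMediaPlayer.setAspectRatio("16:9");
mMediaPlayer.setScale(0); // 0 lets libvlc fit the video to the surface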
Here ( https://drive.google.com/file/d/1lK_aOOYaKwMxvtpyyEoDXEFWcrDjGMyy/view?usp=sharing ) is demo source code for both RTMP and RTSP. I have personally checked it and it works. I used it for live video uploading to the server; it uploads video as you shoot it. Downstreaming needs to be done by the back-end developer; they will just provide you a link, and you need to use that for downstreaming.

FFmpeg - Transition Effect Between Images of Video Slideshow

I'm trying to get the right command for image animation effects.
This is my command to create a video from images, but I want to add an image transition effect (for example, fade in/fade out) between two images.
public static String[] cmdCreateVideo(int frame, String videoSource, String outPut) {
float duration_frame;
if (frame <= 20) {
duration_frame = 1.0F;
} else {
duration_frame = 0.6F;
}
String cmd = "-framerate 1/" + duration_frame + " -start_number 0 -i " + videoSource + " -vcodec mpeg4 -q:v 3 -r 20 -vf scale=480x800 " + outPut;
return cmd.split(" ");
}
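For a plain crossfade between two stills, one option is FFmpeg's xfade filter; note this assumes your bundled FFmpeg build is 4.3 or newer, since older builds don't ship xfade. A hypothetical helper in the same style as cmdCreateVideo:
// Hypothetical helper: crossfade two still images into one clip (requires FFmpeg 4.3+ for xfade).
public static String[] cmdCrossfade(String img1, String img2, String outPut) {
float imageDur = 3.0F; // seconds each image is shown
float fadeDur = 1.0F; // seconds of crossfade overlap
// offset = when the fade starts, relative to the start of the first input
String filter = "[0][1]xfade=transition=fade:duration=" + fadeDur + ":offset=" + (imageDur - fadeDur) + ",format=yuv420p";
String cmd = "-loop 1 -t " + imageDur + " -i " + img1 + " -loop 1 -t " + imageDur + " -i " + img2 + " -filter_complex " + filter + " -r 20 " + outPut;
return cmd.split(" ");
}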
In this project you can have 4 types of effect:
1. No effect
2. FadeIn
3. Rotate
4. SlideIn
Add your images to a Parcelable[]; these will be your input images (you can also use a bitmap array or other structures). Pass these images to the Effect activity class, as in the launch sketch below:
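A minimal launch sketch (the extra key matches the getParcelableArrayExtra("parcelableUris") call in the activity below; the sample paths are assumptions, and android.net.Uri is Parcelable):
// Sketch: hand the selected image Uris to EffectActivity.
Uri[] imageUris = { Uri.parse("file:///sdcard/a.jpg"), Uri.parse("file:///sdcard/b.jpg") }; // assumed sample images
Intent intent = new Intent(this, EffectActivity.class);
intent.putExtra("parcelableUris", imageUris);
startActivity(intent);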
EffectActivity.java
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.target.Target;
import com.hini.slideshow.R;
import com.hini.slideshow.SlideApplication;
import com.hini.slideshow.draw.SlideShow;
import com.hini.slideshow.encoding.SlideEncoder;
import java.util.concurrent.ExecutionException;
/**
* Created by PrakashSaurav.
*/
public class EffectActivity extends Activity implements View.OnClickListener {
private static final String TAG = "EffectActivity";
private ViewPager mViewPager;
private Button mBtnBitRate, mBtnTime, mBtnEffect, mBtnNext, mBtnPrev;
private TextView mTvBitRate, mTvTime, mTvEffect;
private Parcelable[] mParcelableUris;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_effect);
mParcelableUris = getIntent().getParcelableArrayExtra("parcelableUris");
if (mParcelableUris == null) {
Toast.makeText(getApplicationContext(), "There is no path info for the photo.", Toast.LENGTH_SHORT).show();
return;
}
mViewPager = (ViewPager) findViewById(R.id.pager_image);
mViewPager.setAdapter(new CustomPageAdapter());
mBtnBitRate = (Button) findViewById(R.id.btn_bgm);
mTvBitRate = (TextView) findViewById(R.id.tv_bitrate);
mBtnTime = (Button) findViewById(R.id.btn_time);
mTvTime = (TextView) findViewById(R.id.tv_time);
mBtnEffect = (Button) findViewById(R.id.btn_effect);
mTvEffect = (TextView) findViewById(R.id.tv_effect);
mBtnNext = (Button) findViewById(R.id.btn_next);
mBtnPrev = (Button) findViewById(R.id.btn_prev);
mBtnBitRate.setOnClickListener(this);
mBtnTime.setOnClickListener(this);
mBtnEffect.setOnClickListener(this);
mBtnNext.setOnClickListener(this);
mBtnPrev.setOnClickListener(this);
}
int timeCheck = 0;
int effectCheck = 0;
boolean isClicked;
@Override
public void onClick(View v) {
// BGM Setting
if (v == mBtnBitRate) {
isClicked = !isClicked;
int bitRate;
if (isClicked)
bitRate = 2000 * 1024;
else
bitRate = 500 * 1024;
SlideApplication.BIT_RATE = bitRate;
mTvBitRate.setText(String.valueOf(bitRate / 1024) + "kbps");
Toast.makeText(getApplicationContext(), SlideApplication.BIT_RATE / 1024 + "kbps", Toast.LENGTH_SHORT).show();
}
// Time Setting
else if (v == mBtnTime) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
final String[] items = {"2s", "3s", "5s", "10s"};
if (SlideApplication.SLIDE_TIME == 2) timeCheck = 0;
else if (SlideApplication.SLIDE_TIME == 3) timeCheck = 1;
else if (SlideApplication.SLIDE_TIME == 5) timeCheck = 2;
else if (SlideApplication.SLIDE_TIME == 10) timeCheck = 3;
builder.setSingleChoiceItems(items, timeCheck, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
timeCheck = which;
Toast.makeText(getApplicationContext(), items[which], Toast.LENGTH_SHORT).show();
}
});
builder.setNegativeButton("cancel", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
builder.setPositiveButton("Confirm", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
SlideApplication.SLIDE_TIME = Integer.parseInt(items[timeCheck].split("s")[0]); // strip the "s" suffix from e.g. "2s"
mTvTime.setText(Integer.toString(SlideApplication.SLIDE_TIME));
}
});
AlertDialog dialog = builder.create();
dialog.show();
}
// Effect Setting
else if (v == mBtnEffect) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
final String[] items = {"None", "FadeIn", "Rotate", "SlideIn"};
if (SlideApplication.SLIDE_EFFECT == SlideShow.NONE) effectCheck = 0;
else if (SlideApplication.SLIDE_EFFECT == SlideShow.FADE_IN) effectCheck = 1;
else if (SlideApplication.SLIDE_EFFECT == SlideShow.ROTATE) effectCheck = 2;
else if (SlideApplication.SLIDE_EFFECT == SlideShow.SLIDE_IN) effectCheck = 3;
builder.setSingleChoiceItems(items, effectCheck, new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialog, int which) {
effectCheck = which;
Toast.makeText(getApplicationContext(), items[which], Toast.LENGTH_SHORT).show();
}
});
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
builder.setPositiveButton("Confirm", new DialogInterface.OnClickListener() {
#Override
public void onClick(DialogInterface dialog, int which) {
SlideApplication.SLIDE_EFFECT = effectCheck;
mTvEffect.setText(items[SlideApplication.SLIDE_EFFECT]);
}
});
AlertDialog dialog = builder.create();
dialog.show();
}
// Next
else if (v == mBtnNext) {
new BitmapChangerTask().execute();
}
// Prev
else if (v == mBtnPrev) {
finish();
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == 1) {
if (resultCode == RESULT_OK) {
if (data != null && data.getStringExtra("bgm_path") != null) {
SlideApplication.BGM_PATH = data.getStringExtra("bgm_path");
}
}
}
}
private class BitmapChangerTask extends AsyncTask<Void, Void, Void> {
@Override
protected Void doInBackground(Void... params) {
for (int i = 0; i < mParcelableUris.length; i++) {
try {
Bitmap bm = Glide.with(getApplicationContext()).load(mParcelableUris[i].toString())
.asBitmap()
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(Target.SIZE_ORIGINAL, Target.SIZE_ORIGINAL)
.get();
int width = bm.getWidth();
int height = bm.getHeight();
// Land ( 1280 x 720 )
if (width > height) {
bm = Bitmap.createScaledBitmap(bm, SlideEncoder.WIDTH, ((SlideEncoder.WIDTH * height) / width), true);
}
// Port ( 720 x 1280 )
else if (width < height) {
bm = Bitmap.createScaledBitmap(bm, ((SlideEncoder.HEIGHT * width) / height), SlideEncoder.HEIGHT, true);
}
// Square ( 800 x 800 )
else {
bm = Bitmap.createScaledBitmap(bm, SlideEncoder.WIDTH, SlideEncoder.HEIGHT, true);
}
SlideApplication.bitmapList.add(bm);
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
Intent i = new Intent(getApplicationContext(), EncodingActivity.class);
startActivity(i);
}
}
private class CustomPageAdapter extends PagerAdapter {
@Override
public int getCount() {
return mParcelableUris.length;
}
@Override
public int getItemPosition(Object object) {
return POSITION_NONE;
}
@Override
public Object instantiateItem(ViewGroup container, int position) {
ImageView iv = new ImageView(getApplicationContext());
Glide.with(getApplicationContext())
.load(mParcelableUris[position].toString())
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(iv);
try {
container.addView(iv);
} catch (IllegalStateException ise) {
ise.printStackTrace();
}
return iv;
}
@Override
public void destroyItem(ViewGroup container, int position, Object object) {
container.removeView((View) object);
}
@Override
public boolean isViewFromObject(View view, Object object) {
return view == object;
}
}
}
activity_effect.xml
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<android.support.v4.view.ViewPager
android:id="#+id/pager_image"
android:layout_width="match_parent"
android:layout_height="350dp">
</android.support.v4.view.ViewPager>
<Button
android:id="#+id/btn_bgm"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="#+id/pager_image"
android:layout_marginStart="80dp"
android:layout_marginTop="30dp"
android:text="BitRate"/>
<TextView
android:id="#+id/tv_bitrate"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBaseline="#+id/btn_bgm"
android:layout_alignParentEnd="true"
android:layout_below="#+id/pager_image"
android:layout_marginEnd="80dp"
android:singleLine="true"
android:ellipsize="marquee"
android:text="500Kbps"
android:textSize="20dp"/>
<Button
android:id="#+id/btn_time"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="#+id/btn_bgm"
android:layout_marginStart="80dp"
android:text="time"/>
<TextView
android:id="#+id/tv_time"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBaseline="#+id/btn_time"
android:layout_alignParentEnd="true"
android:layout_below="#+id/pager_image"
android:layout_marginEnd="80dp"
android:text="2s"
android:textSize="20dp"/>
<Button
android:id="#+id/btn_effect"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="#+id/btn_time"
android:layout_marginStart="80dp"
android:text="EFFECT"/>
<TextView
android:id="#+id/tv_effect"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBaseline="#+id/btn_effect"
android:layout_alignParentEnd="true"
android:layout_below="#+id/pager_image"
android:layout_marginEnd="80dp"
android:text="none"
android:textSize="20dp"/>
<Button
android:id="#+id/btn_prev"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_alignParentStart="true"
android:text="Previous"/>
<Button
android:id="#+id/btn_next"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_alignParentEnd="true"
android:text="NEXT"/>
</RelativeLayout>
EncodingActivity.java
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import com.hini.slideshow.SlideApplication;
import com.hini.slideshow.draw.SlideShow;
import com.hini.slideshow.encoding.SlideEncoder;
import java.io.File;
import java.io.IOException;
public class EncodingActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
new EncodingTask().execute();
}
class EncodingTask extends AsyncTask<Void, String, Void> {
File f = new File(Environment.getExternalStorageDirectory().getAbsolutePath()
+ "/test_" + SlideApplication.BIT_RATE / 1024 + "_" + SlideApplication.SLIDE_EFFECT + ".mp4");
ProgressDialog dialog;
@Override
protected void onPreExecute() {
super.onPreExecute();
dialog = new ProgressDialog(EncodingActivity.this);
dialog.setTitle("Generating Video..");
dialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
dialog.setCancelable(false);
dialog.show();
}
@Override
protected Void doInBackground(Void... params) {
long startTime = System.currentTimeMillis();
if (f.exists()) f.delete();
SlideEncoder slideEncoder = new SlideEncoder();
try {
slideEncoder.prepareEncoder(f);
Bitmap prevBm = null;
dialog.setMax(SlideApplication.bitmapList.size());
for (int idx = 0; idx < SlideApplication.bitmapList.size(); idx++) {
publishProgress(String.valueOf(idx + 1));
SlideShow.init();
if (idx > 0) prevBm = SlideApplication.bitmapList.get(idx - 1);
Bitmap curBm = SlideApplication.bitmapList.get(idx);
for (int i = 0; i < (SlideApplication.FRAME_PER_SEC * SlideApplication.SLIDE_TIME); i++) {
// Drain any data from the encoder into the muxer.
slideEncoder.drainEncoder(false);
// Generate a frame and submit it.
slideEncoder.generateFrame(prevBm, curBm);
}
}
slideEncoder.drainEncoder(true);
} catch (IOException e) {
e.printStackTrace();
} finally {
slideEncoder.releaseEncoder();
}
Log.e("TAG", "total time : " + (System.currentTimeMillis() - startTime));
return null;
}
@Override
protected void onProgressUpdate(String... values) {
super.onProgressUpdate(values);
dialog.setProgress(Integer.parseInt(values[0]));
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
if (dialog.isShowing()) dialog.dismiss();
for (Bitmap bm : SlideApplication.bitmapList) bm.recycle();
SlideApplication.bitmapList.clear();
Intent i = new Intent(EncodingActivity.this, MainActivity.class);
i.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(i);
}
}
SlideEncoder.java
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.view.Surface;
import com.hini.slideshow.SlideApplication;
import com.hini.slideshow.draw.SlideShow;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
public class SlideEncoder {
private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;
public static final int WIDTH = 800;
public static final int HEIGHT = 800;
private static final int IFRAME_INTERVAL = 3;
private MediaCodec.BufferInfo mBufferInfo;
private MediaCodec mEncoder;
private Surface mInputSurface;
private MediaMuxer mMuxer;
private int mTrackIndex;
private boolean mMuxerStarted;
private long mFakePts;
void prepareEncoder(File outputFile) throws IOException {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, SlideApplication.BIT_RATE);
format.setInteger(MediaFormat.KEY_FRAME_RATE, SlideApplication.FRAME_PER_SEC);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();
mEncoder.start();
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
mMuxer = new MediaMuxer(outputFile.toString(),
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mTrackIndex = -1;
mMuxerStarted = false;
}
/**
* Releases the MediaCodec encoder, its input Surface, and the MediaMuxer.
*/
public void releaseEncoder() {
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mInputSurface != null) {
mInputSurface.release();
mInputSurface = null;
}
if (mMuxer != null) {
// stop() throws IllegalStateException if the muxer was never started
// (e.g. when encoding fails before any output is produced).
if (mMuxerStarted) mMuxer.stop();
mMuxer.release();
mMuxer = null;
}
}
/**
* Extracts all pending data from the encoder.
* <p/>
* If endOfStream is not set, this returns when there is no more data to drain. If it
* is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
* Calling this with endOfStream set should be done once, right before stopping the muxer.
*/
public void drainEncoder(boolean endOfStream) {
final int TIMEOUT_USEC = 10000;
if (endOfStream) {
mEncoder.signalEndOfInputStream();
}
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
} else {
// keep spinning: EOS was signalled, wait for it to appear on the output
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = mEncoder.getOutputFormat();
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
} else if (encoderStatus < 0) {
// unexpected negative status from dequeueOutputBuffer(); ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
if (!mMuxerStarted) {
throw new RuntimeException("muxer hasn't started");
}
// adjust the ByteBuffer values to match BufferInfo
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mBufferInfo.presentationTimeUs = mFakePts;
long timeStampLength = 1000000L / SlideApplication.FRAME_PER_SEC;
mFakePts += timeStampLength;
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
// reached end of stream unexpectedly
}
break; // out of while
}
}
}
}
/**
* Generates a frame, writing to the Surface via the "software" API (lock/unlock).
* <p/>
* There's no way to set the time stamp.
*/
public void generateFrame(Bitmap prevBm, Bitmap curBm) {
Canvas canvas = mInputSurface.lockCanvas(null);
try {
SlideShow.draw(canvas, prevBm, curBm);
} finally {
mInputSurface.unlockCanvasAndPost(canvas);
}
}
}
*****SlideShow.java*****
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import com.appwallet.slideshow.SlideApplication;
import com.appwallet.slideshow.encoding.SlideEncoder;
public class SlideShow {
// effect
public static final int NONE = 0;
public static final int FADE_IN = 1;
public static final int ROTATE = 2;
public static final int SLIDE_IN = 3;
// position
private static int curStartX;
private static int curStartY;
private static int prevStartX;
private static int prevStartY;
//Variables used for effect
private static float in_alpha = 0f;
private static float rotate = 0;
private static int slideX = SlideEncoder.WIDTH;
private static int slideCount = 1;
private static float out_alpha = 255f;
/**
*
* @param canvas canvas
* @param prevBm background bitmap
* @param curBm foreground bitmap
*/
public static void draw(Canvas canvas, Bitmap prevBm, Bitmap curBm) {
if (canvas == null || curBm == null) return;
setLocation(prevBm, curBm);
if (SlideApplication.SLIDE_EFFECT == ROTATE && prevBm == null)
canvas.drawColor(Color.BLACK);
if (prevBm != null) drawFadeOut(canvas, prevBm);
switch (SlideApplication.SLIDE_EFFECT) {
case NONE:
drawNone(canvas, curBm);
break;
case FADE_IN:
drawFadeIn(canvas, curBm);
break;
case ROTATE:
drawRotate(canvas, curBm);
break;
case SLIDE_IN:
drawSlideIn(canvas, curBm);
break;
default:
throw new IllegalStateException("unexpected state");
}
}
/**
* Adjusts the draw position according to the bitmap's shape,
* centering it on the encoder canvas.
* @param prevBm background bitmap
* @param curBm foreground bitmap
*/
private static void setLocation(Bitmap prevBm, Bitmap curBm) {
if (curBm != null) {
int cWidth = curBm.getWidth();
int cHeight = curBm.getHeight();
if (cWidth > cHeight) {
curStartX = 0;
curStartY = (SlideEncoder.HEIGHT - cHeight) / 2;
} else if (cHeight > cWidth) {
curStartX = (SlideEncoder.WIDTH - cWidth) / 2;
curStartY = 0;
} else {
curStartX = 0;
curStartY = 0;
}
}
if (prevBm != null) {
int pWidth = prevBm.getWidth();
int pHeight = prevBm.getHeight();
if (pWidth > pHeight) {
prevStartX = 0;
prevStartY = (SlideEncoder.HEIGHT - pHeight) / 2;
} else if (pHeight > pWidth) {
prevStartX = (SlideEncoder.WIDTH - pWidth) / 2;
prevStartY = 0;
} else {
prevStartX = 0;
prevStartY = 0;
}
}
}
/**
* Resets all per-slide effect state; called before each new slide.
*/
public static void init() {
in_alpha = 0f;
out_alpha = 255f;
rotate = 0f;
slideX = 800;
slideCount = 1;
curStartX = 0;
curStartY = 0;
prevStartX = 0;
prevStartY = 0;
}
/**
* drawNone
*
* @param c canvas
* @param bm bitmap
*/
private static void drawNone(Canvas c, Bitmap bm) {
c.drawBitmap(bm, curStartX, curStartY, null);
}
/** Fade-in effect. */
private static void drawFadeIn(Canvas c, Bitmap bm) {
Paint p = new Paint();
// float division: 255 / FRAME_PER_SEC in int math truncates and makes Math.ceil() a no-op
int ratio = (int) Math.ceil(255f / SlideApplication.FRAME_PER_SEC);
in_alpha += ratio;
if (in_alpha > 255f) in_alpha = 255f;
p.setAlpha((int) in_alpha);
c.drawBitmap(bm, curStartX, curStartY, p);
}
/** Rotate effect. */
private static void drawRotate(Canvas c, Bitmap bm) {
Matrix matrix = new Matrix();
matrix.preTranslate(curStartX, curStartY);
float ratio = 360f / SlideApplication.FRAME_PER_SEC; // degrees per frame (float division)
rotate += Math.ceil(ratio);
if (rotate > 360) rotate = 360;
matrix.postRotate(rotate, SlideEncoder.WIDTH / 2, SlideEncoder.HEIGHT / 2);
c.drawBitmap(bm, matrix, null);
}
/**
* Slide-in effect.
*
* @param c canvas
* @param bm bitmap
*/
private static void drawSlideIn(Canvas c, Bitmap bm) {
Matrix matrix = new Matrix();
int ratio = 1;
if (slideCount < 30) ratio = (int) Math.pow(slideCount++, 1.4);
slideX -= ratio;
if (slideX < curStartX) slideX = curStartX;
matrix.setTranslate(slideX, curStartY);
c.drawBitmap(bm, matrix, null);
}
/**
* Fade-out effect (applies only to the picture drawn behind).
*
* @param c canvas
* @param bm bitmap
*/
private static void drawFadeOut(Canvas c, Bitmap bm) {
c.drawColor(Color.BLACK);
Paint p = new Paint();
int ratio = (int) Math.ceil(255f / SlideApplication.FRAME_PER_SEC); // float division, see drawFadeIn
out_alpha -= ratio;
if (out_alpha < 0f) out_alpha = 0f;
p.setAlpha((int) out_alpha);
c.drawBitmap(bm, prevStartX, prevStartY, p);
}
}
*****SlideApplication.java***** (holds the shared settings and bitmap list)
import android.app.Application;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Parcelable;
import android.view.View;
import com.appwallet.slideshow.draw.SlideShow;
import java.util.ArrayList;
public class SlideApplication extends Application {
public static String BGM_PATH = "";
public static int SLIDE_TIME = 2;
public static int SLIDE_EFFECT = SlideShow.NONE;
public static int BIT_RATE = 2000 * 1024;
public static int FRAME_PER_SEC = 30;
public static ArrayList<Bitmap> bitmapList = new ArrayList<>();
public static int overlayEffect=0;
public static Parcelable[] images;
public static String deleteFolderPath;
}

JavaCV video recorder orientation is not proper in portrait mode

Hi, I am using https://github.com/bytedeco/javacv/ for recording video.
When using landscape mode the orientation is fine, but when I change the orientation to portrait mode the video is rotated -90 degrees.
Does anybody have an idea what I may be doing wrong? Here is the code.
package org.bytedeco.javacv_android_example.record;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacpp.opencv_imgproc;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.javacv_android_example.R;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class RecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
/* The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
long startTime = 0;
boolean recording = false;
volatile boolean runAudioThread = true;
Frame[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;
private PowerManager.WakeLock mWakeLock;
private File ffmpeg_link = new File(Environment.getExternalStorageDirectory(), "stream.mp4");
private FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private Frame yuvImage = null;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
OpenCVFrameConverter.ToIplImage converter = new OpenCVFrameConverter.ToIplImage();
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
setContentView(R.layout.activity_record);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
}
@Override
protected void onResume() {
super.onResume();
if(mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if(mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if(cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if(mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if(keyCode == KeyEvent.KEYCODE_BACK) {
if(recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.activity_record, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if(1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG, "init recorder");
if(RECORD_LENGTH > 0) {
imagesIndex = 0;
images = new Frame[RECORD_LENGTH * frameRate];
timestamps = new long[images.length];
for(int i = 0; i < images.length; i++) {
images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
timestamps[i] = -1;
}
} else if(yuvImage == null) {
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
Log.i(LOG_TAG, "create yuvImage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch(InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if(recorder != null && recording) {
if(RECORD_LENGTH > 0) {
Log.v(LOG_TAG, "Writing frames");
try {
// bug fix: index into images, not samples (the two arrays differ in length)
int firstIndex = imagesIndex % images.length;
int lastIndex = (imagesIndex - 1) % images.length;
if(imagesIndex <= images.length) {
firstIndex = 0;
lastIndex = imagesIndex - 1;
}
if((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
startTime = 0;
}
if(lastIndex < firstIndex) {
lastIndex += images.length;
}
for(int i = firstIndex; i <= lastIndex; i++) {
long t = timestamps[i % timestamps.length] - startTime;
if(t >= 0) {
if(t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(images[i % images.length]);
}
}
firstIndex = samplesIndex % samples.length;
lastIndex = (samplesIndex - 1) % samples.length;
if(samplesIndex <= samples.length) {
firstIndex = 0;
lastIndex = samplesIndex - 1;
}
if(lastIndex < firstIndex) {
lastIndex += samples.length;
}
for(int i = firstIndex; i <= lastIndex; i++) {
recorder.recordSamples(samples[i % samples.length]);
}
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
recording = false;
Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public void onClick(View v) {
if(!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
if(RECORD_LENGTH > 0) {
samplesIndex = 0;
samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
for(int i = 0; i < samples.length; i++) {
samples[i] = ShortBuffer.allocate(bufferSize);
}
} else {
audioData = ShortBuffer.allocate(bufferSize);
}
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while(runAudioThread) {
if(RECORD_LENGTH > 0) {
audioData = samples[samplesIndex++ % samples.length];
audioData.position(0).limit(0);
}
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if(bufferReadResult > 0) {
Log.v(LOG_TAG, "bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if(recording) {
if(RECORD_LENGTH <= 0) {
try {
recorder.recordSamples(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}
Log.v(LOG_TAG, "AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if(audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG, "audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera", "camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch(IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters camParams = mCamera.getParameters();
List<Camera.Size> sizes = camParams.getSupportedPreviewSizes();
// Sort the list in ascending order
Collections.sort(sizes, new Comparator<Camera.Size>() {
public int compare(final Camera.Size a, final Camera.Size b) {
return a.width * a.height - b.width * b.height;
}
});
// Pick the first preview size that is equal or bigger, or pick the last (biggest) option if we cannot
// reach the initial settings of imageWidth/imageHeight.
for(int i = 0; i < sizes.size(); i++) {
if((sizes.get(i).width >= imageWidth && sizes.get(i).height >= imageHeight) || i == sizes.size() - 1) {
imageWidth = sizes.get(i).width;
imageHeight = sizes.get(i).height;
Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth + "x" + imageHeight);
break;
}
}
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG, "Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
camParams.setPreviewFrameRate(frameRate);
Log.v(LOG_TAG, "Preview Framerate: " + camParams.getPreviewFrameRate());
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch(RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if(!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if(isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if(audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
if(RECORD_LENGTH > 0) {
int i = imagesIndex++ % images.length;
yuvImage = images[i];
timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
}
/* get video data */
if(yuvImage != null && recording) {
((ByteBuffer) yuvImage.image[0].position(0)).put(data);
if(RECORD_LENGTH <= 0) {
try {
Log.v(LOG_TAG, "Writing Frame");
long t = 1000 * (System.currentTimeMillis() - startTime);
if(t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
opencv_core.IplImage toBeRotated = converter.convert(yuvImage);
toBeRotated = rotate(toBeRotated, -90);
Frame newFrame = converter.convert(toBeRotated);
opencv_core.cvReleaseImage(toBeRotated);
recorder.record(newFrame);
// recorder.record(yuvImage);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}
public static opencv_core.IplImage rotate(opencv_core.IplImage image, double angle) {
opencv_core.IplImage copy = opencv_core.cvCloneImage(image);
double radians = Math.toRadians(angle);
double sin = Math.abs(Math.sin(radians));
double cos = Math.abs(Math.cos(radians));
// The rotated canvas swaps width and height (the frame is rotated by 90 degrees).
opencv_core.CvSize size = new opencv_core.CvSize(copy.height(), copy.width());
opencv_core.IplImage rotatedImage = opencv_core.cvCreateImage(size, copy.depth(), copy.nChannels());
opencv_core.CvMat mapMatrix = opencv_core.cvCreateMat(2, 3, opencv_core.CV_32FC1);
// Define the mid point (bug fix: y must use height, not width).
opencv_core.CvPoint2D32f centerPoint = new opencv_core.CvPoint2D32f();
centerPoint.x(copy.width() / 2);
centerPoint.y(copy.height() / 2);
//Get Rotational Matrix
opencv_imgproc.cv2DRotationMatrix(centerPoint, angle, 1, mapMatrix);
opencv_imgproc.cvWarpAffine(copy, rotatedImage, mapMatrix, opencv_imgproc.CV_INTER_CUBIC + opencv_imgproc.CV_WARP_FILL_OUTLIERS, opencv_core.cvScalarAll(170));
opencv_core.cvReleaseImage(copy);
opencv_core.cvReleaseMat(mapMatrix);
return rotatedImage;
}
}
You could either do something like recorder.setVideoMetadata("rotate", "90") or transform the images with something like this:
FFmpegFrameFilter filter = new FFmpegFrameFilter("transpose=cclock_flip", imageWidth, imageHeight);
filter.start(); // required before push()/pull()
filter.push(frame);
Frame frame2;
while ((frame2 = filter.pull()) != null) {
recorder.record(frame2);
}
filter.stop();
filter.release();
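For context, here is a hedged sketch of how such a filter could slot into the onPreviewFrame() path from the question, in place of the OpenCV rotate() helper. The filter string, pixel format, and placement are assumptions rather than something the answer specifies, and exception handling is omitted for brevity:
// Create once, e.g. next to initRecorder(). "transpose=clock" rotates 90 degrees
// clockwise; pick the direction your device needs.
FFmpegFrameFilter rotateFilter = new FFmpegFrameFilter("transpose=clock", imageWidth, imageHeight);
rotateFilter.setPixelFormat(org.bytedeco.javacpp.avutil.AV_PIX_FMT_NV21); // camera preview format
rotateFilter.start();

// Inside onPreviewFrame(), instead of converter.convert() + rotate():
rotateFilter.push(yuvImage);
Frame rotated;
while ((rotated = rotateFilter.pull()) != null) {
    recorder.record(rotated);
}

// When recording stops:
rotateFilter.stop();
rotateFilter.release();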

Recording video on Android using JavaCV (Updated 2014 02 17)

I'm trying to record a video in Android using the JavaCV lib.
I need to record the video in 640x360.
I have installed everything as described in the README.txt file and followed the example below:
https://code.google.com/p/javacv/source/browse/samples/RecordActivity.java
In this example, the video size is this:
private int imageWidth = 320;
private int imageHeight = 240;
In my case, I need to record a video in 640x360 H.264.
(UPDATE) I have reverted my code and kept it exactly like the example, changing only imageWidth and imageHeight to 640x360.
Now the video I get looks like this image:
http://bergmann.net.br/img/screenshot_video_error.png
Here is my code:
import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
import java.io.IOException;
import java.nio.ShortBuffer;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import com.autosonvideo.helpers.Helpers;
import com.autosonvideo.logic.CameraHelpers;
import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
public class FFmpegRecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link;
long startTime = 0;
boolean recording = false;
private volatile FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 640;
private int imageHeight = 480;
private int finalImageWidth = 640;
private int finalImageHeight = 360;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private IplImage yuvIplimage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 1280;
private final int live_height = 960;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.main);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
CLASS_LABEL);
mWakeLock.acquire();
initLayout();
initRecorder();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE))
.getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(
R.layout.main, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width
/ live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
// ---------------------------------------
// initialize ffmpeg_recorder
// ---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG, "init recorder");
if (yuvIplimage == null) {
yuvIplimage = IplImage.create(finalImageWidth, finalImageHeight,
IPL_DEPTH_8U, 2);
Log.i(LOG_TAG, "create yuvIplimage");
}
ffmpeg_link = CameraHelpers.getOutputMediaFile(
CameraHelpers.MEDIA_TYPE_VIDEO).toString();
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, finalImageWidth,
finalImageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
}
public void startRecording() {
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
if (recorder != null && recording) {
recording = false;
Log.v(LOG_TAG,
"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
// ---------------------------------------------
// audio thread, gets and encodes audio data
// ---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process
.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
short[] audioData;
int bufferReadResult;
bufferSize = AudioRecord
.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleAudioRateInHz, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = new short[bufferSize];
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
// Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData, 0,
audioData.length);
if (bufferReadResult > 0) {
Log.v(LOG_TAG, "bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it
// never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
try {
recorder.record(ShortBuffer.wrap(audioData, 0,
bufferReadResult));
// Log.v(LOG_TAG,"recording " + 1024*i + " to " +
// 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG, "AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG, "audioRecord released");
}
}
}
// ---------------------------------------------
// camera thread, gets and encodes video data
// ---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback,
PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera", "camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.v(LOG_TAG, "Setting imageWidth: " + imageWidth
+ " imageHeight: " + imageHeight + " frameRate: "
+ frameRate);
Camera.Parameters camParams = mCamera.getParameters();
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,
"Preview Framerate: " + camParams.getPreviewFrameRate());
camParams.setPreviewFrameRate(frameRate);
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
/* get video data */
if (yuvIplimage != null && recording) {
// yuvIplimage.getByteBuffer().put(data);
final int startY = 640 * (480 - 360) / 2;
final int lenY = 640 * 360;
yuvIplimage.getByteBuffer().put(data, startY, lenY);
final int startVU = 640 * 480 + 320 * 2 * (240 - 180) / 2;
final int lenVU = 320 * 180 * 2;
yuvIplimage.getByteBuffer().put(data, startVU, lenVU);
Log.v(LOG_TAG, "Writing Frame");
try {
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
@Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set
// isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
Your camera, most likely, can provide 640x480 preview frames. The fix would be to clip this frame before it is recorded, like this:
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
/* get video data */
if (yuvIplimage != null && recording) {
ByteBuffer bb = yuvIplimage.getByteBuffer(); // resets the buffer
final int startY = imageWidth*(imageHeight-finalImageHeight)/2;
final int lenY = imageWidth*finalImageHeight;
bb.put(data, startY, lenY);
final int startVU = imageWidth*imageHeight + imageWidth*(imageHeight-finalImageHeight)/4;
final int lenVU = imageWidth* finalImageHeight/2;
bb.put(data, startVU, lenVU);
// Log.v(LOG_TAG, "Writing Frame");
try {
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.e(LOG_TAG, "problem with recorder():", e);
}
}
}
The preview frame has semi-planar YVU (NV21) format: 640x480 luminance (Y) bytes, followed by 320x240 pairs of chroma (V and U) bytes. We copy into yuvIplimage first the relevant Y rows, and after that the relevant VU pairs. Note that this is easy and fast because the width you want is the same as the native width.
Your camera and camera view should be initialized for 640x480, and the recorder for 640x360. Note that this efficient cropping is only possible when imageWidth == finalImageWidth.
FIX: it turns out that IplImage.getByteBuffer() resets the buffer position, therefore the fix is to use a temporary bb object.
Note that you will probably want to overlay the preview with a frame that "hides" the margins you crop this way: these manipulations only change the recorded frames, not the CameraView.
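To make the offset arithmetic concrete, here is a minimal sketch of the crop as a standalone helper. The class and method names are hypothetical; the offsets are exactly the ones used in the answer above, with the 640x480 -> 640x360 values in the comments:
import java.nio.ByteBuffer;

public class Nv21Crop {
    // Copies a vertically centered width x finalHeight window out of a
    // width x height NV21 preview frame. Only valid when the width is
    // unchanged, as noted above.
    static void cropNv21Vertically(ByteBuffer dst, byte[] src,
                                   int width, int height, int finalHeight) {
        // Y plane: skip (height - finalHeight) / 2 full rows at the top.
        final int startY = width * (height - finalHeight) / 2;  // 640 * 60 = 38400
        final int lenY = width * finalHeight;                   // 640 * 360 = 230400
        dst.put(src, startY, lenY);
        // The interleaved VU plane starts right after Y and has half as many
        // rows, so the skipped region is a quarter of the skipped Y bytes.
        final int startVU = width * height + width * (height - finalHeight) / 4; // 307200 + 19200
        final int lenVU = width * finalHeight / 2;                               // 115200
        dst.put(src, startVU, lenVU);
    }
}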
@Fabio: Seeing that your code is from the open-source Android Touch-To-Record library, which I have also used, here is my modified version of the onPreviewFrame method inside the CameraPreview class. It transposes and resizes the captured frame, because the captured video played sideways (the app was locked to portrait) and had greenish output.
I defined "yuvIplImage" as follows in my setCameraParams() method.
IplImage yuvIplImage = IplImage.create(mPreviewSize.height, mPreviewSize.width, opencv_core.IPL_DEPTH_8U, 2);
Also initialize your videoRecorder object as follows, passing the width as the height and vice versa.
//call initVideoRecorder() method like this to initialize videoRecorder object of FFmpegFrameRecorder class.
initVideoRecorder(strVideoPath, mPreview.getPreviewSize().height, mPreview.getPreviewSize().width, recorderParameters);
//method implementation
public void initVideoRecorder(String videoPath, int width, int height, RecorderParameters recorderParameters)
{
Log.e(TAG, "initVideoRecorder");
videoRecorder = new FFmpegFrameRecorder(videoPath, width, height, 1);
videoRecorder.setFormat(recorderParameters.getVideoOutputFormat());
videoRecorder.setSampleRate(recorderParameters.getAudioSamplingRate());
videoRecorder.setFrameRate(recorderParameters.getVideoFrameRate());
videoRecorder.setVideoCodec(recorderParameters.getVideoCodec());
videoRecorder.setVideoQuality(recorderParameters.getVideoQuality());
videoRecorder.setAudioQuality(recorderParameters.getVideoQuality());
videoRecorder.setAudioCodec(recorderParameters.getAudioCodec());
videoRecorder.setVideoBitrate(1000000);
videoRecorder.setAudioBitrate(64000);
}
This is my onPreviewFrame() method:
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
long frameTimeStamp = 0L;
if(FragmentCamera.mAudioTimestamp == 0L && FragmentCamera.firstTime > 0L)
{
frameTimeStamp = 1000L * (System.currentTimeMillis() - FragmentCamera.firstTime);
}
else if(FragmentCamera.mLastAudioTimestamp == FragmentCamera.mAudioTimestamp)
{
frameTimeStamp = FragmentCamera.mAudioTimestamp + FragmentCamera.frameTime;
}
else
{
long l2 = (System.nanoTime() - FragmentCamera.mAudioTimeRecorded) / 1000L;
frameTimeStamp = l2 + FragmentCamera.mAudioTimestamp;
FragmentCamera.mLastAudioTimestamp = FragmentCamera.mAudioTimestamp;
}
synchronized(FragmentCamera.mVideoRecordLock)
{
if(FragmentCamera.recording && FragmentCamera.rec && lastSavedframe != null && lastSavedframe.getFrameBytesData() != null && yuvIplImage != null)
{
FragmentCamera.mVideoTimestamp += FragmentCamera.frameTime;
if(lastSavedframe.getTimeStamp() > FragmentCamera.mVideoTimestamp)
{
FragmentCamera.mVideoTimestamp = lastSavedframe.getTimeStamp();
}
try
{
yuvIplImage.getByteBuffer().put(lastSavedframe.getFrameBytesData());
IplImage bgrImage = IplImage.create(mPreviewSize.width, mPreviewSize.height, opencv_core.IPL_DEPTH_8U, 4);// In my case, mPreviewSize.width = 1280 and mPreviewSize.height = 720
IplImage transposed = IplImage.create(mPreviewSize.height, mPreviewSize.width, yuvIplImage.depth(), 4);
IplImage squared = IplImage.create(mPreviewSize.height, mPreviewSize.height, yuvIplImage.depth(), 4);
int[] _temp = new int[mPreviewSize.width * mPreviewSize.height];
Util.YUV_NV21_TO_BGR(_temp, data, mPreviewSize.width, mPreviewSize.height);
bgrImage.getIntBuffer().put(_temp);
opencv_core.cvTranspose(bgrImage, transposed);
opencv_core.cvFlip(transposed, transposed, 1);
opencv_core.cvSetImageROI(transposed, opencv_core.cvRect(0, 0, mPreviewSize.height, mPreviewSize.height));
opencv_core.cvCopy(transposed, squared, null);
opencv_core.cvResetImageROI(transposed);
videoRecorder.setTimestamp(lastSavedframe.getTimeStamp());
videoRecorder.record(squared);
}
catch(com.googlecode.javacv.FrameRecorder.Exception e)
{
e.printStackTrace();
}
}
lastSavedframe = new SavedFrames(data, frameTimeStamp);
}
}
This code uses a method "YUV_NV21_TO_BGR", which I found from this link.
Basically, this method resolves what I call "the Green Devil problem on Android", just like yours. I was having the same issue and wasted almost 3-4 days. Before adding the "YUV_NV21_TO_BGR" method, when I just took the transpose of YuvIplImage (and, more importantly, a combination of transpose and flip, with or without resizing), there was greenish output in the resulting video. This "YUV_NV21_TO_BGR" method saved the day. Thanks to @David Han from the above Google Groups thread.
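The linked helper is not reproduced in the answer, so for readers without access to it: a standard NV21-to-ARGB conversion looks roughly like the sketch below. This is the well-known integer-math decode from the Android samples; the actual Util.YUV_NV21_TO_BGR presumably differs at least in channel order, so treat it as an illustration only:
public class Nv21Decode {
    /** Decodes an NV21 (YVU semi-planar) frame into packed ARGB ints. */
    static void nv21ToArgb(int[] argb, byte[] yuv, int width, int height) {
        final int frameSize = width * height;
        for (int j = 0, yp = 0; j < height; j++) {
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
            for (int i = 0; i < width; i++, yp++) {
                int y = (0xff & yuv[yp]) - 16;
                if (y < 0) y = 0;
                if ((i & 1) == 0) {              // V comes before U in NV21
                    v = (0xff & yuv[uvp++]) - 128;
                    u = (0xff & yuv[uvp++]) - 128;
                }
                int y1192 = 1192 * y;
                int r = Math.max(0, Math.min(262143, y1192 + 1634 * v));
                int g = Math.max(0, Math.min(262143, y1192 - 833 * v - 400 * u));
                int b = Math.max(0, Math.min(262143, y1192 + 2066 * u));
                argb[yp] = 0xff000000 | ((r << 6) & 0xff0000)
                        | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
            }
        }
    }
}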
Use this link to resolve the issue. The issue is with the rotation of the image; the YUV image handling has already been done there.
