FFmpeg - Transition Effect Between Images of Video Slideshow - Android

Trying to get the perfect command for image animation effects.
This is my command to create a video from images, but I want to add an image transition effect (for example, a fade-in/fade-out) between two images.
public static String[] cmdCreateVideo(int frame, String videoSource, String outPut) {
// show each image a bit longer when there are 20 or fewer of them
float duration_frame = (frame <= 20) ? 1.0F : 0.6F;
// note: the scale filter separates width and height with ':', not 'x'
String cmd = "-framerate 1/" + duration_frame + " -start_number 0 -i " + videoSource + " -vcodec mpeg4 -q:v 3 -r 20 -vf scale=480:800 " + outPut;
// splitting on spaces breaks if either path contains a space
return cmd.split(" ");
}
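If your FFmpeg build includes the xfade filter (FFmpeg 4.3 or newer, which is an assumption about your binary), one way to get a cross-fade between two stills is a filter graph like this sketch, where the image names, the 3-second display times and the 1-second fade are placeholders:
ffmpeg -loop 1 -t 3 -i img0.jpg -loop 1 -t 3 -i img1.jpg -filter_complex "[0][1]xfade=transition=fade:duration=1:offset=2,format=yuv420p" -r 20 out.mp4
For more than two images you chain xfade pairwise ([0][1]xfade=...[v1]; [v1][2]xfade=...), increasing each offset by the accumulated duration; on older builds you can approximate the same result with per-segment fade filters plus concat.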

In this project you can have 4 types of effect:
No effect
FadeIn
Rotate
SlideIn
Add your images to a Parcelable[]; these will be your input images (you can also use a Bitmap array or other structures).
Pass these images to the effect activity class (a launch snippet follows below):
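For example, assuming the images came from a picker as a hypothetical selectedUris array (android.net.Uri implements Parcelable):
Parcelable[] uris = selectedUris; // e.g. content Uris returned by a picker
Intent intent = new Intent(this, EffectActivity.class);
intent.putExtra("parcelableUris", uris); // read back via getParcelableArrayExtra
startActivity(intent);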
EffectActivity.java
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.target.Target;
import com.hini.slideshow.R;
import com.hini.slideshow.SlideApplication;
import com.hini.slideshow.draw.SlideShow;
import com.hini.slideshow.encoding.SlideEncoder;
import java.util.concurrent.ExecutionException;
/**
* Created by PrakashSaurav.
*/
public class EffectActivity extends Activity implements View.OnClickListener {
private static final String TAG = "EffectActivity";
private ViewPager mViewPager;
private Button mBtnBitRate, mBtnTime, mBtnEffect, mBtnNext, mBtnPrev;
private TextView mTvBitRate, mTvTime, mTvEffect;
private Parcelable[] mParcelableUris;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_effect);
mParcelableUris = getIntent().getParcelableArrayExtra("parcelableUris");
if (mParcelableUris == null) {
Toast.makeText(getApplicationContext(), "There is no path info for the photo.", Toast.LENGTH_SHORT).show();
return;
}
mViewPager = (ViewPager) findViewById(R.id.pager_image);
mViewPager.setAdapter(new CustomPageAdapter());
mBtnBitRate = (Button) findViewById(R.id.btn_bgm);
mTvBitRate = (TextView) findViewById(R.id.tv_bitrate);
mBtnTime = (Button) findViewById(R.id.btn_time);
mTvTime = (TextView) findViewById(R.id.tv_time);
mBtnEffect = (Button) findViewById(R.id.btn_effect);
mTvEffect = (TextView) findViewById(R.id.tv_effect);
mBtnNext = (Button) findViewById(R.id.btn_next);
mBtnPrev = (Button) findViewById(R.id.btn_prev);
mBtnBitRate.setOnClickListener(this);
mBtnTime.setOnClickListener(this);
mBtnEffect.setOnClickListener(this);
mBtnNext.setOnClickListener(this);
mBtnPrev.setOnClickListener(this);
}
int timeCheck = 0;
int effectCheck = 0;
boolean isClicked;
@Override
public void onClick(View v) {
// BGM Setting
if (v == mBtnBitRate) {
isClicked = !isClicked;
int bitRate;
if (isClicked)
bitRate = 2000 * 1024;
else
bitRate = 500 * 1024;
SlideApplication.BIT_RATE = bitRate;
mTvBitRate.setText(String.valueOf(bitRate / 1024) + "kbps");
Toast.makeText(getApplicationContext(), SlideApplication.BIT_RATE / 1024 + "kbps", Toast.LENGTH_SHORT).show();
}
// Time Setting
else if (v == mBtnTime) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
final String[] items = {"2s", "3s", "5s", "10s"};
if (SlideApplication.SLIDE_TIME == 2) timeCheck = 0;
else if (SlideApplication.SLIDE_TIME == 3) timeCheck = 1;
else if (SlideApplication.SLIDE_TIME == 5) timeCheck = 2;
else if (SlideApplication.SLIDE_TIME == 10) timeCheck = 3;
builder.setSingleChoiceItems(items, timeCheck, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
timeCheck = which;
Toast.makeText(getApplicationContext(), items[which], Toast.LENGTH_SHORT).show();
}
});
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
builder.setPositiveButton("Confirm", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// items are "2s", "3s", ... so strip the trailing "s" (the original split on the Korean "초")
SlideApplication.SLIDE_TIME = Integer.parseInt(items[timeCheck].split("s")[0]);
mTvTime.setText(Integer.toString(SlideApplication.SLIDE_TIME));
}
});
AlertDialog dialog = builder.create();
dialog.show();
}
// Effect Setting
else if (v == mBtnEffect) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
final String[] items = {"None", "FadeIn", "Rotate", "SlideIn"};
if (SlideApplication.SLIDE_EFFECT == SlideShow.NONE) effectCheck = 0;
else if (SlideApplication.SLIDE_EFFECT == SlideShow.FADE_IN) effectCheck = 1;
else if (SlideApplication.SLIDE_EFFECT == SlideShow.ROTATE) effectCheck = 2;
else if (SlideApplication.SLIDE_EFFECT == SlideShow.SLIDE_IN) effectCheck = 3;
builder.setSingleChoiceItems(items, effectCheck, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
effectCheck = which;
Toast.makeText(getApplicationContext(), items[which], Toast.LENGTH_SHORT).show();
}
});
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
builder.setPositiveButton("Confirm", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
SlideApplication.SLIDE_EFFECT = effectCheck;
mTvEffect.setText(items[SlideApplication.SLIDE_EFFECT]);
}
});
AlertDialog dialog = builder.create();
dialog.show();
}
// Next
else if (v == mBtnNext) {
new BitmapChangerTask().execute();
}
// Prev
else if (v == mBtnPrev) {
finish();
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == 1) {
if (resultCode == RESULT_OK) {
if (data != null && data.getStringExtra("bgm_path") != null) {
SlideApplication.BGM_PATH = data.getStringExtra("bgm_path");
}
}
}
}
private class BitmapChangerTask extends AsyncTask<Void, Void, Void> {
@Override
protected Void doInBackground(Void... params) {
for (int i = 0; i < mParcelableUris.length; i++) {
try {
Bitmap bm = Glide.with(getApplicationContext()).load(mParcelableUris[i].toString())
.asBitmap()
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(Target.SIZE_ORIGINAL, Target.SIZE_ORIGINAL)
.get();
int width = bm.getWidth();
int height = bm.getHeight();
// Land ( 1280 x 720 )
if (width > height) {
bm = Bitmap.createScaledBitmap(bm, SlideEncoder.WIDTH, ((SlideEncoder.WIDTH * height) / width), true);
}
// Port ( 720 x 1280 )
else if (width < height) {
bm = Bitmap.createScaledBitmap(bm, ((SlideEncoder.HEIGHT * width) / height), SlideEncoder.HEIGHT, true);
}
// Square ( 800 x 800 )
else {
bm = Bitmap.createScaledBitmap(bm, SlideEncoder.WIDTH, SlideEncoder.HEIGHT, true);
}
SlideApplication.bitmapList.add(bm);
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
Intent i = new Intent(getApplicationContext(), EncodingActivity.class);
startActivity(i);
}
}
private class CustomPageAdapter extends PagerAdapter {
@Override
public int getCount() {
return mParcelableUris.length;
}
@Override
public int getItemPosition(Object object) {
return POSITION_NONE;
}
@Override
public Object instantiateItem(ViewGroup container, int position) {
ImageView iv = new ImageView(getApplicationContext());
Glide.with(getApplicationContext())
.load(mParcelableUris[position].toString())
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(iv);
try {
container.addView(iv);
} catch (IllegalStateException ise) {
ise.printStackTrace();
}
return iv;
}
@Override
public void destroyItem(ViewGroup container, int position, Object object) {
container.removeView((View) object);
}
@Override
public boolean isViewFromObject(View view, Object object) {
return view == object;
}
}
}
activity_effect.xml
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<android.support.v4.view.ViewPager
android:id="@+id/pager_image"
android:layout_width="match_parent"
android:layout_height="350dp">
</android.support.v4.view.ViewPager>
<Button
android:id="@+id/btn_bgm"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@+id/pager_image"
android:layout_marginStart="80dp"
android:layout_marginTop="30dp"
android:text="BitRate"/>
<TextView
android:id="@+id/tv_bitrate"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBaseline="@+id/btn_bgm"
android:layout_alignParentEnd="true"
android:layout_below="@+id/pager_image"
android:layout_marginEnd="80dp"
android:singleLine="true"
android:ellipsize="marquee"
android:text="500Kbps"
android:textSize="20sp"/>
<Button
android:id="@+id/btn_time"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@+id/btn_bgm"
android:layout_marginStart="80dp"
android:text="time"/>
<TextView
android:id="@+id/tv_time"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBaseline="@+id/btn_time"
android:layout_alignParentEnd="true"
android:layout_below="@+id/pager_image"
android:layout_marginEnd="80dp"
android:text="2s"
android:textSize="20sp"/>
<Button
android:id="@+id/btn_effect"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@+id/btn_time"
android:layout_marginStart="80dp"
android:text="EFFECT"/>
<TextView
android:id="@+id/tv_effect"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBaseline="@+id/btn_effect"
android:layout_alignParentEnd="true"
android:layout_below="@+id/pager_image"
android:layout_marginEnd="80dp"
android:text="none"
android:textSize="20sp"/>
<Button
android:id="@+id/btn_prev"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_alignParentStart="true"
android:text="Previous"/>
<Button
android:id="@+id/btn_next"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_alignParentEnd="true"
android:text="NEXT"/>
</RelativeLayout>
EncodingActivity.java
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import com.hini.slideshow.SlideApplication;
import com.hini.slideshow.draw.SlideShow;
import com.hini.slideshow.encoding.SlideEncoder;
import java.io.File;
import java.io.IOException;
public class EncodingActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
new EncodingTask().execute();
}
class EncodingTask extends AsyncTask<Void, String, Void> {
File f = new File(Environment.getExternalStorageDirectory().getAbsolutePath()
+ "/test_" + SlideApplication.BIT_RATE / 1024 + "_" + SlideApplication.SLIDE_EFFECT + ".mp4");
ProgressDialog dialog;
@Override
protected void onPreExecute() {
super.onPreExecute();
dialog = new ProgressDialog(EncodingActivity.this);
dialog.setTitle("Generating Video..");
dialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
dialog.setCancelable(false);
dialog.show();
}
@Override
protected Void doInBackground(Void... params) {
long startTime = System.currentTimeMillis();
if (f.exists()) f.delete();
SlideEncoder slideEncoder = new SlideEncoder();
try {
slideEncoder.prepareEncoder(f);
Bitmap prevBm = null;
dialog.setMax(SlideApplication.bitmapList.size());
for (int idx = 0; idx < SlideApplication.bitmapList.size(); idx++) {
publishProgress(String.valueOf(idx + 1));
SlideShow.init();
if (idx > 0) prevBm = SlideApplication.bitmapList.get(idx - 1);
Bitmap curBm = SlideApplication.bitmapList.get(idx);
for (int i = 0; i < (SlideApplication.FRAME_PER_SEC * SlideApplication.SLIDE_TIME); i++) {
// Drain any data from the encoder into the muxer.
slideEncoder.drainEncoder(false);
// Generate a frame and submit it.
slideEncoder.generateFrame(prevBm, curBm);
}
}
slideEncoder.drainEncoder(true);
} catch (IOException e) {
e.printStackTrace();
} finally {
slideEncoder.releaseEncoder();
}
Log.e("TAG", "total time : " + (System.currentTimeMillis() - startTime));
return null;
}
@Override
protected void onProgressUpdate(String... values) {
super.onProgressUpdate(values);
dialog.setProgress(Integer.parseInt(values[0]));
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
if (dialog.isShowing()) dialog.dismiss();
for (Bitmap bm : SlideApplication.bitmapList) bm.recycle();
SlideApplication.bitmapList.clear();
Intent i = new Intent(EncodingActivity.this, MainActivity.class);
i.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(i);
}
}
}
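EncodingTask writes the .mp4 to external storage, so the manifest needs the storage permission (and on Android 6.0+ it must also be granted at runtime):
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />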
SlideEncoder.java
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.view.Surface;
import com.hini.slideshow.SlideApplication;
import com.hini.slideshow.draw.SlideShow;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
public class SlideEncoder {
private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;
public static final int WIDTH = 800;
public static final int HEIGHT = 800;
private static final int IFRAME_INTERVAL = 3;
private MediaCodec.BufferInfo mBufferInfo;
private MediaCodec mEncoder;
private Surface mInputSurface;
private MediaMuxer mMuxer;
private int mTrackIndex;
private boolean mMuxerStarted;
private long mFakePts;
void prepareEncoder(File outputFile) throws IOException {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, SlideApplication.BIT_RATE);
format.setInteger(MediaFormat.KEY_FRAME_RATE, SlideApplication.FRAME_PER_SEC);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();
mEncoder.start();
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
mMuxer = new MediaMuxer(outputFile.toString(),
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mTrackIndex = -1;
mMuxerStarted = false;
}
/**
* Releases the MediaCodec, input Surface, and MediaMuxer.
*/
public void releaseEncoder() {
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mInputSurface != null) {
mInputSurface.release();
mInputSurface = null;
}
if (mMuxer != null) {
mMuxer.stop();
mMuxer.release();
mMuxer = null;
}
}
/**
* Extracts all pending data from the encoder.
* <p/>
* If endOfStream is not set, this returns when there is no more data to drain. If it
* is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
* Calling this with endOfStream set should be done once, right before stopping the muxer.
*/
public void drainEncoder(boolean endOfStream) {
final int TIMEOUT_USEC = 10000;
if (endOfStream) {
mEncoder.signalEndOfInputStream();
}
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
} // else: no output yet, keep looping until EOS appears on the output
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = mEncoder.getOutputFormat();
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
} else if (encoderStatus < 0) {
// unexpected negative status from dequeueOutputBuffer; ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
if (!mMuxerStarted) {
throw new RuntimeException("muxer hasn't started");
}
// adjust the ByteBuffer values to match BufferInfo
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mBufferInfo.presentationTimeUs = mFakePts;
long timeStampLength = 1000000L / SlideApplication.FRAME_PER_SEC;
mFakePts += timeStampLength;
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
// end of stream reached (unexpected if endOfStream was not requested)
break; // out of while
}
}
}
}
/**
* Generates a frame, writing to the Surface via the "software" API (lock/unlock).
* <p/>
* There's no way to set the time stamp.
*/
public void generateFrame(Bitmap prevBm, Bitmap curBm) {
Canvas canvas = mInputSurface.lockCanvas(null);
try {
SlideShow.draw(canvas, prevBm, curBm);
} finally {
mInputSurface.unlockCanvasAndPost(canvas);
}
}
}
SlideShow.java
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import com.hini.slideshow.SlideApplication;
import com.hini.slideshow.encoding.SlideEncoder;
public class SlideShow {
// effect
public static final int NONE = 0;
public static final int FADE_IN = 1;
public static final int ROTATE = 2;
public static final int SLIDE_IN = 3;
// draw positions
private static int curStartX;
private static int curStartY;
private static int prevStartX;
private static int prevStartY;
//Variables used for effect
private static float in_alpha = 0f;
private static float rotate = 0;
private static int slideX = SlideEncoder.WIDTH;
private static int slideCount = 1;
private static float out_alpha = 255f;
/**
* @param canvas canvas
* @param prevBm background bitmap
* @param curBm foreground bitmap
*/
public static void draw(Canvas canvas, Bitmap prevBm, Bitmap curBm) {
if (canvas == null || curBm == null) return;
setLocation(prevBm, curBm);
if (SlideApplication.SLIDE_EFFECT == ROTATE && prevBm == null)
canvas.drawColor(Color.BLACK);
if (prevBm != null) drawFadeOut(canvas, prevBm);
switch (SlideApplication.SLIDE_EFFECT) {
case NONE:
drawNone(canvas, curBm);
break;
case FADE_IN:
drawFadeIn(canvas, curBm);
break;
case ROTATE:
drawRotate(canvas, curBm);
break;
case SLIDE_IN:
drawSlideIn(canvas, curBm);
break;
default:
throw new IllegalStateException("unexpected state");
}
}
/**
* Adjusts the draw position according to the bitmap's shape.
* @param prevBm background bitmap
* @param curBm foreground bitmap
*/
private static void setLocation(Bitmap prevBm, Bitmap curBm) {
if (curBm != null) {
int cWidth = curBm.getWidth();
int cHeight = curBm.getHeight();
if (cWidth > cHeight) {
curStartX = 0;
curStartY = (SlideEncoder.HEIGHT - cHeight) / 2;
} else if (cHeight > cWidth) {
curStartX = (SlideEncoder.WIDTH - cWidth) / 2;
curStartY = 0;
} else {
curStartX = 0;
curStartY = 0;
}
}
if (prevBm != null) {
int pWidth = prevBm.getWidth();
int pHeight = prevBm.getHeight();
if (pWidth > pHeight) {
prevStartX = 0;
prevStartY = (SlideEncoder.HEIGHT - pHeight) / 2;
} else if (pHeight > pWidth) {
prevStartX = (SlideEncoder.WIDTH - pWidth) / 2;
prevStartY = 0;
} else {
prevStartX = 0;
prevStartY = 0;
}
}
}
/**
* Resets the per-slide effect state before each new slide.
*/
public static void init() {
in_alpha = 0f;
out_alpha = 255f;
rotate = 0f;
slideX = 800;
slideCount = 1;
curStartX = 0;
curStartY = 0;
prevStartX = 0;
prevStartY = 0;
}
/**
* drawNone
*
* @param c canvas
* @param bm bitmap
*/
private static void drawNone(Canvas c, Bitmap bm) {
c.drawBitmap(bm, curStartX, curStartY, null);
}
/** Fade-in effect */
private static void drawFadeIn(Canvas c, Bitmap bm) {
Paint p = new Paint();
// float division so Math.ceil has an effect (255 / 30 in int math truncates to 8)
int ratio = (int) Math.ceil(255f / SlideApplication.FRAME_PER_SEC);
in_alpha += ratio;
if (in_alpha > 255f) in_alpha = 255f;
p.setAlpha((int) in_alpha);
c.drawBitmap(bm, curStartX, curStartY, p);
}
/** Rotate effect */
private static void drawRotate(Canvas c, Bitmap bm) {
Matrix matrix = new Matrix();
matrix.preTranslate(curStartX, curStartY);
// float division: degrees of rotation per frame
float ratio = 360f / SlideApplication.FRAME_PER_SEC;
rotate += Math.ceil(ratio);
if (rotate > 360) rotate = 360;
matrix.postRotate(rotate, SlideEncoder.WIDTH / 2, SlideEncoder.HEIGHT / 2);
c.drawBitmap(bm, matrix, null);
}
/**
* drawSlideIn
*
* @param c canvas
* @param bm bitmap
*/
private static void drawSlideIn(Canvas c, Bitmap bm) {
Matrix matrix = new Matrix();
int ratio = 1;
if (slideCount < 30) ratio = (int) Math.pow(slideCount++, 1.4);
slideX -= ratio;
if (slideX < curStartX) slideX = curStartX;
matrix.setTranslate(slideX, curStartY);
c.drawBitmap(bm, matrix, null);
}
/**
* Fade-out effect (applies only to the picture drawn behind).
*
* @param c canvas
* @param bm bitmap
*/
private static void drawFadeOut(Canvas c, Bitmap bm) {
c.drawColor(Color.BLACK);
Paint p = new Paint();
// float division, as in drawFadeIn
int ratio = (int) Math.ceil(255f / SlideApplication.FRAME_PER_SEC);
out_alpha -= ratio;
if (out_alpha < 0f) out_alpha = 0f;
p.setAlpha((int) out_alpha);
c.drawBitmap(bm, prevStartX, prevStartY, p);
}
}
SlideApplication.java (holds the shared settings)
import android.app.Application;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Parcelable;
import android.view.View;
import com.hini.slideshow.draw.SlideShow;
import java.util.ArrayList;
public class SlideApplication extends Application {
public static String BGM_PATH = "";
public static int SLIDE_TIME = 2;
public static int SLIDE_EFFECT = SlideShow.NONE;
public static int BIT_RATE = 2000 * 1024;
public static int FRAME_PER_SEC = 30;
public static ArrayList<Bitmap> bitmapList = new ArrayList<>();
public static int overlayEffect=0;
public static Parcelable[] images;
public static String deleteFolderPath;
}
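An Application subclass only takes effect if it is registered in AndroidManifest.xml. A minimal sketch, assuming the com.hini.slideshow package used by the other files:
<application
android:name="com.hini.slideshow.SlideApplication"
android:label="SlideShow">
<!-- activities ... -->
</application>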

Related

IP stream viewer app shuts itself down, though it compiles with no errors

My app keeps shutting itself down, but the code appears to have no errors. I think I'm missing something but I'm unable to see it.
Here, I took this code as an example: Android and MJPEG
While USB debugging I got:
Warning: debug info can be unavailable. Please close other application using ADB: Monitor, DDMS, Eclipse
Also, in MjpegSample the imports of MjpegView and MjpegInputStream appear to be "unused".
MjpegSample
package com.example.rauls.webviewpfcactivity;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import com.example.rauls.webviewpfcactivity.MjpegInputStream;
import com.example.rauls.webviewpfcactivity.MjpegView;
/* Android Studio flags both imports as unused because MjpegView and
MjpegInputStream are in the same package, so no import is needed. */
public class MjpegSample extends Activity {
private MjpegView mv;
private static final int MENU_QUIT = 1;
/* Creates the menu items */
public boolean onCreateOptionsMenu(Menu menu) {
menu.add(0, MENU_QUIT, 0, "Quit");
return true;
}
/* Handles item selections */
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case MENU_QUIT:
finish();
return true;
}
return false;
}
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
// camera stream URL (a LAN address; adjust to your camera)
String URL = "http://192.168.1.64:8081/";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN, WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(false);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
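A likely cause of the silent shutdown: MjpegInputStream.read(URL) performs its HTTP request on the calling thread (see DefaultHttpClient.execute below), and network I/O on the main thread throws NetworkOnMainThreadException on API 11+. A minimal sketch of onCreate that moves the read off the main thread (field names match the code above):
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
requestWindowFeature(Window.FEATURE_NO_TITLE);
mv = new MjpegView(this);
setContentView(mv);
new Thread(new Runnable() {
@Override
public void run() {
// network I/O off the main thread
final MjpegInputStream stream = MjpegInputStream.read("http://192.168.1.64:8081/");
runOnUiThread(new Runnable() {
@Override
public void run() {
if (stream != null) {
mv.setSource(stream); // also starts playback
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
}
}
});
}
}).start();
}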
MjpegView
package com.example.rauls.webviewpfcactivity;
/**
* Created by RaulS on 19/12/2015.
*/
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) { /* frame read failed; skip and retry */ }
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) {
super(context);
init(context);
}
public void surfaceCreated(SurfaceHolder holder) {
surfaceDone = true;
}
public void showFps(boolean b) {
showFps = b;
}
public void setSource(MjpegInputStream source) {
mIn = source;
startPlayback();
}
public void setOverlayPaint(Paint p) {
overlayPaint = p;
}
public void setOverlayTextColor(int c) {
overlayTextColor = c;
}
public void setOverlayBackgroundColor(int c) {
overlayBackgroundColor = c;
}
public void setOverlayPosition(int p) {
ovlPos = p;
}
public void setDisplayMode(int s) {
displayMode = s;
}
}
MjpegInputStream
package com.example.rauls.webviewpfcactivity;
/**
* Created by RaulS on 19/12/2015.
*/
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSeqeunce(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSeqeunce(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
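Note that DefaultHttpClient and the rest of org.apache.http were deprecated in API 22 and removed from the Android SDK in 6.0 (they are only available via the org.apache.http.legacy library). A sketch of the same method using HttpURLConnection instead (add imports for java.net.HttpURLConnection and java.net.URL):
public static MjpegInputStream read(String url) {
try {
// plain HttpURLConnection replaces the removed Apache client
HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
conn.connect();
return new MjpegInputStream(conn.getInputStream());
} catch (IOException e) {
return null;
}
}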
main.xml
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical" android:layout_width="match_parent"
android:layout_height="match_parent">
<!--WebView
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/webview"
android:layout_width="wrap_content"
android:layout_height="wrap_content" /-->
<!--VideoView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/videoView"
android:layout_gravity="center_horizontal" /-->
<SurfaceView
android:id="@+id/surfaceView"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</LinearLayout>

JavaCV video recorder orientation is not proper in portrait mode

Hi, I am using https://github.com/bytedeco/javacv/ for recording video.
In landscape mode the orientation is fine, but when I change the orientation to portrait mode the video is rotated -90 degrees.
Does anybody have an idea what I may be doing wrong? Here is the code.
package org.bytedeco.javacv_android_example.record;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacpp.opencv_imgproc;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.javacv_android_example.R;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class RecordActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
/* The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
long startTime = 0;
boolean recording = false;
volatile boolean runAudioThread = true;
Frame[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;
private PowerManager.WakeLock mWakeLock;
private File ffmpeg_link = new File(Environment.getExternalStorageDirectory(), "stream.mp4");
private FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private Frame yuvImage = null;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
OpenCVFrameConverter.ToIplImage converter = new OpenCVFrameConverter.ToIplImage();
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
setContentView(R.layout.activity_record);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
}
@Override
protected void onResume() {
super.onResume();
if(mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if(mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if(cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if(mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if(keyCode == KeyEvent.KEYCODE_BACK) {
if(recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.activity_record, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if(1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "camera open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "camera preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG, "init recorder");
if(RECORD_LENGTH > 0) {
imagesIndex = 0;
images = new Frame[RECORD_LENGTH * frameRate];
timestamps = new long[images.length];
for(int i = 0; i < images.length; i++) {
images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
timestamps[i] = -1;
}
} else if(yuvImage == null) {
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
Log.i(LOG_TAG, "create yuvImage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch(InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if(recorder != null && recording) {
if(RECORD_LENGTH > 0) {
Log.v(LOG_TAG, "Writing frames");
try {
int firstIndex = imagesIndex % images.length; // was samples.length, likely a typo; this indexes the images ring buffer
int lastIndex = (imagesIndex - 1) % images.length;
if(imagesIndex <= images.length) {
firstIndex = 0;
lastIndex = imagesIndex - 1;
}
if((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
startTime = 0;
}
if(lastIndex < firstIndex) {
lastIndex += images.length;
}
for(int i = firstIndex; i <= lastIndex; i++) {
long t = timestamps[i % timestamps.length] - startTime;
if(t >= 0) {
if(t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(images[i % images.length]);
}
}
firstIndex = samplesIndex % samples.length;
lastIndex = (samplesIndex - 1) % samples.length;
if(samplesIndex <= samples.length) {
firstIndex = 0;
lastIndex = samplesIndex - 1;
}
if(lastIndex < firstIndex) {
lastIndex += samples.length;
}
for(int i = firstIndex; i <= lastIndex; i++) {
recorder.recordSamples(samples[i % samples.length]);
}
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
recording = false;
Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public void onClick(View v) {
if(!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
if(RECORD_LENGTH > 0) {
samplesIndex = 0;
samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
for(int i = 0; i < samples.length; i++) {
samples[i] = ShortBuffer.allocate(bufferSize);
}
} else {
audioData = ShortBuffer.allocate(bufferSize);
}
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while(runAudioThread) {
if(RECORD_LENGTH > 0) {
audioData = samples[samplesIndex++ % samples.length];
audioData.position(0).limit(0);
}
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if(bufferReadResult > 0) {
Log.v(LOG_TAG, "bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if(recording) {
if(RECORD_LENGTH <= 0) {
try {
recorder.recordSamples(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}
Log.v(LOG_TAG, "AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if(audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG, "audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera", "camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch(IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters camParams = mCamera.getParameters();
List<Camera.Size> sizes = camParams.getSupportedPreviewSizes();
// Sort the list in ascending order
Collections.sort(sizes, new Comparator<Camera.Size>() {
public int compare(final Camera.Size a, final Camera.Size b) {
return a.width * a.height - b.width * b.height;
}
});
// Pick the first preview size that is equal or bigger, or pick the last (biggest) option if we cannot
// reach the initial settings of imageWidth/imageHeight.
for(int i = 0; i < sizes.size(); i++) {
if((sizes.get(i).width >= imageWidth && sizes.get(i).height >= imageHeight) || i == sizes.size() - 1) {
imageWidth = sizes.get(i).width;
imageHeight = sizes.get(i).height;
Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth + "x" + imageHeight);
break;
}
}
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG, "Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
camParams.setPreviewFrameRate(frameRate);
Log.v(LOG_TAG, "Preview Framerate: " + camParams.getPreviewFrameRate());
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch(RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if(!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if(isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if(audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
if(RECORD_LENGTH > 0) {
int i = imagesIndex++ % images.length;
yuvImage = images[i];
timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
}
/* get video data */
if(yuvImage != null && recording) {
((ByteBuffer) yuvImage.image[0].position(0)).put(data);
if(RECORD_LENGTH <= 0) {
try {
Log.v(LOG_TAG, "Writing Frame");
long t = 1000 * (System.currentTimeMillis() - startTime);
if(t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
opencv_core.IplImage toBeRotated = converter.convert(yuvImage);
toBeRotated = rotate(toBeRotated, -90);
Frame newFrame = converter.convert(toBeRotated);
opencv_core.cvReleaseImage(toBeRotated);
recorder.record(newFrame);
// recorder.record(yuvImage);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}
public static opencv_core.IplImage rotate(opencv_core.IplImage image, double angle) {
opencv_core.IplImage copy = opencv_core.cvCloneImage(image);
double radians = Math.toRadians(angle);
double sin = Math.abs(Math.sin(radians));
double cos = Math.abs(Math.cos(radians));
int newWidth = (int) (copy.width() * cos + copy.height() * sin);
int newHeight = (int) (copy.width() * sin + copy.height() * cos);
int[] newWidthHeight = {newWidth, newHeight};
opencv_core.CvSize size = new opencv_core.CvSize(copy.height(), copy.width());
opencv_core.IplImage rotatedImage = opencv_core.cvCreateImage(size, copy.depth(), copy.nChannels());
opencv_core.CvMat mapMatrix = opencv_core.cvCreateMat( 2, 3, opencv_core.CV_32FC1 );
//Define mid point
opencv_core.CvPoint2D32f centerPoint = new opencv_core.CvPoint2D32f();
centerPoint.x(copy.width() / 2);
centerPoint.y(copy.height() / 2); // was width()/2, which mis-centers the rotation
//Get Rotational Matrix
opencv_imgproc.cv2DRotationMatrix(centerPoint, angle, 1, mapMatrix);
opencv_imgproc.cvWarpAffine(copy, rotatedImage, mapMatrix, opencv_imgproc.CV_INTER_CUBIC + opencv_imgproc.CV_WARP_FILL_OUTLIERS, opencv_core.cvScalarAll(170));
opencv_core.cvReleaseImage(copy);
opencv_core.cvReleaseMat(mapMatrix);
return rotatedImage;
}
}
You could either do something like recorder.setVideoMetadata("rotate", "90") or transform the images with something like this:
FFmpegFrameFilter filter = new FFmpegFrameFilter("transpose=cclock_flip", imageWidth, imageHeight);
filter.start(); // required before push()/pull()
filter.push(frame);
Frame frame2;
while ((frame2 = filter.pull()) != null) {
recorder.record(frame2);
}
filter.stop(); // when you are done with the filter
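If the result comes out rotated the wrong way, try one of the other transpose values (transpose=clock is 90° clockwise, transpose=cclock is 90° counter-clockwise); cclock_flip additionally mirrors the frame.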

Android VideoView RTSP delay

I am opening an RTSP stream from an IP camera in my Android application through VideoView. The problem is that it has a huge delay, more than 20 seconds, although when viewing the camera from an ordinary PC via its browser there is no such delay. Any ideas, please?
My Code:
String path="rtsp://192.168.1.20/3gpp";
myVideoView.setVideoURI(Uri.parse(path));
myVideoView.setMediaController(new MediaController(this));
myVideoView.requestFocus();
myVideoView.start();
Use an MJPEG stream instead of RTSP. After lots of research, I've found an MJPEG viewer class that plays a live stream in near real time (depending on your network connection).
Here's the code: Android and MJPEG
You can do the following:
MjpegSample Class
package de.mjpegsample;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import de.mjpegsample.MjpegView.MjpegInputStream;
import de.mjpegsample.MjpegView.MjpegView;
public class MjpegSample extends Activity {
private MjpegView mv;
private static final int MENU_QUIT = 1;
/* Creates the menu items */
public boolean onCreateOptionsMenu(Menu menu) {
menu.add(0, MENU_QUIT, 0, "Quit");
return true;
}
/* Handles item selections */
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case MENU_QUIT:
finish();
return true;
}
return false;
}
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://gamic.dnsalias.net:7001/img/video.mjpeg";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN, WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(false);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
MjpegView
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) { /* frame read failed; skip and retry */ }
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) {
super(context);
init(context);
}
public void surfaceCreated(SurfaceHolder holder) {
surfaceDone = true;
}
public void showFps(boolean b) {
showFps = b;
}
public void setSource(MjpegInputStream source) {
mIn = source;
startPlayback();
}
public void setOverlayPaint(Paint p) {
overlayPaint = p;
}
public void setOverlayTextColor(int c) {
overlayTextColor = c;
}
public void setOverlayBackgroundColor(int c) {
overlayBackgroundColor = c;
}
public void setOverlayPosition(int p) {
ovlPos = p;
}
public void setDisplayMode(int s) {
displayMode = s;
}
}
MjpegInputStream
package de.mjpegsample.MjpegView;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
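// NOTE: frames larger than this buffer can break the mark()/reset() logic in readMjpegFrame(); raise FRAME_MAX_LENGTH for higher-resolution streams (a later answer uses 200000).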
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSequence(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSequence(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}

Android ICS and MJPEG using AsyncTask

I modified the MJPEG viewer code from Android and MJPEG to work using an AsyncTask (and thus work on Ice Cream Sandwich (ICS), 4.0.4) and here is my code.
If anyone has suggestions on how to optimize, clean up, or otherwise improve the code, please let me know. Two issues I'd appreciate help with:
If the device is playing a stream and you lock and then unlock the screen, playback does not resume until you either kill and restart the app or rotate the screen. All my attempts at doing something in onResume() resulted in app crashes.
In particular, I'd like to move the AsyncTask back into MjpegInputStream.java, but was not able to get that to work.
MjpegActivity.java:
package com.demo.mjpeg;
import java.io.IOException;
import java.net.URI;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import com.demo.mjpeg.MjpegView.MjpegInputStream;
import com.demo.mjpeg.MjpegView.MjpegView;
import android.app.Activity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;
public class MjpegActivity extends Activity {
private static final String TAG = "MjpegActivity";
private MjpegView mv;
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//sample public cam
String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&amp%3bdummy=1333689998337";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
new DoRead().execute(URL);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
public class DoRead extends AsyncTask<String, Void, MjpegInputStream> {
protected MjpegInputStream doInBackground(String... url) {
//TODO: if camera has authentication deal with it and don't just not work
HttpResponse res = null;
DefaultHttpClient httpclient = new DefaultHttpClient();
Log.d(TAG, "1. Sending http request");
try {
res = httpclient.execute(new HttpGet(URI.create(url[0])));
Log.d(TAG, "2. Request finished, status = " + res.getStatusLine().getStatusCode());
if(res.getStatusLine().getStatusCode()==401){
//You must turn off camera User Access Control before this will work
return null;
}
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
e.printStackTrace();
Log.d(TAG, "Request failed-ClientProtocolException", e);
//Error connecting to camera
} catch (IOException e) {
e.printStackTrace();
Log.d(TAG, "Request failed-IOException", e);
//Error connecting to camera
}
return null;
}
protected void onPostExecute(MjpegInputStream result) {
mv.setSource(result);
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(true);
}
}
}
MjpegInputStream.java:
package com.demo.mjpeg.MjpegView;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;
public class MjpegInputStream extends DataInputStream {
private static final String TAG = "MjpegInputStream";
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public MjpegInputStream(InputStream in) {
super(new BufferedInputStream(in, FRAME_MAX_LENGTH));
}
private int getEndOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) {
return i + 1;
}
} else {
seqIndex = 0;
}
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSequence(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
Log.d(TAG, "catch NumberFormatException hit", nfe);
mContentLength = getEndOfSequence(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
MjpegView.java:
package com.demo.mjpeg.MjpegView;
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "MjpegView";
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) {
mSurfaceHolder = surfaceHolder;
}
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN){
return new Rect(0, 0, dispWidth, dispHeight);
}
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps;
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+" fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) {
Log.d(TAG, "catch IOException hit in run", e);
}
}
} finally {
if (c != null) {
mSurfaceHolder.unlockCanvasAndPost(c);
}
}
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {
Log.d(TAG, "catch InterruptedException hit in stopPlayback", e);
}
}
}
public MjpegView(Context context, AttributeSet attrs) {
super(context, attrs); init(context);
}
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) {
thread.setSurfaceSize(w, h);
}
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) {
super(context);
init(context);
}
public void surfaceCreated(SurfaceHolder holder) {
surfaceDone = true;
}
public void showFps(boolean b) {
showFps = b;
}
public void setSource(MjpegInputStream source) {
mIn = source;
startPlayback();
}
public void setOverlayPaint(Paint p) {
overlayPaint = p;
}
public void setOverlayTextColor(int c) {
overlayTextColor = c;
}
public void setOverlayBackgroundColor(int c) {
overlayBackgroundColor = c;
}
public void setOverlayPosition(int p) {
ovlPos = p;
}
public void setDisplayMode(int s) {
displayMode = s;
}
}
Nice work!
For your onResume() problem, isn't it enough to move the following code from onCreate() to onResume()?
//sample public cam
String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&amp%3bdummy=1333689998337";
mv = new MjpegView(this);
setContentView(mv);
new DoRead().execute(URL);
Then you simply recreate the view and a new instance of the AsyncTask each time the activity resumes. I tried it and it works for me; a fuller sketch follows.
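A minimal sketch of that change, assuming the MjpegActivity and DoRead task from the question above:
@Override
public void onResume() {
    super.onResume();
    // Recreate the view and restart the stream whenever the activity returns
    // to the foreground, including after the screen is unlocked.
    String url = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi";
    mv = new MjpegView(this);
    setContentView(mv);
    new DoRead().execute(url);
}
@Override
public void onPause() {
    super.onPause();
    mv.stopPlayback(); // stop the render thread before the surface goes away
}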
A tip for newcomers: if your IP camera requires a username and password, add the following to your DefaultHttpClient, and the code above will then work with cameras that require authentication:
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
CredentialsProvider provider = new BasicCredentialsProvider();
UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("yourusername", "yourpassword");
provider.setCredentials(AuthScope.ANY, credentials);
DefaultHttpClient httpclient = new DefaultHttpClient();
httpclient.setCredentialsProvider(provider);
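Note that AuthScope.ANY sends these credentials to every host the client contacts; in a real app you would scope them to the camera, e.g. new AuthScope("192.168.1.10", 8081) (host and port here are placeholders).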
Thanks for the code, it's very helpful.
I want to suggest a few optimization tips, which I already use in my own code; overall performance can easily be increased severalfold.
First, I removed memory allocations during frame reading where possible:
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 200000 + HEADER_MAX_LENGTH;
private final String CONTENT_LENGTH = "Content-Length:";
private final String CONTENT_END = "\r\n";
private final static byte[] gFrameData = new byte[FRAME_MAX_LENGTH];
private final static byte[] gHeader = new byte[HEADER_MAX_LENGTH];
BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(SOI_MARKER);
if(headerLen < 0)
return null; // was "return false", which would not compile in a Bitmap-returning method
reset();
readFully(gHeader, 0, headerLen);
int contentLen;
try
{
contentLen = parseContentLength(gHeader, headerLen);
} catch (NumberFormatException nfe)
{
Log.d(TAG, "catch NumberFormatException hit", nfe);
contentLen = getEndOfSequence(EOF_MARKER);
}
readFully(gFrameData, 0, contentLen);
Bitmap bm = BitmapFactory.decodeByteArray(gFrameData, 0, contentLen, bitmapOptions);
bitmapOptions.inBitmap = bm;
return bm;
}
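One caveat: bitmapOptions.inBitmap reuse requires API 11+, and before Android 4.4 the reused bitmap must have exactly the same dimensions as the frame being decoded, so this assumes the stream resolution stays constant.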
Second, I optimized parseContentLength, removing String operations as much as possible:
byte[] CONTENT_LENGTH_BYTES;
byte[] CONTENT_END_BYTES;
public MjpegInputStream(InputStream in)
{
super(new BufferedInputStream(in, FRAME_MAX_LENGTH));
bitmapOptions.inSampleSize = 1;
bitmapOptions.inPreferredConfig = Bitmap.Config.RGB_565;
bitmapOptions.inPreferQualityOverSpeed = false;
bitmapOptions.inPurgeable = true;
try
{
CONTENT_LENGTH_BYTES = CONTENT_LENGTH.getBytes("UTF-8");
CONTENT_END_BYTES = CONTENT_END.getBytes("UTF-8");
} catch (UnsupportedEncodingException e)
{
e.printStackTrace();
}
}
private int findPattern(byte[] buffer, int bufferLen, byte[] pattern, int offset)
{
int seqIndex = 0;
for(int i=offset; i < bufferLen; ++i)
{
if(buffer[i] == pattern[seqIndex])
{
++seqIndex;
if(seqIndex == pattern.length)
{
return i + 1;
}
} else
{
seqIndex = 0;
}
}
return -1;
}
private int parseContentLength(byte[] headerBytes, int length) throws IOException, NumberFormatException
{
int begin = findPattern(headerBytes, length, CONTENT_LENGTH_BYTES, 0);
int end = findPattern(headerBytes, length, CONTENT_END_BYTES, begin) - CONTENT_END_BYTES.length;
// converting string to int
int number = 0;
int radix = 1;
for(int i = end - 1; i >= begin; --i)
{
if(headerBytes[i] > 47 && headerBytes[i] < 58)
{
number += (headerBytes[i] - 48) * radix;
radix *= 10;
}
}
return number;
}
There could be mistakes in the code since I rewrote it for Stack Overflow; originally I use two threads, one reading frames and another rendering (a sketch of that split follows).
I hope it helps someone.
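For reference, a minimal sketch of that reader/renderer split; the bounded queue and the drawFrame() helper are illustrative assumptions (BlockingQueue and ArrayBlockingQueue come from java.util.concurrent), not the author's original code:
// One thread reads and decodes frames, another renders them; a bounded queue
// drops frames when the renderer falls behind instead of stalling the network read.
final BlockingQueue<Bitmap> frameQueue = new ArrayBlockingQueue<Bitmap>(2);
Thread reader = new Thread(new Runnable() {
    public void run() {
        try {
            while (!Thread.currentThread().isInterrupted()) {
                frameQueue.offer(mIn.readMjpegFrame()); // drop the frame if the queue is full
            }
        } catch (IOException e) { /* stream ended */ }
    }
});
Thread renderer = new Thread(new Runnable() {
    public void run() {
        try {
            while (!Thread.currentThread().isInterrupted()) {
                drawFrame(frameQueue.take()); // lockCanvas/drawBitmap, as in run() above
            }
        } catch (InterruptedException e) { /* stopped */ }
    }
});
reader.start();
renderer.start();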

Android and MJPEG

I need to get the mjpeg stream from an IP camera, anyone know the right way do it? I googled a bit and I find this example
http://www.anddev.org/mjpeg_on_android_anyone-t1871.html
but I've been stucked when I tried to get the stream from another activity called by the main activity. Here the code:
Main acitivity
package com.test;
import java.util.ArrayList;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.ListView;
public class IntentTest extends Activity {
/** Called when the activity is first created. */
ListView myListView = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
myListView = (ListView)findViewById(R.id.listView);
final ArrayList<String> items = new ArrayList<String>();
items.add("00408C944B9A");
final ArrayAdapter<String> aa;
aa = new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1,
items);
myListView.setAdapter(aa);
myListView.setOnItemClickListener(listClicked);
}
private OnItemClickListener listClicked = new OnItemClickListener() {
public void onItemClick(AdapterView<?> arg0, View arg1, int position, long id) {
// TODO Auto-generated method stub
Intent i = new Intent(IntentTest.this, OtherActivity.class);
i.putExtra("MAC", myListView.getItemAtPosition(position).toString());
startActivity(i);
}
};
}
Second activity
package com.test;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.TextView;
import com.test.mjpeg.mjpegsample.MjpegView.*;
import com.test.parser.JSONParse;
public class OtherActivity extends Activity {
/** Called when the activity is first created. */
private MjpegView mv;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Bundle extras = getIntent().getExtras();
if (extras != null){
String mac = (String)extras.get("MAC");
Log.i("Other", "---->" + mac);
TextView tv = (TextView)findViewById(R.id.textView);
tv.setText(mac);
String URL = "myurl";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(true);
}
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
I found this code over the internet some time ago, maybe it will be of some help to you.
MjpegSample Class
package de.mjpegsample;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import de.mjpegsample.MjpegView.MjpegInputStream;
import de.mjpegsample.MjpegView.MjpegView;
public class MjpegSample extends Activity {
private MjpegView mv;
private static final int MENU_QUIT = 1;
/* Creates the menu items */
public boolean onCreateOptionsMenu(Menu menu) {
menu.add(0, MENU_QUIT, 0, "Quit");
return true;
}
/* Handles item selections */
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case MENU_QUIT:
finish();
return true;
}
return false;
}
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://gamic.dnsalias.net:7001/img/video.mjpeg";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN, WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(false);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
MjpegView Class
package de.mjpegsample.MjpegView;
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) { /* skip the bad frame and keep reading */ }
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) {
super(context);
init(context);
}
public void surfaceCreated(SurfaceHolder holder) {
surfaceDone = true;
}
public void showFps(boolean b) {
showFps = b;
}
public void setSource(MjpegInputStream source) {
mIn = source;
startPlayback();
}
public void setOverlayPaint(Paint p) {
overlayPaint = p;
}
public void setOverlayTextColor(int c) {
overlayTextColor = c;
}
public void setOverlayBackgroundColor(int c) {
overlayBackgroundColor = c;
}
public void setOverlayPosition(int p) {
ovlPos = p;
}
public void setDisplayMode(int s) {
displayMode = s;
}
}
MjpegInputStream Class
package de.mjpegsample.MjpegView;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSequence(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSequence(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
If you need any more info, let me know, I'll help in any way I can.
FYI: I did not write SimpleMjpegView; you can find more up-to-date code here.
For those looking to get this working with AsyncTask (and thus working on Ice Cream Sandwich (ICS), 4.0.4) see Android ICS and MJPEG using AsyncTask.
I had the same problem and tried custom MJPEG viewers; while they worked, they proved unstable and clumsy for me.
Instead, I simply use a WebView. In Android Studio, just drag and drop a WebView into your layout; mine covers half the screen rather than all of it.
Then, to fit the video best, add this in your onCreate():
If using Kotlin:
webView.settings.loadWithOverviewMode = true
webView.settings.useWideViewPort = true
If using Java:
webView.getSettings().setLoadWithOverviewMode(true);
webView.getSettings().setUseWideViewPort(true);
And then, on button click:
To start:
webView.loadUrl("http://$id:8081/cam.mjpeg") // start
To stop:
webView.stopLoading()
webView.loadUrl("about:blank")
Hope this helps.
