My app keeps closing itself, but the code doesn't appear to have any errors; I think I'm missing something, but I can't see what.
I took this code as an example: Android and MJPEG.
While USB debugging I got:
Warning: debug info can be unavailable. Please close other application using ADB: Monitor, DDMS, Eclipse
Also, in MjpegSample the imports of MjpegView and MjpegInputStream are reported as "unused".
MjpegSample
package com.example.rauls.webviewpfcactivity;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import com.example.rauls.webviewpfcactivity.MjpegInputStream;
import com.example.rauls.webviewpfcactivity.MjpegView;
/* android studio says that both import statements are unused
*/
public class MjpegSample extends Activity {
private MjpegView mv;
private static final int MENU_QUIT = 1;
/* Creates the menu items */
public boolean onCreateOptionsMenu(Menu menu) {
menu.add(0, MENU_QUIT, 0, "Quit");
return true;
}
/* Handles item selections */
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case MENU_QUIT:
finish();
return true;
}
return false;
}
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://192.168.1.64:8081/";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN, WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(false);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
MjpegView
package com.example.rauls.webviewpfcactivity;
/**
* Created by RaulS on 19/12/2015.
*/
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) {}
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) {
super(context);
init(context);
}
public void surfaceCreated(SurfaceHolder holder) {
surfaceDone = true;
}
public void showFps(boolean b) {
showFps = b;
}
public void setSource(MjpegInputStream source) {
mIn = source;
startPlayback();
}
public void setOverlayPaint(Paint p) {
overlayPaint = p;
}
public void setOverlayTextColor(int c) {
overlayTextColor = c;
}
public void setOverlayBackgroundColor(int c) {
overlayBackgroundColor = c;
}
public void setOverlayPosition(int p) {
ovlPos = p;
}
public void setDisplayMode(int s) {
displayMode = s;
}
}
MjpegInputStream
package com.example.rauls.webviewpfcactivity;
/**
* Created by RaulS on 19/12/2015.
*/
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSeqeunce(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSeqeunce(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
main.xml
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical" android:layout_width="match_parent"
android:layout_height="match_parent">
<!--WebView
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="#+id/webview"
android:layout_width="wrap_content"
android:layout_height="wrap_content" /-->
<!--VideoView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="#+id/videoView"
android:layout_gravity="center_horizontal" /-->
<SurfaceView
android:id="#+id/surfaceView"
android:layout_width="fill_parent"
android:layout_height="fill_parent" />
</LinearLayout>
Related
I have a server that streams IP camera video (in this case my laptop acts as the IP camera) and stores JPG files that are accessible over the internet. I have this code that should capture those JPGs and turn them into a video:
I am only able to get one frame and that's it; the program somehow does not refresh (the frame refreshes if I rotate the screen).
What I would like to know is:
1. Is there an easier way to display those JPEG pictures?
2. Why does my program show only one frame?
Main Activity
package de.mjpegsample;
import android.app.Activity;
import android.os.Bundle;
import android.view.Window;
import android.view.WindowManager;
import de.mjpegsample.MjpegView.MjpegInputStream;
import de.mjpegsample.MjpegView.MjpegView;
public class MjpegSample extends Activity {
private MjpegView mv;
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://xxx.xxx.x.xx:8080/cam_1.jpg";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(true);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
MjpegInputStream:
package de.mjpegsample.MjpegView;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSeqeunce(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSeqeunce(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
MjpegView
package de.mjpegsample.MjpegView;
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = true;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) {}
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) { super(context); init(context); }
public void surfaceCreated(SurfaceHolder holder) { surfaceDone = true; }
public void showFps(boolean b) { showFps = b; }
public void setSource(MjpegInputStream source) { mIn = source; startPlayback();}
public void setOverlayPaint(Paint p) { overlayPaint = p; }
public void setOverlayTextColor(int c) { overlayTextColor = c; }
public void setOverlayBackgroundColor(int c) { overlayBackgroundColor = c; }
public void setOverlayPosition(int p) { ovlPos = p; }
public void setDisplayMode(int s) { displayMode = s; }
}
Strange... I changed String URL = "http://xxx.xxx.x.xx:8080/cam_1.jpg"; to String URL = "http://xxx.xxx.x.xx:8080/cam_1.cgi"; and it is working with a little bit of delay, but that is OK.
So my last question is still unanswered: is there a simpler way to display JPGs and turn them into an MJPEG? Because this approach takes a lot of coding :)
I am opening an RTSP stream from an IP camera in my Android application through a VideoView. The problem is that it has a huge delay of more than 20 seconds, although when viewing the camera from an ordinary PC via its browser there is no such delay. Any ideas, please?
My Code:
String path="rtsp://192.168.1.20/3gpp";
myVideoView.setVideoURI(Uri.parse(path));
myVideoView.setMediaController(new MediaController(this));
myVideoView.requestFocus();
myVideoView.start();
Use an MJPEG stream instead of RTSP. After a lot of research, I've found an MJPEG viewer class that plays the live stream in near real time (depending on your network connection).
Here's the code: Android and MJPEG.
You can do the following:
MjpegSample Class
package de.mjpegsample;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import de.mjpegsample.MjpegView.MjpegInputStream;
import de.mjpegsample.MjpegView.MjpegView;
public class MjpegSample extends Activity {
private MjpegView mv;
private static final int MENU_QUIT = 1;
/* Creates the menu items */
public boolean onCreateOptionsMenu(Menu menu) {
menu.add(0, MENU_QUIT, 0, "Quit");
return true;
}
/* Handles item selections */
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case MENU_QUIT:
finish();
return true;
}
return false;
}
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://gamic.dnsalias.net:7001/img/video.mjpeg";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN, WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(false);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
MjpegView
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) {}
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) {
super(context);
init(context);
}
public void surfaceCreated(SurfaceHolder holder) {
surfaceDone = true;
}
public void showFps(boolean b) {
showFps = b;
}
public void setSource(MjpegInputStream source) {
mIn = source;
startPlayback();
}
public void setOverlayPaint(Paint p) {
overlayPaint = p;
}
public void setOverlayTextColor(int c) {
overlayTextColor = c;
}
public void setOverlayBackgroundColor(int c) {
overlayBackgroundColor = c;
}
public void setOverlayPosition(int p) {
ovlPos = p;
}
public void setDisplayMode(int s) {
displayMode = s;
}
}
MjpegInputStream
package de.mjpegsample.MjpegView;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSeqeunce(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSeqeunce(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
I modified the MJPEG viewer code from Android and MJPEG to work using an AsyncTask (and thus work on Ice Cream Sandwich (ICS), 4.0.4); here is my code.
If anyone has any suggestions on how to optimize, clean up, or do something more proper with the code, please let me know. Two issues I'd appreciate help addressing:
1. If you have the device on a stream and then lock and unlock the screen, it does not resume playing until you either kill and restart the app or rotate the screen. All my attempts at doing something in onResume() resulted in app crashes.
2. In particular, I'd like to get the AsyncTask back into MjpegInputStream.java, but was not able to get that to work.
MjpegActivity.java:
package com.demo.mjpeg;
import java.io.IOException;
import java.net.URI;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import com.demo.mjpeg.MjpegView.MjpegInputStream;
import com.demo.mjpeg.MjpegView.MjpegView;
import android.app.Activity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;
public class MjpegActivity extends Activity {
private static final String TAG = "MjpegActivity";
private MjpegView mv;
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//sample public cam
String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&%3bdummy=1333689998337";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
new DoRead().execute(URL);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
public class DoRead extends AsyncTask<String, Void, MjpegInputStream> {
protected MjpegInputStream doInBackground(String... url) {
//TODO: if camera has authentication deal with it and don't just not work
HttpResponse res = null;
DefaultHttpClient httpclient = new DefaultHttpClient();
Log.d(TAG, "1. Sending http request");
try {
res = httpclient.execute(new HttpGet(URI.create(url[0])));
Log.d(TAG, "2. Request finished, status = " + res.getStatusLine().getStatusCode());
if(res.getStatusLine().getStatusCode()==401){
//You must turn off camera User Access Control before this will work
return null;
}
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
e.printStackTrace();
Log.d(TAG, "Request failed-ClientProtocolException", e);
//Error connecting to camera
} catch (IOException e) {
e.printStackTrace();
Log.d(TAG, "Request failed-IOException", e);
//Error connecting to camera
}
return null;
}
protected void onPostExecute(MjpegInputStream result) {
mv.setSource(result);
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(true);
}
}
}
MjpegInputStream.java:
package com.demo.mjpeg.MjpegView;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;
public class MjpegInputStream extends DataInputStream {
private static final String TAG = "MjpegInputStream";
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public MjpegInputStream(InputStream in) {
super(new BufferedInputStream(in, FRAME_MAX_LENGTH));
}
private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) {
return i + 1;
}
} else {
seqIndex = 0;
}
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSeqeunce(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
nfe.getStackTrace();
Log.d(TAG, "catch NumberFormatException hit", nfe);
mContentLength = getEndOfSeqeunce(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
MjpegView.java:
package com.demo.mjpeg.MjpegView;
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "MjpegView";
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) {
mSurfaceHolder = surfaceHolder;
}
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN){
return new Rect(0, 0, dispWidth, dispHeight);
}
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps;
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+" fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) {
e.getStackTrace();
Log.d(TAG, "catch IOException hit in run", e);
}
}
} finally {
if (c != null) {
mSurfaceHolder.unlockCanvasAndPost(c);
}
}
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {
e.getStackTrace();
Log.d(TAG, "catch IOException hit in stopPlayback", e);
}
}
}
public MjpegView(Context context, AttributeSet attrs) {
super(context, attrs); init(context);
}
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) {
thread.setSurfaceSize(w, h);
}
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) {
super(context);
init(context);
}
public void surfaceCreated(SurfaceHolder holder) {
surfaceDone = true;
}
public void showFps(boolean b) {
showFps = b;
}
public void setSource(MjpegInputStream source) {
mIn = source;
startPlayback();
}
public void setOverlayPaint(Paint p) {
overlayPaint = p;
}
public void setOverlayTextColor(int c) {
overlayTextColor = c;
}
public void setOverlayBackgroundColor(int c) {
overlayBackgroundColor = c;
}
public void setOverlayPosition(int p) {
ovlPos = p;
}
public void setDisplayMode(int s) {
displayMode = s;
}
}
Nice work!
For your problem with onResume(), isn't it enough to move the following code from onCreate() to onResume()?
//sample public cam
String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&%3bdummy=1333689998337";
mv = new MjpegView(this);
setContentView(mv);
new DoRead().execute(URL);
Then you simply recreate the View and a new instance of the AsyncTask... I tried it and it works for me...
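For reference, a minimal sketch of what that onResume() could look like (it reuses the mv field, the DoRead task and the sample URL from the activity above; untested, just to illustrate moving the setup out of onCreate()):
public void onResume() {
    super.onResume();
    // recreate the view and restart the AsyncTask whenever the activity returns
    // to the foreground (e.g. after the screen has been locked and unlocked)
    String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&%3bdummy=1333689998337";
    mv = new MjpegView(this);
    setContentView(mv);
    new DoRead().execute(URL);
}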
A tip for newcomers: if you want to access an IP camera that requires a username and password, add this to your DefaultHttpClient and the code above will also work for cameras that require authentication:
// these classes come from the Apache HttpClient packages already used above:
// org.apache.http.auth.AuthScope, org.apache.http.auth.UsernamePasswordCredentials,
// org.apache.http.client.CredentialsProvider, org.apache.http.impl.client.BasicCredentialsProvider
CredentialsProvider provider = new BasicCredentialsProvider();
UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("yourusername", "yourpassword");
provider.setCredentials(AuthScope.ANY, credentials);
DefaultHttpClient httpclient = new DefaultHttpClient();
httpclient.setCredentialsProvider(provider);
Thanks for the code, it's very helpful.
I want to suggest a few optimization tips, which I already use in my own code; overall performance can easily be increased by a few times.
First, I removed memory allocations during frame reading where possible:
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 200000 + HEADER_MAX_LENGTH;
private final String CONTENT_LENGTH = "Content-Length:";
private final String CONTENT_END = "\r\n";
private final static byte[] gFrameData = new byte[FRAME_MAX_LENGTH];
private final static byte[] gHeader = new byte[HEADER_MAX_LENGTH];
BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(SOI_MARKER);
if(headerLen < 0)
return null; // no SOI marker found, cannot decode a frame
reset();
readFully(gHeader, 0, headerLen);
int contentLen;
try
{
contentLen = parseContentLength(gHeader, headerLen);
} catch (NumberFormatException nfe)
{
nfe.getStackTrace();
Log.d(TAG, "catch NumberFormatException hit", nfe);
contentLen = getEndOfSequence(EOF_MARKER);
}
readFully(gFrameData, 0, contentLen);
Bitmap bm = BitmapFactory.decodeByteArray(gFrameData, 0, contentLen, bitmapOptions);
bitmapOptions.inBitmap = bm;
return bm;
}
Second, I optimized parseContentLength, removing String operations as much as possible:
byte[] CONTENT_LENGTH_BYTES;
byte[] CONTENT_END_BYTES;
public MjpegInputStream(InputStream in)
{
super(new BufferedInputStream(in, FRAME_MAX_LENGTH));
bitmapOptions.inSampleSize = 1;
bitmapOptions.inPreferredConfig = Bitmap.Config.RGB_565;
bitmapOptions.inPreferQualityOverSpeed = false;
bitmapOptions.inPurgeable = true;
try
{
CONTENT_LENGTH_BYTES = CONTENT_LENGTH.getBytes("UTF-8");
CONTENT_END_BYTES = CONTENT_END.getBytes("UTF-8");
} catch (UnsupportedEncodingException e)
{
e.printStackTrace();
}
}
private int findPattern(byte[] buffer, int bufferLen, byte[] pattern, int offset)
{
int seqIndex = 0;
for(int i=offset; i < bufferLen; ++i)
{
if(buffer[i] == pattern[seqIndex])
{
++seqIndex;
if(seqIndex == pattern.length)
{
return i + 1;
}
} else
{
seqIndex = 0;
}
}
return -1;
}
private int parseContentLength(byte[] headerBytes, int length) throws IOException, NumberFormatException
{
int begin = findPattern(headerBytes, length, CONTENT_LENGTH_BYTES, 0);
int end = findPattern(headerBytes, length, CONTENT_END_BYTES, begin) - CONTENT_END_BYTES.length;
// converting string to int
int number = 0;
int radix = 1;
for(int i = end - 1; i >= begin; --i)
{
if(headerBytes[i] > 47 && headerBytes[i] < 58)
{
number += (headerBytes[i] - 48) * radix;
radix *= 10;
}
}
return number;
}
There could be mistakes in the code since I rewrote it for Stack Overflow; originally I use two threads, one reading frames and another rendering.
I hope it will help someone.
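To illustrate that two-thread split, here is a rough sketch (not the original code): a reader thread that only calls readMjpegFrame() and a render thread that only draws, handing frames over through a small BlockingQueue. It assumes the mIn, mSurfaceHolder and destRect() members shown in the MjpegView code above; the queue-based hand-off itself is my own simplification.
// requires java.util.concurrent.ArrayBlockingQueue and java.util.concurrent.BlockingQueue
final BlockingQueue<Bitmap> frameQueue = new ArrayBlockingQueue<Bitmap>(2);
Thread readerThread = new Thread(new Runnable() {
    public void run() {
        try {
            while (!Thread.currentThread().isInterrupted()) {
                Bitmap frame = mIn.readMjpegFrame(); // network + JPEG decoding only
                frameQueue.put(frame);               // blocks while the renderer catches up
            }
        } catch (IOException e) {
            // stream ended or failed; let the reader finish
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
});
Thread renderThread = new Thread(new Runnable() {
    public void run() {
        try {
            while (!Thread.currentThread().isInterrupted()) {
                Bitmap frame = frameQueue.take();    // wait for the next decoded frame
                Canvas c = mSurfaceHolder.lockCanvas();
                if (c == null) continue;             // surface not ready yet
                try {
                    c.drawColor(Color.BLACK);
                    c.drawBitmap(frame, null, destRect(frame.getWidth(), frame.getHeight()), null);
                } finally {
                    mSurfaceHolder.unlockCanvasAndPost(c);
                }
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
});
readerThread.start();
renderThread.start();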
I have used the example code below to place image files onto a canvas in Android, and I'm struggling to get it to save the entire image to the SD card. At the moment it only saves a strange close-up of one of the images. Any idea what is going wrong?
import java.io.File;
import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.Date;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Toast;
public class ebpSeatingPlan extends Activity {
public Panel myPanel;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//requestWindowFeature(Window.FEATURE_NO_TITLE);
myPanel = new Panel(this);
setContentView(myPanel);
myPanel.setImageID(R.drawable.tableperson);
}
private static final int TYPE1 = 0;
private static final int TYPE2 = 1;
private static final int TYPE3 = 2;
private static final int EXIT = 3;
private static final int TYPE4 = 4;
public boolean onCreateOptionsMenu(Menu menu){
menu.add(0,TYPE1,0,"Guest Type 1").setIcon(R.drawable.tableperson);
menu.add(0,TYPE2,0,"Guest Type 2").setIcon(R.drawable.tableperson2);
menu.add(0,TYPE3,0,"Guest Type 3").setIcon(R.drawable.tableperson3);
menu.add(0,TYPE4,0,"Save Image").setIcon(R.drawable.frame);
menu.add(0,EXIT,0,"Back to Main Menu").setIcon(R.drawable.exit);
return true;
}
public boolean onOptionsItemSelected (MenuItem item){
switch (item.getItemId()){
case TYPE1:
myPanel.setImageID(R.drawable.tableperson);
return true;
case TYPE2:
myPanel.setImageID(R.drawable.tableperson2);
return true;
case TYPE3:
myPanel.setImageID( R.drawable.tableperson3);
return true;
case TYPE4:
File myDir = new File("/sdcard/saved_images");
myDir.mkdirs();
Date now = new Date();
String fname = "image"+now.getDate()+now.getSeconds()+".jpg";
File file = new File(myDir, fname);
myPanel.saveAsJpg(file, myPanel);
return true;
case EXIT:
finish();
return true;
}
return false;
}
}
class Panel extends SurfaceView implements SurfaceHolder.Callback {
public int chosenImageId;
//public Bitmap bitmap;
private TutorialThread _thread;
private ArrayList<GraphicObject> _graphics = new ArrayList<GraphicObject>();
private GraphicObject _currentGraphic = null;
public void setImageID(int i){
chosenImageId = i;
}
public void saveAsJpg(File f, View v){
String fname = f.getAbsolutePath();
FileOutputStream fos = null;
try{
fos = new FileOutputStream(fname);
Bitmap b = Bitmap.createBitmap(v.getWidth(),v.getHeight(),Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(b);
v.draw(c);
//bitmap.compress(CompressFormat.JPEG, 100, fos);
b.compress(CompressFormat.JPEG, 100, fos);
Toast.makeText(super.getContext(),"Image Saved", Toast.LENGTH_LONG).show();
}catch (Exception ex){
Toast.makeText(super.getContext(),"Error Saving Image", Toast.LENGTH_LONG).show();
Log.i("DAVE","stacktrace is " + ex);
}
}
public Panel(Context context) {
super(context);
getHolder().addCallback(this);
_thread = new TutorialThread(getHolder(), this);
setFocusable(true);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
synchronized (_thread.getSurfaceHolder()) {
GraphicObject graphic = null;
if (event.getAction() == MotionEvent.ACTION_DOWN) {
graphic = new GraphicObject(BitmapFactory.decodeResource(getResources(), chosenImageId));
graphic.getCoordinates().setX((int) event.getX() - graphic.getGraphic().getWidth() / 2);
graphic.getCoordinates().setY((int) event.getY() - graphic.getGraphic().getHeight() / 2);
_currentGraphic = graphic;
} else if (event.getAction() == MotionEvent.ACTION_MOVE) {
_currentGraphic.getCoordinates().setX((int) event.getX() - _currentGraphic.getGraphic().getWidth() / 2);
_currentGraphic.getCoordinates().setY((int) event.getY() - _currentGraphic.getGraphic().getHeight() / 2);
} else if (event.getAction() == MotionEvent.ACTION_UP) {
_graphics.add(_currentGraphic);
_currentGraphic = null;
}
return true;
}
}
@Override
public void onDraw(Canvas canvas) {
canvas.drawColor(Color.LTGRAY);
Bitmap bitmap=null;
GraphicObject.Coordinates coords;
for (GraphicObject graphic : _graphics) {
bitmap = graphic.getGraphic();
coords = graphic.getCoordinates();
canvas.drawBitmap(bitmap, coords.getX(), coords.getY(), null);
}
if (_currentGraphic != null) {
bitmap = _currentGraphic.getGraphic();
coords = _currentGraphic.getCoordinates();
canvas.drawBitmap(bitmap, coords.getX(), coords.getY(), null);
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
// TODO Auto-generated method stub
}
public void surfaceCreated(SurfaceHolder holder) {
_thread.setRunning(true);
_thread.start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
boolean retry = true;
_thread.setRunning(false);
while (retry) {
try {
_thread.join();
retry = false;
} catch (InterruptedException e) {
}
}
}
}
class TutorialThread extends Thread {
private SurfaceHolder _surfaceHolder;
private Panel _panel;
private boolean _run = false;
public TutorialThread(SurfaceHolder surfaceHolder, Panel panel) {
_surfaceHolder = surfaceHolder;
_panel = panel;
}
public void setRunning(boolean run) {
_run = run;
}
public SurfaceHolder getSurfaceHolder() {
return _surfaceHolder;
}
@Override
public void run() {
Canvas c;
while (_run) {
c = null;
try {
c = _surfaceHolder.lockCanvas(null);
synchronized (_surfaceHolder) {
_panel.onDraw(c);
}
} finally {
if (c != null) {
_surfaceHolder.unlockCanvasAndPost(c);
}
}
}
}
}
class GraphicObject {
public class Speed {
public static final int X_DIRECTION_RIGHT = 1;
public static final int X_DIRECTION_LEFT = -1;
public static final int Y_DIRECTION_DOWN = 1;
public static final int Y_DIRECTION_UP = -1;
private int _x = 1;
private int _y = 1;
private int _xDirection = X_DIRECTION_RIGHT;
private int _yDirection = Y_DIRECTION_DOWN;
public int getXDirection() {
return _xDirection;
}
public void setXDirection(int direction) {
_xDirection = direction;
}
public void toggleXDirection() {
if (_xDirection == X_DIRECTION_RIGHT) {
_xDirection = X_DIRECTION_LEFT;
} else {
_xDirection = X_DIRECTION_RIGHT;
}
}
public int getYDirection() {
return _yDirection;
}
public void setYDirection(int direction) {
_yDirection = direction;
}
public void toggleYDirection() {
if (_yDirection == Y_DIRECTION_DOWN) {
_yDirection = Y_DIRECTION_UP;
} else {
_yDirection = Y_DIRECTION_DOWN;
}
}
public int getX() {
return _x;
}
public void setX(int speed) {
_x = speed;
}
public int getY() {
return _y;
}
public void setY(int speed) {
_y = speed;
}
public String toString() {
String xDirection;
String yDirection;
if (_xDirection == X_DIRECTION_RIGHT) {
xDirection = "right";
} else {
xDirection = "left";
}
if (_yDirection == Y_DIRECTION_UP) {
yDirection = "up";
} else {
yDirection = "down";
}
return "Speed: x: " + _x + " | y: " + _y + " | xDirection: " + xDirection + " | yDirection: " + yDirection;
}
}
public class Coordinates {
private int _x = 100;
private int _y = 0;
public int getX() {
return _x + _bitmap.getWidth() / 2;
}
public void setX(int value) {
_x = value - _bitmap.getWidth() / 2;
}
public int getY() {
return _y + _bitmap.getHeight() / 2;
}
public void setY(int value) {
_y = value - _bitmap.getHeight() / 2;
}
public String toString() {
return "Coordinates: (" + _x + "/" + _y + ")";
}
}
private Bitmap _bitmap;
private Coordinates _coordinates;
public GraphicObject(Bitmap bitmap) {
_bitmap = bitmap;
_coordinates = new Coordinates();
}
public Bitmap getGraphic() {
return _bitmap;
}
public Coordinates getCoordinates() {
return _coordinates;
}
}
You can use the drawing cache of the view to save the image.
First enable the drawing cache of your view:
myPanel.setDrawingCacheEnabled(true);
After that, in your save function, write the following code:
Bitmap bitMap = v.getDrawingCache(true);
if (!APP_FILE_PATH.exists()) { // APP_FILE_PATH: a File pointing at your output directory
APP_FILE_PATH.mkdirs();
}
// compress() writes to an OutputStream, not a File, so use the FileOutputStream opened for f
bitMap.compress(Bitmap.CompressFormat.PNG, 95, fos);
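Put together, a minimal version of the save method using the drawing cache could look like this (a sketch only: it keeps the File/FileOutputStream and Toast handling from the original saveAsJpg(), and the method name saveAsPng is just illustrative):
public void saveAsPng(File f, View v) {
    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(f);
        // the drawing cache holds a bitmap of the whole view, not just the last graphic drawn
        Bitmap bitMap = v.getDrawingCache(true);
        bitMap.compress(Bitmap.CompressFormat.PNG, 95, fos);
        Toast.makeText(super.getContext(), "Image Saved", Toast.LENGTH_LONG).show();
    } catch (Exception ex) {
        Toast.makeText(super.getContext(), "Error Saving Image", Toast.LENGTH_LONG).show();
        Log.i("DAVE", "stacktrace is " + ex);
    } finally {
        if (fos != null) {
            try { fos.close(); } catch (Exception e) { /* ignore */ }
        }
    }
}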
I need to get the MJPEG stream from an IP camera; does anyone know the right way to do it? I googled a bit and found this example:
http://www.anddev.org/mjpeg_on_android_anyone-t1871.html
but I got stuck when I tried to get the stream from another activity called by the main activity. Here is the code:
Main activity
package com.test;
public class IntentTest extends Activity {
/** Called when the activity is first created. */
ListView myListView = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
myListView = (ListView)findViewById(R.id.listView);
final ArrayList<String> items = new ArrayList<String>();
items.add("00408C944B9A");
final ArrayAdapter<String> aa;
aa = new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1,
items);
myListView.setAdapter(aa);
myListView.setOnItemClickListener(listClicked);
}
private OnItemClickListener listClicked = new OnItemClickListener() {
public void onItemClick(AdapterView<?> arg0, View arg1, int position, long id) {
// TODO Auto-generated method stub
Intent i = new Intent(IntentTest.this, OtherActivity.class);
i.putExtra("MAC", myListView.getItemAtPosition(position).toString());
startActivity(i);
}
};
}
Second activity
package com.test;
import com.test.mjpeg.mjpegsample.MjpegView.*;
import com.test.parser.JSONParse;
public class OtherActivity extends Activity {
/** Called when the activity is first created. */
private MjpegView mv;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Bundle extras = getIntent().getExtras();
if (extras != null){
String mac = (String)extras.get("MAC");
Log.i("Other", "---->" + mac);
TextView tv = (TextView)findViewById(R.id.textView);
tv.setText(mac);
String URL = "myurl";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(true);
}
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
I found this code on the internet some time ago; maybe it will be of some help to you.
MjpegSample Class
package de.mjpegsample;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import de.mjpegsample.MjpegView.MjpegInputStream;
import de.mjpegsample.MjpegView.MjpegView;
public class MjpegSample extends Activity {
private MjpegView mv;
private static final int MENU_QUIT = 1;
/* Creates the menu items */
public boolean onCreateOptionsMenu(Menu menu) {
menu.add(0, MENU_QUIT, 0, "Quit");
return true;
}
/* Handles item selections */
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case MENU_QUIT:
finish();
return true;
}
return false;
}
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://gamic.dnsalias.net:7001/img/video.mjpeg";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN, WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(false);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
MjpegView Class
package de.mjpegsample.MjpegView;
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) {}
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) {
super(context);
init(context);
}
public void surfaceCreated(SurfaceHolder holder) {
surfaceDone = true;
}
public void showFps(boolean b) {
showFps = b;
}
public void setSource(MjpegInputStream source) {
mIn = source;
startPlayback();
}
public void setOverlayPaint(Paint p) {
overlayPaint = p;
}
public void setOverlayTextColor(int c) {
overlayTextColor = c;
}
public void setOverlayBackgroundColor(int c) {
overlayBackgroundColor = c;
}
public void setOverlayPosition(int p) {
ovlPos = p;
}
public void setDisplayMode(int s) {
displayMode = s;
}
}
MjpegInputStream Class
package de.mjpegsample.MjpegView;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSequence(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSequence(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
If you need any more info, let me know; I'll help in any way I can.
FYI: I did not write SimpleMjpegView; you can find more up-to-date code here
For those looking to get this working with AsyncTask (and thus working on Ice Cream Sandwich (ICS), 4.0.4), see Android ICS and MJPEG using AsyncTask.
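As a rough illustration of that approach, here is a minimal sketch that moves the blocking MjpegInputStream.read() call off the UI thread with android.os.AsyncTask (needed on ICS and later, where network I/O on the main thread is not allowed). It reuses the mv field and URL string from the sample above; the ReadStreamTask name is just a placeholder, not part of the original code.
private class ReadStreamTask extends AsyncTask<String, Void, MjpegInputStream> {
    @Override
    protected MjpegInputStream doInBackground(String... urls) {
        // MjpegInputStream.read() opens an HTTP connection, so keep it off the UI thread
        return MjpegInputStream.read(urls[0]);
    }
    @Override
    protected void onPostExecute(MjpegInputStream stream) {
        // back on the UI thread: hand the stream to the view
        if (stream != null) {
            mv.setSource(stream);
            mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
            mv.showFps(false);
        }
    }
}
Then, in onCreate(), replace the direct mv.setSource(MjpegInputStream.read(URL)) call with:
new ReadStreamTask().execute(URL);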
I had the same problem and tried using custom MJPEG viewers. While they worked, they proved unstable and clumsy for me.
I am simply using a WebView. In Android Studio, just drag and drop a WebView into your layout; mine does not cover the entire screen, only half of it.
Then, to make the video fit best, in your onCreate:
If using Kotlin:
webView.settings.loadWithOverviewMode = true
webView.settings.useWideViewPort = true
If using Java:
webView.getSettings().setLoadWithOverviewMode(true);
webView.getSettings().setUseWideViewPort(true);
And then, on button click:
To start:
webView.loadUrl("http://$id:8081/cam.mjpeg") // start
To stop:
webView.stopLoading()
webView.loadUrl("about:blank")
Hope this helps.