This is the RoofShape class:
package com.view9.stoddart.opengl;
import android.opengl.GLU;
import android.util.Log;
import com.view9.stoddart.model.OpenGlPointObject;
import com.view9.stoddart.model.OpenGlRoofObject;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.ArrayList;
import java.util.List;
import javax.microedition.khronos.opengles.GL10;
/**
* Created by view9 on 10/25/15.
*/
public class RoofShape {
ArrayList<CoOrdinatesModel> cordinateList;
List<Short> indicesList;
private FloatBuffer mVertexBuffer = null;
private ShortBuffer mShapeBorderIndicesBuffer = null;
private int mNumOfShapeBorderIndices = 0;
OpenGlRoofObject roofObject;
public RoofShape(OpenGlRoofObject roofObject) {
this.roofObject = roofObject;
setAllBuffers();
}
public void drawShape(GL10 gl) {
// Log.d("#####", "draw called");
// Specifies the location and data format of an array of vertex
// coordinates to use when rendering.
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffer);
// Draw all lines
gl.glDrawElements(GL10.GL_LINES, mNumOfShapeBorderIndices,
GL10.GL_UNSIGNED_SHORT, mShapeBorderIndicesBuffer);
GLU.gluLookAt(gl, 0, 0, -5, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}
private void setAllBuffers() {
cordinateList = new ArrayList<>();
cordinateList.add(new CoOrdinatesModel(Float.parseFloat("0.0"), Float.parseFloat("0.0"), Float.parseFloat("0.0")));
for (int pointCount = 0; pointCount < roofObject.getPoinstList().size(); pointCount++) {
OpenGlPointObject pointObject = roofObject.getPoinstList().get(pointCount);
cordinateList.add(new CoOrdinatesModel(pointObject.getxAsis(), pointObject.getyAxis(), pointObject.getzAxis()));
}
/**====================================
* working with the indices for line join
* adding coordinates to indicesList
======================================*/
indicesList = new ArrayList<>();
for (int indicesCount = 0; indicesCount < roofObject.getLinesList().size(); indicesCount++) {
String p = roofObject.getLinesList().get(indicesCount).getPath();
for (String strPoint : p.split(",")) {
//splitting the string and eliminating the prefix character
String prefixStringEleminating = strPoint.substring(1);
Short intPoint = Short.parseShort(prefixStringEleminating);
indicesList.add(intPoint);
}
}
updateVertexListToArray(cordinateList);
updateVerticesListToArray(indicesList);
}
private void updateVerticesListToArray(List<Short> list) {
/**
* For converting indices list to indices array
*/
ArrayList<Short> indicesList = new ArrayList<>();
indicesList.addAll(list);
short[] pointsToJoinList = new short[indicesList.size()];
int indicesCount = 0;
for (short shortIndices : indicesList) {
pointsToJoinList[indicesCount++] = shortIndices;
}
mNumOfShapeBorderIndices = pointsToJoinList.length;
ByteBuffer tbibb = ByteBuffer.allocateDirect(pointsToJoinList.length * 2);
tbibb.order(ByteOrder.nativeOrder());
mShapeBorderIndicesBuffer = tbibb.asShortBuffer();
mShapeBorderIndicesBuffer.put(pointsToJoinList);
mShapeBorderIndicesBuffer.position(0);
}
private void updateVertexListToArray(ArrayList<CoOrdinatesModel> cordinateList) {
/**============================
* working with the vertices
* adding coordinates to the vertex list
===============================*/
List<Float> vList = new ArrayList<>();
int loopSize = cordinateList.size();
for (int i = 0; i < loopSize; i++) {
vList.add(cordinateList.get(i).getxAxis());
vList.add(cordinateList.get(i).getyAxis());
vList.add(cordinateList.get(i).getzAxis());
}
/**
* converting vertex list to array
*/
float[] vertexlist = new float[vList.size()];
int count = 0;
for (float f : vList) {
vertexlist[count++] = f;
}
ByteBuffer vbb = ByteBuffer.allocateDirect(vertexlist.length * 4);
vbb.order(ByteOrder.nativeOrder());
mVertexBuffer = vbb.asFloatBuffer();
mVertexBuffer.put(vertexlist);
mVertexBuffer.position(0);
}
public void setFlashingPointTOCordinatelist(GL10 gl, ArrayList<CoOrdinatesModel> flashingPoint) {
cordinateList.addAll(flashingPoint);
mVertexBuffer.clear();
mShapeBorderIndicesBuffer.clear();
updateVertexListToArray(cordinateList);
Short initialPoint = Short.parseShort("0");
Short previousUpdated = Short.parseShort("0");
for (int i = 1; i <= 5; i++) {
String totalNo = (cordinateList.size() + 1) + i + "";
Short indice = Short.parseShort(totalNo);
String str = indice + "";
if (i == 1) {
indicesList.add(Short.parseShort(str));
initialPoint = Short.parseShort(str);
} else if (i == 2) {
indicesList.add(Short.parseShort(str));
previousUpdated = Short.parseShort(str);
} else if (i == 3 || i == 4) {
indicesList.add(previousUpdated);
indicesList.add(Short.parseShort(str));
previousUpdated = Short.parseShort(str);
} else if (i == 5) {
indicesList.add(previousUpdated);
indicesList.add(initialPoint);
}
}
updateVerticesListToArray(indicesList);
drawShape(gl);
}
}
This is my Renderer class:
package com.view9.stoddart.opengl;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.Log;
import com.view9.stoddart.model.OpenGlRoofObject;
import java.util.ArrayList;
import javax.microedition.khronos.opengles.GL10;
public class StoddartNewRenderer implements GLSurfaceView.Renderer {
private Context mContext;
public float mAngleX = 0.0f;
public float mAngleY = 0.0f;
public float mAngleZ = 0.0f;
private float mPreviousX;
private float mPreviousY;
private final float TOUCH_SCALE_FACTOR = 0.6f;
private OpenGlRoofObject roofObject;
GL10 glForShape;
RoofShape roofShape;
public StoddartNewRenderer(Context context, OpenGlRoofObject roofObject) {
mContext = context;
this.roofObject = roofObject;
roofShape = new RoofShape(roofObject);
}
public void onDrawFrame(GL10 gl) {
// Log.d("####","draw frame called");
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glTranslatef(0.0f, 0.0f, -3.0f);
gl.glRotatef(mAngleX, 1, 0, 0);
gl.glRotatef(mAngleY, 0, 1, 0);
gl.glRotatef(mAngleZ, 0, 0, 1);
// Set line color to black
gl.glColor4f(0.0f, 0.0f, 0.0f, 1.0f);
// gl.glColorMask(true, false, false, true);
roofShape.drawShape(gl);
//testing
glForShape = gl;
}
@Override
public void onSurfaceCreated(GL10 gl, javax.microedition.khronos.egl.EGLConfig config) {
Log.d("####", "surface created");
gl.glClearColor(1f, 1f, 1f, 1);
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
gl.glEnable(GL10.GL_DEPTH_TEST);
// Enabled the vertex buffer for writing and to be used during rendering.
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
if (height == 0) height = 1;
gl.glViewport(0, 0, width, height);
float aspect = (float) width / height;
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glFrustumf(-aspect, aspect, -1.0f, 1.0f, 1.0f, 0.0f);
Log.d("####", "surface is changed");
}
public void drawSelectedShape(float x, float y) {
ArrayList<CoOrdinatesModel> flashingPoint = new ArrayList<>();
flashingPoint.add(new CoOrdinatesModel(x - 0.1f, y, 0f));
flashingPoint.add(new CoOrdinatesModel(x, y + 0.1f, 0f));
flashingPoint.add(new CoOrdinatesModel(x + 0.1f, y, 0f));
flashingPoint.add(new CoOrdinatesModel(x, y - 0.1f, 0f));
roofShape.setFlashingPointTOCordinatelist(glForShape, flashingPoint);
}
}
This is my GLSurfaceView class:
package com.view9.stoddart.opengl;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
/**
* Created by view9 on 10/12/15.
*/
public class StoddartOpenGlSurfaceView extends GLSurfaceView {
StoddartNewRenderer renderer;
public StoddartOpenGlSurfaceView(Context context) {
super(context);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
public StoddartOpenGlSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public void showRenderer(StoddartNewRenderer renderer) {
this.renderer = renderer;
}
@Override
public boolean onTouchEvent(MotionEvent e) {
// return renderer.onTouchEvent(event);
float x = e.getX() / 100;
float y = e.getY() / 100;
Log.d("position", "x-axis: " + x + " y-axis: " + y);
renderer.drawSelectedShape(x, y);
requestRender();
return true;
}
}
And this is the activity where I've set up the GLSurfaceView.
The data are fetched from the API in XML format; I've parsed them and stored the result as a roofObject.
package com.view9.stoddart.activities;
import android.app.Dialog;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.LinearLayout;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.view9.stoddart.ApiCallConfiguration.ServerConfig;
import com.view9.stoddart.ApiCallConfiguration.WebService;
import com.view9.stoddart.R;
import com.view9.stoddart.fragment.ItemsSelectFragment;
import com.view9.stoddart.model.OpenGlRoofObject;
import com.view9.stoddart.model.SelectItemsDto;
import com.view9.stoddart.opengl.StoddartNewRenderer;
import com.view9.stoddart.opengl.StoddartOpenGlSurfaceView;
import com.view9.stoddart.utills.AppController;
import com.view9.stoddart.utills.Cache;
import com.view9.stoddart.utills.CustomDialogs;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.Serializable;
import java.util.ArrayList;
/**
* Created by view9 on 6/8/15.
*/
public class DrawingFlashingActivity extends BaseActivity {
DrawingFlashingActivity instance = null;
public DrawingFlashingActivity getInstance() {
return instance;
}
OpenGlRoofObject roofObject;
private LinearLayout gridLayout, flashingDiagramLayout;
// StoddartRenderer renderer;
StoddartNewRenderer renderer;
StoddartOpenGlSurfaceView stoddartOpenGlSurfaceView;
Response.Listener<String> response;
Response.ErrorListener errorListener;
Dialog pDialog;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
instance = DrawingFlashingActivity.this;
roofObject = Cache.cachedRoofObject;
initializeUiComponent();
stoddartOpenGlSurfaceView = (StoddartOpenGlSurfaceView) findViewById(R.id.opengl_surface_view);
renderer = new StoddartNewRenderer(getInstance(), roofObject);
stoddartOpenGlSurfaceView.setRenderer(renderer);
stoddartOpenGlSurfaceView.showRenderer(renderer);
gettingServerResponse();
pDialog = CustomDialogs.progressDialog(getInstance(), AppController.LOADING);
}
private void gettingServerResponse() {
response = new Response.Listener<String>() {
@Override
public void onResponse(String s) {
Log.d("response", s);
pDialog.dismiss();
try {
JSONObject nodeObject = new JSONObject(s);
Boolean status = nodeObject.getBoolean("success");
int code = nodeObject.getInt("code");
if (code == 1 && status) {
Cache.FLASHING_TYPE = s;
parseFlashingType(s);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
};
errorListener = new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError volleyError) {
if (pDialog.isShowing()) {
pDialog.dismiss();
}
volleyError.printStackTrace();
}
};
}
private void parseFlashingType(String s) {
try {
ArrayList<SelectItemsDto> itemslist = new ArrayList<>();
JSONObject nodeObject = new JSONObject(s);
JSONArray dataArray = nodeObject.getJSONArray("data");
for (int i = 0; i < dataArray.length(); i++) {
String type = dataArray.get(i).toString();
itemslist.add(new SelectItemsDto("i", type, false, false));
}
ItemsSelectFragment alertdFragment = new ItemsSelectFragment();
Bundle bundle = new Bundle();
bundle.putString("title", "Choose Flashing Type");
bundle.putBoolean("isSingleSelect", true);
bundle.putSerializable("itemsList", (Serializable) itemslist);
alertdFragment.setArguments(bundle);
alertdFragment.show(getSupportFragmentManager(), "Alert Dialog Fragment");
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public String getToolbarTitle() {
return "Flashing";
}
@Override
public int getResourceLayout() {
return R.layout.layout_flashing_measure_activity;
}
@Override
public int getActivityId() {
return 8;
}
@Override
protected void onPause() {
super.onPause();
stoddartOpenGlSurfaceView.onPause();
}
@Override
protected void onResume() {
super.onResume();
stoddartOpenGlSurfaceView.onResume();
}
private void initializeUiComponent() {
gridLayout = (LinearLayout) findViewById(R.id.l_gridLayout);
gridLayout.setVisibility(View.GONE);
flashingDiagramLayout = (LinearLayout) findViewById(R.id.flashing_diagram_layout);
flashingDiagramLayout.setVisibility(View.VISIBLE);
setListeners();
}
private void setListeners() {
ivTick.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (!"".equals(Cache.FLASHING_TYPE)) {
parseFlashingType(Cache.FLASHING_TYPE);
} else {
pDialog.show();
WebService.getFlashingType(response, errorListener, ServerConfig.GET_FLASHING_TYPE_URL);
}
}
});
}
}
With this code I can successfully draw a shape as shown in the diagram; the diagram is of a roof. My question: I want to add a circle wherever the shape is touched, and there may be multiple circles. For example, if I touch the coordinate (1, 2, 1), a circle should be added at that point; if I then touch another point, the next circle should be added there, and so on. This is also shown in the second image.
I've spent 3 days on this without finding a solution. What I've tried so far: whenever I touch the view, I calculate the x and y values and pass them to setFlashingPointTOCordinatelist(), which is defined in the RoofShape class.
Any kind of help will be appreciated, and please keep in mind that I am very new to OpenGL. This is the image I can draw so far.
And this is the image of what I want to happen on touching the shape.
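For reference, one common way to draw such a circle in OpenGL ES 1.x (a minimal sketch, not code from the post above) is to generate the outline vertices around the touched point and render them with GL_LINE_LOOP, using the same kind of direct FloatBuffer that RoofShape already builds for its lines:
// Hypothetical helper: builds a vertex buffer for a circle outline centred on a touched point.
private FloatBuffer buildCircleBuffer(float cx, float cy, float cz, float radius, int segments) {
    float[] vertices = new float[segments * 3];
    for (int i = 0; i < segments; i++) {
        double angle = 2.0 * Math.PI * i / segments;
        vertices[i * 3] = cx + (float) (radius * Math.cos(angle));
        vertices[i * 3 + 1] = cy + (float) (radius * Math.sin(angle));
        vertices[i * 3 + 2] = cz;
    }
    ByteBuffer bb = ByteBuffer.allocateDirect(vertices.length * 4);
    bb.order(ByteOrder.nativeOrder());
    FloatBuffer circleBuffer = bb.asFloatBuffer();
    circleBuffer.put(vertices);
    circleBuffer.position(0);
    return circleBuffer;
}
// Inside drawShape(GL10 gl), after the roof lines, each stored circle could then be drawn with:
// gl.glVertexPointer(3, GL10.GL_FLOAT, 0, circleBuffer);
// gl.glDrawArrays(GL10.GL_LINE_LOOP, 0, segments);
Keeping one such buffer per touch in a list and looping over it in drawShape would let earlier circles stay visible when new points are touched.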
I am learning how to make a spectrum analyzer. I'm following a tutorial, and here's the code:
package com.example.prasetyo.spectrumanalyzer;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
import ca.uol.aig.fftpack.RealDoubleFFT;
/*public class SpectrumActivity extends ActionBarActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_spectrum);
}*/
public class SpectrumActivity extends Activity implements OnClickListener {
public void onClick(View v) {
if (started) {
started = false;
startStopButton.setText("Start");
recordTask.cancel(true);
} else {
started = true;
startStopButton.setText("Stop");
recordTask = new RecordAudio();
recordTask.execute();
}
}
int frequency = 8000;
int channelConfiguration = AudioFormat.CHANNEL_IN_MONO;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private RealDoubleFFT transformer;
int blockSize = 256;
Button startStopButton;
boolean started = false;
RecordAudio recordTask;
ImageView imageView;
Bitmap bitmap;
Canvas canvas;
Paint paint;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_spectrum);
startStopButton = (Button) this.findViewById(R.id.StartStopButton);
startStopButton.setOnClickListener(this);
transformer = new RealDoubleFFT(blockSize);
imageView = (ImageView) this.findViewById(R.id.ImageView01);
bitmap = Bitmap.createBitmap(256, 100, Bitmap.Config.ARGB_8888);
canvas = new Canvas(bitmap);
paint = new Paint();
paint.setColor(Color.GREEN);
imageView.setImageBitmap(bitmap);
}
/*public void onClick(View v) {
if (started) {
started = false;
startStopButton.setText("Start");
recordTask.cancel(true);
} else {
started = true;
startStopButton.setText("Stop");
recordTask = new RecordAudio();
recordTask.execute();
}
}*/
private class RecordAudio extends AsyncTask<Void, double[], Void> {
@Override
protected Void doInBackground(Void... params) {
if(isCancelled()){
return null;
}
//try {
int bufferSize = AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);
AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, frequency, channelConfiguration, audioEncoding, bufferSize);
short[] buffer = new short[blockSize];
double[] toTransform = new double[blockSize];
try {
audioRecord.startRecording();
}
catch (IllegalStateException e) {
Log.e("Recording Failed", e.toString());
}
while (started) {
int bufferReadResult = audioRecord.read(buffer, 0, blockSize);
for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
toTransform[i] = (double) buffer[i] / 32768.0; // signed 16 bit
}
transformer.ft(toTransform);
publishProgress(toTransform);
//Log.e("AudioRecord", "Recording Failed");
//return null;
}
return null;
}
protected void onProgressUpdate(double[]... toTransform) {
canvas.drawColor(Color.BLACK);
for (int i = 0; i < toTransform[0].length; i++) {
int x;
x = i;
int downy = (int) (100 - (toTransform[0][i] * 10));
int upy = 100;
canvas.drawLine(x, downy, x, upy, paint);
imageView.invalidate();
}
}
/*public void onClick(View v) {
if (started) {
started = false;
startStopButton.setText("Start");
recordTask.cancel(true);
} else {
started = true;
startStopButton.setText("Stop");
recordTask = new RecordAudio();
recordTask.execute();
}
}*/
}
}
The spectrum shows up as a line for each frequency. If I'm not mistaken, the lines appear because of the drawLine method. Is there any way to show the frequencies in another shape (bars)? Really need help :)
Here's the illustration: https://drive.google.com/open?id=0B3jsXXkRa7pLclFYMDdmUlk5MEU&authuser=0
You can replace canvas.drawLine(x, downy, x, upy, paint); with canvas.drawRect(x * 5, downy, x * 5 + 4, upy, paint);
Canvas.drawRect
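A minimal sketch of what onProgressUpdate could look like with that change (the * 5 spacing and 4-pixel bar width come straight from the suggestion above; with that spacing the 256-pixel-wide bitmap only fits about 51 bars, so a wider bitmap or narrower bars may be needed):
protected void onProgressUpdate(double[]... toTransform) {
    canvas.drawColor(Color.BLACK);
    for (int i = 0; i < toTransform[0].length; i++) {
        int downy = (int) (100 - (toTransform[0][i] * 10));
        int upy = 100;
        // a 4-pixel-wide bar every 5 pixels instead of a 1-pixel line
        canvas.drawRect(i * 5, downy, i * 5 + 4, upy, paint);
    }
    // invalidate once per frame instead of once per bar
    imageView.invalidate();
}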
How can I use the GPUImage library without resizing the original image? I need to apply a filter such as Pixelate or Blur to an entire GPUImageView.
I thought of using GPUImage, but I don't know how to do it.
You can change the GPUImage library's GPUImageRenderer class.
The modified code is below:
/*
* Copyright (C) 2012 CyberAgent
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.co.cyberagent.android.gpuimage;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView.Renderer;
import jp.co.cyberagent.android.gpuimage.util.TextureRotationUtil;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.LinkedList;
import java.util.Queue;
import static jp.co.cyberagent.android.gpuimage.util.TextureRotationUtil.TEXTURE_NO_ROTATION;
@SuppressLint("WrongCall")
@TargetApi(11)
public class GPUImageRenderer implements Renderer, PreviewCallback {
public static final int NO_IMAGE = -1;
static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
private GPUImageFilter mFilter;
public final Object mSurfaceChangedWaiter = new Object();
private int mGLTextureId = NO_IMAGE;
private SurfaceTexture mSurfaceTexture = null;
private final FloatBuffer mGLCubeBuffer;
private final FloatBuffer mGLTextureBuffer;
private IntBuffer mGLRgbBuffer;
int mwidth,mheight,wheight;
private int mOutputWidth;
private int mOutputHeight;
private int mImageWidth;
private int mImageHeight;
private int mAddedPadding;
private final Queue<Runnable> mRunOnDraw;
private final Queue<Runnable> mRunOnDrawEnd;
private Rotation mRotation;
private boolean mFlipHorizontal;
private boolean mFlipVertical;
private GPUImage.ScaleType mScaleType = GPUImage.ScaleType.CENTER_CROP;
public GPUImageRenderer(final GPUImageFilter filter) {
mFilter = filter;
mRunOnDraw = new LinkedList<Runnable>();
mRunOnDrawEnd = new LinkedList<Runnable>();
mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
setRotation(Rotation.NORMAL, false, false);
}
@Override
public void onSurfaceCreated(final GL10 unused, final EGLConfig config) {
GLES20.glClearColor(0, 0, 0, 1);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
mFilter.init();
}
public void onSurfacSize(final int width, final int height) {
mwidth=width;
mheight=height;
}
@Override
public void onSurfaceChanged(final GL10 gl, final int width, final int height) {
if(height>mheight)
{
mOutputWidth = mwidth;
mOutputHeight = mheight;
GLES20.glViewport(0,0, mwidth, mheight);
// GLES20.glViewport(0+(mwidth/2), 0+(mheight/2), mwidth, mheight);
mFilter.onOutputSizeChanged(mwidth,mheight);
}
else
{
mOutputWidth = width;
mOutputHeight = height;
GLES20.glViewport(0,0, width, height);
mFilter.onOutputSizeChanged(width,height);
}
// GLES20.glViewport(20,-20, width, height);
GLES20.glUseProgram(mFilter.getProgram());
adjustImageScaling();
synchronized (mSurfaceChangedWaiter) {
mSurfaceChangedWaiter.notifyAll();
}
}
@Override
public void onDrawFrame(final GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
runAll(mRunOnDraw);
mFilter.onDraw(mGLTextureId, mGLCubeBuffer, mGLTextureBuffer);
runAll(mRunOnDrawEnd);
if (mSurfaceTexture != null) {
mSurfaceTexture.updateTexImage();
}
}
private void runAll(Queue<Runnable> queue) {
synchronized (queue) {
while (!queue.isEmpty()) {
queue.poll().run();
}
}
}
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
final Size previewSize = camera.getParameters().getPreviewSize();
if (mGLRgbBuffer == null) {
mGLRgbBuffer = IntBuffer.allocate(previewSize.width * previewSize.height);
}
if (mRunOnDraw.isEmpty()) {
runOnDraw(new Runnable() {
@Override
public void run() {
GPUImageNativeLibrary.YUVtoRBGA(data, previewSize.width, previewSize.height,
mGLRgbBuffer.array());
mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer, previewSize, mGLTextureId);
camera.addCallbackBuffer(data);
if (mImageWidth != previewSize.width) {
mImageWidth = previewSize.width;
mImageHeight = previewSize.height;
adjustImageScaling();
}
}
});
}
}
public void setUpSurfaceTexture(final Camera camera) {
runOnDraw(new Runnable() {
@Override
public void run() {
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mSurfaceTexture = new SurfaceTexture(textures[0]);
try {
camera.setPreviewTexture(mSurfaceTexture);
camera.setPreviewCallback(GPUImageRenderer.this);
camera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
});
}
public void setFilter(final GPUImageFilter filter) {
runOnDraw(new Runnable() {
@Override
public void run() {
final GPUImageFilter oldFilter = mFilter;
mFilter = filter;
if (oldFilter != null) {
oldFilter.destroy();
}
mFilter.init();
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(mOutputWidth, mOutputHeight);
}
});
}
public void setFilter3(final GPUImageFilter filter) {
runOnDraw(new Runnable() {
@Override
public void run() {
final GPUImageFilter oldFilter = mFilter;
mFilter = filter;
// if (oldFilter != null) {
// oldFilter.destroy();
// }
mFilter.init();
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(mOutputWidth, mOutputHeight);
}
});
}
public void deleteImage() {
runOnDraw(new Runnable() {
@Override
public void run() {
GLES20.glDeleteTextures(1, new int[]{
mGLTextureId
}, 0);
mGLTextureId = NO_IMAGE;
}
});
}
public void setImageBitmap(final Bitmap bitmap) {
setImageBitmap(bitmap, true);
}
public void setImageBitmap(final Bitmap bitmap, final boolean recycle) {
if (bitmap == null) {
return;
}
runOnDraw(new Runnable() {
@Override
public void run() {
Bitmap resizedBitmap = null;
if (bitmap.getWidth() % 2 == 1) {
resizedBitmap = Bitmap.createBitmap(bitmap.getWidth() + 1, bitmap.getHeight(),
Bitmap.Config.ARGB_8888);
Canvas can = new Canvas(resizedBitmap);
can.drawARGB(0x00, 0x00, 0x00, 0x00);
can.drawBitmap(bitmap, 0, 0, null);
mAddedPadding = 1;
} else {
mAddedPadding = 0;
}
mGLTextureId = OpenGlUtils.loadTexture(
resizedBitmap != null ? resizedBitmap : bitmap, mGLTextureId, recycle);
if (resizedBitmap != null) {
resizedBitmap.recycle();
}
mImageWidth = bitmap.getWidth();
mImageHeight = bitmap.getHeight();
adjustImageScaling();
}
});
}
public void setScaleType(GPUImage.ScaleType scaleType) {
mScaleType = scaleType;
}
protected int getFrameWidth() {
return mOutputWidth;
}
protected int getFrameHeight() {
return mOutputHeight;
}
private void adjustImageScaling() {
float outputWidth = mOutputWidth;
float outputHeight = mOutputHeight;
if (mRotation == Rotation.ROTATION_270 || mRotation == Rotation.ROTATION_90) {
outputWidth = mOutputHeight;
outputHeight = mOutputWidth;
}
float ratio1 = outputWidth / mImageWidth;
float ratio2 =outputHeight / mImageHeight;
float ratioMax = Math.max(ratio1, ratio2);
int imageWidthNew = (Math.round(mImageWidth * ratioMax));
int imageHeightNew = (Math.round(mImageHeight * ratioMax));
float ratioWidth = imageWidthNew / (outputWidth);
float ratioHeight = imageHeightNew / (outputHeight);
float[] cube = CUBE;
float[] textureCords = TextureRotationUtil.getRotation(mRotation, mFlipHorizontal, mFlipVertical);
if (mScaleType == GPUImage.ScaleType.CENTER_CROP) {
float distHorizontal = (1 - 1 / ratioWidth) / 2;
float distVertical = (1 - 1 / ratioHeight) / 2;
textureCords = new float[]{
addDistance(textureCords[0], distHorizontal), addDistance(textureCords[1], distVertical),
addDistance(textureCords[2], distHorizontal), addDistance(textureCords[3], distVertical),
addDistance(textureCords[4], distHorizontal), addDistance(textureCords[5], distVertical),
addDistance(textureCords[6], distHorizontal), addDistance(textureCords[7], distVertical),
};
} else {
cube = new float[]{
CUBE[0] * ratioWidth, CUBE[1] * ratioHeight,
CUBE[2] * ratioWidth, CUBE[3] * ratioHeight,
CUBE[4] * ratioWidth, CUBE[5] * ratioHeight,
CUBE[6] * ratioWidth, CUBE[7] * ratioHeight,
};
}
mGLCubeBuffer.clear();
mGLCubeBuffer.put(cube).position(0);
mGLTextureBuffer.clear();
mGLTextureBuffer.put(textureCords).position(0);
}
private float addDistance(float coordinate, float distance) {
return coordinate == 0.0f ? distance : 1 - distance;
}
public void setRotationCamera(final Rotation rotation, final boolean flipHorizontal,
final boolean flipVertical) {
setRotation(rotation, flipVertical, flipHorizontal);
}
public void setRotation(final Rotation rotation, final boolean flipHorizontal,
final boolean flipVertical) {
mRotation = rotation;
mFlipHorizontal = flipHorizontal;
mFlipVertical = flipVertical;
adjustImageScaling();
}
public Rotation getRotation() {
return mRotation;
}
public boolean isFlippedHorizontally() {
return mFlipHorizontal;
}
public boolean isFlippedVertically() {
return mFlipVertical;
}
protected void runOnDraw(final Runnable runnable) {
synchronized (mRunOnDraw) {
mRunOnDraw.add(runnable);
}
}
protected void runOnDrawEnd(final Runnable runnable) {
synchronized (mRunOnDrawEnd) {
mRunOnDrawEnd.add(runnable);
}
}
}
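For reference, a rough usage sketch under the assumption that the stock filter classes of the CyberAgent GPUImage library (GPUImagePixelationFilter, GPUImageGaussianBlurFilter) are used with this modified renderer; viewWidth, viewHeight and originalBitmap are placeholders:
// Sketch only: feed the view size to the custom onSurfacSize() before the surface changes,
// then load the bitmap at its original size and apply a filter.
GPUImageRenderer renderer = new GPUImageRenderer(new GPUImageFilter());
renderer.onSurfacSize(viewWidth, viewHeight);
renderer.setImageBitmap(originalBitmap, false); // second argument is the 'recycle' flag passed through to loadTexture
renderer.setFilter(new GPUImagePixelationFilter()); // or new GPUImageGaussianBlurFilter() for blur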
I'm having a strange issue with an Android game I'm making. It's a 2d action puzzler; the sprites and background are simple opaque images, but need to move quickly and smoothly. I have been experiencing general frame rate issues however, and this particular issue is quite confusing to me though.
The problem is that I can start my game (on the hardware, a Nexus 7) and have it run at a frame rate of at least 60fps, then close it and start it back up and it will run at 30-45. Repeating this process, the game usually runs with the slower framerate, but runs perfectly smoothly on every 4th (or so) attempt.
I am new to Android and programming in general, so I'm wondering if anyone has a guess as to what could be causing this performance discrepancy on repeated runs. I can edit to include some code if it's helpful, but this seems more theoretical and I'm not sure what would be relevant to post.
Thanks.
EDIT:
Here's my onCreate code:
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.os.Bundle;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.view.Window;
import android.view.WindowManager;
import com.jag.framework.Audio;
import com.jag.framework.FileIO;
import com.jag.framework.Game;
import com.jag.framework.Graphics;
import com.jag.framework.Input;
import com.jag.framework.Screen;
public abstract class AndroidGame extends Activity implements Game {
AndroidFastRenderView renderView;
Graphics graphics;
Audio audio;
Input input;
FileIO fileIO;
Screen screen;
WakeLock wakeLock;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
boolean isPortrait = getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
int frameBufferWidth = isPortrait ? 800: 1200;
int frameBufferHeight = isPortrait ? 1200: 800;
Bitmap frameBuffer = Bitmap.createBitmap(frameBufferWidth,
frameBufferHeight, Config.RGB_565);
float scaleX = (float) frameBufferWidth
/ getWindowManager().getDefaultDisplay().getWidth();
float scaleY = (float) frameBufferHeight
/ getWindowManager().getDefaultDisplay().getHeight();
renderView = new AndroidFastRenderView(this, frameBuffer);
graphics = new AndroidGraphics(getAssets(), frameBuffer);
fileIO = new AndroidFileIO(this);
audio = new AndroidAudio(this);
input = new AndroidInput(this, renderView, scaleX, scaleY);
screen = getInitScreen();
setContentView(renderView);
PowerManager powerManager = (PowerManager) getSystemService(Context.POWER_SERVICE);
wakeLock = powerManager.newWakeLock(PowerManager.FULL_WAKE_LOCK, "MyGame");
}
@Override
public void onResume() {
super.onResume();
wakeLock.acquire();
screen.resume();
renderView.resume();
}
@Override
public void onPause() {
super.onPause();
wakeLock.release();
renderView.pause();
screen.pause();
if (isFinishing())
screen.dispose();
}
@Override
public Input getInput() {
return input;
}
@Override
public FileIO getFileIO() {
return fileIO;
}
@Override
public Graphics getGraphics() {
return graphics;
}
@Override
public Audio getAudio() {
return audio;
}
@Override
public void setScreen(Screen screen) {
if (screen == null)
throw new IllegalArgumentException("Screen must not be null");
this.screen.pause();
this.screen.dispose();
screen.resume();
screen.update(0);
this.screen = screen;
}
public Screen getCurrentScreen() {
return screen;
}
}
and here's my main code:
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import com.jag.framework.Game;
import com.jag.framework.Graphics;
import com.jag.framework.Image;
import com.jag.framework.Input.TouchEvent;
import com.jag.framework.Screen;
public class GameScreen extends Screen {
enum GameState {
Ready, Running, Paused, GameOver
}
GameState state = GameState.Ready;
// Variable Setup
private static Scene scene;
public static int screenheight;
private ArrayList<Pieces> pieces;
//lanes:
int lane;
Paint paint, paint2;
Rect rect;
Image fore, rings1, rings2, base1, base2;
boolean recent, freeze, touch, pospressed, negpressed, wrongbutton;
int timepassed, difficulty, recentinterval, score;
Bitmap bitmap;
Canvas canvas2;
Rect foreg;
InputStream in;
AssetManager assets;
public GameScreen(Game game) {
super(game);
// Initialize game objects here
scene = new Scene(600);
screenheight = game.getGraphics().getHeight();
pieces = new ArrayList<Pieces>();
lane = 100;
recent = true;
Pieces p3 = new Pieces(lane, 940, true);
pieces.add(p3);
paint = new Paint();
paint.setTextSize(30);
paint.setTextAlign(Paint.Align.CENTER);
paint.setAntiAlias(true);
paint.setColor(Color.WHITE);
paint2 = new Paint();
paint2.setColor(Color.WHITE);
paint2.setStyle(Style.FILL);
difficulty = 50;
recentinterval = 30;
timepassed = 0;
freeze = false;
fore = Assets.block;
rings1 = Assets.ringswhite;
rings2 = Assets.ringsblack;
base1 = Assets.basewhite;
base2 = Assets.baseblack;
wrongbutton = false;
score = 0;
}
@Override
public void update(float deltaTime) {
List<TouchEvent> touchEvents = game.getInput().getTouchEvents();
// We have four separate update methods in this example.
// Depending on the state of the game, we call different update methods.
// Refer to Unit 3's code. We did a similar thing without separating the
// update methods.
if (state == GameState.Ready)
updateReady(touchEvents);
if (state == GameState.Running)
updateRunning(touchEvents, deltaTime);
if (state == GameState.Paused)
updatePaused(touchEvents);
if (state == GameState.GameOver)
updateGameOver(touchEvents);
}
private void updateReady(List<TouchEvent> touchEvents) {
// This example starts with a "Ready" screen.
// When the user touches the screen, the game begins.
// state now becomes GameState.Running.
// Now the updateRunning() method will be called!
if (touchEvents.size() > 0) {
game.getGraphics().clearScreen(Color.BLACK);
state = GameState.Running;
Assets.theme.stop();
}
}
private void updateRunning(List<TouchEvent> touchEvents, float deltaTime) {
// 1. All touch input is handled here:
int len = touchEvents.size();
for (int i = 0; i < len; i++) {
TouchEvent event = touchEvents.get(i);
if ((event.type == TouchEvent.TOUCH_DRAGGED || event.type == TouchEvent.TOUCH_DOWN) && event.y <= 1000) {
touch = true;
scene.setLine(event.x);
}
//POSITIVE BUTTON
if (event.type == TouchEvent.TOUCH_DOWN && ((0 < event.x) && (event.x < 250)) && event.y > 1000) {
pospressed = true;
}
//NEGATIVE BUTTON
if (event.type == TouchEvent.TOUCH_DOWN && ((550 < event.x) && (event.x < 800)) && event.y > 1000) {
negpressed = true;
}
//ACTIVATES ALERT (reduce score)
if (event.type == TouchEvent.TOUCH_UP) {
touch = false;
pospressed = false;
negpressed = false;
}
}
// 2. Check miscellaneous events like death:
// if (livesLeft == 0) {
// state = GameState.GameOver;
// }
// 3. Call individual update() methods here.
// This is where all the game updates happen.
// For example, robot.update();
if (!freeze){
timepassed += 1;
if ((timepassed % recentinterval) == 0){
recent = false;
}
}
Random randomGenerator = new Random();
int randomInt = randomGenerator.nextInt(difficulty);
int randomInt2 = randomGenerator.nextInt(7);
boolean randomBool = randomGenerator.nextBoolean();
int chanceOfNewPiece = 8;
if ((randomInt < chanceOfNewPiece)&&!recent) {
Pieces p = new Pieces((randomInt2+1)*lane, 940, randomBool);
pieces.add(p);
recent = true;
}
Iterator<Pieces> it = pieces.iterator();
while (it.hasNext()) {
Pieces p = it.next();
if (p.isVisible()&&!p.wayback&&!freeze)
p.update();
else if (p.isVisible()&&!p.wayback&&freeze){
p.still();
}
else if (p.wayback && pospressed){
if (score > 10){
score -= 10;
}
if (score <= 10){
score = 0;
}
if (p.type){
it.remove();
freeze = false;
pospressed = false;
Assets.click.play(100);
}
if (!p.type){
freeze = false;
pospressed = false;
wrongbutton = true;
}
}
else if (p.wayback && negpressed){
if (score > 10){
score -= 10;
}
if (score <= 10){
score = 0;
}
if (p.type){
freeze = false;
negpressed = false;
wrongbutton = true;
}
if (!p.type){
it.remove();
freeze = false;
negpressed = false;
Assets.click.play(100);
}
}
else if (p.isVisible() && p.wayback && wrongbutton){
p.updateback();
}
else if (p.isVisible()&&p.wayback){
p.updateback();
freeze = true;
}
else if (p.y < 10){
score += 1;
it.remove();
freeze = false;
wrongbutton = false;
}
else {
// if (p.y > screenheight-281){
// game.getGraphics().clearScreen(Color.BLACK);
// state = GameState.GameOver;
// }
score = 0;
it.remove();
freeze = false;
wrongbutton = false;
}
}
// if (pieces.size() == 0) {
//
// game.getGraphics().clearScreen(Color.BLACK);
// state = GameState.GameOver;
// }
}
private void updatePaused(List<TouchEvent> touchEvents) {
int len = touchEvents.size();
for (int i = 0; i < len; i++) {
TouchEvent event = touchEvents.get(i);
if (event.type == TouchEvent.TOUCH_UP) {
state = GameState.Running;
Assets.theme.stop();
}
}
}
private void updateGameOver(List<TouchEvent> touchEvents) {
int len = touchEvents.size();
for (int i = 0; i < len; i++) {
TouchEvent event = touchEvents.get(i);
if (event.type == TouchEvent.TOUCH_UP) {
nullify();
game.setScreen(new MainMenuScreen(game));
return;
}
}
}
@Override
public void paint(float deltaTime) {
//Debug.startMethodTracing();
Graphics g = game.getGraphics();
// draw the game elements
if (state == GameState.Running){
int fingerx = scene.getLine();
g.drawRect(fingerx, 0, g.getWidth(), 1200, Color.BLACK);
g.drawRect(0, 0, fingerx, 1200, Color.WHITE);
if (touch == false){
g.drawRect(250, 1000, 302, 205, Color.RED);
}
g.drawScaledImage(base1, fingerx, 955, g.getWidth()-fingerx,
base2.getHeight(), fingerx, 0, g.getWidth()-fingerx, base2.getHeight());
g.drawScaledImage(base2, 0, 955, fingerx, base2.getHeight(), 0, 0, fingerx, base2.getHeight());
g.drawString(String.valueOf(score),
350, 1075, paint);
////g.drawImage(back, 0, 0);
// g.drawScaledImage(Assets.fore, fingerx, 0, g.getWidth()-fingerx,
// Assets.fore.getHeight(), fingerx, 0, g.getWidth()-fingerx, Assets.fore.getHeight());
////g.drawImage(Assets.topwhite, 0, 0);
//g.saveCanvas();
//g.drawTransRect(0, 0, fingerx, g.getHeight());
////g.drawImage(fore, 0, 0);
//g.drawImage(base2, 0, 955);
// g.drawScaledImage(Assets.back, 0, 0, fingerx, Assets.back.getHeight(), 0, 0, fingerx, base2.getHeight());
// g.drawScaledImage(base2, fingerx, 955, g.getWidth()-fingerx, base2.getHeight(),
// fingerx, 0, g.getWidth()-fingerx, base2.getHeight());
//g.drawCropped(base2);
////g.drawImage(rings2, 0, 950);
//g.restoreCanvas();
for (Pieces p : pieces){
if (p.type == true)
g.drawImage(Assets.pos, (p.x - 40), p.y);
if (p.type == false){
g.drawImage(Assets.neg, (p.x - 40), p.y);
}
}
}
// draw the UI
if (state == GameState.Ready)
drawReadyUI();
if (state == GameState.Running)
drawRunningUI();
if (state == GameState.Paused)
drawPausedUI();
if (state == GameState.GameOver)
drawGameOverUI();
//Debug.stopMethodTracing();
}
private void nullify() {
// Set all variables to null. You will be recreating them in the
// constructor.
paint = null;
scene = null;
pieces = null;
scene = null;
pieces = null;
paint2 = null;
Assets.theme = null;
Assets.click = null;
// Call garbage collector to clean up memory.
System.gc();
}
private void drawReadyUI() {
Graphics g = game.getGraphics();
g.drawARGB(155, 0, 0, 0);
g.drawString("TOUCH THE SCREEN YA DUMMY",
400, 300, paint);
}
private void drawRunningUI() {
//Graphics g = game.getGraphics();
}
private void drawPausedUI() {
Graphics g = game.getGraphics();
// Darken the entire screen so you can display the Paused screen.
g.drawRect(0, 0, 801, 1281, Color.BLACK);
g.drawString("HEY GUY IT'S PAUSED", 640, 300, paint);
}
private void drawGameOverUI() {
Graphics g = game.getGraphics();
g.drawRect(0, 0, 1281, 801, Color.BLACK);
g.drawString("GAME OVER BRO", 640, 300, paint);
}
@Override
public void pause() {
if (state == GameState.Running)
System.gc();
state = GameState.Paused;
}
@Override
public void resume() {
}
@Override
public void dispose() {
}
@Override
public void backButton() {
pause();
}
public static Scene getScene(){
return scene;
}
public void setScore(int i){
score += i;
}
}
I realize this is a bit of a mess; it's the first thing I've coded. The framework is from kilobolt.com. Let me know if there are other classes/methods I should include.
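One way to make the discrepancy measurable (a hypothetical diagnostic helper, not part of the kilobolt framework) is to log the per-frame time from the game loop and compare a fast launch against a slow one:
// Hypothetical frame-time logger: call tick() once per frame from the render/update loop.
class FrameTimer {
    private long lastFrameNanos = System.nanoTime();

    void tick() {
        long now = System.nanoTime();
        float deltaMs = (now - lastFrameNanos) / 1000000f;
        lastFrameNanos = now;
        if (deltaMs > 20f) { // anything well above ~16.7 ms means a dropped 60fps frame
            android.util.Log.d("FrameTimer", "slow frame: " + deltaMs + " ms");
        }
    }
}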
I need to get the MJPEG stream from an IP camera; does anyone know the right way to do it? I googled a bit and found this example:
http://www.anddev.org/mjpeg_on_android_anyone-t1871.html
but I got stuck when I tried to get the stream from another activity called by the main activity. Here's the code:
Main activity
package com.test;
import java.util.ArrayList;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.ListView;
public class IntentTest extends Activity {
/** Called when the activity is first created. */
ListView myListView = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
myListView = (ListView)findViewById(R.id.listView);
final ArrayList<String> items = new ArrayList<String>();
items.add("00408C944B9A");
final ArrayAdapter<String> aa;
aa = new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1,
items);
myListView.setAdapter(aa);
myListView.setOnItemClickListener(listClicked);
}
private OnItemClickListener listClicked = new OnItemClickListener() {
public void onItemClick(AdapterView<?> arg0, View arg1, int position, long id) {
// TODO Auto-generated method stub
Intent i = new Intent(IntentTest.this, OtherActivity.class);
i.putExtra("MAC", myListView.getItemAtPosition(position).toString());
startActivity(i);
}
};
}
Second activity
package com.test;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.TextView;
import com.test.mjpeg.mjpegsample.MjpegView.*;
import com.test.parser.JSONParse;
public class OtherActivity extends Activity {
/** Called when the activity is first created. */
private MjpegView mv;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Bundle extras = getIntent().getExtras();
if (extras != null){
String mac = (String)extras.get("MAC");
Log.i("Other", "---->" + mac);
TextView tv = (TextView)findViewById(R.id.textView);
tv.setText(mac);
String URL = "myurl";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(true);
}
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
I found this code over the internet some time ago, maybe it will be of some help to you.
MjpegSample Class
package de.mjpegsample;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import de.mjpegsample.MjpegView.MjpegInputStream;
import de.mjpegsample.MjpegView.MjpegView;
public class MjpegSample extends Activity {
private MjpegView mv;
private static final int MENU_QUIT = 1;
/* Creates the menu items */
public boolean onCreateOptionsMenu(Menu menu) {
menu.add(0, MENU_QUIT, 0, "Quit");
return true;
}
/* Handles item selections */
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case MENU_QUIT:
finish();
return true;
}
return false;
}
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://gamic.dnsalias.net:7001/img/video.mjpeg";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN, WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(false);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
MjpegView Class
package de.mjpegsample.MjpegView;
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) {}
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) {
super(context);
init(context);
}
public void surfaceCreated(SurfaceHolder holder) {
surfaceDone = true;
}
public void showFps(boolean b) {
showFps = b;
}
public void setSource(MjpegInputStream source) {
mIn = source;
startPlayback();
}
public void setOverlayPaint(Paint p) {
overlayPaint = p;
}
public void setOverlayTextColor(int c) {
overlayTextColor = c;
}
public void setOverlayBackgroundColor(int c) {
overlayBackgroundColor = c;
}
public void setOverlayPosition(int p) {
ovlPos = p;
}
public void setDisplayMode(int s) {
displayMode = s;
}
}
MjpegInputStream Class
package de.mjpegsample.MjpegView;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSeqeunce(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSeqeunce(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
If you need any more info, let me know, I'll help in any way I can.
FYI: I did not write SimpleMjpegView; you can find more up-to-date code here.
For those looking to get this working with AsyncTask (and thus working on Ice Cream Sandwich (ICS), 4.0.4), see Android ICS and MJPEG using AsyncTask.
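A minimal sketch of that idea, assuming the MjpegInputStream and MjpegView classes above: open the stream off the main thread and hand it to the view once it is ready (on ICS and later, network access on the main thread throws NetworkOnMainThreadException):
// Sketch only: do the HTTP connect in doInBackground, attach the stream on the UI thread.
private class ConnectTask extends AsyncTask<String, Void, MjpegInputStream> {
    @Override
    protected MjpegInputStream doInBackground(String... urls) {
        return MjpegInputStream.read(urls[0]); // blocking network call, kept off the main thread
    }

    @Override
    protected void onPostExecute(MjpegInputStream stream) {
        if (stream != null) {
            mv.setSource(stream); // setSource() also starts playback in the MjpegView above
            mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
        }
    }
}
// usage, e.g. in onCreate after setContentView(mv):
// new ConnectTask().execute(URL);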
I had the same problem and tried using custom MJPEG viewers. While they worked, they proved unstable and clumsy for me.
I simply use a WebView instead. In Android Studio, just drag and drop a WebView into your layout; mine doesn't cover the entire screen, only half of it.
Then, to fit the video best, add this in your onCreate:
If using Kotlin:
webView.settings.loadWithOverviewMode = true
webView.settings.useWideViewPort = true
If using Java:
webView.getSettings().setLoadWithOverviewMode(true);
webView.getSettings().setUseWideViewPort(true);
And then, on button click:
To start:
webView.loadUrl("http://$id:8081/cam.mjpeg") // start
To Stop:
webView.stopLoading()
webView.loadUrl("about:blank")
Hope this helps.
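Putting the Java variant together (a sketch; R.id.webView and the camera URL are placeholders for your own layout id and stream address):
// Sketch: show the MJPEG stream in a WebView instead of a custom viewer.
WebView webView = (WebView) findViewById(R.id.webView);
webView.getSettings().setLoadWithOverviewMode(true);
webView.getSettings().setUseWideViewPort(true);
// start streaming
webView.loadUrl("http://192.168.1.10:8081/cam.mjpeg");
// stop streaming
// webView.stopLoading();
// webView.loadUrl("about:blank");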