How to use GPUImage without scaling in Android

How can I use the GPUImage library without resizing the original image? I need to apply a filter such as Pixelate or Blur to an entire GPUImageView.
I thought of using GPUImage, but I don't know how to do it.

You can change the GPUImageRenderer class of the GPUImage library. The modified code is below; the key additions are the onSurfaceSize(width, height) method and the size check in onSurfaceChanged(), which clamp the viewport to the size you pass in instead of always using the surface's own size.
/*
* Copyright (C) 2012 CyberAgent
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.co.cyberagent.android.gpuimage;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView.Renderer;
import jp.co.cyberagent.android.gpuimage.util.TextureRotationUtil;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.LinkedList;
import java.util.Queue;
import static jp.co.cyberagent.android.gpuimage.util.TextureRotationUtil.TEXTURE_NO_ROTATION;
@SuppressLint("WrongCall")
@TargetApi(11)
public class GPUImageRenderer implements Renderer, PreviewCallback {
public static final int NO_IMAGE = -1;
static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
private GPUImageFilter mFilter;
public final Object mSurfaceChangedWaiter = new Object();
private int mGLTextureId = NO_IMAGE;
private SurfaceTexture mSurfaceTexture = null;
private final FloatBuffer mGLCubeBuffer;
private final FloatBuffer mGLTextureBuffer;
private IntBuffer mGLRgbBuffer;
private int mwidth, mheight;
private int mOutputWidth;
private int mOutputHeight;
private int mImageWidth;
private int mImageHeight;
private int mAddedPadding;
private final Queue<Runnable> mRunOnDraw;
private final Queue<Runnable> mRunOnDrawEnd;
private Rotation mRotation;
private boolean mFlipHorizontal;
private boolean mFlipVertical;
private GPUImage.ScaleType mScaleType = GPUImage.ScaleType.CENTER_CROP;
public GPUImageRenderer(final GPUImageFilter filter) {
mFilter = filter;
mRunOnDraw = new LinkedList<Runnable>();
mRunOnDrawEnd = new LinkedList<Runnable>();
mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
setRotation(Rotation.NORMAL, false, false);
}
@Override
public void onSurfaceCreated(final GL10 unused, final EGLConfig config) {
GLES20.glClearColor(0, 0, 0, 1);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
mFilter.init();
}
public void onSurfaceSize(final int width, final int height) {
mwidth = width;
mheight = height;
}
@Override
public void onSurfaceChanged(final GL10 gl, final int width, final int height) {
if (height > mheight) {
mOutputWidth = mwidth;
mOutputHeight = mheight;
GLES20.glViewport(0, 0, mwidth, mheight);
// GLES20.glViewport(0 + (mwidth / 2), 0 + (mheight / 2), mwidth, mheight);
mFilter.onOutputSizeChanged(mwidth, mheight);
} else {
mOutputWidth = width;
mOutputHeight = height;
GLES20.glViewport(0, 0, width, height);
mFilter.onOutputSizeChanged(width, height);
}
// GLES20.glViewport(20,-20, width, height);
GLES20.glUseProgram(mFilter.getProgram());
adjustImageScaling();
synchronized (mSurfaceChangedWaiter) {
mSurfaceChangedWaiter.notifyAll();
}
}
@Override
public void onDrawFrame(final GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
runAll(mRunOnDraw);
mFilter.onDraw(mGLTextureId, mGLCubeBuffer, mGLTextureBuffer);
runAll(mRunOnDrawEnd);
if (mSurfaceTexture != null) {
mSurfaceTexture.updateTexImage();
}
}
private void runAll(Queue<Runnable> queue) {
synchronized (queue) {
while (!queue.isEmpty()) {
queue.poll().run();
}
}
}
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
final Size previewSize = camera.getParameters().getPreviewSize();
if (mGLRgbBuffer == null) {
mGLRgbBuffer = IntBuffer.allocate(previewSize.width * previewSize.height);
}
if (mRunOnDraw.isEmpty()) {
runOnDraw(new Runnable() {
@Override
public void run() {
GPUImageNativeLibrary.YUVtoRBGA(data, previewSize.width, previewSize.height,
mGLRgbBuffer.array());
mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer, previewSize, mGLTextureId);
camera.addCallbackBuffer(data);
if (mImageWidth != previewSize.width) {
mImageWidth = previewSize.width;
mImageHeight = previewSize.height;
adjustImageScaling();
}
}
});
}
}
public void setUpSurfaceTexture(final Camera camera) {
runOnDraw(new Runnable() {
@Override
public void run() {
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mSurfaceTexture = new SurfaceTexture(textures[0]);
try {
camera.setPreviewTexture(mSurfaceTexture);
camera.setPreviewCallback(GPUImageRenderer.this);
camera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
});
}
public void setFilter(final GPUImageFilter filter) {
runOnDraw(new Runnable() {
@Override
public void run() {
final GPUImageFilter oldFilter = mFilter;
mFilter = filter;
if (oldFilter != null) {
oldFilter.destroy();
}
mFilter.init();
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(mOutputWidth, mOutputHeight);
}
});
}
public void setFilter3(final GPUImageFilter filter) {
runOnDraw(new Runnable() {
@Override
public void run() {
final GPUImageFilter oldFilter = mFilter;
mFilter = filter;
// if (oldFilter != null) {
// oldFilter.destroy();
// }
mFilter.init();
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(mOutputWidth, mOutputHeight);
}
});
}
public void deleteImage() {
runOnDraw(new Runnable() {
@Override
public void run() {
GLES20.glDeleteTextures(1, new int[]{
mGLTextureId
}, 0);
mGLTextureId = NO_IMAGE;
}
});
}
public void setImageBitmap(final Bitmap bitmap) {
setImageBitmap(bitmap, true);
}
public void setImageBitmap(final Bitmap bitmap, final boolean recycle) {
if (bitmap == null) {
return;
}
runOnDraw(new Runnable() {
@Override
public void run() {
Bitmap resizedBitmap = null;
if (bitmap.getWidth() % 2 == 1) {
resizedBitmap = Bitmap.createBitmap(bitmap.getWidth() + 1, bitmap.getHeight(),
Bitmap.Config.ARGB_8888);
Canvas can = new Canvas(resizedBitmap);
can.drawARGB(0x00, 0x00, 0x00, 0x00);
can.drawBitmap(bitmap, 0, 0, null);
mAddedPadding = 1;
} else {
mAddedPadding = 0;
}
mGLTextureId = OpenGlUtils.loadTexture(
resizedBitmap != null ? resizedBitmap : bitmap, mGLTextureId, recycle);
if (resizedBitmap != null) {
resizedBitmap.recycle();
}
mImageWidth = bitmap.getWidth();
mImageHeight = bitmap.getHeight();
adjustImageScaling();
}
});
}
public void setScaleType(GPUImage.ScaleType scaleType) {
mScaleType = scaleType;
}
protected int getFrameWidth() {
return mOutputWidth;
}
protected int getFrameHeight() {
return mOutputHeight;
}
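// adjustImageScaling(): recomputes the vertex quad and texture co-ordinates
// whenever the image or output size changes, so the image keeps its aspect
// ratio under the current scale type.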
private void adjustImageScaling() {
float outputWidth = mOutputWidth;
float outputHeight = mOutputHeight;
if (mRotation == Rotation.ROTATION_270 || mRotation == Rotation.ROTATION_90) {
outputWidth = mOutputHeight;
outputHeight = mOutputWidth;
}
float ratio1 = outputWidth / mImageWidth;
float ratio2 = outputHeight / mImageHeight;
float ratioMax = Math.max(ratio1, ratio2);
int imageWidthNew = (Math.round(mImageWidth * ratioMax));
int imageHeightNew = (Math.round(mImageHeight * ratioMax));
float ratioWidth = imageWidthNew / (outputWidth);
float ratioHeight = imageHeightNew / (outputHeight);
float[] cube = CUBE;
float[] textureCords = TextureRotationUtil.getRotation(mRotation, mFlipHorizontal, mFlipVertical);
if (mScaleType == GPUImage.ScaleType.CENTER_CROP) {
float distHorizontal = (1 - 1 / ratioWidth) / 2;
float distVertical = (1 - 1 / ratioHeight) / 2;
textureCords = new float[]{
addDistance(textureCords[0], distHorizontal), addDistance(textureCords[1], distVertical),
addDistance(textureCords[2], distHorizontal), addDistance(textureCords[3], distVertical),
addDistance(textureCords[4], distHorizontal), addDistance(textureCords[5], distVertical),
addDistance(textureCords[6], distHorizontal), addDistance(textureCords[7], distVertical),
};
} else {
cube = new float[]{
CUBE[0] * ratioWidth, CUBE[1] * ratioHeight,
CUBE[2] * ratioWidth, CUBE[3] * ratioHeight,
CUBE[4] * ratioWidth, CUBE[5] * ratioHeight,
CUBE[6] * ratioWidth, CUBE[7] * ratioHeight,
};
}
mGLCubeBuffer.clear();
mGLCubeBuffer.put(cube).position(0);
mGLTextureBuffer.clear();
mGLTextureBuffer.put(textureCords).position(0);
}
private float addDistance(float coordinate, float distance) {
return coordinate == 0.0f ? distance : 1 - distance;
}
public void setRotationCamera(final Rotation rotation, final boolean flipHorizontal,
final boolean flipVertical) {
setRotation(rotation, flipVertical, flipHorizontal);
}
public void setRotation(final Rotation rotation, final boolean flipHorizontal,
final boolean flipVertical) {
mRotation = rotation;
mFlipHorizontal = flipHorizontal;
mFlipVertical = flipVertical;
adjustImageScaling();
}
public Rotation getRotation() {
return mRotation;
}
public boolean isFlippedHorizontally() {
return mFlipHorizontal;
}
public boolean isFlippedVertically() {
return mFlipVertical;
}
protected void runOnDraw(final Runnable runnable) {
synchronized (mRunOnDraw) {
mRunOnDraw.add(runnable);
}
}
protected void runOnDrawEnd(final Runnable runnable) {
synchronized (mRunOnDrawEnd) {
mRunOnDrawEnd.add(runnable);
}
}
}
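To drive this from your Activity you would pass the desired output size through onSurfaceSize() before the surface is laid out, then use GPUImage as usual. A minimal sketch, assuming a recent stock GPUImage build (GPUImagePixelationFilter, GPUImageGaussianBlurFilter and ScaleType.CENTER_INSIDE exist there, but verify against your copy; reaching onSurfaceSize() needs a small pass-through of your own, since GPUImage keeps its renderer private):
// Sketch only: apply a Pixelate or Blur filter without the default
// CENTER_CROP resize. 'glSurfaceView' and 'originalBitmap' are placeholders.
GPUImage gpuImage = new GPUImage(context);
gpuImage.setGLSurfaceView(glSurfaceView);
// CENTER_INSIDE letterboxes the image instead of cropping it to the view.
gpuImage.setScaleType(GPUImage.ScaleType.CENTER_INSIDE);
gpuImage.setImage(originalBitmap); // full-resolution bitmap, no pre-scaling
gpuImage.setFilter(new GPUImagePixelationFilter());
// or: gpuImage.setFilter(new GPUImageGaussianBlurFilter());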

Related

Texture with alpha channel not working on 3ds model in JPCT

About my problem: what is the way to get textures with an alpha channel to work?
I use a PNG texture with an alpha channel, but the result is black in the transparent areas.
Android program result: [screenshot: transparent areas render as black]
3ds model in Cheetah3D: [screenshot of the original model]
This is my code:
package testoc.biz.testproject;
import android.app.Activity;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.view.MotionEvent;
import com.threed.jpct.Camera;
import com.threed.jpct.FrameBuffer;
import com.threed.jpct.Light;
import com.threed.jpct.Loader;
import com.threed.jpct.Logger;
import com.threed.jpct.Matrix;
import com.threed.jpct.Object3D;
import com.threed.jpct.Primitives;
import com.threed.jpct.RGBColor;
import com.threed.jpct.SimpleVector;
import com.threed.jpct.Texture;
import com.threed.jpct.TextureManager;
import com.threed.jpct.World;
import com.threed.jpct.util.BitmapHelper;
import com.threed.jpct.util.MemoryHelper;
import java.io.InputStream;
import java.lang.reflect.Field;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;
public class FullscreenActivity extends Activity {
// Used to handle pause and resume...
private static FullscreenActivity master = null;
private GLSurfaceView mGLView;
private MyRenderer renderer = null;
private FrameBuffer fb = null;
private World world = null;
private RGBColor back = new RGBColor(50, 50, 100);
private float touchTurn = 0;
private float touchTurnUp = 0;
private float xpos = -1;
private float ypos = -1;
private Object3D cube = null;
private int fps = 0;
private boolean gl2 = true;
private Light sun = null;
protected void onCreate(Bundle savedInstanceState) {
if (master != null) {
copy(master);
}
super.onCreate(savedInstanceState);
mGLView = new GLSurfaceView(getApplication());
if (gl2) {
mGLView.setEGLContextClientVersion(2);
} else {
mGLView.setEGLConfigChooser(new GLSurfaceView.EGLConfigChooser() {
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
int[] attributes = new int[] { EGL10.EGL_DEPTH_SIZE, 16, EGL10.EGL_NONE };
EGLConfig[] configs = new EGLConfig[1];
int[] result = new int[1];
egl.eglChooseConfig(display, attributes, configs, 1, result);
return configs[0];
}
});
}
renderer = new MyRenderer();
mGLView.setRenderer(renderer);
setContentView(mGLView);
}
@Override
protected void onPause() {
super.onPause();
mGLView.onPause();
}
@Override
protected void onResume() {
super.onResume();
mGLView.onResume();
}
@Override
protected void onStop() {
super.onStop();
}
private void copy(Object src) {
try {
Logger.log("Copying data from master Activity!");
Field[] fs = src.getClass().getDeclaredFields();
for (Field f : fs) {
f.setAccessible(true);
f.set(this, f.get(src));
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public boolean onTouchEvent(MotionEvent me) {
if (me.getAction() == MotionEvent.ACTION_DOWN) {
xpos = me.getX();
ypos = me.getY();
return true;
}
if (me.getAction() == MotionEvent.ACTION_UP) {
xpos = -1;
ypos = -1;
touchTurn = 0;
touchTurnUp = 0;
return true;
}
if (me.getAction() == MotionEvent.ACTION_MOVE) {
float xd = me.getX() - xpos;
float yd = me.getY() - ypos;
xpos = me.getX();
ypos = me.getY();
touchTurn = xd / -100f;
touchTurnUp = yd / -100f;
return true;
}
try {
Thread.sleep(15);
} catch (Exception e) {
// No need for this...
}
return super.onTouchEvent(me);
}
protected boolean isFullscreenOpaque() {
return true;
}
class MyRenderer implements GLSurfaceView.Renderer {
private long time = System.currentTimeMillis();
public MyRenderer() {
}
private Object3D loadModel( float scale){
InputStream stream = getResources().openRawResource(R.raw.cube);
Object3D[] model = Loader.load3DS(stream, scale);
Object3D o3d = new Object3D(0);
Object3D temp = null;
for (int i = 0; i < model.length; i++) {
temp = model[i];
temp.setCenter(SimpleVector.ORIGIN);
temp.rotateX((float)( -0.5*Math.PI));
temp.rotateMesh();
temp.setRotationMatrix(new Matrix());
o3d = Object3D.mergeObjects(o3d, temp);
o3d.build();
}
return o3d;
}
public void onSurfaceChanged(GL10 gl, int w, int h) {
if (fb != null) {
fb.dispose();
}
if (gl2) {
fb = new FrameBuffer(w, h); // OpenGL ES 2.0 constructor
} else {
fb = new FrameBuffer(gl, w, h); // OpenGL ES 1.x constructor
}
if (master == null) {
world = new World();
world.setAmbientLight(20, 20, 20);
sun = new Light(world);
sun.setIntensity(250, 250, 250);
Texture texture = new Texture(BitmapHelper.rescale(BitmapHelper.convert(getResources().getDrawable(R.drawable.gradient)), 256, 256),true);
TextureManager.getInstance().addTexture("gradient.png", texture);
cube = loadModel(20);
cube.strip();
cube.build();
world.addObject(cube);
Camera cam = world.getCamera();
cam.moveCamera(Camera.CAMERA_MOVEOUT, 50);
cam.lookAt(cube.getTransformedCenter());
SimpleVector sv = new SimpleVector();
sv.set(cube.getTransformedCenter());
sv.y -= 100;
sv.z -= 100;
sun.setPosition(sv);
MemoryHelper.compact();
if (master == null) {
Logger.log("Saving master Activity!");
master = FullscreenActivity.this;
}
}
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
public void onDrawFrame(GL10 gl) {
if (touchTurn != 0) {
cube.rotateY(touchTurn);
touchTurn = 0;
}
if (touchTurnUp != 0) {
cube.rotateX(touchTurnUp);
touchTurnUp = 0;
}
fb.clear(back);
world.renderScene(fb);
world.draw(fb);
fb.display();
if (System.currentTimeMillis() - time >= 1000) {
Logger.log(fps + "fps");
fps = 0;
time = System.currentTimeMillis();
}
fps++;
}
}
}
I'm using the jPCT library for Android.
I've fixed it by adding glEnable, as suggested by @Reigertje:
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
gl.glEnable(GL10.GL_BLEND);
gl.glBlendFunc(GL10.GL_ONE, GL10.GL_ONE_MINUS_SRC_ALPHA);
}
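For completeness: jPCT also gates blending per object, so depending on the build it may be worth enabling transparency on the object itself as well. A hedged sketch using jPCT's Object3D API (the value is something to tune; a negative value disables transparency):
// Sketch: let jPCT actually blend the texture's alpha for this object.
cube.setTransparency(255);
cube.setTransparencyMode(Object3D.TRANSPARENCY_MODE_DEFAULT);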

Add circle with co-ordinates and text over the rendered shape in Android

This is the RoofShape class
package com.view9.stoddart.opengl;
import android.opengl.GLU;
import android.util.Log;
import com.view9.stoddart.model.OpenGlPointObject;
import com.view9.stoddart.model.OpenGlRoofObject;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.ArrayList;
import java.util.List;
import javax.microedition.khronos.opengles.GL10;
/**
* Created by view9 on 10/25/15.
*/
public class RoofShape {
ArrayList<CoOrdinatesModel> cordinateList;
List<Short> indicesList;
private FloatBuffer mVertexBuffer = null;
private ShortBuffer mShapeBorderIndicesBuffer = null;
private int mNumOfShapeBorderIndices = 0;
OpenGlRoofObject roofObject;
public RoofShape(OpenGlRoofObject roofObject) {
this.roofObject = roofObject;
setAllBuffers();
}
public void drawShape(GL10 gl) {
// Log.d("#####", "draw called");
// Specifies the location and data format of an array of vertex
// coordinates to use when rendering.
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffer);
// Draw all lines
gl.glDrawElements(GL10.GL_LINES, mNumOfShapeBorderIndices,
GL10.GL_UNSIGNED_SHORT, mShapeBorderIndicesBuffer);
GLU.gluLookAt(gl, 0, 0, -5, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
}
private void setAllBuffers() {
cordinateList = new ArrayList<>();
cordinateList.add(new CoOrdinatesModel(Float.parseFloat("0.0"), Float.parseFloat("0.0"), Float.parseFloat("0.0")));
for (int pointCount = 0; pointCount < roofObject.getPoinstList().size(); pointCount++) {
OpenGlPointObject pointObject = roofObject.getPoinstList().get(pointCount);
cordinateList.add(new CoOrdinatesModel(pointObject.getxAsis(), pointObject.getyAxis(), pointObject.getzAxis()));
}
/**====================================
* working with the indices for line join
* adding cordinates to indicesList
======================================*/
indicesList = new ArrayList<>();
for (int indicesCount = 0; indicesCount < roofObject.getLinesList().size(); indicesCount++) {
String p = roofObject.getLinesList().get(indicesCount).getPath();
for (String strPoint : p.split(",")) {
//spliting the string and eleminating the prefix string
String prefixStringEleminating = strPoint.substring(1);
Short intPoint = Short.parseShort(prefixStringEleminating);
indicesList.add(intPoint);
}
}
updateVertexListToArray(cordinateList);
updateVerticesListToArray(indicesList);
}
private void updateVerticesListToArray(List<Short> list) {
/**
* For converting indices list to indices array
*/
ArrayList<Short> indicesList = new ArrayList<>();
indicesList.addAll(list);
short[] pointsToJoinList = new short[indicesList.size()];
int indicesCount = 0;
for (short shortIndices : indicesList) {
pointsToJoinList[indicesCount++] = shortIndices;
}
mNumOfShapeBorderIndices = pointsToJoinList.length;
ByteBuffer tbibb = ByteBuffer.allocateDirect(pointsToJoinList.length * 2);
tbibb.order(ByteOrder.nativeOrder());
mShapeBorderIndicesBuffer = tbibb.asShortBuffer();
mShapeBorderIndicesBuffer.put(pointsToJoinList);
mShapeBorderIndicesBuffer.position(0);
}
private void updateVertexListToArray(ArrayList<CoOrdinatesModel> cordinateList) {
/**============================
* working with the vertexes
* adding cordinates to VertexList
===============================*/
List<Float> vList = new ArrayList<>();
int loopSize = cordinateList.size();
for (int i = 0; i < loopSize; i++) {
vList.add(cordinateList.get(i).getxAxis());
vList.add(cordinateList.get(i).getyAxis());
vList.add(cordinateList.get(i).getzAxis());
}
/**
* converting vertex list to array
*/
float[] vertexlist = new float[vList.size()];
int count = 0;
for (float f : vList) {
vertexlist[count++] = f;
}
ByteBuffer vbb = ByteBuffer.allocateDirect(vertexlist.length * 4);
vbb.order(ByteOrder.nativeOrder());
mVertexBuffer = vbb.asFloatBuffer();
mVertexBuffer.put(vertexlist);
mVertexBuffer.position(0);
}
public void setFlashingPointTOCordinatelist(GL10 gl, ArrayList<CoOrdinatesModel> flashingPoint) {
cordinateList.addAll(flashingPoint);
mVertexBuffer.clear();
mShapeBorderIndicesBuffer.clear();
updateVertexListToArray(cordinateList);
Short initialPoint = Short.parseShort("0");
Short previousUpdated = Short.parseShort("0");
for (int i = 1; i <= 5; i++) {
String totalNo = (cordinateList.size() + 1) + i + "";
Short indice = Short.parseShort(totalNo);
String str = indice + "";
if (i == 1) {
indicesList.add(Short.parseShort(str));
initialPoint = Short.parseShort(str);
} else if (i == 2) {
indicesList.add(Short.parseShort(str));
previousUpdated = Short.parseShort(str);
} else if (i == 3 || i == 4) {
indicesList.add(previousUpdated);
indicesList.add(Short.parseShort(str));
previousUpdated = Short.parseShort(str);
} else if (i == 5) {
indicesList.add(previousUpdated);
indicesList.add(initialPoint);
}
}
updateVerticesListToArray(indicesList);
drawShape(gl);
}
}
This is my Renderer class
package com.view9.stoddart.opengl;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.Log;
import com.view9.stoddart.model.OpenGlRoofObject;
import java.util.ArrayList;
import javax.microedition.khronos.opengles.GL10;
public class StoddartNewRenderer implements GLSurfaceView.Renderer {
private Context mContext;
public float mAngleX = 0.0f;
public float mAngleY = 0.0f;
public float mAngleZ = 0.0f;
private float mPreviousX;
private float mPreviousY;
private final float TOUCH_SCALE_FACTOR = 0.6f;
private OpenGlRoofObject roofObject;
GL10 glForShape;
RoofShape roofShape;
public StoddartNewRenderer(Context context, OpenGlRoofObject roofObject) {
mContext = context;
this.roofObject = roofObject;
roofShape = new RoofShape(roofObject);
}
public void onDrawFrame(GL10 gl) {
// Log.d("####","draw frame called");
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
gl.glTranslatef(0.0f, 0.0f, -3.0f);
gl.glRotatef(mAngleX, 1, 0, 0);
gl.glRotatef(mAngleY, 0, 1, 0);
gl.glRotatef(mAngleZ, 0, 0, 1);
// Set line color to black
gl.glColor4f(0.0f, 0.0f, 0.0f, 1.0f);
// gl.glColorMask(true, false, false, true);
roofShape.drawShape(gl);
//testing
glForShape = gl;
}
#Override
public void onSurfaceCreated(GL10 gl, javax.microedition.khronos.egl.EGLConfig config) {
Log.d("####", "surface created");
gl.glClearColor(1f, 1f, 1f, 1);
gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
gl.glEnable(GL10.GL_DEPTH_TEST);
// Enabled the vertex buffer for writing and to be used during rendering.
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
if (height == 0) height = 1;
gl.glViewport(0, 0, width, height);
float aspect = (float) width / height;
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glFrustumf(-aspect, aspect, -1.0f, 1.0f, 1.0f, 0.0f);
Log.d("####", "surface is changed");
}
public void drawSelectedShape(float x, float y) {
ArrayList<CoOrdinatesModel> flashingPoint = new ArrayList<>();
flashingPoint.add(new CoOrdinatesModel(x - 0.1f, y, 0f));
flashingPoint.add(new CoOrdinatesModel(x, y + 0.1f, 0f));
flashingPoint.add(new CoOrdinatesModel(x + 0.1f, y, 0f));
flashingPoint.add(new CoOrdinatesModel(x, y - 0.1f, 0f));
roofShape.setFlashingPointTOCordinatelist(glForShape, flashingPoint);
}
}
This is my GLSurfaceView class
package com.view9.stoddart.opengl;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
/**
* Created by view9 on 10/12/15.
*/
public class StoddartOpenGlSurfaceView extends GLSurfaceView {
StoddartNewRenderer renderer;
public StoddartOpenGlSurfaceView(Context context) {
super(context);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
public StoddartOpenGlSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public void showRenderer(StoddartNewRenderer renderer) {
this.renderer = renderer;
}
#Override
public boolean onTouchEvent(MotionEvent e) {
// return renderer.onTouchEvent(event);
float x = e.getX() / 100;
float y = e.getY() / 100;
Log.d("position", "x-axis: " + x + " y-axis: " + y);
renderer.drawSelectedShape(x, y);
requestRender();
return true;
}
}
And this is the activity where I've set up the GLSurfaceView.
The data is fetched from the API in XML format; I've parsed it and stored it as a roofObject.
package com.view9.stoddart.activities;
import android.app.Dialog;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.LinearLayout;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.view9.stoddart.ApiCallConfiguration.ServerConfig;
import com.view9.stoddart.ApiCallConfiguration.WebService;
import com.view9.stoddart.R;
import com.view9.stoddart.fragment.ItemsSelectFragment;
import com.view9.stoddart.model.OpenGlRoofObject;
import com.view9.stoddart.model.SelectItemsDto;
import com.view9.stoddart.opengl.StoddartNewRenderer;
import com.view9.stoddart.opengl.StoddartOpenGlSurfaceView;
import com.view9.stoddart.utills.AppController;
import com.view9.stoddart.utills.Cache;
import com.view9.stoddart.utills.CustomDialogs;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.Serializable;
import java.util.ArrayList;
/**
* Created by view9 on 6/8/15.
*/
public class DrawingFlashingActivity extends BaseActivity {
DrawingFlashingActivity instance = null;
public DrawingFlashingActivity getInstance() {
return instance;
}
OpenGlRoofObject roofObject;
private LinearLayout gridLayout, flashingDiagramLayout;
// StoddartRenderer renderer;
StoddartNewRenderer renderer;
StoddartOpenGlSurfaceView stoddartOpenGlSurfaceView;
Response.Listener<String> response;
Response.ErrorListener errorListener;
Dialog pDialog;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
instance = DrawingFlashingActivity.this;
roofObject = Cache.cachedRoofObject;
initializeUiComponent();
stoddartOpenGlSurfaceView = (StoddartOpenGlSurfaceView) findViewById(R.id.opengl_surface_view);
renderer = new StoddartNewRenderer(getInstance(), roofObject);
stoddartOpenGlSurfaceView.setRenderer(renderer);
stoddartOpenGlSurfaceView.showRenderer(renderer);
gettingServerResponse();
pDialog = CustomDialogs.progressDialog(getInstance(), AppController.LOADING);
}
private void gettingServerResponse() {
response = new Response.Listener<String>() {
@Override
public void onResponse(String s) {
Log.d("response", s);
pDialog.dismiss();
try {
JSONObject nodeObject = new JSONObject(s);
Boolean status = nodeObject.getBoolean("success");
int code = nodeObject.getInt("code");
if (code == 1 && status) {
Cache.FLASHING_TYPE = s;
parseFlashingType(s);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
};
errorListener = new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError volleyError) {
if (pDialog.isShowing()) {
pDialog.dismiss();
}
volleyError.printStackTrace();
}
};
}
private void parseFlashingType(String s) {
try {
ArrayList<SelectItemsDto> itemslist = new ArrayList<>();
JSONObject nodeObject = new JSONObject(s);
JSONArray dataArray = nodeObject.getJSONArray("data");
for (int i = 0; i < dataArray.length(); i++) {
String type = dataArray.get(i).toString();
itemslist.add(new SelectItemsDto("i", type, false, false));
}
ItemsSelectFragment alertdFragment = new ItemsSelectFragment();
Bundle bundle = new Bundle();
bundle.putString("title", "Choose Flashing Type");
bundle.putBoolean("isSingleSelect", true);
bundle.putSerializable("itemsList", (Serializable) itemslist);
alertdFragment.setArguments(bundle);
alertdFragment.show(getSupportFragmentManager(), "Alert Dialog Fragment");
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public String getToolbarTitle() {
return "Flashing";
}
@Override
public int getResourceLayout() {
return R.layout.layout_flashing_measure_activity;
}
@Override
public int getActivityId() {
return 8;
}
@Override
protected void onPause() {
super.onPause();
stoddartOpenGlSurfaceView.onPause();
}
@Override
protected void onResume() {
super.onResume();
stoddartOpenGlSurfaceView.onResume();
}
private void initializeUiComponent() {
gridLayout = (LinearLayout) findViewById(R.id.l_gridLayout);
gridLayout.setVisibility(View.GONE);
flashingDiagramLayout = (LinearLayout) findViewById(R.id.flashing_diagram_layout);
flashingDiagramLayout.setVisibility(View.VISIBLE);
setListeners();
}
private void setListeners() {
ivTick.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (Cache.FLASHING_TYPE != "") {
parseFlashingType(Cache.FLASHING_TYPE);
} else {
pDialog.show();
WebService.getFlashingType(response, errorListener, ServerConfig.GET_FLASHING_TYPE_URL);
}
}
});
}
}
With this code I can successfully draw the shape shown in the diagram; the diagram is of a roof. My question: I want to add a circle wherever the shape is touched, and there may be multiple circles. For example, if I touch the co-ordinate (1, 2, 1), a circle should be added at that point; if I then touch another point, the next circle should be added there, and so on. This is also shown in the second image.
I've spent 3 days on this without finding a solution. What I've tried in this code so far: whenever I touch the view, I calculate the x and y values and pass them to setFlashingPointTOCordinatelist(), which is defined in the RoofShape class.
Any kind of help will be appreciated, and please keep in mind that I'm very new to OpenGL. The first image shows what I can draw now; the second shows what should appear on touching the shape.
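A hedged sketch of one way to draw such a marker: render each touched point as its own GL_LINE_LOOP instead of splicing extra indices into the shared indicesList. All names and values below are illustrative, not from the posted code:
// Sketch: draw a marker circle at (cx, cy) with ES 1.x line drawing.
// Assumes GL_VERTEX_ARRAY is already enabled, as in onSurfaceCreated().
private void drawCircle(GL10 gl, float cx, float cy, float radius) {
    final int segments = 32; // smoothness of the circle outline
    float[] verts = new float[segments * 3];
    for (int i = 0; i < segments; i++) {
        double a = 2.0 * Math.PI * i / segments;
        verts[i * 3]     = cx + radius * (float) Math.cos(a);
        verts[i * 3 + 1] = cy + radius * (float) Math.sin(a);
        verts[i * 3 + 2] = 0f; // same plane as the roof outline
    }
    FloatBuffer buf = ByteBuffer.allocateDirect(verts.length * 4)
            .order(ByteOrder.nativeOrder()).asFloatBuffer();
    buf.put(verts).position(0);
    gl.glVertexPointer(3, GL10.GL_FLOAT, 0, buf);
    gl.glDrawArrays(GL10.GL_LINE_LOOP, 0, segments); // closed outline
}
Each tap would append its centre to a list that onDrawFrame walks, so the roof's vertex and index buffers never need rebuilding; mapping raw touch pixels into world co-ordinates properly would still call for GLU.gluUnProject rather than the fixed /100 divisor used in onTouchEvent.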

MediaPlayer freezes when playing video

I use MediaPlayer to play videos with GPUImage effects applied to them. On certain devices, after a short while (a few seconds or more; it seems random), the video stops playing and the image remains stuck, but the audio keeps playing most of the time. I render the MediaPlayer into a GLSurfaceView in order to be able to use GPUImage effects on top of it.
Here is the custom view:
public class FilteredVideoView extends GLSurfaceView {
private MediaPlayer mMediaPlayer = null;
private long time = 0;
private MyCustomRenderer renderer;
private boolean mediaPlayerInitialized = false;
private String videoPath;
public FilteredVideoView(Context context) {
super(context);
setWillNotDraw(false);
init();
//startFilterChangeThread();
}
public void setFilter(MyGPUImageFilter filter) {
renderer.setFilter(filter);
}
public void stopVideo() {
mMediaPlayer.stop();
}
public void prepareVideo() {
try {
mMediaPlayer.prepare();
} catch (Exception e) {
}
}
public void playVideo() {
initMediaPlayer();
mMediaPlayer.seekTo(0);
mMediaPlayer.start();
}
private void init() {
setEGLContextClientVersion(2);
getHolder().setFormat(PixelFormat.TRANSLUCENT);
setEGLConfigChooser(8, 8, 8, 8, 16, 0);
mMediaPlayer = new MediaPlayer();
//initMediaPlayer();
renderer = new MyCustomRenderer(new MyNoFilterFilter(), mMediaPlayer);
setRenderer(renderer);
}
private void initMediaPlayer() {
if (mediaPlayerInitialized) {
return;
}
mediaPlayerInitialized = true;
try {
FileInputStream fi = new FileInputStream(new File(videoPath));
mMediaPlayer.setDataSource(fi.getFD());
mMediaPlayer.prepare();
mMediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
onVideoPlaybackFinished();
}
});
} catch (Exception e) {
e.printStackTrace();
}
}
void onVideoPlaybackFinished() {
}
@Override
protected void onDetachedFromWindow() {
super.onDetachedFromWindow();
if (mMediaPlayer != null) {
mMediaPlayer.stop();
mMediaPlayer.release();
}
}
}
And here is the renderer.
public class MyCustomRenderer implements Renderer, SurfaceTexture.OnFrameAvailableListener {
public static final int NO_IMAGE = -1;
public static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
private MyGPUImageFilter mFilter;
public final Object mSurfaceChangedWaiter = new Object();
private int mTextureID = NO_IMAGE;
private SurfaceTexture mSurfaceTexture = null;
private final FloatBuffer mGLCubeBuffer;
private final FloatBuffer mGLTextureBuffer;
private IntBuffer mGLRgbBuffer;
private int mOutputWidth;
private int mOutputHeight;
private int mImageWidth;
private int mImageHeight;
private int mAddedPadding;
private final Queue<Runnable> mRunOnDraw;
private final Queue<Runnable> mRunOnDrawEnd;
private Rotation mRotation;
private boolean mFlipHorizontal;
private boolean mFlipVertical;
private GPUImage.ScaleType mScaleType = GPUImage.ScaleType.CENTER_CROP;
public SurfaceTexture mSurface;
public boolean updateSurface = false;
private MediaPlayer mMediaPlayer;
private int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private float[] mSTMatrix = new float[16];
public MyCustomRenderer(final MyGPUImageFilter filter, MediaPlayer mediaPlayer) {
mMediaPlayer = mediaPlayer;
mFilter = filter;
mRunOnDraw = new LinkedList<Runnable>();
mRunOnDrawEnd = new LinkedList<Runnable>();
mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
setRotation(Rotation.NORMAL, false, false);
Matrix.setIdentityM(mSTMatrix, 0);
}
@Override
public void onSurfaceCreated(final GL10 unused, final EGLConfig config) {
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);
Surface surface = new Surface(mSurface);
mMediaPlayer.setSurface(surface);
surface.release();
synchronized (this) {
updateSurface = false;
}
GLES20.glClearColor(0, 0, 0, 1);
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
mFilter.init();
//mMediaPlayer.start();
}
@Override
public void onSurfaceChanged(final GL10 gl, final int width, final int height) {
mOutputWidth = width;
mOutputHeight = height;
GLES20.glViewport(0, 0, width, height);
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(width, height);
adjustImageScaling();
synchronized (mSurfaceChangedWaiter) {
mSurfaceChangedWaiter.notifyAll();
}
}
@Override
public void onDrawFrame(final GL10 gl) {
synchronized (this) {
if (updateSurface) {
mSurface.updateTexImage();
mSurface.getTransformMatrix(mSTMatrix);
updateSurface = false;
} else {
//return;
}
}
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
runAll(mRunOnDraw);
mFilter.onDraw(mTextureID, mGLCubeBuffer, mGLTextureBuffer);
runAll(mRunOnDrawEnd);
/*if (mSurfaceTexture != null) {
mSurfaceTexture.updateTexImage();
}*/
}
private void runAll(Queue<Runnable> queue) {
synchronized (queue) {
while (!queue.isEmpty()) {
queue.poll().run();
}
}
}
public void setFilter(final MyGPUImageFilter filter) {
runOnDraw(new Runnable() {
@Override
public void run() {
final MyGPUImageFilter oldFilter = mFilter;
mFilter = filter;
if (oldFilter != null) {
oldFilter.destroy();
}
mFilter.init();
GLES20.glUseProgram(mFilter.getProgram());
mFilter.onOutputSizeChanged(mOutputWidth, mOutputHeight);
}
});
}
public void deleteImage() {
runOnDraw(new Runnable() {
@Override
public void run() {
GLES20.glDeleteTextures(1, new int[]{
mTextureID
}, 0);
mTextureID = NO_IMAGE;
}
});
}
public void setScaleType(GPUImage.ScaleType scaleType) {
mScaleType = scaleType;
}
protected int getFrameWidth() {
return mOutputWidth;
}
protected int getFrameHeight() {
return mOutputHeight;
}
private void adjustImageScaling() {
float outputWidth = mOutputWidth;
float outputHeight = mOutputHeight;
if (mRotation == Rotation.ROTATION_270 || mRotation == Rotation.ROTATION_90) {
outputWidth = mOutputHeight;
outputHeight = mOutputWidth;
}
mImageWidth = App.screenW();
mImageHeight = App.screenH();
outputWidth = App.screenW();
outputHeight = App.screenH();
float ratio1 = outputWidth / mImageWidth;
float ratio2 = outputHeight / mImageHeight;
float ratioMax = Math.max(ratio1, ratio2);
int imageWidthNew = Math.round(mImageWidth * ratioMax);
int imageHeightNew = Math.round(mImageHeight * ratioMax);
float ratioWidth = imageWidthNew / outputWidth;
float ratioHeight = imageHeightNew / outputHeight;
float[] cube = CUBE;
float[] textureCords = TextureRotationUtil.getRotation(mRotation, mFlipHorizontal, mFlipVertical);
if (mScaleType == GPUImage.ScaleType.CENTER_CROP) {
float distHorizontal = (1 - 1 / ratioWidth) / 2;
float distVertical = (1 - 1 / ratioHeight) / 2;
textureCords = new float[]{
addDistance(textureCords[0], distHorizontal), addDistance(textureCords[1], distVertical),
addDistance(textureCords[2], distHorizontal), addDistance(textureCords[3], distVertical),
addDistance(textureCords[4], distHorizontal), addDistance(textureCords[5], distVertical),
addDistance(textureCords[6], distHorizontal), addDistance(textureCords[7], distVertical),
};
} else {
cube = new float[]{
CUBE[0] / ratioHeight, CUBE[1] / ratioWidth,
CUBE[2] / ratioHeight, CUBE[3] / ratioWidth,
CUBE[4] / ratioHeight, CUBE[5] / ratioWidth,
CUBE[6] / ratioHeight, CUBE[7] / ratioWidth,
};
}
mGLCubeBuffer.clear();
mGLCubeBuffer.put(cube).position(0);
mGLTextureBuffer.clear();
mGLTextureBuffer.put(textureCords).position(0);
}
private float addDistance(float coordinate, float distance) {
return coordinate == 0.0f ? distance : 1 - distance;
}
public void setRotationCamera(final Rotation rotation, final boolean flipHorizontal,
final boolean flipVertical) {
setRotation(rotation, flipVertical, flipHorizontal);
}
public void setRotation(final Rotation rotation) {
mRotation = rotation;
adjustImageScaling();
}
public void setRotation(final Rotation rotation,
final boolean flipHorizontal, final boolean flipVertical) {
mFlipHorizontal = flipHorizontal;
mFlipVertical = flipVertical;
setRotation(rotation);
}
public Rotation getRotation() {
return mRotation;
}
public boolean isFlippedHorizontally() {
return mFlipHorizontal;
}
public boolean isFlippedVertically() {
return mFlipVertical;
}
protected void runOnDraw(final Runnable runnable) {
synchronized (mRunOnDraw) {
mRunOnDraw.add(runnable);
}
}
protected void runOnDrawEnd(final Runnable runnable) {
synchronized (mRunOnDrawEnd) {
mRunOnDrawEnd.add(runnable);
}
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
updateSurface = true;
}
}
Any ideas what could be causing the issue? Maybe a memory leak?
Thank you for your help!
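Not a confirmed fix for the freeze above, but one thing worth ruling out: a GLSurfaceView in RENDERMODE_WHEN_DIRTY only draws when asked, so a stuck image with running audio can simply mean nothing requests frames any more. A hedged sketch ('glSurfaceView' standing in for the hosting view):
// Sketch: if the view is switched to RENDERMODE_WHEN_DIRTY, every decoded
// frame must explicitly wake the GL thread, otherwise playback looks frozen.
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    synchronized (this) {
        updateSurface = true;
    }
    glSurfaceView.requestRender(); // schedule a draw for this frame
}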

Create MJPEG video from pictures in Android (streaming from server)

I have a server that streams IP camera video (in this case my laptop acts as the IP camera) and stores JPG files that are accessible over the internet. I have this code that should capture these JPGs and make a video from them.
I am only able to get one frame and that's it; the program somehow does not refresh (the frame refreshes if I rotate the screen).
What I would like to know is:
1. Is there an easier way to display those JPEG pictures?
2. Why does my program show only one frame?
Main Activity
package de.mjpegsample;
import android.app.Activity;
import android.os.Bundle;
import android.view.Window;
import android.view.WindowManager;
import de.mjpegsample.MjpegView.MjpegInputStream;
import de.mjpegsample.MjpegView.MjpegView;
public class MjpegSample extends Activity {
private MjpegView mv;
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://xxx.xxx.x.xx:8080/cam_1.jpg";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(true);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
MjpegInputStream:
package de.mjpegsample.MjpegView;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSequence(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
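// readMjpegFrame(): finds the JPEG SOI marker (0xFFD8) to size the part
// header, reads Content-Length from it (falling back to scanning for the EOI
// marker 0xFFD9), then decodes exactly one JPEG frame into a Bitmap.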
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSequence(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
MjpegView
package de.mjpegsample.MjpegView;
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = true;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
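// destRect(): where to blit the decoded frame: centred at native size
// (SIZE_STANDARD), aspect-fit scaled (SIZE_BEST_FIT), or stretched to the
// whole surface (SIZE_FULLSCREEN).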
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) {}
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) { super(context); init(context); }
public void surfaceCreated(SurfaceHolder holder) { surfaceDone = true; }
public void showFps(boolean b) { showFps = b; }
public void setSource(MjpegInputStream source) { mIn = source; startPlayback();}
public void setOverlayPaint(Paint p) { overlayPaint = p; }
public void setOverlayTextColor(int c) { overlayTextColor = c; }
public void setOverlayBackgroundColor(int c) { overlayBackgroundColor = c; }
public void setOverlayPosition(int p) { ovlPos = p; }
public void setDisplayMode(int s) { displayMode = s; }
}
Strange... I changed String URL = "http://xxx.xxx.x.xx:8080/cam_1.jpg"; to String URL = "http://xxx.xxx.x.xx:8080/cam_1.cgi"; and it is working, with a little bit of delay, but that is OK.
So my last question is still unanswered: is there a simpler way to display the JPGs and make an MJPEG? Because this one takes a lot of coding to do :)
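On that last question: for plain still-JPEG snapshots, one lower-code option is to poll the image URL on a timer and push each decoded frame into an ImageView, skipping the MJPEG parsing entirely. A rough sketch (assumes an imageView field; the URL is the one from the question and the interval is illustrative):
// Sketch: poll the snapshot URL off the main thread, roughly 10 fps.
final Handler handler = new Handler(Looper.getMainLooper());
final ExecutorService io = Executors.newSingleThreadExecutor();
final Runnable poll = new Runnable() {
    @Override
    public void run() {
        io.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    InputStream in = new URL("http://xxx.xxx.x.xx:8080/cam_1.jpg").openStream();
                    final Bitmap frame = BitmapFactory.decodeStream(in);
                    in.close();
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            imageView.setImageBitmap(frame); // show latest frame
                        }
                    });
                } catch (IOException ignored) {
                    // drop this frame on network hiccups
                }
            }
        });
        handler.postDelayed(this, 100);
    }
};
handler.post(poll);
The trade-off is that there is no multipart stream to parse, at the cost of one HTTP request per frame, so the frame rate depends entirely on connection latency.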

Android VideoView RTSP delay

I am opening an RTSP stream from an IP camera in my Android application through a VideoView. The problem is that it has a huge delay, more than 20 seconds, although when viewing the camera from an ordinary PC via its browser there is no such delay. Any ideas, please?
My Code:
String path="rtsp://192.168.1.20/3gpp";
myVideoView.setVideoURI(Uri.parse(path));
myVideoView.setMediaController(new MediaController(this));
myVideoView.requestFocus();
myVideoView.start();
Use an MJPEG stream instead of RTSP. After lots of research, I've found an MJPEG viewer class that plays a live stream in near real time (depending on your network connection).
Here's the code: Android and MJPEG
You can do the following:
MjpegSample Class
package de.mjpegsample;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import de.mjpegsample.MjpegView.MjpegInputStream;
import de.mjpegsample.MjpegView.MjpegView;
public class MjpegSample extends Activity {
private MjpegView mv;
private static final int MENU_QUIT = 1;
/* Creates the menu items */
public boolean onCreateOptionsMenu(Menu menu) {
menu.add(0, MENU_QUIT, 0, "Quit");
return true;
}
/* Handles item selections */
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case MENU_QUIT:
finish();
return true;
}
return false;
}
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://gamic.dnsalias.net:7001/img/video.mjpeg";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN, WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(false);
}
public void onPause() {
super.onPause();
mv.stopPlayback();
}
}
The MjpegView and MjpegInputStream classes are essentially identical to the ones listed in full under the previous question ("Create MJPEG video from pictures in Android"), so they are not repeated here.
