opengles2 screen shot - android

I am trying to capture a screenshot of what is rendered on the screen. I have tried using glReadPixels, but the results were all zeros. I would also be happy to hear about other methods of capturing the GLSurfaceView. Thanks a lot!
The following is a code snippet I've obtained from another thread:
public Bitmap savePixels(int x, int y, int w, int h) {
    int b[] = new int[w * (y + h)];
    int bt[] = new int[w * h];
    IntBuffer ib = IntBuffer.wrap(b);
    ib.position(0);
    GLES20.glReadPixels(x, 0, w, y + h, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
    for (int i = 0, k = 0; i < h; i++, k++) {
        // Remember that the OpenGL bitmap is incompatible with the Android bitmap,
        // so some correction is needed.
        for (int j = 0; j < w; j++) {
            int pix = b[i * w + j];
            int pb = (pix >> 16) & 0xff;
            int pr = (pix << 16) & 0x00ff0000;
            int pix1 = (pix & 0xff00ff00) | pr | pb;
            bt[(h - k - 1) * w + j] = pix1;
            int test = pix;
            if (pix > 0) // debug output of the channel values
                System.out.println(test % 256 + " " + (test / 256) % 256 + " "
                        + (test / 256 / 256) % 256 + " " + test / 256 / 256 / 256);
        }
    }
    return Bitmap.createBitmap(bt, w, h, Bitmap.Config.ARGB_8888);
}
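A likely cause of the all-zero output: glReadPixels only returns data when it runs on the thread that owns the EGL context, i.e. the GLSurfaceView's renderer thread, while the frame's contents are still available. Calling it from the UI thread (for example from a button handler) is the usual reason for getting zeros. Below is a minimal sketch of the common pattern, assuming a standard GLSurfaceView.Renderer and assuming the savePixels method above is added as a member of the renderer; the class and field names are illustrative, not from the question:
import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

// Sketch: request the capture from the UI thread, perform the read on the GL thread.
public class CaptureRenderer implements GLSurfaceView.Renderer {

    private volatile boolean captureRequested;
    private int surfaceWidth, surfaceHeight;

    // Call this from the UI thread (e.g. a button click); the next frame does the read.
    public void requestCapture() {
        captureRequested = true;
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        surfaceWidth = width;
        surfaceHeight = height;
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        // ... draw the scene as usual ...
        if (captureRequested) {
            captureRequested = false;
            // savePixels is the method from the snippet above, assumed to be a member
            // of this renderer; called here it reads the frame that was just rendered,
            // on the thread that owns the EGL context.
            Bitmap shot = savePixels(0, 0, surfaceWidth, surfaceHeight);
            // hand "shot" back to the UI thread, e.g. via Activity.runOnUiThread
        }
    }
}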

Related

How to get the bitmap image from a GLSurfaceView

I am doing the following to get the bitmap image that has been set to a GLSurfaceView object:
glView.setDrawingCacheEnabled(true);
glView.measure(View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED),
        View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED));
glView.layout(0, 0, glView.getMeasuredWidth(), glView.getMeasuredHeight());
glView.buildDrawingCache(true);
Bitmap tmpbm = Bitmap.createBitmap(glView.getDrawingCache());
glView.setDrawingCacheEnabled(false);
But glView.getDrawingCache() returns null in this case, so the app crashes on the line Bitmap tmpbm = Bitmap.createBitmap(glView.getDrawingCache());
Why am I getting null there, and how do I fix this? Also, is there a different or better way to achieve my goal? Any help would be highly appreciated.
Try this method:
private Bitmap createBitmapFromGLSurface(int x, int y, int w, int h, GL10 gl)
        throws OutOfMemoryError {
    int bitmapBuffer[] = new int[w * h];
    int bitmapSource[] = new int[w * h];
    IntBuffer intBuffer = IntBuffer.wrap(bitmapBuffer);
    intBuffer.position(0);
    try {
        // Read the current framebuffer; this must run on the GL thread.
        gl.glReadPixels(x, y, w, h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, intBuffer);
        int offset1, offset2;
        for (int i = 0; i < h; i++) {
            offset1 = i * w;
            offset2 = (h - i - 1) * w;
            for (int j = 0; j < w; j++) {
                // Swap red and blue (glReadPixels returns RGBA bytes, while the
                // int[] passed to Bitmap.createBitmap expects ARGB) and flip the
                // rows vertically.
                int texturePixel = bitmapBuffer[offset1 + j];
                int blue = (texturePixel >> 16) & 0xff;
                int red = (texturePixel << 16) & 0x00ff0000;
                int pixel = (texturePixel & 0xff00ff00) | red | blue;
                bitmapSource[offset2 + j] = pixel;
            }
        }
    } catch (GLException e) {
        return null;
    }
    return Bitmap.createBitmap(bitmapSource, w, h, Bitmap.Config.ARGB_8888);
}
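A hedged usage note (not part of the original answer): the method above has to run on the GL thread, typically from inside onDrawFrame, and the resulting bitmap should be handed back to the UI thread before it touches any views. In the sketch below, activity, imageView, w, h and gl are placeholders:
final Bitmap screenshot = createBitmapFromGLSurface(0, 0, w, h, gl);
activity.runOnUiThread(new Runnable() {
    @Override
    public void run() {
        if (screenshot != null) {
            imageView.setImageBitmap(screenshot);
        }
    }
});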

how to get bitmap from GLSurfaceView in android

I have made a photo effect demo. In it, I apply different effects to an android.opengl.GLSurfaceView. That all works, but when I take a bitmap from that view, I get a bitmap with a black background around the image. I want to capture only the image itself (not that black background).
Can anyone please help me solve this?
Layout code:
<android.opengl.GLSurfaceView
    android:id="@+id/iv_effect_img"
    android:layout_width="fill_parent"
    android:layout_height="wrap_content" />
Java code:
public Bitmap takeScreenshot(GL10 mGL) {
    final int mWidth = mEffectView.getWidth();
    final int mHeight = mEffectView.getHeight();
    IntBuffer ib = IntBuffer.allocate(mWidth * mHeight);
    IntBuffer ibt = IntBuffer.allocate(mWidth * mHeight);
    mGL.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, ib);
    // Convert the upside-down, mirror-reversed image to a right-side-up normal image.
    for (int i = 0; i < mHeight; i++) {
        for (int j = 0; j < mWidth; j++) {
            ibt.put((mHeight - i - 1) * mWidth + j, ib.get(i * mWidth + j));
        }
    }
    Bitmap mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
    mBitmap.copyPixelsFromBuffer(ibt);
    Global.img = mBitmap;
    return mBitmap;
}
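One likely explanation, offered as a guess rather than a confirmed diagnosis: the black area is simply the GL clear color showing through wherever the photo does not fill the surface (the layout uses fill_parent width, so the surface can be larger than the image), and glReadPixels captures the whole surface. Assuming you know the rectangle the image is drawn into, a simple workaround is to crop the captured bitmap to it; the imageLeft/imageTop/imageWidth/imageHeight names below are illustrative, not from the question:
// Crop the captured screenshot to the rectangle the photo is actually drawn into,
// dropping the surrounding clear-color (black) area. The coordinates are in
// bitmap space (origin at the top-left), since takeScreenshot() already flips
// the image the right way up.
Bitmap full = takeScreenshot(mGL);
Bitmap cropped = Bitmap.createBitmap(full, imageLeft, imageTop, imageWidth, imageHeight);
Alternatively, setting a different clear color in the renderer (glClearColor before glClear) at least makes the unused area non-black.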

Change inSampleSize of a Bitmap

I am creating a bitmap from a GLSurfaceView and adding it to an ArrayList, but creating the bitmap from the GLSurfaceView throws an OutOfMemoryError.
CODE:
Bitmap bitmap = createBitmapFromGLSurface(0, 0, mEffectView.getWidth(),
        mEffectView.getHeight(), gl);
al_bitmaps.add(bitmap);
Method:
private Bitmap createBitmapFromGLSurface(int x, int y, int w, int h, GL10 gl)
        throws OutOfMemoryError {
    int bitmapBuffer[] = new int[w * h];
    int bitmapSource[] = new int[w * h];
    IntBuffer intBuffer = IntBuffer.wrap(bitmapBuffer);
    intBuffer.position(0);
    try {
        gl.glReadPixels(x, y, w, h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, intBuffer);
        int offset1, offset2;
        for (int i = 0; i < h; i++) {
            offset1 = i * w;
            offset2 = (h - i - 1) * w;
            for (int j = 0; j < w; j++) {
                int texturePixel = bitmapBuffer[offset1 + j];
                int blue = (texturePixel >> 16) & 0xff;
                int red = (texturePixel << 16) & 0x00ff0000;
                int pixel = (texturePixel & 0xff00ff00) | red | blue;
                bitmapSource[offset2 + j] = pixel;
            }
        }
    } catch (GLException e) {
        return null;
    }
    return Bitmap.createBitmap(bitmapSource, w, h, Bitmap.Config.ARGB_8888);
}
On larger-resolution devices (e.g. the Samsung Galaxy S4), my app crashes.
I would like to know how to set the inSampleSize of that bitmap.
First of all, I would recommend adding
android:largeHeap="true"
to your manifest file.
Second, if that does not help, have a look at https://stackoverflow.com/a/17839597/2956344
I also recommend finding out the maximum texture size of your GLSurfaceView, so you can limit your bitmap resolution programmatically:
Get Maximum OpenGL ES 2.0 Texture Size Limit in Android
P.S. I think you will also find information there about using GL10.
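As a hedged illustration of both suggestions (none of this is from the original answer): inSampleSize only applies when decoding with BitmapFactory, so for a bitmap built from glReadPixels the practical equivalent is to scale the capture down before keeping it; the texture-size query must run on the GL thread while a context is current. The /2 factor is just an example:
// Query the maximum texture size on the GL thread (gl is the GL10 passed to the renderer).
int[] maxTextureSize = new int[1];
gl.glGetIntegerv(GL10.GL_MAX_TEXTURE_SIZE, maxTextureSize, 0);

// Downscale the capture before storing it, so a full-resolution copy per entry
// does not exhaust the heap on high-resolution devices like the Galaxy S4.
Bitmap full = createBitmapFromGLSurface(0, 0, mEffectView.getWidth(),
        mEffectView.getHeight(), gl);
if (full != null) {
    Bitmap scaled = Bitmap.createScaledBitmap(full,
            full.getWidth() / 2, full.getHeight() / 2, true);
    full.recycle(); // release the full-size copy immediately
    al_bitmaps.add(scaled);
}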

COCOS 2D screen shot is black in Android

I am using the following code. The image is saved but it is BLACK.
Please see my code and tell me where I am going wrong.
I am using this code in the menu.
case R.id.id_menu_Save:
    Bitmap bmp = SavePixels(0, 0, 800, 400, CCDirector.sharedDirector().gl);
    File file = new File("/sdcard/test.jpg");
    try {
        file.createNewFile();
        FileOutputStream fos = new FileOutputStream(file);
        bmp.compress(CompressFormat.JPEG, 100, fos);
        Toast.makeText(getApplicationContext(), "Image Saved", 0).show();
        Log.i("Menu Save Button", "Image saved as JPEG");
    } catch (Exception e) {
        e.printStackTrace();
    }
    break;
This is my Save Image Function.
public static Bitmap SavePixels(int x, int y, int w, int h, GL10 gl) {
    int b[] = new int[w * (y + h)];
    int bt[] = new int[w * h];
    IntBuffer ib = IntBuffer.wrap(b);
    ib.position(0);
    gl.glReadPixels(x, 0, w, y + h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, ib);
    for (int i = 0, k = 0; i < h; i++, k++) {
        // Remember that the OpenGL bitmap is incompatible with the Android bitmap,
        // so some correction is needed.
        for (int j = 0; j < w; j++) {
            int pix = b[i * w + j];
            int pb = (pix >> 16) & 0xff;
            int pr = (pix << 16) & 0x00ff0000;
            int pix1 = (pix & 0xff00ff00) | pr | pb;
            bt[(h - k - 1) * w + j] = pix1;
        }
    }
    Bitmap sb = Bitmap.createBitmap(bt, w, h, Bitmap.Config.ARGB_8888);
    return sb;
}
Apart from the above, please point me in the right direction: if I have to get the pixels of the screen, what class/entity should I be exploring?
Just change the SavePixels method to the one below:
public static Bitmap SavePixels(int x, int y, int w, int h, GL10 gl) {
    int b[] = new int[w * (y + h)];
    int bt[] = new int[w * h];
    IntBuffer ib = IntBuffer.wrap(b);
    ib.position(0);
    gl.glReadPixels(x, 0, w, y + h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, ib);
    for (int i = 0, k = 0; i < h; i++, k++) {
        // Remember that the OpenGL bitmap is incompatible with the Android bitmap,
        // so some correction is needed.
        for (int j = 0; j < w; j++) {
            int pix = b[i * w + j];
            int pb = (pix >> 16) & 0xff;
            int pr = (pix << 16) & 0xffff0000;
            int pix1 = (pix & 0xff00ff00) | pr | pb;
            bt[(h - k - 1) * w + j] = pix1;
        }
    }
    Bitmap sb = Bitmap.createBitmap(bt, w, h, Bitmap.Config.ARGB_8888);
    return sb;
}
Also, just try changing to GL10.GL_RGB, or make changes to the Bitmap.Config; it might work.

Blurring an image in Android

I am using the following code to blur an image in Android, but it does not work. The final image I get has very distorted colors and is not the blur I want. What am I doing wrong?
public Bitmap blurBitmap(Bitmap bmpOriginal) {
    int width, height;
    height = bmpOriginal.getHeight();
    width = bmpOriginal.getWidth();
    Bitmap bmpBlurred = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
    for (int i = 1; i < width - 1; i++) {
        for (int j = 1; j < height - 1; j++) {
            int color = (int) getAverageOfPixel(bmpOriginal, i, j);
            bmpBlurred.setPixel(i, j, Color.argb(Color.alpha(color), Color.red(color),
                    Color.green(color), Color.blue(color)));
        }
    }
    return bmpBlurred;
}

private double getAverageOfPixel(Bitmap bitmap, int i, int j) {
    return (bitmap.getPixel(i - 1, j - 1) + bitmap.getPixel(i - 1, j) + bitmap.getPixel(i - 1, j + 1)
            + bitmap.getPixel(i, j - 1) + bitmap.getPixel(i, j) + bitmap.getPixel(i, j + 1)
            + bitmap.getPixel(i + 1, j - 1) + bitmap.getPixel(i + 1, j) + bitmap.getPixel(i + 1, j + 1)) / 9;
}
Your problem is that you are combining all the colour channels at once and they all spill into each other. You should apply your blur function to the red, green, and blue channels separately.
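A minimal sketch of that advice, as a replacement for getAverageOfPixel in the question's code (changing its return type to int): it averages each channel over the 3x3 neighbourhood with the Color helpers and repacks the result.
private int getAverageOfPixel(Bitmap bitmap, int i, int j) {
    int r = 0, g = 0, b = 0;
    for (int di = -1; di <= 1; di++) {
        for (int dj = -1; dj <= 1; dj++) {
            int c = bitmap.getPixel(i + di, j + dj);
            r += Color.red(c);
            g += Color.green(c);
            b += Color.blue(c);
        }
    }
    // Repack as an opaque colour; alpha is left at 255.
    return Color.rgb(r / 9, g / 9, b / 9);
}
With an int return value, the (int) cast in blurBitmap becomes unnecessary but harmless.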
Might it be easier to create a SurfaceView and use the FX_SURFACE_BLUR flag?
