OpenGL example to render bitmap to MediaCodec surface - android

I am looking for an example of how to render a bitmap to the surface provided by MediaCodec, so I can encode the frames and then mux them into an mp4 video.
The closest well-known example I've found is EncodeAndMuxTest. Unfortunately, with my limited OpenGL knowledge, I have not been able to convert that example to use bitmaps instead of the raw OpenGL frames it currently generates. Here is the example's frame generation method:
private void generateSurfaceFrame(int frameIndex) {
    frameIndex %= 8;

    int startX, startY;
    if (frameIndex < 4) {
        // (0,0) is bottom-left in GL
        startX = frameIndex * (mWidth / 4);
        startY = mHeight / 2;
    } else {
        startX = (7 - frameIndex) * (mWidth / 4);
        startY = 0;
    }

    GLES20.glClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

    GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
    GLES20.glScissor(startX, startY, mWidth / 4, mHeight / 2);
    GLES20.glClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
}
Can someone please show me how to modify this to render a bitmap instead, or point me to an example that does? I'm assuming this is all I need to change in order to get bitmaps rendered to the surface (but I may be wrong).
Edit: this is the method I replaced generateSurfaceFrame with; so far it doesn't produce any input to the encoder surface:
private int generatebitmapframe()
{
    final int[] textureHandle = new int[1];
    try {
        int id = _context.getResources().getIdentifier("drawable/other", null, _context.getPackageName());
        // Temporary create a bitmap
        Bitmap bmp = BitmapFactory.decodeResource(_context.getResources(), id);

        GLES20.glGenTextures(1, textureHandle, 0);
        if (textureHandle[0] != 0)
        {
            // Bind to the texture in OpenGL
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
            // Set filtering
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
            // Load the bitmap into the bound texture.
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);
        }
        if (textureHandle[0] == 0)
        {
            throw new RuntimeException("Error loading texture.");
        }
        //Utils.testSavebitmap(bmp, new File(OUTPUT_DIR, "testers.bmp").getAbsolutePath());
    }
    catch (Exception e) { e.printStackTrace(); }
    return textureHandle[0];
}
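Uploading the bitmap into a texture is only half the job: nothing reaches the encoder until something is actually drawn into the EGL surface that wraps the codec's input surface, and the frame is presented. Below is a minimal sketch of the missing drawing step, not the test's actual code; mProgram, aPosition, aTexCoord and uTexture are hypothetical names for an already-linked trivial texturing shader, and the EGL context bound to the MediaCodec input surface is assumed to be current:

// Sketch: draw the texture returned by generatebitmapframe() as a
// full-screen quad so the pixels land on the encoder's input surface.
private void drawTexturedQuad(int textureId) {
    float[] pos = { -1f, -1f,   1f, -1f,   -1f, 1f,   1f, 1f };  // triangle strip
    float[] uv  = {  0f,  1f,   1f,  1f,    0f, 0f,   1f, 0f };  // V flipped: bitmap rows are top-down

    FloatBuffer posBuf = ByteBuffer.allocateDirect(pos.length * 4)
            .order(ByteOrder.nativeOrder()).asFloatBuffer();
    posBuf.put(pos).position(0);
    FloatBuffer uvBuf = ByteBuffer.allocateDirect(uv.length * 4)
            .order(ByteOrder.nativeOrder()).asFloatBuffer();
    uvBuf.put(uv).position(0);

    GLES20.glUseProgram(mProgram);  // assumed already compiled and linked
    int aPosition = GLES20.glGetAttribLocation(mProgram, "aPosition");
    int aTexCoord = GLES20.glGetAttribLocation(mProgram, "aTexCoord");
    int uTexture  = GLES20.glGetUniformLocation(mProgram, "uTexture");

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
    GLES20.glUniform1i(uTexture, 0);

    GLES20.glEnableVertexAttribArray(aPosition);
    GLES20.glVertexAttribPointer(aPosition, 2, GLES20.GL_FLOAT, false, 0, posBuf);
    GLES20.glEnableVertexAttribArray(aTexCoord);
    GLES20.glVertexAttribPointer(aTexCoord, 2, GLES20.GL_FLOAT, false, 0, uvBuf);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    GLES20.glDisableVertexAttribArray(aPosition);
    GLES20.glDisableVertexAttribArray(aTexCoord);
}

After a call like drawTexturedQuad(generatebitmapframe()), EncodeAndMuxTest's encoding loop still sets the frame's timestamp and presents it (its input-surface helper calls eglPresentationTimeANDROID and eglSwapBuffers); without that swap, no frame is ever submitted to the encoder.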

Related

Problems reading ATI compressed textures

I'm trying to use compressed textures in my Android application. I have a problem loading them: the textures appear "cut off" on the right side of the object.
For compressing textures I'm using ATI's "TheCompressonator".
For testing I'm using a Nexus 5.
I suspect the problem is in my "calculated" size of the texture, but I can't find any references/specification for this compression format.
Does anyone know how to properly read this file format?
Here is a screenshot from the Nexus.
Here is how it should have looked (never mind the black object textures; the image for them was missing).
Here is my code snippet.
final int[] textureObjectIds = new int[1];
glGenTextures(1, textureObjectIds, 0);
if (textureObjectIds[0] == 0) {
    logTextureHelper(Log.WARN, "Could not generate a new OpenGL texture object");
    return 0;
}
final InputStream bitmap = context.getResources().openRawResource(resourceId);
byte[] buffer;
ByteBuffer bf;
try {
    buffer = new byte[bitmap.available()];
    bitmap.read(buffer);
    int offset = 0; // 64 bit = header, 15 bit = metadata
    bf = ByteBuffer.wrap(buffer, offset, buffer.length - offset);
    bf.order(ByteOrder.LITTLE_ENDIAN);
    int height = bf.getInt(16);
    int width = bf.getInt(12);
    int size = ((height + 3) / 4) * ((width + 3) / 4) * 16;
    Log.d("TextureHelper", "Buffer size: " + width + " " + height + " " + size);

    glBindTexture(GL_TEXTURE_2D, textureObjectIds[0]);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

    GLES20.glCompressedTexImage2D(GL_TEXTURE_2D, 0, ATC_RGBA_EXPLICIT_ALPHA_AMD, width, height, 0, size, bf);
    Log.d("TextureHelper", "Buffer size: " + bf.capacity() + " : " + buffer.length + " error:" + GLES20.glGetError());
    glBindTexture(GL_TEXTURE_2D, 0); // unbind texture
} catch (IOException e) {
    e.printStackTrace();
}
return textureObjectIds[0];
EDIT: Solution
buffer = new byte[bitmap.available()];
bitmap.read(buffer);
int offset = 128; // 64 bit = header, 15 bit = metadata
bf = ByteBuffer.wrap(buffer, offset, buffer.length - offset);
bf.order(ByteOrder.LITTLE_ENDIAN);
int height = bf.getInt(16);
int width = bf.getInt(12);
int size = ((height + 3) / 4) * ((width + 3) / 4) * 16;
Log.d("TextureHelper", "Buffer size: " + width + " " + height + " " + size);

bf.compact(); /////// SOLUTION!
bf.position(0);

glBindTexture(GL_TEXTURE_2D, textureObjectIds[0]);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

GLES20.glCompressedTexImage2D(GL_TEXTURE_2D, 0, ATC_RGBA_EXPLICIT_ALPHA_AMD, width, height, 0, size, bf);
Log.d("TextureHelper", "Buffer size: " + bf.capacity() + " : " + buffer.length + " error:" + GLES20.glGetError());
glBindTexture(GL_TEXTURE_2D, 0); // unbind texture
Note: you need to have the data at position 0; if you just call position(128), it will throw an invalid pointer exception.
Your size computation looks correct, but you're uploading the header as image data. After you extract the width and height from the header, offset your byte buffer by the appropriate amount so that it skips the header and points at the image data.
There are a few ways you could do this, but something like the following may work as an example (it could be optimized to remove the second ByteBuffer). Also, I'm not sure which texture format you're using, but let's assume your header is, oh, 124 bytes long.
// assumption that buffer[] is the fully loaded contents of the file
byte[] buffer = ... // load entire file from input stream

// read the header
ByteBuffer bfHeader = ByteBuffer.wrap(buffer);
bfHeader.order(ByteOrder.LITTLE_ENDIAN);
int height = bfHeader.getInt(16);
int width = bfHeader.getInt(12);

// offset to image data
int headerSize = 124; // (replace with correct header size)
ByteBuffer bfData = ByteBuffer.wrap(buffer, headerSize, buffer.length - headerSize);
bfData.order(ByteOrder.LITTLE_ENDIAN);

// load image data
int size = ((height + 3) / 4) * ((width + 3) / 4) * 16;
glBindTexture(GL_TEXTURE_2D, textureObjectIds[0]);
GLES20.glCompressedTexImage2D(GL_TEXTURE_2D, 0, ATC_RGBA_EXPLICIT_ALPHA_AMD, width, height, 0, size, bfData);
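A note on why the compact() in the edit above is needed: ByteBuffer.wrap(buffer, offset, length) only sets the buffer's position, it does not re-base index 0, and the Android GL bindings expect the data to start at position 0. Another way to satisfy that, sketched below under the assumption of the 128-byte header size from the edit, is to copy the payload into a direct buffer, which is the kind of buffer the GL bindings handle most reliably:

// Copy the image data (everything after the 128-byte header) into a
// direct buffer whose position 0 is the first compressed texel byte.
int headerSize = 128;
ByteBuffer bfData = ByteBuffer.allocateDirect(buffer.length - headerSize);
bfData.order(ByteOrder.LITTLE_ENDIAN);
bfData.put(buffer, headerSize, buffer.length - headerSize);
bfData.position(0);

GLES20.glCompressedTexImage2D(GLES20.GL_TEXTURE_2D, 0, ATC_RGBA_EXPLICIT_ALPHA_AMD,
        width, height, 0, size, bfData);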

Convert OpenGL ES 2.0 rendered texture to bitmap and back

I'd like to blur the rendered texture with RenderScript; for that I need to convert it to a bitmap, and to use the result I need to convert it back into an OpenGL texture.
Render-to-texture itself is working. The problem has to be somewhere in the code below, but I don't understand why it doesn't work; I'm getting a black screen.
public void renderToTexture() {
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fb[0]);
    GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

    // specify texture as color attachment
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, renderTex[0], 0);
    // attach render buffer as depth buffer
    GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, depthRb[0]);
    // check status
    int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);

    drawRender();

    Bitmap bitmap = SavePixels(0, 0, texW, texH);
    // blur bitmap and get back a bluredBitmap; not yet implemented
    texture = TextureHelper.loadTexture(bluredBitmap, 128);

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
    drawRender2();
}
To create a bitmap I read the pixels from the framebuffer, because I didn't find any other way to do it, but I'm open to other methods.
public static Bitmap SavePixels(int x, int y, int w, int h)
{
    int b[] = new int[w * (y + h)];
    int bt[] = new int[w * h];
    IntBuffer ib = IntBuffer.wrap(b);
    ib.position(0);
    GLES20.glReadPixels(0, 0, w, h, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, ib);

    for (int i = 0, k = 0; i < h; i++, k++)
    {
        for (int j = 0; j < w; j++)
        {
            int pix = b[i * w + j];
            int pb = (pix >> 16) & 0xff;
            int pr = (pix << 16) & 0x00ff0000;
            int pix1 = (pix & 0xff00ff00) | pr | pb;
            bt[(h - k - 1) * w + j] = pix1;
        }
    }

    Bitmap sb = Bitmap.createBitmap(bt, w, h, Bitmap.Config.ARGB_8888);
    return sb;
}
Here is the bitmap to texture code:
public static int loadTexture(final Bitmap pics, int size)
{
    final int[] textureHandle = new int[1];
    GLES20.glGenTextures(1, textureHandle, 0);
    if (textureHandle[0] != 0)
    {
        // Read in the resource
        final Bitmap bitmap = pics;

        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
        GLES20.glEnable(GLES20.GL_BLEND);

        // Bind to the texture in OpenGL
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
        // Set filtering
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        // Load the bitmap into the bound texture.
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
        // Recycle the bitmap, since its data has been loaded into OpenGL.
        bitmap.recycle();
    }
    if (textureHandle[0] == 0)
    {
        throw new RuntimeException("Error loading texture.");
    }
    return textureHandle[0];
}
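For the "blur bitmap and get back a bluredBitmap" step left unimplemented in renderToTexture(), a minimal sketch using RenderScript's ScriptIntrinsicBlur (API 17+) could look like the following; the Context parameter and the radius value are assumptions for illustration, not the poster's code:

// Sketch: Gaussian-blur a bitmap with ScriptIntrinsicBlur.
// The radius must be in (0, 25].
Bitmap blurBitmap(Context context, Bitmap src, float radius) {
    Bitmap out = Bitmap.createBitmap(src.getWidth(), src.getHeight(), src.getConfig());
    RenderScript rs = RenderScript.create(context);
    Allocation inAlloc = Allocation.createFromBitmap(rs, src);
    Allocation outAlloc = Allocation.createFromBitmap(rs, out);
    ScriptIntrinsicBlur blur = ScriptIntrinsicBlur.create(rs, Element.U8_4(rs));
    blur.setRadius(radius);   // e.g. 20f
    blur.setInput(inAlloc);
    blur.forEach(outAlloc);
    outAlloc.copyTo(out);
    rs.destroy();             // release RenderScript resources
    return out;
}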
You can look at the Android MediaCodec test code, in particular ExtractMpegFramesTest_egl14.java; the relevant snippet is here:
/**
 * Saves the current frame to disk as a PNG image.
 */
public void saveFrame(String filename) throws IOException {
    // glReadPixels gives us a ByteBuffer filled with what is essentially big-endian RGBA
    // data (i.e. a byte of red, followed by a byte of green...). To use the Bitmap
    // constructor that takes an int[] array with pixel data, we need an int[] filled
    // with little-endian ARGB data.
    //
    // If we implement this as a series of buf.get() calls, we can spend 2.5 seconds just
    // copying data around for a 720p frame. It's better to do a bulk get() and then
    // rearrange the data in memory. (For comparison, the PNG compress takes about 500ms
    // for a trivial frame.)
    //
    // So... we set the ByteBuffer to little-endian, which should turn the bulk IntBuffer
    // get() into a straight memcpy on most Android devices. Our ints will hold ABGR data.
    // Swapping B and R gives us ARGB. We need about 30ms for the bulk get(), and another
    // 270ms for the color swap.
    //
    // We can avoid the costly B/R swap here if we do it in the fragment shader (see
    // http://stackoverflow.com/questions/21634450/ ).
    //
    // Having said all that... it turns out that the Bitmap#copyPixelsFromBuffer()
    // method wants RGBA pixels, not ARGB, so if we create an empty bitmap and then
    // copy pixel data in we can avoid the swap issue entirely, and just copy straight
    // into the Bitmap from the ByteBuffer.
    //
    // Making this even more interesting is the upside-down nature of GL, which means
    // our output will look upside-down relative to what appears on screen if the
    // typical GL conventions are used. (For ExtractMpegFrameTest, we avoid the issue
    // by inverting the frame when we render it.)
    //
    // Allocating large buffers is expensive, so we really want mPixelBuf to be
    // allocated ahead of time if possible. We still get some allocations from the
    // Bitmap / PNG creation.

    mPixelBuf.rewind();
    GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE,
            mPixelBuf);

    BufferedOutputStream bos = null;
    try {
        bos = new BufferedOutputStream(new FileOutputStream(filename));
        Bitmap bmp = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
        mPixelBuf.rewind();
        bmp.copyPixelsFromBuffer(mPixelBuf);
        bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
        bmp.recycle();
    } finally {
        if (bos != null) bos.close();
    }
    if (VERBOSE) {
        Log.d(TAG, "Saved " + mWidth + "x" + mHeight + " frame as '" + filename + "'");
    }
}
You should have used:
GLES20.glReadPixels(0, 0, w, h, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
Your for loop is then supposed to convert that RGBA data to ARGB_8888.
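Putting the two answers together, a corrected SavePixels could skip the manual channel swap entirely by reading GL_RGBA bytes and handing them straight to Bitmap#copyPixelsFromBuffer(), which expects RGBA order (as the long comment in saveFrame above notes). This is a sketch under those assumptions, not the original poster's code; the vertical flip is needed because GL rows come out bottom-up:

public static Bitmap savePixels(int x, int y, int w, int h) {
    // Read RGBA bytes directly from the currently bound framebuffer.
    ByteBuffer buf = ByteBuffer.allocateDirect(w * h * 4);
    buf.order(ByteOrder.nativeOrder());
    GLES20.glReadPixels(x, y, w, h, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
    buf.rewind();

    // copyPixelsFromBuffer() expects RGBA order, so no channel swap is needed.
    Bitmap bmp = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
    bmp.copyPixelsFromBuffer(buf);

    // GL's origin is bottom-left, Bitmap's is top-left: flip vertically.
    android.graphics.Matrix m = new android.graphics.Matrix();
    m.preScale(1, -1);
    return Bitmap.createBitmap(bmp, 0, 0, w, h, m, false);
}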

glReadPixels + FBO is not working

On Android, OpenGL ES 2.0: I'm reading from the framebuffer by calling glReadPixels with an FBO bound, but the byte array comes back all zeros. Interestingly enough, when I remove the binding code (leaving the glReadPixels call) it works.
That made me wonder whether I bound the buffer incorrectly, although when I check the framebuffer status (glCheckFramebufferStatus) I get GLES20.GL_FRAMEBUFFER_COMPLETE.
Any idea what I've done wrong?
int frameIdIndex = 0, renderIdIndex = 1, textureIdIndex = 2;
int[] bufferId = new int[3];

Bitmap takeOne(Context cntxt) {
    DisplayMetrics dm = cntxt.getResources().getDisplayMetrics();
    int width = dm.widthPixels;
    int height = dm.heightPixels;

    // id index: 0 = frameId, 1 = renderId, 2 = textureId
    GLES20.glGenFramebuffers(1, bufferId, frameIdIndex);
    GLES20.glGenRenderbuffers(1, bufferId, renderIdIndex);
    GLES20.glGenTextures(1, bufferId, textureIdIndex);

    // bind texture and load the texture mip-level 0
    // texels are RGB565
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, bufferId[textureIdIndex]);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_SHORT_5_6_5, null);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);

    // bind renderbuffer and create a 16-bit depth buffer;
    // width and height of renderbuffer = width and height of the texture
    GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, bufferId[renderIdIndex]);
    GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);

    // bind the frameBuffer
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bufferId[frameIdIndex]);
    // specify texture as color attachment
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, bufferId[textureIdIndex], 0);
    // specify renderbuffer as depth attachment
    GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, bufferId[renderIdIndex]);

    // check for framebuffer complete
    int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
    if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
        throw new RuntimeException("status:" + status + ", hex:" + Integer.toHexString(status));
    }

    int screenshotSize = width * height;
    ByteBuffer bb = ByteBuffer.allocateDirect(screenshotSize * 4);
    bb.order(ByteOrder.nativeOrder());
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GL10.GL_UNSIGNED_BYTE, bb);

    int pixelsBuffer[] = new int[screenshotSize];
    bb.asIntBuffer().get(pixelsBuffer);

    final Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
    bitmap.setPixels(pixelsBuffer, screenshotSize - width, -width, 0, 0, width, height);
    pixelsBuffer = null;

    short sBuffer[] = new short[screenshotSize];
    ShortBuffer sb = ShortBuffer.wrap(sBuffer);
    bitmap.copyPixelsToBuffer(sb);

    // Making created bitmap (from OpenGL points) compatible with Android bitmap
    for (int i = 0; i < screenshotSize; ++i) {
        short v = sBuffer[i];
        sBuffer[i] = (short) (((v & 0x1f) << 11) | (v & 0x7e0) | ((v & 0xf800) >> 11));
    }
    sb.rewind();
    bitmap.copyPixelsFromBuffer(sb);

    // cleanup
    GLES20.glDeleteRenderbuffers(1, bufferId, renderIdIndex);
    GLES20.glDeleteFramebuffers(1, bufferId, frameIdIndex);
    GLES20.glDeleteTextures(1, bufferId, textureIdIndex);

    return bitmap;
}
Your formats and types are somewhat mixed up. This glTexImage2D() call should already generate a GL_INVALID_OPERATION error if you check with glGetError():
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0,
        GLES20.GL_RGBA, width, height, 0,
        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_SHORT_5_6_5, null);
GL_UNSIGNED_SHORT_5_6_5 can only be used with a format of GL_RGB. From the documentation:
GL_INVALID_OPERATION is generated if type is GL_UNSIGNED_SHORT_5_6_5 and format is not GL_RGB.
To avoid this error condition, the call needs to be:
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0,
        GLES20.GL_RGB, width, height, 0,
        GLES20.GL_RGB, GLES20.GL_UNSIGNED_SHORT_5_6_5, null);
The glReadPixels() call itself looks fine to me, so I believe it should work once you have a valid texture to render to.
The bitmap.setPixels() call might be problematic: the documentation says it expects ARGB colors, and you have RGBA here. But that's beyond the main scope of your question.
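Along the lines of this answer, a consistent version of the allocation and readback might look like this sketch (variable names follow the question; using copyPixelsFromBuffer sidesteps the setPixels() ARGB/RGBA mismatch as well):

// Allocate the color attachment with a matching format/type pair.
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0,
        GLES20.GL_RGB, width, height, 0,
        GLES20.GL_RGB, GLES20.GL_UNSIGNED_SHORT_5_6_5, null);

// ... attach, render ...

// Read back as RGBA bytes (a combination glReadPixels always supports in ES 2.0).
ByteBuffer bb = ByteBuffer.allocateDirect(width * height * 4);
bb.order(ByteOrder.nativeOrder());
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bb);
bb.rewind();

Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(bb);  // expects RGBA byte order; no channel swap needed
// (the result is still vertically flipped relative to screen coordinates)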

Android OpenGL Trying to texture multiple different bitmaps

I have small cubes that make up a grid forming a 3D cube. On each small cube I use a bitmap to texture the surface, but I want to use more than one picture. I can build more textures within loadTexture, add them to final int[] textureHandle = new int[1];, and return them. How do I instantiate them to each small cube I'm drawing though?
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config)
{
    mLastRequestedCubeFactor = mActualCubeFactor = 3;
    generateCubes(mActualCubeFactor, false, false);

    // Set the background clear color to black.
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);

    // Use culling to remove back faces.
    GLES20.glEnable(GLES20.GL_CULL_FACE);
    // Enable depth testing
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    // Position the eye in front of the origin.
    final float eyeX = 0.0f;
    final float eyeY = 0.0f;
    final float eyeZ = -0.5f;

    // We are looking toward the distance
    final float lookX = 0.0f;
    final float lookY = 0.0f;
    final float lookZ = -5.0f;

    // Set our up vector. This is where our head would be pointing were we holding the camera.
    final float upX = 0.0f;
    final float upY = 1.0f;
    final float upZ = 0.0f;

    // Set the view matrix. This matrix can be said to represent the camera position.
    // NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
    // view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
    Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);

    final String vertexShader = RawResourceReader.readTextFileFromRawResource(mLessonSevenActivity, R.raw.lesson_seven_vertex_shader);
    final String fragmentShader = RawResourceReader.readTextFileFromRawResource(mLessonSevenActivity, R.raw.lesson_seven_fragment_shader);

    final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
    final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);

    mProgramHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
            new String[] {"a_Position", "a_Normal", "a_TexCoordinate"});

    // Load the texture
    mAndroidDataHandle = TextureHelper.loadTexture(mLessonSevenActivity, R.drawable.usb_android);
    GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);

    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mAndroidDataHandle);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mAndroidDataHandle);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);

    // Initialize the accumulated rotation matrix
    Matrix.setIdentityM(mAccumulatedRotation, 0);
}
public class TextureHelper
{
    public static int loadTexture(final Context context, final int resourceId)
    {
        final int[] textureHandle = new int[1];
        GLES20.glGenTextures(1, textureHandle, 0);
        if (textureHandle[0] != 0)
        {
            final BitmapFactory.Options options = new BitmapFactory.Options();
            options.inScaled = false; // No pre-scaling

            // Read in the resource
            final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);

            // Bind to the texture in OpenGL
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
            // Set filtering
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
            // Load the bitmap into the bound texture.
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
            // Recycle the bitmap, since its data has been loaded into OpenGL.
            bitmap.recycle();
        }
        if (textureHandle[0] == 0)
        {
            throw new RuntimeException("Error loading texture.");
        }
        return textureHandle[0];
    }
}
How do I instantiate them to each small cube I'm drawing though?
In short, you don't.
Even in desktop GL, where vertex instancing is a core feature, there is no way to change texture bindings without splitting the draw into multiple draw calls.
You could use a texture atlas, an array texture, or a geometry shader to sample from a different (already bound) texture or a different part of a single texture. Alternatively, you could use bindless textures. Each of those options requires a newer version of GL than the last.
The only way to do this in ES is going to be either multiple draw calls, or a texture atlas / binding textures to multiple texture units. And since instancing is not a core feature, computing the texture coordinate / texture unit dynamically is a tremendous pain and will involve duplicating vertex data.
The bottom line is: what do you really mean by an instantiated cube? Are you trying to draw 500 cubes in a single operation, or are you drawing them separately by calling some method on your cube class? Instancing has different meanings depending on the context.
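As an illustration of the multiple-draw-call option in ES 2.0, here is a sketch of a render loop that binds a different texture per cube; cubes[], textureHandles[], the draw() method, and the "u_Texture" uniform name are assumptions layered on the lesson-seven code above, not part of it:

// Assumes loadTexture() was called once per drawable resource and the
// resulting handles were collected into textureHandles[].
int uTextureUnit = GLES20.glGetUniformLocation(mProgramHandle, "u_Texture");

for (int i = 0; i < cubes.length; i++) {
    // Bind this cube's texture to unit 0 and point the sampler at it.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandles[i % textureHandles.length]);
    GLES20.glUniform1i(uTextureUnit, 0);

    cubes[i].draw();  // one draw call per cube (hypothetical method)
}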

Android texture only showing solid color

I am trying to display a single texture on a quad.
I had a working VertexObject which drew a square (or any geometric object) fine. Now I've tried expanding it to handle textures too, but the texture doesn't work: I only see the quad in one solid color.
The coordinate data is in an arrayList:
/* the vertices' coordinates */
public int coordCount = 0;
/* float array of 3 (x,y,z) */
public ArrayList<Float> coordList = new ArrayList<Float>(coordCount);

/* the coordinates' indexes (if used) */
/* maximum limit: 32767 */
private int orderCount = 0;
private ArrayList<Short> orderList = new ArrayList<Short>(orderCount);

/* textures */
public boolean textured;
private boolean textureIsReady;
private ArrayList<Float> textureList = new ArrayList<Float>(coordCount);
private Bitmap bitmap;  // the image to be displayed
private int textures[]; // the textures' ids
The buffers are initialized in the following function:
/* Drawing is based on the buffers */
public void refreshBuffers() {
    /* Coordinates' list */
    float coords[] = new float[coordList.size()];
    for (int i = 0; i < coordList.size(); i++) {
        coords[i] = coordList.get(i);
    }

    // initialize vertex byte buffer for shape coordinates
    // (number of coordinate values * 4 bytes per float)
    ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
    // use the device hardware's native byte order
    bb.order(ByteOrder.nativeOrder());

    // create a floating point buffer from the ByteBuffer
    vertexBuffer = bb.asFloatBuffer();
    // add the coordinates to the FloatBuffer
    vertexBuffer.put(coords);
    // set the buffer to read the first coordinate
    vertexBuffer.position(0);

    /* Index list */
    short order[] = new short[(short) orderList.size()];
    for (int i = 0; i < order.length; i++) {
        order[i] = (short) orderList.get(i);
    }

    // initialize byte buffer for the draw list
    // (# of coordinate values * 2 bytes per short)
    ByteBuffer dlb = ByteBuffer.allocateDirect(order.length * 2);
    dlb.order(ByteOrder.nativeOrder());
    orderBuffer = dlb.asShortBuffer();
    orderBuffer.put(order);
    orderBuffer.position(0);

    /* Texture list */
    if (textured) {
        float textureCoords[] = new float[textureList.size()];
        for (int i = 0; i < textureList.size(); i++) {
            textureCoords[i] = textureList.get(i);
        }

        ByteBuffer byteBuf = ByteBuffer.allocateDirect(textureCoords.length * 4);
        byteBuf.order(ByteOrder.nativeOrder());
        textureBuffer = byteBuf.asFloatBuffer();
        textureBuffer.put(textureCoords);
        textureBuffer.position(0);
    }
}
I load the image into the object with the following code:
public void initTexture(GL10 gl, Bitmap inBitmap) {
    bitmap = inBitmap;
    loadTexture(gl);
    textureIsReady = true;
}

/* http://www.jayway.com/2010/12/30/opengl-es-tutorial-for-android-part-vi-textures/ */
public void loadTexture(GL10 gl) {
    gl.glGenTextures(1, textures, 0);
    gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

    gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
    gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
    gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);

    /* bind bitmap to texture */
    GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
}
And the drawing happens based on this code:
public void draw(GL10 gl) {
    if (textured && textureIsReady) {
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
        //loadTexture(gl);
        gl.glEnable(GL10.GL_TEXTURE_2D);
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
        gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);
    } else {
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glColor4f(color[0], color[1], color[2], color[3]);
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
    }

    if (!indexed) gl.glDrawArrays(drawMode, 0, coordCount);
    else gl.glDrawElements(drawMode, orderCount, GL10.GL_UNSIGNED_SHORT, orderBuffer);

    if (textured && textureIsReady) {
        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
        gl.glDisable(GL10.GL_TEXTURE_2D);
    } else {
        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
    }
}
The initialization is as follows:
pic = new VertexObject();
pic.indexed = true;
pic.textured = true;
pic.initTexture(gl, MainActivity.bp);
pic.color[0] = 0.0f;
pic.color[1] = 0.0f;
pic.color[2] = 0.0f;

float inputVertex[] = {2.0f, 2.0f, 0.0f};
float inputTexture[] = {0.0f, 0.0f};
pic.addTexturedVertex(inputVertex, inputTexture);

inputVertex[0] = 2.0f;
inputVertex[1] = 8.0f;
inputTexture[0] = 0.0f;
inputTexture[0] = 1.0f;
pic.addTexturedVertex(inputVertex, inputTexture);

inputVertex[0] = 8.0f;
inputVertex[1] = 8.0f;
inputTexture[0] = 1.0f;
inputTexture[0] = 1.0f;
pic.addTexturedVertex(inputVertex, inputTexture);

inputVertex[0] = 8.0f;
inputVertex[1] = 2.0f;
inputTexture[0] = 1.0f;
inputTexture[0] = 0.0f;
pic.addTexturedVertex(inputVertex, inputTexture);

pic.addIndex((short) 0);
pic.addIndex((short) 1);
pic.addIndex((short) 2);
pic.addIndex((short) 0);
pic.addIndex((short) 2);
pic.addIndex((short) 3);
The coordinates are simply added to the ArrayList, and then I refresh the buffers.
The bitmap is valid, because it shows up in an ImageView.
The image is a 128x128 png file in the drawable folder.
From what I can gather the image is getting to the VertexObject, but something isn't right with the texture mapping. Any pointers on what I'm doing wrong?
Okay, I got it!
I downloaded a working example from the internet and rewrote it step by step to resemble the object presented above, checking at every step that it still worked. It turns out the problem wasn't in the graphics code, because the object worked in another context with different coordinates.
Long story short:
I got the texture UV mapping wrong!
That's why I got the solid color: the texture was loaded, but the UV mapping wasn't correct.
Short story long:
At lines like
inputVertex[0] = 2.0f;
inputVertex[1] = 8.0f;
inputTexture[0] = 0.0f;
inputTexture[0] = 1.0f;
the indexing was wrong: only the first element of inputTexture was ever updated. There may have been additional errors regarding the sizes of the different arrays describing the vertex coordinates, but rewriting along the lines of the linked example fixed the problem and produced more concise code.
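For the record, the corrected assignments index both elements of inputTexture:

inputVertex[0]  = 2.0f;
inputVertex[1]  = 8.0f;
inputTexture[0] = 0.0f;
inputTexture[1] = 1.0f;  // was inputTexture[0] again: the V coordinate was never set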
