Perhaps these are stupid questions, but I would appreciate it if someone could answer them.
All of them concern the Android environment and OpenGL ES.
How can I verify whether I have enabled MSAA or not? If I draw some GL_POINTS with a point size of 50, they appear as small squares. If I enable 4x MSAA, will the small squares become round points?
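As a side note, one way to verify at runtime is to ask GL how many samples the currently bound framebuffer has; a minimal sketch using Android's Java GLES20 binding (the equivalent native glGetIntegerv calls behave the same):
    // Query the sample state of the currently bound framebuffer.
    // A 4x MSAA surface should report sampleBuffers[0] == 1 and samples[0] == 4.
    int[] sampleBuffers = new int[1];
    int[] samples = new int[1];
    GLES20.glGetIntegerv(GLES20.GL_SAMPLE_BUFFERS, sampleBuffers, 0);
    GLES20.glGetIntegerv(GLES20.GL_SAMPLES, samples, 0);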
I tried my best to enable MSAA with an FBO and a blit FBO, but nothing is drawn and glBlitFramebuffer() raises a GL_INVALID_OPERATION error.
Here is the complete project mentioned above: https://github.com/Enoch-Liu/GL
The key code follows:
void Renderer::MultisampleAntiAliasing() {
    // Multisampled color renderbuffer
    glGenRenderbuffers(1, &m_MSColor);
    glBindRenderbuffer(GL_RENDERBUFFER, m_MSColor);
    glRenderbufferStorageMultisample(GL_RENDERBUFFER, 4, GL_RGBA8, m_width, m_height);
    checkGLError("GenMSColorBuffer");

    // Framebuffer object with the multisampled color attachment
    glGenFramebuffers(1, &m_MSFBO);
    glBindFramebuffer(GL_FRAMEBUFFER, m_MSFBO);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, m_MSColor);
    checkGLError("FboRbo,COLORATTACHMENT");

    // Multisampled depth renderbuffer
    glGenRenderbuffers(1, &m_MSDepth);
    glBindRenderbuffer(GL_RENDERBUFFER, m_MSDepth);
    glRenderbufferStorageMultisample(GL_RENDERBUFFER, 4, GL_DEPTH_COMPONENT16, m_width, m_height);
    checkGLError("GenDepthBuffer");
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, m_MSDepth);
    checkGLError("DepthBuffer,Renderbuffer");

    GLenum drawBufs[] = {GL_COLOR_ATTACHMENT0};
    glDrawBuffers(1, drawBufs);
    checkGLError("DrawBuffer");

    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
        LOG_ERROR("failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
    }
}
void Renderer::drawFrame() {
    //LOG_INFO("drawFrame %d x %d", width, height);
    static float r = 0.9f;
    static float g = 0.2f;
    static float b = 0.2f;
    LOG_INFO("xxx %d, %d", m_width, m_height);

    if (OPENMSAA)
    {
        // Render into the multisampled FBO instead of the default framebuffer
        glBindFramebuffer(GL_FRAMEBUFFER, m_MSFBO);
        glBindRenderbuffer(GL_RENDERBUFFER, m_MSColor);
        checkGLError("BindTwoBuffers");
    }

    glViewport(0, 0, m_width, m_height);
    glScissor(0, 0, m_width, m_height);
    glClearColor(r, g, b, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);

    const GLfloat landscapeOrientationMatrix[16] = {
        1.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 1.0f };
    const GLfloat color[4] = {
        1.0f, 0.0f, 0.0f, 1.0f
    };

    glUseProgram(m_program);
    glUniformMatrix4fv(m_uMvp, 1, GL_FALSE, landscapeOrientationMatrix);
    glUniform4fv(m_uColor, 1, color);

    m_p = glGetAttribLocation(m_program, "vPosition");
    m_p1 = glGetAttribLocation(m_program, "vPosition1");
    glEnableVertexAttribArray(m_p);
    glVertexAttribPointer(m_p, 3, GL_FLOAT, false, 3 * sizeof(float), squareCoords);
    glDrawArrays(GL_POINTS, 0, 4);
    glDisableVertexAttribArray(m_p);
    glFlush();
    checkGLError("Before Blit");

    if (OPENMSAA)
    {
        // Resolve the multisampled FBO into the default framebuffer
        glBindFramebuffer(GL_READ_FRAMEBUFFER, m_MSFBO);
        checkGLError("BindReadBuffer");
        glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
        checkGLError("BindFramebuffer");
        glBlitFramebuffer(0, 0, m_width, m_height, 0, 0, m_width, m_height, GL_COLOR_BUFFER_BIT, GL_NEAREST);
        checkGLError("BlitFramebufferColor");
        glBlitFramebuffer(0, 0, m_width, m_height, 0, 0, m_width, m_height, GL_DEPTH_BUFFER_BIT, GL_NEAREST);
        checkGLError("BlitFramebufferDepth");
        glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
        glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
    }
}
The framebuffer is complete.
The internal formats of the depth buffers have to match: https://www.opengl.org/discussion_boards/showthread.php/173275-Alternative-to-glBlitFramebuffer%28%29
Looking at your GitHub project, you are not configuring a depth buffer at all. From your project:
const EGLint attribs[] = {
    // EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
    EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
    EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
    EGL_BLUE_SIZE, 8,
    EGL_GREEN_SIZE, 8,
    EGL_RED_SIZE, 8,
    EGL_ALPHA_SIZE, 8,
    EGL_SAMPLE_BUFFERS, 1,
    EGL_SAMPLES, 4,
    EGL_NONE
};
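The fix, then, would be to request a depth buffer in the EGL config so the default framebuffer has depth planes to blit into. A sketch of the amended attribute list, written with Android's EGL14 Java binding for illustration (the project itself configures EGL in native code; only the EGL_DEPTH_SIZE entry is new):
    int[] attribs = {
        EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
        EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
        EGL14.EGL_BLUE_SIZE, 8,
        EGL14.EGL_GREEN_SIZE, 8,
        EGL14.EGL_RED_SIZE, 8,
        EGL14.EGL_ALPHA_SIZE, 8,
        // Request a depth buffer; 16 bits matches the GL_DEPTH_COMPONENT16
        // renderbuffer, so the depth blit has matching formats on both sides.
        EGL14.EGL_DEPTH_SIZE, 16,
        EGL14.EGL_SAMPLE_BUFFERS, 1,
        EGL14.EGL_SAMPLES, 4,
        EGL14.EGL_NONE
    };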
Related
I'm new to OpenGL and currently working with the Android NDK. I am getting the error below and can't figure it out. I feel like it's something basic, but I could be wrong. appRender is run every frame by the Java renderer side.
Error:
E/emuglGLESv2_enc: glDrawArrays: a vertex attribute array is enabled with no data bound
app.cpp:
void appRender(long tick, int width, int height){
    const float vertices[] =
    {
        0.0f, 0.5f, 0.0f,
        -0.5f, -0.5f, 0.0f,
        0.5f, -0.5f, 0.0f
    };

    glClear(GL_COLOR_BUFFER_BIT);

    GLuint vertexBuffer;
    glGenBuffers(1, &vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    // Setup colors for each vertex
    GLfloat colors[3*4];
    for (int v = 0; v < 3; v++){
        colors[4*v+0] = 0;
        colors[4*v+1] = 1;
        colors[4*v+2] = 0;
        colors[4*v+3] = 1;
    }

    // Setup color buffer
    GLuint colorBuffer;
    glGenBuffers(1, &colorBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(colors), colors, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(
        1,
        3,
        GL_FLOAT,
        GL_FALSE,
        0,
        (void*)0
    );
    glEnableVertexAttribArray(1);
    glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
    glVertexAttribPointer(
        1,
        4,
        GL_FLOAT,
        GL_FALSE,
        0,
        (void*)0
    );
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(1);
    glBindTexture(GL_TEXTURE_2D, 0);
    //textRenderer.RenderTexts(0.5f);
    //boxRenderer.RenderBoxes(0.5f);
}
So I found it, and yes, I'm bad: the attribute index passed to glVertexAttribPointer didn't match the index that was enabled.
glVertexAttribPointer(1, 3, ...) -> glVertexAttribPointer(0, 3, ...)
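For illustration, a minimal correct pairing, sketched with the Java GLES20 binding and assuming the position data is in the currently bound GL_ARRAY_BUFFER: the index that is enabled must be the same index the pointer is set on.
    // Attribute 0: enable it and point it at the bound buffer with one index.
    GLES20.glEnableVertexAttribArray(0);
    GLES20.glVertexAttribPointer(0, 3, GLES20.GL_FLOAT, false, 0, 0);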
I tried to map my texture onto a square in OpenGL ES 2.0, and when I do, the texture appears upside down. Is my mapping wrong, or the way I'm drawing it?
Here is my code for onDrawFrame():
public void onDrawFrame(GL10 glUnused) {
    GLES20.glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glUseProgram(mProgram);
    GLES20.glEnable(GLES20.GL_BLEND);
    GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureID);

    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT,
            false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT,
            false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    GLES20.glEnableVertexAttribArray(maTextureHandle);

    Matrix.orthoM(mProjMatrix, 0, 0, 200, 0, 100, -5, 5);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mProjMatrix, 0);
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
            GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
}
The coordinates for the square and the UV mapping are:
private final float[] mTriangleVerticesData = {
    // X, Y, Z, U, V
    0f,  0f,  0, 0.0f, 0.0f,
    50f, 0f,  0, 1.0f, 0.0f,
    50f, 50f, 0, 1.0f, 1.0f,
    0f,  50f, 0.0f, 0.0f, 1.0f };
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 };
How could I achieve a right-side-up view?
This looks like a common mistake. You need to account for the fact that OpenGL's origin for textures is in the lower-left corner (not the upper-left corner).
There are a couple of things you can do to compensate:
read the pixel lines from bottom to top;
swap the pixel lines;
or swap the texture Y coordinates.
You could try flipping the texture coordinate in a shader (source: http://en.wikibooks.org/wiki/OpenGL_Programming/Modern_OpenGL_Tutorial_06):
void main(void) {
    vec2 flipped_texcoord = vec2(f_texcoord.x, 1.0 - f_texcoord.y);
    gl_FragColor = texture2D(mytexture, flipped_texcoord);
}
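Equivalently, the flip can be baked into the vertex data by swapping the V values, so no shader change is needed; a sketch based on the coordinate array from the question:
    private final float[] mTriangleVerticesData = {
        // X, Y, Z, U, V -- V flipped so the top of the image maps to the top of the quad
        0f,  0f,  0f, 0.0f, 1.0f,
        50f, 0f,  0f, 1.0f, 1.0f,
        50f, 50f, 0f, 1.0f, 0.0f,
        0f,  50f, 0f, 0.0f, 0.0f };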
I would like to have a gradient background in OpenGL.
I found these two links, but I cannot reproduce the effect:
OpenGL gradient fill on iPhone looks striped
OpenGL gradient banding on Android
I tried the following from the first link:
// Begin Render
//IntBuffer redBits = null, greenBits = null, blueBits = null;
//gl.glGetIntegerv(GL10.GL_RED_BITS, redBits);     // ==> 8
//gl.glGetIntegerv(GL10.GL_GREEN_BITS, greenBits); // ==> 8
//gl.glGetIntegerv(GL10.GL_BLUE_BITS, blueBits);   // ==> 8
gl.glDisable(GL10.GL_BLEND);
gl.glDisable(GL10.GL_DITHER);
gl.glDisable(GL10.GL_FOG);
gl.glDisable(GL10.GL_LIGHTING);
gl.glDisable(GL10.GL_TEXTURE_2D);
gl.glShadeModel(GL10.GL_SMOOTH);

float[] vertices = {
    0, 0,
    320, 0,
    0, 480,
    320, 480,
};
FloatBuffer vertsBuffer = makeFloatBuffer(vertices);

int[] colors = {
    255, 255, 255, 255,
    255, 255, 255, 255,
    200, 200, 200, 255,
    200, 200, 200, 255,
};
IntBuffer colorBuffer = makeIntBuffer(colors);

gl.glVertexPointer(2, GL10.GL_FLOAT, 0, vertsBuffer);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glColorPointer(4, GL10.GL_UNSIGNED_BYTE, 0, colorBuffer);
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
// End Render

protected static FloatBuffer makeFloatBuffer(float[] arr) {
    ByteBuffer bb = ByteBuffer.allocateDirect(arr.length * 4);
    bb.order(ByteOrder.nativeOrder());
    FloatBuffer fb = bb.asFloatBuffer();
    fb.put(arr);
    fb.position(0);
    return fb;
}

protected static IntBuffer makeIntBuffer(int[] arr) {
    ByteBuffer bb = ByteBuffer.allocateDirect(arr.length * 4);
    bb.order(ByteOrder.nativeOrder());
    IntBuffer ib = bb.asIntBuffer();
    ib.put(arr);
    ib.position(0);
    return ib;
}
But it just shows a rectangle in the upper-right corner. I don't know whether the glGetIntegerv calls would have any effect. Any ideas or links on how to make it work?
SOLUTION
// set orthographic projection
setOrtho2D(activity, gl);

gl.glDisable(GL10.GL_BLEND);
//gl.glDisable(GL10.GL_DITHER);
gl.glDisable(GL10.GL_FOG);
gl.glDisable(GL10.GL_LIGHTING);
gl.glDisable(GL10.GL_TEXTURE_2D);
gl.glShadeModel(GL10.GL_SMOOTH);

float[] vertices = {
    0, 0,
    _winWidth, 0,
    0, _winHeight,
    _winWidth, _winHeight
};
FloatBuffer vertsBuffer = makeFloatBuffer(vertices);

float[] colors = {
    1.0f, 1.0f, 1.0f, 1.0f,
    1.0f, 1.0f, 1.0f, 1.0f,
    0.2f, 0.2f, 0.2f, 1.0f,
    0.2f, 0.2f, 0.2f, 1.0f
};
FloatBuffer colorBuffer = makeFloatBuffer(colors);

gl.glVertexPointer(2, GL10.GL_FLOAT, 0, vertsBuffer);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glColorPointer(4, GL10.GL_FLOAT, 0, colorBuffer);
gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
I had forgotten to uncomment the projection line again. I also changed the vertex layout order from a "U" shape to a "Z" shape (as suggested by Nick).
Now it looks like how I want it:
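For reference, the ordering matters because GL_TRIANGLE_STRIP builds its two triangles as (v0, v1, v2) and (v2, v1, v3); walking the corners around the perimeter ("U" order) folds the quad so part of it is left uncovered, while the zig-zag "Z" order tiles the whole rectangle. A sketch using the same _winWidth/_winHeight variables as above:
    float[] vertices = {
        // "Z" order: the strip's triangles (v0,v1,v2) and (v2,v1,v3)
        // cover the whole rectangle
        0,         0,           // v0: bottom-left
        _winWidth, 0,           // v1: bottom-right
        0,         _winHeight,  // v2: top-left
        _winWidth, _winHeight,  // v3: top-right
    };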
This is a problem:
int[] colors;
...
gl.glColorPointer(4, GL10.GL_UNSIGNED_BYTE, 0, colorBuffer);
You are using signed four-byte integers for your color channels and then telling OpenGL that they are unsigned one-byte integers. You should be using a buffer full of unsigned bytes.
It would be easier, however, to just use floats instead:
float[] colors = {
    1.0f, 1.0f, 1.0f, 1.0f,
    1.0f, 1.0f, 1.0f, 1.0f,
    0.5f, 0.5f, 0.5f, 1.0f,
    0.5f, 0.5f, 0.5f, 1.0f,
};
float vertices[] = {
    0, 0,
    800, 0,
    0, 480,
    800, 480,
};
FloatBuffer colorBuffer = makeFloatBuffer(colors);
gl.glColorPointer(4, GL10.GL_FLOAT, 0, colorBuffer);
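If byte colors are preferred over floats, the buffer itself has to hold single bytes rather than ints; a sketch using a direct ByteBuffer (the casts are needed because Java bytes are signed, while GL reads them as unsigned):
    ByteBuffer colorBuffer = ByteBuffer.allocateDirect(4 * 4); // 4 vertices * RGBA
    colorBuffer.order(ByteOrder.nativeOrder());
    colorBuffer.put(new byte[] {
        (byte) 255, (byte) 255, (byte) 255, (byte) 255,
        (byte) 255, (byte) 255, (byte) 255, (byte) 255,
        (byte) 200, (byte) 200, (byte) 200, (byte) 255,
        (byte) 200, (byte) 200, (byte) 200, (byte) 255,
    });
    colorBuffer.position(0);
    gl.glColorPointer(4, GL10.GL_UNSIGNED_BYTE, 0, colorBuffer);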
In this posting, http://www.badlogicgames.com/wordpress/?p=504, libgdx's Mario writes:
OpenGL from the ground up: an extremely well written tutorial series on OpenGL ES 1.x. Covers all the basics you need to get started with OpenGL. Note that the tutorial is written for the IPhone and uses Objective C/C++. This shouldn’t be a big problem though as the API is the same.
To my shame, I wasn't able to get a libgdx equivalent of the very first example in that tutorial running, which is this:
- (void)drawView:(GLView*)view
{
    Vertex3D vertex1 = Vertex3DMake(0.0, 1.0, -3.0);
    Vertex3D vertex2 = Vertex3DMake(1.0, 0.0, -3.0);
    Vertex3D vertex3 = Vertex3DMake(-1.0, 0.0, -3.0);
    Triangle3D triangle = Triangle3DMake(vertex1, vertex2, vertex3);

    glLoadIdentity();
    glClearColor(0.7, 0.7, 0.7, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glEnableClientState(GL_VERTEX_ARRAY);
    glColor4f(1.0, 0.0, 0.0, 1.0);
    glVertexPointer(3, GL_FLOAT, 0, &triangle);
    glDrawArrays(GL_TRIANGLES, 0, 9);
    glDisableClientState(GL_VERTEX_ARRAY);
}
My code:
public void render () {
    Gdx.gl11.glLoadIdentity();
    Gdx.gl11.glRotatef(rotation, 0.0f, 0.0f, 1.0f);
    Gdx.gl11.glClearColor(0.7f, 0.7f, 0.7f, 1.0f);
    Gdx.gl11.glClear(GL11.GL_COLOR_BUFFER_BIT | GL11.GL_DEPTH_BUFFER_BIT);
    Gdx.gl11.glEnableClientState(GL11.GL_VERTEX_ARRAY);
    Gdx.gl11.glColor4f(1.0f, 0.0f, 0.0f, 1.0f);
    Gdx.gl11.glVertexPointer(3, GL11.GL_FLOAT, BYTES_PER_VERTEX, vertices);
    Gdx.gl11.glDrawArrays(GL11.GL_TRIANGLES, 0, 9);
    Gdx.gl11.glDisableClientState(GL11.GL_VERTEX_ARRAY);
}
The problem here is 'vertices'. I have no idea what that should be. After lots of googling I came up with:
final int BYTES_PER_VERTEX = (3 + 4) * 4; // 3 position floats + 4 color floats, 4 bytes each

public void create () {
    ByteBuffer buffer = ByteBuffer.allocateDirect(BYTES_PER_VERTEX * 3);
    buffer.order(ByteOrder.nativeOrder());
    vertices = buffer.asFloatBuffer();
    float[] verts = {
        0.0f, 1.0f, 0.0f, 1, 0, 0, 0,
        1.0f, 0.0f, 0.0f, 0, 1, 0, 0,
        -1.0f, 0.0f, 0.0f, 0, 0, 1, 0};
    vertices.put(verts);
    vertices.flip();
}
...and that seems to display a triangle, but the vertex values are not the same as in the original example (the z value is 0 instead of -3; with -3 I wouldn't see anything).
Can anyone shed any light on vertices?
Here's what I have. I create a square instead of a triangle, but you get the gist. A lot of the random code in the iPhone tutorial is taken care of internally. You can dissect the libgdx source if you are curious how it goes about the calls internally in Java (camera management, mesh management, etc.).
In create():
mesh = new Mesh(true, 4, 4,
        new VertexAttribute(Usage.Position, 3, "a_position"),
        new VertexAttribute(Usage.ColorPacked, 4, "a_color"));
mesh.setVertices(new float[] {
        -1.0f, -1.0f, -3.0f, Color.toFloatBits(255, 0, 0, 255),
        1.0f, -1.0f, -3.0f, Color.toFloatBits(255, 0, 0, 255),
        -1.0f, 1.0f, -3.0f, Color.toFloatBits(255, 0, 0, 255),
        1.0f, 1.0f, -3.0f, Color.toFloatBits(255, 0, 0, 255)});
mesh.setIndices(new short[] { 0, 1, 2, 3 });
In resize():
float aspectRatio = (float) width / (float) height;
camera = new PerspectiveCamera(67, 2f * aspectRatio, 2f);
camera.near = 0.1f;
camera.translate(0, 0, 0);
In render():
Gdx.gl11.glClearColor(0.7f, 0.7f, 0.7f, 1.0f);
Gdx.gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
mesh.render(GL10.GL_TRIANGLE_STRIP, 0, 4);
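One caveat: depending on the libgdx version, the camera's matrices may still need to be uploaded each frame before the mesh renders. A hedged sketch, assuming the old fixed-function Camera.apply(GL10) helper exists in your build (verify against your libgdx version):
    camera.update();         // recompute the projection and view matrices
    camera.apply(Gdx.gl10);  // load them into the fixed-function matrix stacks
    mesh.render(GL10.GL_TRIANGLE_STRIP, 0, 4);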
Hope that helps.
Resources:
http://dpk.net/2011/03/07/libgdx-cubes-handling-inputs-in-applicationlistener-render/
http://www.badlogicgames.com/wordpress/?p=2032
Having constructed the following OpenGL ES program, which renders a simple textured cube using ambient light alone, I have come across a strange anomaly while attempting to use the glMaterialfv function with either the GL_FRONT or GL_BACK parameter. While the material is correctly processed in conjunction with the GL_FRONT_AND_BACK parameter, neither GL_FRONT nor GL_BACK appears to produce the correct results. As my normals appear to work in the presence of a directional light source, I can only assume that I'm missing something quite obvious. Could this possibly be an issue with the Android emulator itself?
package tal.cube1;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import javax.microedition.khronos.opengles.GL11;
import android.graphics.BitmapFactory;
import android.opengl.GLU;
import android.opengl.GLUtils;
import android.opengl.GLSurfaceView.Renderer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import android.content.res.*;
import android.graphics.Bitmap;
public class OpenGLRenderer implements Renderer
{
    private final float mf_textureCoordinates[] =
    {
        0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.0f
    };

    private final float mf_normals[] =
    {
        0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1,
        0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
        0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1,
        0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0,
        -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0,
        1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0
    };

    private final float mf_vertices[] =
    {
        -1, 1, 1, -1, -1, 1, 1, -1, 1, 1, 1, 1,
        -1, 1, -1, -1, 1, 1, 1, 1, 1, 1, 1, -1,
        1, 1, -1, 1, -1, -1, -1, -1, -1, -1, 1, -1,
        -1, -1, 1, -1, -1, -1, 1, -1, -1, 1, -1, 1,
        -1, 1, -1, -1, -1, -1, -1, -1, 1, -1, 1, 1,
        1, 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1
    };

    private final short mf_indices[] =
    {
        0, 1, 2, 0, 2, 3,
        4, 5, 6, 4, 6, 7,
        8, 9, 10, 8, 10, 11,
        12, 13, 14, 12, 14, 15,
        16, 17, 18, 16, 18, 19,
        20, 21, 22, 20, 22, 23
    };

    private final float mf_ambientLight[] =
    {
        1.0f, 1.0f, 1.0f, 1.0f
    };

    private final float mf_ambientMaterial[] =
    {
        1.0f, 0.0f, 0.0f, 1.0f
    };

    private FloatBuffer m_vertexBuffer;
    private FloatBuffer m_normalBuffer;
    private FloatBuffer m_textureBuffer;
    private ShortBuffer m_indexBuffer;
    private Bitmap m_texture;
    private int m_textures[];
    private float m_angle = 0.0f;

    public OpenGLRenderer(Resources p_resources)
    {
        super();
        m_texture = BitmapFactory.decodeResource(p_resources, R.drawable.crate);
    }
    @Override public void onSurfaceCreated(GL10 p_gl, EGLConfig p_config)
    {
        ByteBuffer vbb = ByteBuffer.allocateDirect(mf_vertices.length * 4);
        vbb.order(ByteOrder.nativeOrder());
        m_vertexBuffer = vbb.asFloatBuffer();
        m_vertexBuffer.put(mf_vertices);
        m_vertexBuffer.position(0);

        ByteBuffer nbb = ByteBuffer.allocateDirect(mf_normals.length * 4);
        nbb.order(ByteOrder.nativeOrder());
        m_normalBuffer = nbb.asFloatBuffer();
        m_normalBuffer.put(mf_normals);
        m_normalBuffer.position(0);

        ByteBuffer tbb = ByteBuffer.allocateDirect(mf_textureCoordinates.length * 4);
        tbb.order(ByteOrder.nativeOrder());
        m_textureBuffer = tbb.asFloatBuffer();
        m_textureBuffer.put(mf_textureCoordinates);
        m_textureBuffer.position(0);

        ByteBuffer ibb = ByteBuffer.allocateDirect(mf_indices.length * 2);
        ibb.order(ByteOrder.nativeOrder());
        m_indexBuffer = ibb.asShortBuffer();
        m_indexBuffer.put(mf_indices);
        m_indexBuffer.position(0);

        m_textures = new int[1];
        p_gl.glBindTexture(GL10.GL_TEXTURE_2D, m_textures[0]);
        p_gl.glGenTextures(1, m_textures, 0);
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, m_texture, 0);

        p_gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        p_gl.glClearDepthf(1.0f);
        p_gl.glShadeModel(GL10.GL_FLAT);
        p_gl.glDepthFunc(GL10.GL_LEQUAL);
        p_gl.glFrontFace(GL10.GL_CCW);
        p_gl.glCullFace(GL10.GL_BACK);
        p_gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
        p_gl.glDisable(GL10.GL_DITHER);
        p_gl.glEnable(GL10.GL_DEPTH_TEST);
        p_gl.glEnable(GL10.GL_CULL_FACE);
        p_gl.glEnable(GL10.GL_LIGHTING);
        p_gl.glEnable(GL10.GL_TEXTURE_2D);

        p_gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
        p_gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        p_gl.glEnableClientState(GL11.GL_NORMAL_ARRAY);
        p_gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, m_textureBuffer);
        p_gl.glNormalPointer(GL10.GL_FLOAT, 0, m_normalBuffer);
        p_gl.glVertexPointer(3, GL10.GL_FLOAT, 0, m_vertexBuffer);

        p_gl.glLightModelfv(GL10.GL_LIGHT_MODEL_AMBIENT, mf_ambientLight, 0);
        p_gl.glMaterialfv(GL10.GL_FRONT_AND_BACK, GL10.GL_AMBIENT, mf_ambientMaterial, 0);
    }
    @Override public void onDrawFrame(GL10 p_gl)
    {
        p_gl.glClear(GL10.GL_COLOR_BUFFER_BIT |
                GL10.GL_DEPTH_BUFFER_BIT);
        p_gl.glLoadIdentity();
        p_gl.glTranslatef(0.0f, 0.0f, -8);
        p_gl.glRotatef(m_angle, 1.0f, 1.0f, 1.0f);
        p_gl.glDrawElements(GL10.GL_TRIANGLES, m_indexBuffer.capacity(),
                GL10.GL_UNSIGNED_SHORT, m_indexBuffer);
        m_angle += 1.0f;
    }
    @Override public void onSurfaceChanged(GL10 p_gl, int p_width, int p_height)
    {
        p_gl.glViewport(0, 0, p_width, p_height);
        p_gl.glMatrixMode(GL10.GL_PROJECTION);
        p_gl.glLoadIdentity();
        GLU.gluPerspective(p_gl, 45.0f, (float) p_width / (float) p_height,
                0.1f, 100.0f);
        p_gl.glMatrixMode(GL10.GL_MODELVIEW);
        p_gl.glLoadIdentity();
    }
}
Having now reviewed the OpenGL ES 1.1 documentation, I can confirm that the GL_FRONT and GL_BACK parameters are only supported under the full OpenGL 1.1 specification. My advice for anyone developing for the OpenGL ES platform is to ensure that their documentation specifically covers the "ES" subset of OpenGL. Due to some erroneous assumptions, not to mention a fair measure of lethargy, I have spent many hours needlessly testing features which are not supported by the target platform. Just like baking a cake, preparation is everything, and I deserve nothing less than a swift kick in the knackers for adopting such a schoolboy approach to an otherwise solid API. Just to reiterate: while this may be common sense for most people, the full OpenGL specification is of little value if you're developing for its lesser "ES" counterpart.
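A cheap way to catch this class of mistake at runtime is to check glGetError() right after the suspect call; on a conforming ES 1.1 driver, an unsupported face parameter should raise GL_INVALID_ENUM. A sketch (assuming android.util.Log is imported):
    p_gl.glMaterialfv(GL10.GL_FRONT, GL10.GL_AMBIENT, mf_ambientMaterial, 0);
    int error = p_gl.glGetError();
    if (error != GL10.GL_NO_ERROR) {
        // Expect 0x500 (GL_INVALID_ENUM): ES 1.1 only accepts GL_FRONT_AND_BACK here
        Log.w("OpenGLRenderer", "glMaterialfv error: 0x" + Integer.toHexString(error));
    }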