I am new to OpenGL ES 2.0. I want to set different colors for each vertex of a triangle:
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.opengl;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import android.opengl.GLES20;
/**
* A two-dimensional square for use as a drawn object in OpenGL ES 2.0.
*/
public class Square {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// The matrix must be included as a modifier of gl_Position.
// Note that the uMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
" gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = {
-0.5f, 0.5f, 0.0f, // top left
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
0.5f, 0.5f, 0.0f }; // top right
private final short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per float coordinate
float color[] = { 0.2f, 0.709803922f, 0.898039216f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f };
/**
* Sets up the drawing object data for use in an OpenGL ES context.
*/
public Square() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
// prepare shaders and OpenGL program
int vertexShader = MyGLRenderer.loadShader(
GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyGLRenderer.loadShader(
GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // create OpenGL program executables
}
/**
* Encapsulates the OpenGL ES instructions for drawing this shape.
*
* @param mvpMatrix - The model-view-projection matrix with which to draw
* this shape.
*/
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(
mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
MyGLRenderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
MyGLRenderer.checkGlError("glUniformMatrix4fv");
// Draw the square
GLES20.glDrawElements(
GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
I have one more question: should I create my own matrices for rotation, scaling and translation, or should I just use functions like Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, mRotationMatrix, 0);
as they are?
If this is a question about lighting a square, you should add what you have tried, what went wrong, or which part you are unclear about. There seems to be no code relevant to lighting in what you posted. There are a lot of posts on how to do that on the web.
To set different colors on the vertices you need a new attribute in your shaders instead of the uniform. The vertex shader should have attribute vec4 aColor; and varying vec4 vColor;, then set vColor = aColor; in main. The fragment shader should contain varying vec4 vColor;, which replaces uniform vec4 vColor;. In the Java code you do exactly the same as for the positions, but with the color array, binding it to aColor.
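As a rough sketch of those changes (the colorBuffer field, the aColorHandle variable and the vertexColors array below are illustrative names and values, not part of the original sample):
// Vertex shader: take the color in as an attribute and hand it to the fragment shader as a varying.
private final String vertexShaderCode =
        "uniform mat4 uMVPMatrix;" +
        "attribute vec4 vPosition;" +
        "attribute vec4 aColor;" +
        "varying vec4 vColor;" +
        "void main() {" +
        "  vColor = aColor;" +
        "  gl_Position = uMVPMatrix * vPosition;" +
        "}";
// Fragment shader: the varying replaces the old uniform vec4 vColor.
private final String fragmentShaderCode =
        "precision mediump float;" +
        "varying vec4 vColor;" +
        "void main() {" +
        "  gl_FragColor = vColor;" +
        "}";
// One RGBA color per vertex (the square has four vertices, so four colors are needed).
static float vertexColors[] = {
        0.2f, 0.709803922f, 0.898039216f, 1.0f,
        1.0f, 0.0f, 0.0f, 1.0f,
        0.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 0.0f, 1.0f };
// In the constructor, load the colors into a direct FloatBuffer, exactly as done for squareCoords.
ByteBuffer cb = ByteBuffer.allocateDirect(vertexColors.length * 4);
cb.order(ByteOrder.nativeOrder());
colorBuffer = cb.asFloatBuffer();
colorBuffer.put(vertexColors);
colorBuffer.position(0);
// In draw(), bind the buffer to aColor instead of calling glUniform4fv on vColor.
int aColorHandle = GLES20.glGetAttribLocation(mProgram, "aColor");
GLES20.glEnableVertexAttribArray(aColorHandle);
GLES20.glVertexAttribPointer(aColorHandle, 4, GLES20.GL_FLOAT, false,
        4 * 4, colorBuffer); // 4 components * 4 bytes per float
// ... after drawing:
GLES20.glDisableVertexAttribArray(aColorHandle);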
Whether to create separate matrices for rotation, scaling and translation depends on what you are doing, but generally no, storing them is a bad idea. If you need a reference to those values, it is better to keep the angle, scale and translation data as floats and vectors, and use them to construct the matrix when needed.
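For instance, a minimal per-frame sketch using android.opengl.Matrix (mAngleDegrees, mScale and mTranslateX/Y are placeholder fields, and mMVPMatrix is assumed to be the projection*view matrix the renderer already computes):
float[] model = new float[16];
float[] scratch = new float[16];
// Rebuild the model matrix from plain floats each frame instead of storing matrices.
Matrix.setIdentityM(model, 0);
Matrix.translateM(model, 0, mTranslateX, mTranslateY, 0f);
Matrix.rotateM(model, 0, mAngleDegrees, 0f, 0f, 1f);
Matrix.scaleM(model, 0, mScale, mScale, 1f);
// Combine with the view-projection matrix and draw.
Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, model, 0);
mSquare.draw(scratch);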
Related
I'm using OpenGL and I already have everything up and running. I can plot triangles and other shapes, but now I need to make a tube. It doesn't need to be hollow (if that makes it easier).
Something like this: Image of the cylinder
How can I define a cylinder in 3D? I think I need x, y, z and a radius. Can someone give me ideas where to start? Thank you.
Draw a triangle code:
class Triangle {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
// Note that the uMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
" gl_Position = uMVPMatrix * vPosition;" +
"}";
// Use to access and set the view transformation
private int mMVPMatrixHandle;
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float triangleCoords[] = { // in counterclockwise order:
0.0f, 0.622008459f, 0.0f, // top
-0.5f, -0.311004243f, 0.0f, // bottom left
0.5f, -0.311004243f, 0.0f // bottom right
};
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.5f, 0.5f, 0.5f, 1.0f };
private final int mProgram;
private short[] indices = {0,1,2,0,2,3};
private FloatBuffer vertexBuffer;
private ShortBuffer indexBuffer;
public Triangle() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(triangleCoords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
int vertexShader = OpenGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = OpenGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram();
// add the vertex shader to program
GLES20.glAttachShader(mProgram, vertexShader);
// add the fragment shader to program
GLES20.glAttachShader(mProgram, fragmentShader);
// creates OpenGL ES program executables
GLES20.glLinkProgram(mProgram);
}
private int mPositionHandle;
private int mColorHandle;
private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per float coordinate
public void drawMatrix(float[] mvpMatrix) { // pass in the calculated transformation matrix
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
// Pass the projection and view transformation to the shader
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
No matter how you define it internally, you still have to provide a set of triangles, lines or points for OpenGL ES.
So you would need to define two equal regular polygons that lie on parallel planes. The more vertices they have, the closer they will be to circles. You can see this yourself with the formula r = R·cos(π/n),
where R is the radius of the circumscribed circle, r is the radius of the inscribed circle, and n is the number of vertices. The closer r is to R, the closer the polygon is to the circle. So the polygon will appear more like a circle if you use more vertices: when n→∞, π/n→0 and cos(π/n)→1, so r→R.
You could divide the polygon into triangles in different ways. For example, you could connect every edge to the polygon's center point (a triangle fan around the center).
This way you'll have n triangles.
Or you could pick one vertex and fan out from it to all the others.
This way you will have n-2 triangles.
There could be better ways, search for them on the net.
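As a starting point, here is a rough sketch of how the side of such a cylinder could be generated as a triangle strip (the method name and the parameters n, radius and height are just illustrative):
// Build the side of a cylinder around the y-axis: for each of n+1 angular steps,
// emit one bottom-ring vertex and one top-ring vertex (suitable for GL_TRIANGLE_STRIP).
static float[] makeCylinderSide(int n, float radius, float height) {
    float[] verts = new float[(n + 1) * 2 * 3];
    int v = 0;
    for (int i = 0; i <= n; i++) {
        double angle = 2.0 * Math.PI * i / n;
        float x = (float) (radius * Math.cos(angle));
        float z = (float) (radius * Math.sin(angle));
        verts[v++] = x; verts[v++] = 0f;     verts[v++] = z; // bottom ring
        verts[v++] = x; verts[v++] = height; verts[v++] = z; // top ring
    }
    return verts;
}
// Drawn with: GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, (n + 1) * 2);
// The end caps, if you need them, can be triangle fans built the same way around the two center points.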
I'm trying to capture video and display it on the screen by attaching an OpenGL ES texture to an Android SurfaceTexture. I can't use a TextureView and implement SurfaceTextureListener as per this tutorial, since I am using Google Cardboard.
I have followed the Android documentation on how to initialise OpenGL ES 2.0 and use it, and also this tutorial on texturing.
Putting the two together I get a blank screen, and occasionally get <core_glBindTexture:572>: GL_INVALID_OPERATION in the console window.
Overwhelmed by so many new concepts, I'm not able to debug or even work out whether the two approaches can be combined like this. Here is my drawing code; it is initialised in onSurfaceCreated() of the MainActivity class and drawn from onEyeDraw(), which is Cardboard's draw function.
package com.example.rich.test3;
import android.hardware.Camera;
import android.opengl.GLES20;
import android.view.TextureView;
import java.nio.ShortBuffer;
import java.nio.FloatBuffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Created by rich on 03/05/2015.
*/
public class Square {
private java.nio.FloatBuffer vertexBuffer;
private java.nio.ShortBuffer drawListBuffer;
private final java.nio.FloatBuffer mCubeTextureCoordinates;
float color[] = { 1.f, 1.f, 1.f, 1.0f };
private final String vertexShaderCode =
"attribute vec4 vPosition;" +
"attribute vec2 a_TexCoordinate;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
" gl_Position = vPosition;" +
" v_TexCoordinate = a_TexCoordinate;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"uniform sampler2D u_Texture;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
"gl_FragColor = (texture2D(u_Texture, v_TexCoordinate));" +
"}";
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = {
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
-0.5f, 0.5f, 0.0f, // top left
0.5f, 0.5f, 0.0f}; // top right
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
private int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mTextureUniformHandle;
private int mTextureCoordinateHandle;
private final int mTextureCoordinateDataSize = 2;
private final int vertexCount = squareCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per float coordinate
private int mTextureDataHandle;
float textureCoordinates[] =
{0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f };
Camera _camera;
TextureView _textureView;
int[] textures;
android.graphics.SurfaceTexture _surface;
public Square()
{
ByteBuffer bb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
mCubeTextureCoordinates = ByteBuffer.allocateDirect(textureCoordinates.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mCubeTextureCoordinates.put(textureCoordinates).position(0);
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram();
textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
_surface = new android.graphics.SurfaceTexture(textures[0]);
_camera = Camera.open();
Camera.Size previewSize = _camera.getParameters().getPreviewSize();
try
{
_camera.setPreviewTexture(_surface);
}
catch (java.io.IOException ex)
{
// Console.writeLine (ex.Message);
}
final int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vertexShaderHandle, vertexShaderCode);
GLES20.glCompileShader(vertexShaderHandle);
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] == 0)
{
//do check here
}
final int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fragmentShaderHandle, fragmentShaderCode);
GLES20.glCompileShader(fragmentShaderHandle);
GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] == 0)
{
//do check here
}
GLES20.glAttachShader(mProgram, vertexShaderHandle);
GLES20.glAttachShader(mProgram, fragmentShaderHandle);
GLES20.glBindAttribLocation(mProgram, 0, "a_Position");
GLES20.glBindAttribLocation(mProgram, 0, "a_TexCoordinate");
GLES20.glLinkProgram(mProgram);
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] == 0)
{
//do check here
}
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
mTextureDataHandle = textures[0];
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}
public void draw()
{
_surface.updateTexImage();
GLES20.glUseProgram(mProgram);
mTextureUniformHandle = GLES20.glGetUniformLocation(mProgram, "u_Texture");
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "a_Position");
mColorHandle = GLES20.glGetAttribLocation(mProgram, "a_Color");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false,
0, mCubeTextureCoordinates);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glUniform1i(mTextureUniformHandle, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
When rendering a SurfaceTexture texture object, you need to use the GL_TEXTURE_EXTERNAL_OES texture target:
The texture object uses the GL_TEXTURE_EXTERNAL_OES texture target, which is defined by the GL_OES_EGL_image_external OpenGL ES extension. This limits how the texture may be used. Each time the texture is bound it must be bound to the GL_TEXTURE_EXTERNAL_OES target rather than the GL_TEXTURE_2D target. Additionally, any OpenGL ES 2.0 shader that samples from the texture must declare its use of this extension using, for example, an "#extension GL_OES_EGL_image_external : require" directive. Such shaders must also access the texture using the samplerExternalOES GLSL sampler type.
So you need to change your fragment shader like this, adding the #extension declaration and declaring your texture uniform as samplerExternalOES:
private final String fragmentShaderCode =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;" +
"uniform vec4 vColor;" +
"uniform samplerExternalOES u_Texture;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
"gl_FragColor = (texture2D(u_Texture, v_TexCoordinate));" +
"}";
Also in your draw() function, bind the texture like this:
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureDataHandle);
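Presumably the constructor should use the same target when it first binds the texture and sets the filtering parameters (this also needs import android.opengl.GLES11Ext), along these lines:
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
        GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
        GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);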
You can't use a normal texture to render a camera or video preview; you have to use the GL_TEXTURE_EXTERNAL_OES extension. I had the same problem and found a complete working solution on GitHub. The name of the project is android_instacam.
There you will find source code to study. If you want to see it in action directly on your device, it is also on the Play Store.
I'm following Google's tutorial (http://developer.android.com/training/graphics/opengl/environment.html) and I think I have done everything correctly. But I have a problem: the Triangle and Rectangle are invisible, and LogCat doesn't show any error.
This is my code for the rectangle:
package com.example.gameengine;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import android.opengl.GLES20;
/**
* A two-dimensional square for use as a drawn object in OpenGL ES 2.0.
*/
public class GLObject {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// The matrix must be included as a modifier of gl_Position.
// Note that the uMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
" gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = {
-0.5f, 0.5f, 0.0f, // top left
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
0.5f, 0.5f, 0.0f }; // top right
private final short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per float coordinate
float color[] = { 0.2f, 0.709803922f, 0.898039216f, 1.0f };
/**
* Sets up the drawing object data for use in an OpenGL ES context.
*/
public GLObject() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
// prepare shaders and OpenGL program
int vertexShader = GLRenderer.loadShader(
GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = GLRenderer.loadShader(
GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // create OpenGL program executables
}
/**
* Encapsulates the OpenGL ES instructions for drawing this shape.
*
* @param mvpMatrix - The model-view-projection matrix with which to draw
* this shape.
*/
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(
mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
GLRenderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
GLRenderer.checkGlError("glUniformMatrix4fv");
// Draw the square
GLES20.glDrawElements(
GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
And this is how I draw the rectangle:
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
// Draw square
mSquare.draw(mMVPMatrix);
The GLSurfaceView is correctly implemented (I can change the background color).
EDIT: Since vertex buffer objects are not being used for drawing here, there is no need to bind an ELEMENT_ARRAY_BUFFER. That being the case, have you tried specifying an identity matrix for the MVP, which will show the quad without any transformation? That would rule out the possibility of the quad not getting drawn at all and hence being invisible.
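A minimal version of that check, assuming mMVPMatrix is the renderer's scratch array, might be:
// Temporarily bypass the look-at and projection setup to verify the quad is drawn at all.
Matrix.setIdentityM(mMVPMatrix, 0);
mSquare.draw(mMVPMatrix);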
Is this the full drawing code? It looks like you are not binding the ELEMENT_ARRAY_BUFFER that is needed for indexing the vertices via drawListBuffer.
Using the calls below you can bind the buffers (bind 0 to go back to the default object):
glBindBuffer(GL_ARRAY_BUFFER, somebufferID);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, someotherBufferID);
I am having trouble getting my Android OpenGL ES 2.0 application working. I have a "Triangle" class very similar to several tutorials out there. In the "draw" function, mPositionHandle is -1 after the following line of code:
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
Below is the entire triangle class which has shader code etc. Any ideas what might cause this? Sorry, I am new to OpenGL programming, so maybe this is a dumb question.
public class Triangle {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private final FloatBuffer vertexBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
public float triangleCoords[];
private final int vertexCount = 3; //triangleCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per float coordinate
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.5f, 1.0f, 0.5f, 1.0f };
float edgeColor[] = { 0.0f, 0.0f, 0.0f, 1.0f };
public Triangle(float[] vertices) {
this.triangleCoords = vertices;
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(triangleCoords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
// prepare shaders and OpenGL program
int vertexShader = MyRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // create OpenGL program executables
}
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the facet vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the facet
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
MyRenderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
MyRenderer.checkGlError("glUniformMatrix4fv");
// Draw the facet
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
//If edge mode
//GLES20.glUniform4fv(mColorHandle, 1, edgeColor, 0);
//GLES20.glLineWidth(2.0f);
//GLES20.glDrawArrays(GLES20.GL_LINE_STRIP, 0, vertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
I have tried to add a fairly simple extension on top of Android's example OpenGL 2.0 project in order to add texturing to basic shapes. This seems pretty straightforward, but on certain devices (Samsung Nexus S, LG Optimus 3D, Samsung Galaxy S) the texture just does not render.
This is actually a problem that I am having on a much larger project, but I was able to reproduce the issue with the simple project below in the hope that someone here has an idea of where my code presents issues, or how to specifically architect GL textures for these devices (maybe there are issues with the devices).
To give an idea of how this object is used: in the GLSurfaceView.Renderer's onSurfaceCreated method I instantiate a Square() object, and in the onDrawFrame method I call Square's draw() method. However, all of the relevant code dealing with textures should appear in this Square class, which is almost identical to Google's own example.
Many thanks in advance to anyone who takes a crack at this.
class Square {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"attribute vec2 a_TexCoordinate;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = vPosition * uMVPMatrix;" +
" v_TexCoordinate = a_TexCoordinate;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform sampler2D u_Texture;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
" gl_FragColor = texture2D(u_Texture, v_TexCoordinate);" +
"}";
private final FloatBuffer vertexBuffer;
private final FloatBuffer textureBuffer;
private final ShortBuffer drawListBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = { -0.5f, 0.5f, 0.0f, // top left
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
0.5f, 0.5f, 0.0f }; // top right
final float[] previewTextureCoordinateData =
{
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 1.0f,
1.0f, 0.0f
};
private int textureDataHandle;
private int textureUniformHandle;
private int textureCoordinateHandle;
private final short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per float coordinate
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.2f, 0.709803922f, 0.898039216f, 1.0f };
private int loadTexture(final Context context, final int resourceId)
{
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if (textureHandle[0] != 0)
{
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false; // No pre-scaling
// Read in the resource
final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
// Bind to the texture in OpenGL
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Recycle the bitmap, since its data has been loaded into OpenGL.
bitmap.recycle();
}
if (textureHandle[0] == 0)
{
throw new RuntimeException("Error loading texture.");
}
return textureHandle[0];
}
public Square(Context context) {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
ByteBuffer texCoordinates = ByteBuffer.allocateDirect(previewTextureCoordinateData.length * 4);
texCoordinates.order(ByteOrder.nativeOrder());
textureBuffer = texCoordinates.asFloatBuffer();
textureBuffer.put(previewTextureCoordinateData);
textureBuffer.position(0);
// prepare shaders and OpenGL program
int vertexShader = MyGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
textureDataHandle = loadTexture(context, R.drawable.color_texture);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // create OpenGL program executables
}
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
textureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate");
GLES20.glVertexAttribPointer(textureCoordinateHandle, 2, GLES20.GL_FLOAT, false,
0, textureBuffer);
GLES20.glEnableVertexAttribArray(textureCoordinateHandle);
textureUniformHandle = GLES20.glGetUniformLocation(mProgram, "u_Texture");
MyGLRenderer.checkGlError("glGetUniformLocation");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureDataHandle);
GLES20.glUniform1i(textureUniformHandle, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
MyGLRenderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
MyGLRenderer.checkGlError("glUniformMatrix4fv");
// Draw the square
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
I'll guess that this is a power-of-two problem.
By default, the GL_TEXTURE_WRAP_S/T settings of glTexParameter are set to GL_REPEAT, and textures that use GL_REPEAT must be power-of-two sized:
Similarly, if the width or height of a texture image are not powers of two and either the GL_TEXTURE_MIN_FILTER is set to one of the functions that requires mipmaps or the GL_TEXTURE_WRAP_S or GL_TEXTURE_WRAP_T is not set to GL_CLAMP_TO_EDGE, then the texture image unit will return (R, G, B, A) = (0, 0, 0, 1).
You may start with a power-of-two texture, but when you use BitmapFactory.decodeResource to generate a bitmap, it helpfully(?) scales it based on the density of the device. So, for example, if you load a 512*512 source texture from the drawable folder on an HDPI device, I believe it scales it by 1.5x, so you're left with something that is not Po2.
This gives you the result that your textures don't work on a ton of devices, because those devices are all of a density that causes you to generate illegal texture sizes.
The solution in this case would be to place your (power of 2) source texture into the resource folder drawable-nodpi, which will prevent any density-based scaling. Either that or use CLAMP_TO_EDGE, which doesn't care about Po2.
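If you go the CLAMP_TO_EDGE route, the wrap modes can be set right after binding the texture in loadTexture(), something like:
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// With min/mag filters that don't need mipmaps (GL_NEAREST or GL_LINEAR, as already used here),
// non-power-of-two sizes are then legal in OpenGL ES 2.0.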