While learning how OpenGL ES works, I'm trying to get a textured square to appear on my screen. Here is the relevant code for the square; it is based on the Google example:
public class Shape2Square {
private static final String TAG = "Shape2Square";
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"attribute vec2 a_TexCoordinate;"+
"varying vec2 v_TexCoordinate;"+
"void main() {" +
"v_TexCoordinate = a_TexCoordinate;"+
"gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform sampler2D u_Texture"+
"varying vec2 v_TexCoordinate;"+
"void main() {" +
"gl_FragColor = texture2D(u_Texture, v_TexCoordinate);"+
"}";
private final FloatBuffer vertexBuffer;
private final FloatBuffer textureBuffer;
private final ShortBuffer drawListBuffer;
private final int mProgram;
private int mPositionHandle;
private int mMVPMatrixHandle;
private int mtexture;
private int mtexCoordHandler;
private int mtextureHandler;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = { -0.3f, -0.3f, 0.0f, // top left
-0.3f, 0.3f, 0.0f, // bottom left
0.3f, 0.3f, 0.0f, // bottom right
0.3f, -0.3f, 0.0f }; // top right
// u,v
static float texturedata[] = {0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f};
private final short drawOrder[] = { 0, 1, 2, 0, 2, 3 };
private final int vertexStride = COORDS_PER_VERTEX * 4;
public Shape2Square() {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
ByteBuffer bbtexture = ByteBuffer.allocateDirect(texturedata.length*4);
bbtexture.order(ByteOrder.nativeOrder());
textureBuffer = bbtexture.asFloatBuffer();
textureBuffer.put(texturedata);
textureBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
int vertexShader = CommonMethods.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = CommonMethods.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram();
Shape2Square.checkGlError("glCreateProgram");
GLES20.glAttachShader(mProgram, vertexShader);
Shape2Square.checkGlError("glAttachShader");
GLES20.glAttachShader(mProgram, fragmentShader);
Shape2Square.checkGlError("glAttachShader");
GLES20.glLinkProgram(mProgram);
Shape2Square.checkGlError("glLinkProgram");
}
The trouble I get from logcat points to the "draw" method of this square. Note that CommonMethods.loadTexture simply loads a bitmap into OpenGL:
EDIT1: (added checkGlError calls all over the code)
public void draw(float[] mvpMatrix, int textureid) {
GLES20.glUseProgram(mProgram);
Shape2Square.checkGlError("glUseProgram");
vertexBuffer.position(0);
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
Shape2Square.checkGlError("glVertexAttribPointer");
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
Shape2Square.checkGlError("glVertexAttribPointer");
GLES20.glEnableVertexAttribArray(mPositionHandle);
Shape2Square.checkGlError("glEnableVertexAttribArray");
textureBuffer.position(0);
mtexCoordHandler = GLES20.glGetAttribLocation(mProgram,"a_TexCoordinate");
Shape2Square.checkGlError("glGetAttribLocation");
GLES20.glEnableVertexAttribArray(mtexCoordHandler);
Shape2Square.checkGlError("glEnableVertexAttribArray");
GLES20.glVertexAttribPointer(mtexCoordHandler, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
Shape2Square.checkGlError("glVertexAttribPointer");
mtextureHandler = GLES20.glGetUniformLocation(mProgram, "u_Texture");
Shape2Square.checkGlError("mtextureHandler");
mtexture = CommonMethods.loadTexture(textureid);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
Shape2Square.checkGlError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mtexture);
Shape2Square.checkGlError("glBindTexture");
GLES20.glUniform1i(mtextureHandler, 0);
Shape2Square.checkGlError("glUniform1i");
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Shape2Square.checkGlError("glGetUniformLocation");
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
Shape2Square.checkGlError("glUniformMatrix4fv");
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
Shape2Square.checkGlError("glDrawElements");
GLES20.glDisableVertexAttribArray(mPositionHandle);
Shape2Square.checkGlError("glDisableVertexAttribArray");
}
public static void checkGlError(String glOperation) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, glOperation + ": glError " + error);
throw new RuntimeException(glOperation + ": glError " + error);
}
}
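For reference, CommonMethods.loadTexture is not shown here. A typical helper of this kind looks roughly like the sketch below, assuming it decodes a drawable resource and uploads it as a GL_TEXTURE_2D; the call in draw() passes a single id, so the real helper's signature and internals may differ. Also note that draw() above calls it every frame, which re-uploads the bitmap each time; it is usually called once and the returned texture name reused.

// Sketch only, not the actual CommonMethods code.
public static int loadTexture(Context context, int resourceId) {
    final int[] textureHandle = new int[1];
    GLES20.glGenTextures(1, textureHandle, 0);
    if (textureHandle[0] != 0) {
        final BitmapFactory.Options options = new BitmapFactory.Options();
        options.inScaled = false; // no pre-scaling
        final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0); // upload the pixels
        bitmap.recycle(); // the data now lives in the GL texture
    } else {
        throw new RuntimeException("Error generating texture name.");
    }
    return textureHandle[0];
}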
Doing this causes an error and forces the application to shut down. After messing about with the code, logcat points to glGetUniformLocation, with mMVPMatrixHandle as the most likely culprit, which is strange because it worked before. To prove my point: if I delete all the texture code and use a vColor instead (both in the Java code and in the fragment shader code), with all the usual plumbing, I get the coloured square. I don't understand how the mistake can lie with glGetUniformLocation or how to fix it, so any help is appreciated.
EDIT1: I tried commenting out the checkGlError call after glGetUniformLocation, but that only led to disappointment, as the error now shows up at "glUniformMatrix4fv". Something is terribly wrong...
EDIT2: I decided to add checkGlError calls all over the code, and so far it seems that the ENTIRE draw method (including glUseProgram) reports an error. The GL calls that happen before the draw method (including attaching the shaders and linking) are fine, with no errors.
OK, I found out what I did wrong (I'm a noob...).
The fragment shader code needs a ";" after u_Texture. Can't believe that held me back for two days :(. Anyway, thanks for the help, ClayMontgomery. I have drawn the shape upside down, though, but I'm pretty sure I can fix that.
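One thing that would have caught this much sooner is checking the compile status inside the loadShader helper; a shader with a syntax error fails to compile silently unless you ask for the log. A sketch of such a check (the real CommonMethods.loadShader may differ):

public static int loadShader(int type, String shaderCode) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, shaderCode);
    GLES20.glCompileShader(shader);
    final int[] compiled = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
        // The info log points at the offending token, e.g. the missing ';' after u_Texture
        Log.e(TAG, "Could not compile shader: " + GLES20.glGetShaderInfoLog(shader));
        GLES20.glDeleteShader(shader);
        return 0;
    }
    return shader;
}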
I am attempting to use a GLSurfaceView to render output from Camera2. It works OK when the device is in portrait mode; however, when rotating to landscape the picture is of course "sideways".
I've done a bit of looking around as to how to apply rotation to the output, but there seem to be a multitude of different approaches, and I was hoping there would be a simple one that would fit into my code.
This is an abridged version of my GLSurfaceView descendant:
public class DWGLCameraView extends GLSurfaceView implements Renderer, OnFrameAvailableListener {
// Other parts snipped
public void onDrawFrame(GL10 gl) {
mSurfaceTexture.updateTexImage();
float[] mtx = new float[16];
mSurfaceTexture.getTransformMatrix(mtx);
// Can I do something here to apply the rotation?
mDrawer.draw(mtx);
}
}
...and this (if it helps) is the DWGLDrawer class containing the draw routine:
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
public class DWGLDrawer {
private final String vertexShaderCode =
"attribute vec4 vPosition;" +
"attribute vec4 inputTextureCoordinate;" +
"uniform mat4 u_xform;\n" +
"varying vec2 textureCoordinate;" +
"void main()" +
"{"+
"gl_Position = vPosition;"+
"textureCoordinate = (u_xform * inputTextureCoordinate).xy;" +
"}";
private final String fragmentShaderCode =
"#extension GL_OES_EGL_image_external : require\n"+
"precision mediump float;" +
"varying vec2 textureCoordinate;\n" +
"uniform samplerExternalOES s_texture;\n" +
"void main() {" +
" gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
"}";
private FloatBuffer vertexBuffer, textureVerticesBuffer;
private ShortBuffer drawListBuffer;
private final int mProgram;
private int mPositionHandle;
private int mTextureCoordHandle;
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
// number of coordinates per vertex in this array
private static final int COORDS_PER_VERTEX = 2;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
private static float squareCoords[] = {
-1.0f, 1.0f,
-1.0f, -1.0f,
1.0f, -1.0f,
1.0f, 1.0f,
};
private static float textureVertices[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
};
private int texture;
private int mTransformLocation;
public DWGLDrawer(int texture) {
this.texture = texture;
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
bb2.order(ByteOrder.nativeOrder());
textureVerticesBuffer = bb2.asFloatBuffer();
textureVerticesBuffer.put(textureVertices);
textureVerticesBuffer.position(0);
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // creates OpenGL ES program executables
mTransformLocation = GLES20.glGetUniformLocation(mProgram, "u_xform");
}
public void draw(float[] mtx) {
GLES20.glUseProgram(mProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture);
GLES20.glUniformMatrix4fv(mTransformLocation, 1, false, mtx, 0);
// get handle to vertex shader's vPosition member
int positionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(positionHandle);
// Prepare the square coordinate data
GLES20.glVertexAttribPointer(positionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
int textureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
GLES20.glEnableVertexAttribArray(textureCoordHandle);
// textureVerticesBuffer.clear();
// textureVerticesBuffer.put( transformTextureCoordinates( textureVertices, mtx ));
// textureVerticesBuffer.position(0);
GLES20.glVertexAttribPointer(textureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureVerticesBuffer);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
// Disable vertex array
GLES20.glDisableVertexAttribArray(positionHandle);
GLES20.glDisableVertexAttribArray(textureCoordHandle);
}
private int loadShader(int type, String shaderCode) {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
private float[] transformTextureCoordinates( float[] coords, float[] matrix) {
float[] result = new float[ coords.length ];
float[] vt = new float[4];
for ( int i = 0 ; i < coords.length ; i += 2 ) {
float[] v = { coords[i], coords[i+1], 0 , 1 };
Matrix.multiplyMV(vt, 0, matrix, 0, v, 0);
result[i] = vt[0];
result[i+1] = vt[1];
}
return result;
}
}
As per my comment in onDrawFrame, I figured that I might be able to apply rotation there. I had tried this:
if (mRotation > 0)
Matrix.rotateM(mtx, 0, mRotation, 0f, 0f, -1f);
(mRotation is the rotation value in degrees)
However, that resulted in an unintelligible picture, so I assume it's either completely wrong or there's something else I need to do.
Please remember that I am using Camera2, so setDisplayOrientation (from Camera) is not an option.
EDIT
I've now included the entire declaration of DWGLDrawer and added to the description.
In general, your approach should be correct (assuming mRotation is in degrees and is based on something like Display.getRotation() to determine your UI orientation - note that getRotation doesn't return degrees so the value has to be adjusted).
However, you don't have your shader code here, so it's hard to say whether you're using mTransformLocation in a way that's compatible with the rotateM call. Most likely you just need to debug the values you see in the matrix, what the rotate call does to them, and so on, to figure out where the math goes haywire. Unfortunately, that's common when dealing with transforms in OpenGL (or anywhere, really).
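One detail worth adding, as a hedged sketch rather than a definitive fix: the SurfaceTexture transform maps texture coordinates in the 0..1 range, so a rotation applied to it usually needs to pivot around the centre of the texture (0.5, 0.5). Rotating about the origin, as a bare rotateM call does, swings the coordinates out of range, which can easily produce an unintelligible picture. Assuming mRotation is the UI rotation in degrees, onDrawFrame could look like this (the sign of the angle or the axis may still need flipping per device):

public void onDrawFrame(GL10 gl) {
    mSurfaceTexture.updateTexImage();
    float[] mtx = new float[16];
    mSurfaceTexture.getTransformMatrix(mtx);
    if (mRotation != 0) {
        // Pivot around the texture centre, not (0, 0)
        Matrix.translateM(mtx, 0, 0.5f, 0.5f, 0f);
        Matrix.rotateM(mtx, 0, mRotation, 0f, 0f, 1f);
        Matrix.translateM(mtx, 0, -0.5f, -0.5f, 0f);
    }
    mDrawer.draw(mtx);
}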
I'm trying to capture video and display it on the screen by attaching an OpenGL ES texture to an Android SurfaceTexture. I can't use a TextureView and implement SurfaceTextureListener as per this tutorial, since I am using Google Cardboard.
I have followed the Android documentation on how to initialise OpenGL ES 2.0 and use it, and also this tutorial on texturing.
Putting the two together, I get a blank screen and occasionally get <core_glBindTexture:572>: GL_INVALID_OPERATION in the console window.
Overwhelmed by so many new concepts, I'm not able to debug it or even tell whether the two approaches can be combined like this. Here is my drawing code; it is initialised in onSurfaceCreated() of the MainActivity class and drawn from onEyeDraw(), which is Cardboard's draw function.
package com.example.rich.test3;
import android.hardware.Camera;
import android.opengl.GLES20;
import android.view.TextureView;
import java.nio.ShortBuffer;
import java.nio.FloatBuffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Created by rich on 03/05/2015.
*/
public class Square {
private java.nio.FloatBuffer vertexBuffer;
private java.nio.ShortBuffer drawListBuffer;
private final java.nio.FloatBuffer mCubeTextureCoordinates;
float color[] = { 1.f, 1.f, 1.f, 1.0f };
private final String vertexShaderCode =
"attribute vec4 vPosition;" +
"attribute vec2 a_TexCoordinate;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
" gl_Position = vPosition;" +
" v_TexCoordinate = a_TexCoordinate;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"uniform sampler2D u_Texture;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
"gl_FragColor = (texture2D(u_Texture, v_TexCoordinate));" +
"}";
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = {
-0.5f, -0.5f, 0.0f, // bottom left
0.5f, -0.5f, 0.0f, // bottom right
-0.5f, 0.5f, 0.0f, // top left
0.5f, 0.5f, 0.0f}; // top right
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices
private int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mTextureUniformHandle;
private int mTextureCoordinateHandle;
private final int mTextureCoordinateDataSize = 2;
private final int vertexCount = squareCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
private int mTextureDataHandle;
float textureCoordinates[] =
{0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f };
Camera _camera;
TextureView _textureView;
int[] textures;
android.graphics.SurfaceTexture _surface;
public Square()
{
ByteBuffer bb = ByteBuffer.allocateDirect(
// (# of coordinate values * 4 bytes per float)
squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
mCubeTextureCoordinates = ByteBuffer.allocateDirect(textureCoordinates.length * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mCubeTextureCoordinates.put(textureCoordinates).position(0);
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram();
textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
_surface = new android.graphics.SurfaceTexture(textures[0]);
_camera = Camera.open();
Camera.Size previewSize = _camera.getParameters().getPreviewSize();
try
{
_camera.setPreviewTexture(_surface);
}
catch (java.io.IOException ex)
{
// Console.writeLine (ex.Message);
}
final int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vertexShaderHandle, vertexShaderCode);
GLES20.glCompileShader(vertexShaderHandle);
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] == 0)
{
//do check here
}
final int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fragmentShaderHandle, fragmentShaderCode);
GLES20.glCompileShader(fragmentShaderHandle);
GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] == 0)
{
//do check here
}
GLES20.glAttachShader(mProgram, vertexShaderHandle);
GLES20.glAttachShader(mProgram, fragmentShaderHandle);
GLES20.glBindAttribLocation(mProgram, 0, "a_Position");
GLES20.glBindAttribLocation(mProgram, 0, "a_TexCoordinate");
GLES20.glLinkProgram(mProgram);
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] == 0)
{
//do check here
}
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
mTextureDataHandle = textures[0];
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}
public void draw()
{
_surface.updateTexImage();
GLES20.glUseProgram(mProgram);
mTextureUniformHandle = GLES20.glGetUniformLocation(mProgram, "u_Texture");
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "a_Position");
mColorHandle = GLES20.glGetAttribLocation(mProgram, "a_Color");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false,
0, mCubeTextureCoordinates);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glUniform1i(mTextureUniformHandle, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
When rendering a SurfaceTexture texture object, you need to use the GL_TEXTURE_EXTERNAL_OES texture target:
The texture object uses the GL_TEXTURE_EXTERNAL_OES texture target, which is defined by the GL_OES_EGL_image_external OpenGL ES extension. This limits how the texture may be used. Each time the texture is bound it must be bound to the GL_TEXTURE_EXTERNAL_OES target rather than the GL_TEXTURE_2D target. Additionally, any OpenGL ES 2.0 shader that samples from the texture must declare its use of this extension using, for example, an "#extension GL_OES_EGL_image_external : require" directive. Such shaders must also access the texture using the samplerExternalOES GLSL sampler type.
So you need to change your fragment shader like this, adding the #extension declaration and declaring your texture uniform as samplerExternalOES:
private final String fragmentShaderCode =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;" +
"uniform vec4 vColor;" +
"uniform samplerExternalOES u_Texture;" +
"varying vec2 v_TexCoordinate;" +
"void main() {" +
"gl_FragColor = (texture2D(u_Texture, v_TexCoordinate));" +
"}";
Also in your draw() function, bind the texture like this:
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureDataHandle);
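The same applies in the constructor, where the texture is currently bound and configured as GL_TEXTURE_2D after the SurfaceTexture has been attached to it; that mismatch of targets is most likely what triggers the GL_INVALID_OPERATION on glBindTexture. A sketch of that setup using the external target (import android.opengl.GLES11Ext is assumed):

// After glGenTextures / new SurfaceTexture(textures[0]):
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
// Set filtering on the external target as well
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);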
You can't use a normal texture to render a camera or video preview; you have to use the GL_TEXTURE_EXTERNAL_OES extension. I had the same problem and found a complete working solution on GitHub. The name of the project is android_instacam.
There you will find source code to study. If you want to see it in action directly on your device, just go to the Play Store here.
Hi, I am new to Android OpenGL. I am trying to create a few 2D objects in OpenGL ES 2.0. Now I am trying to draw a line, and my code looks like this:
public class UserLine
{
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
static final int COORDS_PER_VERTEX = 3;
private final int vertexStride = COORDS_PER_VERTEX * 4;
private float[] pathCords =
{
0.00f, 0.0f, 0.0f,
0.5f, 0.3f, 0.0f
};
private short[] pathDrawOrder = {0, 1};
private float[] color = {1.0f, 0.0f, 0.0f, 1.0f};
public UserLine()
{
ByteBuffer bb = ByteBuffer.allocateDirect(pathCords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(pathCords);
vertexBuffer.position(0);
ByteBuffer dlb = ByteBuffer.allocateDirect(pathDrawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(pathDrawOrder);
drawListBuffer.position(0);
int vertexShader = MyGLRenderer.loadShader(GLES20.GL_VERTEX_SHADER,vertexShaderCode);
int fragmentShader = MyGLRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,fragmentShaderCode);
mProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(mProgram, vertexShader);
GLES20.glAttachShader(mProgram, fragmentShader);
GLES20.glLinkProgram(mProgram);
}
public void draw(float[] mvpMatrix) {
GLES20.glUseProgram(mProgram);
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
GLES20.glDrawElements(GLES20.GL_LINES, color.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
GLES20.glDisableVertexAttribArray(mPositionHandle);
GLES20.glDisable(mColorHandle);
}
}
So here is my problem: according to this code it should draw one line, but alongside that line it also draws one extra line, which is not the expected behavior.
Am I doing something wrong, or is that the default behavior? Need help. Thank you.
I think the problem is here:
GLES20.glDrawElements(GLES20.GL_LINES, color.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
The second parameter is the count, and I don't understand why you have it set to the length of color. That's just the color of the line that you pass into the shader, so its length is always 4:
private float[] color = {1.0f, 0.0f, 0.0f, 1.0f};
You are drawing a line with 2 points, as specified in pathDrawOrder, so I think using the length of that instead is what you need:
GLES20.glDrawElements(GLES20.GL_LINES, pathDrawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
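For GL_LINES the count is the number of indices to read, and every pair of indices makes one segment. Passing color.length (4) asks GL to read two more indices than the two you actually stored, and that stray segment is most likely the extra line you are seeing. If you would rather have the count track the index buffer itself instead of a separate array, something like this sketch also works, since the buffer was sized from pathDrawOrder:

// capacity() of the ShortBuffer is the number of indices it was allocated for (2 here)
GLES20.glDrawElements(GLES20.GL_LINES, drawListBuffer.capacity(),
        GLES20.GL_UNSIGNED_SHORT, drawListBuffer);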
I am having trouble getting my Android OpenGL ES 2.0 application working. I have a "Triangle" class very similar to several tutorials out there. In the "draw" function, mPositionHandle is -1 after the following line of code:
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
Below is the entire Triangle class, which has the shader code etc. Any ideas what might cause this? Sorry, I am new to OpenGL programming, so maybe this is a dumb question.
public class Triangle {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
" gl_Position = uMVPMatrix * vPosition;" +
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}";
private final FloatBuffer vertexBuffer;
private final int mProgram;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
public float triangleCoords[];
private final int vertexCount = 3; //triangleCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.5f, 1.0f, 0.5f, 1.0f };
float edgeColor[] = { 0.0f, 0.0f, 0.0f, 1.0f };
public Triangle(float[] vertices) {
this.triangleCoords = vertices;
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(
// (number of coordinate values * 4 bytes per float)
triangleCoords.length * 4);
// use the device hardware's native byte order
bb.order(ByteOrder.nativeOrder());
// create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
// add the coordinates to the FloatBuffer
vertexBuffer.put(triangleCoords);
// set the buffer to read the first coordinate
vertexBuffer.position(0);
// prepare shaders and OpenGL program
int vertexShader = MyRenderer.loadShader(GLES20.GL_VERTEX_SHADER,
vertexShaderCode);
int fragmentShader = MyRenderer.loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentShaderCode);
mProgram = GLES20.glCreateProgram(); // create empty OpenGL Program
GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program
GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
GLES20.glLinkProgram(mProgram); // create OpenGL program executables
}
public void draw(float[] mvpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgram);
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
// Enable a handle to the facet vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
vertexStride, vertexBuffer);
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
// Set color for drawing the facet
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
MyRenderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
MyRenderer.checkGlError("glUniformMatrix4fv");
// Draw the facet
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount);
//If edge mode
//GLES20.glUniform4fv(mColorHandle, 1, edgeColor, 0);
//GLES20.glLineWidth(2.0f);
//GLES20.glDrawArrays(GLES20.GL_LINE_STRIP, 0, vertexCount);
// Disable vertex array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
}
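For what it's worth, glGetAttribLocation returning -1 usually means either that the program failed to compile or link, or that the attribute was optimised away because the shader never uses it; vPosition is clearly used here, so checking the link status right after glLinkProgram would be a reasonable first step. A sketch of such a check (assuming it goes at the end of the constructor):

final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] == 0) {
    // The log usually names the exact compile or link problem
    Log.e("Triangle", "Could not link program: " + GLES20.glGetProgramInfoLog(mProgram));
    GLES20.glDeleteProgram(mProgram);
}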
I started from Google's Android tutorial on OpenGL and then used this tutorial:
to add textures. Since the class structures differ a bit, I had to do some code moving and renaming. What I ended up with is:
Here is the texture file I used.
As one can see, the texture is somehow stretched along (1, 1), although the underlying object is a square.
Here is my quad's code; any help is appreciated.
public class GLSquare implements IGLObject {
private final String vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" + "attribute vec2 a_TexCoordinate;"
+ "varying vec2 v_TexCoordinate;" + "attribute vec4 vPosition;"
+ "void main() {"
+
// the matrix must be included as a modifier of gl_Position
" gl_Position = uMVPMatrix * vPosition;"
+ "v_TexCoordinate = a_TexCoordinate;" + "}";
private final String fragmentShaderCode = "precision mediump float;"
+ "uniform sampler2D u_Texture;" + "varying vec2 v_TexCoordinate;"
+ "void main() {"
+ " gl_FragColor = texture2D(u_Texture, v_TexCoordinate);" + "}";
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private final int mProgramHandle;
private int mPositionHandle;
private int mMVPMatrixHandle;
private int mTextureDataHandle;
/** The texture pointer */
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = { -10f, 10f, 0.0f, // top left
-10f, -10f, 0.0f, // bottom left
10f, -10f, 0.0f, // bottom right
10f, 10f, 0.0f }; // top right
private FloatBuffer textureBuffer; // buffer holding the texture coordinates
private float texture[] = {
// Mapping coordinates for the vertices
0.0f, 1.0f, // top left (V2)
0.0f, 0.0f, // bottom left (V1)
1.0f, 1.0f, // top right (V4)
1.0f, 0.0f // bottom right (V3)
};
static float[] mTranslate = new float[16];
static float[] translatedMVP = new float[16];
private final short drawOrder[] = { 0, 1, 2, 0, 2, 3 };
public GLSquare(float x, float y, float z, Context context) {
// initialize vertex byte buffer for shape coordinates
ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(squareCoords);
vertexBuffer.position(0);
bb = ByteBuffer.allocateDirect(texture.length * 4);
bb.order(ByteOrder.nativeOrder());
textureBuffer = bb.asFloatBuffer();
textureBuffer.put(texture);
textureBuffer.position(0);
// initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(
// (# of coordinate values * 2 bytes per short)
drawOrder.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
Matrix.setIdentityM(mTranslate, 0);
Matrix.translateM(mTranslate, 0, x, y, z);
// prepare shaders and OpenGL program
final int vertexShaderHandle = GLTools.compileShader(
GLES20.GL_VERTEX_SHADER, vertexShaderCode);
final int fragmentShaderHandle = GLTools.compileShader(
GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgramHandle = GLTools.createAndLinkProgram(vertexShaderHandle,
fragmentShaderHandle, new String[] { "a_Position", "a_Color",
"a_TexCoordinate" });
// Load the texture
mTextureDataHandle = GLTools.loadGLTexture(context, R.raw.stars1024);
}
public void draw(float[] vpMatrix) {
// Add program to OpenGL environment
GLES20.glUseProgram(mProgramHandle);
// Pass in the position information
vertexBuffer.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT,
false, 0, vertexBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle);
int mTextureCoordinateHandle = GLES20.glGetAttribLocation(
mProgramHandle, "a_TexCoordinate");
// Pass in the texture coordinate information
textureBuffer.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, 2,
GLES20.GL_FLOAT, false, 0, textureBuffer);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle,
"uMVPMatrix");
WideOpenRenderer.checkGlError("glGetUniformLocation");
// Apply the projection and view transformation
Matrix.multiplyMM(translatedMVP, 0, vpMatrix, 0, mTranslate, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, translatedMVP, 0);
// Draw the cube.
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
}
}
Here is the method that loads the texture:
public static int loadGLTexture(Context context, final int resourceId) {
Log.d("GLTools", "Loading texture...");
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if (textureHandle[0] != 0)
{
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false; // No pre-scaling
// Read in the resource
final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
// Bind to the texture in OpenGL
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
Log.d("GLTools", "Binding texture, setting parameter" + resourceId);
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Recycle the bitmap, since its data has been loaded into OpenGL.
bitmap.recycle();
}
if (textureHandle[0] == 0)
{
throw new RuntimeException("Error loading texture.");
}
return textureHandle[0];
}
OK, I am now able to answer my own question.
The texture was just scaled relative to its own origin, the bottom-left point (u, v) = (0, 0). That irritated me because, being new to OpenGL, I was expecting it to be scaled relative to at least the world origin or the object's centroid.
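If scaling about the quad's centre is what you actually want, remapping each texture coordinate as u' = 0.5 + s * (u - 0.5), and the same for v, does exactly that. A small hypothetical helper:

private static float[] scaleUvAboutCenter(float[] uv, float scale) {
    float[] out = new float[uv.length];
    for (int i = 0; i < uv.length; i += 2) {
        out[i] = 0.5f + (uv[i] - 0.5f) * scale;         // u scaled about 0.5
        out[i + 1] = 0.5f + (uv[i + 1] - 0.5f) * scale; // v scaled about 0.5
    }
    return out;
}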