I am attempting to render an exported model in .obj form (exported from Blender). It is a sphere, and my app crashes only on a Nexus 5 phone. It works on the Andy emulator and on other Android devices.
While debugging, I tried changing the value of the 'count' parameter, to see what would happen. My sphere has 960 faces (so I should draw 2880 to see the full model). However, if I put 1785 or more, it will crash. Using 1784 or less, it doesn't crash (but I only see a part of the model).
It crashes on GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 1784);
// Renders the loaded OBJ model: binds the position and normal client-side
// buffers, uploads the MV and MVP matrices plus the eye-space light position,
// then issues one non-indexed triangle draw.
private void draw()
{
// Rewind and bind vertex positions: 3 floats per vertex, tightly packed (stride 0).
mCubePositions.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 0, mCubePositions);
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Pass in the normal information
mCubeNormals.position(0);
GLES20.glVertexAttribPointer(mNormalHandle, 3, GLES20.GL_FLOAT, false, 0, mCubeNormals);
GLES20.glEnableVertexAttribArray(mNormalHandle);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// Pass in the modelview matrix.
GLES20.glUniformMatrix4fv(mMVMatrixHandle, 1, false, mMVPMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
// Pass in the combined matrix.
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Pass in the light position in eye space.
GLES20.glUniform3f(mLightPosHandle, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1], mLightPosInEyeSpace[2]);
// Draw the cube.
// NOTE(review): the vertex count is hard-coded; for 960 triangular faces it
// should be totalCaras * 3 (= 2880). With client-side arrays, glDrawArrays
// reads count vertices straight out of mCubePositions/mCubeNormals, so if
// either buffer holds fewer valid floats than count * 3 the driver reads past
// the allocation and can SIGSEGV natively (as observed on the Nexus 5).
// Verify the parser filled both arrays completely before raising this count.
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 1784);
}
My OBJ reader
/**
 * Loads a Wavefront OBJ resource whose faces use the "v//n" format (vertex and
 * normal indices, no texture coordinates) and fills the static draw buffers
 * {@code mCubePositions} and {@code mCubeNormals} with de-indexed, per-face data.
 * Also sets {@code totalCaras} to the number of face records read.
 *
 * @param context used to resolve the raw resource
 * @param resId   raw resource id of the .obj file
 */
public static void readModel(Context context, int resId) {
    // First pass: collect the raw record bodies so the output arrays can be
    // sized from the face count before de-indexing.
    ArrayList<String> vertexes = new ArrayList<String>();
    ArrayList<String> vertexNormals = new ArrayList<String>();
    ArrayList<String> textures = new ArrayList<String>();
    ArrayList<String> faces = new ArrayList<String>();
    InputStream iStream = context.getResources().openRawResource(resId);
    BufferedReader bReader = new BufferedReader(new InputStreamReader(iStream));
    try {
        String line;
        while ((line = bReader.readLine()) != null) {
            // Strip the leading "v ", "vn ", "vt " or "f " keyword; the record
            // types are mutually exclusive, so else-if avoids re-testing.
            if (line.startsWith("v ")) vertexes.add(line.substring(2));
            else if (line.startsWith("vn ")) vertexNormals.add(line.substring(3));
            else if (line.startsWith("vt ")) textures.add(line.substring(3));
            else if (line.startsWith("f ")) faces.add(line.substring(2));
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // The original leaked the reader (and the underlying stream) on every
        // call; always close it, best-effort.
        try {
            bReader.close();
        } catch (IOException ignored) {
            // nothing useful to do if close fails
        }
    }
    // 3 vertices per (triangular) face, 3 floats per position/normal,
    // 2 floats per texture coordinate.
    // NOTE(review): this assumes every face is a triangle in "v//n" form; a
    // quad face or a "v/t/n" face would overrun these arrays or mis-split below.
    float[] vCoords = new float[faces.size() * 3 * 3];
    float[] vNCoords = new float[faces.size() * 3 * 3];
    float[] vtCoords = new float[faces.size() * 3 * 2]; // reserved for texture coords (currently unfilled)
    totalCaras = faces.size();
    int vertexIndex = 0;
    int normalIndex = 0; // renamed from "faceIndex": it indexes vNCoords, not faces
    // Second pass: de-index each face corner into flat position/normal arrays.
    for (String face : faces) {
        for (String corner : face.split(" ")) {
            // Each corner is "vIdx//nIdx"; OBJ indices are 1-based.
            String[] refs = corner.split("//");
            String vertex = vertexes.get(Integer.parseInt(refs[0]) - 1);
            String normal = vertexNormals.get(Integer.parseInt(refs[1]) - 1);
            for (String v : vertex.split(" ")) {
                vCoords[vertexIndex++] = Float.parseFloat(v);
            }
            for (String n : normal.split(" ")) {
                vNCoords[normalIndex++] = Float.parseFloat(n);
            }
        }
    }
    // Upload into direct, native-order buffers as required by GLES client arrays.
    mCubePositions = ByteBuffer.allocateDirect(vCoords.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubePositions.put(vCoords).position(0);
    mCubeNormals = ByteBuffer.allocateDirect(vNCoords.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubeNormals.put(vNCoords).position(0);
}
The only trace that logcat shows:
06-10 09:08:32.016: D/OpenGLRenderer(6041): Use EGL_SWAP_BEHAVIOR_PRESERVED: true
06-10 09:08:32.027: D/Atlas(6041): Validating map...
06-10 09:08:32.074: I/Adreno-EGL(6041): <qeglDrvAPI_eglInitialize:379>: QUALCOMM Build: 01/14/15, ab0075f, Id3510ff6dc
06-10 09:08:32.075: I/OpenGLRenderer(6041): Initialized EGL, version 1.4
06-10 09:05:05.724: D/OpenGLRenderer(5242): Enabling debug mode 0
06-10 09:05:05.878: A/libc(5242): Fatal signal 11 (SIGSEGV), code 2, fault addr 0x74615000 in tid 5279 (GLThread 13643)
Related
Hi there I'm using ARToolKit v6 to render marker based on nft jpg image it works very well but..
I need to get position of marker in screen(view) and then add a custom textView there is that possible ? how to get 2d position of marker based on projectionMatrix and modelViewMatrix ?
Or can I draw a text or image instead of Cube ?
code
#Override
public void draw() {
super.draw();
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
GLES20.glFrontFace(GLES20.GL_CCW);
boolean show = false;
for (int trackableUID : trackableUIDs) {
if (ARToolKit.getInstance().queryMarkerVisible(trackableUIDs.get(trackableUID))) {
float[] projectionMatrix = ARToolKit.getInstance().getProjectionMatrix();
float[] modelViewMatrix = ARToolKit.getInstance().queryMarkerTransformation(trackableUIDs.get(trackableUID));
Log.i("INFOOOOO", projectionMatrix.toString());
Log.i("INFOOOOO", modelViewMatrix.toString());
cube.draw(projectionMatrix, modelViewMatrix);
show = true;
}
}
}
@sturner, thanks so much! I found a solution; below is the code in case someone else is looking for one.
// Answer snippet: project the marker's model-space origin (0,0,0) into
// 2D window coordinates so a view can be positioned over the marker.
final float[] projectionMatrix = ARToolKit.getInstance().getProjectionMatrix();
final float[] modelViewMatrix = ARToolKit.getInstance().queryMarkerTransformation(trackableUIDs.get(trackableUID));
// Lazily build the GL viewport rectangle {x, y, width, height} that
// gluProject needs to map normalized device coords to pixels.
if (view == null) {
view = new int[4];
view[0] = 0;
view[1] = 0;
view[2] = ARTrackingActivity.width;
view[3] = ARTrackingActivity.height;
}
// gluProject writes the window-space x/y/z into 'floats' and returns
// GL_TRUE on success.
int i = GLU.gluProject(0, 0, 0, modelViewMatrix, 0, projectionMatrix, 0, view, 0, floats, 0);
if (i == GLES20.GL_TRUE) {
// draw the object in screen
}
I'm trying to get Vuforia 6.0.117 working in my Android app. I'm using this specific version since it's the last version supporting FrameMarkers. The detection of FrameMarkers is working fine, but when I'm trying to render a texture over the FrameMarker on my phone I get an error stating:
After operation FrameMarkers render frame got glError 0x501
My renderFrame method:
// Body of renderFrame() (signature above this chunk). Renders the Vuforia
// video background, then — if a FrameMarker is tracked — draws a textured
// plane over it. "glError 0x501" is GL_INVALID_VALUE; to localize it, call
// SampleUtils.checkGLError(...) after each group of GL calls below rather
// than only at the end.
// Clear color and depth buffer
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Get the state from Vuforia and mark the beginning of a rendering
// section
State state = Renderer.getInstance().begin();
// Explicitly render the Video Background
Renderer.getInstance().drawVideoBackground();
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// Premultiplied-alpha blending: src contributes as-is, dst scaled by 1-srcA.
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendEquation(GLES20.GL_FUNC_ADD);
// GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ONE_MINUS_SRC_ALPHA);
// We must detect if background reflection is active and adjust the
// culling direction.
// If the reflection is active, this means the post matrix has been
// reflected as well,
// therefore standard counter clockwise face culling will result in
// "inside out" models.
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glCullFace(GLES20.GL_BACK);
if (Renderer.getInstance().getVideoBackgroundConfig().getReflection() == VIDEO_BACKGROUND_REFLECTION.VIDEO_BACKGROUND_REFLECTION_ON) {
GLES20.glFrontFace(GLES20.GL_CW); // Front camera
} else {
GLES20.glFrontFace(GLES20.GL_CCW); // Back camera
}
// Did we find any trackables this frame?
if (mActivity.isHelpVisible() || state.getNumTrackableResults() == 0) {
// no marker scanned
mActivity.hideInfoButton();
} else {
// Get the trackable:
TrackableResult trackableResult = state.getTrackableResult(0);
float[] modelViewMatrix = Tool.convertPose2GLMatrix(trackableResult.getPose()).getData();
// Check the type of the trackable:
MarkerResult markerResult = (MarkerResult) trackableResult;
Marker marker = (Marker) markerResult.getTrackable();
// Only (re)load the texture when the visible marker changes.
if (markerId != marker.getMarkerId()) {
markerId = marker.getMarkerId();
tag = DataManager.getInstance().getTagByMarkerId(markerId);
if (tag != null) {
texture = Texture.loadTexture(tag.getTexture());
setupTexture(texture);
tag.addToDB();
}
}
if (tag != null) {
String poiReference = tag.getPoiReference();
if (!poiReference.isEmpty()) {
mActivity.showInfoButton(poiReference);
}
// Select which model to draw:
Buffer vertices = planeObject.getVertices();
Buffer normals = planeObject.getNormals();
Buffer indices = planeObject.getIndices();
Buffer texCoords = planeObject.getTexCoords();
int numIndices = planeObject.getNumObjectIndex();
// Build MVP = projection * (modelView scaled by the tag's scale factor).
float[] modelViewProjection = new float[16];
float scale = (float) tag.getScale();
Matrix.scaleM(modelViewMatrix, 0, scale, scale, scale);
Matrix.multiplyMM(modelViewProjection, 0, vuforiaAppSession.getProjectionMatrix().getData(), 0, modelViewMatrix, 0);
GLES20.glUseProgram(shaderProgramID);
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 0, vertices);
GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT, false, 0, normals);
GLES20.glVertexAttribPointer(textureCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texCoords);
GLES20.glEnableVertexAttribArray(vertexHandle);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glEnableVertexAttribArray(textureCoordHandle);
// NOTE(review): the texture is 640x482 — non-power-of-two. Unextended
// GLES 2.0 does not support NPOT textures with mipmapping or REPEAT
// wrap; confirm how setupTexture() configures filtering/wrap, as that
// is a common source of errors with this setup.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture.mTextureID[0]);
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0);
GLES20.glUniform1i(texSampler2DHandle, 0);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, numIndices, GLES20.GL_UNSIGNED_SHORT, indices);
GLES20.glDisableVertexAttribArray(vertexHandle);
GLES20.glDisableVertexAttribArray(normalHandle);
GLES20.glDisableVertexAttribArray(textureCoordHandle);
SampleUtils.checkGLError("FrameMarkers render frame");
}
}
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
Renderer.getInstance().end();
}
I'm loading a texture of the size 640x482 and is loading as follows:
/**
 * Holds decoded texture pixel data in a GL-friendly layout: RGBA bytes,
 * rows flipped so the first buffer row is the bottom image row.
 */
public class Texture {
    public int mWidth; // The width of the texture.
    public int mHeight; // The height of the texture.
    public int mChannels; // The number of channels.
    public ByteBuffer mData; // The pixel data (RGBA, bottom-up row order).
    public int[] mTextureID = new int[1];
    public boolean mSuccess = false;

    /**
     * Decodes an image file from the filesystem into a Texture.
     *
     * @param fileName absolute path of the image file
     * @return the loaded texture, or {@code null} if the file cannot be read
     *         or decoded
     */
    public static Texture loadTexture(String fileName) {
        try {
            InputStream inputStream = new FileInputStream(fileName);
            BufferedInputStream bufferedStream = new BufferedInputStream(inputStream);
            Bitmap bitMap = BitmapFactory.decodeStream(bufferedStream);
            bufferedStream.close();
            inputStream.close();
            // BUG FIX: decodeStream returns null (without throwing) for
            // corrupt/unsupported data; the original then NPE'd on getWidth().
            if (bitMap == null) {
                Log.e(Constants.DEBUG, "Failed to decode texture '" + fileName + "'");
                return null;
            }
            int[] data = new int[bitMap.getWidth() * bitMap.getHeight()];
            bitMap.getPixels(data, 0, bitMap.getWidth(), 0, 0, bitMap.getWidth(), bitMap.getHeight());
            return loadTextureFromIntBuffer(data, bitMap.getWidth(), bitMap.getHeight());
        } catch (IOException e) {
            // Log the full exception; e.getMessage() may be null and the
            // original second log line could itself throw.
            Log.e(Constants.DEBUG, "Failed to load texture '" + fileName + "' from APK", e);
            return null;
        }
    }

    /**
     * Converts ARGB ints (as produced by Bitmap.getPixels) into a direct
     * RGBA byte buffer, flipping rows vertically for OpenGL's bottom-up
     * texture origin.
     *
     * @param data   ARGB pixel values, row-major, top-down
     * @param width  image width in pixels
     * @param height image height in pixels
     * @return a Texture with mData populated and mSuccess set
     */
    public static Texture loadTextureFromIntBuffer(int[] data, int width, int height) {
        // Convert ARGB int -> RGBA bytes:
        int numPixels = width * height;
        byte[] dataBytes = new byte[numPixels * 4];
        for (int p = 0; p < numPixels; ++p) {
            int colour = data[p];
            dataBytes[p * 4] = (byte) (colour >>> 16); // R
            dataBytes[p * 4 + 1] = (byte) (colour >>> 8); // G
            dataBytes[p * 4 + 2] = (byte) colour; // B
            dataBytes[p * 4 + 3] = (byte) (colour >>> 24); // A
        }
        Texture texture = new Texture();
        texture.mWidth = width;
        texture.mHeight = height;
        texture.mChannels = 4;
        texture.mData = ByteBuffer.allocateDirect(dataBytes.length).order(ByteOrder.nativeOrder());
        // Copy row by row from the bottom up so the buffer matches GL's
        // texture coordinate origin.
        int rowSize = texture.mWidth * texture.mChannels;
        for (int r = 0; r < texture.mHeight; r++) {
            texture.mData.put(dataBytes, rowSize * (texture.mHeight - 1 - r), rowSize);
        }
        texture.mData.rewind();
        texture.mSuccess = true;
        return texture;
    }
}
Anybody got an idea why I'm getting this error and how to fix it?
I cannot go over your entire code right now, and even if I could I'm not sure it would help. You first need to narrow down the problem, so I will first give you the method to do that, and I hope it will serve you in other cases as well.
You managed to find out that there was an error - but you are checking it only at the end of the rendering function. What you need to do is to place the checkGLError call in several places inside the rendering code (print a different text message), until you can pin-point the exact line after which the error first appears. Then, if you cannot understand the problem, comment here what is the problematic line and I will try to help.
UPDATE:
After looking at the shader code, following your report that normalHandle is -1, I got to the following conclusions:
The error, which indicates the variable vertexNormal cannot be found in the shader, may be due to the fact that this variable is probably optimized out during shader compilation, since it is not really required.
Explanation: in the vertex shader (CUBE_MESH_VERTEX_SHADER), vertexNormal is assigned to a varying called normal (variable that is passed to the fragment shader). In the fragment shader, this varying is declared but not used.
Therefore, you can actually delete the variables vertexNormal and normal from the shader, and you can delete all usages of 'normalHandle' in your code.
This should eliminate the error.
I have a world full of 2D squares (z=0). At startup I setup projection in such a way that the whole world is visible on the screen, using:
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glLoadIdentity();
gl.glOrthof(left, right, bottom, top, zNear, zFar);
Then I allow the user to zoom in the world with fingers by using:
gl.glScalef(mScaleFactor, mScaleFactor, 1.0f);
I want to make visibility test for objects that appear not visible as the user is zooming in to not render them (performance boost).
I found this method:
android.opengl.Visibility.visibilityTest(float[] ws, int wsOffset, float[] positions, int positionsOffset, char[] indices, int indicesOffset, int indexCount);
But I can't make it work, nor I found ANY examples of the usage of this method on Internet. Currently this method returns result=0 for every square I test, even when scaling is not applied (mScaleFactor = 1.0)
The way I'm doing this:
// Each square is two triangles over 4 vertices, in this index order.
final short SQUARE_VERTICES_ORDER_TEMPLATE[] = {0, 1, 2, 0, 2, 3};
.....
float[] vertices = toArray(mVertexBuffer);
short[] indices = toArray(mIndicesBuffer);
// Visibility.visibilityTest takes char[] indices, so widen the shorts.
char[] charIndices = new char[indices.length];
// method needs char[]
for (int i = 0; i < indices.length; i++) {
short shortIndex = indices[i];
charIndices[i] = (char) shortIndex;
}
// Test each square's 6 indices against the frustum.
for (int i = 0; i < mSquares.size(); i++) {
int numIndicesPerSquare = SQUARE_VERTICES_ORDER_TEMPLATE.length;
int indicesOffset = i * numIndicesPerSquare;
// NOTE(review): visibilityTest expects the combined projection * modelView
// matrix; passing mProjection alone is why every square reports result 0.
int result = Visibility.visibilityTest(matrixGrabber.mProjection, 0, vertices, 0, charIndices, indicesOffset, numIndicesPerSquare);
switch (result) {
case 0:
Log.v(TAG, "Object NOT VISIBLE: " + mSquares.get(i)); // hits every time
break;
case 1:
Log.v(TAG, "Object PARTIALLY VISIBLE: " + mSquares.get(i));
break;
default:
TAG.toString(); // to place a break point
break;
}
}
I'm not sure if I'm picking up the right Matrix required by this method.
Could you please validate the right usage of this method or give any other tips or workarounds ?
I've figured out what is wrong, visibilityTest method requires multiplied matrix. Here is how it should be:
matrixGrabber.getCurrentState(gl);
float[] resultMatrix = new float[matrixGrabber.mProjection.length];
Matrix.multiplyMM(resultMatrix, 0, matrixGrabber.mProjection, 0, matrixGrabber.mModelView, 0);
....
Visibility.visibilityTest(resultMatrix, 0, vertices, 0, charIndices, indicesOffset, numIndicesPerSquare);
I'm trying to write a basic Wavefront OBJ loader for my Android OpenGL ES 2.0 program. For now, I'm ignoring everything in an OBJ file except for vertices, normals, and faces. Here's what I've written so far:
// Minimal Wavefront OBJ loader: reads "v", "vn" and "f v/t/n" records from a
// raw resource, building a flat vertex list, an index list, and a normal list
// re-ordered so that normal i corresponds to vertex i (as glDrawElements with
// a single index buffer requires).
InputStream inputStream = context.getResources().openRawResource(resourceID);
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
String line;
while((line = reader.readLine()) != null)
{
    // Split once per line instead of re-splitting for every coordinate.
    String[] tokens = line.split(" ");
    if(line.startsWith("v "))
    {
        verticesArrayList.add(Float.valueOf(tokens[1]));
        verticesArrayList.add(Float.valueOf(tokens[2]));
        verticesArrayList.add(Float.valueOf(tokens[3]));
    }
    else if(line.startsWith("vn "))
    {
        normalsArrayList.add(Float.valueOf(tokens[1]));
        normalsArrayList.add(Float.valueOf(tokens[2]));
        normalsArrayList.add(Float.valueOf(tokens[3]));
    }
    else if(line.startsWith("f "))
    {
        // Loop 3 times for the 3 vertices/textures/normals associated with each face
        for(int i = 1; i <= 3; i++)
        {
            String[] refs = tokens[i].split("/");
            Short vertex = (short) (Short.valueOf(refs[0]) - 1);
            indicesArrayList.add(vertex);
            // Seed the re-ordered list once, from the as-read normals.
            // NOTE(review): this assumes the model has at least as many
            // normals as vertices; otherwise the set() calls below can go
            // out of range — confirm for your exported models.
            if(normalsArrayList2.size() == 0)
                normalsArrayList2 = new ArrayList<Float>(normalsArrayList);
            // BUG FIX: use set(), not add(). add() inserts and shifts every
            // element after the slot to the right, scrambling normals placed
            // earlier; set() overwrites in place so the normal lands at the
            // same slot as its vertex.
            int normal = Integer.valueOf(refs[2]) - 1;
            normalsArrayList2.set(vertex * 3, normalsArrayList.get(normal * 3));
            normalsArrayList2.set((vertex * 3) + 1, normalsArrayList.get((normal * 3) + 1));
            normalsArrayList2.set((vertex * 3) + 2, normalsArrayList.get((normal * 3) + 2));
        }
    }
}
reader.close();
However, I'm not sure if I'm setting my normals correctly. You see what I'm doing there at the bottom of my code? I'm trying to re-arrange the normals so that they match the order of the vertices. I'm doing this because I'm using the GLES20.glDrawElements() method like so:
// Bind per-vertex attributes and issue an indexed draw; the same index buffer
// addresses both the position and the (re-ordered) normal arrays.
// Get handle to vertex shader's aPosition member, enable the handle, and prepare the vertex data
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glVertexAttribPointer(mPositionHandle, COORDINATES_PER_VERTEX, GLES20.GL_FLOAT, false, VERTEX_STRIDE, verticesBuffer);
// Get handle to vertex shader's aNormal member, enable the handle, and prepare the vertex data
mNormalHandle = GLES20.glGetAttribLocation(mProgram, "aNormal");
GLES20.glEnableVertexAttribArray(mNormalHandle);
GLES20.glVertexAttribPointer(mNormalHandle, COORDINATES_PER_VERTEX, GLES20.GL_FLOAT, false, VERTEX_STRIDE, normalsBuffer);
// Draw the cube
GLES20.glDrawElements(GLES20.GL_TRIANGLES, numberOfIndices, GLES20.GL_UNSIGNED_SHORT, indicesBuffer);
I got the idea for this re-arranging from this site, after countless hours of Googling this subject.
However, something isn't going right. The lighting in my shader works for all the shapes I created manually with OpenGL, just not for the models I read in using my OBJ parser, so I don't think my lighting calculations are the problem, just how the normals are being calculated in my OBJ parser. Also, I opened my test OBJ files with Maya and used the "Vertex Normal Edit Tool" to visually verify that all of the normals are pointing the right way. Are there any glaring problems you can see with any of my code?
Use ArrayList's set() method instead of add()
Overview
I am having trouble rendering the texture on the sides of my cube. I have successfully rendered textures on the top and bottom of my cube, but am unable to render on the sides.
What I have
I have a texture buffer full of 48 elements, (4*2 elements per face and 6 faces is 48) and they are full will good coordinates.
The cube shape is drawing, but the sides are not being rendered.
The image I am drawing is simply a image with the numbers 1-9 as you can see from the top of the cube. The textureBuffer is the same patter over and over again...
// Per-face texture coordinates: each face uses 4 corners x 2 coords in the
// order (0,0) (1,0) (1,1) (0,1), i.e. the full texture quad. Only 3 of the
// 6 faces (24 of 48 values) are shown here; the pattern repeats.
// Face 0:
texture[0] = 0;
texture[1] = 0;
texture[2] = 1;
texture[3] = 0;
texture[4] = 1;
texture[5] = 1;
texture[6] = 0;
texture[7] = 1;
// Face 1:
texture[8] = 0;
texture[9] = 0;
texture[10] = 1;
texture[11] = 0;
texture[12] = 1;
texture[13] = 1;
texture[14] = 0;
texture[15] = 1;
// Face 2:
texture[16] = 0f;
texture[17] = 0f;
texture[18] = 1f;
texture[19] = 0f;
texture[20] = 1f;
texture[21] = 1f;
texture[22] = 0f;
texture[23] = 1f;
which simply loads the texture Buffer to render the full texture.
Possible Problem
It appears that only the first 16 texture coordinates are being drawn and used because only the top and bottom surface of the rectangle are being textured. I've debugged it and when I populate the TextureBuffer the size is 48 though.
Render Code
#Override
public void draw(GL10 gl)
{
super.draw(gl);
//gl.glColor4f(255, 0, 0, 150);
gl.glEnable(GL10.GL_TEXTURE_2D);
gl.glEnable(GL10.GL_ALPHA_TEST);
gl.glAlphaFunc(GL10.GL_GREATER, 0.0f);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glTexCoordPointer(2,GL10.GL_FLOAT,0,textureBuffer);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureID);
gl.glBlendFunc(GL10.GL_SRC_ALPHA,GL10.GL_ONE_MINUS_SRC_ALPHA);
gl.glEnable(GL10.GL_BLEND);
gl.glFrontFace(GL10.GL_CCW);
gl.glEnable(GL10.GL_CULL_FACE);
gl.glCullFace(GL10.GL_BACK);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glVertexPointer(3,GL10.GL_FLOAT,0,vertexBuffer);
gl.glDrawElements(GL10.GL_TRIANGLES,indexBuffer.capacity(),GL10.GL_UNSIGNED_SHORT,indexBuffer);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glDisable(GL10.GL_CULL_FACE);
gl.glDisable(GL10.GL_ALPHA_TEST);
gl.glDisable(GL10.GL_TEXTURE_2D);
gl.glColor4f(255, 255, 255, 255);
}
Creating the variable textureBuffer
The parameter variable texture that comes in contains 48 elements
/**
 * Wraps the given texture coordinates in a direct, native-byte-order
 * FloatBuffer and stores it in {@code textureBuffer}, rewound to position 0
 * so it is ready for glTexCoordPointer.
 */
public void constructTextureBuffer(float[] texture)
{
    final int bytesPerFloat = 4;
    ByteBuffer raw = ByteBuffer
            .allocateDirect(texture.length * bytesPerFloat)
            .order(ByteOrder.nativeOrder());
    textureBuffer = raw.asFloatBuffer();
    textureBuffer.put(texture).position(0);
}
The vertexBuffer is correctly setup and using the index buffer to render a cube. Do you know why the sides of the cube are not being rendered?
NEW!!
So I tried creating a shape by hand and I am running into the same situation with the texture buffer. I can render two faces but not the third! It appears that anything past 8 texture vertices does not work.
This picture shows my new shape. Notice the horizontal extension. No matter what I do to those texture coordinates, that texture does not change. That is also the third face of my random object.
I have yet to get index buffers and textures to work together. I've tried almost everything! So instead (and unfortunately) I am building lots of triangles. In order to parse the .obj file generated by blender, I wrote this function which creates the vertex buffer and texture buffer for me.
/**
 * Parses a Blender-exported OBJ resource whose faces are quads of "v/t"
 * references, and builds a Mesh with de-indexed vertex and texture-coordinate
 * buffers: each quad becomes two triangles (corners 0,1,2 and 0,2,3).
 *
 * NOTE(review): several fragile assumptions here — the header is skipped by
 * reading exactly 5 lines, every face is assumed to be a quad, and the
 * Scanners / reader are never closed (resource leak). Verify against the
 * actual exporter output before reusing.
 */
public static Mesh createMesh(int resourceID)
{
Mesh m = new Mesh();
Scanner s = null;
BufferedReader inputStream = null;
// Raw records as read: positions (3 floats), tex coords (2 floats), and the
// 1-based vertex/texture indices of each quad corner.
ArrayList<Float> readInVerticies = new ArrayList<Float>();
ArrayList<Float> readInTextures = new ArrayList<Float>();
ArrayList<Short> readInVertexIndicies = new ArrayList<Short>();
ArrayList<Short> readInTextureIndicies = new ArrayList<Short>();
int numberFaces = 0;
try
{
inputStream = new BufferedReader(new InputStreamReader(context.getResources().openRawResource(resourceID)));
s = new Scanner(inputStream);
String line = null;
/*
* Read the header part of the file
*/
// NOTE(review): assumes the header is exactly 5 lines — TODO confirm for
// every exporter version; a different header length breaks all parsing below.
line = inputStream.readLine();
line = inputStream.readLine();
line = inputStream.readLine();
line = inputStream.readLine();
line = inputStream.readLine();
// "v x y z" records: a fresh Scanner per line, skipping the "v" token.
while(line.charAt(0) == 'v' && line.charAt(1) != 't')
{
s = new Scanner(line);
s.next();
readInVerticies.add(s.nextFloat());
readInVerticies.add(s.nextFloat());
readInVerticies.add(s.nextFloat());
line = inputStream.readLine();
}
// "vt u v" records.
while(line.charAt(0)=='v' && line.charAt(1)=='t')
{
s = new Scanner(line);
s.next(); //read in "vt"
readInTextures.add(s.nextFloat());
readInTextures.add(s.nextFloat());
line = inputStream.readLine();
}
// Skip two lines between the "vt" block and the "f" block.
line = inputStream.readLine();
line = inputStream.readLine();
// "f v/t v/t v/t v/t" quad records; the delimiter set splits on spaces
// and on the '/' between vertex and texture indices.
while(line != null && line.charAt(0) == 'f')
{
s = new Scanner(line);
s.useDelimiter("[ /\n]");
String buffer = s.next();
short vi1,vi2,vi3,vi4;
short ti1,ti2,ti3,ti4;
vi1 = s.nextShort();
ti1 = s.nextShort();
vi2 = s.nextShort();
ti2 = s.nextShort();
vi3 = s.nextShort();
ti3 = s.nextShort();
vi4 = s.nextShort();
ti4 = s.nextShort();
readInVertexIndicies.add(vi1);
readInVertexIndicies.add(vi2);
readInVertexIndicies.add(vi3);
readInVertexIndicies.add(vi4);
readInTextureIndicies.add(ti1);
readInTextureIndicies.add(ti2);
readInTextureIndicies.add(ti3);
readInTextureIndicies.add(ti4);
numberFaces = numberFaces + 1;
line = inputStream.readLine();
}
/*
* constructing our verticies. Use the number of faces * 6 because
* there are 2 triangles per face and 3 verticies on a triangle but there are
* also 3 coordinates per vertex.
*
* For the texture, the same number but there are only 2 coordinates per texture
*/
float verticies[] = new float[numberFaces * 6 * 3];
float textures[] = new float[numberFaces * 6 * 2];
// De-index: quad corners (0,1,2,3) emit triangles (0,1,2) and (0,2,3).
// Indices in the lists are 1-based, hence the -1 everywhere.
for(int i=0;i<numberFaces;i++)
{
// Triangle 1, corner 0:
verticies[i*18+0] = readInVerticies.get((readInVertexIndicies.get(i*4+0)-1)*3+0);
verticies[i*18+1] = readInVerticies.get((readInVertexIndicies.get(i*4+0)-1)*3+1);
verticies[i*18+2] = readInVerticies.get((readInVertexIndicies.get(i*4+0)-1)*3+2);
textures[i*12+0] = readInTextures.get((readInTextureIndicies.get(i*4+0)-1)*2+0);
textures[i*12+1] = readInTextures.get((readInTextureIndicies.get(i*4+0)-1)*2+1);
// Triangle 1, corner 1:
verticies[i*18+3] = readInVerticies.get((readInVertexIndicies.get(i*4+1)-1)*3+0);
verticies[i*18+4] = readInVerticies.get((readInVertexIndicies.get(i*4+1)-1)*3+1);
verticies[i*18+5] = readInVerticies.get((readInVertexIndicies.get(i*4+1)-1)*3+2);
textures[i*12+2] = readInTextures.get((readInTextureIndicies.get(i*4+1)-1)*2+0);
textures[i*12+3] = readInTextures.get((readInTextureIndicies.get(i*4+1)-1)*2+1);
// Triangle 1, corner 2:
verticies[i*18+6] = readInVerticies.get((readInVertexIndicies.get(i*4+2)-1)*3+0);
verticies[i*18+7] = readInVerticies.get((readInVertexIndicies.get(i*4+2)-1)*3+1);
verticies[i*18+8] = readInVerticies.get((readInVertexIndicies.get(i*4+2)-1)*3+2);
textures[i*12+4] = readInTextures.get((readInTextureIndicies.get(i*4+2)-1)*2+0);
textures[i*12+5] = readInTextures.get((readInTextureIndicies.get(i*4+2)-1)*2+1);
// Triangle 2, corner 0 (repeat of corner 0):
verticies[i*18+9] = readInVerticies.get((readInVertexIndicies.get(i*4+0)-1)*3+0);
verticies[i*18+10] = readInVerticies.get((readInVertexIndicies.get(i*4+0)-1)*3+1);
verticies[i*18+11] = readInVerticies.get((readInVertexIndicies.get(i*4+0)-1)*3+2);
textures[i*12+6] = readInTextures.get((readInTextureIndicies.get(i*4+0)-1)*2+0);
textures[i*12+7] = readInTextures.get((readInTextureIndicies.get(i*4+0)-1)*2+1);
// Triangle 2, corner 1 (repeat of corner 2):
verticies[i*18+12] = readInVerticies.get((readInVertexIndicies.get(i*4+2)-1)*3+0);
verticies[i*18+13] = readInVerticies.get((readInVertexIndicies.get(i*4+2)-1)*3+1);
verticies[i*18+14] = readInVerticies.get((readInVertexIndicies.get(i*4+2)-1)*3+2);
textures[i*12+8] = readInTextures.get((readInTextureIndicies.get(i*4+2)-1)*2+0);
textures[i*12+9] = readInTextures.get((readInTextureIndicies.get(i*4+2)-1)*2+1);
// Triangle 2, corner 2 (quad corner 3):
verticies[i*18+15] = readInVerticies.get((readInVertexIndicies.get(i*4+3)-1)*3+0);
verticies[i*18+16] = readInVerticies.get((readInVertexIndicies.get(i*4+3)-1)*3+1);
verticies[i*18+17] = readInVerticies.get((readInVertexIndicies.get(i*4+3)-1)*3+2);
textures[i*12+10] = readInTextures.get((readInTextureIndicies.get(i*4+3)-1)*2+0);
textures[i*12+11] = readInTextures.get((readInTextureIndicies.get(i*4+3)-1)*2+1);
}
m.constructVertexBuffer(verticies);
m.constructTextureBuffer(textures);
}
catch (FileNotFoundException e)
{
e.printStackTrace();
}
catch (IOException e)
{
e.printStackTrace();
}
return m;
}
So if you ever have trouble parsing a .obj file, feel free to use this as a reference or guide! Blender gives you everything in faces with 4 vertices, and this turns everything into 3s and constructs 2 triangles per face.