I created an Android app using Vuforia 6. In this app I have 26 different target images and 26 different 3D objects (one object per target). That works perfectly, but I need to rotate the 3D object on user touch.
Here is my renderer code:
public class ImageTargetRendererAndroid implements GLSurfaceView.Renderer, SampleAppRendererControl
{
private static final String LOGTAG = "ImageTargetRenderer";
private SampleApplicationSession vuforiaAppSession;
private ImageTargetsAndroid mActivity;
private SampleAppRenderer mSampleAppRenderer;
private Vector<Texture> mTextures;
private int normalHandle;
private int shaderProgramID;
private int vertexHandle;
private int textureCoordHandle;
private int mvpMatrixHandle;
private int texSampler2DHandle;
private ArrayList<AndroidModel> mModel = new ArrayList<AndroidModel>();
//private Teapot mTeapot;
private float kBuildingScale = 12.0f;
private SampleApplication3DModel mBuildingsModel;
private Renderer mRenderer;
boolean mIsActive = false;
private boolean mModelIsLoaded = false;
private static final float OBJECT_SCALE_FLOAT = 3.0f;
ArrayList<String> modelNames = new ArrayList<String>();
private Product mProduct;
private String PackagePath = "";
private String Package3DPath = "";
AndroidModel curModel;
public ImageTargetRendererAndroid(ImageTargetsAndroid activity, SampleApplicationSession session, Product product)
{
mProduct = product;
PackagePath = Globals.getPackagePath(mProduct.getName());
Package3DPath = PackagePath + Constants.SERVER_RESPONSE_TAGS_FOLDER_NAME_3D+ "/";
if(modelNames.size() <= 0){
ArrayList<String> names = Globals.getModelNames(Package3DPath);
for(String name : names){
modelNames.add(name);
}
}
// modelNames.add("A");
// modelNames.add("B");
mActivity = activity;
vuforiaAppSession = session;
// SampleAppRenderer used to encapsulate the use of RenderingPrimitives setting
// the device mode AR/VR and stereo mode
mSampleAppRenderer = new SampleAppRenderer(this, mActivity, Device.MODE.MODE_AR, false, 10f , 5000f);
}
// Called to draw the current frame.
@Override
public void onDrawFrame(GL10 gl)
{
if (!mIsActive)
return;
// Call our function to render content from SampleAppRenderer class
mSampleAppRenderer.render();
}
public void setActive(boolean active)
{
mIsActive = active;
if(mIsActive)
mSampleAppRenderer.configureVideoBackground();
}
// Called when the surface is created or recreated.
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config)
{
Log.d(LOGTAG, "GLRenderer.onSurfaceCreated");
// Call Vuforia function to (re)initialize rendering after first use
// or after OpenGL ES context was lost (e.g. after onPause/onResume):
vuforiaAppSession.onSurfaceCreated();
mSampleAppRenderer.onSurfaceCreated();
}
// Called when the surface changed size.
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
Log.d(LOGTAG, "GLRenderer.onSurfaceChanged");
// Call Vuforia function to handle render surface size changes:
vuforiaAppSession.onSurfaceChanged(width, height);
// RenderingPrimitives to be updated when some rendering change is done
mSampleAppRenderer.onConfigurationChanged(mIsActive);
initRendering();
}
// Function for initializing the renderer.
private void initRendering()
{
String storage = Environment.getExternalStorageDirectory() + "/";
ArrayList<String> objs = Globals.getModelObjects(Package3DPath);
// for(String obj : objs){
// //modelNames.add(Package3DPath + obj);
// mModel.add(new AndroidModel(mActivity,Package3DPath + "/" + obj));
// }
if(modelNames.size() <= 0){
ArrayList<String> names = Globals.getModelNames(Package3DPath);
for(String name : names){
modelNames.add(name);
}
}
for(String modelName : modelNames){
//modelNames.add(Package3DPath + obj);
mModel.add(new AndroidModel(mActivity,Package3DPath + modelName + "." + Constants.FILE_TYPE_OBJ,modelName));
}
// mModel.add(new AndroidModel(mActivity,storage + "A.obj"));
// mModel.add(new AndroidModel(mActivity,storage + "B.obj"));
mRenderer = Renderer.getInstance();
GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f : 1.0f);
for (Texture t : mTextures)
{
GLES20.glGenTextures(1, t.mTextureID, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, t.mTextureID[0]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, t.mWidth, t.mHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, t.mData);
}
shaderProgramID = SampleUtils.createProgramFromShaderSrc( CubeShaders.CUBE_MESH_VERTEX_SHADER, CubeShaders.CUBE_MESH_FRAGMENT_SHADER);
vertexHandle = GLES20.glGetAttribLocation(shaderProgramID, "vertexPosition");
normalHandle = GLES20.glGetAttribLocation(shaderProgramID, "vertexNormal");
textureCoordHandle = GLES20.glGetAttribLocation(shaderProgramID, "vertexTexCoord");
mvpMatrixHandle = GLES20.glGetUniformLocation(shaderProgramID, "modelViewProjectionMatrix");
texSampler2DHandle = GLES20.glGetUniformLocation(shaderProgramID, "texSampler2D");
if(!mModelIsLoaded) {
//mTeapot = new Teapot();
// try {
// mBuildingsModel = new SampleApplication3DModel();
// mBuildingsModel.loadModel(mActivity.getResources().getAssets(), "ImageTargets/Buildings.txt");
// mModelIsLoaded = true;
// } catch (IOException e) {
// Log.e(LOGTAG, "Unable to load buildings");
// }
// Hide the Loading Dialog
mActivity.loadingDialogHandler.sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);
}
}
public void updateConfiguration()
{
mSampleAppRenderer.onConfigurationChanged(mIsActive);
}
// The render function called from SampleAppRendering by using RenderingPrimitives views.
// The state is owned by SampleAppRenderer which is controlling its lifecycle.
// State should not be cached outside this method.
public void renderFrame(State state, float[] projectionMatrix)
{
// Renders video background replacing Renderer.DrawVideoBackground()
mSampleAppRenderer.renderVideoBackground();
// state = mRenderer.begin();
// mRenderer.drawVideoBackground();
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// handle face culling, we need to detect if we are using reflection
// to determine the direction of the culling
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glCullFace(GLES20.GL_BACK);
// Did we find any trackables this frame?
int a = state.getNumTrackableResults();
for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) {
TrackableResult result = state.getTrackableResult(tIdx);
Trackable trackable = result.getTrackable();
Matrix44F modelViewMatrix_Vuforia = Tool.convertPose2GLMatrix(result.getPose());
float[] modelViewMatrix = modelViewMatrix_Vuforia.getData();
// int textureIndex = trackable.getName().equalsIgnoreCase("stones") ? 0 : 1;
// textureIndex = trackable.getName().equalsIgnoreCase("tarmac") ? 2 : textureIndex;
// deal with the modelview and projection matrices
float[] modelViewProjection = new float[16];
String targetdata = ((String) trackable.getUserData()).replace("Current Dataset : ","");
int modelIndex = getTargetIndex(targetdata);
int textureIndex = modelIndex;
curModel = mModel.get(modelIndex);
if (!mActivity.isExtendedTrackingActive()) {
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 0, curModel.getVertices());
GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT, false, 0, curModel.getNormals());
GLES20.glVertexAttribPointer(textureCoordHandle, 2, GLES20.GL_FLOAT, false, 0, curModel.getTexCoords());
GLES20.glEnableVertexAttribArray(vertexHandle);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glEnableVertexAttribArray(textureCoordHandle);
// activate texture 0, bind it, and pass to shader
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(textureIndex).mTextureID[0]);
GLES20.glUniform1i(texSampler2DHandle, 0);
// pass the model view matrix to the shader
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0);
// finally draw the teapot
GLES20.glDrawElements(GLES20.GL_TRIANGLES, curModel.getNumObjectIndex(), GLES20.GL_UNSIGNED_SHORT, curModel.getIndices());
// disable the enabled arrays
GLES20.glDisableVertexAttribArray(vertexHandle);
GLES20.glDisableVertexAttribArray(normalHandle);
GLES20.glDisableVertexAttribArray(textureCoordHandle);
} else {
GLES20.glDisable(GLES20.GL_CULL_FACE);
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 0, mBuildingsModel.getVertices());
GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT, false, 0, mBuildingsModel.getNormals());
GLES20.glVertexAttribPointer(textureCoordHandle, 2, GLES20.GL_FLOAT, false, 0, mBuildingsModel.getTexCoords());
GLES20.glEnableVertexAttribArray(vertexHandle);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glEnableVertexAttribArray(textureCoordHandle);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(3).mTextureID[0]);
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0);
GLES20.glUniform1i(texSampler2DHandle, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, mBuildingsModel.getNumObjectVertex());
SampleUtils.checkGLError("Renderer DrawBuildings");
Matrix.rotateM(modelViewMatrix, 0, 90.0f, 1.0f, 0, 0);
Matrix.scaleM(modelViewMatrix, 0, kBuildingScale, kBuildingScale, kBuildingScale);
}
Matrix.multiplyMM(modelViewProjection, 0, projectionMatrix, 0, modelViewMatrix, 0);
// activate the shader program and bind the vertex/normal/tex coords
GLES20.glUseProgram(shaderProgramID);
if (!mActivity.isExtendedTrackingActive()) {
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 0, curModel.getVertices());
GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT, false, 0, curModel.getNormals());
GLES20.glVertexAttribPointer(textureCoordHandle, 2, GLES20.GL_FLOAT, false, 0, curModel.getTexCoords());
GLES20.glEnableVertexAttribArray(vertexHandle);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glEnableVertexAttribArray(textureCoordHandle);
// activate texture 0, bind it, and pass to shader
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(textureIndex).mTextureID[0]);
GLES20.glUniform1i(texSampler2DHandle, 0);
// pass the model view matrix to the shader
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0);
// finally draw the teapot
GLES20.glDrawElements(GLES20.GL_TRIANGLES, curModel.getNumObjectIndex(), GLES20.GL_UNSIGNED_SHORT, curModel.getIndices());
// disable the enabled arrays
GLES20.glDisableVertexAttribArray(vertexHandle);
GLES20.glDisableVertexAttribArray(normalHandle);
GLES20.glDisableVertexAttribArray(textureCoordHandle);
} else {
GLES20.glDisable(GLES20.GL_CULL_FACE);
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 0, mBuildingsModel.getVertices());
GLES20.glVertexAttribPointer(textureCoordHandle, 2, GLES20.GL_FLOAT, false, 0, mBuildingsModel.getTexCoords());
GLES20.glEnableVertexAttribArray(vertexHandle);
GLES20.glEnableVertexAttribArray(textureCoordHandle);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(3).mTextureID[0]);
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0);
GLES20.glUniform1i(texSampler2DHandle, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, mBuildingsModel.getNumObjectVertex());
SampleUtils.checkGLError("Renderer DrawBuildings");
}
SampleUtils.checkGLError("Render Frame");
}
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
mRenderer.end();
}
private void printUserData(Trackable trackable)
{
String userData = (String) trackable.getUserData();
Log.d(LOGTAG, "UserData: Retrieved User Data \"" + userData + "\"");
}
public void setTextures(Vector<Texture> textures)
{
mTextures = textures;
}
private int getTargetIndex(String name){
for(int i = 0 ; i < modelNames.size() ; i++){
if(modelNames.get(i).equals(name)){
return i;
}
}
return -1;
}
}
As we know, in ARCore we can place a 3D object by clicking on a horizontal plane surface. Instead of a 3D object, I need to show a video when the user clicks the plane surface. The look and feel should be the same as when a 3D object is displayed; instead of the 3D object, the video should be displayed in preview mode.
The ARCore sample currently uses a RelativeLayout with a SurfaceView. So for displaying the video, I am using the SurfaceView and attaching it to a MediaPlayer.
public void onsurfacecreatedvideo(){
mProgram = createProgram(mVertexShader, mFragmentShader);
if (mProgram == 0) {
return;
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uMVPMatrix");
}
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uSTMatrix");
}
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
/*
* Create the SurfaceTexture that will feed this textureID,
* and pass it to the MediaPlayer
*/
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);
Surface surface = new Surface(mSurface);
mMediaPlayer.setSurface(surface);
mMediaPlayer.setScreenOnWhilePlaying(true);
surface.release();
mMediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp) {
Log.i(TAG,"ONPREPArED abhilash");
setVideoSize();
mp.start();
}
});
try {
mMediaPlayer.prepare();
} catch (IOException t) {
Log.e(TAG, "media player prepare failed");
}
synchronized(this) {
updateSurface = false;
}
mMediaPlayer.start();
}
void ondrawvideo(){
synchronized(this) {
if (updateSurface) {
mSurface.updateTexImage();
mSurface.getTransformMatrix(mSTMatrix);
updateSurface = false;
}
}
/////////////
GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
GLES20.glFinish();
}
// Visualize planes.
mPlaneRenderer.drawPlanes(mSession.getAllPlanes(), frame.getPose(), projmtx);
// Visualize anchors created by touch.
float scaleFactor = 1.0f;
for (PlaneAttachment planeAttachment : mTouches) {
ondrawvideo();
if (!planeAttachment.isTracking()) {
continue;
}
// Get the current combined pose of an Anchor and Plane in world space. The Anchor
// and Plane poses are updated during calls to session.update() as ARCore refines
// its estimate of the world.
planeAttachment.getPose().toMatrix(mAnchorMatrix, 0);
// Update and draw the model and its shadow.
mVirtualObject.updateModelMatrix(mAnchorMatrix, scaleFactor);
mVirtualObjectShadow.updateModelMatrix(mAnchorMatrix, scaleFactor);
mVirtualObject.draw(viewmtx, projmtx, lightIntensity);
mVirtualObjectShadow.draw(viewmtx, projmtx, lightIntensity);
}
} catch (Throwable t) {
// Avoid crashing the application due to unhandled exceptions.
Log.e(TAG, "Exception on the OpenGL thread", t);
}
}
Currently my output looks like this. When I click on the plane surfaces, it shows:
[screenshot: current output]
As you can see in the image below, this is what I need to achieve. I have marked it: the video should be played on this particular bugdroid image, and it should not exceed full screen; it should be shown at just the size of the bugdroid image:
[screenshot: desired result]
I did this by creating a new class called MovieClipRenderer, modeled after the ObjectRenderer class in the HelloAR sample. It creates a quad geometry and renders the texture from the media player onto the quad. The quad is anchored to a plane, so it does not move as the user looks around.
To test, I used a stock movie from https://www.videvo.net/video/chicken-on-green-screen/3435/ and added it to src/main/assets.
Then I added the member variable for the renderer to HelloArActivity:
private final MovieClipRenderer mMovieClipRenderer = new MovieClipRenderer();
In onSurfaceCreated() I initialized the renderer with the others:
mMovieClipRenderer.createOnGlThread();
To try it out, I made the first tap on a plane create the movie anchor, by changing the hit test code slightly:
if (mMovieAnchor == null) {
mMovieAnchor = hit.createAnchor();
} else {
mAnchors.add(hit.createAnchor());
}
Then at the bottom of onDrawFrame() I checked for the anchor and started playing it:
if (mMovieAnchor != null) {
// Draw chickens!
if (!mMovieClipRenderer.isStarted()) {
mMovieClipRenderer.play("chicken.mp4", this);
}
mMovieAnchor.getPose().toMatrix(mAnchorMatrix,0);
mMovieClipRenderer.update(mAnchorMatrix, 0.25f);
mMovieClipRenderer.draw(mMovieAnchor.getPose(), viewmtx, projmtx);
}
The rendering class is fairly long, but it is standard GLES code: it creates the OES texture, initializes the video player, creates the vertices of a quad, and loads a fragment shader that draws the OES texture.
/**
* Renders a movie clip with a green screen aware shader.
* <p>
* Copyright 2018 Google LLC
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class MovieClipRenderer implements
SurfaceTexture.OnFrameAvailableListener {
private static final String TAG = MovieClipRenderer.class.getSimpleName();
// Quad geometry
private static final int COORDS_PER_VERTEX = 3;
private static final int TEXCOORDS_PER_VERTEX = 2;
private static final int FLOAT_SIZE = 4;
private static final float[] QUAD_COORDS = new float[]{
-1.0f, -1.0f, 0.0f,
-1.0f, +1.0f, 0.0f,
+1.0f, -1.0f, 0.0f,
+1.0f, +1.0f, 0.0f,
};
private static final float[] QUAD_TEXCOORDS = new float[]{
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 1.0f,
1.0f, 0.0f,
};
// Shader for a flat quad.
private static final String VERTEX_SHADER =
"uniform mat4 u_ModelViewProjection;\n\n" +
"attribute vec4 a_Position;\n" +
"attribute vec2 a_TexCoord;\n\n" +
"varying vec2 v_TexCoord;\n\n" +
"void main() {\n" +
" gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0);\n" +
" v_TexCoord = a_TexCoord;\n" +
"}";
// The fragment shader samples the video texture, blending to
// transparent for the green screen
// color. The color was determined by sampling a screenshot
// of the video in an image editor.
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n" +
"\n" +
"precision mediump float;\n" +
"varying vec2 v_TexCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"\n" +
"void main() {\n" +
" //TODO make this a uniform variable - " +
" but this is the color of the background. 17ad2b\n" +
" vec3 keying_color = vec3(23.0f/255.0f, 173.0f/255.0f, 43.0f/255.0f);\n" +
" float thresh = 0.4f; // 0 - 1.732\n" +
" float slope = 0.2;\n" +
" vec3 input_color = texture2D(sTexture, v_TexCoord).rgb;\n" +
" float d = abs(length(abs(keying_color.rgb - input_color.rgb)));\n" +
" float edge0 = thresh * (1.0f - slope);\n" +
" float alpha = smoothstep(edge0,thresh,d);\n" +
" gl_FragColor = vec4(input_color, alpha);\n" +
"}";
// Geometry data in GLES friendly data structure.
private FloatBuffer mQuadVertices;
private FloatBuffer mQuadTexCoord;
// Shader program id and parameters.
private int mQuadProgram;
private int mQuadPositionParam;
private int mQuadTexCoordParam;
private int mModelViewProjectionUniform;
private int mTextureId = -1;
// Matrix for the location and perspective of the quad.
private float[] mModelMatrix = new float[16];
// Media player, texture and other bookkeeping.
private MediaPlayer player;
private SurfaceTexture videoTexture;
private boolean frameAvailable = false;
private boolean started = false;
private boolean done;
private boolean prepared;
private static Handler handler;
// Lock used for waiting if the player was not yet created.
private final Object lock = new Object();
/**
* Update the model matrix based on the location and scale to draw the quad.
*/
public void update(float[] modelMatrix, float scaleFactor) {
float[] scaleMatrix = new float[16];
Matrix.setIdentityM(scaleMatrix, 0);
scaleMatrix[0] = scaleFactor;
scaleMatrix[5] = scaleFactor;
scaleMatrix[10] = scaleFactor;
Matrix.multiplyMM(mModelMatrix, 0, modelMatrix, 0, scaleMatrix, 0);
}
/**
* Initialize the GLES objects.
* This is called from the GL render thread to make sure
* it has access to the EGLContext.
*/
public void createOnGlThread() {
// 1 texture to hold the video frame.
int textures[] = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureId = textures[0];
int mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
GLES20.glBindTexture(mTextureTarget, mTextureId);
GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_NEAREST);
videoTexture = new SurfaceTexture(mTextureId);
videoTexture.setOnFrameAvailableListener(this);
// Make a quad to hold the movie
ByteBuffer bbVertices = ByteBuffer.allocateDirect(
QUAD_COORDS.length * FLOAT_SIZE);
bbVertices.order(ByteOrder.nativeOrder());
mQuadVertices = bbVertices.asFloatBuffer();
mQuadVertices.put(QUAD_COORDS);
mQuadVertices.position(0);
int numVertices = 4;
ByteBuffer bbTexCoords = ByteBuffer.allocateDirect(
numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
bbTexCoords.order(ByteOrder.nativeOrder());
mQuadTexCoord = bbTexCoords.asFloatBuffer();
mQuadTexCoord.put(QUAD_TEXCOORDS);
mQuadTexCoord.position(0);
int vertexShader = loadGLShader(TAG, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
int fragmentShader = loadGLShader(TAG,
GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
mQuadProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(mQuadProgram, vertexShader);
GLES20.glAttachShader(mQuadProgram, fragmentShader);
GLES20.glLinkProgram(mQuadProgram);
GLES20.glUseProgram(mQuadProgram);
ShaderUtil.checkGLError(TAG, "Program creation");
mQuadPositionParam = GLES20.glGetAttribLocation(mQuadProgram, "a_Position");
mQuadTexCoordParam = GLES20.glGetAttribLocation(mQuadProgram, "a_TexCoord");
mModelViewProjectionUniform = GLES20.glGetUniformLocation(
mQuadProgram, "u_ModelViewProjection");
ShaderUtil.checkGLError(TAG, "Program parameters");
Matrix.setIdentityM(mModelMatrix, 0);
initializeMediaPlayer();
}
public void draw(Pose pose, float[] cameraView, float[] cameraPerspective) {
if (done || !prepared) {
return;
}
synchronized (this) {
if (frameAvailable) {
videoTexture.updateTexImage();
frameAvailable = false;
}
}
float[] modelMatrix = new float[16];
pose.toMatrix(modelMatrix, 0);
float[] modelView = new float[16];
float[] modelViewProjection = new float[16];
Matrix.multiplyMM(modelView, 0, cameraView, 0, mModelMatrix, 0);
Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, modelView, 0);
ShaderUtil.checkGLError(TAG, "Before draw");
GLES20.glEnable(GL10.GL_BLEND);
GLES20.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
GLES20.glUseProgram(mQuadProgram);
// Set the vertex positions.
GLES20.glVertexAttribPointer(
mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
false, 0, mQuadVertices);
// Set the texture coordinates.
GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
GLES20.GL_FLOAT, false, 0, mQuadTexCoord);
// Enable vertex arrays
GLES20.glEnableVertexAttribArray(mQuadPositionParam);
GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);
GLES20.glUniformMatrix4fv(mModelViewProjectionUniform, 1, false,
modelViewProjection, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Disable vertex arrays
GLES20.glDisableVertexAttribArray(mQuadPositionParam);
GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);
ShaderUtil.checkGLError(TAG, "Draw");
}
private void initializeMediaPlayer() {
if (handler == null)
handler = new Handler(Looper.getMainLooper());
handler.post(new Runnable() {
@Override
public void run() {
synchronized (lock) {
player = new MediaPlayer();
lock.notify();
}
}
});
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
synchronized (this) {
frameAvailable = true;
}
}
public boolean play(final String filename, Context context)
throws FileNotFoundException {
// Wait for the player to be created.
if (player == null) {
synchronized (lock) {
while (player == null) {
try {
lock.wait();
} catch (InterruptedException e) {
return false;
}
}
}
}
player.reset();
done = false;
player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mp) {
prepared = true;
mp.start();
}
});
player.setOnErrorListener(new MediaPlayer.OnErrorListener() {
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
done = true;
Log.e("VideoPlayer",
String.format("Error occured: %d, %d\n", what, extra));
return false;
}
});
player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
done = true;
}
});
player.setOnInfoListener(new MediaPlayer.OnInfoListener() {
@Override
public boolean onInfo(MediaPlayer mediaPlayer, int i, int i1) {
return false;
}
});
try {
AssetManager assets = context.getAssets();
AssetFileDescriptor descriptor = assets.openFd(filename);
player.setDataSource(descriptor.getFileDescriptor(),
descriptor.getStartOffset(),
descriptor.getLength());
player.setSurface(new Surface(videoTexture));
player.prepareAsync();
synchronized (this) {
started = true;
}
} catch (IOException e) {
Log.e(TAG, "Exception preparing movie", e);
return false;
}
return true;
}
public synchronized boolean isStarted() {
return started;
}
static int loadGLShader(String tag, int type, String code) {
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, code);
GLES20.glCompileShader(shader);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
if (shader == 0) {
throw new RuntimeException("Error creating shader.");
}
return shader;
}
}
If you need to play a video on an AR plane using Sceneform, create a video node and add a media player to it. Refer to this link: https://github.com/SceneView/sceneform-android/blob/master/samples/video-texture/src/main/java/com/google/ar/sceneform/samples/videotexture/MainActivity.java
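A minimal sketch of that approach, loosely following the linked video-texture sample (the R.raw resource ids, the anchorNode variable, and the "videoTexture" material parameter name are taken from that sample or assumed here):
// Route the MediaPlayer's frames into a Sceneform ExternalTexture.
ExternalTexture texture = new ExternalTexture();
MediaPlayer mediaPlayer = MediaPlayer.create(context, R.raw.video); // assumed raw resource
mediaPlayer.setSurface(texture.getSurface());
mediaPlayer.setLooping(true);
// Load a flat quad renderable whose material samples the external texture,
// then hang it off an anchor node created from the tap's hit result.
ModelRenderable.builder()
        .setSource(context, R.raw.chroma_key_video) // quad model shipped with the sample
        .build()
        .thenAccept(renderable -> {
            renderable.getMaterial().setExternalTexture("videoTexture", texture);
            Node videoNode = new Node();
            videoNode.setParent(anchorNode);
            videoNode.setRenderable(renderable);
            mediaPlayer.start();
        });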
I'm having an issue rendering textures in OpenGL ES 2 on Android. The image is being drawn, but the texture isn't wrapping correctly by the look of it.
I have tried all the usual things to fix the issue, but nothing has worked.
Here's how one of the images should look:
But here's how they look on the screen:
Ignore the black border; that's part of the texture.
Here is my Texture class:
public class HFTexture {
private int width;
private int height;
private int textureId;
private HFGame game;
private String textureFile;
public HFTexture(HFGame game, String textureFile) {
this.game = game;
this.textureFile = textureFile;
//load();
}
public void load() {
int[] texIds = new int[1];
GLES20.glGenTextures(1, texIds, 0);
textureId = texIds[0];
InputStream in;
try {
in = game.getFileManager().getAsset(textureFile);
Bitmap bitmap = BitmapFactory.decodeStream(in);
width = bitmap.getWidth();
height = bitmap.getHeight();
bind();
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
bitmap.recycle();
} catch(IOException ex) {
}
}
public void bind() {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
}
public void activate(HFShader shader, int texture) {
GLES20.glActiveTexture(texture);
bind();
GLES20.glUniform1i(shader.getHandle("sampler0"), 0);
}
public void delete() {
bind();
int[] textureIds = {textureId};
GLES20.glDeleteTextures(1, textureIds, 0);
}
}
Here is my Vertices class:
public class Vertices {
private FloatBuffer vertexBuffer;
private FloatBuffer normalBuffer;
private FloatBuffer texCoordBuffer;
private ShortBuffer indexBuffer;
private final int VERTEX_COUNT;
private final int VERTEX_STRIDE;
private final int VERTEX_SIZE = 3;
private final int NORMAL_STRIDE;
private final int NORMAL_SIZE = 3;
private final int TEXTURE_COORD_STRIDE;
private final int TEXTURE_COORD_SIZE = 2;
private final int INDEX_COUNT;
public Vertices(float[] vertices, float[] normals, float[] texCoords, short[] indices) {
VERTEX_STRIDE = VERTEX_SIZE * 4;
NORMAL_STRIDE = NORMAL_SIZE * 4;
TEXTURE_COORD_STRIDE = TEXTURE_COORD_SIZE * 4;
VERTEX_COUNT = vertices.length;
INDEX_COUNT = indices.length;
ByteBuffer bb = ByteBuffer.allocateDirect(VERTEX_COUNT * VERTEX_STRIDE);
bb.order(ByteOrder.nativeOrder());
vertexBuffer = bb.asFloatBuffer();
vertexBuffer.put(vertices);
vertexBuffer.position(0);
bb = ByteBuffer.allocateDirect(normals.length * NORMAL_STRIDE);
bb.order(ByteOrder.nativeOrder());
normalBuffer = bb.asFloatBuffer();
normalBuffer.put(normals);
normalBuffer.position(0);
bb = ByteBuffer.allocateDirect(texCoords.length * TEXTURE_COORD_STRIDE);
bb.order(ByteOrder.nativeOrder());
texCoordBuffer = bb.asFloatBuffer();
texCoordBuffer.put(texCoords);
texCoordBuffer.position(0);
bb = ByteBuffer.allocateDirect(indices.length * 2);
bb.order(ByteOrder.nativeOrder());
indexBuffer = bb.asShortBuffer();
indexBuffer.put(indices);
indexBuffer.position(0);
}
public void bind(HFShader shader) {
int positionHandle = shader.getHandle("position");
int normalHandle = shader.getHandle("normal");
int texCoordHandle = shader.getHandle("texCoord");
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glVertexAttribPointer(
positionHandle, VERTEX_SIZE,
GLES20.GL_FLOAT, false,
VERTEX_STRIDE, vertexBuffer);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glVertexAttribPointer(
normalHandle, NORMAL_SIZE,
GLES20.GL_FLOAT, false,
NORMAL_STRIDE, normalBuffer);
GLES20.glEnableVertexAttribArray(texCoordHandle);
GLES20.glVertexAttribPointer(
texCoordHandle, TEXTURE_COORD_SIZE,
GLES20.GL_FLOAT, false,
TEXTURE_COORD_STRIDE, vertexBuffer);
}
public void unbind(HFShader shader) {
int positionHandle = shader.getHandle("position");
int normalHandle = shader.getHandle("normal");
int texCoordHandle = shader.getHandle("texCoord");
GLES20.glDisableVertexAttribArray(positionHandle);
GLES20.glDisableVertexAttribArray(normalHandle);
GLES20.glDisableVertexAttribArray(texCoordHandle);
}
public void draw() {
if(indexBuffer != null) {
GLES20.glDrawElements(GLES20.GL_TRIANGLES, INDEX_COUNT, GLES20.GL_UNSIGNED_SHORT, indexBuffer);
} else {
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, VERTEX_COUNT);
}
}
}
And here is my Vertex data:
float[] verts = {
-(width / 2f), (height / 2f), 0f, // index 0
-(width / 2f), -(height / 2f), 0f, // index 1
(width / 2f), -(height / 2f), 0f, // index 2
(width / 2f), (height / 2f), 0f // index 3
};
float[] norms = {
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f,
0.0f, 0.0f, -1.0f
};
float[] texCoords = {
0f, 1f,
0f, 0f,
1f, 0f,
1f, 1f
};
short[] indices = {
0,1,2,2,3,0
};
I've tried adding the clamp-to-edge texture parameters as well, but that didn't seem to help. Have I just put the vertex and texture coords in the wrong order, or is there something I'm missing altogether?
You are setting your vertex buffer instead of your texture coord buffer for the texture coordinates:
GLES20.glEnableVertexAttribArray(texCoordHandle);
GLES20.glVertexAttribPointer(
texCoordHandle, TEXTURE_COORD_SIZE,
GLES20.GL_FLOAT, false,
TEXTURE_COORD_STRIDE, vertexBuffer); // <-- here
should be:
GLES20.glEnableVertexAttribArray(texCoordHandle);
GLES20.glVertexAttribPointer(
texCoordHandle, TEXTURE_COORD_SIZE,
GLES20.GL_FLOAT, false,
TEXTURE_COORD_STRIDE, texCoordBuffer);
I created a 3D object in Android using the tutorial from Learn OpenGL ES, building the cube from lesson six of that tutorial (texture filtering). Now I want to replace the cube with my own object (a strawberry model I created). To display my object in the view, I parse the model file (which uses the .obj extension) in my renderer class, but the view shows random triangles instead of the object.
This is my parsing code:
public ObjLoader(Context mActivityContext) {
FileReader fr;
String str;
ArrayList<Float> tempModelVertices = new ArrayList<Float>();
ArrayList<Float> tempTextureVertices = new ArrayList<Float>();
ArrayList<Float> tempNormalVertices = new ArrayList<Float>();
ArrayList<Integer> facesM = new ArrayList<Integer>();
ArrayList<Integer> facesT = new ArrayList<Integer>();
ArrayList<Integer> facesN = new ArrayList<Integer>();
try {
fr = new FileReader(new File("model/straw_obj"));
BufferedReader br = new BufferedReader(fr);
while((str = br.readLine())!=null){
if(str.startsWith("f")){
String[] strAr = str.replaceAll("f", "").trim().split(" ");
for(String s : strAr){
String[] cornerAr = s.split("/");
facesM.add(Integer.parseInt(cornerAr[0].trim())-1);
facesT.add(Integer.parseInt(cornerAr[1].trim())-1);
facesN.add(Integer.parseInt(cornerAr[2].trim())-1);
}
}
else if(str.startsWith("vt")){
String[] strAr = str.replaceAll("vt", "").trim().split(" ");
tempTextureVertices.add(Float.valueOf(strAr[0].trim()));
tempTextureVertices.add(-1*Float.valueOf(strAr[1].trim()));
}
else if(str.startsWith("vn")){
String[] strAr = str.replaceAll("vn", "").trim().split(" ");
tempNormalVertices.add(Float.valueOf(strAr[0].trim()));
tempNormalVertices.add(Float.valueOf(strAr[1].trim()));
tempNormalVertices.add(Float.valueOf(strAr[2].trim()));
}
else if(str.startsWith("v")){
String[] strAr = str.replaceAll("v", "").trim().split(" ");
tempModelVertices.add(Float.valueOf(strAr[0].trim()));
tempModelVertices.add(Float.valueOf(strAr[1].trim()));
tempModelVertices.add(Float.valueOf(strAr[2].trim()));
}
}
//Log.v(LOG_TAG, "v :"+ String.valueOf(v) + "vt :"+ String.valueOf(vt) + "vn :"+ String.valueOf(vn) + "f :"+ String.valueOf(f));
} catch (IOException e) {
// TODO Auto-generated catch block
Log.v(TAG, "error");
}
Log.v(TAG, "vt " + String.valueOf(tempTextureVertices.size()) + " vn " + String.valueOf(tempNormalVertices.size()) + " v " + String.valueOf(tempModelVertices.size()));
ModelPositionData = new float[facesM.size()];
ModelTextureCoordinateData = new float[facesT.size()];
ModelNormalData = new float[facesN.size()];
for(int i=0; i<facesM.size(); i++){
ModelPositionData[i] = tempModelVertices.get(facesM.get(i));
}
for(int i=0; i<facesT.size(); i++){
ModelTextureCoordinateData[i] = tempTextureVertices.get(facesT.get(i));
}
for(int i=0; i<facesN.size(); i++){
ModelNormalData[i] = tempNormalVertices.get(facesN.get(i));
}
}
And this is how I create the GLSurfaceView renderer:
public class TesterRenderer implements GLSurfaceView.Renderer{
private static final String TAG = "TesterRenderer";
private final Context mActivityContext;
/**
* Store the model matrix. This matrix is used to move models from object space (where each model can be thought
* of being located at the center of the universe) to world space.
*/
private float[] mModelMatrix = new float[16];
/**
* Store the view matrix. This can be thought of as our camera. This matrix transforms world space to eye space;
* it positions things relative to our eye.
*/
private float[] mViewMatrix = new float[16];
/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */
private float[] mProjectionMatrix = new float[16];
/** Allocate storage for the final combined matrix. This will be passed into the shader program. */
private float[] mMVPMatrix = new float[16];
/** Store the accumulated rotation. */
private final float[] mAccumulatedRotation = new float[16];
/** Store the current rotation. */
private final float[] mCurrentRotation = new float[16];
/** A temporary matrix. */
private float[] mTemporaryMatrix = new float[16];
/**
* Stores a copy of the model matrix specifically for the light position.
*/
private float[] mLightModelMatrix = new float[16];
/** Store our model data in a float buffer. */
private final FloatBuffer mModelPositions;
private final FloatBuffer mModelNormals;
private final FloatBuffer mModelTextureCoordinates;
// private final FloatBuffer mModelTextureCoordinatesForPlane;
/** This will be used to pass in the transformation matrix. */
private int mMVPMatrixHandle;
/** This will be used to pass in the modelview matrix. */
private int mMVMatrixHandle;
/** This will be used to pass in the light position. */
private int mLightPosHandle;
/** This will be used to pass in the texture. */
private int mTextureUniformHandle;
/** This will be used to pass in model position information. */
private int mPositionHandle;
/** This will be used to pass in model normal information. */
private int mNormalHandle;
/** This will be used to pass in model texture coordinate information. */
private int mTextureCoordinateHandle;
/** How many bytes per float. */
private final int mBytesPerFloat = 4;
/** Size of the position data in elements. */
private final int mPositionDataSize = 3;
/** Size of the normal data in elements. */
private final int mNormalDataSize = 3;
/** Size of the texture coordinate data in elements. */
private final int mTextureCoordinateDataSize = 2;
/** Used to hold a light centered on the origin in model space. We need a 4th coordinate so we can get translations to work when
* we multiply this by our transformation matrices. */
private final float[] mLightPosInModelSpace = new float[] {0.0f, 0.0f, 0.0f, 1.0f};
/** Used to hold the current position of the light in world space (after transformation via model matrix). */
private final float[] mLightPosInWorldSpace = new float[4];
/** Used to hold the transformed position of the light in eye space (after transformation via modelview matrix) */
private final float[] mLightPosInEyeSpace = new float[4];
/** This is a handle to our cube shading program. */
private int mProgramHandle;
/** This is a handle to our light point program. */
private int mPointProgramHandle;
/** These are handles to our texture data. */
private int mTextureDataHandle;
// private int mGrassDataHandle;
/** Temporary place to save the min and mag filter, in case the activity was restarted. */
private int mQueuedMinFilter;
private int mQueuedMagFilter;
// These still work without volatile, but refreshes are not guaranteed to happen.
public volatile float mDeltaX;
public volatile float mDeltaY;
public TesterRenderer(final Context activityContext)
{
mActivityContext = activityContext;
ObjLoader obj = new ObjLoader(mActivityContext);
mModelPositions = ByteBuffer.allocateDirect(obj.ModelPositionData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mModelPositions.put(obj.ModelPositionData).position(0);
mModelNormals = ByteBuffer.allocateDirect(obj.ModelNormalData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mModelNormals.put(obj.ModelNormalData).position(0);
mModelTextureCoordinates = ByteBuffer.allocateDirect(obj.ModelTextureCoordinateData.length * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mModelTextureCoordinates.put(obj.ModelTextureCoordinateData).position(0);
}
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config)
{
// Set the background clear color to black.
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
// Use culling to remove back faces.
GLES20.glEnable(GLES20.GL_CULL_FACE);
// Enable depth testing
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// The below glEnable() call is a holdover from OpenGL ES 1, and is not needed in OpenGL ES 2.
// Enable texture mapping
// GLES20.glEnable(GLES20.GL_TEXTURE_2D);
// Position the eye in front of the origin.
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = -0.5f;
// We are looking toward the distance
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = -5.0f;
// Set our up vector. This is where our head would be pointing were we holding the camera.
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
// Set the view matrix. This matrix can be said to represent the camera position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
final String vertexShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.per_pixel_vertex_shader_tex_and_light);
final String fragmentShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.per_pixel_fragment_shader_tex_and_light);
final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
mProgramHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
new String[] {"a_Position", "a_Normal", "a_TexCoordinate"});
// Define a simple shader program for our point.
final String pointVertexShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.point_vertex_shader);
final String pointFragmentShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.point_fragment_shader);
final int pointVertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, pointVertexShader);
final int pointFragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, pointFragmentShader);
mPointProgramHandle = ShaderHelper.createAndLinkProgram(pointVertexShaderHandle, pointFragmentShaderHandle,
new String[] {"a_Position"});
// Load the texture
mTextureDataHandle = TextureHelper.loadTexture(mActivityContext, R.drawable.strawberry_texture);
GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
// mGrassDataHandle = TextureHelper.loadTexture(mActivityContext, R.drawable.noisy_grass_public_domain);
// GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
if (mQueuedMinFilter != 0)
{
setMinFilter(mQueuedMinFilter);
}
if (mQueuedMagFilter != 0)
{
setMagFilter(mQueuedMagFilter);
}
// Initialize the accumulated rotation matrix
Matrix.setIdentityM(mAccumulatedRotation, 0);
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height)
{
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 1000.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}
@Override
public void onDrawFrame(GL10 glUnused)
{
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Do a complete rotation every 10 seconds.
long time = SystemClock.uptimeMillis() % 10000L;
long slowTime = SystemClock.uptimeMillis() % 100000L;
float angleInDegrees = (360.0f / 10000.0f) * ((int) time);
float slowAngleInDegrees = (360.0f / 100000.0f) * ((int) slowTime);
// Set our per-vertex lighting program.
GLES20.glUseProgram(mProgramHandle);
// Set program handles for cube drawing.
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVPMatrix");
mMVMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVMatrix");
mLightPosHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_LightPos");
mTextureUniformHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_Texture");
mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Position");
mNormalHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Normal");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_TexCoordinate");
// Calculate position of the light. Rotate and then push into the distance.
Matrix.setIdentityM(mLightModelMatrix, 0);
Matrix.translateM(mLightModelMatrix, 0, 0.0f, 0.0f, -2.0f);
Matrix.rotateM(mLightModelMatrix, 0, angleInDegrees, 0.0f, 1.0f, 0.0f);
Matrix.translateM(mLightModelMatrix, 0, 0.0f, 0.0f, 3.5f);
Matrix.multiplyMV(mLightPosInWorldSpace, 0, mLightModelMatrix, 0, mLightPosInModelSpace, 0);
Matrix.multiplyMV(mLightPosInEyeSpace, 0, mViewMatrix, 0, mLightPosInWorldSpace, 0);
// Draw a cube.
// Translate the cube into the screen.
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -7.0f);
// Set a matrix that contains the current rotation.
Matrix.setIdentityM(mCurrentRotation, 0);
Matrix.rotateM(mCurrentRotation, 0, mDeltaX, 0.0f, 1.0f, 0.0f);
Matrix.rotateM(mCurrentRotation, 0, mDeltaY, 1.0f, 0.0f, 0.0f);
mDeltaX = 0.0f;
mDeltaY = 0.0f;
// Multiply the current rotation by the accumulated rotation, and then set the accumulated rotation to the result.
Matrix.multiplyMM(mTemporaryMatrix, 0, mCurrentRotation, 0, mAccumulatedRotation, 0);
System.arraycopy(mTemporaryMatrix, 0, mAccumulatedRotation, 0, 16);
// Rotate the cube taking the overall rotation into account.
Matrix.multiplyMM(mTemporaryMatrix, 0, mModelMatrix, 0, mAccumulatedRotation, 0);
System.arraycopy(mTemporaryMatrix, 0, mModelMatrix, 0, 16);
// Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
// Pass in the texture coordinate information
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
mModelTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false,
0, mModelTextureCoordinates);
drawModel();
// Draw a plane
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0.0f, -2.0f, -5.0f);
Matrix.scaleM(mModelMatrix, 0, 25.0f, 1.0f, 25.0f);
Matrix.rotateM(mModelMatrix, 0, slowAngleInDegrees, 0.0f, 1.0f, 0.0f);
// Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// Bind the texture to this unit.
//GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
// Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
// Pass in the texture coordinate information
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
drawModel();
GLES20.glUseProgram(mPointProgramHandle);
drawLight();
}
public void setMinFilter(final int filter)
{
if (mTextureDataHandle != 0)
{
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, filter);
// GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
// GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, filter);
}
else
{
mQueuedMinFilter = filter;
}
}
public void setMagFilter(final int filter)
{
if (mTextureDataHandle != 0)
{
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, filter);
// GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
// GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, filter);
}
else
{
mQueuedMagFilter = filter;
}
}
private void drawModel()
{
// Pass in the position information
GLES20.glEnableVertexAttribArray(mPositionHandle);
mModelPositions.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
0, mModelPositions);
// Pass in the normal information
GLES20.glEnableVertexAttribArray(mNormalHandle);
mModelNormals.position(0);
GLES20.glVertexAttribPointer(mNormalHandle, mNormalDataSize, GLES20.GL_FLOAT, false,
0, mModelNormals);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// Pass in the modelview matrix.
GLES20.glUniformMatrix4fv(mMVMatrixHandle, 1, false, mMVPMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
// Pass in the combined matrix.
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Pass in the light position in eye space.
GLES20.glUniform3f(mLightPosHandle, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1], mLightPosInEyeSpace[2]);
// Draw the cube.
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
}
/**
* Draws a point representing the position of the light.
*/
private void drawLight()
{
final int pointMVPMatrixHandle = GLES20.glGetUniformLocation(mPointProgramHandle, "u_MVPMatrix");
final int pointPositionHandle = GLES20.glGetAttribLocation(mPointProgramHandle, "a_Position");
// Pass in the position.
GLES20.glVertexAttrib3f(pointPositionHandle, mLightPosInModelSpace[0], mLightPosInModelSpace[1], mLightPosInModelSpace[2]);
// Since we are not using a buffer object, disable vertex arrays for this attribute.
GLES20.glDisableVertexAttribArray(pointPositionHandle);
// Pass in the transformation matrix.
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mLightModelMatrix, 0);
Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
GLES20.glUniformMatrix4fv(pointMVPMatrixHandle, 1, false, mMVPMatrix, 0);
// Draw the point.
GLES20.glDrawArrays(GLES20.GL_POINTS, 0, 1);
}
}
Can someone help me fix this?
It looks like there is a problem with the way you reorder the coordinates based on the indices in the faces:
for(int i=0; i<facesM.size(); i++){
ModelPositionData[i] = tempModelVertices.get(facesM.get(i));
}
Each position consists of 3 coordinates. This loop copies only one value per position, though. It should look something like this:
for(int i=0; i<facesM.size(); i++){
ModelPositionData[3 * i ] = tempModelVertices.get(3 * facesM.get(i) );
ModelPositionData[3 * i + 1] = tempModelVertices.get(3 * facesM.get(i) + 1);
ModelPositionData[3 * i + 2] = tempModelVertices.get(3 * facesM.get(i) + 2);
}
You will also need to adjust the allocation accordingly:
ModelPositionData = new float[3 * facesM.size()];
and make the equivalent changes for the normals and texture coordinates.
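For the texture coordinates, which have two components per vertex instead of three, the same pattern looks like this:
ModelTextureCoordinateData = new float[2 * facesT.size()];
for(int i=0; i<facesT.size(); i++){
    ModelTextureCoordinateData[2 * i    ] = tempTextureVertices.get(2 * facesT.get(i)    );
    ModelTextureCoordinateData[2 * i + 1] = tempTextureVertices.get(2 * facesT.get(i) + 1);
}
The normals use three components per vertex, so they follow the position code exactly.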
I want to make my own tracked object, as in the Image Targets example; I want to replace the Teapot with another 3D shape. The problem is I can't understand the code very well. Here is the code:
The Teapot class has two functions, setVerts() and setIndices(), which contain a lot of vertex and index numbers.
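For illustration, a replacement shape in the same pattern could be a minimal mesh class like the following sketch (a hypothetical flat quad, not the sample's actual data; the accessor names match the ones the renderer calls on mTeapot below):
// Uses java.nio.Buffer, java.nio.ByteBuffer, and java.nio.ByteOrder.
public class Quad {
    private final Buffer mVertices;
    private final Buffer mTexCoords;
    private final Buffer mNormals;
    private final Buffer mIndices;

    public Quad() {
        // Two triangles forming a unit quad; replace with your own shape's data.
        float[] verts = { -1,-1,0,  1,-1,0,  1,1,0,  -1,1,0 };
        float[] tex   = { 0,0,  1,0,  1,1,  0,1 };
        float[] norms = { 0,0,1,  0,0,1,  0,0,1,  0,0,1 };
        short[] idx   = { 0,1,2,  0,2,3 };
        mVertices  = fillFloats(verts);
        mTexCoords = fillFloats(tex);
        mNormals   = fillFloats(norms);
        mIndices   = ByteBuffer.allocateDirect(idx.length * 2)
                .order(ByteOrder.nativeOrder()).asShortBuffer().put(idx).position(0);
    }

    private static Buffer fillFloats(float[] data) {
        return ByteBuffer.allocateDirect(data.length * 4)
                .order(ByteOrder.nativeOrder()).asFloatBuffer().put(data).position(0);
    }

    public Buffer getVertices()  { return mVertices; }
    public Buffer getTexCoords() { return mTexCoords; }
    public Buffer getNormals()   { return mNormals; }
    public Buffer getIndices()   { return mIndices; }
    public int getNumObjectIndex() { return 6; } // number of entries in idx
}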
And the ImageTargetRenderer is:
// The renderer class for the ImageTargets sample.
public class ImageTargetRenderer implements GLSurfaceView.Renderer
{
private static final String LOGTAG = "ImageTargetRenderer";
private SampleApplicationSession vuforiaAppSession;
private ImageTargets mActivity;
private Vector<Texture> mTextures;
private int shaderProgramID;
private int vertexHandle;
private int normalHandle;
private int textureCoordHandle;
private int mvpMatrixHandle;
private int texSampler2DHandle;
private Teapot mTeapot;
private float kBuildingScale = 12.0f;
private SampleApplication3DModel mBuildingsModel;
private Renderer mRenderer;
boolean mIsActive = false;
private static final float OBJECT_SCALE_FLOAT = 3.0f;
public ImageTargetRenderer(ImageTargets activity,
SampleApplicationSession session)
{
mActivity = activity;
vuforiaAppSession = session;
}
// Called to draw the current frame.
@Override
public void onDrawFrame(GL10 gl)
{
if (!mIsActive)
return;
// Call our function to render content
renderFrame();
}
// Called when the surface is created or recreated.
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config)
{
Log.d(LOGTAG, "GLRenderer.onSurfaceCreated");
initRendering();
// Call Vuforia function to (re)initialize rendering after first use
// or after OpenGL ES context was lost (e.g. after onPause/onResume):
vuforiaAppSession.onSurfaceCreated();
}
// Called when the surface changed size.
@Override
public void onSurfaceChanged(GL10 gl, int width, int height)
{
Log.d(LOGTAG, "GLRenderer.onSurfaceChanged");
// Call Vuforia function to handle render surface size changes:
vuforiaAppSession.onSurfaceChanged(width, height);
}
// Function for initializing the renderer.
private void initRendering()
{
mTeapot = new Teapot();
mRenderer = Renderer.getInstance();
GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f
: 1.0f);
for (Texture t : mTextures)
{
GLES20.glGenTextures(1, t.mTextureID, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, t.mTextureID[0]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA,
t.mWidth, t.mHeight, 0, GLES20.GL_RGBA,
GLES20.GL_UNSIGNED_BYTE, t.mData);
}
shaderProgramID = SampleUtils.createProgramFromShaderSrc(
CubeShaders.CUBE_MESH_VERTEX_SHADER,
CubeShaders.CUBE_MESH_FRAGMENT_SHADER);
vertexHandle = GLES20.glGetAttribLocation(shaderProgramID,
"vertexPosition");
normalHandle = GLES20.glGetAttribLocation(shaderProgramID,
"vertexNormal");
textureCoordHandle = GLES20.glGetAttribLocation(shaderProgramID,
"vertexTexCoord");
mvpMatrixHandle = GLES20.glGetUniformLocation(shaderProgramID,
"modelViewProjectionMatrix");
texSampler2DHandle = GLES20.glGetUniformLocation(shaderProgramID,
"texSampler2D");
try
{
mBuildingsModel = new SampleApplication3DModel();
mBuildingsModel.loadModel(mActivity.getResources().getAssets(),
"ImageTargets/Buildings.txt");
} catch (IOException e)
{
Log.e(LOGTAG, "Unable to load buildings");
}
// Hide the Loading Dialog
mActivity.loadingDialogHandler
.sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);
}
// The render function.
private void renderFrame()
{
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
State state = mRenderer.begin();
mRenderer.drawVideoBackground();
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
// handle face culling, we need to detect if we are using reflection
// to determine the direction of the culling
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glCullFace(GLES20.GL_BACK);
if (Renderer.getInstance().getVideoBackgroundConfig().getReflection() == VIDEO_BACKGROUND_REFLECTION.VIDEO_BACKGROUND_REFLECTION_ON)
GLES20.glFrontFace(GLES20.GL_CW); // Front camera
else
GLES20.glFrontFace(GLES20.GL_CCW); // Back camera
// did we find any trackables this frame?
for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
{
TrackableResult result = state.getTrackableResult(tIdx);
Trackable trackable = result.getTrackable();
printUserData(trackable);
Matrix44F modelViewMatrix_Vuforia = Tool
.convertPose2GLMatrix(result.getPose());
float[] modelViewMatrix = modelViewMatrix_Vuforia.getData();
int textureIndex = trackable.getName().equalsIgnoreCase("stones") ? 0
: 1;
textureIndex = trackable.getName().equalsIgnoreCase("tarmac") ? 2
: textureIndex;
// deal with the modelview and projection matrices
float[] modelViewProjection = new float[16];
if (!mActivity.isExtendedTrackingActive())
{
Matrix.translateM(modelViewMatrix, 0, 0.0f, 0.0f,
OBJECT_SCALE_FLOAT);
Matrix.scaleM(modelViewMatrix, 0, OBJECT_SCALE_FLOAT,
OBJECT_SCALE_FLOAT, OBJECT_SCALE_FLOAT);
} else
{
Matrix.rotateM(modelViewMatrix, 0, 90.0f, 1.0f, 0, 0);
Matrix.scaleM(modelViewMatrix, 0, kBuildingScale,
kBuildingScale, kBuildingScale);
}
Matrix.multiplyMM(modelViewProjection, 0, vuforiaAppSession
.getProjectionMatrix().getData(), 0, modelViewMatrix, 0);
// activate the shader program and bind the vertex/normal/tex coords
GLES20.glUseProgram(shaderProgramID);
if (!mActivity.isExtendedTrackingActive())
{
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT,
false, 0, mTeapot.getVertices());
GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT,
false, 0, mTeapot.getNormals());
GLES20.glVertexAttribPointer(textureCoordHandle, 2,
GLES20.GL_FLOAT, false, 0, mTeapot.getTexCoords());
GLES20.glEnableVertexAttribArray(vertexHandle);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glEnableVertexAttribArray(textureCoordHandle);
// activate texture 0, bind it, and pass to shader
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
mTextures.get(textureIndex).mTextureID[0]);
GLES20.glUniform1i(texSampler2DHandle, 0);
// pass the model view matrix to the shader
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false,
modelViewProjection, 0);
// finally draw the teapot
GLES20.glDrawElements(GLES20.GL_TRIANGLES,
mTeapot.getNumObjectIndex(), GLES20.GL_UNSIGNED_SHORT,
mTeapot.getIndices());
// disable the enabled arrays
GLES20.glDisableVertexAttribArray(vertexHandle);
GLES20.glDisableVertexAttribArray(normalHandle);
GLES20.glDisableVertexAttribArray(textureCoordHandle);
} else
{
GLES20.glDisable(GLES20.GL_CULL_FACE);
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT,
false, 0, mBuildingsModel.getVertices());
GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT,
false, 0, mBuildingsModel.getNormals());
GLES20.glVertexAttribPointer(textureCoordHandle, 2,
GLES20.GL_FLOAT, false, 0, mBuildingsModel.getTexCoords());
GLES20.glEnableVertexAttribArray(vertexHandle);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glEnableVertexAttribArray(textureCoordHandle);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
mTextures.get(3).mTextureID[0]);
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false,
modelViewProjection, 0);
GLES20.glUniform1i(texSampler2DHandle, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0,
mBuildingsModel.getNumObjectVertex());
SampleUtils.checkGLError("Renderer DrawBuildings");
}
SampleUtils.checkGLError("Render Frame");
}
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
mRenderer.end();
}
private void printUserData(Trackable trackable)
{
String userData = (String) trackable.getUserData();
Log.d(LOGTAG, "UserData:Retreived User Data \"" + userData + "\"");
}
public void setTextures(Vector<Texture> textures)
{
mTextures = textures;
}
}
Buildings.txt is another file with a different set of vertex and index numbers.
I'm confused about the numbers in Buildings.txt and in setVerts() and setIndices().
Please help.
You shouldn't define a 3D object by hand the way the Teapot has been defined.
If I were you, I would use a library like RajawaliVuforia.
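That said, if you do want to build one by hand, here is a minimal sketch of what a drop-in replacement could look like: a flat textured quad exposing the same accessors that renderFrame() already calls on mTeapot (getVertices(), getNormals(), getTexCoords(), getIndices(), getNumObjectIndex()). The class name SimpleQuad and the buffer helpers are illustrative, not part of the Vuforia sample:
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class SimpleQuad
{
    // x,y,z per vertex: a 2x2 quad lying flat on the target
    private static final float[] VERTS = {
        -1f, -1f, 0f,    1f, -1f, 0f,    1f, 1f, 0f,    -1f, 1f, 0f };
    // one normal per vertex, all pointing out of the target plane
    private static final float[] NORMALS = {
        0f, 0f, 1f,    0f, 0f, 1f,    0f, 0f, 1f,    0f, 0f, 1f };
    // u,v per vertex
    private static final float[] TEX_COORDS = {
        0f, 0f,    1f, 0f,    1f, 1f,    0f, 1f };
    // two triangles, each entry an index into the arrays above
    private static final short[] INDICES = { 0, 1, 2,   0, 2, 3 };

    private final Buffer mVertices = fillFloats(VERTS);
    private final Buffer mNormals = fillFloats(NORMALS);
    private final Buffer mTexCoords = fillFloats(TEX_COORDS);
    private final Buffer mIndices = fillShorts(INDICES);

    public Buffer getVertices() { return mVertices; }
    public Buffer getNormals() { return mNormals; }
    public Buffer getTexCoords() { return mTexCoords; }
    public Buffer getIndices() { return mIndices; }
    public int getNumObjectIndex() { return INDICES.length; }

    // OpenGL ES needs direct, native-order buffers
    private static Buffer fillFloats(float[] data)
    {
        ByteBuffer bb = ByteBuffer.allocateDirect(data.length * 4);
        bb.order(ByteOrder.nativeOrder());
        bb.asFloatBuffer().put(data);
        return bb;
    }

    private static Buffer fillShorts(short[] data)
    {
        ByteBuffer bb = ByteBuffer.allocateDirect(data.length * 2);
        bb.order(ByteOrder.nativeOrder());
        bb.asShortBuffer().put(data);
        return bb;
    }
}
Because the accessors match, changing the mTeapot field's type (or introducing a small interface both classes implement) is enough for renderFrame() to draw the quad instead of the teapot. The numbers in setVerts()/setIndices() and in Buildings.txt are exactly these kinds of flat vertex and index lists, just far longer.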
I have checked the textures individually and they work fine.
Each texture is 128x128 pixels. I am working on a Motorola MILESTONE phone and have tested all the basic examples (from lines through cube maps), but got stuck while multitexturing a rectangle.
The shader works well if I bind only one of the textures, but the problem starts when both textures are bound for multitexturing. Here is the relevant renderer code:
private float touchX, touchY; // rotation angles in degrees, fed by a touch handler (declarations not shown in the original snippet)
private float[] myRotateMatrix = new float[16];
private float[] myViewMatrix = new float[16];
private float[] myProjectionMatrix = new float[16];
private float[] myMVPMatrix = new float[16];
private int aPositionLocation;
private int uMVPLocation;
private int aTextureCoordLocation;
private FloatBuffer rectangleVFB;
private ShortBuffer rectangleISB;
private FloatBuffer textureCFB;
private int program;
private int textureId1;
private int textureId2;
private int uSampler1Location;
private int uSampler2Location;
private void initShapes() {
float[] rectangleVFA = {-1,-1,0, 1,-1,0, 1,1,0, -1,1,0};
short[] rectangleISA = {0,1,2, 0,3,2};
float[] textureCFA = {0,0, 1,0, 1,1, 0,1};
ByteBuffer rectangleVBB = ByteBuffer.allocateDirect(rectangleVFA.length * 4);
rectangleVBB.order(ByteOrder.nativeOrder());
rectangleVFB = rectangleVBB.asFloatBuffer();
rectangleVFB.put(rectangleVFA);
rectangleVFB.position(0);
ByteBuffer rectangleIBB = ByteBuffer.allocateDirect(rectangleISA.length * 2);
rectangleIBB.order(ByteOrder.nativeOrder());
rectangleISB = rectangleIBB.asShortBuffer();
rectangleISB.put(rectangleISA);
rectangleISB.position(0);
ByteBuffer textureCBB = ByteBuffer.allocateDirect(textureCFA.length * 4);
textureCBB.order(ByteOrder.nativeOrder());
textureCFB = textureCBB.asFloatBuffer();
textureCFB.put(textureCFA);
textureCFB.position(0);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
checkError("glViewport");
float ratio = (float) width / height;
Matrix.setLookAtM(myViewMatrix, 0, 0, 0, 6, 0, 0, 0, 0, 1, 0);
Matrix.frustumM(myProjectionMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
aPositionLocation = GLES20.glGetAttribLocation(program, "aPosition");
checkError("glGetAttribLocation");
uMVPLocation = GLES20.glGetUniformLocation(program, "uMVP");
checkError("glGetUniformLocation");
aTextureCoordLocation = GLES20.glGetAttribLocation(program, "aTextureCoord");
checkError("glGetAttribLocation");
uSampler1Location = GLES20.glGetUniformLocation(program, "uSampler1");
checkError("glGetUniformLocation");
uSampler2Location = GLES20.glGetUniformLocation(program, "uSampler2");
checkError("glGetUniformLocation");
int[] textures = new int[2];
GLES20.glGenTextures(2, textures, 0);
checkError("glGenTextures");
textureId1 = textures[0];
textureId2 = textures[1];
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId1);
checkError("glBindTexture");
InputStream is1 = context.getResources().openRawResource(R.drawable.brick1);
Bitmap img1;
try {
img1 = BitmapFactory.decodeStream(is1);
}finally {
try {
is1.close();
}catch (IOException ioe) {
}
}
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img1, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId2);
checkError("glBindTexture");
InputStream is2 = context.getResources().openRawResource(R.drawable.brick2);
Bitmap img2;
try {
img2 = BitmapFactory.decodeStream(is2);
}finally {
try {
is2.close();
}catch (IOException ioe) {
}
}
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img2, 0);
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
}
public void onDrawFrame(GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
checkError("glClear");
GLES20.glUseProgram(program);
checkError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
checkError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId1);
checkError("glBindTexture");
GLES20.glUniform1i(uSampler1Location, 0);
checkError("glUniform1i");
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
checkError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId2);
checkError("glBindTexture");
GLES20.glUniform1i(uSampler2Location, 1);
checkError("glUniform1i");
Matrix.setIdentityM(myRotateMatrix, 0);
Matrix.rotateM(myRotateMatrix, 0, touchX, 0, 1, 0);
Matrix.rotateM(myRotateMatrix, 0, touchY, 1, 0, 0);
Matrix.multiplyMM(myMVPMatrix, 0, myViewMatrix, 0, myRotateMatrix, 0);
Matrix.multiplyMM(myMVPMatrix, 0, myProjectionMatrix, 0, myMVPMatrix, 0);
GLES20.glVertexAttribPointer(aPositionLocation, 3, GLES20.GL_FLOAT, false, 12, rectangleVFB);
checkError("glVertexAttribPointer");
GLES20.glEnableVertexAttribArray(aPositionLocation);
checkError("glEnableVertexAttribArray");
GLES20.glVertexAttribPointer(aTextureCoordLocation, 2, GLES20.GL_FLOAT, false, 8, textureCFB);
checkError("glVertexAttribPointer");
GLES20.glEnableVertexAttribArray(aTextureCoordLocation);
checkError("glEnableVertexAttribArray");
GLES20.glUniformMatrix4fv(uMVPLocation, 1, false, myMVPMatrix, 0);
checkError("glUniformMatrix4fv");
GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_SHORT, rectangleISB);
checkError("glDrawElements");
}
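For completeness, touchX and touchY above are assumed to be updated from a touch handler. A minimal sketch of one, living in the GLSurfaceView subclass (or wherever the renderer's fields are reachable): a horizontal drag maps to rotation about the Y axis and a vertical drag to rotation about the X axis, and the 0.5f sensitivity factor is arbitrary:
private float previousX, previousY;

@Override
public boolean onTouchEvent(MotionEvent event) {
    float x = event.getX();
    float y = event.getY();
    if (event.getAction() == MotionEvent.ACTION_MOVE) {
        // accumulate the drag distance into the rotation angles (degrees)
        touchX += (x - previousX) * 0.5f;
        touchY += (y - previousY) * 0.5f;
    }
    previousX = x;
    previousY = y;
    return true;
}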
In OpenGL ES 2.0, texture parameters belong to the texture object: after every call to glBindTexture, and before texImage2D, the parameters for that texture have to be specified separately.
So, if there are two textures (as in multitexturing), each one needs its own four glTexParameter calls: four for texture 1 and four for texture 2, eight in total. In the code above, the parameters are set only once, after the second bind, so texture 1 keeps the default GL_NEAREST_MIPMAP_LINEAR min filter; without mipmap levels that leaves it incomplete, which is why the shader breaks as soon as both textures are bound.
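Concretely, a minimal sketch of a per-texture loader that follows this rule; loadTexture is a hypothetical helper, and only the context field and resource IDs from the question are reused:
// Hypothetical helper: generate, bind, set parameters, then upload.
// The glTexParameter calls affect only the currently bound texture,
// which is why they must be repeated for every texture object.
private int loadTexture(int resourceId) {
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId);
    GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
    bitmap.recycle();
    return textures[0];
}

// In onSurfaceChanged, instead of the shared parameter block:
textureId1 = loadTexture(R.drawable.brick1);
textureId2 = loadTexture(R.drawable.brick2);
With GL_LINEAR as the min filter, neither texture needs mipmaps, so both are complete and sample correctly in the shader.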