Draw to framebuffer object, then blit to primary display (GLES 3.0) - android

I am attempting to create a framebuffer object, and then blit it to the primary display. The purpose of this to to cache a screen shot that I can blit to the display whenever I want without re-rendering the object. I am using Open GLES 3.0 with android NDK.
I created a frame buffer in the normal way.
GLuint framebuffer = NULL;
glGenFramebuffers( 1, &framebuffer );
glBindFramebuffer( GL_FRAMEBUFFER, framebuffer );
GLuint colorRenderBuffer = NULL;
glGenRenderbuffers( 1, &colorRenderbuffer );
glBindRenderbuffer( GL_RENDERBUFFER, colorRenderbuffer );
glRenderbufferStorage( GL_RENDERBUFFER, GL_RGBA8, engine->width, engine->height );
glFramebufferRenderbuffer( GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer );
GLuint depthRenderbuffer = NULL;
glGenRenderbuffers( 1, &depthRenderbuffer );
glBindRenderbuffer( GL_RENDERBUFFER, depthRenderbuffer );
glRenderbufferStorage( GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, engine->width, engine->height );
glFramebufferRenderbuffer( GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer );
GLenum status = glCheckFramebufferStatus( GL_FRAMEBUFFER );
if (status != GL_FRAMEBUFFER_COMPLETE) {
LOGI( "failed to make complete framebuffer object %d", status );
}
else {
LOGI( "Frame buffer is complete." );
}
This works without error. I then am able to edit the frame buffer, successfully.
glBindFramebuffer( GL_DRAW_FRAMEBUFFER, framebuffer );
glBindFramebuffer( GL_READ_FRAMEBUFFER, framebuffer );
// Just fill the screen with a color.
glClearColor( red, green, blue, 255 );
glClear( GL_COLOR_BUFFER_BIT );
read_pixels( ); //wrapper for glReadPixels. Shows that all pixels in the frame buffer object have the desired color.
However, attempting to blit this to the main draw buffer fails.
glBindFramebuffer( GL_DRAW_FRAMEBUFFER, 0 );
glBindFramebuffer( GL_READ_FRAMEBUFFER, framebuffer );
glReadBuffer( GL_COLOR_ATTACHMENT0 );
glBlitFramebuffer( 0, 0, engine->width, engine->height, 0, 0, engine->width, engine->height, GL_COLOR_BUFFER_BIT, GL_LINEAR );
LOGI("GL error after blit: %d", glGetError()); //no error reported
glBindFramebuffer( GL_DRAW_FRAMEBUFFER, 0 );
glBindFramebuffer( GL_READ_FRAMEBUFFER, 0 );
read_pixels( engine ); //gives zero'd buffer (wrong result)
eglSwapBuffers( engine->display, engine->surface ); //shows a black screen, unless I use glClearColor directly on the primary display buffer.

I was able to get around the problem by using a texture (instead of a render buffer) frame buffer object, drawing to that frame buffer, and then drawing that frame buffer texture to the default buffer. This isn't really an answer, but a work around, so I'd still appreciate it if anyone has an answer to the original question.
For reference, I include the key code below:
GLuint LoadFrameBufferTexture( ESContext *context, int width, int height ) {
glGenFramebuffers( 1, &(context->framebuffer) );
glBindFramebuffer( GL_FRAMEBUFFER, context->framebuffer );
// create the texture
GLuint texture;
glGenTextures( 1, &texture );
glBindTexture( GL_TEXTURE_2D, texture );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL );
glFramebufferTexture2D( GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0 );
GLuint depthRenderbuffer;
glGenRenderbuffers( 1, &depthRenderbuffer );
glBindRenderbuffer( GL_RENDERBUFFER, depthRenderbuffer );
glRenderbufferStorage( GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height );
glFramebufferRenderbuffer( GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer );
GLenum status = glCheckFramebufferStatus( GL_FRAMEBUFFER );
if (status != GL_FRAMEBUFFER_COMPLETE) {
LOGI("failed to make complete framebuffer object ");
} else {
LOGI("Frame buffer is complete.");
}
glBindFramebuffer( GL_FRAMEBUFFER, 0 );
return texture;
}
void draw_frame(int red, int green, int blue) {
glBindFramebuffer( GL_FRAMEBUFFER, esContext->framebuffer );
glClearColor( red, green, blue, 255 );
glClear( GL_COLOR_BUFFER_BIT );
glBindFramebuffer(GL_FRAMEBUFFER, 0 );
DrawFrame( esContext );
eglSwapBuffers( engine->display, engine->surface );
}
void DrawFrame( ESContext *esContext ) {
UserData *userData = (UserData *)esContext->userData;
GLfloat vVertices[] = {-1.0f, 1.0f, 0.0f, 0.0f, 1.0f, -1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, -1.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, 1.0f };
GLushort indices[] = { 0, 1, 2, 0, 2, 3 };
glViewport( 0, 0, esContext->width, esContext->height );
glUseProgram( userData->programObject );
glVertexAttribPointer( 0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof( GLfloat ), vVertices );
glVertexAttribPointer( 1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof( GLfloat ), &vVertices[3] );
glEnableVertexAttribArray( 0 );
glEnableVertexAttribArray( 1 );
glActiveTexture( GL_TEXTURE0 );
glBindTexture( GL_TEXTURE_2D, userData->frameTexId );
glUniform1i( userData->baseMapLoc, 0 );
glDrawElements( GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices );
}

Related

How can i render captured frames from ndk into encoder input surface?

i'm trying to capture with camera and record frames as mp4 file in android Oreo.
I implemented the capture progress using NDK.
After that, i did create preview surface(GLSurfaceview) and get input surface from encoder(Mediacodec).
However, I don't know how to render captured frames to encoder's input surface.
/* This is onDrawFrame of Renderer in GLSurfaceView instance
* The native function onDrawFrame(texMatrix) is called */
#Override
public void onDrawFrame(GL10 gl) {
synchronized (lock) {
if ( frameAvailable ) {
Log.d("yoo", "Frame availablee...updating");
surfaceTexture.updateTexImage();
surfaceTexture.getTransformMatrix(texMatrix);
frameAvailable = false;
}
}
onDrawFrame(texMatrix);
}
/* This is drawing function in NDK part that is executed when onDrawFrame(texMatrix) is called */
static void drawFrame ( JNIEnv* env, jfloatArray texMatArray )
{
LOGD("DrawFrame called");
glClear ( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT );
glClearColor ( 0, 0, 0, 1 );
glUseProgram ( prog );
// Configure main transformations
float mvp[] = {
1.0f, 0, 0, 0,
0, 1.0f, 0, 0,
0, 0, 1.0f, 0,
0, 0, 0, 1.0f
};
// give mvp data to shader
glUniformMatrix4fv ( mvpMatrix, 1, false, mvp );
// Prepare texture for drawing
glActiveTexture ( GL_TEXTURE0 );
glBindTexture ( GL_TEXTURE_EXTERNAL_OES, textureId );
glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
// Pass SurfaceTexture transformations to shader
float* tm = env->GetFloatArrayElements ( texMatArray, 0 );
glUniformMatrix4fv ( texMatrix, 1, false, tm );
env->ReleaseFloatArrayElements ( texMatArray, tm, 0 );
// Set the SurfaceTexture sampler
glUniform1i ( texSampler, 0 );
// specify color to mix with camera frames
float c[] = { 1, 1, 1, 1 };
glUniform4fv ( color, 1, (GLfloat*)c );
// Size of the window is used in fragment shader
// to split the window
float sz[2] = {0};
sz[0] = width/2;
sz[1] = height/2;
glUniform2fv ( size, 1, (GLfloat*)sz );
// Set up vertices/indices and draw
glBindBuffer ( GL_ARRAY_BUFFER, buf[0] );
glBindBuffer ( GL_ELEMENT_ARRAY_BUFFER, buf[1] );
glEnableVertexAttribArray ( vtxPosAttrib ) ;
glVertexAttribPointer ( vtxPosAttrib, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 5, (void*)0 );
glEnableVertexAttribArray ( uvsAttrib );
glVertexAttribPointer ( uvsAttrib, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 5, (void*)(3 * sizeof(float) ) );
glViewport ( 0, 0, width, height );
glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
glDrawElements ( GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0 );
}
Actualy, i tried to create the encoder instance of Mediacodec and receive the input surface of that and render the captured frames. (I refered grafika's exmaple.)
However, i don't know the way how to render captured frames after rendering captured frames into GLSurfaceView.
-EDIT-
I tried to create EGLSurface using input surface of encoder but the below error message is showed.
android.opengl.EGL14.eglCreateWindowSurface(mEGLCore.getDisplay(), mEGLCore.getConfig(), mEncoderSurface, surfaceAttribs, 0);
/* error message */
E/BufferQueueProducer: [GraphicBufferSource] connect: BufferQueue has been abandoned
E/libEGL: eglCreateWindowSurface: native_window_api_connect (win=0x92b7f808) failed (0xffffffed) (already connected to another API?)
E/libEGL: eglCreateWindowSurface:693 error 3003 (EGL_BAD_ALLOC)
Any reply will be appreciated.

I receive INVALID OPERATION when I use glBlitFramebuffer to implement MSAA

Perhaps I have some stupid problems. I'll be appreciate if someone could reply them.
All the problems are based on Android environment and OpenGL ES.
How to verified whether I has opened the MSAA or not ? If I draw some GL_POINTS with points size 50, there are some small squares. If I enabled 4x MSAA, can the small squares become round points ?
I tried my best to enable MSAA with FBO and BlitFBO. But it draw nothing and there is an error INVALID_OPERATION after glBlitFramebuffer() calling.
Here is the complete projects I mentioed above:https://github.com/Enoch-Liu/GL
And the following is the key codes:
void Renderer::MultisampleAntiAliasing() {
glGenRenderbuffers(1, &m_MSColor);
glBindRenderbuffer(GL_RENDERBUFFER, m_MSColor);
glRenderbufferStorageMultisample(GL_RENDERBUFFER, 4, GL_RGBA8, m_width, m_height);
checkGLError("GenMSColorBuffer");
glGenFramebuffers(1, &m_MSFBO);
glBindFramebuffer(GL_FRAMEBUFFER, m_MSFBO);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, m_MSColor);
checkGLError("FboRbo,COLORATTACHMENT");
glGenRenderbuffers(1, &m_MSDepth);
glBindRenderbuffer(GL_RENDERBUFFER, m_MSDepth);
glRenderbufferStorageMultisample(GL_RENDERBUFFER, 4, GL_DEPTH_COMPONENT16, m_width, m_height);
checkGLError("GenDepthBuffer");
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, m_MSDepth);
checkGLError("DepthBuffer,Renderbuffer");
GLenum drawBufs[] = {GL_COLOR_ATTACHMENT0};
glDrawBuffers(1, drawBufs);
checkGLError("DrawBuffer");
if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
LOG_ERROR("failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
}
}
void Renderer::drawFrame() {
//LOG_INFO("drawFrame %d x %d", width, height);
static float r=0.9f;
static float g=0.2f;
static float b=0.2f;
LOG_INFO("xxx %d, %d", m_width,m_height);
if (OPENMSAA)
{
glBindFramebuffer(GL_FRAMEBUFFER, m_MSFBO);
glBindRenderbuffer(GL_RENDERBUFFER, m_MSColor);
checkGLError("BindTwoBuffers");
}
glViewport(0,0,m_width,m_height);
glScissor(0,0,m_width,m_height);
glClearColor(r, g, b, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDisable(GL_DEPTH_TEST);
const GLfloat landscapeOrientationMatrix[16] = {
1.0f, 0.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f, 0.0f,
0.0f, 0.0f, 0.0f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f };
const GLfloat color[4] = {
1.0f, 0.0f, 0.0f, 1.0f
};
glUseProgram( m_program );
glUniformMatrix4fv(m_uMvp, 1, GL_FALSE, landscapeOrientationMatrix);
glUniform4fv(m_uColor, 1, color);
m_p = glGetAttribLocation(m_program, "vPosition");
m_p1 = glGetAttribLocation(m_program, "vPosition1");
glEnableVertexAttribArray( m_p );
glVertexAttribPointer( m_p , 3, GL_FLOAT, false, 3 * sizeof( float ), squareCoords);
glDrawArrays(GL_POINTS, 0, 4);
glDisableVertexAttribArray( m_p );
glFlush();
checkGLError("Before Blit");
if (OPENMSAA)
{
glBindFramebuffer(GL_READ_FRAMEBUFFER, m_MSFBO);
checkGLError("BindReadBuffer");
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
checkGLError("BindFramebuffer");
glBlitFramebuffer(0, 0, m_width, m_height, 0, 0, m_width, m_height, GL_COLOR_BUFFER_BIT, GL_NEAREST);
checkGLError("BlitFramebufferColor");
glBlitFramebuffer(0, 0, m_width, m_height, 0, 0, m_width, m_height, GL_DEPTH_BUFFER_BIT, GL_NEAREST);
checkGLError("BlitFramebufferDepth");
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
}
The framebuffer is complete.
The internal format of the depth buffers have to match: https://www.opengl.org/discussion_boards/showthread.php/173275-Alternative-to-glBlitFramebuffer%28%29
Looking at your github project you are not configuring a depth buffer at all. From your project:
const EGLint attribs[] = {
// EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_ALPHA_SIZE, 8,
EGL_SAMPLE_BUFFERS, 1,
EGL_SAMPLES, 4,
EGL_NONE
};

Freetype Library gives triple textures and weird symbols

Currently working on an android ndk/ opengl project and I'm trying to use freetype as my font rendering library, but I keep getting a weird error when I render text to the screen. Here is what it is showing for a few sample texts: (note: the bottom one is supposed to say "This is")
Setup:
void TextRenderer::SetupGlyphs(std::string fontPath, int size){
__android_log_print(ANDROID_LOG_INFO, "SetupGlyphs", "Font location: %s", fontPath.c_str());
if(shadersInitialized == 0)
CreateShader();
glUseProgram(this->shader);
// FreeType
FT_Library ft;
if (FT_Init_FreeType(&ft))
__android_log_print(ANDROID_LOG_INFO, "SetupGlyphs", "ERROR::FREETYPE: Could not init FreeType Library.");
FT_Face face;
if (FT_New_Face(ft, fontPath.c_str(), 0, &face))
__android_log_print(ANDROID_LOG_INFO, "SetupGlyphs", "ERROR::FREETYPE: Failed to load font: %s", fontPath.c_str());
FT_Set_Pixel_Sizes(face, 0, size);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
for (GLubyte c = 0; c < 128; c++){
if(FT_Load_Char(face, c, FT_LOAD_RENDER)){
__android_log_print(ANDROID_LOG_INFO, "SetupGlyphs", "ERROR::FREETYPE: Failed to load Glyph");
continue;
}
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(
GL_TEXTURE_2D,
0,
GL_RGB,
face->glyph->bitmap.width,
face->glyph->bitmap.rows,
0,
GL_RGB,
GL_UNSIGNED_BYTE,
face->glyph->bitmap.buffer
);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
Character character = {
texture,
ivec2(face->glyph->bitmap.width, face->glyph->bitmap.rows),
ivec2(face->glyph->bitmap_left, face->glyph->bitmap_top),
static_cast<GLuint>(face->glyph->advance.x)
};
characters.insert(std::pair<GLchar, Character>(c, character));
}
glBindTexture(GL_TEXTURE_2D, 0);
FT_Done_Face(face);
FT_Done_FreeType(ft);
}
Rendering:
void TextRenderer::RenderTexts()
{
if(shadersInitialized == 0)
CreateShader();
// Activate corresponding render state
glUseProgram(this->shader);
GLuint projectionLocation = glGetUniformLocation(this->shader, "projection");
glUniformMatrix4fv(projectionLocation, 1, GL_FALSE, projectionMatrix);
for (int i=0; i<projects.size(); i++) {
ProjectLabel project = projects.at(i);
glUniform3f(glGetUniformLocation(this->shader, "textColor"), project.textColor.x, project.textColor.y, project.textColor.z);
glActiveTexture(GL_TEXTURE0);
GLuint vertexBuffer;
glGenBuffers(1, &vertexBuffer);
/* Set up the VBO for our vertex data */
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, 0);
// Iterate through all characters
std::string::const_iterator c;
GLuint x = project.x;
for (c = project.text.begin(); c != project.text.end(); c++)
{
Character ch = characters[*c];
GLfloat xpos = x + ch.Bearing.x;
GLfloat ypos = project.y - (ch.Size.y - ch.Bearing.y);
GLfloat w = ch.Size.x;
GLfloat h = ch.Size.y;
// Update VBO for each character
GLfloat vertices[6*4] = {
xpos, ypos + h, 0.0, 0.0 ,
xpos, ypos, 0.0, 1.0 ,
xpos + w, ypos, 1.0, 1.0 ,
xpos, ypos + h, 0.0, 0.0 ,
xpos + w, ypos, 1.0, 1.0 ,
xpos + w, ypos + h, 1.0, 0.0
};
glBindTexture(GL_TEXTURE_2D, ch.TextureID);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_DYNAMIC_DRAW);
glDrawArrays(GL_TRIANGLES, 0, 6);
x += (ch.Advance / 64); // Bitshift by 6 to get value in pixels (2^6 = 64 (divide amount of 1/64th pixels by 64 to get amount of pixels))
}
glDisableVertexAttribArray(0);
}
glBindTexture(GL_TEXTURE_2D, 0);
}
So, to anyone that may find this post, scowering the web for hours on end, trying to figure out why everything looks funky, I found the answer. Freetype is not aligned (at least not in my project) through GL_RGB, instead is aligned through GL_LUMINANCE. By changing such things in glTexImage2D I solved on the above issues, as well as SIGABRT errors I was also getting.
TLDR;
glTexImage2D(
GL_TEXTURE_2D,
0,
GL_RGB, => GL_LUMINANCE
face->glyph->bitmap.width,
face->glyph->bitmap.rows,
0,
GL_RGB, => GL_LUMINANCE
GL_UNSIGNED_BYTE,
face->glyph->bitmap.buffer
);

Texture won't appear using native code to load it with opengl es on android

I'm trying to apply a texture to an object in opengl es from the native side and I have no idea why it isn't showing up. I have a couple random objects drawn on the screen, and they're all visible and everything. I applied color to some shapes using glColor4f and that works fine. I'm trying to use a texture on the last object that gets drawn but it ends up being the same color as the one previous.
I was originally loading the texture from a png, but I decided to simplify things by loading it from a file that contains raw RGB data. It's 16 pixels x 16 pixels, and I've tried sizes up to 512 by 512 with the same result.
Here's how I'm initializing everything:
bool Activity::_initGL () {
const EGLint attribs[] = {
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_NONE
};
EGLint dummy, format;
EGLint numConfigs;
EGLConfig config;
display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
eglInitialize(display, 0, 0);
eglChooseConfig(display, attribs, &config, 1, &numConfigs);
eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);
ANativeWindow_setBuffersGeometry(app->window, 0, 0, format);
surface = eglCreateWindowSurface(display, config, app->window, NULL);
context = eglCreateContext(display, config, NULL, NULL);
if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
LOGE("Unable to eglMakeCurrent");
return false;
}
eglQuerySurface(display, surface, EGL_WIDTH, &width);
eglQuerySurface(display, surface, EGL_HEIGHT, &height);
glViewport(0,0, width, height);
}
And then I enable the necessary things and try to create the texture:
void postInit () {
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
glDisable( GL_BLEND );
glDisable( GL_LIGHTING );
// glEnable(GL_CULL_FACE);
glEnable( GL_TEXTURE_2D );
glShadeModel(GL_SMOOTH);
glDisable(GL_DEPTH_TEST);
glClearColor(0,0,0,1);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
// glTexEnvx( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE );
glMatrixMode( GL_MODELVIEW );
GLuint texIDarray[1];
glGenTextures( 1, texIDarray );
glActiveTexture( GL_TEXTURE0 );
glBindTexture( GL_TEXTURE_2D, texIDarray[0] );
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, (GLsizei)16, (GLsizei)16, 0, GL_RGB, GL_UNSIGNED_BYTE, protData);
}
And here's where the texture gets drawn, someday:
void drawImpl () {
glClear(GL_COLOR_BUFFER_BIT);
glLoadIdentity();
// glDisable(GL_TEXTURE_2D);
// glTexEnvx( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE );
// glEnableClientState(GL_VERTEX_ARRAY);
// glViewport(0, 0, wid, hei);
#define fX(x) ((int)(x * (1 << 16)))
static int verts[6] = {
0,0,
65536,0,
0,30000
};
glVertexPointer(2, GL_FIXED, 0, verts);
// glColor4f(1,0,1,1);
glDrawArrays(GL_TRIANGLES, 0, 3);
static int poo[12] = {
40000,-5000,
40000,-30000,
60000,-5000,
60000,-5000,
40000,-30000,
60000,-30000
};
glVertexPointer(2, GL_FIXED, 0, poo);
// glColor4f(1,1,1,1);
glDrawArrays(GL_TRIANGLES, 0, 6);
static int pee[12] = {
40000, 5000,
60000, 5000,
60000,30000,
40000, 5000,
60000,30000,
40000,30000
};
glVertexPointer(2, GL_FIXED, 0, pee);
// glColor4f(1,0,1,1);
glDrawArrays(GL_TRIANGLES, 0, 6);
glEnable(GL_TEXTURE_2D);
static int squareVerts[12] = {
0,0,
fX(1),0,
0,fX(1),
0,fX(1),
fX(1),0,
fX(1),fX(1)
};
static int texCoords[12] = {
0,0,
fX(1),0,
0,fX(1),
0,fX(1),
fX(1),0,
fX(1),fX(1)
};
//glTranslatef( (float)-.25, (float)-.5, (float)0);
// glColor4f(0,0,0,0);
// glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glActiveTexture( GL_TEXTURE0 );
// glBindTexture(GL_TEXTURE_2D, texID);
glTexEnvx( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE );
glTexParameterx( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT );
glTexParameterx( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT );
glVertexPointer(2, GL_FIXED, 0, squareVerts);
glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
// glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnable( GL_TEXTURE_2D );
glDrawArrays(GL_TRIANGLES, 0, 6);
// glDisableClientState(GL_VERTEX_ARRAY);
// glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
I deliberately left some of the commented out things to show the other things I have tried doing.
I am totally at a dead end with this right now. If anyone has any suggestions or anything it would make me super happy, and that is good.
Seems like you messed up your texture coordinates.. They should be between 0 and 1, not between 0 and 1<<16. Another thing is your "glColor4f" will also affect your texture by modulating it and for normal texture draw it needs to be set to (1,1,1,1).

glTranslatef/2D viewport setup issue

When i try glTranslatef(1,-1,0); it pushes my quad's lefthand corner to the center of the screen instead of what im trying to do, moving it 1 pixel to the left and 1 down. Im pretty sure this is because my viewport isnt set correctly but im unsure why. pic, view setup code and drawing code below.
setupView:
-(void)setupView:(GLView*)view
{
printf("setup view");
glClearColor(0,1,1, 1);
// Enable Smooth Shading, default not really needed.
glShadeModel(GL_SMOOTH);
// Depth buffer setup.
glClearDepthf(1.0f);
//enable textures.
glEnable(GL_TEXTURE_2D);
glHint(GL_PERSPECTIVE_CORRECTION_HINT,GL_FASTEST);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
CGRect rect = view.bounds;
glOrthof( 0,rect.size.width,-rect.size.height, 0, -1, 1 ) ;
glViewport(0, 0,rect.size.width,rect.size.height);
glMatrixMode(GL_PROJECTION);
// Bind the number of textures we need, in this case one.
glGenTextures(1, &texture[0]);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_GENERATE_MIPMAP,GL_TRUE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glLoadIdentity();
NSString *path = [[NSBundle mainBundle] pathForResource:#"cm2" ofType:#"jpg"];
NSData *texData = [[NSData alloc] initWithContentsOfFile:path];
UIImage *image = [[UIImage alloc] initWithData:texData];
if (image == nil)
NSLog(#"Do real error checking here");
GLuint width = CGImageGetWidth(image.CGImage);
GLuint height = CGImageGetHeight(image.CGImage);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
void *imageData = malloc( height * width * 4 );
CGContextRef context = CGBitmapContextCreate( imageData, width, height, 8, 4 * width, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big );
// Flip the Y-axis
CGContextTranslateCTM (context, 0, height);
CGContextScaleCTM (context, 1.0, -1.0);
CGColorSpaceRelease( colorSpace );
CGContextClearRect( context, CGRectMake( 0, 0, width, height ) );
CGContextDrawImage( context, CGRectMake( 0, 0, width, height ), image.CGImage );
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
CGContextRelease(context);
free(imageData);
[image release];
[texData release];
}
drawView:
- (void)drawView:(GLView*)view
{
//draw calls
glColor4f(1,1,1,1);
glClear(GL_COLOR_BUFFER_BIT);
glLoadIdentity();
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
static const Vertex3D vertices[] = {
{0, 0, 1}, //TL
{ 1024.0f,0, 1}, //TR
{0, -1024.0f, 1}, //BL
{ 1024.0f, -1024.0f, 1} //BR
};
static const GLfloat texCoords[] = {
0.0, 1.0,
1.0, 1.0,
0.0, 0.0,
1.0, 0.0
};
glTranslatef(1,-1, 1);
glScalef(scale,scale,1);
glBindTexture(GL_TEXTURE_2D, texture[0]);
glVertexPointer(3, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
You need to first set up your viewport, then set the matrix mode to projection, then call glOrtho, like so:
glViewPort (0, 0, width, height);
glMatrixMode (GL_PROJECTION);
glLoadIdentity ();
glOrtho (0, width, 0, height, -1, 1); // Usually this is -width/2,width/2,-height/2,height/2
Also, you probably want to set the matrix mode to ModelView after that to draw your model.

Categories

Resources