glGenerateMipmap IMGSRV error - android

In my app I am trying to use textures, but on my Galaxy Nexus I get the errors
:0: SGXQueueTransfer: all paths failed
:0: HardwareMipGen: Failed to generate texture mipmap levels (error=3)
I don't get these errors on my EVO 4G.
Here is the relevant loading code.
private static int load(Context context, int resID) {
    Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resID);
    int[] texts = new int[1];
    GLES20.glGenTextures(1, texts, 0);
    int texID = texts[0];
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texID);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,
            GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,
            GLES20.GL_REPEAT);
    GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
    return texID;
}
private static int loadWithMipmap(Context context, int resID) {
    int texID = load(context, resID);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
    return texID;
}
Any ideas what is going on and how I can fix it?
EDIT: There is only one image causing the error and it is a 1024x2048 png.

I saw this too, and it went away once I squashed the image to be square. OpenGL doesn't report an error either way.

Frickin' scary. Tested on a Galaxy Nexus 7.
You need to do the following:
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
        GLES20.GL_TEXTURE_MIN_FILTER,
        GLES20.GL_LINEAR_MIPMAP_NEAREST);
this.context.checkError("GL_TEXTURE_MIN_FILTER");
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
        GLES20.GL_TEXTURE_MAG_FILTER,
        GLES20.GL_LINEAR);
Strictly speaking, GL_LINEAR_MIPMAP_NEAREST isn't a valid MAG_FILTER (only GL_NEAREST and GL_LINEAR are), and the Nexus 7 enforces that. It mipmaps beautifully with the change made.
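Applying that fix to the loadWithMipmap from the question would look roughly like this (a sketch, not tested on the hardware in question; the only changes are the mag filter and the comments):
private static int loadWithMipmap(Context context, int resID) {
    // load() binds the texture and uploads the bitmap via GLUtils.texImage2D
    int texID = load(context, resID);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); // mipmap modes are not valid mag filters
    GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D); // generate after the level-0 upload
    return texID;
}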

Wild guess, as I don't have access to the hardware you're mentioning, but does the error occur with a texture that has non-power-of-two dimensions?
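For what it's worth, GLES 2.0 only defines glGenerateMipmap for textures whose level-0 width and height are powers of two (otherwise it raises GL_INVALID_OPERATION), so a guard along these lines can rule that case out (a sketch; isPowerOfTwo is a made-up helper):
// GLES 2.0 requires power-of-two level-0 dimensions for glGenerateMipmap
// (unless GL_OES_texture_npot is available).
static boolean isPowerOfTwo(int n) {
    return n > 0 && (n & (n - 1)) == 0;
}

if (isPowerOfTwo(bitmap.getWidth()) && isPowerOfTwo(bitmap.getHeight())) {
    GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
} else {
    // fall back to plain linear filtering without mipmaps
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
}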

Related

Android MediaPlayer: render frame from external videofile to texture over background camera frame

I'm developing a simple app: it should detect an object in an active AR session (Vuforia UserDefinedTarget), attach a surface to that object, and start rendering frames from a video file onto a texture. But instead of the video frame I see the camera frame in this texture. I'm not sure whether this is a Vuforia-specific issue or just a wrong MediaPlayer setup.
Here is the MediaPlayer init:
videoPlayer.setSurface(mRenderer?.planeSurface)
videoPlayer.isLooping = true
val afd: AssetFileDescriptor
try {
    afd = assets.openFd("test.mp4")
    videoPlayer.setDataSource(afd.fileDescriptor, afd.startOffset, afd.length)
    videoPlayer.prepare()
    videoPlayer.start()
} catch (e: IOException) {
    e.printStackTrace()
}
Texture creation:
GLES20.glGenTextures(1, planeSurfaceTextureId, 0)
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, planeSurfaceTextureId[0])
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
planeSurfaceTexture = SurfaceTexture(planeSurfaceTextureId[0])
planeSurfaceTexture.setOnFrameAvailableListener(mActivity)
planeSurface = Surface(planeSurfaceTexture)
And here is the rendering:
GLES20.glGenTextures(1, planeSurfaceTextureId, 0)
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, planeSurfaceTextureId[0])
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
planeSurfaceTexture = SurfaceTexture(planeSurfaceTextureId[0])
planeSurfaceTexture.setOnFrameAvailableListener(mActivity)
planeSurface = Surface(planeSurfaceTexture)
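For reference, with a SurfaceTexture the decoded video frames only reach the GL_TEXTURE_EXTERNAL_OES texture if updateTexImage() is called on the GL thread each time a new frame arrives, before the draw. A minimal sketch (in Java, for consistency with the other snippets here; planeSurfaceTexture and planeSurfaceTextureId come from the code above, while the frameAvailable flag and drawPlane() are assumed placeholders):
// onFrameAvailable runs on an arbitrary thread, so just record that a frame is pending.
private volatile boolean frameAvailable = false;

@Override
public void onFrameAvailable(SurfaceTexture st) {
    frameAvailable = true;
}

// Called on the GL thread once per rendered frame.
void updateVideoFrame() {
    if (frameAvailable) {
        planeSurfaceTexture.updateTexImage(); // latch the newest MediaPlayer frame
        frameAvailable = false;
    }
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, planeSurfaceTextureId[0]);
    drawPlane(); // hypothetical draw call using a samplerExternalOES shader
}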
I'm new to Android, so it might be something simple; please help me figure it out :) Perhaps it could even be done without GLES.
Thanks in advance!

Android image to video

How can I load an image as a texture and render it through GLES in order to use the MediaCodec Surface input approach?
I started from the EncodeAndMuxTest example.
Thank you in advance.
Look at the samples from Grafika; they will give you an idea of how to do it.
Here is code to load a bitmap into a texture:
int mTextureId = -1;

public void loadTexture(Bitmap bitmap) {
    if (mTextureId == -1) {
        // First call: create the texture and set its parameters.
        int[] textureHandle = new int[1];
        GLES20.glGenTextures(1, textureHandle, 0);
        mTextureId = textureHandle[0];
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);
    } else {
        // Texture already exists, just rebind it before uploading new pixels.
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId);
    }
    GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
}
And here is how you can render it to the input surface:
// Create a FullFrameRect (a class from grafika)
mInputSurface.makeCurrent();
mFullFrameRect = new FullFrameRect(new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_2D));
....
// And when you want to draw it
mInputSurface.makeCurrent(); // if it's not already current
loadTexture(bitmap);
GLES20.glViewport(0, 0, viewWidth, viewHeight);
mFullFrameRect.drawFrame(mTextureId, GlUtil.IDENTITY_MATRIX);
mInputSurface.setPresentationTime(pts);
mInputSurface.swapBuffers();
FullFrameRect, Texture2dProgram, and GlUtil are classes from Grafika, so you should copy them or implement similar functionality yourself.
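To turn that into an actual video, the per-frame draw usually sits inside a loop that also drains the encoder, roughly as in the EncodeAndMuxTest sample. A sketch under those assumptions (numFrames, frameRate, viewWidth, viewHeight, and the drainEncoder() helper are placeholders borrowed from that example's structure, not from the answer above):
final long ONE_BILLION = 1000000000L;
for (int frame = 0; frame < numFrames; frame++) {
    drainEncoder(false);                  // pull any pending output from the MediaCodec
    mInputSurface.makeCurrent();
    loadTexture(bitmap);                  // re-upload (or reuse) the still image
    GLES20.glViewport(0, 0, viewWidth, viewHeight);
    mFullFrameRect.drawFrame(mTextureId, GlUtil.IDENTITY_MATRIX);
    mInputSurface.setPresentationTime(frame * ONE_BILLION / frameRate); // nanoseconds
    mInputSurface.swapBuffers();
}
drainEncoder(true);                       // signal end of stream and flush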

Use more than one SurfaceTexture on a shader

Just a quick question: I'm using a shader that takes a video as a GL_TEXTURE_EXTERNAL_OES from a SurfaceTexture, but now I want to modify it to accept two videos as two GL_TEXTURE_EXTERNAL_OES textures from two different SurfaceTextures.
Is there a way to do this? I'm following this code as an example: https://github.com/mstorsjo/android-decodeencodetest/tree/master/src/com/example/decodeencodetest and I'm trying to do:
int[] textures = new int[2];
GLES20.glGenTextures(2, textures, 0);
mTexture1ID = textures[0];
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTexture1ID);
checkGlError("glBindTexture mTexture1ID");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
        GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
        GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
        GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
        GLES20.GL_CLAMP_TO_EDGE);
checkGlError("glTexParameter");
mTexture2ID = textures[1];
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTexture2ID);
checkGlError("glBindTexture mTexture2ID");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
        GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
        GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
        GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
        GLES20.GL_CLAMP_TO_EDGE);
checkGlError("glTexParameter");
But I haven't been able to make it work; I didn't have much hope in this code anyway xD. I'd appreciate any kind of help.
Alright, for anyone who arrives at this question, refer to this:
https://software.intel.com/sites/landingpage/mmsf/documentation/mmsf_android_example3.html
It is possible to use more than one external texture in a shader.
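A minimal sketch of what that looks like in practice: two samplerExternalOES uniforms in the fragment shader, each pointed at its own texture unit (mProgram, uTexture1/uTexture2 and the blend are assumed names, not taken from the linked example):
// Fragment shader with two external samplers.
private static final String FRAGMENT_SHADER =
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision mediump float;\n" +
        "varying vec2 vTextureCoord;\n" +
        "uniform samplerExternalOES uTexture1;\n" +
        "uniform samplerExternalOES uTexture2;\n" +
        "void main() {\n" +
        "    vec4 a = texture2D(uTexture1, vTextureCoord);\n" +
        "    vec4 b = texture2D(uTexture2, vTextureCoord);\n" +
        "    gl_FragColor = mix(a, b, 0.5);\n" + // e.g. blend the two videos
        "}\n";

// After linking the program, point each sampler at its texture unit once:
GLES20.glUseProgram(mProgram);
GLES20.glUniform1i(GLES20.glGetUniformLocation(mProgram, "uTexture1"), 0);
GLES20.glUniform1i(GLES20.glGetUniformLocation(mProgram, "uTexture2"), 1);

// Per frame, bind each external texture to its unit before drawing:
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTexture1ID);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTexture2ID);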

Opengl - glGenTextures Error 1280

I'm implementing render-to-texture using FBOs in Android. As a first step I'm creating a texture, but I get error 1280 when calling the GLES20.glGenTextures method.
The texture creator function is below:
public int CreateTexture(int w, int h) {
    final int[] textureId = new int[1];
    int i;
    // generate one texture name into textureId
    GLES20.glGenTextures(1, textureId, 0);
    i = GLES20.glGetError();
    // bind the texture so it is ready to be used
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId[0]);
    // create the final texture
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, w, h, 0, GLES20.GL_RGBA, GLES20.GL_FLOAT, null);
    // the null in the last argument above means: allocate storage for the texture, but don't fill it with anything yet
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    if (i != 0) {
        Log.d("ERROR", "ERROR happened: " + i);
        return i;
    }
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    return textureId[0];
}
When I call this method it returns error 1280.
You got a GL_INVALID_ENUM error, which means you passed an unsupported enum value to a GL function. The error is not in the CreateTexture function itself; glGetError returns the oldest unread error, so it was most likely raised by a call made before CreateTexture, or in your OpenGL init code.
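One way to confirm that is to drain the error queue right before the call you actually want to check, since anything left over from earlier calls shows up first. A quick sketch:
// Empty any stale errors left over from earlier GL calls.
int pending;
while ((pending = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
    Log.d("GL", "stale error before glGenTextures: 0x" + Integer.toHexString(pending));
}
GLES20.glGenTextures(1, textureId, 0);
// Now this reflects glGenTextures itself (expected: GL_NO_ERROR).
int err = GLES20.glGetError();
Log.d("GL", "glGenTextures error: 0x" + Integer.toHexString(err));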

OpenGL Textures have distorted colors on some android devices

I am loading images as textures into a GLSurfaceView.
The resulting textures look perfectly fine on some devices; on others they appear completely distorted.
This is what it looks like on a Samsung Galaxy Nexus (screen density 2.0):
The same images on a Motorola (screen density 1.5):
Here is my loading code:
FutureTask<Integer> futureTask = new FutureTask<Integer>(new Callable<Integer>() {
    @Override
    public Integer call() throws Exception {
        // Generate texture
        int[] texturenames = new int[1];
        GLES20.glGenTextures(1, texturenames, 0);
        // Bind texture to texture name
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texturenames[0]);
        // Set filtering
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        // Set wrapping mode
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        // Correct the bitmap if it's not a power of two
        Bitmap potTextureBitmap = textureBitmap;
        int potWidth = nextPOT(textureBitmap.getWidth());
        int potHeight = nextPOT(textureBitmap.getHeight());
        if ((textureBitmap.getWidth() != potWidth) || (textureBitmap.getHeight() != potHeight)) {
            potTextureBitmap = Bitmap.createScaledBitmap(textureBitmap, potWidth, potHeight, false);
        }
        // Load the bitmap into the bound texture.
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, potTextureBitmap, 0);
        GLES20.glFlush();
        return Integer.valueOf(texturenames[0]);
    }
});
this.mSurfaceView.queueEvent(futureTask);
What am I doing wrong?
I finally found the cause of this problem.
It's rather specific, but I'll share the details anyhow:
There turned out to be an error in my color calculation. I needed to convert hex colors to normalized RGBA (in my case, converting white #ffffffff to {1.0, 1.0, 1.0, 1.0}), but I actually fed non-normalized values into my shader (for example {255, 255, 255, 255}).
When multiplied with the color from my texture, the resulting colors would blow up.
Depending on the GPU, this either caused the artifacts or not.
So the assumed dependency on the screen resolution was pure coincidence!
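For illustration, converting a packed ARGB hex color into the normalized {r, g, b, a} floats a GLES 2.0 shader expects might look like this (a sketch, not the asker's actual code):
// Convert packed ARGB (e.g. 0xFFFFFFFF for opaque white) into RGBA floats in 0.0-1.0.
static float[] argbToRgba(int argb) {
    float a = ((argb >> 24) & 0xFF) / 255.0f;
    float r = ((argb >> 16) & 0xFF) / 255.0f;
    float g = ((argb >> 8) & 0xFF) / 255.0f;
    float b = (argb & 0xFF) / 255.0f;
    return new float[] { r, g, b, a }; // e.g. passed to glUniform4fv
}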
