The TextureView documentation states that it can be used to render OpenGL content.
In the blog post announcing TextureView, it states:
A TextureView can just as easily be used to embed an OpenGL scene in your application. As of Android 4.0, eglCreateWindowSurface() can be used to render into a SurfaceTexture object.
This seems to imply that to use TextureView instead of GLSurfaceView, one would have to do all the EGL setup oneself and manage the EGLContext and the threading (since GLSurfaceView maintains a GLThread). There doesn't seem to be any sample code in the Android 4.0 SDK that demonstrates how a "TextureView can just as easily be used to embed an OpenGL scene". TextureView seems to plug in more cleanly to the Camera preview (setPreviewTexture) and MediaPlayer (setSurface).
Is it possible to use GLSurfaceView in conjunction with TextureView by using GLSurfaceView.setEGLWindowSurfaceFactory to make it render to the TextureView's SurfaceTexture?
Again, it would be nice if there were some sample code.
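To make the question concrete, here is roughly what I have in mind (an untested sketch; mTextureView is a hypothetical field that is assumed to already have its SurfaceTexture available by the time GLSurfaceView creates its window surface):
// Untested sketch: redirect GLSurfaceView's EGL window surface to a TextureView's
// SurfaceTexture. mTextureView is a hypothetical field, not from any sample.
glSurfaceView.setEGLWindowSurfaceFactory(new GLSurfaceView.EGLWindowSurfaceFactory() {
    @Override
    public EGLSurface createWindowSurface(EGL10 egl, EGLDisplay display,
            EGLConfig config, Object nativeWindow) {
        // Ignore the surface GLSurfaceView would normally use and render
        // into the TextureView's SurfaceTexture instead.
        SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
        return egl.eglCreateWindowSurface(display, config, surfaceTexture, null);
    }

    @Override
    public void destroySurface(EGL10 egl, EGLDisplay display, EGLSurface surface) {
        egl.eglDestroySurface(display, surface);
    }
});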
A moderator deleted this answer, so I'm adding it back for posterity:
See Romain Guy's answer (Nov 23rd, 2011) from the android-developers Google group:
http://groups.google.com/group/android-developers/browse_thread/thread/539457146a401cf1 (mirrored: http://grokbase.com/t/gg/android-developers/11bqmgb7sw/how-to-replace-glsurfaceview-with-textureview-in-android-ice-cream-sandwich)
GLSurfaceView handles GL setup for you, which TextureView will not do.
A TextureView can be used as the native window when you create an EGL
surface. Here is an example (the interesting part is the call to
eglCreateWindowSurface()):
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
mRenderThread = new RenderThread(getResources(), surface);
mRenderThread.start();
}
private static class RenderThread extends Thread {
private static final String LOG_TAG = "GLTextureView";
static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
static final int EGL_OPENGL_ES2_BIT = 4;
private volatile boolean mFinished;
private final Resources mResources;
private final SurfaceTexture mSurface;
private EGL10 mEgl;
private EGLDisplay mEglDisplay;
private EGLConfig mEglConfig;
private EGLContext mEglContext;
private EGLSurface mEglSurface;
private GL mGL;
RenderThread(Resources resources, SurfaceTexture surface) {
mResources = resources;
mSurface = surface;
}
private static final String sSimpleVS =
"attribute vec4 position;\n" +
"attribute vec2 texCoords;\n" +
"varying vec2 outTexCoords;\n" +
"\nvoid main(void) {\n" +
" outTexCoords = texCoords;\n" +
" gl_Position = position;\n" +
"}\n\n";
private static final String sSimpleFS =
"precision mediump float;\n\n" +
"varying vec2 outTexCoords;\n" +
"uniform sampler2D texture;\n" +
"\nvoid main(void) {\n" +
" gl_FragColor = texture2D(texture, outTexCoords);\n" +
"}\n\n";
private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0.0f, 0.0f, 0.0f,
1.0f, -1.0f, 0.0f, 1.0f, 0.0f,
-1.0f, 1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f, 1.0f,
};
@Override
public void run() {
initGL();
FloatBuffer triangleVertices = ByteBuffer.allocateDirect(mTriangleVerticesData.length * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
triangleVertices.put(mTriangleVerticesData).position(0);
int texture = loadTexture(R.drawable.large_photo);
int program = buildProgram(sSimpleVS, sSimpleFS);
int attribPosition = glGetAttribLocation(program, "position");
checkGlError();
int attribTexCoords = glGetAttribLocation(program, "texCoords");
checkGlError();
int uniformTexture = glGetUniformLocation(program, "texture");
checkGlError();
glBindTexture(GL_TEXTURE_2D, texture);
checkGlError();
glUseProgram(program);
checkGlError();
glEnableVertexAttribArray(attribPosition);
checkGlError();
glEnableVertexAttribArray(attribTexCoords);
checkGlError();
glUniform1i(uniformTexture, texture);
checkGlError();
while (!mFinished) {
checkCurrent();
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
checkGlError();
glClear(GL_COLOR_BUFFER_BIT);
checkGlError();
// drawQuad
triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
glVertexAttribPointer(attribPosition, 3, GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
glVertexAttribPointer(attribTexCoords, 3, GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
if (!mEgl.eglSwapBuffers(mEglDisplay, mEglSurface)) {
throw new RuntimeException("Cannot swap buffers");
}
checkEglError();
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
// Ignore
}
}
finishGL();
}
private int loadTexture(int resource) {
int[] textures = new int[1];
glActiveTexture(GL_TEXTURE0);
glGenTextures(1, textures, 0);
checkGlError();
int texture = textures[0];
glBindTexture(GL_TEXTURE_2D, texture);
checkGlError();
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
Bitmap bitmap = BitmapFactory.decodeResource(mResources, resource);
GLUtils.texImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bitmap, GL_UNSIGNED_BYTE, 0);
checkGlError();
bitmap.recycle();
return texture;
}
private int buildProgram(String vertex, String fragment) {
int vertexShader = buildShader(vertex, GL_VERTEX_SHADER);
if (vertexShader == 0) return 0;
int fragmentShader = buildShader(fragment, GL_FRAGMENT_SHADER);
if (fragmentShader == 0) return 0;
int program = glCreateProgram();
glAttachShader(program, vertexShader);
checkGlError();
glAttachShader(program, fragmentShader);
checkGlError();
glLinkProgram(program);
checkGlError();
int[] status = new int[1];
glGetProgramiv(program, GL_LINK_STATUS, status, 0);
if (status[0] != GL_TRUE) {
String error = glGetProgramInfoLog(program);
Log.d(LOG_TAG, "Error while linking program:\n" + error);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
glDeleteProgram(program);
return 0;
}
return program;
}
private int buildShader(String source, int type) {
int shader = glCreateShader(type);
glShaderSource(shader, source);
checkGlError();
glCompileShader(shader);
checkGlError();
int[] status = new int[1];
glGetShaderiv(shader, GL_COMPILE_STATUS, status, 0);
if (status[0] != GL_TRUE) {
String error = glGetShaderInfoLog(shader);
Log.d(LOG_TAG, "Error while compiling shader:\n" + error);
glDeleteShader(shader);
return 0;
}
return shader;
}
private void checkEglError() {
int error = mEgl.eglGetError();
if (error != EGL10.EGL_SUCCESS) {
Log.w(LOG_TAG, "EGL error = 0x" + Integer.toHexString(error));
}
}
private void checkGlError() {
int error = glGetError();
if (error != GL_NO_ERROR) {
Log.w(LOG_TAG, "GL error = 0x" + Integer.toHexString(error));
}
}
private void finishGL() {
mEgl.eglDestroyContext(mEglDisplay, mEglContext);
mEgl.eglDestroySurface(mEglDisplay, mEglSurface);
}
private void checkCurrent() {
if (!mEglContext.equals(mEgl.eglGetCurrentContext()) ||
!mEglSurface.equals(mEgl.eglGetCurrentSurface(EGL10.EGL_DRAW))) {
if (!mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
throw new RuntimeException("eglMakeCurrent failed " + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
}
}
private void initGL() {
mEgl = (EGL10) EGLContext.getEGL();
mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (mEglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed "
+ GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
int[] version = new int[2];
if (!mEgl.eglInitialize(mEglDisplay, version)) {
throw new RuntimeException("eglInitialize failed " +
GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
mEglConfig = chooseEglConfig();
if (mEglConfig == null) {
throw new RuntimeException("eglConfig not initialized");
}
mEglContext = createContext(mEgl, mEglDisplay, mEglConfig);
mEglSurface = mEgl.eglCreateWindowSurface(mEglDisplay, mEglConfig, mSurface, null);
if (mEglSurface == null || mEglSurface == EGL10.EGL_NO_SURFACE) {
int error = mEgl.eglGetError();
if (error == EGL10.EGL_BAD_NATIVE_WINDOW) {
Log.e(LOG_TAG, "createWindowSurface returned EGL_BAD_NATIVE_WINDOW.");
return;
}
throw new RuntimeException("createWindowSurface failed "
+ GLUtils.getEGLErrorString(error));
}
if (!mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
throw new RuntimeException("eglMakeCurrent failed "
+ GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
mGL = mEglContext.getGL();
}
EGLContext createContext(EGL10 egl, EGLDisplay eglDisplay, EGLConfig eglConfig) {
int[] attrib_list = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
return egl.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
}
private EGLConfig chooseEglConfig() {
int[] configsCount = new int[1];
EGLConfig[] configs = new EGLConfig[1];
int[] configSpec = getConfig();
if (!mEgl.eglChooseConfig(mEglDisplay, configSpec, configs, 1, configsCount)) {
throw new IllegalArgumentException("eglChooseConfig failed " +
GLUtils.getEGLErrorString(mEgl.eglGetError()));
} else if (configsCount[0] > 0) {
return configs[0];
}
return null;
}
private int[] getConfig() {
return new int[] {
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_ALPHA_SIZE, 8,
EGL10.EGL_DEPTH_SIZE, 0,
EGL10.EGL_STENCIL_SIZE, 0,
EGL10.EGL_NONE
};
}
void finish() {
mFinished = true;
}
}
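For reference, hooking a RenderThread like this up to a TextureView is just a matter of registering a SurfaceTextureListener; a minimal sketch, assuming an Activity with an mRenderThread field (the field and the programmatic TextureView are my assumptions, not part of the quoted post):
// Minimal sketch of the TextureView wiring around the quoted RenderThread.
TextureView textureView = new TextureView(this);
textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        mRenderThread = new RenderThread(getResources(), surface);
        mRenderThread.start();
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
        // Ignored in this sketch.
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        if (mRenderThread != null) mRenderThread.finish();
        return true; // let the TextureView release the SurfaceTexture
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        // Ignored in this sketch.
    }
});
setContentView(textureView);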
As @fadden mentioned earlier, there is a nice example of using TextureView here: https://github.com/google/grafika/blob/master/src/com/android/grafika/TextureViewGLActivity.java.
GLSurfaceView and TextureView are mutually exclusive. I don't see a use case to have both at the same time.
Related
I'm beginning a simple 2D GLES 2.0 Android application.
For some strange reason I always get one point in the center of the screen instead of the vertex coordinates passed to the shader.
I'm clearly doing something wrong; I can't figure out what.
P.S. I'm not using any projection matrices, because I need a standard quad for drawing. I tried a projection; it did not help.
public class TestActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
GLSurfaceView glv = new GLSurfaceView(this);
glv.setEGLContextClientVersion(2);
SimpleRenderer renderer = new SimpleRenderer(this);
glv.setRenderer(renderer);
glv.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
setContentView(glv);
}
}
public class SimpleRenderer implements GLSurfaceView.Renderer {
private final float[] squareVertices = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
private FloatBuffer squareBuffer;
private final Context context;
private int text_program;
private int aPositionLocation2;
public SimpleRenderer(Context context) {
this.context = context;
squareBuffer = ByteBuffer.allocateDirect(squareVertices.length * 4).asFloatBuffer();
squareBuffer.put(squareVertices).position(0);
}
public void onDrawFrame(GL10 gl) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(text_program);
glEnableVertexAttribArray(aPositionLocation2);
glVertexAttribPointer(aPositionLocation2, 2, GL_FLOAT, false, 0, squareBuffer);
glDrawArrays(GL_POINTS, 0, 4);
glDisableVertexAttribArray(aPositionLocation2);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
glViewport(0, 0, width, height);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
text_program = ShaderHelper.buildProgram(TextResourceReader.readTextFileFromResource(context, R.raw.texture_vertex_shader),
TextResourceReader.readTextFileFromResource(context, R.raw.texture_fragment_shader));
aPositionLocation2 = glGetAttribLocation(text_program, "a_Position");
glClearColor(0f, 0f, 0f, 0f);
}
}
public class ShaderHelper {
private static final String TAG = "ShaderHelper";
public static int compileVertexShader(String shaderCode) {
return compileShader(GL_VERTEX_SHADER, shaderCode);
}
public static int compileFragmentShader(String shaderCode) {
return compileShader(GL_FRAGMENT_SHADER, shaderCode);
}
private static int compileShader(int type, String shaderCode) {
final int shaderObjectId = glCreateShader(type);
if (shaderObjectId == 0) Log.w(TAG, "Shader not created!");
glShaderSource(shaderObjectId, shaderCode);
glCompileShader(shaderObjectId);
final int[] compileStatus = new int[1];
glGetShaderiv(shaderObjectId, GL_COMPILE_STATUS, compileStatus, 0);
Log.v(TAG, "Results of compiling source:" + "\n" + shaderCode + "\n:"
+ glGetShaderInfoLog(shaderObjectId));
if (compileStatus[0] == 0) {
// If it failed, delete the shader object.
glDeleteShader(shaderObjectId);
Log.w(TAG, "Compilation of shader failed.");
return 0;
}
return shaderObjectId;
}
public static int linkProgram(int vertexShaderId, int fragmentShaderId) {
final int programObjectId = glCreateProgram();
if (programObjectId == 0) {
Log.w(TAG, "Could not create new program");
return 0;
}
glAttachShader(programObjectId, vertexShaderId);
glAttachShader(programObjectId, fragmentShaderId);
glLinkProgram(programObjectId);
final int[] linkStatus = new int[1];
glGetProgramiv(programObjectId, GL_LINK_STATUS, linkStatus, 0);
Log.v(TAG, "Results of linking program:\n"
+ glGetProgramInfoLog(programObjectId));
if (linkStatus[0] == 0) {
// If it failed, delete the program object.
glDeleteProgram(programObjectId);
Log.w(TAG, "Linking of program failed.");
return 0;
}
return programObjectId;
}
public static boolean validateProgram(int programObjectId) {
glValidateProgram(programObjectId);
final int[] validateStatus = new int[1];
glGetProgramiv(programObjectId, GL_VALIDATE_STATUS, validateStatus, 0);
Log.v(TAG, "Results of validating program: " + validateStatus[0]
+ "\nLog:" + glGetProgramInfoLog(programObjectId));
return validateStatus[0] != 0;
}
public static int buildProgram(String vertexShaderSource,
String fragmentShaderSource) {
int program;
// Compile the shaders.
int vertexShader = compileVertexShader(vertexShaderSource);
int fragmentShader = compileFragmentShader(fragmentShaderSource);
// Link them into a shader program.
program = linkProgram(vertexShader, fragmentShader);
validateProgram(program);
return program;
}
}
vertex shader:
attribute vec4 a_Position;
void main()
{
gl_Position = a_Position;
gl_PointSize = 10.0;
}
fragment shader:
void main()
{
gl_FragColor = vec4(1.0,1.0,1.0,1.0);
}
The problem was indeed in how the vertex data was passed: OpenGL expects native byte order (little-endian on the x86 emulator), but I had allocated the buffer in Java, which defaults to big-endian, so corrupted float values were passed to the vertex shader. After specifying the byte order on the byte buffer, everything works fine.
squareBuffer = ByteBuffer.allocateDirect(squareVertices.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
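Since this mistake is easy to repeat, a small helper along these lines (my own addition, not from the original code) keeps the byte order in one place:
// Helper for allocating a direct, native-order FloatBuffer from a float array.
private static FloatBuffer toFloatBuffer(float[] data) {
    FloatBuffer buffer = ByteBuffer.allocateDirect(data.length * 4)
            .order(ByteOrder.nativeOrder()) // match the native/GPU byte order
            .asFloatBuffer();
    buffer.put(data).position(0);
    return buffer;
}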
I draw a 3D cube using OpenGL ES:
https://db.tt/ktcbwtnD (this links to a picture; I'm new to Stack Overflow, so I can't embed images yet)
I also render video by referencing this code:
public class VideoTextureRender implements Renderer, SurfaceTexture.OnFrameAvailableListener {
private static String TAG = "VideoRender";
private static final int FLOAT_SIZE_BYTES = 4;
public static final int BYTES_PER_FLOAT = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private static int count = 1;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-0.5f, -0.5f, 0, 0.f, 0.f,
0.5f, -0.5f, 0, 1.f, 0.f,
-0.5f, 0.5f, 0, 0.f, 1.f,
0.5f, 0.5f, 0, 1.f, 1.f,
};
private FloatBuffer mTriangleVertices;
private final String mVertexShader =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private final String mFragmentShader =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];
private int mProgram;
private int mTextureID;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
private GLSurfaceViewActivity mGLSurfaceViewActivity;
private SurfaceTexture mSurface;
private boolean updateSurface = false;
public static int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private MediaPlayer mMediaPlayer;
private Context context;
public VideoTextureRender(Context Context) {
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
}
public void setMediaPlayer(MediaPlayer player) {
mMediaPlayer = player;
}
@Override
public void onDrawFrame(GL10 glUnused) {
synchronized(this) {
if (updateSurface) {
mSurface.updateTexImage();
mSurface.getTransformMatrix(mSTMatrix);
updateSurface = false;
}
}
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
Matrix.setIdentityM(mMVPMatrix, 0);
//rotateM(mSTMatrix, 0, count, 1f, 1f, 0f);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
GLES20.glFinish();
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
}
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
mProgram = createProgram(mVertexShader, mFragmentShader);
if (mProgram == 0) {
return;
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uMVPMatrix");
}
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uSTMatrix");
}
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);
Surface surface = new Surface(mSurface);
mMediaPlayer.setSurface(surface);
mMediaPlayer.setScreenOnWhilePlaying(true);
surface.release();
try {
mMediaPlayer.prepare();
} catch (IOException t) {
Log.e(TAG, "media player prepare failed");
synchronized(this) {
updateSurface = false;
}
}
mMediaPlayer.start();
}
synchronized public void onFrameAvailable(SurfaceTexture surface) {
updateSurface = true;
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}
private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
}
https://db.tt/rDjbtYjE
My problem is: how do I set the video as a texture and bind that video texture onto the cube?
Thank you!
Using video with OpenGL ES is more complex than you probably expect. The key issues are:
Video frames must be converted from the YUV to the RGB color space. This is best done in GLSL (ES 2.0/3.0) shader code or by using external textures.
The glTexImage2D() function is too slow to handle HD video frame rates because it copies the data. Use the EGL image extension (EGL_NATIVE_BUFFER_ANDROID) instead.
The decoding of video frames must be synchronized with OpenGL ES's texture loading. This can be done with the fence sync extensions.
This answer has some links with further information to get you started.
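For the color-space point, the usual route on Android is to let a SurfaceTexture hand decoded frames to GL as an external texture and sample it with a samplerExternalOES shader, as the renderers quoted elsewhere on this page do; the relevant fragment shader looks roughly like this:
// Fragment shader for sampling a SurfaceTexture-backed external (OES) texture;
// the driver performs the YUV-to-RGB conversion behind this sampler.
private static final String EXTERNAL_OES_FRAGMENT_SHADER =
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision mediump float;\n" +
        "varying vec2 vTextureCoord;\n" +
        "uniform samplerExternalOES sTexture;\n" +
        "void main() {\n" +
        "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
        "}\n";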
I'm trying to use the new EffectFactory/Effect to add effects to images off screen (i.e. framebuffer). I've looked at the HelloEffects.java example provided in the SDK and I've tried it out and it works. Except it obviously uses a GLSurfaceView and that isn't what I want.
So I've taken tests/effect/src/android/effect/cts/GLEnv.java to set up the EGL stuff, and I've also grabbed TextureRenderer.java and GLToolbox from the HelloEffects example. Mashed them all up and I've got the code below.
(On a side note, I have also tried tests/media/src/android/media/cts/OutputSurface.java to set up the EGL stuff, and I got the exact same result.)
When I run it, the image I get back is just uniformly blue. This corresponds to the glClear I did with the colour blue. This proves at least to some degree that pixels are being rendered to the framebuffer, glReadPixels is seeing those pixels and the bitmap output is working.
But why is the texture not showing up? Neither the original nor the effect-applied texture shows up. No GL errors are detected either.
I've trimmed down the code to a single file working example that can be copied/pasted into Eclipse and will run. Obviously modify the input and output image paths per your needs.
Tested on a Nexus 10 / Android 4.3 as well as the Emulator. Same results.
import java.io.FileOutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import android.media.effect.Effect;
import android.media.effect.EffectContext;
import android.media.effect.EffectFactory;
import android.os.Bundle;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLUtils;
public class MainActivity extends Activity
{
private int[] mTextures = new int[2];
private EffectContext mEffectContext;
private Effect mEffect;
private TextureRenderer mTexRenderer = new TextureRenderer();
private int mImageWidth;
private int mImageHeight;
final static String imageFileOut = "/data/local/out.png";
final static String imageFileIn = "/data/local/lol.png";
private GLEnv mEnv;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mEnv = new GLEnv();
mEnv.makeCurrent();
mEffectContext = EffectContext.createWithCurrentGlContext();
mTexRenderer.init();
loadTextures();
initAndapplyEffect();
renderResult();
saveBitmap();
}
void saveBitmap()
{
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
ByteBuffer pixelBuffer = ByteBuffer.allocateDirect(mImageWidth * mImageHeight * 4).order(ByteOrder.nativeOrder());
GLES20.glReadPixels(0, 0, mImageWidth, mImageHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
mEnv.checkForEGLErrors("store Pixels");
Bitmap bitmap = Bitmap.createBitmap(mImageWidth, mImageHeight, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(pixelBuffer);
try
{
FileOutputStream fos = new FileOutputStream(imageFileOut);
bitmap.compress(Bitmap.CompressFormat.PNG, 100, fos);
fos.close();
} catch (Exception e) { e.printStackTrace(); }
}
private void initAndapplyEffect()
{
EffectFactory effectFactory = mEffectContext.getFactory();
if (mEffect != null)
{
mEffect.release();
}
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_BRIGHTNESS);
mEffect.setParameter("brightness", 2.0f);
mEffect.apply(mTextures[0], mImageWidth, mImageHeight, mTextures[1]);
}
private int loadTextures()
{
// Generate textures
GLES20.glGenTextures(2, mTextures, 0);
// Load input bitmap
Bitmap bitmap = BitmapFactory.decodeFile(imageFileIn);
mImageWidth = bitmap.getWidth();
mImageHeight = bitmap.getHeight();
mTexRenderer.updateTextureSize(mImageWidth, mImageHeight);
// Upload to texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[0]);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Set texture parameters
GLToolbox.initTexParams();
return mTextures[0];
}
private void renderResult()
{
mTexRenderer.renderTexture(mTextures[1]);
//mTexRenderer.renderTexture(mTextures[0]);
}
public class GLEnv {
private EGLContext mEGLContext;
private EGLSurface mEGLSurface;
private EGLDisplay mEGLDisplay;
private EGLConfig mEGLConfig;
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
private static final int EGL_OPENGL_ES2_BIT = 0x0004;
public GLEnv() {
EGL10 egl = (EGL10)EGLContext.getEGL();
mEGLDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
checkForEGLErrors("eglGetDisplay");
int[] version = new int[2];
egl.eglInitialize(mEGLDisplay, version);
int[] configSpec = {
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] num_config = new int[1];
egl.eglChooseConfig(mEGLDisplay, configSpec, configs, 1, num_config);
checkForEGLErrors("eglChooseConfig");
if (num_config[0] < 1) {
throw new RuntimeException("Could not find a suitable config for EGL context!");
}
mEGLConfig = configs[0];
int[] attribs = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
mEGLContext = egl.eglCreateContext(mEGLDisplay, mEGLConfig, EGL10.EGL_NO_CONTEXT, attribs);
checkForEGLErrors("eglCreateContext");
int[] surfaceSize = { EGL10.EGL_WIDTH, 1920, EGL10.EGL_HEIGHT, 1080, EGL10.EGL_NONE };
mEGLSurface = egl.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig, surfaceSize);
checkForEGLErrors("eglCreatePbufferSurface");
}
public void makeCurrent() {
EGL10 egl = (EGL10)EGLContext.getEGL();
egl.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
checkForEGLErrors("eglMakeCurrent");
}
public void checkForEGLErrors(String operation) {
EGL10 egl = (EGL10)EGLContext.getEGL();
int error = egl.eglGetError();
if (error != EGL10.EGL_SUCCESS) {
throw new RuntimeException("Operation '" + operation + "' caused EGL error: " + error);
}
}
}
private static final float[] TEX_VERTICES = {
0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f
};
private static final float[] POS_VERTICES = {
-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f
};
public class TextureRenderer {
private int mProgram;
private int mTexSamplerHandle;
private int mTexCoordHandle;
private int mPosCoordHandle;
private FloatBuffer mTexVertices;
private FloatBuffer mPosVertices;
private int mViewWidth;
private int mViewHeight;
private int mTexWidth;
private int mTexHeight;
private static final String VERTEX_SHADER =
"attribute vec4 a_position;\n" +
"attribute vec2 a_texcoord;\n" +
"varying vec2 v_texcoord;\n" +
"void main() {\n" +
" gl_Position = a_position;\n" +
" v_texcoord = a_texcoord;\n" +
"}\n";
private static final String FRAGMENT_SHADER =
"precision mediump float;\n" +
"uniform sampler2D tex_sampler;\n" +
"varying vec2 v_texcoord;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(tex_sampler, v_texcoord);\n" +
"}\n";
private static final int FLOAT_SIZE_BYTES = 4;
public void init() {
// Create program
mProgram = GLToolbox.createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
// Bind attributes and uniforms
mTexSamplerHandle = GLES20.glGetUniformLocation(mProgram,
"tex_sampler");
mTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texcoord");
mPosCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_position");
// Setup coordinate buffers
mTexVertices = ByteBuffer.allocateDirect(
TEX_VERTICES.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTexVertices.put(TEX_VERTICES).position(0);
mPosVertices = ByteBuffer.allocateDirect(
POS_VERTICES.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mPosVertices.put(POS_VERTICES).position(0);
}
public void tearDown() {
GLES20.glDeleteProgram(mProgram);
}
public void updateTextureSize(int texWidth, int texHeight) {
mTexWidth = texWidth;
mTexHeight = texHeight;
computeOutputVertices();
}
public void updateViewSize(int viewWidth, int viewHeight) {
mViewWidth = viewWidth;
mViewHeight = viewHeight;
computeOutputVertices();
}
public void renderTexture(int texId) {
// Bind default FBO
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
// Use our shader program
GLES20.glUseProgram(mProgram);
GLToolbox.checkGlError("glUseProgram");
// Set viewport
GLES20.glViewport(0, 0, mViewWidth, mViewHeight);
GLToolbox.checkGlError("glViewport");
// Disable blending
GLES20.glDisable(GLES20.GL_BLEND);
// Set the vertex attributes
GLES20.glVertexAttribPointer(mTexCoordHandle, 2, GLES20.GL_FLOAT, false,
0, mTexVertices);
GLES20.glEnableVertexAttribArray(mTexCoordHandle);
GLES20.glVertexAttribPointer(mPosCoordHandle, 2, GLES20.GL_FLOAT, false,
0, mPosVertices);
GLES20.glEnableVertexAttribArray(mPosCoordHandle);
GLToolbox.checkGlError("vertex attribute setup");
// Set the input texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLToolbox.checkGlError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texId);
GLToolbox.checkGlError("glBindTexture");
GLES20.glUniform1i(mTexSamplerHandle, 0);
// Draw
GLES20.glClearColor(0.0f, 0.0f, 0.5f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
private void computeOutputVertices() {
if (mPosVertices != null) {
float imgAspectRatio = mTexWidth / (float)mTexHeight;
float viewAspectRatio = mViewWidth / (float)mViewHeight;
float relativeAspectRatio = viewAspectRatio / imgAspectRatio;
float x0, y0, x1, y1;
if (relativeAspectRatio > 1.0f) {
x0 = -1.0f / relativeAspectRatio;
y0 = -1.0f;
x1 = 1.0f / relativeAspectRatio;
y1 = 1.0f;
} else {
x0 = -1.0f;
y0 = -relativeAspectRatio;
x1 = 1.0f;
y1 = relativeAspectRatio;
}
float[] coords = new float[] { x0, y0, x1, y0, x0, y1, x1, y1 };
mPosVertices.put(coords).position(0);
}
}
}
public static class GLToolbox {
public static int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
String info = GLES20.glGetShaderInfoLog(shader);
GLES20.glDeleteShader(shader);
shader = 0;
throw new RuntimeException("Could not compile shader " +
shaderType + ":" + info);
}
}
return shader;
}
public static int createProgram(String vertexSource,
String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus,
0);
if (linkStatus[0] != GLES20.GL_TRUE) {
String info = GLES20.glGetProgramInfoLog(program);
GLES20.glDeleteProgram(program);
program = 0;
throw new RuntimeException("Could not link program: " + info);
}
}
return program;
}
public static void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
throw new RuntimeException(op + ": glError " + error);
}
}
public static void initTexParams() {
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
}
}
}
So I finally figured it out myself after overclocking quite a few of the little grey cells.
The issue is with computeOutputVertices(). This might have worked with a GLSurfaceView, but whatever it's doing is not compatible with a pbuffer; note that updateViewSize() is never called in the code above, so mViewWidth and mViewHeight are still 0 when the aspect-ratio math runs. Just comment that line out and it works beautifully.
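If the zero view size is indeed the culprit, an alternative to commenting the call out is to give the renderer a real view size before drawing; a sketch using the existing updateViewSize() method, with the image dimensions standing in for a view size (that substitution is an assumption on my part):
// Hypothetical alternative fix: give TextureRenderer non-zero view dimensions so
// computeOutputVertices() and glViewport() work with real sizes instead of 0x0.
mTexRenderer.updateTextureSize(mImageWidth, mImageHeight);
mTexRenderer.updateViewSize(mImageWidth, mImageHeight); // treat the image size as the "view" size
renderResult();
saveBitmap();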
I've been assigned to create an open source Java port of this Objective C GPUImage framework so that it can be used in an Android application. I am to recreate it as closely as I can, with all the variable names, function names, etc. kept the same. I'm in the beginning stages, and I'm trying to port GPUImageOpenGLESContext.h and GPUImageOpenGLESContext.m. (Sorry, I would provide links, but as a new user I cannot add any more links.)
I'm having difficulty with these methods:
+ (GLint)maximumTextureSizeForThisDevice;
{
GLint maxTextureSize;
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);
return maxTextureSize;
}
+ (GLint)maximumTextureUnitsForThisDevice;
{
GLint maxTextureUnits;
glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &maxTextureUnits);
return maxTextureUnits;
}
It seems that in Objective C you can simply call these methods, but in Java you cannot. I've done some searching, and most people said to use a GLSurfaceView, but that would require an Activity, correct? I was very excited when I found the question "Get Maximum OpenGL ES 2.0 Texture Size Limit on Android", but the response claims that the code would not work.
So, my question is: how can I query the maximum texture size and maximum texture units from a class that is not an Activity? Do I have to use a GLSurfaceView?
I'd also appreciate any suggestions on how to port this over. I've never ported anything from Objective C to Java, so any advice would be appreciated!
If it would be helpful, here is my current code:
public class GPUImageOpenGLESContext
{
private static GPUImageOpenGLESContext instance = null;
EGLContext context;
protected GPUImageOpenGLESContext()
{
// This is a protected empty method
// that exists only to prevent
// this singleton object from
// multiple instantiation
return;
}
public enum GPUImageRotationMode {
kGPUImageNoRotation, kGPUImageRotateLeft, kGPUImageRotateRight, kGPUImageFlipVertical,
kGPUImageFlipHorizontal, kGPUImageRotateRightFlipVertical, kGPUImageRotate180
}
public GPUImageRotationMode GPUImageRotationSwapsWidthAndHeight(GPUImageRotationMode rotation)
{
// TODO: Implement GPUImageRotationSwapsWidthAndHeight macro as method
//rotation = ((rotation) == kGPUImageRotateLeft || (rotation) == kGPUImageRotateRight || (rotation) == kGPUImageRotateRightFlipVertical)
return rotation;
}
public static GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext()
{
if (instance == null)
{
instance = new GPUImageOpenGLESContext();
}
return instance;
}
public static void useImageProcessingContext()
{
EGLContext imageProcessingContext = GPUImageOpenGLESContext.sharedImageProcessingOpenGLESContext().context;
if (EGLContext.getEGL() != imageProcessingContext)
{
// In Objective C, this call would be here:
// [EAGLContext setCurrentContext:imageProcessingContext]
// Cannot figure out how to handle this. For now, throws an exception.
throw new RuntimeException("useImageProcessingContext not equal to EGLContext");
}
return;
}
public static int maximumTextureSizeForThisDevice()
{
int[] maxTextureSize = new int[1];
// TODO: See if you can use gl. without an activity
//GL10 gl = new GL10();
//EGL gl = EGLContext.getEGL();
//gl.glGetIntegerv(GL10.GL_MAX_TEXTURE_SIZE, maxTextureSize, 0);
return maxTextureSize[0];
}
public static int maximumTextureUnitsForThisDevice()
{
// TODO: Implement maximumTextureUnitsForThisDevice();
return -1;
}
public static CGSize sizeThatFitsWithinATextureForSize(CGSize inputSize)
{
int maxTextureSize = maximumTextureSizeForThisDevice();
if ((inputSize.width < maxTextureSize) && (inputSize.height < maxTextureSize))
{
return inputSize;
}
CGSize adjustedSize = new CGSize();
if (inputSize.width > inputSize.height)
{
adjustedSize.width = (float)maxTextureSize;
adjustedSize.height = ((float)maxTextureSize / inputSize.width) * inputSize.height;
}
else
{
adjustedSize.height = (float)maxTextureSize;
adjustedSize.width = ((float)maxTextureSize / inputSize.height) * inputSize.width;
}
return adjustedSize;
}
public EGLContext getContext()
{
if (context == null)
{
// TODO: Implement getContext()
}
return context;
}
public interface GPUImageInput
{
public void newFrameReadyAtTime(Time frameTime);
public void setInputTextureAtIndex(int newInputTexture, int textureIndex);
public int nextAvailableTextureIndex();
public void setInputSizeAtIndex(CGSize newSize, int textureIndex);
public void setInputRotationAtIndex(GPUImageRotationMode newInputRotation, int textureIndex);
public CGSize maximumOutputSize();
public void endProcessing();
public boolean shouldIgnoreUpdatesToThisTarget();
}
}
I realize this is an old post but your problem is that you haven't properly initialized the EGLContext.
Usually you'd want to use a GLSurfaceView or a TextureView to actually include your GL content in the View hierarchy. The GLSurfaceView will handle a lot of things for you like properly creating the EGLContext and managing a render thread. The TextureView requires a bit more manual work.
Once you have a context through either of these means, you can use:
int[] size = new int[1];
GLES20.glGetIntegerv(GLES20.GL_MAX_TEXTURE_SIZE, size, 0);
This assumes you have bound the OpenGL ES 2.0 API. First make sure that you have properly created your EGLContext and can execute EGL and GLES calls; then you should be able to query the max texture size.
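Since the question is specifically about doing this outside of an Activity, one self-contained option (a sketch only, using the plain EGL10/GLES20 classes rather than the GPUImage API, and much like the GLEnv class earlier on this page) is to create a tiny pbuffer-backed context and query the limits from there:
// Sketch: create a throwaway 1x1 pbuffer context and query GL limits without any View.
EGL10 egl = (EGL10) EGLContext.getEGL();
EGLDisplay display = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
egl.eglInitialize(display, new int[2]);

final int EGL_OPENGL_ES2_BIT = 4;
final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
int[] configSpec = {
        EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
        EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
        EGL10.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
egl.eglChooseConfig(display, configSpec, configs, 1, numConfigs);

EGLContext context = egl.eglCreateContext(display, configs[0], EGL10.EGL_NO_CONTEXT,
        new int[] { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE });
EGLSurface surface = egl.eglCreatePbufferSurface(display, configs[0],
        new int[] { EGL10.EGL_WIDTH, 1, EGL10.EGL_HEIGHT, 1, EGL10.EGL_NONE });
egl.eglMakeCurrent(display, surface, surface, context);

int[] maxTextureSize = new int[1];
GLES20.glGetIntegerv(GLES20.GL_MAX_TEXTURE_SIZE, maxTextureSize, 0);
int[] maxTextureUnits = new int[1];
GLES20.glGetIntegerv(GLES20.GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureUnits, 0);

// Tear everything down again; error handling omitted for brevity.
egl.eglMakeCurrent(display, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
egl.eglDestroySurface(display, surface);
egl.eglDestroyContext(display, context);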
You can see Romain Guy's post about using a TextureView as you would a GLSurfaceView for the nitty-gritty details of managing your own EGLContext here (https://groups.google.com/d/msg/android-developers/U5RXFGpAHPE/IqHeIeGXhr0J):
GLSurfaceView handles GL setup for you, which TextureView will not do.
A TextureView can be used as the native window when you create an EGL
surface. Here is an example (the interesting part is the call to
eglCreateWindowSurface()):
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
mRenderThread = new RenderThread(getResources(), surface);
mRenderThread.start();
}
private static class RenderThread extends Thread {
private static final String LOG_TAG = "GLTextureView";
static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
static final int EGL_OPENGL_ES2_BIT = 4;
private volatile boolean mFinished;
private final Resources mResources;
private final SurfaceTexture mSurface;
private EGL10 mEgl;
private EGLDisplay mEglDisplay;
private EGLConfig mEglConfig;
private EGLContext mEglContext;
private EGLSurface mEglSurface;
private GL mGL;
RenderThread(Resources resources, SurfaceTexture surface) {
mResources = resources;
mSurface = surface;
}
private static final String sSimpleVS =
"attribute vec4 position;\n" +
"attribute vec2 texCoords;\n" +
"varying vec2 outTexCoords;\n" +
"\nvoid main(void) {\n" +
" outTexCoords = texCoords;\n" +
" gl_Position = position;\n" +
"}\n\n";
private static final String sSimpleFS =
"precision mediump float;\n\n" +
"varying vec2 outTexCoords;\n" +
"uniform sampler2D texture;\n" +
"\nvoid main(void) {\n" +
" gl_FragColor = texture2D(texture, outTexCoords);\n" +
"}\n\n";
private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0.0f, 0.0f, 0.0f,
1.0f, -1.0f, 0.0f, 1.0f, 0.0f,
-1.0f, 1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f, 1.0f,
};
@Override
public void run() {
initGL();
FloatBuffer triangleVertices = ByteBuffer.allocateDirect(mTriangleVerticesData.length
* FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
triangleVertices.put(mTriangleVerticesData).position(0);
int texture = loadTexture(R.drawable.large_photo);
int program = buildProgram(sSimpleVS, sSimpleFS);
int attribPosition = glGetAttribLocation(program, "position");
checkGlError();
int attribTexCoords = glGetAttribLocation(program, "texCoords");
checkGlError();
int uniformTexture = glGetUniformLocation(program, "texture");
checkGlError();
glBindTexture(GL_TEXTURE_2D, texture);
checkGlError();
glUseProgram(program);
checkGlError();
glEnableVertexAttribArray(attribPosition);
checkGlError();
glEnableVertexAttribArray(attribTexCoords);
checkGlError();
glUniform1i(uniformTexture, texture);
checkGlError();
while (!mFinished) {
checkCurrent();
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
checkGlError();
glClear(GL_COLOR_BUFFER_BIT);
checkGlError();
// drawQuad
triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
glVertexAttribPointer(attribPosition, 3, GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
glVertexAttribPointer(attribTexCoords, 3, GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
if (!mEgl.eglSwapBuffers(mEglDisplay, mEglSurface)) {
throw new RuntimeException("Cannot swap buffers");
}
checkEglError();
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
// Ignore
}
}
finishGL();
}
private int loadTexture(int resource) {
int[] textures = new int[1];
glActiveTexture(GL_TEXTURE0);
glGenTextures(1, textures, 0);
checkGlError();
int texture = textures[0];
glBindTexture(GL_TEXTURE_2D, texture);
checkGlError();
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
Bitmap bitmap = BitmapFactory.decodeResource(mResources, resource);
GLUtils.texImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bitmap, GL_UNSIGNED_BYTE, 0);
checkGlError();
bitmap.recycle();
return texture;
}
private int buildProgram(String vertex, String fragment) {
int vertexShader = buildShader(vertex, GL_VERTEX_SHADER);
if (vertexShader == 0) return 0;
int fragmentShader = buildShader(fragment, GL_FRAGMENT_SHADER);
if (fragmentShader == 0) return 0;
int program = glCreateProgram();
glAttachShader(program, vertexShader);
checkGlError();
glAttachShader(program, fragmentShader);
checkGlError();
glLinkProgram(program);
checkGlError();
int[] status = new int[1];
glGetProgramiv(program, GL_LINK_STATUS, status, 0);
if (status[0] != GL_TRUE) {
String error = glGetProgramInfoLog(program);
Log.d(LOG_TAG, "Error while linking program:\n" + error);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
glDeleteProgram(program);
return 0;
}
return program;
}
private int buildShader(String source, int type) {
int shader = glCreateShader(type);
glShaderSource(shader, source);
checkGlError();
glCompileShader(shader);
checkGlError();
int[] status = new int[1];
glGetShaderiv(shader, GL_COMPILE_STATUS, status, 0);
if (status[0] != GL_TRUE) {
String error = glGetShaderInfoLog(shader);
Log.d(LOG_TAG, "Error while compiling shader:\n" + error);
glDeleteShader(shader);
return 0;
}
return shader;
}
private void checkEglError() {
int error = mEgl.eglGetError();
if (error != EGL10.EGL_SUCCESS) {
Log.w(LOG_TAG, "EGL error = 0x" + Integer.toHexString(error));
}
}
private void checkGlError() {
int error = glGetError();
if (error != GL_NO_ERROR) {
Log.w(LOG_TAG, "GL error = 0x" + Integer.toHexString(error));
}
}
private void finishGL() {
mEgl.eglDestroyContext(mEglDisplay, mEglContext);
mEgl.eglDestroySurface(mEglDisplay, mEglSurface);
}
private void checkCurrent() {
if (!mEglContext.equals(mEgl.eglGetCurrentContext()) ||
!mEglSurface.equals(mEgl.eglGetCurrentSurface(EGL10.EGL_DRAW))) {
if (!mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
throw new RuntimeException("eglMakeCurrent failed "
+ GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
}
}
private void initGL() {
mEgl = (EGL10) EGLContext.getEGL();
mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (mEglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed "
+ GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
int[] version = new int[2];
if (!mEgl.eglInitialize(mEglDisplay, version)) {
throw new RuntimeException("eglInitialize failed " +
GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
mEglConfig = chooseEglConfig();
if (mEglConfig == null) {
throw new RuntimeException("eglConfig not initialized");
}
mEglContext = createContext(mEgl, mEglDisplay, mEglConfig);
mEglSurface = mEgl.eglCreateWindowSurface(mEglDisplay, mEglConfig, mSurface, null);
if (mEglSurface == null || mEglSurface == EGL10.EGL_NO_SURFACE) {
int error = mEgl.eglGetError();
if (error == EGL10.EGL_BAD_NATIVE_WINDOW) {
Log.e(LOG_TAG, "createWindowSurface returned EGL_BAD_NATIVE_WINDOW.");
return;
}
throw new RuntimeException("createWindowSurface failed "
+ GLUtils.getEGLErrorString(error));
}
if (!mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
throw new RuntimeException("eglMakeCurrent failed "
+ GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
mGL = mEglContext.getGL();
}
EGLContext createContext(EGL10 egl, EGLDisplay eglDisplay, EGLConfig eglConfig) {
int[] attrib_list = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
return egl.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
}
private EGLConfig chooseEglConfig() {
int[] configsCount = new int[1];
EGLConfig[] configs = new EGLConfig[1];
int[] configSpec = getConfig();
if (!mEgl.eglChooseConfig(mEglDisplay, configSpec, configs, 1, configsCount)) {
throw new IllegalArgumentException("eglChooseConfig failed " +
GLUtils.getEGLErrorString(mEgl.eglGetError()));
} else if (configsCount[0] > 0) {
return configs[0];
}
return null;
}
private int[] getConfig() {
return new int[] {
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_ALPHA_SIZE, 8,
EGL10.EGL_DEPTH_SIZE, 0,
EGL10.EGL_STENCIL_SIZE, 0,
EGL10.EGL_NONE
};
}
void finish() {
mFinished = true;
}
}
You could also have gone the NDK route, which would likely have been a more straightforward transition from Objective C.
I need some help.
How do I play video on a Surface (OpenGL) in Android?
I tried playing video in my SurfaceView subclass (mySurfaceView extends SurfaceView) with the help of MediaPlayer's setSurface() method.
SurfaceTexture mTexture = new SurfaceTexture(texture_id);
Surface mSurface = new Surface(mTexture);
MediaPlayer mp = new MediaPlayer();
mp.setSurface(mSurface);
I only get audio playing; the video does not play.
How do I get the video buffer to send it to OpenGL?
How do I play video onto a GL texture?
From android source code...
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;
class VideoSurfaceView extends GLSurfaceView {
VideoRender mRenderer;
private MediaPlayer mMediaPlayer = null;
public VideoSurfaceView(Context context, MediaPlayer mp) {
super(context);
setEGLContextClientVersion(2);
mMediaPlayer = mp;
mRenderer = new VideoRender(context);
setRenderer(mRenderer);
}
@Override
public void onResume() {
queueEvent(new Runnable(){
public void run() {
mRenderer.setMediaPlayer(mMediaPlayer);
}});
super.onResume();
}
private static class VideoRender
implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
private static String TAG = "VideoRender";
private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};
private FloatBuffer mTriangleVertices;
private final String mVertexShader =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private final String mFragmentShader =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];
private int mProgram;
private int mTextureID;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
private SurfaceTexture mSurface;
private boolean updateSurface = false;
private static int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private MediaPlayer mMediaPlayer;
public VideoRender(Context context) {
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
}
public void setMediaPlayer(MediaPlayer player) {
mMediaPlayer = player;
}
public void onDrawFrame(GL10 glUnused) {
synchronized(this) {
if (updateSurface) {
mSurface.updateTexImage();
mSurface.getTransformMatrix(mSTMatrix);
updateSurface = false;
}
}
GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
GLES20.glFinish();
}
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
}
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
mProgram = createProgram(mVertexShader, mFragmentShader);
if (mProgram == 0) {
return;
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uMVPMatrix");
}
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uSTMatrix");
}
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
/*
* Create the SurfaceTexture that will feed this textureID,
* and pass it to the MediaPlayer
*/
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);
Surface surface = new Surface(mSurface);
mMediaPlayer.setSurface(surface);
surface.release();
try {
mMediaPlayer.prepare();
} catch (IOException t) {
Log.e(TAG, "media player prepare failed");
}
synchronized(this) {
updateSurface = false;
}
mMediaPlayer.start();
}
synchronized public void onFrameAvailable(SurfaceTexture surface) {
updateSurface = true;
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}
private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
} // End of class VideoRender.
} // End of class VideoSurfaceView.
I guess you can't. At least that's what I found out.
My plan was to render some sort of OpenGL scene (a text ticker) while playing a video.
Since Android uses hardware decoding to display video, that part won't be done with OpenGL. I also tried to play the video in OpenGL using ffmpeg, but none of the devices I tried was fast enough to do software decoding via ffmpeg.
So I had to use a VideoView to display my video and put a GLSurfaceView on top of it to show my ticker text.
But you have to make the GLSurfaceView translucent, as done in the ApiDemos 'TranslucentGLSurfaceViewActivity'.
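That setup looks roughly like this in Kotlin (just a sketch: container and tickerRenderer are placeholder names for a FrameLayout that already holds the VideoView and for your own renderer; adjust the config to whatever your renderer needs):

import android.graphics.PixelFormat
import android.opengl.GLSurfaceView
import android.widget.FrameLayout

fun addTranslucentGlOverlay(container: FrameLayout, tickerRenderer: GLSurfaceView.Renderer): GLSurfaceView {
    val glView = GLSurfaceView(container.context)
    // glView.setEGLContextClientVersion(2)        // only if your renderer uses GLES 2.0
    glView.setEGLConfigChooser(8, 8, 8, 8, 16, 0)  // request an EGL config with an alpha channel
    glView.holder.setFormat(PixelFormat.TRANSLUCENT)
    glView.setZOrderOnTop(true)                    // keep the GL surface above the video surface
    glView.setRenderer(tickerRenderer)
    container.addView(glView)
    return glView
}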
Another thing I realized: if you put a GLSurfaceView on top of a VideoView, your frame rate drops dramatically, from 60 fps (OpenGL alone) to about 30-40 fps.
This applied to all of the Android 2.x versions I tested.
Last week I had the chance to test it on Android 4, and this time there was no drop in fps. Maybe they really did improve the graphics pipeline for ICS.
Greetings, -chris-
I just converted the Java version above to Kotlin:
internal inline fun <T> glRun(message: String = "", block: () -> T): T {
return block().also {
val error = GLES20.glGetError()
if (error != GLES20.GL_NO_ERROR) {
// Log the error code that glGetError() actually returned, then fail fast
Log.e("MOVIE_GL_ERROR", "$message: $error")
throw RuntimeException("GL Error: $message ($error)")
}
}
}
class MovieRenderer: GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
private var program = 0
private var textureId = 0
// Handles
private var mvpMatrixHandle = 0
private var stMatrixHandle = 0
private var positionHandle = 0
private var textureHandle = 0
// Surface Texture
private var updateSurface = false
private lateinit var surfaceTexture: SurfaceTexture
// Matrices
private var mvpMatrix = FloatArray(16)
private var stMatrix = FloatArray(16).also { Matrix.setIdentityM(it, 0) } // identity until the first frame arrives
// float buffer
private val vertices: FloatBuffer = ByteBuffer.allocateDirect(VERTICES_DATA.size * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder())
.asFloatBuffer().also {
it.put(VERTICES_DATA).position(0)
}
var mediaPlayer: MediaPlayer? = null
@Synchronized
override fun onFrameAvailable(surfaceTexture: SurfaceTexture?) {
updateSurface = true
}
override fun onDrawFrame(gl: GL10?) {
synchronized(this) {
if (updateSurface) {
surfaceTexture.updateTexImage()
surfaceTexture.getTransformMatrix(stMatrix)
updateSurface = false
}
}
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f)
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT or GLES20.GL_COLOR_BUFFER_BIT)
glRun("glUseProgram: $program") {
GLES20.glUseProgram(program)
}
vertices.position(VERTICES_POS_OFFSET)
glRun("glVertexAttribPointer: Stride bytes") {
GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false,
VERTICES_STRIDE_BYTES, vertices)
}
glRun("glEnableVertexAttribArray") {
GLES20.glEnableVertexAttribArray(positionHandle)
}
vertices.position(VERTICES_UV_OFFSET)
glRun("glVertexAttribPointer: texture handle") {
GLES20.glVertexAttribPointer(textureHandle, 2, GLES20.GL_FLOAT, false,
VERTICES_STRIDE_BYTES, vertices) // 2 floats (U, V) per vertex
}
glRun("glEnableVertexAttribArray") {
GLES20.glEnableVertexAttribArray(textureHandle)
}
Matrix.setIdentityM(mvpMatrix, 0)
glRun("glUniformMatrix4fv: mvpMatrix") {
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0)
}
glRun("glUniformMatrix4fv: stMatrix") {
GLES20.glUniformMatrix4fv(stMatrixHandle, 1, false, stMatrix, 0)
}
glRun("glDrawArrays: GL_TRIANGLE_STRIP") {
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
}
GLES20.glFinish()
}
override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
GLES20.glViewport(0, 0, width, height)
}
override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
program = createProgram()
positionHandle = "aPosition".attr()
textureHandle = "aTextureCoord".attr()
mvpMatrixHandle = "uMVPMatrix".uniform()
stMatrixHandle = "uSTMatrix".uniform()
createTexture()
}
private fun createTexture() {
val textures = IntArray(1)
GLES20.glGenTextures(1, textures, 0)
textureId = textures.first()
glRun("glBindTexture textureId") { GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId) }
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST)
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
surfaceTexture = SurfaceTexture(textureId)
surfaceTexture.setOnFrameAvailableListener(this)
val surface = Surface(surfaceTexture)
mediaPlayer?.setSurface(surface)
surface.release()
try {
mediaPlayer?.prepare()
} catch (error: IOException) {
Log.e("MovieRenderer", "media player prepare failed");
throw error
}
synchronized(this) {
updateSurface = false
}
mediaPlayer?.start()
}
private fun String.attr(): Int {
return glRun("Get attribute location: $this") {
GLES20.glGetAttribLocation(program, this).also {
if (it == -1) fail("Error Attribute: $this not found!")
}
}
}
private fun String.uniform(): Int {
return glRun("Get uniform location: $this") {
GLES20.glGetUniformLocation(program, this).also {
if (it == -1) fail("Error Uniform: $this not found!")
}
}
}
companion object {
private const val GL_TEXTURE_EXTERNAL_OES = 0x8D65
private const val FLOAT_SIZE_BYTES = 4
private const val VERTICES_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES
private const val VERTICES_POS_OFFSET = 0
private const val VERTICES_UV_OFFSET = 3
private val VERTICES_DATA = floatArrayOf(
-1.0f, -1.0f, 0f, 0.0f, 0.0f,
1.0f, -1.0f, 0f, 1.0f, 0.0f,
-1.0f, 1.0f, 0f, 0.0f, 1.0f,
1.0f, 1.0f, 0f, 1.0f, 1.0f
)
private const val VERTEX_SHADER = """
uniform mat4 uMVPMatrix;
uniform mat4 uSTMatrix;
attribute vec4 aPosition;
attribute vec4 aTextureCoord;
varying vec2 vTextureCoord;
void main() {
gl_Position = uMVPMatrix * aPosition;
vTextureCoord = (uSTMatrix * aTextureCoord).xy;
}
"""
private const val FRAGMENT_SHADER = """
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTextureCoord;
uniform samplerExternalOES sTexture;
void main() {
gl_FragColor = texture2D(sTexture, vTextureCoord);
}
"""
private fun createShader(type: Int, source: String): Int {
val shader = GLES20.glCreateShader(type)
if (shader == 0) throw RuntimeException("Cannot create shader $type\n$source")
GLES20.glShaderSource(shader, source)
GLES20.glCompileShader(shader)
val args = IntArray(1)
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, args, 0)
if (args.first() == 0) {
Log.e("MOVIE_SHADER", "Failed to compile shader source")
Log.e("MOVIE_SHADER", GLES20.glGetShaderInfoLog(shader))
GLES20.glDeleteShader(shader)
throw RuntimeException("Could not compile shader $source\n$type")
}
return shader
}
private fun createProgram(vertexShaderSource: String = VERTEX_SHADER,
fragmentShaderSource: String = FRAGMENT_SHADER): Int {
val vertexShader = createShader(GLES20.GL_VERTEX_SHADER, vertexShaderSource)
val fragmentShader = createShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderSource)
val program = GLES20.glCreateProgram()
if (program == 0) throw RuntimeException("Cannot create program")
glRun("Attach vertex shader to program") {
GLES20.glAttachShader(program, vertexShader)
}
glRun("Attach fragment shader to program") {
GLES20.glAttachShader(program, fragmentShader)
}
GLES20.glLinkProgram(program)
val args = IntArray(1)
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, args, 0)
if (args.first() != GLES20.GL_TRUE) {
val info = GLES20.glGetProgramInfoLog(program)
GLES20.glDeleteProgram(program)
throw RuntimeException("Cannot link program $program, Info: $info")
}
return program
}
private fun fail(message: String): Nothing {
throw RuntimeException(message)
}
}
}
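To try it out, the renderer can be hooked up to a GLSurfaceView roughly like this (a sketch only; the attachMovieRenderer, glSurfaceView and videoUri names are placeholders, not part of the conversion above):

import android.content.Context
import android.media.MediaPlayer
import android.net.Uri
import android.opengl.GLSurfaceView

fun attachMovieRenderer(context: Context, glSurfaceView: GLSurfaceView, videoUri: Uri) {
    val renderer = MovieRenderer()
    renderer.mediaPlayer = MediaPlayer().apply {
        // Only set the data source here; the renderer calls prepare() and start()
        // itself in onSurfaceCreated().
        setDataSource(context, videoUri)
    }
    glSurfaceView.setEGLContextClientVersion(2) // the shaders above are GLSL ES 2.0
    glSurfaceView.setRenderer(renderer)
}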
mMediaPlayer.setSurface(new Surface(mSurfaceTexture));
You can use the line above to have your MediaPlayer render into the SurfaceTexture of your choice, for example the one backing a TextureView.
Hope that helps.
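With a TextureView that would look roughly like this (a sketch only; the listener class name is a placeholder, and the MediaPlayer is assumed to already have its data source set):

import android.graphics.SurfaceTexture
import android.media.MediaPlayer
import android.view.Surface
import android.view.TextureView

class VideoTextureListener(private val mediaPlayer: MediaPlayer) : TextureView.SurfaceTextureListener {

    override fun onSurfaceTextureAvailable(surfaceTexture: SurfaceTexture, width: Int, height: Int) {
        val surface = Surface(surfaceTexture)
        mediaPlayer.setSurface(surface) // MediaPlayer renders straight into the TextureView
        surface.release()               // MediaPlayer keeps its own reference
        mediaPlayer.setOnPreparedListener { it.start() }
        mediaPlayer.prepareAsync()
    }

    override fun onSurfaceTextureSizeChanged(surfaceTexture: SurfaceTexture, width: Int, height: Int) = Unit

    override fun onSurfaceTextureDestroyed(surfaceTexture: SurfaceTexture): Boolean = true

    override fun onSurfaceTextureUpdated(surfaceTexture: SurfaceTexture) = Unit
}

// Usage: textureView.surfaceTextureListener = VideoTextureListener(mediaPlayer)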