Cannot get OpenGL ES Android plugin to show drawn vertex array - android

I am attempting to write a Flutter plugin for Android that lets me write pixels directly using a Texture, so I need to make a SurfaceTexture available, and I want to be able to draw arbitrary pixel data to it using a single textured quad. For now, for debugging, I am simply trying to draw a single cyan triangle over a magenta background to verify my vertices are being drawn correctly, but it appears they are not. The glClear call is doing what I expect: the magenta background is shown instead of the black that would otherwise be behind it, and I can change that color by changing what I pass to glClearColor, so the texture is being rendered in some way. However, I see no evidence that calling glDrawArrays accomplishes anything. The code containing all of my interfacing with OpenGL ES is in the file below, and the drawTextureToCurrentSurface method is where both glClear and glDrawArrays are called:
class EglContext {
companion object {
// Pass through position and UV values
val vertexSource = """
#version 300 es
precision mediump float;
/*layout(location = 0)*/ in vec2 position;
/*layout(location = 1)*/ in vec2 uv;
out vec2 uvOut;
void main() {
gl_Position = vec4(position, -0.5, 1.0);
uvOut = uv;
}
""".trimIndent()
// Eventually get the texture value, for now, just make it cyan so I can see it
val fragmentSource = """
#version 300 es
precision mediump float;
in vec2 uvOut;
out vec4 fragColor;
uniform sampler2D tex;
void main() {
vec4 texel = texture(tex, uvOut);
// Effectively ignore the texel without optimizing it out
fragColor = texel * 0.0001 + vec4(0.0, 1.0, 1.0, 1.0);
}
""".trimIndent()
var glThread: HandlerThread? = null
var glHandler: Handler? = null
}
private var display = EGL14.EGL_NO_DISPLAY
private var context = EGL14.EGL_NO_CONTEXT
private var config: EGLConfig? = null
private var vertexBuffer: FloatBuffer
private var uvBuffer: FloatBuffer
//private var indexBuffer: IntBuffer
private var defaultProgram: Int = -1
private var uniformTextureLocation: Int = -1
private var vertexLocation: Int = -1
private var uvLocation: Int = -1
var initialized = false
private fun checkGlError(msg: String) {
val errCodeEgl = EGL14.eglGetError()
val errCodeGl = GLES30.glGetError()
if (errCodeEgl != EGL14.EGL_SUCCESS || errCodeGl != GLES30.GL_NO_ERROR) {
throw RuntimeException(
"$msg - $errCodeEgl(${GLU.gluErrorString(errCodeEgl)}) : $errCodeGl(${
GLU.gluErrorString(
errCodeGl
)
})"
)
}
}
init {
// Flat square
// Am I allocating and writing to these correctly?
val vertices = floatArrayOf(-1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f)
vertexBuffer = ByteBuffer.allocateDirect(vertices.size * 4).asFloatBuffer().also {
it.put(vertices)
it.position(0)
}
val uv = floatArrayOf(0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f)
uvBuffer = ByteBuffer.allocateDirect(uv.size * 4).asFloatBuffer().also {
it.put(uv)
it.position(0)
}
// Not being used until I can figure out what's currently not working
/*val indices = intArrayOf(0, 1, 2, 2, 1, 3)
indexBuffer = ByteBuffer.allocateDirect(indices.size * 4).asIntBuffer().also {
it.position(0)
it.put(indices)
it.position(0)
}*/
if (glThread == null) {
glThread = HandlerThread("flutterSoftwareRendererPlugin")
glThread!!.start()
glHandler = Handler(glThread!!.looper)
}
}
// Run OpenGL code on a separate thread to keep the context available
private fun doOnGlThread(blocking: Boolean = true, task: () -> Unit) {
val semaphore: Semaphore? = if (blocking) Semaphore(0) else null
glHandler!!.post {
task.invoke()
semaphore?.release()
}
semaphore?.acquire()
}
fun setup() {
doOnGlThread {
Log.d("Native", "Setting up EglContext")
display = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY)
if (display == EGL14.EGL_NO_DISPLAY) {
Log.e("Native", "No display")
checkGlError("Failed to get display")
}
val versionBuffer = IntArray(2)
if (!EGL14.eglInitialize(display, versionBuffer, 0, versionBuffer, 1)) {
Log.e("Native", "Did not init")
checkGlError("Failed to initialize")
}
val configs = arrayOfNulls<EGLConfig>(1)
val configNumBuffer = IntArray(1)
var attrBuffer = intArrayOf(
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_DEPTH_SIZE, 16,
//EGL14.EGL_STENCIL_SIZE, 8,
//EGL14.EGL_SAMPLE_BUFFERS, 1,
//EGL14.EGL_SAMPLES, 4,
EGL14.EGL_NONE
)
if (!EGL14.eglChooseConfig(
display,
attrBuffer,
0,
configs,
0,
configs.size,
configNumBuffer,
0
)
) {
Log.e("Native", "No config")
checkGlError("Failed to choose a config")
}
if (configNumBuffer[0] == 0) {
Log.e("Native", "No config")
checkGlError("Got zero configs")
}
Log.d("Native", "Got Config x${configNumBuffer[0]}: ${configs[0]}")
config = configs[0]
attrBuffer = intArrayOf(
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE
)
context = EGL14.eglCreateContext(display, config, EGL14.EGL_NO_CONTEXT, attrBuffer, 0)
if (context == EGL14.EGL_NO_CONTEXT) {
Log.e("Native", "Failed to get any context")
checkGlError("Failed to get context")
}
Log.d("Native", "Context = $context\n 'Current' = ${EGL14.eglGetCurrentContext()}")
initialized = true
}
}
// Called by my plugin to get a surface to register for Texture widget
fun buildSurfaceTextureWindow(surfaceTexture: SurfaceTexture): EGLSurface {
var _surface: EGLSurface? = null
doOnGlThread {
val attribBuffer = intArrayOf(EGL14.EGL_NONE)
val surface =
EGL14.eglCreateWindowSurface(display, config, surfaceTexture, attribBuffer, 0)
if (surface == EGL14.EGL_NO_SURFACE) {
checkGlError("Obtained no surface")
}
EGL14.eglMakeCurrent(display, surface, surface, context)
Log.d("Native", "New current context = ${EGL14.eglGetCurrentContext()}")
if (defaultProgram == -1) {
defaultProgram = makeProgram(
mapOf(
GLES30.GL_VERTEX_SHADER to vertexSource,
GLES30.GL_FRAGMENT_SHADER to fragmentSource
)
)
uniformTextureLocation = GLES30.glGetUniformLocation(defaultProgram, "tex")
vertexLocation = GLES30.glGetAttribLocation(defaultProgram, "position")
uvLocation = GLES30.glGetAttribLocation(defaultProgram, "uv")
Log.d("Native", "Attrib locations $vertexLocation, $uvLocation")
checkGlError("Getting uniform")
}
_surface = surface
}
return _surface!!
}
fun makeCurrent(eglSurface: EGLSurface, width: Int, height: Int) {
doOnGlThread {
GLES30.glViewport(0, 0, width, height)
if (!EGL14.eglMakeCurrent(display, eglSurface, eglSurface, context)) {
checkGlError("Failed to make surface current")
}
}
}
fun makeTexture(width: Int, height: Int): Int {
var _texture: Int? = null
doOnGlThread {
val intArr = IntArray(1)
GLES30.glGenTextures(1, intArr, 0)
checkGlError("Generate texture")
Log.d("Native", "${EGL14.eglGetCurrentContext()} ?= ${EGL14.EGL_NO_CONTEXT}")
val texture = intArr[0]
Log.d("Native", "Texture = $texture")
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, texture)
checkGlError("Bind texture")
val buffer = ByteBuffer.allocateDirect(width * height * 4)
GLES30.glTexImage2D(
GLES30.GL_TEXTURE_2D,
0,
GLES30.GL_RGBA,
width,
height,
0,
GLES30.GL_RGBA,
GLES30.GL_UNSIGNED_BYTE,
buffer
)
checkGlError("Create texture buffer")
_texture = texture
}
return _texture!!
}
private fun compileShader(source: String, shaderType: Int): Int {
val currentContext = EGL14.eglGetCurrentContext()
val noContext = EGL14.EGL_NO_CONTEXT
val shaderId = GLES30.glCreateShader(shaderType)
Log.d("Native", "Created $shaderId\nContext $currentContext vs $noContext")
checkGlError("Create shader")
if (shaderId == 0) {
Log.e("Native", "Could not create shader for some reason")
checkGlError("Could not create shader")
}
GLES30.glShaderSource(shaderId, source)
checkGlError("Setting shader source")
GLES30.glCompileShader(shaderId)
val statusBuffer = IntArray(1)
GLES30.glGetShaderiv(shaderId, GLES30.GL_COMPILE_STATUS, statusBuffer, 0)
val shaderLog = GLES30.glGetShaderInfoLog(shaderId)
Log.d("Native", "Compiling shader #$shaderId : $shaderLog")
if (statusBuffer[0] == 0) {
GLES30.glDeleteShader(shaderId)
checkGlError("Failed to compile shader $shaderId")
}
return shaderId
}
private fun makeProgram(sources: Map<Int, String>): Int {
val currentContext = EGL14.eglGetCurrentContext()
val noContext = EGL14.EGL_NO_CONTEXT
val program = GLES30.glCreateProgram()
Log.d("Native", "Created $program\nContext $currentContext vs $noContext")
checkGlError("Create program")
sources.forEach {
val shader = compileShader(it.value, it.key)
GLES30.glAttachShader(program, shader)
}
val linkBuffer = IntArray(1)
GLES30.glLinkProgram(program)
GLES30.glGetProgramiv(program, GLES30.GL_LINK_STATUS, linkBuffer, 0)
if (linkBuffer[0] == 0) {
GLES30.glDeleteProgram(program)
checkGlError("Failed to link program $program")
}
return program
}
// Called to actually draw to the surface. When fully implemented it should draw whatever is
// on the associated texture, but for now, to debug, I just want to verify I can draw vertices,
// but it seems I cannot?
fun drawTextureToCurrentSurface(texture: Int, surface: EGLSurface) {
doOnGlThread {
// Verify I have a context
val currentContext = EGL14.eglGetCurrentContext()
val noContext = EGL14.EGL_NO_CONTEXT
Log.d("Native", "Drawing, Context = $currentContext vs $noContext")
checkGlError("Just checking first")
GLES30.glClearColor(1f, 0f, 1f, 1f)
GLES30.glClearDepthf(1f)
GLES30.glDisable(GLES30.GL_DEPTH_TEST)
GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT or GLES30.GL_DEPTH_BUFFER_BIT)
checkGlError("Clearing")
GLES30.glUseProgram(defaultProgram)
checkGlError("Use program")
GLES30.glActiveTexture(GLES30.GL_TEXTURE0)
checkGlError("Activate texture 0")
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, texture)
checkGlError("Bind texture $texture")
GLES30.glUniform1i(uniformTextureLocation, 0)
checkGlError("Set uniform")
GLES30.glEnableVertexAttribArray(vertexLocation)
vertexBuffer.position(0)
GLES30.glVertexAttribPointer(vertexLocation, 2, GLES30.GL_FLOAT, false, 0, vertexBuffer)
Log.d("Native", "Bound vertices (shader=$defaultProgram)")
checkGlError("Attribute 0")
GLES30.glEnableVertexAttribArray(uvLocation)
uvBuffer.position(0)
GLES30.glVertexAttribPointer(uvLocation, 2, GLES30.GL_FLOAT, false, 0, uvBuffer)
checkGlError("Attribute 1")
//indexBuffer.position(0)
//GLES30.glDrawElements(GLES30.GL_TRIANGLES, 4, GLES30.GL_UNSIGNED_INT, indexBuffer)
// I would expect to get a triangle of different color than the background
GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, 3)
GLES30.glFinish()
checkGlError("Finished GL")
EGL14.eglSwapBuffers(display, surface)
checkGlError("Swapped buffers")
}
}
...currently unused other methods
}
The general flow of the above code is that the init block executes when initializing the context, of which there is only one. setup is called when the plugin is registered, and buildSurfaceTextureWindow is called when initializing a SurfaceTexture for a Flutter Texture. The first time this is called, it compiles the shaders. When the plugin wants to render the texture, it calls makeCurrent then drawTextureToCurrentSurface, which is where the magenta background becomes visible but without any cyan triangle. Calls to GL functions are done in a separate thread using doOnGlThread.
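For reference, the call order described above looks roughly like this from the plugin side (just a sketch; surfaceTexture, width and height come from the Flutter Texture registration and are assumed here):
val egl = EglContext()
egl.setup()                                                      // once, at plugin registration
val eglSurface = egl.buildSurfaceTextureWindow(surfaceTexture)   // per Flutter Texture
val tex = egl.makeTexture(width, height)
// Every time the texture should be redrawn:
egl.makeCurrent(eglSurface, width, height)
egl.drawTextureToCurrentSurface(tex, eglSurface)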
If you need to see all of the code, including the full plugin implementation and an example app using it, I have it on GitHub, but as far as I can tell the code above should be the only part relevant to why no geometry is rendered in the effectively hardcoded color from my fragment shader.
tl;dr My background color from glClear shows up on screen, but my expected result of calling glDrawArrays, a cyan triangle, does not, and I am trying to understand why.

Apparently I needed to call .order(ByteOrder.nativeOrder()) on my buffers; without it, the vertex array data is not set up properly. I also needed to set glTexParameteri(GL_TEXTURE_2D, ...) for GL_TEXTURE_MIN/MAG_FILTER and GL_TEXTURE_WRAP_S/T; without that, every texture samples as solid black.
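For anyone hitting the same thing, a minimal sketch of both fixes applied to the code above (NEAREST and CLAMP_TO_EDGE are just example choices; any complete filter/wrap combination should do):
// Buffers: set the native byte order before viewing the ByteBuffer as a FloatBuffer
vertexBuffer = ByteBuffer.allocateDirect(vertices.size * 4)
    .order(ByteOrder.nativeOrder())
    .asFloatBuffer()
    .also {
        it.put(vertices)
        it.position(0)
    }
// Texture: set filtering and wrapping right after glBindTexture in makeTexture;
// otherwise the texture is incomplete and samples as black
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_NEAREST)
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_NEAREST)
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE)
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE)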

Related

Only seeing glClear color with no geometry on Android OpenGL ES

I am attempting to write a very simple Android application that uses OpenGL to display a green background and render a single cyan triangle over it, to verify that my code properly displays some geometry. I am running into an issue where the glClearColor/glClear call that sets the background works and makes the view green as intended, but I see no visible change from calling glDrawArrays after pointing the 0th attribute array at a FloatBuffer containing vertex coords. All my vertex shader does is pass the position as a vec4 to the fragment shader, which always sets the output color to cyan, so I would expect to see the green view with one cyan triangle. Instead I see only the green background, and I am unsure why.
Main activity file (imports omitted):
setupDefaultProgram()
}
override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
GLES30.glViewport(0, 0, width, height)
}
override fun onDrawFrame(gl: GL10?) {
glRenderer.renderFunc()
}
}
Renderer code:
class GlRenderer {
companion object {
// Pass through position and UV values
val vertexSource = """
#version 300 es
in vec2 position;
void main() {
gl_Position = vec4(position, -0.5, 1.0);
}
""".trimIndent()
// Eventually get the texture value, for now, just make it cyan so I can see it
val fragmentSource = """
#version 300 es
precision mediump float;
out vec4 fragColor;
void main() {
fragColor = vec4(0.0, 1.0, 1.0, 1.0);
}
""".trimIndent()
}
private var vertexBuffer: FloatBuffer
private var defaultProgram: Int = -1
private var vertexLocation: Int = -1
private fun checkGlError(msg: String) {
val errCodeEgl = EGL14.eglGetError()
val errCodeGl = GLES30.glGetError()
if (errCodeEgl != EGL14.EGL_SUCCESS || errCodeGl != GLES30.GL_NO_ERROR) {
throw RuntimeException(
"$msg - $errCodeEgl(${GLU.gluErrorString(errCodeEgl)}) : $errCodeGl(${
GLU.gluErrorString(
errCodeGl
)
})"
)
}
}
init {
// Flat square
// Am I allocating and writing to these correctly?
val vertices = floatArrayOf(-1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f)
vertexBuffer = ByteBuffer.allocateDirect(vertices.size * 4).asFloatBuffer().also {
it.put(vertices)
it.position(0)
}
}
fun setupDefaultProgram() {
defaultProgram = makeProgram(
mapOf(
GLES30.GL_VERTEX_SHADER to vertexSource,
GLES30.GL_FRAGMENT_SHADER to fragmentSource
)
)
vertexLocation = GLES30.glGetAttribLocation(defaultProgram, "position")
checkGlError("Getting uniform")
}
private fun compileShader(source: String, shaderType: Int): Int {
val shaderId = GLES30.glCreateShader(shaderType)
checkGlError("Create shader")
if (shaderId == 0) {
Log.e("Native", "Could not create shader for some reason")
checkGlError("Could not create shader")
}
GLES30.glShaderSource(shaderId, source)
checkGlError("Setting shader source")
GLES30.glCompileShader(shaderId)
val statusBuffer = IntArray(1)
GLES30.glGetShaderiv(shaderId, GLES30.GL_COMPILE_STATUS, statusBuffer, 0)
val shaderLog = GLES30.glGetShaderInfoLog(shaderId)
Log.d("Native", "Compiling shader #$shaderId : $shaderLog")
if (statusBuffer[0] == 0) {
GLES30.glDeleteShader(shaderId)
checkGlError("Failed to compile shader $shaderId")
}
return shaderId
}
private fun makeProgram(sources: Map<Int, String>): Int {
val program = GLES30.glCreateProgram()
checkGlError("Create program")
sources.forEach {
val shader = compileShader(it.value, it.key)
GLES30.glAttachShader(program, shader)
}
val linkBuffer = IntArray(1)
GLES30.glLinkProgram(program)
GLES30.glGetProgramiv(program, GLES30.GL_LINK_STATUS, linkBuffer, 0)
if (linkBuffer[0] == 0) {
GLES30.glDeleteProgram(program)
checkGlError("Failed to link program $program")
}
return program
}
// Called to actually draw to the surface. When fully implemented it should draw whatever is
// on the associated texture, but for now, to debug, I just want to verify I can draw vertices,
// but it seems I cannot?
fun renderFunc() {
GLES30.glClearColor(0f, 1f, 0.5f, 1f)
GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT)
checkGlError("Clearing")
GLES30.glUseProgram(defaultProgram)
checkGlError("Use program")
GLES30.glEnableVertexAttribArray(vertexLocation)
vertexBuffer.position(0)
FloatArray(2 * 4).apply {
vertexBuffer.get(this)
vertexBuffer.position(0)
Log.d("Native", "Vertex buffer ${contentToString()}")
}
GLES30.glVertexAttribPointer(vertexLocation, 2, GLES30.GL_FLOAT, false, 0, vertexBuffer)
checkGlError("Attribute 0")
// Just render a triangle
GLES30.glDrawArrays(GLES30.GL_TRIANGLES, 0, 3)
GLES30.glFinish()
checkGlError("Finished GL")
}
}
My debug output logs what I would expect:
D/Native: Compiling shader #2 :
D/Native: Compiling shader #3 :
D/Native: Vertex buffer [-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0]
I am including <uses-feature android:glEsVersion="0x00020000" android:required="true" /> in my manifest. Where am I going wrong that the geometry of the triangle is either not rendered to the screen or is not visible?
When you create a Buffer for GL, you need to specify the native byte order.
val vertices = floatArrayOf(-1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f)
vertexBuffer = ByteBuffer.allocateDirect(vertices.size * 4).order(ByteOrder.nativeOrder()).asFloatBuffer().also {
it.put(vertices)
it.position(0)
}
To expand on @ardget's answer: a byte order is the internal ordering of the ByteBuffer's data, which determines how the data is laid out in memory.
There are two possible byte orders: little-endian and big-endian. I won't go into too much detail here, but big-endian stores data from the most significant to the least significant byte, and little-endian stores data from the least significant to the most significant byte. Your machine has a native order, which can be either little-endian or big-endian.
When you call ByteBuffer.allocateDirect, Java sets the byte order to big-endian. But when you then hand that buffer to OpenGL (via glVertexAttribPointer, glBufferData, and so on), OpenGL expects the data to be in the machine's native order. If the native order is not big-endian but little-endian (as on your machine), problems occur.
You can fix this by explicitly setting the byte order to the native one with .order(ByteOrder.nativeOrder()).
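If you want to see the mismatch for yourself, the default and native orders can be printed with a quick standalone check (nothing Android-specific about it):
import java.nio.ByteBuffer
import java.nio.ByteOrder

fun main() {
    println(ByteBuffer.allocateDirect(4).order())  // BIG_ENDIAN - the Java default
    println(ByteOrder.nativeOrder())               // typically LITTLE_ENDIAN on ARM/x86 devices
}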

How to deal with orientation change in Open GL ES

I'm trying out OpenGL ES 2 on Android. So far I have been able to initialize GLES20 and draw some simple shapes with it. The problem is that I can't find information on how GLES20 should be configured for a screen orientation change; for now I just get a black screen after device rotation. Is it even possible for GLSurfaceView to rotate after the screen has been rotated, or do I have to do it manually using some matrix?
Open GL init code:
object OpenGLBuilder {
val vertexShaderCode =
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = vPosition;" +
"}"
val fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}"
var mProgram = -1;
init {
val vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode)
val fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode)
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram().also {
// add the vertex shader to program
GLES20.glAttachShader(it, vertexShader)
// add the fragment shader to program
GLES20.glAttachShader(it, fragmentShader)
// creates OpenGL ES program executables
GLES20.glLinkProgram(it)
}
}
fun loadShader(type: Int, shaderCode: String): Int {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
return GLES20.glCreateShader(type).also { shader ->
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode)
GLES20.glCompileShader(shader)
}
}
}
My shape class:
class Triangle(val triangleCoords: FloatArray) {
// Set color with red, green, blue and alpha (opacity) values
val color = floatArrayOf(0.63671875f, 0.76953125f, 0.22265625f, 1.0f)
private var vertexBuffer: FloatBuffer =
// (number of coordinate values * 4 bytes per float)
ByteBuffer.allocateDirect(triangleCoords.size * 4).run {
// use the device hardware's native byte order
order(ByteOrder.nativeOrder())
// create a floating point buffer from the ByteBuffer
asFloatBuffer().apply {
// add the coordinates to the FloatBuffer
put(triangleCoords)
// set the buffer to read the first coordinate
position(0)
}
}
private val COORDS_PER_VERTEX = 3
private var positionHandle = 0
private var mColorHandle = 0;
private val vertexCount = triangleCoords.size / COORDS_PER_VERTEX
private val vertexStride = COORDS_PER_VERTEX * 4
fun draw(program: Int) {
GLES20.glUseProgram(program)
positionHandle = GLES20.glGetAttribLocation(program, "vPosition").also {
GLES20.glEnableVertexAttribArray(it)
GLES20.glVertexAttribPointer(
it,
COORDS_PER_VERTEX,
GLES20.GL_FLOAT,
false,
vertexStride,
vertexBuffer
)
}
mColorHandle = GLES20.glGetUniformLocation(program, "vColor").also{
GLES20.glUniform4fv(mColorHandle, 1, color, 0)
}
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount)
GLES20.glDisableVertexAttribArray(positionHandle)
}
}
Then I just create and use single instances of shapes like this:
object SceneObjects {
val triangle1 = Triangle(floatArrayOf(
0f, 0.5f, 0f,
-0.5f, -0.3f, 0f,
0.5f, -0.3f, 0f
))
val square1 = Square2(floatArrayOf(
-0.5f, 0.5f, 0f,
-0.5f, -0.5f, 0f,
0.5f, -0.5f, 0f,
0.5f, 0.5f, 0f
))
}
And GLSurfaceView:
class MyGLSurfaceView(context: Context) : GLSurfaceView(context), GLSurfaceView.Renderer {
init{
setEGLContextClientVersion(2)
setRenderer(this)
// Render the view only when there is a change in the drawing data
renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
}
override fun onSurfaceCreated(unused: GL10, config: EGLConfig) {
// Set the background frame color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f)
}
override fun onDrawFrame(unused: GL10) {
// Redraw background color
SceneObjects.triangle1.draw(OpenGLBuilder.mProgram)
}
override fun onSurfaceChanged(unused: GL10, width: Int, height: Int) {
GLES20.glViewport(0, 0, width, height)
}
}
I think there are two things you want to do to handle orientation change neatly.
Firstly, by default your OpenGL context and all created objects are destroyed when entering the background. There's an argument that you should just let that happen and add code to recreate all the resources, but if you'd rather have an easy life then just use setPreserveEGLContextOnPause and never worry about it again.
You might be thinking "but it was an orientation change, my app didn't enter the background". Well, by default an Android activity gets restarted after an orientation change which might be messing things up. This behaviour is detailed here. You probably want to use this lot in the manifest for your activity: android:configChanges="orientation|screenSize|screenLayout|keyboardHidden"
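Applied to the MyGLSurfaceView from the question, the first suggestion is a one-line addition to the init block (a sketch; the rest of the class stays as it is):
init {
    setEGLContextClientVersion(2)
    // Keep the EGL context and the GL objects created in it (program, buffers, textures)
    // alive across onPause/onResume instead of losing them on rotation.
    preserveEGLContextOnPause = true
    setRenderer(this)
    renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
}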

How to render Bitmap off-screen in Android using OpenGL?

I need to render a bitmap without displaying it on the screen. For that I create an OpenGL context using EGL14 as described in this answer. Then I save the OpenGL surface to a bitmap using GLES20.glReadPixels. But for some reason it is not rendered as expected and is just transparent.
import android.graphics.Bitmap
import android.opengl.*
import android.opengl.EGL14.EGL_CONTEXT_CLIENT_VERSION
import java.nio.ByteBuffer
class Renderer {
private lateinit var display: EGLDisplay
private lateinit var surface: EGLSurface
private lateinit var eglContext: EGLContext
fun draw() {
// Just a stub that fills the bitmap with red color
GLES20.glClearColor(1f, 0f, 0f, 1f)
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
}
fun saveBitmap(): Bitmap {
val width = 320
val height = 240
val mPixelBuf = ByteBuffer.allocate(width * height * 4)
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf)
return Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
}
private fun initializeEglContext() {
display = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY)
if (display == EGL14.EGL_NO_DISPLAY) {
throw RuntimeException("eglGetDisplay failed ${EGL14.eglGetError()}")
}
val versions = IntArray(2)
if (!EGL14.eglInitialize(display, versions, 0, versions, 1)) {
throw RuntimeException("eglInitialize failed ${EGL14.eglGetError()}")
}
val configAttr = intArrayOf(
EGL14.EGL_COLOR_BUFFER_TYPE, EGL14.EGL_RGB_BUFFER,
EGL14.EGL_LEVEL, 0,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
EGL14.EGL_NONE
)
val configs: Array<EGLConfig?> = arrayOfNulls(1)
val numConfig = IntArray(1)
EGL14.eglChooseConfig(
display, configAttr, 0,
configs, 0, 1, numConfig, 0
)
if (numConfig[0] == 0) {
throw RuntimeException("No configs found")
}
val config: EGLConfig? = configs[0]
val surfAttr = intArrayOf(
EGL14.EGL_WIDTH, 320,
EGL14.EGL_HEIGHT, 240,
EGL14.EGL_NONE
)
surface = EGL14.eglCreatePbufferSurface(display, config, surfAttr, 0)
val contextAttrib = intArrayOf(
EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
)
eglContext = EGL14.eglCreateContext(display, config, EGL14.EGL_NO_CONTEXT, contextAttrib, 0)
EGL14.eglMakeCurrent(display, surface, surface, eglContext)
}
fun destroy() {
EGL14.eglMakeCurrent(display, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT)
EGL14.eglDestroySurface(display, surface)
EGL14.eglDestroyContext(display, eglContext)
EGL14.eglTerminate(display)
}
}
This is how I use it:
val renderer = Renderer()
renderer.initializeEglContext()
renderer.draw()
val bitmap = renderer.saveBitmap()
renderer.destroy()
The code runs without any errors. I checked that the context is created successfully; for example, GLES20.glCreateProgram works as expected and returns a valid id. The only warning I get is
W/OpenGLRenderer: Failed to choose config with EGL_SWAP_BEHAVIOR_PRESERVED, retrying without...
But I'm not sure if it affects the result in any way.
However, the bitmap is not filled with color and is transparent:
val color = bitmap[0, 0]
Log.d("Main", "onCreate: ${Color.valueOf(color)}")
Color(0.0, 0.0, 0.0, 0.0, sRGB IEC61966-2.1)
I guess I'm missing something, but I can't figure out what. How do I make it actually render?
The pixel buffer must be copied into the bitmap:
val bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
bitmap.copyPixelsFromBuffer(mPixelBuf)
return bitmap
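For completeness, here is saveBitmap with the copy folded in (a sketch based on the answer; note that glReadPixels returns rows bottom-up, so the result may appear vertically flipped):
fun saveBitmap(): Bitmap {
    val width = 320
    val height = 240
    val pixelBuf = ByteBuffer.allocateDirect(width * height * 4)
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuf)
    pixelBuf.rewind()
    val bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
    // Copy the RGBA bytes read back from GL into the bitmap's pixel storage
    bitmap.copyPixelsFromBuffer(pixelBuf)
    return bitmap
}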

How to repair Corrupted Mp4 file generated by MediaMuxer?

I used MediaMuxer and MediaCodec to generate an MP4 video.
The video is playable after I call mMediaMuxer.stop().
However, when the user quits the app before I get the chance to call the stop() method, I am left with a big MP4 file that is not playable.
Is there any way to repair this MP4 file to make it playable?
Edit
Here is one example of a corrupted MP4 file.
I was able to repair the file using this online tool, but the tool asked me to upload a non-corrupted video as a reference.
Here is the non-corrupted MP4 video that I used as the reference. When I uploaded this video, the tool repaired my broken MP4 file.
So it is possible to repair the file, but how did they do it?
If useful, here is the code I used to generate both the corrupted and the non-corrupted files:
package com.tolotra.images_to_video
import android.content.ContentValues.TAG
import android.content.Context
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.media.*
import android.opengl.*
import android.util.Log
import android.util.TimingLogger
import android.view.Surface
import java.io.File
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.FloatBuffer
import java.nio.IntBuffer
import java.text.SimpleDateFormat
import java.util.*
class VideoBuilder(applicationContext: Context) {
private var frameId: Long = 0
private lateinit var muxer: MediaMuxer
private lateinit var glTool: OverlayRenderer
private lateinit var encoder: MediaCodec
private lateinit var outVideoFilePath: String
private var context = applicationContext
private var trackIndex: Int = 0
private lateinit var bufferInfo: MediaCodec.BufferInfo
private var eglContext: EGLContext? = null
private var eglDisplay: EGLDisplay? = null
private var eglSurface: EGLSurface? = null
private lateinit var surface: Surface
val timeoutUs = 10000L
val frameRate = 5
var presentationTimeUs: Long = 0
fun setup() {
encoder = createEncoder()
initInputSurface(encoder)
encoder.start()
outVideoFilePath = getScreenshotPath("tolotra-screen-recoder-${Date().time}.mp4")
muxer = MediaMuxer(outVideoFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
glTool = OverlayRenderer()
glTool.initGl()
}
/**
* timelapse is the duration between the current frame and the previous frame
*/
fun feed(bitmap: Bitmap, timelapse: Long) {
frameId++
Log.d("FEED_PROFILE", "feed frame:$frameId")
val timings = TimingLogger("FEED_PROFILE", "feed frame:$frameId")
// Get encoded data and feed it to muxer
drainEncoder(encoder, muxer, false, timelapse)
timings.addSplit("drainEncoder done");
// Render the bitmap/texture with OpenGL here
glTool.render(bitmap)
timings.addSplit("render done");
// Set timestamp with EGL extension
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, presentationTimeUs * 1000)
// Feed encoder with next frame produced by OpenGL
EGL14.eglSwapBuffers(eglDisplay, eglSurface)
timings.dumpToLog();
}
fun finish() {
Log.d(TAG, "Finishing")
// Drain last encoded data and finalize the video file
drainEncoder(encoder, muxer, true, 0)
_cleanUp(encoder, muxer)
val file = File(outVideoFilePath)
val file_size = (file.length() / 1024).toString().toInt()
val retriever = MediaMetadataRetriever()
retriever.setDataSource(outVideoFilePath)
val width =
retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)
val height =
retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)
val rotation =
retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION)
val bitRate =
retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE)
val duration =
java.lang.Long.valueOf(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)) * 1000
Log.d("Result", "bitrate $bitRate duration $duration fileSize $file_size ")
}
fun getScreenshotPath(fileName: String): String {
val f = context.externalCacheDir
val externalDir: String = f!!.path;
val sDir: String = externalDir + File.separator + "Screen Recorder";
val dir = File(sDir);
val dirPath: String;
if (dir.exists() || dir.mkdir()) {
dirPath = sDir + File.separator + fileName;
} else {
dirPath = externalDir + File.separator + fileName
}
Log.d("Mp4 file path", "Path: $dirPath")
return dirPath;
} //
fun createEncoder(): MediaCodec {
bufferInfo = MediaCodec.BufferInfo()
val MIME = "video/avc"
val encoder = MediaCodec.createEncoderByType(MIME)
val width = 320
val heigh = 512
val format = MediaFormat.createVideoFormat(MIME, width, heigh)
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
// format.setInteger(MediaFormat.KEY_BIT_RATE, 2_000_000)
format.setInteger(MediaFormat.KEY_BIT_RATE, 350_000)
format.setInteger(MediaFormat.KEY_FRAME_RATE, 45)
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5)
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
trackIndex = -1;
return encoder
}
fun drainEncoder(
encoder: MediaCodec,
muxer: MediaMuxer,
endOfStream: Boolean,
timelapseUs: Long
) {
if (endOfStream)
encoder.signalEndOfInputStream()
while (true) {
val outBufferId = encoder.dequeueOutputBuffer(bufferInfo, timeoutUs)
if (outBufferId >= 0) {
val encodedBuffer = encoder.getOutputBuffer(outBufferId)
// MediaMuxer is ignoring KEY_FRAMERATE, so I set it manually here
// to achieve the desired frame rate
bufferInfo.presentationTimeUs = presentationTimeUs
if (encodedBuffer != null) {
muxer.writeSampleData(trackIndex, encodedBuffer, bufferInfo)
}
presentationTimeUs += timelapseUs
encoder.releaseOutputBuffer(outBufferId, false)
// Are we finished here?
if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
break
} else if (outBufferId == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (!endOfStream)
break
// End of stream, but still no output available. Try again.
} else if (outBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
trackIndex = muxer.addTrack(encoder.outputFormat)
muxer.start()
}
}
}
private fun initInputSurface(encoder: MediaCodec) {
val surface = encoder.createInputSurface()
val eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY)
if (eglDisplay == EGL14.EGL_NO_DISPLAY)
throw RuntimeException(
"eglDisplay == EGL14.EGL_NO_DISPLAY: "
+ GLUtils.getEGLErrorString(EGL14.eglGetError())
)
val version = IntArray(2)
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1))
throw RuntimeException("eglInitialize(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()))
val attribList = intArrayOf(
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGLExt.EGL_RECORDABLE_ANDROID, 1,
EGL14.EGL_NONE
)
val configs = arrayOfNulls<EGLConfig>(1)
val nConfigs = IntArray(1)
EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.size, nConfigs, 0)
var err = EGL14.eglGetError()
if (err != EGL14.EGL_SUCCESS)
throw RuntimeException(GLUtils.getEGLErrorString(err))
val ctxAttribs = intArrayOf(
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
)
val eglContext =
EGL14.eglCreateContext(eglDisplay, configs[0], EGL14.EGL_NO_CONTEXT, ctxAttribs, 0)
err = EGL14.eglGetError()
if (err != EGL14.EGL_SUCCESS)
throw RuntimeException(GLUtils.getEGLErrorString(err))
val surfaceAttribs = intArrayOf(
EGL14.EGL_NONE
)
val eglSurface =
EGL14.eglCreateWindowSurface(eglDisplay, configs[0], surface, surfaceAttribs, 0)
err = EGL14.eglGetError()
if (err != EGL14.EGL_SUCCESS)
throw RuntimeException(GLUtils.getEGLErrorString(err))
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext))
throw RuntimeException("eglMakeCurrent(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()))
this.eglSurface = eglSurface
this.eglDisplay = eglDisplay
this.eglContext = eglContext
this.surface = surface
}
private fun _cleanUp(encoder: MediaCodec, muxer: MediaMuxer) {
if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
EGL14.eglDestroySurface(eglDisplay, eglSurface)
EGL14.eglDestroyContext(eglDisplay, eglContext)
EGL14.eglReleaseThread()
EGL14.eglTerminate(eglDisplay);
}
surface?.release();
eglDisplay = EGL14.EGL_NO_DISPLAY
eglContext = EGL14.EGL_NO_CONTEXT
eglSurface = EGL14.EGL_NO_SURFACE
encoder.stop()
encoder.release()
muxer.stop()
muxer.release()
}
}
class OverlayRenderer() {
private val mvpMatrix = FloatArray(16)
private val projectionMatrix = FloatArray(16)
private val viewMatrix = FloatArray(16)
private val vertexShaderCode =
"precision highp float;\n" +
"attribute vec3 vertexPosition;\n" +
"attribute vec2 uvs;\n" +
"varying vec2 varUvs;\n" +
"uniform mat4 mvp;\n" +
"\n" +
"void main()\n" +
"{\n" +
"\tvarUvs = uvs;\n" +
"\tgl_Position = mvp * vec4(vertexPosition, 1.0);\n" +
"}"
private val fragmentShaderCode =
"precision mediump float;\n" +
"\n" +
"varying vec2 varUvs;\n" +
"uniform sampler2D texSampler;\n" +
"\n" +
"void main()\n" +
"{\t\n" +
"\tgl_FragColor = texture2D(texSampler, varUvs);\n" +
"}"
private var vertices = floatArrayOf(
// x, y, z, u, v
-1.0f, -1.0f, 0.0f, 0f, 0f,
-1.0f, 1.0f, 0.0f, 0f, 1f,
1.0f, 1.0f, 0.0f, 1f, 1f,
1.0f, -1.0f, 0.0f, 1f, 0f
)
private var indices = intArrayOf(
2, 1, 0, 0, 3, 2
)
private var program: Int = 0
private var vertexHandle: Int = 0
private var bufferHandles = IntArray(2)
private var uvsHandle: Int = 0
private var mvpHandle: Int = 0
private var samplerHandle: Int = 0
private val textureHandle = IntArray(1)
val viewportWidth = 320
val viewportHeight = 486
var vertexBuffer: FloatBuffer = ByteBuffer.allocateDirect(vertices.size * 4).run {
order(ByteOrder.nativeOrder())
asFloatBuffer().apply {
put(vertices)
position(0)
}
}
var indexBuffer: IntBuffer = ByteBuffer.allocateDirect(indices.size * 4).run {
order(ByteOrder.nativeOrder())
asIntBuffer().apply {
put(indices)
position(0)
}
}
fun render(bitmap: Bitmap) {
Log.d("Bitmap", "width ${bitmap.width} height ${bitmap.height}")
// Prepare some transformations
val mvp = FloatArray(16)
Matrix.setIdentityM(mvp, 0)
Matrix.scaleM(mvp, 0, 1f, -1f, 1f)
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT or GLES20.GL_DEPTH_BUFFER_BIT)
GLES20.glClearColor(0f, 0f, 0f, 0f)
GLES20.glViewport(0, 0, viewportWidth, viewportHeight)
GLES20.glUseProgram(program)
// Pass transformations to shader
GLES20.glUniformMatrix4fv(mvpHandle, 1, false, mvp, 0)
// Prepare texture for drawing
GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0])
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1)
// Pass the Bitmap to OpenGL here
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0)
GLES20.glTexParameteri(
GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST
)
GLES20.glTexParameteri(
GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_NEAREST
)
// Prepare buffers with vertices and indices & draw
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0])
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1])
GLES20.glEnableVertexAttribArray(vertexHandle)
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 4 * 5, 0)
GLES20.glEnableVertexAttribArray(uvsHandle)
GLES20.glVertexAttribPointer(uvsHandle, 2, GLES20.GL_FLOAT, false, 4 * 5, 3 * 4)
GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_INT, 0)
}
fun initGl() {
val vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER).also { shader ->
GLES20.glShaderSource(shader, vertexShaderCode)
GLES20.glCompileShader(shader)
}
val fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER).also { shader ->
GLES20.glShaderSource(shader, fragmentShaderCode)
GLES20.glCompileShader(shader)
}
program = GLES20.glCreateProgram().also {
GLES20.glAttachShader(it, vertexShader)
GLES20.glAttachShader(it, fragmentShader)
GLES20.glLinkProgram(it)
vertexHandle = GLES20.glGetAttribLocation(it, "vertexPosition")
uvsHandle = GLES20.glGetAttribLocation(it, "uvs")
mvpHandle = GLES20.glGetUniformLocation(it, "mvp")
samplerHandle = GLES20.glGetUniformLocation(it, "texSampler")
}
// Initialize buffers
GLES20.glGenBuffers(2, bufferHandles, 0)
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0])
GLES20.glBufferData(
GLES20.GL_ARRAY_BUFFER,
vertices.size * 4,
vertexBuffer,
GLES20.GL_DYNAMIC_DRAW
)
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1])
GLES20.glBufferData(
GLES20.GL_ELEMENT_ARRAY_BUFFER,
indices.size * 4,
indexBuffer,
GLES20.GL_DYNAMIC_DRAW
)
// Init texture handle
GLES20.glGenTextures(1, textureHandle, 0)
// Ensure I can draw transparent stuff that overlaps properly
GLES20.glEnable(GLES20.GL_BLEND)
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA)
}
}
In general, MP4 is not a good recording format. Usually the sample table is kept in memory and written on close, so in case of a power loss or an application bug you lose the recording. Use an MPEG-2 Transport Stream or a fragmented MP4 instead; then most of the written media remains playable. Most likely your file contains just an MP4 'ftyp' and 'mdat' atom with the audio and video interleaved. With some educated guessing and knowledge about the video stream there is a chance to extract the audio and video. https://fix.video seems to do it.
Correct MP4:
[ftyp]
[mdat]
[moov]
-end-
Truncated MP4:
[ftyp]
[mdat]
-end-
Fix.video parses your good file and extracts the settings for audio and video.
It uses the information from the good file to recreate most of the 'moov' atom. The missing sample tables ('stXX') are recreated by parsing your 'mdat' atom. The video chunks inside the 'mdat' atom are each prefixed with their length, and the rest must be AAC audio.
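To see which of the two layouts above a given recording ended up with, the top-level atoms can be listed with a few lines of Kotlin (a diagnostic sketch only, not a repair tool; listTopLevelAtoms is a made-up helper name):
import java.io.RandomAccessFile

fun listTopLevelAtoms(path: String): List<String> {
    val atoms = mutableListOf<String>()
    RandomAccessFile(path, "r").use { f ->
        var pos = 0L
        val len = f.length()
        while (pos + 8 <= len) {
            f.seek(pos)
            var size = f.readInt().toLong() and 0xFFFFFFFFL     // 32-bit atom size, big-endian
            val type = ByteArray(4).also { f.readFully(it) }.toString(Charsets.US_ASCII)
            if (size == 1L) size = f.readLong()                  // 64-bit "largesize" follows the type
            if (size == 0L) size = len - pos                     // atom extends to end of file
            atoms += type
            pos += size
        }
    }
    return atoms  // e.g. [ftyp, mdat, moov] when playable, [ftyp, mdat] when truncated
}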

OpenGL ES 2.0 not drawing in Android Lollipop (5.1)

I'm new to OpenGL ES programming, so I followed the guide provided by the Android Developers site. It shows how to draw a simple green triangle that rotates on the screen following the touch point. I tried it on my tablet (Galaxy Tab A10, Android Oreo - 8) and it worked, and also on a Huawei (Android Marshmallow - 6.0). The problem is that the triangle is not shown on my Galaxy J3 (Android Lollipop - 5.1), with no log errors and 0 returned by every glGetError() call; the only thing I can see is the background color change.
I couldn't find similar problems here on SO or on the web; has anyone had the same problem?
(The language used is Kotlin, but I think it's a conceptual question, so please take a look even if the code is slightly different from Java.)
Game Activity:
import android.content.Context
import android.opengl.GLSurfaceView
import android.os.Bundle
import android.support.v7.app.AppCompatActivity
import android.view.MotionEvent
class GameActivity : AppCompatActivity() {
private lateinit var mGLView: GLSurfaceView
public override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
// Create a GLSurfaceView instance and set it
// as the ContentView for this Activity.
mGLView = MyGLSurfaceView(this)
setContentView(mGLView)
}
class MyGLSurfaceView(context: Context) : GLSurfaceView(context) {
private val mRenderer: MyGLRenderer
init {
// Create an OpenGL ES 2.0 context
setEGLContextClientVersion(2)
mRenderer = MyGLRenderer()
// Set the Renderer for drawing on the GLSurfaceView
setRenderer(mRenderer)
renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
}
private val TOUCH_SCALE_FACTOR: Float = 180.0f / 320f
private var previousX: Float = 0f
private var previousY: Float = 0f
override fun onTouchEvent(e: MotionEvent): Boolean {
// MotionEvent reports input details from the touch screen
// and other input controls. In this case, you are only
// interested in events where the touch position changed.
val x: Float = e.x
val y: Float = e.y
when (e.action) {
MotionEvent.ACTION_MOVE -> {
var dx: Float = x - previousX
var dy: Float = y - previousY
// reverse direction of rotation above the mid-line
if (y > height / 2) {
dx *= -1
}
// reverse direction of rotation to left of the mid-line
if (x < width / 2) {
dy *= -1
}
mRenderer.angle += (dx + dy) * TOUCH_SCALE_FACTOR
requestRender()
}
}
previousX = x
previousY = y
return true
}
}
}
Custom Renderer Class:
import android.opengl.GLES20
import android.opengl.GLSurfaceView
import android.opengl.Matrix
import javax.microedition.khronos.egl.EGLConfig
import javax.microedition.khronos.opengles.GL10
class MyGLRenderer : GLSurfaceView.Renderer {
val TAG = MyGLRenderer::class.java.name
@Volatile
var angle: Float = 0f
private lateinit var mTriangle: Triangle
private val mRotationMatrix = FloatArray(16)
// mMVPMatrix is an abbreviation for "Model View Projection Matrix"
private val mMVPMatrix = FloatArray(16)
private val mProjectionMatrix = FloatArray(16)
private val mViewMatrix = FloatArray(16)
override fun onSurfaceCreated(unused: GL10, config: EGLConfig) {
// Set the background frame color
GLES20.glClearColor(0.8f, 0.2f, 0.2f, 1.0f)
// initialize a triangle
mTriangle = Triangle()
}
override fun onDrawFrame(unused: GL10) {
val scratch = FloatArray(16)
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
// enable face culling feature
//GLES20.glEnable(GL10.GL_CULL_FACE)
// specify which faces to not draw
//GLES20.glCullFace(GL10.GL_BACK)
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 0f, 1.0f, 0.0f)
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0)
// Create a rotation transformation for the triangle
/*val time = SystemClock.uptimeMillis() % 4000L
val angle = 0.090f * time.toInt()*/
Matrix.setRotateM(mRotationMatrix, 0, -angle, 0f, 0f, -1.0f)
// Combine the rotation matrix with the projection and camera view
// Note that the mMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, mRotationMatrix, 0)
// Draw shape
mTriangle.draw(scratch)
}
override fun onSurfaceChanged(unused: GL10, width: Int, height: Int) {
GLES20.glViewport(0, 0, width, height)
val ratio: Float = width.toFloat() / height.toFloat()
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1f, 1f, 3f, 7f)
}
}
Triangle Class:
import android.opengl.GLES20
import android.util.Log
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.FloatBuffer
class Triangle {
val TAG = Triangle::class.java.name
// number of coordinates per vertex in this array
val COORDS_PER_VERTEX = 3
var triangleCoords = floatArrayOf( // in counterclockwise order:
0.0f, 0.622008459f, 0.0f, // top
-0.5f, -0.311004243f, 0.0f, // bottom left
0.5f, -0.311004243f, 0.0f // bottom right
)
private val fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}"
private val vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
// Note that the uMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
" gl_Position = uMVPMatrix * vPosition;" +
"}"
// Use to access and set the view transformation
private var mMVPMatrixHandle: Int = 0
// Set color with red, green, blue and alpha (opacity) values
val color = floatArrayOf(0.63671875f, 0.76953125f, 0.22265625f, 1.0f)
private var vertexBuffer: FloatBuffer =
// (number of coordinate values * 4 bytes per float)
ByteBuffer.allocateDirect(triangleCoords.size * 4).run {
// use the device hardware's native byte order
order(ByteOrder.nativeOrder())
// create a floating point buffer from the ByteBuffer
asFloatBuffer().apply {
// add the coordinates to the FloatBuffer
put(triangleCoords)
// set the buffer to read the first coordinate
position(0)
}
}
private var mProgram: Int
init {
val vertexShader: Int = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode)
val fragmentShader: Int = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode)
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram().also {
// add the vertex shader to program
GLES20.glAttachShader(it, vertexShader)
Log.d(TAG, "glAttachShader: ${GLES20.glGetError()}")
// add the fragment shader to program
GLES20.glAttachShader(it, fragmentShader)
Log.d(TAG, "glAttachShader: ${GLES20.glGetError()}")
// creates OpenGL ES program executables
GLES20.glLinkProgram(it)
Log.d(TAG, "glLinkProgram: ${GLES20.glGetError()}")
}
Log.d(TAG, "glCreateProgram: ${GLES20.glGetError()}")
}
fun loadShader(type: Int, shaderCode: String): Int {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
return GLES20.glCreateShader(type).also { shader ->
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode)
GLES20.glCompileShader(shader)
}
}
private var mPositionHandle: Int = 0
private var mColorHandle: Int = 0
private val vertexCount: Int = triangleCoords.size / COORDS_PER_VERTEX
private val vertexStride: Int = COORDS_PER_VERTEX * 4 // 4 bytes per vertex
fun draw(mvpMatrix: FloatArray) { // pass in the calculated transformation matrix
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram)
Log.d(TAG, "glUseProgram: ${GLES20.glGetError()}")
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition").also {
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(it)
Log.d(TAG, "glEnableVertexAttribArray: ${GLES20.glGetError()}")
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(
it,
COORDS_PER_VERTEX,
GLES20.GL_FLOAT,
false,
vertexStride,
vertexBuffer
)
Log.d(TAG, "glVertexAttribPointer: ${GLES20.glGetError()}")
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor").also { colorHandle ->
// Set color for drawing the triangle
GLES20.glUniform4fv(colorHandle, 1, color, 0)
}
Log.d(TAG, "glGetUniformLocation: ${GLES20.glGetError()}")
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix")
Log.d(TAG, "glGetUniformLocation: ${GLES20.glGetError()}")
// Pass the projection and view transformation to the shader
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0)
Log.d(TAG, "glUniformMatrix4fv: ${GLES20.glGetError()}")
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount)
Log.d(TAG, "glDrawArrays: ${GLES20.glGetError()}")
// Disable vertex array
GLES20.glDisableVertexAttribArray(it)
Log.d(TAG, "glDisableVertexAttribArray: ${GLES20.glGetError()}")
}
Log.d(TAG, "glGetAttribLocation: ${GLES20.glGetError()}")
}
}
Of course I added this line in the manifest.xml:
<uses-feature android:glEsVersion="0x00020000" android:required="true"/>
UPDATE:
I discovered that if I try to run the app while the phone (the Lollipop one) is disconnected from the PC, it fails to run, with the toast message "Authorization denied" (the Italian message is "Autorizzazione negata", so in English it could also be "Permission denied"). I don't know what it means, but maybe it will be helpful for someone.
