I used MediaMuxer and MediaCodec to generate an mp4 video.
The video is playable after I call mMediaMuxer.stop().
However, when the user quits the app before I get the chance to call the stop() method, I am left with a big mp4 file that is not playable.
Is there any way to repair this mp4 file to make it playable?
Edit
Here is one example of a corrupted mp4 file
And I was able to repair the file using this online tool, but this tool asked me to upload a non-corrupted video as a reference.
Here is the non-corrupted mp4 video that I used as a reference. When I uploaded this video, the tool repaired my broken mp4 file.
So it is possible to repair the file, but how did they do it?
If useful, here is the code I used to generate both the corrupted and non-corrupted files:
package com.tolotra.images_to_video
import android.content.ContentValues.TAG
import android.content.Context
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.media.*
import android.opengl.*
import android.util.Log
import android.util.TimingLogger
import android.view.Surface
import java.io.File
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.FloatBuffer
import java.nio.IntBuffer
import java.text.SimpleDateFormat
import java.util.*
class VideoBuilder(applicationContext: Context) {
private var frameId: Long = 0
private lateinit var muxer: MediaMuxer
private lateinit var glTool: OverlayRenderer
private lateinit var encoder: MediaCodec
private lateinit var outVideoFilePath: String
private var context = applicationContext
private var trackIndex: Int = 0
private lateinit var bufferInfo: MediaCodec.BufferInfo
private var eglContext: EGLContext? = null
private var eglDisplay: EGLDisplay? = null
private var eglSurface: EGLSurface? = null
private lateinit var surface: Surface
val timeoutUs = 10000L
val frameRate = 5
var presentationTimeUs: Long = 0
fun setup() {
encoder = createEncoder()
initInputSurface(encoder)
encoder.start()
outVideoFilePath = getScreenshotPath("tolotra-screen-recoder-${Date().time}.mp4")
muxer = MediaMuxer(outVideoFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
glTool = OverlayRenderer()
glTool.initGl()
}
/**
* timelapse is the duration between the current frame and the previous frame
*/
fun feed(bitmap: Bitmap, timelapse: Long) {
frameId++
Log.d("FEED_PROFILE", "feed frame:$frameId")
val timings = TimingLogger("FEED_PROFILE", "feed frame:$frameId")
// Get encoded data and feed it to muxer
drainEncoder(encoder, muxer, false, timelapse)
timings.addSplit("drainEncoder done");
// Render the bitmap/texture with OpenGL here
glTool.render(bitmap)
timings.addSplit("render done");
// Set timestamp with EGL extension
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, presentationTimeUs * 1000)
// Feed encoder with next frame produced by OpenGL
EGL14.eglSwapBuffers(eglDisplay, eglSurface)
timings.dumpToLog();
}
fun finish() {
Log.d(TAG, "Finishing")
// Drain last encoded data and finalize the video file
drainEncoder(encoder, muxer, true, 0)
_cleanUp(encoder, muxer)
val file = File(outVideoFilePath)
val file_size = (file.length() / 1024).toInt()
val retriever = MediaMetadataRetriever()
retriever.setDataSource(outVideoFilePath)
val width =
retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)
val height =
retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)
val rotation =
retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION)
val bitRate =
retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE)
val duration =
java.lang.Long.valueOf(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)) * 1000
Log.d("Result", "bitrate $bitRate duration $duration fileSize $file_size ")
}
fun getScreenshotPath(fileName: String): String {
val f = context.externalCacheDir
val externalDir: String = f!!.path;
val sDir: String = externalDir + File.separator + "Screen Recorder";
val dir = File(sDir);
val dirPath: String;
if (dir.exists() || dir.mkdir()) {
dirPath = sDir + File.separator + fileName;
} else {
dirPath = externalDir + File.separator + fileName
}
Log.d("Mp4 file path", "Path: $dirPath")
return dirPath;
}
fun createEncoder(): MediaCodec {
bufferInfo = MediaCodec.BufferInfo()
val MIME = "video/avc"
val encoder = MediaCodec.createEncoderByType(MIME)
val width = 320
val height = 512
val format = MediaFormat.createVideoFormat(MIME, width, height)
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
// format.setInteger(MediaFormat.KEY_BIT_RATE, 2_000_000)
format.setInteger(MediaFormat.KEY_BIT_RATE, 350_000)
format.setInteger(MediaFormat.KEY_FRAME_RATE, 45)
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5)
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
trackIndex = -1;
return encoder
}
fun drainEncoder(
encoder: MediaCodec,
muxer: MediaMuxer,
endOfStream: Boolean,
timelapseUs: Long
) {
if (endOfStream)
encoder.signalEndOfInputStream()
while (true) {
val outBufferId = encoder.dequeueOutputBuffer(bufferInfo, timeoutUs)
if (outBufferId >= 0) {
val encodedBuffer = encoder.getOutputBuffer(outBufferId)
// MediaMuxer is ignoring KEY_FRAMERATE, so I set it manually here
// to achieve the desired frame rate
bufferInfo.presentationTimeUs = presentationTimeUs
if (encodedBuffer != null) {
muxer.writeSampleData(trackIndex, encodedBuffer, bufferInfo)
}
presentationTimeUs += timelapseUs
encoder.releaseOutputBuffer(outBufferId, false)
// Are we finished here?
if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
break
} else if (outBufferId == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (!endOfStream)
break
// End of stream, but still no output available. Try again.
} else if (outBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
trackIndex = muxer.addTrack(encoder.outputFormat)
muxer.start()
}
}
}
private fun initInputSurface(encoder: MediaCodec) {
val surface = encoder.createInputSurface()
val eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY)
if (eglDisplay == EGL14.EGL_NO_DISPLAY)
throw RuntimeException(
"eglDisplay == EGL14.EGL_NO_DISPLAY: "
+ GLUtils.getEGLErrorString(EGL14.eglGetError())
)
val version = IntArray(2)
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1))
throw RuntimeException("eglInitialize(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()))
val attribList = intArrayOf(
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGLExt.EGL_RECORDABLE_ANDROID, 1,
EGL14.EGL_NONE
)
val configs = arrayOfNulls<EGLConfig>(1)
val nConfigs = IntArray(1)
EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.size, nConfigs, 0)
var err = EGL14.eglGetError()
if (err != EGL14.EGL_SUCCESS)
throw RuntimeException(GLUtils.getEGLErrorString(err))
val ctxAttribs = intArrayOf(
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
)
val eglContext =
EGL14.eglCreateContext(eglDisplay, configs[0], EGL14.EGL_NO_CONTEXT, ctxAttribs, 0)
err = EGL14.eglGetError()
if (err != EGL14.EGL_SUCCESS)
throw RuntimeException(GLUtils.getEGLErrorString(err))
val surfaceAttribs = intArrayOf(
EGL14.EGL_NONE
)
val eglSurface =
EGL14.eglCreateWindowSurface(eglDisplay, configs[0], surface, surfaceAttribs, 0)
err = EGL14.eglGetError()
if (err != EGL14.EGL_SUCCESS)
throw RuntimeException(GLUtils.getEGLErrorString(err))
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext))
throw RuntimeException("eglMakeCurrent(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()))
this.eglSurface = eglSurface
this.eglDisplay = eglDisplay
this.eglContext = eglContext
this.surface = surface
}
private fun _cleanUp(encoder: MediaCodec, muxer: MediaMuxer) {
if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
EGL14.eglDestroySurface(eglDisplay, eglSurface)
EGL14.eglDestroyContext(eglDisplay, eglContext)
EGL14.eglReleaseThread()
EGL14.eglTerminate(eglDisplay);
}
surface?.release();
eglDisplay = EGL14.EGL_NO_DISPLAY
eglContext = EGL14.EGL_NO_CONTEXT
eglSurface = EGL14.EGL_NO_SURFACE
encoder.stop()
encoder.release()
muxer.stop()
muxer.release()
}
}
class OverlayRenderer() {
private val mvpMatrix = FloatArray(16)
private val projectionMatrix = FloatArray(16)
private val viewMatrix = FloatArray(16)
private val vertexShaderCode =
"precision highp float;\n" +
"attribute vec3 vertexPosition;\n" +
"attribute vec2 uvs;\n" +
"varying vec2 varUvs;\n" +
"uniform mat4 mvp;\n" +
"\n" +
"void main()\n" +
"{\n" +
"\tvarUvs = uvs;\n" +
"\tgl_Position = mvp * vec4(vertexPosition, 1.0);\n" +
"}"
private val fragmentShaderCode =
"precision mediump float;\n" +
"\n" +
"varying vec2 varUvs;\n" +
"uniform sampler2D texSampler;\n" +
"\n" +
"void main()\n" +
"{\t\n" +
"\tgl_FragColor = texture2D(texSampler, varUvs);\n" +
"}"
private var vertices = floatArrayOf(
// x, y, z, u, v
-1.0f, -1.0f, 0.0f, 0f, 0f,
-1.0f, 1.0f, 0.0f, 0f, 1f,
1.0f, 1.0f, 0.0f, 1f, 1f,
1.0f, -1.0f, 0.0f, 1f, 0f
)
private var indices = intArrayOf(
2, 1, 0, 0, 3, 2
)
private var program: Int = 0
private var vertexHandle: Int = 0
private var bufferHandles = IntArray(2)
private var uvsHandle: Int = 0
private var mvpHandle: Int = 0
private var samplerHandle: Int = 0
private val textureHandle = IntArray(1)
val viewportWidth = 320
val viewportHeight = 486
var vertexBuffer: FloatBuffer = ByteBuffer.allocateDirect(vertices.size * 4).run {
order(ByteOrder.nativeOrder())
asFloatBuffer().apply {
put(vertices)
position(0)
}
}
var indexBuffer: IntBuffer = ByteBuffer.allocateDirect(indices.size * 4).run {
order(ByteOrder.nativeOrder())
asIntBuffer().apply {
put(indices)
position(0)
}
}
fun render(bitmap: Bitmap) {
Log.d("Bitmap", "width ${bitmap.width} height ${bitmap.height}")
// Prepare some transformations
val mvp = FloatArray(16)
Matrix.setIdentityM(mvp, 0)
Matrix.scaleM(mvp, 0, 1f, -1f, 1f)
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT or GLES20.GL_DEPTH_BUFFER_BIT)
GLES20.glClearColor(0f, 0f, 0f, 0f)
GLES20.glViewport(0, 0, viewportWidth, viewportHeight)
GLES20.glUseProgram(program)
// Pass transformations to shader
GLES20.glUniformMatrix4fv(mvpHandle, 1, false, mvp, 0)
// Prepare texture for drawing
GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0])
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1)
// Pass the Bitmap to OpenGL here
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0)
GLES20.glTexParameteri(
GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST
)
GLES20.glTexParameteri(
GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_NEAREST
)
// Prepare buffers with vertices and indices & draw
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0])
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1])
GLES20.glEnableVertexAttribArray(vertexHandle)
GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 4 * 5, 0)
GLES20.glEnableVertexAttribArray(uvsHandle)
GLES20.glVertexAttribPointer(uvsHandle, 2, GLES20.GL_FLOAT, false, 4 * 5, 3 * 4)
GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_INT, 0)
}
fun initGl() {
val vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER).also { shader ->
GLES20.glShaderSource(shader, vertexShaderCode)
GLES20.glCompileShader(shader)
}
val fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER).also { shader ->
GLES20.glShaderSource(shader, fragmentShaderCode)
GLES20.glCompileShader(shader)
}
program = GLES20.glCreateProgram().also {
GLES20.glAttachShader(it, vertexShader)
GLES20.glAttachShader(it, fragmentShader)
GLES20.glLinkProgram(it)
vertexHandle = GLES20.glGetAttribLocation(it, "vertexPosition")
uvsHandle = GLES20.glGetAttribLocation(it, "uvs")
mvpHandle = GLES20.glGetUniformLocation(it, "mvp")
samplerHandle = GLES20.glGetUniformLocation(it, "texSampler")
}
// Initialize buffers
GLES20.glGenBuffers(2, bufferHandles, 0)
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0])
GLES20.glBufferData(
GLES20.GL_ARRAY_BUFFER,
vertices.size * 4,
vertexBuffer,
GLES20.GL_DYNAMIC_DRAW
)
GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1])
GLES20.glBufferData(
GLES20.GL_ELEMENT_ARRAY_BUFFER,
indices.size * 4,
indexBuffer,
GLES20.GL_DYNAMIC_DRAW
)
// Init texture handle
GLES20.glGenTextures(1, textureHandle, 0)
// Ensure I can draw transparent stuff that overlaps properly
GLES20.glEnable(GLES20.GL_BLEND)
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA)
}
}
In general MP4 is not a good recording format. Usually the sample table is kept in memory and written on close, so in the case of a power loss or an application bug you lose the recording. Use an MPEG-2 Transport Stream or a fragmented MP4 instead; then most of the written media remains playable (a segmentation sketch follows the atom layouts below). Most likely your file contains just an MP4 'ftyp' and 'mdat' atom with the audio and video interleaved. With some educated guessing and knowledge about the video stream, there is a chance to extract audio and video. https://fix.video seems to do it.
Correct MP4:
[ftyp]
[mdat]
[moov]
-end-
Truncated MP4:
[ftyp]
[mdat]
-end-
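Since Android's MediaMuxer offers no fragmented-MP4 output format, one way to act on that advice is to segment the recording: finalize the muxer every few seconds and open a new file, so a crash loses at most the segment currently being written. A minimal sketch, assuming a hypothetical newSegmentFile() helper that returns a fresh output path:
import android.media.MediaCodec
import android.media.MediaFormat
import android.media.MediaMuxer
import java.nio.ByteBuffer
class SegmentedMuxer(private val newSegmentFile: () -> String) {
    private var muxer: MediaMuxer? = null
    private var trackIndex = -1
    private var segmentStartUs = 0L
    private val segmentLengthUs = 5_000_000L // rotate files every 5 seconds
    fun writeSample(format: MediaFormat, buffer: ByteBuffer, info: MediaCodec.BufferInfo) {
        if (muxer == null || info.presentationTimeUs - segmentStartUs > segmentLengthUs) {
            // stop() writes the 'moov' atom, so every closed segment stays playable
            muxer?.apply { stop(); release() }
            muxer = MediaMuxer(newSegmentFile(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4).also {
                trackIndex = it.addTrack(format)
                it.start()
            }
            segmentStartUs = info.presentationTimeUs
        }
        muxer!!.writeSampleData(trackIndex, buffer, info)
    }
}
A real implementation would also rebase info.presentationTimeUs per segment and cut only on key frames (MediaCodec.BUFFER_FLAG_KEY_FRAME), but the point stands: every finalized segment survives a crash.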
Fix.video parses your good file and extracts the settings for audio and video.
It uses the information from the good file to recreate most of the 'moov' atom. The missing sample tables ('stXX') are recreated by parsing your 'mdat' atom. The video chunks inside the 'mdat' atom are each prefixed with their length, and the rest must be AAC audio.
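A rough sketch of that reconstruction idea, for a video-only file like the one produced by the code above (illustrative only, not fix.video's actual implementation; it assumes each AVC sample in 'mdat' is stored as a 4-byte big-endian length followed by NAL data, which is how MediaMuxer writes them, and it ignores 64-bit box sizes):
import java.io.RandomAccessFile
fun recoverSampleSizes(path: String): List<Int> {
    val sizes = mutableListOf<Int>()
    RandomAccessFile(path, "r").use { f ->
        var pos = 0L
        while (pos + 8 <= f.length()) {
            // every MP4 box starts with a 32-bit size and a 4-character type
            f.seek(pos)
            val boxSize = f.readInt().toLong() and 0xFFFFFFFFL
            val type = ByteArray(4).also { f.readFully(it) }.toString(Charsets.US_ASCII)
            if (type == "mdat") {
                var p = pos + 8
                val end = if (boxSize >= 8) pos + boxSize else f.length() // size 0 means "to end of file"
                while (p + 4 <= end) {
                    f.seek(p)
                    val sampleLen = f.readInt()
                    if (sampleLen <= 0 || p + 4 + sampleLen > end) break // truncated tail
                    sizes.add(sampleLen + 4) // sample size including its length prefix
                    p += 4 + sampleLen
                }
                break
            }
            pos += if (boxSize >= 8) boxSize else break
        }
    }
    return sizes // raw material for rebuilding 'stsz'/'stco' against the reference file's 'moov'
}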
Related
I am attempting to write a Flutter plugin for Android that lets me directly write pixels using a Texture, so I need to make a SurfaceTexture available, and I want to be able to draw arbitrary pixel data to it using a single textured quad. For now, for debugging, I am simply trying to draw a single cyan triangle over a magenta background to verify my vertices are being drawn correctly, but it appears they are not. The glClear call is doing what I expect: the magenta background is shown instead of the black that would otherwise be behind it, and I can change that color by changing what I pass to glClearColor, so the surface is being rendered in some way, but I see no evidence that calling glDrawArrays accomplishes anything. The code containing all of my interfacing with OpenGL ES is in the file below, and the drawTextureToCurrentSurface method is where both glClear and glDrawArrays are called:
class EglContext {
companion object {
// Pass through position and UV values
val vertexSource = """
#version 300 es
precision mediump float;
/*layout(location = 0)*/ in vec2 position;
/*layout(location = 1)*/ in vec2 uv;
out vec2 uvOut;
void main() {
gl_Position = vec4(position, -0.5, 1.0);
uvOut = uv;
}
""".trimIndent()
// Eventually get the texture value, for now, just make it cyan so I can see it
val fragmentSource = """
#version 300 es
precision mediump float;
in vec2 uvOut;
out vec4 fragColor;
uniform sampler2D tex;
void main() {
vec4 texel = texture(tex, uvOut);
// Effectively ignore the texel without optimizing it out
fragColor = texel * 0.0001 + vec4(0.0, 1.0, 1.0, 1.0);
}
""".trimIndent()
var glThread: HandlerThread? = null
var glHandler: Handler? = null
}
private var display = EGL14.EGL_NO_DISPLAY
private var context = EGL14.EGL_NO_CONTEXT
private var config: EGLConfig? = null
private var vertexBuffer: FloatBuffer
private var uvBuffer: FloatBuffer
//private var indexBuffer: IntBuffer
private var defaultProgram: Int = -1
private var uniformTextureLocation: Int = -1
private var vertexLocation: Int = -1
private var uvLocation: Int = -1
var initialized = false
private fun checkGlError(msg: String) {
val errCodeEgl = EGL14.eglGetError()
val errCodeGl = GLES30.glGetError()
if (errCodeEgl != EGL14.EGL_SUCCESS || errCodeGl != GLES30.GL_NO_ERROR) {
throw RuntimeException(
"$msg - $errCodeEgl(${GLU.gluErrorString(errCodeEgl)}) : $errCodeGl(${
GLU.gluErrorString(
errCodeGl
)
})"
)
}
}
init {
// Flat square
// Am I allocating and writing to these correctly?
val vertices = floatArrayOf(-1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f)
vertexBuffer = ByteBuffer.allocateDirect(vertices.size * 4).asFloatBuffer().also {
it.put(vertices)
it.position(0)
}
val uv = floatArrayOf(0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f)
uvBuffer = ByteBuffer.allocateDirect(uv.size * 4).asFloatBuffer().also {
it.put(uv)
it.position(0)
}
// Not being used until I can figure out what's currently not working
/*val indices = intArrayOf(0, 1, 2, 2, 1, 3)
indexBuffer = ByteBuffer.allocateDirect(indices.size * 4).asIntBuffer().also {
it.position(0)
it.put(indices)
it.position(0)
}*/
if (glThread == null) {
glThread = HandlerThread("flutterSoftwareRendererPlugin")
glThread!!.start()
glHandler = Handler(glThread!!.looper)
}
}
// Run OpenGL code on a separate thread to keep the context available
private fun doOnGlThread(blocking: Boolean = true, task: () -> Unit) {
val semaphore: Semaphore? = if (blocking) Semaphore(0) else null
glHandler!!.post {
task.invoke()
semaphore?.release()
}
semaphore?.acquire()
}
fun setup() {
doOnGlThread {
Log.d("Native", "Setting up EglContext")
display = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY)
if (display == EGL14.EGL_NO_DISPLAY) {
Log.e("Native", "No display")
checkGlError("Failed to get display")
}
val versionBuffer = IntArray(2)
if (!EGL14.eglInitialize(display, versionBuffer, 0, versionBuffer, 1)) {
Log.e("Native", "Did not init")
checkGlError("Failed to initialize")
}
val configs = arrayOfNulls<EGLConfig>(1)
val configNumBuffer = IntArray(1)
var attrBuffer = intArrayOf(
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_DEPTH_SIZE, 16,
//EGL14.EGL_STENCIL_SIZE, 8,
//EGL14.EGL_SAMPLE_BUFFERS, 1,
//EGL14.EGL_SAMPLES, 4,
EGL14.EGL_NONE
)
if (!EGL14.eglChooseConfig(
display,
attrBuffer,
0,
configs,
0,
configs.size,
configNumBuffer,
0
)
) {
Log.e("Native", "No config")
checkGlError("Failed to choose a config")
}
if (configNumBuffer[0] == 0) {
Log.e("Native", "No config")
checkGlError("Got zero configs")
}
Log.d("Native", "Got Config x${configNumBuffer[0]}: ${configs[0]}")
config = configs[0]
attrBuffer = intArrayOf(
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE
)
context = EGL14.eglCreateContext(display, config, EGL14.EGL_NO_CONTEXT, attrBuffer, 0)
if (context == EGL14.EGL_NO_CONTEXT) {
Log.e("Native", "Failed to get any context")
checkGlError("Failed to get context")
}
Log.d("Native", "Context = $context\n 'Current' = ${EGL14.eglGetCurrentContext()}")
initialized = true
}
}
// Called by my plugin to get a surface to register for Texture widget
fun buildSurfaceTextureWindow(surfaceTexture: SurfaceTexture): EGLSurface {
var _surface: EGLSurface? = null
doOnGlThread {
val attribBuffer = intArrayOf(EGL14.EGL_NONE)
val surface =
EGL14.eglCreateWindowSurface(display, config, surfaceTexture, attribBuffer, 0)
if (surface == EGL14.EGL_NO_SURFACE) {
checkGlError("Obtained no surface")
}
EGL14.eglMakeCurrent(display, surface, surface, context)
Log.d("Native", "New current context = ${EGL14.eglGetCurrentContext()}")
if (defaultProgram == -1) {
defaultProgram = makeProgram(
mapOf(
GLES30.GL_VERTEX_SHADER to vertexSource,
GLES30.GL_FRAGMENT_SHADER to fragmentSource
)
)
uniformTextureLocation = GLES30.glGetUniformLocation(defaultProgram, "tex")
vertexLocation = GLES30.glGetAttribLocation(defaultProgram, "position")
uvLocation = GLES30.glGetAttribLocation(defaultProgram, "uv")
Log.d("Native", "Attrib locations $vertexLocation, $uvLocation")
checkGlError("Getting uniform")
}
_surface = surface
}
return _surface!!
}
fun makeCurrent(eglSurface: EGLSurface, width: Int, height: Int) {
doOnGlThread {
GLES30.glViewport(0, 0, width, height)
if (!EGL14.eglMakeCurrent(display, eglSurface, eglSurface, context)) {
checkGlError("Failed to make surface current")
}
}
}
fun makeTexture(width: Int, height: Int): Int {
var _texture: Int? = null
doOnGlThread {
val intArr = IntArray(1)
GLES30.glGenTextures(1, intArr, 0)
checkGlError("Generate texture")
Log.d("Native", "${EGL14.eglGetCurrentContext()} ?= ${EGL14.EGL_NO_CONTEXT}")
val texture = intArr[0]
Log.d("Native", "Texture = $texture")
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, texture)
checkGlError("Bind texture")
val buffer = ByteBuffer.allocateDirect(width * height * 4)
GLES30.glTexImage2D(
GLES30.GL_TEXTURE_2D,
0,
GLES30.GL_RGBA,
width,
height,
0,
GLES30.GL_RGBA,
GLES30.GL_UNSIGNED_BYTE,
buffer
)
checkGlError("Create texture buffer")
_texture = texture
}
return _texture!!
}
private fun compileShader(source: String, shaderType: Int): Int {
val currentContext = EGL14.eglGetCurrentContext()
val noContext = EGL14.EGL_NO_CONTEXT
val shaderId = GLES30.glCreateShader(shaderType)
Log.d("Native", "Created $shaderId\nContext $currentContext vs $noContext")
checkGlError("Create shader")
if (shaderId == 0) {
Log.e("Native", "Could not create shader for some reason")
checkGlError("Could not create shader")
}
GLES30.glShaderSource(shaderId, source)
checkGlError("Setting shader source")
GLES30.glCompileShader(shaderId)
val statusBuffer = IntArray(1)
GLES30.glGetShaderiv(shaderId, GLES30.GL_COMPILE_STATUS, statusBuffer, 0)
val shaderLog = GLES30.glGetShaderInfoLog(shaderId)
Log.d("Native", "Compiling shader #$shaderId : $shaderLog")
if (statusBuffer[0] == 0) {
GLES30.glDeleteShader(shaderId)
checkGlError("Failed to compile shader $shaderId")
}
return shaderId
}
private fun makeProgram(sources: Map<Int, String>): Int {
val currentContext = EGL14.eglGetCurrentContext()
val noContext = EGL14.EGL_NO_CONTEXT
val program = GLES30.glCreateProgram()
Log.d("Native", "Created $program\nContext $currentContext vs $noContext")
checkGlError("Create program")
sources.forEach {
val shader = compileShader(it.value, it.key)
GLES30.glAttachShader(program, shader)
}
val linkBuffer = IntArray(1)
GLES30.glLinkProgram(program)
GLES30.glGetProgramiv(program, GLES30.GL_LINK_STATUS, linkBuffer, 0)
if (linkBuffer[0] == 0) {
GLES30.glDeleteProgram(program)
checkGlError("Failed to link program $program")
}
return program
}
// Called to actually draw to the surface. When fully implemented it should draw whatever is
// on the associated texture, but for now, to debug, I just want to verify I can draw vertices,
// but it seems I cannot?
fun drawTextureToCurrentSurface(texture: Int, surface: EGLSurface) {
doOnGlThread {
// Verify I have a context
val currentContext = EGL14.eglGetCurrentContext()
val noContext = EGL14.EGL_NO_CONTEXT
Log.d("Native", "Drawing, Context = $currentContext vs $noContext")
checkGlError("Just checking first")
GLES30.glClearColor(1f, 0f, 1f, 1f)
GLES30.glClearDepthf(1f)
GLES30.glDisable(GLES30.GL_DEPTH_TEST)
GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT or GLES30.GL_DEPTH_BUFFER_BIT)
checkGlError("Clearing")
GLES30.glUseProgram(defaultProgram)
checkGlError("Use program")
GLES30.glActiveTexture(GLES30.GL_TEXTURE0)
checkGlError("Activate texture 0")
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, texture)
checkGlError("Bind texture $texture")
GLES30.glUniform1i(uniformTextureLocation, 0)
checkGlError("Set uniform")
GLES30.glEnableVertexAttribArray(vertexLocation)
vertexBuffer.position(0)
GLES30.glVertexAttribPointer(vertexLocation, 2, GLES30.GL_FLOAT, false, 0, vertexBuffer)
Log.d("Native", "Bound vertices (shader=$defaultProgram)")
checkGlError("Attribute 0")
GLES30.glEnableVertexAttribArray(uvLocation)
uvBuffer.position(0)
GLES30.glVertexAttribPointer(uvLocation, 2, GLES30.GL_FLOAT, false, 0, uvBuffer)
checkGlError("Attribute 1")
//indexBuffer.position(0)
//GLES30.glDrawElements(GLES30.GL_TRIANGLES, 4, GLES30.GL_UNSIGNED_INT, indexBuffer)
// I would expect to get a triangle of different color than the background
GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, 3)
GLES30.glFinish()
checkGlError("Finished GL")
EGL14.eglSwapBuffers(display, surface)
checkGlError("Swapped buffers")
}
}
...currently unused other methods
}
The general flow of the above code is that the init block executes when initializing the context, of which there is only one. setup is called when the plugin is registered, and buildSurfaceTextureWindow is called when initializing a SurfaceTexture for a Flutter Texture. The first time this is called, it compiles the shaders. When the plugin wants to render the texture, it calls makeCurrent then drawTextureToCurrentSurface, which is where the magenta background becomes visible but without any cyan triangle. Calls to GL functions are done in a separate thread using doOnGlThread.
If you need to see all of the code, including the full plugin implementation and an example app using it, I have it on Github, but as far as I can tell the code above should be the only region relevant to why no geometry is rendered in the effectively hardcoded color from my fragment shader.
tl;dr My background color from glClear shows up on screen, but my expected result of calling glDrawArrays, a cyan triangle, does not, and I am trying to understand why.
Apparently I needed to call .order(ByteOrder.nativeOrder()) on my buffers. Without this, the vertex array data is not set up properly. I also needed to set glTexParameteri(GL_TEXTURE_2D, ...) for GL_TEXTURE_MIN/MAG_FILTER and GL_TEXTURE_WRAP_S/T; without that, all textures render as black.
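For reference, a minimal sketch of those two fixes, reusing names from the question's code (vertices: FloatArray and texture: Int are assumed to exist there):
import android.opengl.GLES30
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.FloatBuffer
// allocate vertex data in the platform's native byte order, so GL reads the floats correctly
val vertexBuffer: FloatBuffer = ByteBuffer.allocateDirect(vertices.size * 4)
    .order(ByteOrder.nativeOrder())
    .asFloatBuffer()
    .also {
        it.put(vertices)
        it.position(0)
    }
// a texture without filtering/wrap parameters is incomplete in ES and samples as black
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, texture)
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_LINEAR)
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR)
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE)
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE)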
I need to render a bitmap without displaying it on the screen. For that I create an OpenGL context using EGL14, as described in this answer. Then I save the OpenGL surface to a bitmap using GLES20.glReadPixels. But for some reason it is not rendered as expected and is just transparent.
import android.graphics.Bitmap
import android.opengl.*
import android.opengl.EGL14.EGL_CONTEXT_CLIENT_VERSION
import java.nio.ByteBuffer
class Renderer {
private lateinit var display: EGLDisplay
private lateinit var surface: EGLSurface
private lateinit var eglContext: EGLContext
fun draw() {
// Just a stub that fills the bitmap with red color
GLES20.glClearColor(1f, 0f, 0f, 1f)
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
}
fun saveBitmap(): Bitmap {
val width = 320
val height = 240
val mPixelBuf = ByteBuffer.allocate(width * height * 4)
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf)
return Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
}
private fun initializeEglContext() {
display = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY)
if (display == EGL14.EGL_NO_DISPLAY) {
throw RuntimeException("eglGetDisplay failed ${EGL14.eglGetError()}")
}
val versions = IntArray(2)
if (!EGL14.eglInitialize(display, versions, 0, versions, 1)) {
throw RuntimeException("eglInitialize failed ${EGL14.eglGetError()}")
}
val configAttr = intArrayOf(
EGL14.EGL_COLOR_BUFFER_TYPE, EGL14.EGL_RGB_BUFFER,
EGL14.EGL_LEVEL, 0,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
EGL14.EGL_NONE
)
val configs: Array<EGLConfig?> = arrayOfNulls(1)
val numConfig = IntArray(1)
EGL14.eglChooseConfig(
display, configAttr, 0,
configs, 0, 1, numConfig, 0
)
if (numConfig[0] == 0) {
throw RuntimeException("No configs found")
}
val config: EGLConfig? = configs[0]
val surfAttr = intArrayOf(
EGL14.EGL_WIDTH, 320,
EGL14.EGL_HEIGHT, 240,
EGL14.EGL_NONE
)
surface = EGL14.eglCreatePbufferSurface(display, config, surfAttr, 0)
val contextAttrib = intArrayOf(
EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
)
eglContext = EGL14.eglCreateContext(display, config, EGL14.EGL_NO_CONTEXT, contextAttrib, 0)
EGL14.eglMakeCurrent(display, surface, surface, eglContext)
}
fun destroy() {
EGL14.eglMakeCurrent(display, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT)
EGL14.eglDestroySurface(display, surface)
EGL14.eglDestroyContext(display, eglContext)
EGL14.eglTerminate(display)
}
}
This is how I use it:
val renderer = Renderer()
renderer.initializeEglContext()
renderer.draw()
val bitmap = renderer.saveBitmap()
renderer.destroy()
The code runs without any errors. I checked that the context is created successfully; for example, GLES20.glCreateProgram works as expected and returns a valid id. The only warning I get is
W/OpenGLRenderer: Failed to choose config with EGL_SWAP_BEHAVIOR_PRESERVED, retrying without...
But I'm not sure if it affects the result in any way.
However, the bitmap is not filled with color and is transparent:
val color = bitmap[0, 0]
Log.d("Main", "onCreate: ${Color.valueOf(color)}")
Color(0.0, 0.0, 0.0, 0.0, sRGB IEC61966-2.1)
I guess I'm missing something, but I can't figure out what. How do I make it actually render?
The pixel buffer must be copied to the bitmap:
mPixelBuf.rewind() // make sure the buffer is at position 0 before copying
val bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
bitmap.copyPixelsFromBuffer(mPixelBuf)
return bitmap
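Two further details matter when reading pixels back this way: glReadPixels is normally given a direct, native-order buffer, and GL returns rows bottom-up, so the copied Bitmap comes out vertically flipped. A sketch of saveBitmap() with both handled (the flip via android.graphics.Matrix is an addition for illustration, not part of the original answer):
fun saveBitmap(): Bitmap {
    val width = 320
    val height = 240
    // direct buffer in native order: glReadPixels fills it from native code
    val pixelBuf = ByteBuffer.allocateDirect(width * height * 4).order(ByteOrder.nativeOrder())
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuf)
    pixelBuf.rewind()
    val bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
    bitmap.copyPixelsFromBuffer(pixelBuf)
    // GL rows run bottom-to-top; flip to normal Bitmap orientation
    val flip = android.graphics.Matrix().apply { preScale(1f, -1f) }
    return Bitmap.createBitmap(bitmap, 0, 0, width, height, flip, false)
}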
I'm trying to run a YoloV4 model on Android that's been converted to .tflite. My input shape seems to be fine [1, 224, 224, 4] but the app crashes on my output shape. I'm using code from a Udacity course on tflite.
I get the above error when I run the following code:
class TFLiteObjectDetectionAPIModel private constructor() : Classifier {
override val statString: String
get() = TODO("not implemented") //To change initializer of created properties use File | Settings | File Templates.
private var isModelQuantized: Boolean = false
// Config values.
private var inputSize: Int = 0
// Pre-allocated buffers.
private val labels = Vector<String>()
private var intValues: IntArray? = null
// outputLocations: array of shape [Batchsize, NUM_DETECTIONS,4]
// contains the location of detected boxes
private var outputLocations: Array<Array<FloatArray>>? = null
// outputClasses: array of shape [Batchsize, NUM_DETECTIONS]
// contains the classes of detected boxes
private var outputClasses: Array<FloatArray>? = null
// outputScores: array of shape [Batchsize, NUM_DETECTIONS]
// contains the scores of detected boxes
private var outputScores: Array<FloatArray>? = null
// numDetections: array of shape [Batchsize]
// contains the number of detected boxes
private var numDetections: FloatArray? = null
private var imgData: ByteBuffer? = null
private var tfLite: Interpreter? = null
override fun recognizeImage(bitmap: Bitmap): List<Classifier.Recognition> {
// Log this method so that it can be analyzed with systrace.
Trace.beginSection("recognizeImage")
Trace.beginSection("preprocessBitmap")
// Preprocess the image data from 0-255 int to normalized float based
// on the provided parameters.
bitmap.getPixels(intValues, 0, bitmap.width, 0, 0, bitmap.width, bitmap.height)
imgData!!.rewind()
for (i in 0 until inputSize) {
for (j in 0 until inputSize) {
val pixelValue = intValues!![i * inputSize + j]
if (isModelQuantized) {
// Quantized model
imgData!!.put((pixelValue shr 16 and 0xFF).toByte())
imgData!!.put((pixelValue shr 8 and 0xFF).toByte())
imgData!!.put((pixelValue and 0xFF).toByte())
} else { // Float model
imgData!!.putFloat(((pixelValue shr 16 and 0xFF) - IMAGE_MEAN) / IMAGE_STD)
imgData!!.putFloat(((pixelValue shr 8 and 0xFF) - IMAGE_MEAN) / IMAGE_STD)
imgData!!.putFloat(((pixelValue and 0xFF) - IMAGE_MEAN) / IMAGE_STD)
}
}
}
Trace.endSection() // preprocessBitmap
// Copy the input data into TensorFlow.
Trace.beginSection("feed")
outputLocations = Array(1) { Array(NUM_DETECTIONS) { FloatArray(4) } }
outputClasses = Array(1) { FloatArray(NUM_DETECTIONS) }
outputScores = Array(1) { FloatArray(NUM_DETECTIONS) }
numDetections = FloatArray(1)
val inputArray = arrayOf<Any>(imgData!!)
val outputMap = ArrayMap<Int, Any>()
outputMap[0] = outputLocations!!
outputMap[1] = outputClasses!!
outputMap[2] = outputScores!!
outputMap[3] = numDetections!!
Trace.endSection()
// Run the inference call.
Trace.beginSection("run")
tfLite!!.runForMultipleInputsOutputs(inputArray, outputMap)
Trace.endSection()
// Show the best detections.
// after scaling them back to the input size.
val recognitions = ArrayList<Classifier.Recognition>(NUM_DETECTIONS)
for (i in 0 until NUM_DETECTIONS) {
val detection = RectF(
outputLocations!![0][i][1] * inputSize,
outputLocations!![0][i][0] * inputSize,
outputLocations!![0][i][3] * inputSize,
outputLocations!![0][i][2] * inputSize)
// SSD Mobilenet V1 Model assumes class 0 is background class
// in label file and class labels start from 1 to number_of_classes+1,
// while outputClasses correspond to class index from 0 to number_of_classes
val labelOffset = 1
recognitions.add(
Classifier.Recognition(
"" + i,
labels[outputClasses!![0][i].toInt() + labelOffset],
outputScores!![0][i],
detection))
}
Trace.endSection() // "recognizeImage"
return recognitions
}
override fun enableStatLogging(debug: Boolean) {
//Not implemented
}
override fun close() {
//Not needed.
}
override fun setNumThreads(numThreads: Int) {
if (tfLite != null) tfLite!!.setNumThreads(numThreads)
}
override fun setUseNNAPI(isChecked: Boolean) {
if (tfLite != null) tfLite!!.setUseNNAPI(isChecked)
}
companion object {
// Only return this many results.
private const val NUM_DETECTIONS = 3087
// Float model
private const val IMAGE_MEAN = 128.0f
private const val IMAGE_STD = 128.0f
/** Memory-map the model file in Assets. */
@Throws(IOException::class)
private fun loadModelFile(assets: AssetManager, modelFilename: String): MappedByteBuffer {
val fileDescriptor = assets.openFd(modelFilename)
val inputStream = FileInputStream(fileDescriptor.fileDescriptor)
val fileChannel = inputStream.channel
val startOffset = fileDescriptor.startOffset
val declaredLength = fileDescriptor.declaredLength
return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength)
}
/**
* Initializes a native TensorFlow session for classifying images.
*
* @param assetManager The asset manager to be used to load assets.
* @param modelFilename The filepath of the model GraphDef protocol buffer.
* @param labelFilename The filepath of label file for classes.
* @param inputSize The size of image input
* @param isQuantized Boolean representing model is quantized or not
*/
@Throws(IOException::class)
fun create(
assetManager: AssetManager,
modelFilename: String,
labelFilename: String,
inputSize: Int,
isQuantized: Boolean): Classifier {
val d = TFLiteObjectDetectionAPIModel()
val labelsInput: InputStream?
val actualFilename = labelFilename.split("file:///android_asset/".toRegex())
.dropLastWhile { it.isEmpty() }.toTypedArray()[1]
labelsInput = assetManager.open(actualFilename)
val br: BufferedReader?
br = BufferedReader(InputStreamReader(labelsInput!!))
while (br.readLine()?.let { d.labels.add(it) } != null);
br.close()
d.inputSize = inputSize
try {
val options = Interpreter.Options()
options.setNumThreads(4)
d.tfLite = Interpreter(loadModelFile(assetManager, modelFilename), options)
} catch (e: Exception) {
throw RuntimeException(e)
}
d.isModelQuantized = isQuantized
// Pre-allocate buffers.
val numBytesPerChannel: Int = if (isQuantized) {
1 // Quantized
} else {
4 // Floating point
}
d.imgData = ByteBuffer.allocateDirect(1 * d.inputSize * d.inputSize * 3 * numBytesPerChannel)
d.imgData!!.order(ByteOrder.nativeOrder())
d.intValues = IntArray(d.inputSize * d.inputSize)
d.outputLocations = Array(1) { Array(NUM_DETECTIONS) { FloatArray(2) } }
d.outputClasses = Array(1) { FloatArray(NUM_DETECTIONS) }
d.outputScores = Array(1) { FloatArray(NUM_DETECTIONS) }
d.numDetections = FloatArray(1)
return d
}
}
}
When I change the outputLocation to
outputLocations = Array(1) { Array(NUM_DETECTIONS) { FloatArray(2) } }
I get the following error: Cannot copy from a TensorFlowLite tensor (Identity) with shape [1, 3087, 4] to a Java object with shape [1, 3087, 2]
What are Identity and Identity_1? I've looked at my model in Netron and can see both, but I'm not sure how to interpret the model.
Can anyone help? Is there anything else I can change, or is my model just not suitable for mobile platforms?
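Identity and Identity_1 are simply the auto-generated names TensorFlow gave the model's output tensors during conversion. Rather than guessing their shapes, the interpreter can report them directly; a small sketch using the org.tensorflow.lite Interpreter API, with tfLite as in the code above:
// print every output tensor's name, shape and type so the Java-side arrays
// can be allocated to match (e.g. [1, 3087, 4] needs FloatArray(4) innermost)
val interpreter = tfLite!!
for (i in 0 until interpreter.outputTensorCount) {
    val t = interpreter.getOutputTensor(i)
    Log.d("TFLite", "output $i: name=${t.name()} shape=${t.shape().contentToString()} type=${t.dataType()}")
}
Note also that create() pre-allocates outputLocations with FloatArray(2) while recognizeImage() re-allocates it with FloatArray(4); whichever array is passed to the interpreter must match the reported tensor shape exactly.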
Cannot copy from a TensorFlowLite tensor (Identity) with shape [1, 25200, 8] to a Java object with shape [1, 80, 80, 255].
I have encountered similar problems and haven't found a solution yet
I have been trying to use facenet with ml_kit, and I was able to generate the .tflite file following this tutorial, but when I try to use it on Android I am getting this error message:
TensorFlowLite buffer with 76800 bytes and a ByteBuffer with 307200 bytes
My model is as expected:
INPUTS:
[{'name': 'input', 'index': 451, 'shape': array([ 1, 160, 160, 3], dtype=int32), 'dtype': <class 'numpy.uint8'>, 'quantization': (0.0078125, 128)}]
OUTPUTS:
[{'name': 'embeddings', 'index': 450, 'shape': array([ 1, 512], dtype=int32), 'dtype': <class 'numpy.uint8'>, 'quantization': (0.0235294122248888, 0)}]
And the way that I use the model interpreter is as follows:
val input = convertBitmapToByteBuffer(Bitmap.createScaledBitmap(bitmap, IMAGE_WIDTH, IMAGE_HEIGHT, true))
//val input = convertBitmap(bitmap)
val inputOutputOptions = createInputOutputOptions()
// [START mlkit_run_inference]
val inputs = FirebaseModelInputs.Builder()
.add(input) // add() as many input arrays as your model requires
.build()
firebaseInterpreter.run(inputs, inputOutputOptions)
@Throws(FirebaseMLException::class)
private fun createInputOutputOptions(): FirebaseModelInputOutputOptions {
// [START mlkit_create_io_options]
val inputOutputOptions = FirebaseModelInputOutputOptions.Builder()
.setInputFormat(0, FirebaseModelDataType.INT32, intArrayOf(1, IMAGE_WIDTH, IMAGE_HEIGHT, 3))
.setOutputFormat(0, FirebaseModelDataType.INT32, intArrayOf(1, 512))
.build()
// [END mlkit_create_io_options]
return inputOutputOptions
}
fun processImage(bitmap: Bitmap){
val input = convertBitmapToByteBuffer(Bitmap.createScaledBitmap(bitmap, IMAGE_WIDTH, IMAGE_HEIGHT, true))
val inputOutputOptions = createInputOutputOptions()
// [START mlkit_run_inference]
val inputs = FirebaseModelInputs.Builder()
.add(input) // add() as many input arrays as your model requires
.build()
firebaseInterpreter.run(inputs, inputOutputOptions)
.addOnSuccessListener { result ->
// [START_EXCLUDE]
// [START mlkit_read_result]
// [END mlkit_read_result]
// [END_EXCLUDE]
// listener?.onSuccess(probabilities)
}
.addOnFailureListener(
object : OnFailureListener {
override fun onFailure(e: Exception) {
// Task failed with an exception
// ...
listener?.onFailure(e)
}
})
}
private fun convertBitmapToByteBuffer(bitmap: Bitmap): ByteBuffer {
val height = bitmap.getHeight()
val width = bitmap.getWidth()
val byteBuffer: ByteBuffer = ByteBuffer.allocateDirect(BYTES_PER_CHANNEL * DIM_BATCH_SIZE * width * height * DIM_PIXEL_SIZE)
byteBuffer.order(ByteOrder.nativeOrder())
val intValues = IntArray(width * height)
bitmap.getPixels(intValues, 0, width, 0, 0, width, height);
// Convert the image to floating point.
var pixel = 0
for (i in 0 until width) {
for (j in 0 until height) {
val `val` = intValues[pixel++]
addPixelValueInt(byteBuffer, `val`)
}
}
byteBuffer.rewind()
return byteBuffer
}
protected fun addPixelValueInt(byteBuffer: ByteBuffer, pixelValue: Int) {
byteBuffer.putInt((pixelValue shr 16 and 0xFF))
byteBuffer.putInt((pixelValue shr 8 and 0xFF))
byteBuffer.putInt((pixelValue and 0xFF))
}
My Config values
private val IMAGE_WIDTH : Int = 160
private val IMAGE_HEIGHT : Int = 160
private val DIM_BATCH_SIZE = 1
private val DIM_PIXEL_SIZE = 3
private val BYTES_PER_CHANNEL = 4
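The byte counts in the error follow directly from these values: the model input is quantized uint8, so it expects 1 × 160 × 160 × 3 × 1 byte = 76,800 bytes, while convertBitmapToByteBuffer allocates BYTES_PER_CHANNEL × DIM_BATCH_SIZE × 160 × 160 × DIM_PIXEL_SIZE = 4 × 76,800 = 307,200 bytes, because putInt writes four bytes per channel.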
Any idea what I am doing wrong?
After many hours I found that ml_kit can't handle the type QUANTIZED_UINT8 (or at least I don't know how to do it). So I have modified my .tflite file to use FLOAT instead:
tflite_convert --output_file model_mobile/my_facenet.tflite --graph_def_file facenet_frozen.pb --input_arrays "input" --input_shapes "1,160,160,3" --output_arrays "embeddings" --output_format TFLITE --mean_values 128 --std_dev_values 128 --default_ranges_min 0 --default_ranges_max 6 --inference_type QUANTIZED_UINT8 --inference_input_type QUANTIZED_UINT8
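(Presumably the float variant of that command drops the quantization flags; a guess at what it would look like, not the author's exact invocation: tflite_convert --output_file model_mobile/my_facenet.tflite --graph_def_file facenet_frozen.pb --input_arrays "input" --input_shapes "1,160,160,3" --output_arrays "embeddings" --output_format TFLITE --inference_type FLOAT)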
Also I have changed the way that I convert a bitmap to something that can be used by ml_kit
private fun bitmapToInputArray(originalBitmap: Bitmap, quantified : Boolean = true): Array<Array<Array<FloatArray>>> {
var bitmap = originalBitmap
// [START mlkit_bitmap_input]
//be sure it's the right size
bitmap = Bitmap.createScaledBitmap(bitmap, IMAGE_WIDTH, IMAGE_HEIGHT, true)
val batchNum = 0
val input = Array(DIM_BATCH_SIZE) { Array(bitmap.width) { Array(bitmap.height) { FloatArray(DIM_PIXEL_SIZE) } } }
for (x in 0..bitmap.width-1) {
for (y in 0..bitmap.height-1) {
val pixelValue = bitmap.getPixel(x, y)
var red:Float
var green:Float
var blue:Float
if(quantified) {
red = (pixelValue shr 16 and 0xFF) / 255f
green = (pixelValue shr 8 and 0xFF) / 255f
blue = (pixelValue and 0xFF) / 255f
} else {
red = ((pixelValue shr 16 and 0xFF) - IMAGE_MEAN) / IMAGE_STD
green= ((pixelValue shr 8 and 0xFF) - IMAGE_MEAN) / IMAGE_STD
blue= ((pixelValue and 0xFF) - IMAGE_MEAN) / IMAGE_STD
}
input[batchNum][x][y][0] = red
input[batchNum][x][y][1] = green
input[batchNum][x][y][2] = blue
}
}
// [END mlkit_bitmap_input]
return input
}
And finally my ml_kit options look like this:
@Throws(FirebaseMLException::class)
private fun createInputOutputOptions(): FirebaseModelInputOutputOptions {
// [START mlkit_create_io_options]
val inputOutputOptions = FirebaseModelInputOutputOptions.Builder()
.setInputFormat(0, FirebaseModelDataType.FLOAT32, intArrayOf(DIM_BATCH_SIZE, IMAGE_WIDTH, IMAGE_HEIGHT, DIM_PIXEL_SIZE))
.setOutputFormat(0, FirebaseModelDataType.FLOAT32, intArrayOf(DIM_BATCH_SIZE, 512))
.build()
// [END mlkit_create_io_options]
return inputOutputOptions
}
And those are my final config settings
private val IMAGE_WIDTH : Int = 160
private val IMAGE_HEIGHT : Int = 160
private val DIM_BATCH_SIZE = 1
private val DIM_PIXEL_SIZE = 3
private val BYTES_PER_CHANNEL = 4
private val IMAGE_MEAN = 127
private val IMAGE_STD = 128.0f
I'm new to OpenGL ES programming, so I followed the guide provided by the Android Developers site. It shows how to draw a simple green triangle that rotates on the screen following the touch point. I tried it on my tablet (Galaxy Tab A10, Android 8 Oreo) and it worked, and also on a Huawei (Android 6.0 Marshmallow); the problem is that the triangle is not shown on my Galaxy J3 (Android 5.1 Lollipop), with no log errors and 0 returned by every glGetError() call; the only thing I can see is the background color changing.
I couldn't find similar problems here on SO or on the web; has someone had the same problem?
(The language used is Kotlin, but I think it's a conceptual question, so please take a look even if the code is slightly different from Java.)
Game Activity:
import android.content.Context
import android.opengl.GLSurfaceView
import android.os.Bundle
import android.support.v7.app.AppCompatActivity
import android.view.MotionEvent
class GameActivity : AppCompatActivity() {
private lateinit var mGLView: GLSurfaceView
public override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
// Create a GLSurfaceView instance and set it
// as the ContentView for this Activity.
mGLView = MyGLSurfaceView(this)
setContentView(mGLView)
}
class MyGLSurfaceView(context: Context) : GLSurfaceView(context) {
private val mRenderer: MyGLRenderer
init {
// Create an OpenGL ES 2.0 context
setEGLContextClientVersion(2)
mRenderer = MyGLRenderer()
// Set the Renderer for drawing on the GLSurfaceView
setRenderer(mRenderer)
renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
}
private val TOUCH_SCALE_FACTOR: Float = 180.0f / 320f
private var previousX: Float = 0f
private var previousY: Float = 0f
override fun onTouchEvent(e: MotionEvent): Boolean {
// MotionEvent reports input details from the touch screen
// and other input controls. In this case, you are only
// interested in events where the touch position changed.
val x: Float = e.x
val y: Float = e.y
when (e.action) {
MotionEvent.ACTION_MOVE -> {
var dx: Float = x - previousX
var dy: Float = y - previousY
// reverse direction of rotation above the mid-line
if (y > height / 2) {
dx *= -1
}
// reverse direction of rotation to left of the mid-line
if (x < width / 2) {
dy *= -1
}
mRenderer.angle += (dx + dy) * TOUCH_SCALE_FACTOR
requestRender()
}
}
previousX = x
previousY = y
return true
}
}
}
Custom Renderer Class:
import android.opengl.GLES20
import android.opengl.GLSurfaceView
import android.opengl.Matrix
import javax.microedition.khronos.egl.EGLConfig
import javax.microedition.khronos.opengles.GL10
class MyGLRenderer : GLSurfaceView.Renderer {
val TAG = MyGLRenderer::class.java.name
@Volatile
var angle: Float = 0f
private lateinit var mTriangle: Triangle
private val mRotationMatrix = FloatArray(16)
// mMVPMatrix is an abbreviation for "Model View Projection Matrix"
private val mMVPMatrix = FloatArray(16)
private val mProjectionMatrix = FloatArray(16)
private val mViewMatrix = FloatArray(16)
override fun onSurfaceCreated(unused: GL10, config: EGLConfig) {
// Set the background frame color
GLES20.glClearColor(0.8f, 0.2f, 0.2f, 1.0f)
// initialize a triangle
mTriangle = Triangle()
}
override fun onDrawFrame(unused: GL10) {
val scratch = FloatArray(16)
// Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
// enable face culling feature
//GLES20.glEnable(GL10.GL_CULL_FACE)
// specify which faces to not draw
//GLES20.glCullFace(GL10.GL_BACK)
// Set the camera position (View matrix)
Matrix.setLookAtM(mViewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 0f, 1.0f, 0.0f)
// Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0)
// Create a rotation transformation for the triangle
/*val time = SystemClock.uptimeMillis() % 4000L
val angle = 0.090f * time.toInt()*/
Matrix.setRotateM(mRotationMatrix, 0, -angle, 0f, 0f, -1.0f)
// Combine the rotation matrix with the projection and camera view
// Note that the mMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
Matrix.multiplyMM(scratch, 0, mMVPMatrix, 0, mRotationMatrix, 0)
// Draw shape
mTriangle.draw(scratch)
}
override fun onSurfaceChanged(unused: GL10, width: Int, height: Int) {
GLES20.glViewport(0, 0, width, height)
val ratio: Float = width.toFloat() / height.toFloat()
// this projection matrix is applied to object coordinates
// in the onDrawFrame() method
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1f, 1f, 3f, 7f)
}
}
Triangle Class:
import android.opengl.GLES20
import android.util.Log
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.FloatBuffer
class Triangle {
val TAG = Triangle::class.java.name
// number of coordinates per vertex in this array
val COORDS_PER_VERTEX = 3
var triangleCoords = floatArrayOf( // in counterclockwise order:
0.0f, 0.622008459f, 0.0f, // top
-0.5f, -0.311004243f, 0.0f, // bottom left
0.5f, -0.311004243f, 0.0f // bottom right
)
private val fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
"void main() {" +
" gl_FragColor = vColor;" +
"}"
private val vertexShaderCode =
// This matrix member variable provides a hook to manipulate
// the coordinates of the objects that use this vertex shader
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
// the matrix must be included as a modifier of gl_Position
// Note that the uMVPMatrix factor *must be first* in order
// for the matrix multiplication product to be correct.
" gl_Position = uMVPMatrix * vPosition;" +
"}"
// Use to access and set the view transformation
private var mMVPMatrixHandle: Int = 0
// Set color with red, green, blue and alpha (opacity) values
val color = floatArrayOf(0.63671875f, 0.76953125f, 0.22265625f, 1.0f)
private var vertexBuffer: FloatBuffer =
// (number of coordinate values * 4 bytes per float)
ByteBuffer.allocateDirect(triangleCoords.size * 4).run {
// use the device hardware's native byte order
order(ByteOrder.nativeOrder())
// create a floating point buffer from the ByteBuffer
asFloatBuffer().apply {
// add the coordinates to the FloatBuffer
put(triangleCoords)
// set the buffer to read the first coordinate
position(0)
}
}
private var mProgram: Int
init {
val vertexShader: Int = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode)
val fragmentShader: Int = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode)
// create empty OpenGL ES Program
mProgram = GLES20.glCreateProgram().also {
// add the vertex shader to program
GLES20.glAttachShader(it, vertexShader)
Log.d(TAG, "glAttachShader: ${GLES20.glGetError()}")
// add the fragment shader to program
GLES20.glAttachShader(it, fragmentShader)
Log.d(TAG, "glAttachShader: ${GLES20.glGetError()}")
// creates OpenGL ES program executables
GLES20.glLinkProgram(it)
Log.d(TAG, "glLinkProgram: ${GLES20.glGetError()}")
}
Log.d(TAG, "glCreateProgram: ${GLES20.glGetError()}")
}
fun loadShader(type: Int, shaderCode: String): Int {
// create a vertex shader type (GLES20.GL_VERTEX_SHADER)
// or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
return GLES20.glCreateShader(type).also { shader ->
// add the source code to the shader and compile it
GLES20.glShaderSource(shader, shaderCode)
GLES20.glCompileShader(shader)
}
}
private var mPositionHandle: Int = 0
private var mColorHandle: Int = 0
private val vertexCount: Int = triangleCoords.size / COORDS_PER_VERTEX
private val vertexStride: Int = COORDS_PER_VERTEX * 4 // 4 bytes per vertex
fun draw(mvpMatrix: FloatArray) { // pass in the calculated transformation matrix
// Add program to OpenGL ES environment
GLES20.glUseProgram(mProgram)
Log.d(TAG, "glUseProgram: ${GLES20.glGetError()}")
// get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition").also {
// Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(it)
Log.d(TAG, "glEnableVertexAttribArray: ${GLES20.glGetError()}")
// Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(
it,
COORDS_PER_VERTEX,
GLES20.GL_FLOAT,
false,
vertexStride,
vertexBuffer
)
Log.d(TAG, "glVertexAttribPointer: ${GLES20.glGetError()}")
// get handle to fragment shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor").also { colorHandle ->
// Set color for drawing the triangle
GLES20.glUniform4fv(colorHandle, 1, color, 0)
}
Log.d(TAG, "glGetUniformLocation: ${GLES20.glGetError()}")
// get handle to shape's transformation matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix")
Log.d(TAG, "glGetUniformLocation: ${GLES20.glGetError()}")
// Pass the projection and view transformation to the shader
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0)
Log.d(TAG, "glUniformMatrix4fv: ${GLES20.glGetError()}")
// Draw the triangle
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, vertexCount)
Log.d(TAG, "glDrawArrays: ${GLES20.glGetError()}")
// Disable vertex array
GLES20.glDisableVertexAttribArray(it)
Log.d(TAG, "glDisableVertexAttribArray: ${GLES20.glGetError()}")
}
Log.d(TAG, "glGetAttribLocation: ${GLES20.glGetError()}")
}
}
Of course I added this line in the manifest.xml:
<uses-feature android:glEsVersion="0x00020000" android:required="true"/>
UPDATE:
I discovered that if I try to run the app while the phone (the Lollipop one) is disconnected from the PC, it fails to run with the toast message "Authorization denied" (the Italian message is "Autorizzazione negata", so in English it could also be "Permission denied"). I don't know what it means, but maybe it will be helpful for someone.
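Worth noting about the Triangle code above: glGetError() never reports shader compile or link failures, so a driver that rejects a shader (not unusual on older GPUs) fails silently while still returning 0 from every glGetError() call. A minimal status check that loadShader could include, as a sketch:
fun loadShader(type: Int, shaderCode: String): Int {
    return GLES20.glCreateShader(type).also { shader ->
        GLES20.glShaderSource(shader, shaderCode)
        GLES20.glCompileShader(shader)
        // compile failures do not show up in glGetError(); query the status explicitly
        val status = IntArray(1)
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, status, 0)
        if (status[0] == 0) {
            Log.e("Triangle", "Shader compile failed: " + GLES20.glGetShaderInfoLog(shader))
        }
    }
}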