Android app development with NDK and TCC possible?

Is it possible to create Android apps with the help of tcc (the Tiny C Compiler)?
A normal NDK APK looks like a simple ZIP to me.
There were only some signatures under the /META-INF folder, dynamic *.so libraries under /lib, AndroidManifest.xml, and resources.arsc:
├───lib
│ └───x86
│ └───gdbserver
│ └───libAndroid1.so
├───META-INF
│ └───CERT.RSA
│ └───CERT.SF
│ └───MANIFEST.MF
├───AndroidManifest.xml
├───resources.arsc
Also, it seems the gcc/clang compilers can be replaced by tcc. I managed to compile a generic *.so file with these commands:
tcc -r dllmain.c -o dllmain.o
tcc -shared dllmain.o -o dll.so
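For reference, a source file along these lines builds with those two commands (hypothetical contents; the original dllmain.c isn't shown):
/* dllmain.c - hypothetical example. tcc exports all non-static
   symbols from a shared object, so no export attribute is needed. */
int add(int a, int b) {
    return a + b;
}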
But now I'm not sure how to compile a full NDK project with tcc in place of gcc/clang.
Here's my failed attempt:
C:\Users\gray\source\repos\Android1\Android1\Android1.NativeActivity>tcc -shared -r main.c -o libAndroid1.so -IC:\Microsoft\AndroidNDK\android-ndk-r15c\platforms\android-19\arch-x86\usr\include\
tcc: warning: -r: overriding compiler action already specified
In file included from main.c:24:
In file included from C:/Microsoft/AndroidNDK/android-ndk-r15c/platforms/android-19/arch-x86/usr/include//android/sensor.h:43:
In file included from C:/Microsoft/AndroidNDK/android-ndk-r15c/platforms/android-19/arch-x86/usr/include//sys/types.h:35:
C:/Microsoft/AndroidNDK/android-ndk-r15c/platforms/android-19/arch-x86/usr/include//sys/cdefs.h:283: error: #error "No function renaming possible"
Also, I renamed main.cpp to main.c so that tcc would accept it.
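For context, line 283 of Bionic's sys/cdefs.h sits inside a compiler guard of roughly this shape (paraphrased from memory, not the exact header text). The function-renaming macros rely on GCC-style __asm__ symbol aliasing, and tcc does not define __GNUC__, so the #else branch fires:
#if defined(__GNUC__)
/* GCC/clang: rename a declaration to another link-time symbol */
#define __RENAME(x) __asm__(#x)
#else
#error "No function renaming possible"
#endif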
main.c:
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/* Headers this sample needs; in the Visual Studio template most of
 * these are pulled in through pch.h. */
#include <malloc.h>
#include <string.h>
#include <EGL/egl.h>
#include <GLES/gl.h>
#include <android/log.h>
#include <android/sensor.h>
#include <android_native_app_glue.h>
#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, "AndroidProject1.NativeActivity", __VA_ARGS__))
#define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, "AndroidProject1.NativeActivity", __VA_ARGS__))
/**
* Our saved state data.
*/
struct saved_state {
float angle;
int x;
int y;
};
/**
* Shared state for our app.
*/
struct engine {
struct android_app* app;
ASensorManager* sensorManager;
const ASensor* accelerometerSensor;
ASensorEventQueue* sensorEventQueue;
int animating;
EGLDisplay display;
EGLSurface surface;
EGLContext context;
int width;
int height;
struct saved_state state;
};
/**
* Initialize an EGL context for the current display.
*/
static int engine_init_display(struct engine* engine) {
// initialize OpenGL ES and EGL
/*
* Here specify the attributes of the desired configuration.
* Below, we select an EGLConfig with at least 8 bits per color
* component compatible with on-screen windows
*/
const EGLint attribs[] = {
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_NONE
};
EGLint w, h, format;
EGLint numConfigs;
EGLConfig config;
EGLSurface surface;
EGLContext context;
EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
eglInitialize(display, 0, 0);
/* Here, the application chooses the configuration it desires. In this
* sample, we have a very simplified selection process, where we pick
* the first EGLConfig that matches our criteria */
eglChooseConfig(display, attribs, &config, 1, &numConfigs);
/* EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is
* guaranteed to be accepted by ANativeWindow_setBuffersGeometry().
* As soon as we picked a EGLConfig, we can safely reconfigure the
* ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID. */
eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);
ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);
surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
context = eglCreateContext(display, config, NULL, NULL);
if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
LOGW("Unable to eglMakeCurrent");
return -1;
}
eglQuerySurface(display, surface, EGL_WIDTH, &w);
eglQuerySurface(display, surface, EGL_HEIGHT, &h);
engine->display = display;
engine->context = context;
engine->surface = surface;
engine->width = w;
engine->height = h;
engine->state.angle = 0;
// Initialize GL state.
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
glEnable(GL_CULL_FACE);
glShadeModel(GL_SMOOTH);
glDisable(GL_DEPTH_TEST);
return 0;
}
/**
* Just the current frame in the display.
*/
static void engine_draw_frame(struct engine* engine) {
if (engine->display == NULL) {
// No display.
return;
}
// Just fill the screen with a color.
glClearColor(((float)engine->state.x) / engine->width, engine->state.angle,
((float)engine->state.y) / engine->height, 1);
glClear(GL_COLOR_BUFFER_BIT);
eglSwapBuffers(engine->display, engine->surface);
}
/**
* Tear down the EGL context currently associated with the display.
*/
static void engine_term_display(struct engine* engine) {
if (engine->display != EGL_NO_DISPLAY) {
eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
if (engine->context != EGL_NO_CONTEXT) {
eglDestroyContext(engine->display, engine->context);
}
if (engine->surface != EGL_NO_SURFACE) {
eglDestroySurface(engine->display, engine->surface);
}
eglTerminate(engine->display);
}
engine->animating = 0;
engine->display = EGL_NO_DISPLAY;
engine->context = EGL_NO_CONTEXT;
engine->surface = EGL_NO_SURFACE;
}
/**
* Process the next input event.
*/
static int engine_handle_input(struct android_app* app, AInputEvent* event) {
struct engine* engine = (struct engine*)app->userData;
if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) {
engine->state.x = AMotionEvent_getX(event, 0);
engine->state.y = AMotionEvent_getY(event, 0);
return 1;
}
return 0;
}
/**
* Process the next main command.
*/
static void engine_handle_cmd(struct android_app* app, int cmd) {
struct engine* engine = (struct engine*)app->userData;
switch (cmd) {
case APP_CMD_SAVE_STATE:
// The system has asked us to save our current state. Do so.
engine->app->savedState = malloc(sizeof(struct saved_state));
*((struct saved_state*)engine->app->savedState) = engine->state;
engine->app->savedStateSize = sizeof(struct saved_state);
break;
case APP_CMD_INIT_WINDOW:
// The window is being shown, get it ready.
if (engine->app->window != NULL) {
engine_init_display(engine);
engine_draw_frame(engine);
}
break;
case APP_CMD_TERM_WINDOW:
// The window is being hidden or closed, clean it up.
engine_term_display(engine);
break;
case APP_CMD_GAINED_FOCUS:
// When our app gains focus, we start monitoring the accelerometer.
if (engine->accelerometerSensor != NULL) {
ASensorEventQueue_enableSensor(engine->sensorEventQueue,
engine->accelerometerSensor);
// We'd like to get 60 events per second (in us).
ASensorEventQueue_setEventRate(engine->sensorEventQueue,
engine->accelerometerSensor, (1000L / 60) * 1000);
}
break;
case APP_CMD_LOST_FOCUS:
// When our app loses focus, we stop monitoring the accelerometer.
// This is to avoid consuming battery while not being used.
if (engine->accelerometerSensor != NULL) {
ASensorEventQueue_disableSensor(engine->sensorEventQueue,
engine->accelerometerSensor);
}
// Also stop animating.
engine->animating = 0;
engine_draw_frame(engine);
break;
}
}
/**
* This is the main entry point of a native application that is using
* android_native_app_glue. It runs in its own thread, with its own
* event loop for receiving input events and doing other things.
*/
void android_main(struct android_app* state) {
struct engine engine;
memset(&engine, 0, sizeof(engine));
state->userData = &engine;
state->onAppCmd = engine_handle_cmd;
state->onInputEvent = engine_handle_input;
engine.app = state;
// Prepare to monitor accelerometer
engine.sensorManager = ASensorManager_getInstance();
engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager,
ASENSOR_TYPE_ACCELEROMETER);
engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager,
state->looper, LOOPER_ID_USER, NULL, NULL);
if (state->savedState != NULL) {
// We are starting with a previous saved state; restore from it.
engine.state = *(struct saved_state*)state->savedState;
}
engine.animating = 1;
// loop waiting for stuff to do.
while (1) {
// Read all pending events.
int ident;
int events;
struct android_poll_source* source;
// If not animating, we will block forever waiting for events.
// If animating, we loop until all events are read, then continue
// to draw the next frame of animation.
while ((ident = ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events,
(void**)&source)) >= 0) {
// Process this event.
if (source != NULL) {
source->process(state, source);
}
// If a sensor has data, process it now.
if (ident == LOOPER_ID_USER) {
if (engine.accelerometerSensor != NULL) {
ASensorEvent event;
while (ASensorEventQueue_getEvents(engine.sensorEventQueue,
&event, 1) > 0) {
LOGI("accelerometer: x=%f y=%f z=%f",
event.acceleration.x, event.acceleration.y,
event.acceleration.z);
}
}
}
// Check if we are exiting.
if (state->destroyRequested != 0) {
engine_term_display(&engine);
return;
}
}
if (engine.animating) {
// Done with events; draw next animation frame.
engine.state.angle += .01f;
if (engine.state.angle > 1) {
engine.state.angle = 0;
}
// Drawing is throttled to the screen update rate, so there
// is no need to do timing here.
engine_draw_frame(&engine);
}
}
}

Related

Reading from GL_TEXTURE_EXTERNAL_OES to GL_TEXTURE_2D has performance issues and glitches

I need to send data from GL_TEXTURE_EXTERNAL_OES to a simple GL_TEXTURE_2D (render an image from an Android player into a Unity texture), and currently I do it by reading pixels from a framebuffer with the source texture attached. This process works correctly on my OnePlus 5, but has image glitches on phones like the Xiaomi Note 4, Mi A2, etc. (the image looks very green), and there are also performance issues because this process runs every frame: the more pixels there are to read, the worse the performance (even my phone drops to a low FPS at 4K resolution). Any idea how to optimize this process or do it some other way?
Thanks and best regards!
GLuint FramebufferName;
glGenFramebuffers(1, &FramebufferName);
glBindFramebuffer(GL_FRAMEBUFFER, FramebufferName);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_EXTERNAL_OES, g_ExtTexturePointer, 0);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
{
LOGD("%s", "Error: Could not setup frame buffer.");
}
unsigned char* data = new unsigned char[g_SourceWidth * g_SourceHeight * 4];
glReadPixels(0, 0, g_SourceWidth, g_SourceHeight, GL_RGBA, GL_UNSIGNED_BYTE, data);
glBindTexture(GL_TEXTURE_2D, g_TexturePointer);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, g_SourceWidth, g_SourceHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
glDeleteFramebuffers(1, &FramebufferName);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, 0);
delete[] data;
UPDATE:
The function that contains this code, and the function that calls it from the Unity side:
static void UNITY_INTERFACE_API OnRenderEvent(int eventID) { ... }
extern "C" UnityRenderingEvent UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API UMDGetRenderEventFunc()
{
return OnRenderEvent;
}
This is called from the Unity Update function like this:
[DllImport("RenderingPlugin")]
static extern IntPtr UMDGetRenderEventFunc();
IEnumerator UpdateVideoTexture()
{
while (true)
{
...
androidPlugin.UpdateSurfaceTexture();
GL.IssuePluginEvent(UMDGetRenderEventFunc, 1);
}
}
And the Android plugin does this on its side (surfaceTexture is the SurfaceTexture containing the external texture that ExoPlayer renders the video onto):
public void exportUpdateSurfaceTexture() {
synchronized (this) {
if (this.mIsStopped) {
return;
}
surfaceTexture.updateTexImage();
}
}
On the C++ side:
You're allocating and destroying the pixel data every frame with new unsigned char[g_SourceWidth * g_SourceHeight * 4]; and delete[] data, and that's expensive depending on the texture size. Allocate the buffer once, then re-use it.
One way to do this is to have static variables on the C++ side hold the texture information, plus a function to initialize those variables:
static void* pixelData = nullptr;
static int _x;
static int _y;
static int _width;
static int _height;
void initPixelData(void* buffer, int x, int y, int width, int height) {
pixelData = buffer;
_x = x;
_y = y;
_width = width;
_height = height;
}
Then your capture function should be rewritten to drop new unsigned char[g_SourceWidth * g_SourceHeight * 4]; and delete[] data and use the static variables instead:
static void UNITY_INTERFACE_API OnRenderEvent(int eventID)
{
if (pixelData == nullptr) {
//Debug::Log("Pointer is null", Color::Red);
return;
}
GLuint FramebufferName;
glGenFramebuffers(1, &FramebufferName);
glBindFramebuffer(GL_FRAMEBUFFER, FramebufferName);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_EXTERNAL_OES, g_ExtTexturePointer, 0);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
{
LOGD("%s", "Error: Could not setup frame buffer.");
}
glReadPixels(_x, _y, _width, _height, GL_RGBA, GL_UNSIGNED_BYTE, pixelData);
glBindTexture(GL_TEXTURE_2D, g_TexturePointer);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, _width, _height, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixelData);
glDeleteFramebuffers(1, &FramebufferName);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, 0);
}
extern "C" UnityRenderingEvent UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API
UMDGetRenderEventFunc()
{
return OnRenderEvent;
}
On the C# side:
[DllImport("RenderingPlugin", CallingConvention = CallingConvention.Cdecl)]
public static extern void initPixelData(IntPtr buffer, int x, int y, int width, int height);
[DllImport("RenderingPlugin", CallingConvention = CallingConvention.StdCall)]
private static extern IntPtr UMDGetRenderEventFunc();
Create the Texture information, pin it and send the pointer to C++:
int width = 500;
int height = 500;
//Where Pixel data will be saved
byte[] screenData;
//Where handle that pins the Pixel data will stay
GCHandle pinHandler;
//Used to test the color
public RawImage rawImageColor;
private Texture2D texture;
// Use this for initialization
void Awake()
{
Resolution res = Screen.currentResolution;
width = res.width;
height = res.height;
//Allocate array to be used
screenData = new byte[width * height * 4];
texture = new Texture2D(width, height, TextureFormat.RGBA32, false, false);
//Pin the Array so that it doesn't move around
pinHandler = GCHandle.Alloc(screenData, GCHandleType.Pinned);
//Register the screenshot and pass the array that will receive the pixels
IntPtr arrayPtr = pinHandler.AddrOfPinnedObject();
initPixelData(arrayPtr, 0, 0, width, height);
StartCoroutine(UpdateVideoTexture());
}
Then, to update the texture, see the sample below. Note that there are two ways to update the texture, as shown in the code. If you run into issues with Method 1, comment out the two lines that use texture.LoadRawTextureData and texture.Apply and un-comment the Method 2 code, which uses the ByteArrayToColor, texture.SetPixels, and texture.Apply functions:
IEnumerator UpdateVideoTexture()
{
while (true)
{
//Take screenshot of the screen
GL.IssuePluginEvent(UMDGetRenderEventFunc(), 1);
//Update Texture Method1
texture.LoadRawTextureData(screenData);
texture.Apply();
//Update Texture Method2. Use this if the Method1 above crashes
/*
ByteArrayToColor();
texture.SetPixels(colors);
texture.Apply();
*/
//Test it by assigning the texture to a raw image
rawImageColor.texture = texture;
//Wait for a frame
yield return null;
}
}
Color[] colors = null;
void ByteArrayToColor()
{
if (colors == null)
{
colors = new Color[screenData.Length / 4];
}
for (int i = 0; i < screenData.Length; i += 4)
{
// Color expects floats in [0, 1], so normalize the raw bytes
colors[i / 4] = new Color(screenData[i] / 255f,
screenData[i + 1] / 255f,
screenData[i + 2] / 255f,
screenData[i + 3] / 255f);
}
}
Unpin the array when done or when the script is about to be destroyed:
void OnDisable()
{
//Unpin the array when disabled
pinHandler.Free();
}
Calling glReadPixels is always going to be slow, because it forces a synchronous round trip through the CPU, which is not good at bulk pixel transfers.
Ideally you'd manage to convince Unity to accept an external image handle and do the whole process zero-copy, but failing that I would use a GPU render-to-texture pass, with a shader that transfers from the external image to the RGB surface.
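A sketch of that render-to-texture approach (draw_fullscreen_quad() is a hypothetical helper you'd supply, and prog is assumed to have been compiled and linked from the two shader sources by the caller; error checking omitted). It draws the external texture into a GL_TEXTURE_2D attached to an FBO, so the copy never leaves the GPU:
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
/* hypothetical helper, supplied elsewhere: draws two triangles
 * covering the viewport with aPos/aUV attributes bound */
extern void draw_fullscreen_quad(void);
static const char *vsrc =
    "attribute vec4 aPos;\n"
    "attribute vec2 aUV;\n"
    "varying vec2 vUV;\n"
    "void main() { gl_Position = aPos; vUV = aUV; }\n";
static const char *fsrc =
    "#extension GL_OES_EGL_image_external : require\n"
    "precision mediump float;\n"
    "uniform samplerExternalOES uTex;\n"
    "varying vec2 vUV;\n"
    "void main() { gl_FragColor = texture2D(uTex, vUV); }\n";
void blit_oes_to_2d(GLuint prog, GLuint oesTex, GLuint dstTex, int w, int h) {
    GLuint fbo;
    glGenFramebuffers(1, &fbo);
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);
    /* Attach the destination GL_TEXTURE_2D, not the OES source */
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                           GL_TEXTURE_2D, dstTex, 0);
    glViewport(0, 0, w, h);
    glUseProgram(prog); /* program built from vsrc/fsrc by the caller */
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, oesTex);
    draw_fullscreen_quad();
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    glDeleteFramebuffers(1, &fbo);
}
Unity can then sample dstTex directly (for example via Texture2D.CreateExternalTexture), so no glReadPixels is needed at all.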

Real-time image processing and display using the Android Camera2 API and ANativeWindow

I need to do some real-time image processing on the camera preview data, such as face detection (using a C++ library), and then display the processed preview with the faces labeled on screen.
I have read http://nezarobot.blogspot.com/2016/03/android-surfacetexture-camera2-opencv.html and Eddy Talvala's answer to Android camera2 API - Display processed frame in real time. Following the two webpages, I managed to build the app (without calling the face-detection lib, only trying to display the preview using ANativeWindow), but every time I run this app on a Google Pixel (7.1.0, API 25) running on Genymotion, the app crashes, throwing the following log:
08-28 14:23:09.598 2099-2127/tau.camera2demo A/libc: Fatal signal 11 (SIGSEGV), code 2, fault addr 0xd3a96000 in tid 2127 (CAMERA2)
[ 08-28 14:23:09.599 117: 117 W/ ]
debuggerd: handling request: pid=2099 uid=10067 gid=10067 tid=2127
I googled this but found no answer.
The whole project is on GitHub: https://github.com/Fung-yuantao/android-camera2demo
Here is the key code (I think).
Code in Camera2Demo.java:
private void startPreview(CameraDevice camera) throws CameraAccessException {
SurfaceTexture texture = mPreviewView.getSurfaceTexture();
// to set PREVIEW size
texture.setDefaultBufferSize(mPreviewSize.getWidth(),mPreviewSize.getHeight());
surface = new Surface(texture);
try {
// to set request for PREVIEW
mPreviewBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
} catch (CameraAccessException e) {
e.printStackTrace();
}
mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.YUV_420_888, 2);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener,mHandler);
mPreviewBuilder.addTarget(mImageReader.getSurface());
//output Surface
List<Surface> outputSurfaces = new ArrayList<>();
outputSurfaces.add(mImageReader.getSurface());
/*camera.createCaptureSession(
Arrays.asList(surface, mImageReader.getSurface()),
mSessionStateCallback, mHandler);
*/
camera.createCaptureSession(outputSurfaces, mSessionStateCallback, mHandler);
}
private CameraCaptureSession.StateCallback mSessionStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
updatePreview(session);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
};
private void updatePreview(CameraCaptureSession session)
throws CameraAccessException {
mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
session.setRepeatingRequest(mPreviewBuilder.build(), null, mHandler);
}
private ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
// get the newest frame
Image image = reader.acquireNextImage();
if (image == null) {
return;
}
// print image format
int format = reader.getImageFormat();
Log.d(TAG, "the format of captured frame: " + format);
// HERE to call jni methods
JNIUtils.display(image.getWidth(), image.getHeight(), image.getPlanes()[0].getBuffer(), surface);
//ByteBuffer buffer = image.getPlanes()[0].getBuffer();
//byte[] bytes = new byte[buffer.remaining()];
image.close();
}
};
Code in JNIUtils.java:
import android.media.Image;
import android.view.Surface;
import java.nio.ByteBuffer;
public class JNIUtils {
// TAG for JNIUtils class
private static final String TAG = "JNIUtils";
// Load native library.
static {
System.loadLibrary("native-lib");
}
public static native void display(int srcWidth, int srcHeight, ByteBuffer srcBuffer, Surface surface);
}
Code in native-lib.cpp:
#include <jni.h>
#include <string>
#include <android/log.h>
//#include <android/bitmap.h>
#include <android/native_window_jni.h>
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, "Camera2Demo", __VA_ARGS__)
extern "C" {
JNIEXPORT jstring JNICALL Java_tau_camera2demo_JNIUtils_display(
JNIEnv *env,
jobject obj,
jint srcWidth,
jint srcHeight,
jobject srcBuffer,
jobject surface) {
/*
uint8_t *srcLumaPtr = reinterpret_cast<uint8_t *>(env->GetDirectBufferAddress(srcBuffer));
if (srcLumaPtr == nullptr) {
LOGE("srcLumaPtr null ERROR!");
return NULL;
}
*/
ANativeWindow * window = ANativeWindow_fromSurface(env, surface);
ANativeWindow_acquire(window);
ANativeWindow_Buffer buffer;
ANativeWindow_setBuffersGeometry(window, srcWidth, srcHeight, 0/* format unchanged */);
if (int32_t err = ANativeWindow_lock(window, &buffer, NULL)) {
LOGE("ANativeWindow_lock failed with error code: %d\n", err);
ANativeWindow_release(window);
return NULL;
}
memcpy(buffer.bits, srcBuffer, srcWidth * srcHeight * 4);
ANativeWindow_unlockAndPost(window);
ANativeWindow_release(window);
return NULL;
}
}
After I commented the memcpy out, the app no longer crashes but displays nothing. So I guess the problem now turns into how to correctly use memcpy to copy the captured/processed buffer into buffer.bits.
Update:
I changed
memcpy(buffer.bits, srcBuffer, srcWidth * srcHeight * 4);
to
memcpy(buffer.bits, srcLumaPtr, srcWidth * srcHeight * 4);
and the app no longer crashes and starts to display, but it's displaying something strange.
As mentioned by yakobom, you're trying to copy a YUV_420_888 image directly into a RGBA_8888 destination (that's the default, if you haven't changed it). That won't work with just a memcpy.
You need to actually convert the data, and you need to ensure you don't copy too much - the sample code you have copies width*height*4 bytes, while a YUV_420_888 image takes up only stride*height*1.5 bytes (roughly). So when you copied, you were running way off the end of the buffer.
You also have to account for the stride provided at the Java level to correctly index into the buffer. This link from Microsoft has a useful diagram.
If you just care about the luminance (so grayscale output is enough), just duplicate the luminance channel into the R, G, and B channels. The pseudocode would be roughly:
uint8_t *outPtr = buffer.bits;
for (size_t y = 0; y < height; y++) {
uint8_t *rowPtr = srcLumaPtr + y * srcLumaStride;
for (size_t x = 0; x < width; x++) {
*(outPtr++) = *rowPtr;
*(outPtr++) = *rowPtr;
*(outPtr++) = *rowPtr;
*(outPtr++) = 255; // alpha for RGBA_8888
++rowPtr;
}
}
You'll need to read the srcLumaStride from the Image object (row stride of the first Plane) and pass it down via JNI as well.
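Here is a hedged sketch of what the display() entry point could look like with the stride passed down. The extra srcLumaStride parameter is an assumption, and the matching Java native declaration would need the same added int parameter:
#include <jni.h>
#include <stdint.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
JNIEXPORT void JNICALL Java_tau_camera2demo_JNIUtils_display(
        JNIEnv *env, jclass clazz, jint srcWidth, jint srcHeight,
        jint srcLumaStride, jobject srcBuffer, jobject surface) {
    uint8_t *srcLumaPtr = (uint8_t *)(*env)->GetDirectBufferAddress(env, srcBuffer);
    if (srcLumaPtr == NULL) return;
    ANativeWindow *window = ANativeWindow_fromSurface(env, surface);
    /* Ask for an RGBA_8888 buffer explicitly instead of format 0 */
    ANativeWindow_setBuffersGeometry(window, srcWidth, srcHeight,
                                     WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer buffer;
    if (ANativeWindow_lock(window, &buffer, NULL) == 0) {
        uint8_t *outPtr = (uint8_t *)buffer.bits;
        for (int y = 0; y < srcHeight; y++) {
            const uint8_t *rowPtr = srcLumaPtr + y * srcLumaStride;
            /* buffer.stride is in pixels, 4 bytes each for RGBA_8888 */
            uint8_t *outRow = outPtr + (size_t)y * buffer.stride * 4;
            for (int x = 0; x < srcWidth; x++) {
                outRow[4 * x + 0] = rowPtr[x]; /* R */
                outRow[4 * x + 1] = rowPtr[x]; /* G */
                outRow[4 * x + 2] = rowPtr[x]; /* B */
                outRow[4 * x + 3] = 255;       /* A */
            }
        }
        ANativeWindow_unlockAndPost(window);
    }
    ANativeWindow_release(window);
}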
Just to put it as an answer, to avoid a long chain of comments: such a crash issue may be due to an improper number of bytes being copied by the memcpy (UPDATE following other comments: in this case it was due to a forbidden direct copy).
If you are now getting a weird image, it is probably another issue - I would suspect the image format; try modifying that.

OpenGL ES ANDROID C++ ERROR

I created an "native android app" project in Visual Studio 2017.
It compiles and runs well but when I add simple triangle code triangle is not showing up.
My code :
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, "AndroidProject1.NativeActivity", __VA_ARGS__))
#define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, "AndroidProject1.NativeActivity", __VA_ARGS__))
#define LOG_TAG "libgl2jni"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
static void printGLString(const char * name, GLenum s) {
const char * v = (const char * ) glGetString(s);
LOGI("GL %s = %s\n", name, v);
}
static void checkGlError(const char * op) {
for (GLint error = glGetError(); error; error = glGetError()) {
LOGI("after %s() glError (0x%x)\n", op, error);
}
}
static const char gVertexShader[] =
"attribute vec4 vPosition;\n"
"void main() {\n"
" gl_Position = vPosition;\n"
"}\n";
static const char gFragmentShader[] =
"precision mediump float;\n"
"void main() {\n"
" gl_FragColor = vec4(0.0, 1.0, 0.0, 1.0);\n"
"}\n";
GLuint loadShader(GLenum shaderType,
const char * pSource) {
GLuint shader = glCreateShader(shaderType);
if (shader) {
glShaderSource(shader, 1, & pSource, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, & compiled);
if (!compiled) {
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, & infoLen);
if (infoLen) {
char * buf = (char * ) malloc(infoLen);
if (buf) {
glGetShaderInfoLog(shader, infoLen, NULL, buf);
LOGE("Could not compile shader %d:\n%s\n",
shaderType, buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
}
return shader;
}
GLuint createProgram(const char * pVertexSource,
const char * pFragmentSource) {
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
if (!vertexShader) {
return 0;
}
GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
if (!pixelShader) {
return 0;
}
GLuint program = glCreateProgram();
if (program) {
glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, & linkStatus);
if (linkStatus != GL_TRUE) {
GLint bufLength = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, & bufLength);
if (bufLength) {
char * buf = (char * ) malloc(bufLength);
if (buf) {
glGetProgramInfoLog(program, bufLength, NULL, buf);
LOGE("Could not link program:\n%s\n", buf);
free(buf);
}
}
glDeleteProgram(program);
program = 0;
}
}
return program;
}
GLuint gProgram;
GLuint gvPositionHandle;
/**
* Our saved state data.
*/
struct saved_state {
float angle;
int32_t x;
int32_t y;
};
/**
* Shared state for our app.
*/
struct engine {
struct android_app * app;
ASensorManager * sensorManager;
const ASensor * accelerometerSensor;
ASensorEventQueue * sensorEventQueue;
int animating;
EGLDisplay display;
EGLSurface surface;
EGLContext context;
int32_t width;
int32_t height;
struct saved_state state;
};
/**
* Initialize an EGL context for the current display.
*/
static int engine_init_display(struct engine * engine) {
// initialize OpenGL ES and EGL
/*
* Here specify the attributes of the desired configuration.
* Below, we select an EGLConfig with at least 8 bits per color
* component compatible with on-screen windows
*/
const EGLint attribs[] = {
EGL_SURFACE_TYPE,
EGL_WINDOW_BIT,
EGL_BLUE_SIZE,
8,
EGL_GREEN_SIZE,
8,
EGL_RED_SIZE,
8,
EGL_NONE
};
EGLint w, h, format;
EGLint numConfigs;
EGLConfig config;
EGLSurface surface;
EGLContext context;
EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
eglInitialize(display, 0, 0);
/* Here, the application chooses the configuration it desires. In this
* sample, we have a very simplified selection process, where we pick
* the first EGLConfig that matches our criteria */
eglChooseConfig(display, attribs, & config, 1, & numConfigs);
/* EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is
* guaranteed to be accepted by ANativeWindow_setBuffersGeometry().
* As soon as we picked a EGLConfig, we can safely reconfigure the
* ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID. */
eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, & format);
ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);
surface = eglCreateWindowSurface(display, config, engine->app-> window, NULL);
context = eglCreateContext(display, config, NULL, NULL);
if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
LOGW("Unable to eglMakeCurrent");
return -1;
}
eglQuerySurface(display, surface, EGL_WIDTH, & w);
eglQuerySurface(display, surface, EGL_HEIGHT, & h);
engine->display = display;
engine->context = context;
engine->surface = surface;
engine->width = w;
engine->height = h;
engine->state.angle = 0;
// Initialize GL state.
//glEnable(GL_CULL_FACE);
glDisable(GL_DEPTH_TEST);
gProgram = createProgram(gVertexShader, gFragmentShader);
gvPositionHandle = glGetAttribLocation(gProgram, "vPosition");
glViewport(0, 0, w, h);
return 0;
}
const GLfloat gTriangleVertices[] = {
0.0f, 0.5f,
-0.5f, -0.5f,
0.5f, -0.5f
};
/**
* Just the current frame in the display.
*/
static void engine_draw_frame(struct engine * engine) {
if (engine->display == NULL) {
// No display.
return;
}
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(gProgram);
checkGlError("glUseProgram");
glVertexAttribPointer(gvPositionHandle, 2, GL_FLOAT, GL_FALSE, 0, gTriangleVertices);
checkGlError("glVertexAttribPointer");
glEnableVertexAttribArray(gvPositionHandle);
checkGlError("glEnableVertexAttribArray");
glDrawArrays(GL_TRIANGLES, 0, 3);
checkGlError("glDrawArrays");
eglSwapBuffers(engine->display, engine->surface);
}
/**
* Tear down the EGL context currently associated with the display.
*/
static void engine_term_display(struct engine * engine) {
if (engine->display != EGL_NO_DISPLAY) {
eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
if (engine-> context != EGL_NO_CONTEXT) {
eglDestroyContext(engine->display, engine->context);
}
if (engine->surface != EGL_NO_SURFACE) {
eglDestroySurface(engine->display, engine->surface);
}
eglTerminate(engine->display);
}
engine->animating = 0;
engine->display = EGL_NO_DISPLAY;
engine->context = EGL_NO_CONTEXT;
engine->surface = EGL_NO_SURFACE;
}
/**
* Process the next input event.
*/
static int32_t engine_handle_input(struct android_app * app, AInputEvent * event) {
struct engine * engine = (struct engine * ) app->userData;
if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) {
engine->state.x = AMotionEvent_getX(event, 0);
engine->state.y = AMotionEvent_getY(event, 0);
return 1;
}
return 0;
}
/**
* Process the next main command.
*/
static void engine_handle_cmd(struct android_app * app, int32_t cmd) {
struct engine * engine = (struct engine * ) app->userData;
switch (cmd) {
case APP_CMD_SAVE_STATE:
// The system has asked us to save our current state. Do so.
engine->app->savedState = malloc(sizeof(struct saved_state));
*((struct saved_state *) engine->app->savedState) = engine->state;
engine->app->savedStateSize = sizeof(struct saved_state);
break;
case APP_CMD_INIT_WINDOW:
// The window is being shown, get it ready.
if (engine->app-> window != NULL) {
engine_init_display(engine);
engine_draw_frame(engine);
}
break;
case APP_CMD_TERM_WINDOW:
// The window is being hidden or closed, clean it up.
engine_term_display(engine);
break;
case APP_CMD_GAINED_FOCUS:
// When our app gains focus, we start monitoring the accelerometer.
if (engine->accelerometerSensor != NULL) {
ASensorEventQueue_enableSensor(engine->sensorEventQueue,
engine->accelerometerSensor);
// We'd like to get 60 events per second (in us).
ASensorEventQueue_setEventRate(engine->sensorEventQueue,
engine->accelerometerSensor, (1000L / 60) * 1000);
}
break;
case APP_CMD_LOST_FOCUS:
// When our app loses focus, we stop monitoring the accelerometer.
// This is to avoid consuming battery while not being used.
if (engine->accelerometerSensor != NULL) {
ASensorEventQueue_disableSensor(engine->sensorEventQueue,
engine->accelerometerSensor);
}
// Also stop animating.
engine->animating = 0;
engine_draw_frame(engine);
break;
}
}
/**
* This is the main entry point of a native application that is using
* android_native_app_glue. It runs in its own thread, with its own
* event loop for receiving input events and doing other things.
*/
void android_main(struct android_app * state) {
struct engine engine;
memset( & engine, 0, sizeof(engine));
state->userData = & engine;
state->onAppCmd = engine_handle_cmd;
state->onInputEvent = engine_handle_input;
engine.app = state;
// Prepare to monitor accelerometer
engine.sensorManager = ASensorManager_getInstance();
engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager,
ASENSOR_TYPE_ACCELEROMETER);
engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager,
state->looper, LOOPER_ID_USER, NULL, NULL);
if (state->savedState != NULL) {
// We are starting with a previous saved state; restore from it.
engine.state = * (struct saved_state * ) state->savedState;
}
engine.animating = 1;
// loop waiting for stuff to do.
while (1) {
// Read all pending events.
int ident;
int events;
struct android_poll_source * source;
// If not animating, we will block forever waiting for events.
// If animating, we loop until all events are read, then continue
// to draw the next frame of animation.
while ((ident = ALooper_pollAll(engine.animating ? 0 : -1, NULL, & events,
(void * * ) & source)) >= 0) {
// Process this event.
if (source != NULL) {
source->process(state, source);
}
// Check if we are exiting.
if (state->destroyRequested != 0) {
engine_term_display( & engine);
return;
}
}
if (engine.animating) {
// Drawing is throttled to the screen update rate, so there
// is no need to do timing here.
engine_draw_frame(&engine);
}
}
}
I also updated pch.h to include the OpenGL ES 2.0 headers.
I fixed it.
If any of you have this problem, this is what you should do:
Add the following code to the static int engine_init_display(struct engine* engine) function:
const EGLint contextAttribs[] = {
EGL_CONTEXT_CLIENT_VERSION, 2,
EGL_NONE
};
and replace
context = eglCreateContext(display, config, NULL, NULL);
with
context = eglCreateContext(display, config, EGL_NO_CONTEXT, contextAttribs);

createWindowSurface failed: EGL_BAD_MATCH?

The Android version is 2.2.1, the device is a Samsung Galaxy II, and the full crash log is:
java.lang.RuntimeException: createWindowSurface failed: EGL_BAD_MATCH
at android.opengl.GLSurfaceView$EglHelper.throwEglException(GLSurfaceView.java:1077)
at android.opengl.GLSurfaceView$EglHelper.createSurface(GLSurfaceView.java:981)
at android.opengl.GLSurfaceView$GLThread.guardedRun(GLSurfaceView.java:1304)
at android.opengl.GLSurfaceView$GLThread.run(GLSurfaceView.java:1116)
This is the code relevant to the crash:
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
glView = new GLSurfaceView(this);
glView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
glView.setRenderer(this);
setContentView(glView);
// etc...
}
I used setEGLConfigChooser() because the app would crash on API 17 if it wasn't there. From looking around, the crash on this specific device seems to have something to do with the device's PixelFormat.
What I'm wondering is how I can change the code so it will not crash on the Samsung Galaxy II running Android 2.2.1. I can't test this in an emulator and I don't have the device to test on; I just need code that is known to work, and I'm not sure how to change it.
Update: I found a way to work around this issue and actually it is fairly straightforward.
First of all: Android's default EGLConfigChooser implementation makes bad decisions on some devices. Especially the older Android devices seem to suffer from this EGL_BAD_MATCH issue. During my debugging sessions I also discovered that those older troublemaker devices had quite a limited set of available OpenGL ES configurations.
The cause of this "bad match" problem is more than just a mismatch between the GLSurfaceView's pixel format and the color bit depth settings of OpenGL ES. Overall we have to deal with the following issues:
A mismatch of the OpenGL ES API version
A mismatch of the requested target surface type
The requested color bit depth cannot be rendered on the surface view
The Android developer documentation is severely lacking when it comes to explaining the OpenGL ES API. It is therefore important to read the original documentation over at Khronos.org. Especially the doc page about eglChooseConfig is helpful here.
In order to remedy above listed problems you have to make sure to specify the following minimum configuration:
EGL_RENDERABLE_TYPE must match the OpenGL ES API version you are using. In the likely case of OpenGL ES 2.x you must set that attribute to 4 (see egl.h)
EGL_SURFACE_TYPE should have the EGL_WINDOW_BIT set
And of course you also want to set up an OpenGL ES context that provides you with the correct color, depth and stencil buffer settings.
Unfortunately it is not possible to cherry-pick these configuration options in a straightforward way. We have to choose from whatever is available on any given device. That's why it is necessary to implement a custom EGLConfigChooser, that goes through the list of available configuration sets and picks the most suitable one that matches best the given criteria.
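For reference, at the raw EGL (C) level the same minimum spec looks roughly like this (a sketch; display is assumed to be an initialized EGLDisplay). The Java chooser below expresses the equivalent through the EGL10 wrapper:
#include <EGL/egl.h>
static EGLint count_matching_configs(EGLDisplay display) {
    const EGLint minimumSpec[] = {
        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, /* == 4, OpenGL ES 2.x */
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,        /* we render to a window */
        EGL_NONE                                 /* end of attribute list */
    };
    EGLint numConfigs = 0;
    /* First call with a NULL config array just counts the matches */
    eglChooseConfig(display, minimumSpec, NULL, 0, &numConfigs);
    return numConfigs;
}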
Anyway, I whipped up a sample implementation for such a config chooser:
public class MyConfigChooser implements EGLConfigChooser {
final private static String TAG = "MyConfigChooser";
// This constant is not defined in the Android API, so we need to do that here:
final private static int EGL_OPENGL_ES2_BIT = 4;
// Our minimum requirements for the graphics context
private static int[] mMinimumSpec = {
// We want OpenGL ES 2 (or set it to any other version you wish)
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
// We want to render to a window
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_WINDOW_BIT,
// We do not want a translucent window, otherwise the
// home screen or activity in the background may shine through
EGL10.EGL_TRANSPARENT_TYPE, EGL10.EGL_NONE,
// indicate that this list ends:
EGL10.EGL_NONE
};
private int[] mValue = new int[1];
protected int mAlphaSize;
protected int mBlueSize;
protected int mDepthSize;
protected int mGreenSize;
protected int mRedSize;
protected int mStencilSize;
/**
* The constructor lets you specify your minimum pixel format,
* depth and stencil buffer requirements.
*/
public MyConfigChooser(int r, int g, int b, int a, int depth, int
stencil) {
mRedSize = r;
mGreenSize = g;
mBlueSize = b;
mAlphaSize = a;
mDepthSize = depth;
mStencilSize = stencil;
}
@Override
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
int[] arg = new int[1];
egl.eglChooseConfig(display, mMinimumSpec, null, 0, arg);
int numConfigs = arg[0];
Log.i(TAG, numConfigs + " configurations available");
if(numConfigs <= 0) {
// Ooops... even the minimum spec is not available here
return null;
}
EGLConfig[] configs = new EGLConfig[numConfigs];
egl.eglChooseConfig(display, mMinimumSpec, configs,
numConfigs, arg);
// Let's do the hard work now (see next method below)
EGLConfig chosen = chooseConfig(egl, display, configs);
if(chosen == null) {
throw new RuntimeException(
"Could not find a matching configuration out of "
+ configs.length + " available.");
}
// Success
return chosen;
}
/**
* This method iterates through the list of configurations that
* fulfill our minimum requirements and tries to pick one that matches best
* our requested color, depth and stencil buffer requirements that were set using
* the constructor of this class.
*/
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
EGLConfig bestMatch = null;
int bestR = Integer.MAX_VALUE, bestG = Integer.MAX_VALUE,
bestB = Integer.MAX_VALUE, bestA = Integer.MAX_VALUE,
bestD = Integer.MAX_VALUE, bestS = Integer.MAX_VALUE;
for(EGLConfig config : configs) {
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
if(r <= bestR && g <= bestG && b <= bestB && a <= bestA
&& d <= bestD && s <= bestS && r >= mRedSize
&& g >= mGreenSize && b >= mBlueSize
&& a >= mAlphaSize && d >= mDepthSize
&& s >= mStencilSize) {
bestR = r;
bestG = g;
bestB = b;
bestA = a;
bestD = d;
bestS = s;
bestMatch = config;
}
}
return bestMatch;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue) {
if(egl.eglGetConfigAttrib(display, config, attribute,
mValue)) {
return mValue[0];
}
return defaultValue;
}
}
I don't have the reputation score to add a comment yet, or else I would have put a brief comment on Nobu Games' answer. I encountered this same EGL_BAD_MATCH error and their answer helped put me on the right path. Instead, I have to create a separate answer.
As Nobu Games mentions, there appears to be a mismatch between the GLSurfaceView's PixelFormat and the pixel format parameters passed to setEGLConfigChooser(). In my case, I was asking for RGBA8888 but my GLSurfaceView was RGB565. This caused the EGL_BAD_MATCH error later on within my initialization.
The enhancement to their answer is that you can get the desired PixelFormat for the window and use it to dynamically choose an EGL context.
To make my code as generic as possible, I changed the GLSurfaceView to take in an additional parameter -- the pixel format of the display. I get this from my activity by calling:
getWindowManager().getDefaultDisplay().getPixelFormat();
I pass this value down to the GLSurfaceView and then extract the optimal bit depths for each of RGBA like this:
if (pixelFormatVal > 0) {
PixelFormat info = new PixelFormat();
PixelFormat.getPixelFormatInfo(pixelFormatVal, info);
if (PixelFormat.formatHasAlpha(pixelFormatVal)) {
if (info.bitsPerPixel >= 24) {
m_desiredABits = 8;
} else {
m_desiredABits = 6; // total guess
}
} else {
m_desiredABits = 0;
}
if (info.bitsPerPixel >= 24) {
m_desiredRBits = 8;
m_desiredGBits = 8;
m_desiredBBits = 8;
} else if (info.bitsPerPixel >= 16) {
m_desiredRBits = 5;
m_desiredGBits = 6;
m_desiredBBits = 5;
} else {
m_desiredRBits = 4;
m_desiredGBits = 4;
m_desiredBBits = 4;
}
} else {
m_desiredRBits = 8;
m_desiredGBits = 8;
m_desiredBBits = 8;
}
I then pass these values down to my config chooser. This code works for me on a RGB565 device as well as a RGBA8888 device.
My assumption is that the vendor has chosen the default for a reason and that it will give the most performant results. Of course I have nothing to back that statement up, but it is the strategy I'm going with.

android: how to get display resolution in native code

Is it possible in Android to get the display dimensions in native code?
I see that there are EGL-related functions:
EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
eglQuerySurface(display, surface, EGL_WIDTH, &w);
eglQuerySurface(display, surface, EGL_HEIGHT, &h);
But I need the dimensions without creating any surface.
You can get the window size while processing the APP_CMD_INIT_WINDOW "app command" like this in your native activity:
static void engine_handle_cmd(struct android_app* app, int32_t cmd) {
struct engine* engine = (struct engine*)app->userData;
switch (cmd) {
case APP_CMD_INIT_WINDOW:
if (engine->app->window != NULL) {
int width = ANativeWindow_getWidth(engine->app->window);
int height = ANativeWindow_getHeight(engine->app->window);
}
Check out main.c in the native-activity sample NDK project if you want to know how to set up the engine struct & the engine_handle_cmd callback.
Below is the relevant code, taken from the Google source for screen capture: http://code.google.com/p/androidscreenshot/source/browse/trunk/Binary/screenshot.c?r=3
The vinfo structure contains the fields xres and yres.
#include <stdio.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <linux/fb.h>
int framebufferHandle = -1;
struct fb_var_screeninfo vinfo;
framebufferHandle = open("/dev/graphics/fb0", O_RDONLY);
if(framebufferHandle < 0)
{
printf("failed to open /dev/graphics/fb0\n");
// handle error
}
if(ioctl(framebufferHandle, FBIOGET_VSCREENINFO, &vinfo) < 0)
{
printf("failed to open ioctl\n");
// handle error
}
fcntl(framebufferHandle, F_SETFD, FD_CLOEXEC);
If you're looking for the pixel values, heightPixels and widthPixels (on android.util.DisplayMetrics, from the Java side) are probably what you're looking for. See the developer docs for more info:
http://developer.android.com/reference/android/util/DisplayMetrics.html
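If you need those DisplayMetrics values from native code, a JNI round trip is one option. A sketch, assuming a native_app_glue app where state->activity->clazz is the NativeActivity instance and env is a JNIEnv attached to the current thread:
#include <jni.h>
static void get_display_size(JNIEnv *env, jobject activity, int *outW, int *outH) {
    /* activity.getWindowManager().getDefaultDisplay() */
    jclass activityClass = (*env)->GetObjectClass(env, activity);
    jmethodID getWindowManager = (*env)->GetMethodID(env, activityClass,
            "getWindowManager", "()Landroid/view/WindowManager;");
    jobject wm = (*env)->CallObjectMethod(env, activity, getWindowManager);
    jclass wmClass = (*env)->GetObjectClass(env, wm);
    jmethodID getDefaultDisplay = (*env)->GetMethodID(env, wmClass,
            "getDefaultDisplay", "()Landroid/view/Display;");
    jobject display = (*env)->CallObjectMethod(env, wm, getDefaultDisplay);
    /* display.getMetrics(new DisplayMetrics()) */
    jclass metricsClass = (*env)->FindClass(env, "android/util/DisplayMetrics");
    jmethodID ctor = (*env)->GetMethodID(env, metricsClass, "<init>", "()V");
    jobject metrics = (*env)->NewObject(env, metricsClass, ctor);
    jclass displayClass = (*env)->GetObjectClass(env, display);
    jmethodID getMetrics = (*env)->GetMethodID(env, displayClass,
            "getMetrics", "(Landroid/util/DisplayMetrics;)V");
    (*env)->CallVoidMethod(env, display, getMetrics, metrics);
    /* read the widthPixels / heightPixels int fields */
    *outW = (*env)->GetIntField(env, metrics,
            (*env)->GetFieldID(env, metricsClass, "widthPixels", "I"));
    *outH = (*env)->GetIntField(env, metrics,
            (*env)->GetFieldID(env, metricsClass, "heightPixels", "I"));
}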
