'ID_GY' was not declared in this scope - android

I am a complete coding noob and I'm trying to implement a HAL (specifically for the BMA222 sensor) on my Android device.
When I try this code (BMA222.cpp):
#include <fcntl.h>
#include <errno.h>
#include <math.h>
#include <poll.h>
#include <unistd.h>
#include <dirent.h>
#include <sys/select.h>
#include <cutils/log.h>
#include "BMA222Sensor.h"
#define CONVERT (GRAVITY_EARTH / 256)
//#define CONVERT_GRAVITY_X (CONVERT)
//#define CONVERT_GRAVITY_Y (-1 * CONVERT)
//#define CONVERT_GRAVITY_Z (-1 * CONVERT)
#define SENSOR_NAME "bma222"
/*****************************************************************************/
BMA222Sensor::BMA222Sensor()
: SensorBase(NULL, SENSOR_NAME),
mEnabled(0),
mInputReader(4),
mHasPendingEvent(false),
mEnabledTime(0)
{
mPendingEvent.version = sizeof(sensors_event_t);
mPendingEvent.sensor = ID_GY;
mPendingEvent.type = SENSOR_TYPE_ACCELEROMETER;
memset(mPendingEvent.data, 0, sizeof(mPendingEvent.data));
if (sensor_get_class_path() < 0)
LOGD("Failed to get class path\n");
}
BMA222Sensor::~BMA222Sensor() {
if (mEnabled) {
enable(0, 0);
}
}
int BMA222Sensor::enable(int32_t, int en) {
int flags = en ? 1 : 0;
char buf[2];
int count = 0;
if (flags != mEnabled) {
buf[1] = 0;
if (flags) {
buf[0] = '1';
mEnabledTime = getTimestamp();
} else {
buf[0] = '0';
}
set_sysfs_input_attr(class_path, "enable", buf, 1);
mEnabled = flags;
}
return 0;
}
bool BMA222Sensor::hasPendingEvents() const {
return mHasPendingEvent;
}
int BMA222Sensor::setDelay(int32_t handle, int64_t delay_ns)
{
char buf[80];
int count = 0;
int64_t delay_ms = delay_ns / 1000000;
count = sprintf(buf, "%lld", delay_ms);
set_sysfs_input_attr(class_path, "delay", buf, count);
return 0;
}
int BMA222Sensor::readEvents(sensors_event_t* data, int count)
{
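// Drain the input-event queue: accumulate ABS_X/Y/Z values into mPendingEvent
// and emit one completed sensors_event_t per EV_SYN marker.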
if (count < 1)
return -EINVAL;
if (mHasPendingEvent) {
mHasPendingEvent = false;
mPendingEvent.timestamp = getTimestamp();
*data = mPendingEvent;
return mEnabled ? 1 : 0;
}
ssize_t n = mInputReader.fill(data_fd);
if (n < 0)
return n;
int numEventReceived = 0;
input_event const* event;
while (count && mInputReader.readEvent(&event)) {
int type = event->type;
if (type == EV_ABS) {
float value = event->value;
if (event->code == ABS_X) {
//mPendingEvent.data[1] = value * CONVERT_GRAVITY_Y;
mPendingEvent.data[0] = value * CONVERT;
} else if (event->code == ABS_Y) {
//mPendingEvent.data[0] = value * CONVERT_GRAVITY_X;
mPendingEvent.data[1] = value * CONVERT;
} else if (event->code == ABS_Z) {
//mPendingEvent.data[2] = value * CONVERT_GRAVITY_Z;
mPendingEvent.data[2] = value * CONVERT;
}
} else if (type == EV_SYN) {
mPendingEvent.version = sizeof(sensors_event_t);
mPendingEvent.sensor = ID_A;
mPendingEvent.type = SENSOR_TYPE_ACCELEROMETER;
mPendingEvent.timestamp = timevalToNano(event->time);
*data++ = mPendingEvent;
numEventReceived++;
count--;
} else {
LOGE("BMA222Sensor: unknown event (type=%d, code=%d)",
type, event->code);
}
mInputReader.next();
}
return numEventReceived;
}
bool BMA222Sensor::isEnabled(int32_t handle) {
return (0 != (mEnabled)) ? true : false;
}
int BMA222Sensor::sensor_get_class_path()
{
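// Walk /sys/class/input/input*, read each node's "name" attribute, and keep
// the directory whose name matches SENSOR_NAME ("bma222") in class_path.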
char dirname[] = "/sys/class/input";
char buf[256];
int res;
DIR *dir;
struct dirent *de;
int fd = -1;
int found = 0;
dir = opendir(dirname);
if (dir == NULL)
return -1;
while((de = readdir(dir))) {
if (strncmp(de->d_name, "input", strlen("input")) != 0) {
continue;
}
sprintf(class_path, "%s/%s", dirname, de->d_name);
snprintf(buf, sizeof(buf), "%s/name", class_path);
fd = open(buf, O_RDONLY);
if (fd < 0) {
continue;
}
if ((res = read(fd, buf, sizeof(buf))) < 0) {
close(fd);
continue;
}
buf[res - 1] = '\0';
if (strcmp(buf, SENSOR_NAME) == 0) {
found = 1;
close(fd);
break;
}
close(fd);
fd = -1;
}
closedir(dir);
if (found) {
return 0;
}else {
*class_path = '\0';
return -1;
}
}
int BMA222Sensor::set_sysfs_input_attr(char *class_path,
const char *attr, char *value, int len)
{
char path[256];
int fd;
if (class_path == NULL || *class_path == '\0'
|| attr == NULL || value == NULL || len < 1) {
return -EINVAL;
}
snprintf(path, sizeof(path), "%s/%s", class_path, attr);
path[sizeof(path) - 1] = '\0';
fd = open(path, O_RDWR);
if (fd < 0) {
return -errno;
}
if (write(fd, value, len) < 0) {
close(fd);
return -errno;
}
close(fd);
return 0;
}
And this (BMA222Sensor.h):
#ifndef ANDROID_BMA222_SENSOR_H
#define ANDROID_BMA222_SENSOR_H
#include <stdint.h>
#include <errno.h>
#include <sys/cdefs.h>
#include <sys/types.h>
#include "sensors.h"
#include "SensorBase.h"
#include "InputEventReader.h"
/*****************************************************************************/
struct input_event;
class BMA222Sensor : public SensorBase {
int mEnabled;
InputEventCircularReader mInputReader;
sensors_event_t mPendingEvent;
bool mHasPendingEvent;
char input_sysfs_path[PATH_MAX];
int input_sysfs_path_len;
int64_t mEnabledTime;
char class_path[256];
int sensor_get_class_path();
int set_sysfs_input_attr(char *class_path,
const char *attr, char *value, int len);
public:
BMA222Sensor();
virtual ~BMA222Sensor();
virtual int readEvents(sensors_event_t* data, int count);
virtual bool hasPendingEvents() const;
virtual int setDelay(int32_t handle, int64_t ns);
virtual int enable(int32_t handle, int enabled);
virtual bool isEnabled(int32_t handle); //rockie
};
/*****************************************************************************/
#endif // ANDROID_BMA222_SENSOR_H
I get this (shortened):
In constructor 'BMA222Sensor::BMA222Sensor()':
error: 'ID_GY' was not declared in this scope
make: *** [BMA222Sensor.o] Error 1
I don't understand where I'm going wrong, any help?

Unfortunately I know nothing about Android, only C++. I would guess that ID_GY is an integer value that identifies the sensor. One way to define it would be to write
#define ID_GY 100
just after where you wrote
#define SENSOR_NAME "bma222"
The value 100 is arbitrary; it probably doesn't matter too much what the actual value is, but I could easily be wrong about that.
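For reference, AOSP-style sensor HALs usually define these handles in the sensors.h the HAL ships (which this code already includes), one per sensor the module exposes. A minimal sketch, with illustrative values:
// Hypothetical sensors.h excerpt: one handle per sensor this HAL exposes.
// The concrete values are arbitrary but must be unique within the HAL.
#define SENSORS_HANDLE_BASE 0
#define ID_A  (SENSORS_HANDLE_BASE + 0) // accelerometer
#define ID_GY (SENSORS_HANDLE_BASE + 1) // gyroscope
Note that the constructor sets mPendingEvent.sensor = ID_GY while readEvents later sets it to ID_A; since the BMA222 is an accelerometer, both should presumably be ID_A.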

Related

Superpowered SDK trying to record and play it at the same time

I am trying to create an Android app with the Superpowered SDK that redirects the mic input to the headphones with a reverb effect added, to simulate a huge-room effect. I found this repository with code snippets: https://bitbucket.org/snippets/kasurd/Mynnp/nativesuperpoweredrecorder-with, but I can't get it working. This is my current code:
#include <jni.h>
#include <stdio.h>
#include "SuperpoweredExample.h"
#include <SuperpoweredSimple.h>
#include <SuperpoweredCPU.h>
#include <pthread.h>
#include <malloc.h>
static void playerEventCallback(void *clientData, SuperpoweredAdvancedAudioPlayerEvent event, void * __unused value) {
if (event == SuperpoweredAdvancedAudioPlayerEvent_LoadSuccess) {
SuperpoweredAdvancedAudioPlayer *player = *((SuperpoweredAdvancedAudioPlayer **)clientData);
//player->setBpm(126.0f);
//player->setFirstBeatMs(353);
player->setPosition(player->firstBeatMs, false, false);
}else if (event == SuperpoweredAdvancedAudioPlayerEvent_LoadError) {
} else if (event == SuperpoweredAdvancedAudioPlayerEvent_EOF) {
};
}
static bool audioProcessingPlayback(void *clientdata, short int *audioIO, int numberOfSamples, int samplerate) {
return ((SuperpoweredPlayer *)clientdata)->processPlayback(0, audioIO, numberOfSamples);
}
static bool audioProcessingRecording(void *clientdata, short int *audioIO, int numberOfSamples, int samplerate) {
return ((SuperpoweredPlayer *)clientdata)->processRecording(audioIO, numberOfSamples);
}
bool SuperpoweredPlayer::process(short int *output, unsigned int numberOfSamples) {
bool processResult = player->process(stereoBuffer, false, numberOfSamples);
SuperpoweredFloatToShortInt(stereoBuffer, output, numberOfSamples);
return processResult;
}
static bool audioProcessing(void *clientdata, short int *audioIO, int numberOfSamples, int __unused samplerate) {
return ((SuperpoweredPlayer *)clientdata)->process(audioIO, (unsigned int)numberOfSamples);
}
SuperpoweredPlayer::SuperpoweredPlayer(unsigned int sampleRate, unsigned int bufferSize, const char *path, int fileOffset, int fileLength) {
stereoBuffer = (float *)memalign(16, (bufferSize + 16) * sizeof(float) * 2);
stereoBufferRecording = (float *)memalign(16, (bufferSize + 16) * sizeof(float) * 2);
this->sampleRate = sampleRate;
this->bufferSize = bufferSize;
playerA = NULL;
recorder = NULL;
audioSystemRecording = new SuperpoweredAndroidAudioIO(sampleRate, bufferSize, true, false, audioProcessingRecording, this, bufferSize * 2);
initPlayerA(path, fileOffset, fileLength, 0);
}
void SuperpoweredPlayer::initPlayerA(const char *path, int fileOffset, int fileSize, double startOffsetMs) {
audioSystemA = new SuperpoweredAndroidAudioIO(sampleRate, bufferSize, false, true, audioProcessingPlayback, this, bufferSize * 2);
playerA = new SuperpoweredAdvancedAudioPlayer(&playerA , playerEventCallback, sampleRate, 0);
playerA->open(path, fileOffset, fileSize);
playerA->syncMode = SuperpoweredAdvancedAudioPlayerSyncMode_TempoAndBeat;
}
void SuperpoweredPlayer::playPause(bool play) {
if (!play) {
askPlaying = false;
if (playerA != NULL) {
playerA->pause();
}
} else {
if (playerA != NULL) {
playerA->play(false);
}
askPlaying = true;
};
SuperpoweredCPU::setSustainedPerformanceMode(play);
}
void SuperpoweredPlayer::startRecording(const char *tempPath, const char *destinationPath) {
recorder = new SuperpoweredRecorder(tempPath, sampleRate);
askRecording = true;
recorder->start(destinationPath);
playPause(true);
}
void SuperpoweredPlayer::stopRecording() {
if (!askRecording) {
return;
}
askPlaying = false;
askRecording = false;
recorder->stop();
playPause(false);
}
// method to receive playback parts
bool SuperpoweredPlayer::processPlayback(int playerId, short int *output, unsigned int numberOfSamples) {
pthread_mutex_lock(&mutex);
if (!askPlaying) {
pthread_mutex_unlock(&mutex);
return false;
}
playerA->process(stereoBuffer, false, numberOfSamples, 1.0f, 0.0f, -1.0f);
SuperpoweredFloatToShortInt(stereoBuffer, output, numberOfSamples);
pthread_mutex_unlock(&mutex);
return true;
}
// method to receive recording parts
bool SuperpoweredPlayer::processRecording(short int *input, unsigned int numberOfSamples) {
pthread_mutex_lock(&mutex);
if (askRecording) {
unsigned int data = 0;
SuperpoweredShortIntToFloat(input, stereoBufferRecording, numberOfSamples);
data = recorder->process(stereoBufferRecording, NULL, numberOfSamples);
pthread_mutex_unlock(&mutex);
return true;
}
pthread_mutex_unlock(&mutex);
return false;
}
//void SuperpoweredPlayer::setTempo(double value) {
// player->setTempo(value, true);
//}
SuperpoweredPlayer::~SuperpoweredPlayer() {
delete playerA;
free(stereoBuffer);
pthread_mutex_destroy(&mutex);
}
static SuperpoweredPlayer *player = NULL;
static const char *path;
extern "C" JNIEXPORT void Java_com_example_pc_superpoweredsdk_SuperPoweredPlayerWrapper_SuperpoweredPlayer(JNIEnv *javaEnvironment, jobject __unused obj, jint sampleRate, jint bufferSize, jstring apkPath, jint fileOffset, jint fileLength) {
path = javaEnvironment->GetStringUTFChars(apkPath, JNI_FALSE);
player = new SuperpoweredPlayer((unsigned int)sampleRate, ((unsigned int)bufferSize), path + 'temp.wav', fileOffset, fileLength);
javaEnvironment->ReleaseStringUTFChars(apkPath, path);
}
extern "C" JNIEXPORT void Java_com_example_pc_superpoweredsdk_SuperPoweredPlayerWrapper_playPause(JNIEnv * __unused javaEnvironment, jobject __unused obj, jboolean play) {
player->startRecording(path + 'temp.wav', path + 'dest.wav');
}
sampleRate is 44100, bufferSize is 512, and path is context.getFilesDir().getAbsolutePath().
I am stuck at the moment, trying to figure out how to play back the mic input through headphones, with a reverb filter added, in real time with as little latency as possible.
The code attached is not really relevant here, as it mostly deals with a player.
I recommend writing your own code from scratch. Use SuperpoweredAndroidAudioIO for audio input/output, and use SuperpoweredRecorder to process the incoming audio. It will be no more than a few lines in total.
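Following that advice, a minimal sketch of the mic-to-headphones path could look like the code below. It reuses the callback signature, the SuperpoweredShortIntToFloat/SuperpoweredFloatToShortInt helpers and the SuperpoweredAndroidAudioIO constructor arguments from the question, and assumes the SDK's SuperpoweredReverb effect with enable() and an in-place process() (chosen because the question asks for reverb); startMonitoring is a hypothetical entry point you would call from JNI:
#include <SuperpoweredAndroidAudioIO.h>
#include <SuperpoweredReverb.h>
#include <SuperpoweredSimple.h>
#include <malloc.h>
static SuperpoweredReverb *reverb = NULL;
static float *floatBuffer = NULL;
// Runs on the audio I/O thread. audioIO holds the mic samples on entry;
// returning true sends whatever is left in the buffer to the output.
static bool audioProcessing(void *clientdata, short int *audioIO, int numberOfSamples, int samplerate) {
SuperpoweredShortIntToFloat(audioIO, floatBuffer, (unsigned int)numberOfSamples);
reverb->process(floatBuffer, floatBuffer, (unsigned int)numberOfSamples); // add the room effect
SuperpoweredFloatToShortInt(floatBuffer, audioIO, (unsigned int)numberOfSamples);
return true;
}
void startMonitoring(unsigned int samplerate, unsigned int buffersize) {
floatBuffer = (float *)memalign(16, (buffersize + 16) * sizeof(float) * 2);
reverb = new SuperpoweredReverb(samplerate);
reverb->enable(true);
// One full-duplex instance: enableInput and enableOutput both true.
new SuperpoweredAndroidAudioIO(samplerate, buffersize, true, true, audioProcessing, NULL, buffersize * 2);
}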

Java.lang.UnsatisfiedLinkError: No implementation found for int [duplicate]

This question already has answers here:
Android NDK C++ JNI (no implementation found for native...)
(11 answers)
Closed 5 years ago.
I executed the YouTube "watch me" Android application project. I just added some classes to my project and built it with the NDK. I got an error like:
java.lang.UnsatisfiedLinkError: No implementation found for int com.ephronsystem.mobilizerapp.Ffmpeg.encodeVideoFrame(byte[]) (tried Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeVideoFrame and Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeVideoFrame___3B).
My code:
package com.ephronsystem.mobilizerapp;
public class Ffmpeg {
static {
System.loadLibrary("ffmpeg");
}
public static native boolean init(int width, int height, int audio_sample_rate, String rtmpUrl);
public static native void shutdown();
// Returns the size of the encoded frame.
public static native int encodeVideoFrame(byte[] yuv_image);
public static native int encodeAudioFrame(short[] audio_data, int length);
}
This is ffmpeg-jni.c
#include <android/log.h>
#include <string.h>
#include <jni.h>
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/opt.h"
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jboolean JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_init(JNIEnv *env, jobject thiz,
jint width, jint height, jint audio_sample_rate, jstring rtmp_url);
JNIEXPORT void JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_shutdown(JNIEnv *env, jobject thiz);
JNIEXPORT jint JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeVideoFrame(JNIEnv *env, jobject thiz, jbyteArray yuv_image);
JNIEXPORT jint JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeAudioFrame(JNIEnv *env, jobject thiz, jshortArray audio_data, jint length);
#ifdef __cplusplus
}
#endif
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, "ffmpeg-jni", __VA_ARGS__)
#define URL_WRONLY 2
static AVFormatContext *fmt_context;
static AVStream *video_stream;
static AVStream *audio_stream;
static int pts = 0;
static int last_audio_pts = 0;
// Buffers for UV format conversion
static unsigned char *u_buf;
static unsigned char *v_buf;
static int enable_audio = 1;
static int64_t audio_samples_written = 0;
static int audio_sample_rate = 0;
// Stupid buffer for audio samples. Not even a proper ring buffer
#define AUDIO_MAX_BUF_SIZE 16384 // 2x what we get from Java
static short audio_buf[AUDIO_MAX_BUF_SIZE];
static int audio_buf_size = 0;
void AudioBuffer_Push(const short *audio, int num_samples) {
if (audio_buf_size >= AUDIO_MAX_BUF_SIZE - num_samples) {
LOGI("AUDIO BUFFER OVERFLOW: %i + %i > %i", audio_buf_size, num_samples,
AUDIO_MAX_BUF_SIZE);
return;
}
for (int i = 0; i < num_samples; i++) {
audio_buf[audio_buf_size++] = audio[i];
}
}
int AudioBuffer_Size() { return audio_buf_size; }
short *AudioBuffer_Get() { return audio_buf; }
void AudioBuffer_Pop(int num_samples) {
if (num_samples > audio_buf_size) {
LOGI("Audio buffer Pop WTF: %i vs %i", num_samples, audio_buf_size);
return;
}
memmove(audio_buf, audio_buf + num_samples, num_samples * sizeof(short));
audio_buf_size -= num_samples;
}
void AudioBuffer_Clear() {
memset(audio_buf, 0, sizeof(audio_buf));
audio_buf_size = 0;
}
static void log_callback(void *ptr, int level, const char *fmt, va_list vl) {
char x[2048];
vsnprintf(x, 2048, fmt, vl);
LOGI(x);
}
JNIEXPORT jboolean JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_init(JNIEnv *env, jobject thiz,
jint width, jint height,
jint audio_sample_rate_param,
jstring rtmp_url) {
avcodec_register_all();
av_register_all();
av_log_set_callback(log_callback);
fmt_context = avformat_alloc_context();
AVOutputFormat *ofmt = av_guess_format("flv", NULL, NULL);
if (ofmt) {
LOGI("av_guess_format returned %s", ofmt->long_name);
} else {
LOGI("av_guess_format fail");
return JNI_FALSE;
}
fmt_context->oformat = ofmt;
LOGI("creating video stream");
video_stream = av_new_stream(fmt_context, 0);
if (enable_audio) {
LOGI("creating audio stream");
audio_stream = av_new_stream(fmt_context, 1);
}
// Open Video Codec.
// ======================
AVCodec *video_codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!video_codec) {
LOGI("Did not find the video codec");
return JNI_FALSE; // leak!
} else {
LOGI("Video codec found!");
}
AVCodecContext *video_codec_ctx = video_stream->codec;
video_codec_ctx->codec_id = video_codec->id;
video_codec_ctx->codec_type = AVMEDIA_TYPE_VIDEO;
video_codec_ctx->level = 31;
video_codec_ctx->width = width;
video_codec_ctx->height = height;
video_codec_ctx->pix_fmt = PIX_FMT_YUV420P;
video_codec_ctx->rc_max_rate = 0;
video_codec_ctx->rc_buffer_size = 0;
video_codec_ctx->gop_size = 12;
video_codec_ctx->max_b_frames = 0;
video_codec_ctx->slices = 8;
video_codec_ctx->b_frame_strategy = 1;
video_codec_ctx->coder_type = 0;
video_codec_ctx->me_cmp = 1;
video_codec_ctx->me_range = 16;
video_codec_ctx->qmin = 10;
video_codec_ctx->qmax = 51;
video_codec_ctx->keyint_min = 25;
video_codec_ctx->refs = 3;
video_codec_ctx->trellis = 0;
video_codec_ctx->scenechange_threshold = 40;
video_codec_ctx->flags |= CODEC_FLAG_LOOP_FILTER;
video_codec_ctx->me_method = ME_HEX;
video_codec_ctx->me_subpel_quality = 6;
video_codec_ctx->i_quant_factor = 0.71;
video_codec_ctx->qcompress = 0.6;
video_codec_ctx->max_qdiff = 4;
video_codec_ctx->time_base.den = 10;
video_codec_ctx->time_base.num = 1;
video_codec_ctx->bit_rate = 3200 * 1000;
video_codec_ctx->bit_rate_tolerance = 0;
video_codec_ctx->flags2 |= 0x00000100;
fmt_context->bit_rate = 4000 * 1000;
av_opt_set(video_codec_ctx, "partitions", "i8x8,i4x4,p8x8,b8x8", 0);
av_opt_set_int(video_codec_ctx, "direct-pred", 1, 0);
av_opt_set_int(video_codec_ctx, "rc-lookahead", 0, 0);
av_opt_set_int(video_codec_ctx, "fast-pskip", 1, 0);
av_opt_set_int(video_codec_ctx, "mixed-refs", 1, 0);
av_opt_set_int(video_codec_ctx, "8x8dct", 0, 0);
av_opt_set_int(video_codec_ctx, "weightb", 0, 0);
if (fmt_context->oformat->flags & AVFMT_GLOBALHEADER)
video_codec_ctx->flags |= CODEC_FLAG_GLOBAL_HEADER;
LOGI("Opening video codec");
AVDictionary *vopts = NULL;
av_dict_set(&vopts, "profile", "main", 0);
//av_dict_set(&vopts, "vprofile", "main", 0);
av_dict_set(&vopts, "rc-lookahead", 0, 0);
av_dict_set(&vopts, "tune", "film", 0);
av_dict_set(&vopts, "preset", "ultrafast", 0);
av_opt_set(video_codec_ctx->priv_data, "tune", "film", 0);
av_opt_set(video_codec_ctx->priv_data, "preset", "ultrafast", 0);
av_opt_set(video_codec_ctx->priv_data, "tune", "film", 0);
int open_res = avcodec_open2(video_codec_ctx, video_codec, &vopts);
if (open_res < 0) {
LOGI("Error opening video codec: %i", open_res);
return JNI_FALSE; // leak!
}
// Open Audio Codec.
// ======================
if (enable_audio) {
AudioBuffer_Clear();
audio_sample_rate = audio_sample_rate_param;
AVCodec *audio_codec = avcodec_find_encoder(AV_CODEC_ID_AAC);
if (!audio_codec) {
LOGI("Did not find the audio codec");
return JNI_FALSE; // leak!
} else {
LOGI("Audio codec found!");
}
AVCodecContext *audio_codec_ctx = audio_stream->codec;
audio_codec_ctx->codec_id = audio_codec->id;
audio_codec_ctx->codec_type = AVMEDIA_TYPE_AUDIO;
audio_codec_ctx->bit_rate = 128000;
audio_codec_ctx->bit_rate_tolerance = 16000;
audio_codec_ctx->channels = 1;
audio_codec_ctx->profile = FF_PROFILE_AAC_LOW;
audio_codec_ctx->sample_fmt = AV_SAMPLE_FMT_FLT;
audio_codec_ctx->sample_rate = 44100;
LOGI("Opening audio codec");
AVDictionary *opts = NULL;
av_dict_set(&opts, "strict", "experimental", 0);
open_res = avcodec_open2(audio_codec_ctx, audio_codec, &opts);
LOGI("audio frame size: %i", audio_codec_ctx->frame_size);
if (open_res < 0) {
LOGI("Error opening audio codec: %i", open_res);
return JNI_FALSE; // leak!
}
}
const jbyte *url = (*env)->GetStringUTFChars(env, rtmp_url, NULL);
// Point to an output file
if (!(ofmt->flags & AVFMT_NOFILE)) {
if (avio_open(&fmt_context->pb, url, URL_WRONLY) < 0) {
LOGI("ERROR: Could not open file %s", url);
return JNI_FALSE; // leak!
}
}
(*env)->ReleaseStringUTFChars(env, rtmp_url, url);
LOGI("Writing output header.");
// Write file header
if (avformat_write_header(fmt_context, NULL) != 0) {
LOGI("ERROR: av_write_header failed");
return JNI_FALSE;
}
pts = 0;
last_audio_pts = 0;
audio_samples_written = 0;
// Initialize buffers for UV format conversion
int frame_size = video_codec_ctx->width * video_codec_ctx->height;
u_buf = (unsigned char *) av_malloc(frame_size / 4);
v_buf = (unsigned char *) av_malloc(frame_size / 4);
LOGI("ffmpeg encoding init done");
return JNI_TRUE;
}
JNIEXPORT void JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_shutdown(JNIEnv *env, jobject thiz) {
av_write_trailer(fmt_context);
avio_close(fmt_context->pb);
avcodec_close(video_stream->codec);
if (enable_audio) {
avcodec_close(audio_stream->codec);
}
av_free(fmt_context);
av_free(u_buf);
av_free(v_buf);
fmt_context = NULL;
u_buf = NULL;
v_buf = NULL;
}
JNIEXPORT jint JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeVideoFrame(JNIEnv *env, jobject thiz, jbyteArray yuv_image) {
int yuv_length = (*env)->GetArrayLength(env, yuv_image);
unsigned char *yuv_data = (*env)->GetByteArrayElements(env, yuv_image, 0);
AVCodecContext *video_codec_ctx = video_stream->codec;
//LOGI("Yuv size: %i w: %i h: %i", yuv_length, video_codec_ctx->width, video_codec_ctx->height);
int frame_size = video_codec_ctx->width * video_codec_ctx->height;
const unsigned char *uv = yuv_data + frame_size;
// Convert YUV from NV12 to I420. Y channel is the same so we don't touch it,
// we just have to deinterleave UV.
for (int i = 0; i < frame_size / 4; i++) {
v_buf[i] = uv[i * 2];
u_buf[i] = uv[i * 2 + 1];
}
AVFrame source;
memset(&source, 0, sizeof(AVFrame));
source.data[0] = yuv_data;
source.data[1] = u_buf;
source.data[2] = v_buf;
source.linesize[0] = video_codec_ctx->width;
source.linesize[1] = video_codec_ctx->width / 2;
source.linesize[2] = video_codec_ctx->width / 2;
// only for bitrate regulation. irrelevant for sync.
source.pts = pts;
pts++;
int out_length = frame_size + (frame_size / 2);
unsigned char *out = (unsigned char *) av_malloc(out_length);
int compressed_length = avcodec_encode_video(video_codec_ctx, out, out_length, &source);
(*env)->ReleaseByteArrayElements(env, yuv_image, yuv_data, 0);
// Write to file too
if (compressed_length > 0) {
AVPacket pkt;
av_init_packet(&pkt);
pkt.pts = last_audio_pts;
if (video_codec_ctx->coded_frame && video_codec_ctx->coded_frame->key_frame) {
pkt.flags |= 0x0001;
}
pkt.stream_index = video_stream->index;
pkt.data = out;
pkt.size = compressed_length;
if (av_interleaved_write_frame(fmt_context, &pkt) != 0) {
LOGI("Error writing video frame");
}
} else {
LOGI("??? compressed_length <= 0");
}
last_audio_pts++;
av_free(out);
return compressed_length;
}
JNIEXPORT jint JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeAudioFrame(JNIEnv *env, jobject thiz, jshortArray audio_data, jint length) {
if (!enable_audio) {
return 0;
}
short *audio = (*env)->GetShortArrayElements(env, audio_data, 0);
//LOGI("java audio buffer size: %i", length);
AVCodecContext *audio_codec_ctx = audio_stream->codec;
unsigned char *out = av_malloc(128000);
AudioBuffer_Push(audio, length);
int total_compressed = 0;
while (AudioBuffer_Size() >= audio_codec_ctx->frame_size) {
AVPacket pkt;
av_init_packet(&pkt);
int compressed_length = avcodec_encode_audio(audio_codec_ctx, out, 128000, AudioBuffer_Get());
total_compressed += compressed_length;
audio_samples_written += audio_codec_ctx->frame_size;
int new_pts = (audio_samples_written * 1000) / audio_sample_rate;
if (compressed_length > 0) {
pkt.size = compressed_length;
pkt.pts = new_pts;
last_audio_pts = new_pts;
//LOGI("audio_samples_written: %i comp_length: %i pts: %i", (int)audio_samples_written, (int)compressed_length, (int)new_pts);
pkt.flags |= 0x0001;
pkt.stream_index = audio_stream->index;
pkt.data = out;
if (av_interleaved_write_frame(fmt_context, &pkt) != 0) {
LOGI("Error writing audio frame");
}
}
AudioBuffer_Pop(audio_codec_ctx->frame_size);
}
(*env)->ReleaseShortArrayElements(env, audio_data, audio, 0);
av_free(out);
return total_compressed;
}
This error generally occurs when your native library can't be found by the JVM during execution. Your native code must be compiled into a .so file and made available to the JVM at run time.
You may find more details on java.library.path and linking here.
In all of these methods you take jobject thiz as the second parameter, which means you are implementing them as non-static methods that receive a reference to the jobject they are invoked on.
Try changing those parameters to jclass clazz to mark the functions as implementations of static methods, which is how they are declared on the Java side.
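For example, the declaration of one of them would then become (a sketch using the symbol from the error message; the VM derives that name from the package, class and method, and looks it up in the libffmpeg.so that System.loadLibrary("ffmpeg") loads):
// Java side: public static native int encodeVideoFrame(byte[] yuv_image);
// A static native method receives the class object instead of an instance:
JNIEXPORT jint JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeVideoFrame(JNIEnv *env, jclass clazz, jbyteArray yuv_image);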

A problem occurred starting process 'command 'C:\*********\Sdk\ndk-bundle\ndk-build.cmd'' in Android Studio

I am using yt-watchme-master for live streaming videos from our mobile device to YouTube.
I used a third-party library: https://github.com/youtube/yt-watchme
They are using C++ code there, which is why I installed the NDK in my Android Studio, but errors still come up.
Can someone help me, please?
Below is the entire code.
#include <android/log.h>
#include <string.h>
#include <jni.h>
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/opt.h"
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jboolean JNICALL Java_com_google_android_apps_watchme_Ffmpeg_init(JNIEnv *env, jobject thiz,
jint width, jint height, jint audio_sample_rate, jstring rtmp_url);
JNIEXPORT void JNICALL Java_com_google_android_apps_watchme_Ffmpeg_shutdown(JNIEnv *env, jobject thiz);
JNIEXPORT jint JNICALL Java_com_google_android_apps_watchme_Ffmpeg_encodeVideoFrame(JNIEnv *env, jobject thiz, jbyteArray yuv_image);
JNIEXPORT jint JNICALL Java_com_google_android_apps_watchme_Ffmpeg_encodeAudioFrame(JNIEnv *env, jobject thiz, jshortArray audio_data, jint length);
#ifdef __cplusplus
}
#endif
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, "ffmpeg-jni", __VA_ARGS__)
#define URL_WRONLY 2
static AVFormatContext *fmt_context;
static AVStream *video_stream;
static AVStream *audio_stream;
static int pts = 0;
static int last_audio_pts = 0;
// Buffers for UV format conversion
static unsigned char *u_buf;
static unsigned char *v_buf;
static int enable_audio = 1;
static int64_t audio_samples_written = 0;
static int audio_sample_rate = 0;
// Stupid buffer for audio samples. Not even a proper ring buffer
#define AUDIO_MAX_BUF_SIZE 16384 // 2x what we get from Java
static short audio_buf[AUDIO_MAX_BUF_SIZE];
static int audio_buf_size = 0;
void AudioBuffer_Push(const short *audio, int num_samples) {
if (audio_buf_size >= AUDIO_MAX_BUF_SIZE - num_samples) {
LOGI("AUDIO BUFFER OVERFLOW: %i + %i > %i", audio_buf_size, num_samples,
AUDIO_MAX_BUF_SIZE);
return;
}
for (int i = 0; i < num_samples; i++) {
audio_buf[audio_buf_size++] = audio[i];
}
}
int AudioBuffer_Size() { return audio_buf_size; }
short *AudioBuffer_Get() { return audio_buf; }
void AudioBuffer_Pop(int num_samples) {
if (num_samples > audio_buf_size) {
LOGI("Audio buffer Pop WTF: %i vs %i", num_samples, audio_buf_size);
return;
}
memmove(audio_buf, audio_buf + num_samples, num_samples * sizeof(short));
audio_buf_size -= num_samples;
}
void AudioBuffer_Clear() {
memset(audio_buf, 0, sizeof(audio_buf));
audio_buf_size = 0;
}
static void log_callback(void *ptr, int level, const char *fmt, va_list vl) {
char x[2048];
vsnprintf(x, 2048, fmt, vl);
LOGI(x);
}
JNIEXPORT jboolean JNICALL Java_com_google_android_apps_watchme_Ffmpeg_init(JNIEnv *env, jobject thiz,
jint width, jint height,
jint audio_sample_rate_param,
jstring rtmp_url) {
avcodec_register_all();
av_register_all();
av_log_set_callback(log_callback);
fmt_context = avformat_alloc_context();
AVOutputFormat *ofmt = av_guess_format("flv", NULL, NULL);
if (ofmt) {
LOGI("av_guess_format returned %s", ofmt->long_name);
} else {
LOGI("av_guess_format fail");
return JNI_FALSE;
}
fmt_context->oformat = ofmt;
LOGI("creating video stream");
video_stream = av_new_stream(fmt_context, 0);
if (enable_audio) {
LOGI("creating audio stream");
audio_stream = av_new_stream(fmt_context, 1);
}
// Open Video Codec.
// ======================
AVCodec *video_codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!video_codec) {
LOGI("Did not find the video codec");
return JNI_FALSE; // leak!
} else {
LOGI("Video codec found!");
}
AVCodecContext *video_codec_ctx = video_stream->codec;
video_codec_ctx->codec_id = video_codec->id;
video_codec_ctx->codec_type = AVMEDIA_TYPE_VIDEO;
video_codec_ctx->level = 31;
video_codec_ctx->width = width;
video_codec_ctx->height = height;
video_codec_ctx->pix_fmt = PIX_FMT_YUV420P;
video_codec_ctx->rc_max_rate = 0;
video_codec_ctx->rc_buffer_size = 0;
video_codec_ctx->gop_size = 12;
video_codec_ctx->max_b_frames = 0;
video_codec_ctx->slices = 8;
video_codec_ctx->b_frame_strategy = 1;
video_codec_ctx->coder_type = 0;
video_codec_ctx->me_cmp = 1;
video_codec_ctx->me_range = 16;
video_codec_ctx->qmin = 10;
video_codec_ctx->qmax = 51;
video_codec_ctx->keyint_min = 25;
video_codec_ctx->refs = 3;
video_codec_ctx->trellis = 0;
video_codec_ctx->scenechange_threshold = 40;
video_codec_ctx->flags |= CODEC_FLAG_LOOP_FILTER;
video_codec_ctx->me_method = ME_HEX;
video_codec_ctx->me_subpel_quality = 6;
video_codec_ctx->i_quant_factor = 0.71;
video_codec_ctx->qcompress = 0.6;
video_codec_ctx->max_qdiff = 4;
video_codec_ctx->time_base.den = 10;
video_codec_ctx->time_base.num = 1;
video_codec_ctx->bit_rate = 3200 * 1000;
video_codec_ctx->bit_rate_tolerance = 0;
video_codec_ctx->flags2 |= 0x00000100;
fmt_context->bit_rate = 4000 * 1000;
av_opt_set(video_codec_ctx, "partitions", "i8x8,i4x4,p8x8,b8x8", 0);
av_opt_set_int(video_codec_ctx, "direct-pred", 1, 0);
av_opt_set_int(video_codec_ctx, "rc-lookahead", 0, 0);
av_opt_set_int(video_codec_ctx, "fast-pskip", 1, 0);
av_opt_set_int(video_codec_ctx, "mixed-refs", 1, 0);
av_opt_set_int(video_codec_ctx, "8x8dct", 0, 0);
av_opt_set_int(video_codec_ctx, "weightb", 0, 0);
if (fmt_context->oformat->flags & AVFMT_GLOBALHEADER)
video_codec_ctx->flags |= CODEC_FLAG_GLOBAL_HEADER;
LOGI("Opening video codec");
AVDictionary *vopts = NULL;
av_dict_set(&vopts, "profile", "main", 0);
//av_dict_set(&vopts, "vprofile", "main", 0);
av_dict_set(&vopts, "rc-lookahead", 0, 0);
av_dict_set(&vopts, "tune", "film", 0);
av_dict_set(&vopts, "preset", "ultrafast", 0);
av_opt_set(video_codec_ctx->priv_data, "tune", "film", 0);
av_opt_set(video_codec_ctx->priv_data, "preset", "ultrafast", 0);
av_opt_set(video_codec_ctx->priv_data, "tune", "film", 0);
int open_res = avcodec_open2(video_codec_ctx, video_codec, &vopts);
if (open_res < 0) {
LOGI("Error opening video codec: %i", open_res);
return JNI_FALSE; // leak!
}
// Open Audio Codec.
// ======================
if (enable_audio) {
AudioBuffer_Clear();
audio_sample_rate = audio_sample_rate_param;
AVCodec *audio_codec = avcodec_find_encoder(AV_CODEC_ID_AAC);
if (!audio_codec) {
LOGI("Did not find the audio codec");
return JNI_FALSE; // leak!
} else {
LOGI("Audio codec found!");
}
AVCodecContext *audio_codec_ctx = audio_stream->codec;
audio_codec_ctx->codec_id = audio_codec->id;
audio_codec_ctx->codec_type = AVMEDIA_TYPE_AUDIO;
audio_codec_ctx->bit_rate = 128000;
audio_codec_ctx->bit_rate_tolerance = 16000;
audio_codec_ctx->channels = 1;
audio_codec_ctx->profile = FF_PROFILE_AAC_LOW;
audio_codec_ctx->sample_fmt = AV_SAMPLE_FMT_FLT;
audio_codec_ctx->sample_rate = 44100;
LOGI("Opening audio codec");
AVDictionary *opts = NULL;
av_dict_set(&opts, "strict", "experimental", 0);
open_res = avcodec_open2(audio_codec_ctx, audio_codec, &opts);
LOGI("audio frame size: %i", audio_codec_ctx->frame_size);
if (open_res < 0) {
LOGI("Error opening audio codec: %i", open_res);
return JNI_FALSE; // leak!
}
}
const jbyte *url = (*env)->GetStringUTFChars(env, rtmp_url, NULL);
// Point to an output file
if (!(ofmt->flags & AVFMT_NOFILE)) {
if (avio_open(&fmt_context->pb, url, URL_WRONLY) < 0) {
LOGI("ERROR: Could not open file %s", url);
return JNI_FALSE; // leak!
}
}
(*env)->ReleaseStringUTFChars(env, rtmp_url, url);
LOGI("Writing output header.");
// Write file header
if (avformat_write_header(fmt_context, NULL) != 0) {
LOGI("ERROR: av_write_header failed");
return JNI_FALSE;
}
pts = 0;
last_audio_pts = 0;
audio_samples_written = 0;
// Initialize buffers for UV format conversion
int frame_size = video_codec_ctx->width * video_codec_ctx->height;
u_buf = (unsigned char *) av_malloc(frame_size / 4);
v_buf = (unsigned char *) av_malloc(frame_size / 4);
LOGI("ffmpeg encoding init done");
return JNI_TRUE;
}
JNIEXPORT void JNICALL Java_com_google_android_apps_watchme_Ffmpeg_shutdown(JNIEnv *env, jobject thiz) {
av_write_trailer(fmt_context);
avio_close(fmt_context->pb);
avcodec_close(video_stream->codec);
if (enable_audio) {
avcodec_close(audio_stream->codec);
}
av_free(fmt_context);
av_free(u_buf);
av_free(v_buf);
fmt_context = NULL;
u_buf = NULL;
v_buf = NULL;
}
JNIEXPORT jint JNICALL Java_com_google_android_apps_watchme_Ffmpeg_encodeVideoFrame(JNIEnv *env, jobject thiz, jbyteArray yuv_image) {
int yuv_length = (*env)->GetArrayLength(env, yuv_image);
unsigned char *yuv_data = (*env)->GetByteArrayElements(env, yuv_image, 0);
AVCodecContext *video_codec_ctx = video_stream->codec;
//LOGI("Yuv size: %i w: %i h: %i", yuv_length, video_codec_ctx->width, video_codec_ctx->height);
int frame_size = video_codec_ctx->width * video_codec_ctx->height;
const unsigned char *uv = yuv_data + frame_size;
// Convert YUV from NV12 to I420. Y channel is the same so we don't touch it,
// we just have to deinterleave UV.
for (int i = 0; i < frame_size / 4; i++) {
v_buf[i] = uv[i * 2];
u_buf[i] = uv[i * 2 + 1];
}
AVFrame source;
memset(&source, 0, sizeof(AVFrame));
source.data[0] = yuv_data;
source.data[1] = u_buf;
source.data[2] = v_buf;
source.linesize[0] = video_codec_ctx->width;
source.linesize[1] = video_codec_ctx->width / 2;
source.linesize[2] = video_codec_ctx->width / 2;
// only for bitrate regulation. irrelevant for sync.
source.pts = pts;
pts++;
int out_length = frame_size + (frame_size / 2);
unsigned char *out = (unsigned char *) av_malloc(out_length);
int compressed_length = avcodec_encode_video(video_codec_ctx, out, out_length, &source);
(*env)->ReleaseByteArrayElements(env, yuv_image, yuv_data, 0);
// Write to file too
if (compressed_length > 0) {
AVPacket pkt;
av_init_packet(&pkt);
pkt.pts = last_audio_pts;
if (video_codec_ctx->coded_frame && video_codec_ctx->coded_frame->key_frame) {
pkt.flags |= 0x0001;
}
pkt.stream_index = video_stream->index;
pkt.data = out;
pkt.size = compressed_length;
if (av_interleaved_write_frame(fmt_context, &pkt) != 0) {
LOGI("Error writing video frame");
}
} else {
LOGI("??? compressed_length <= 0");
}
last_audio_pts++;
av_free(out);
return compressed_length;
}
JNIEXPORT jint JNICALL Java_com_google_android_apps_watchme_Ffmpeg_encodeAudioFrame(JNIEnv *env, jobject thiz, jshortArray audio_data, jint length) {
if (!enable_audio) {
return 0;
}
short *audio = (*env)->GetShortArrayElements(env, audio_data, 0);
//LOGI("java audio buffer size: %i", length);
AVCodecContext *audio_codec_ctx = audio_stream->codec;
unsigned char *out = av_malloc(128000);
AudioBuffer_Push(audio, length);
int total_compressed = 0;
while (AudioBuffer_Size() >= audio_codec_ctx->frame_size) {
AVPacket pkt;
av_init_packet(&pkt);
int compressed_length = avcodec_encode_audio(audio_codec_ctx, out, 128000, AudioBuffer_Get());
total_compressed += compressed_length;
audio_samples_written += audio_codec_ctx->frame_size;
int new_pts = (audio_samples_written * 1000) / audio_sample_rate;
if (compressed_length > 0) {
pkt.size = compressed_length;
pkt.pts = new_pts;
last_audio_pts = new_pts;
//LOGI("audio_samples_written: %i comp_length: %i pts: %i", (int)audio_samples_written, (int)compressed_length, (int)new_pts);
pkt.flags |= 0x0001;
pkt.stream_index = audio_stream->index;
pkt.data = out;
if (av_interleaved_write_frame(fmt_context, &pkt) != 0) {
LOGI("Error writing audio frame");
}
}
AudioBuffer_Pop(audio_codec_ctx->frame_size);
}
(*env)->ReleaseShortArrayElements(env, audio_data, audio, 0);
av_free(out);
return total_compressed;
}

Android-NDK - fread

I'm writing this question because I haven't found it covered in any forum or help page. I have an app with two basic native functions, and I log from C to the console step by step.
My app has two cases, depending on one line I call the "DEAD LINE": this single line of code makes a whole function fail, and it fails not at the line itself but when the function is called.
Case 1 (the "DEAD LINE" is commented out):
The first function ("setArrayByteYUV") writes a buffer to a file. This works perfectly and is simple: the app passes the byte array to C, which writes the file.
This function runs on Android 4.2, 5.0 and 6.0 (creates the file and writes the buffer).
It writes all its logs to the console.
The second function ("RenderProcessOther") reads the buffer from the previously written file. Please pay close attention:
This function runs on Android 4.2, 5.0 and 6.0 (opens the file).
It writes all its logs to the console.
Case 2 (the "DEAD LINE" is not commented out):
The first function (setArrayByteYUV) writes the buffer to the file exactly as before, without problems: it passes the byte array to C and writes the file.
This function runs on Android 4.2, 5.0 and 6.0 (creates the file).
It writes all its logs to the console.
The second function (RenderProcessOther) reads the buffer from the previously written file, but, please pay close attention:
This function runs on Android 4.2, reads the buffer from the file and finishes the process, but on Android 5.0 and 6.0 it fails when the function is called (it does not fail at the "DEAD LINE" itself; it fails at the call to the function).
It writes all its logs to the console on 4.2, but on 5.0 and 6.0 it fails when the function is called and does not write any log from that function. PS: when it fails, it doesn't write ANY log from RenderProcessOther, as if the function didn't exist.
In every case it compiles; ndk-build never fails, whether the "DEAD LINE" is commented out or not.
I have tried fgets and fread and every variation, but they all behave the same way: on 4.2 it runs and reads the file; on 5.0 and 6.0 it fails if the "DEAD LINE" is not commented out.
Note: the write/read process itself has no runtime error, and it compiles fine.
Does my code need a "try"/"catch", an "if" validation or something else to make the fread "DEAD LINE" work on 5.0 and 6.0?
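For reference, a checked read in plain C looks like this (a minimal generic sketch, not tied to this app's buffers; read_file is a hypothetical helper). Note fread's argument order and the NULL check before using the stream:
#include <stdio.h>
/* Read up to cap bytes from path into buf; returns bytes read, or -1 on error.
   fread(ptr, size, nmemb, stream) reads nmemb items of size bytes each, so
   passing 0 as either argument reads nothing, and calling it with a NULL
   FILE* is undefined behavior (a likely crash). */
static long read_file(const char *path, char *buf, size_t cap) {
FILE *f = fopen(path, "rb");
if (f == NULL) return -1;          /* check before any fread/fgets */
size_t n = fread(buf, 1, cap, f);  /* size = 1 byte, nmemb = cap */
fclose(f);
return (long)n;
}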
Code C:
#include <jni.h>
#include <stdlib.h>
#include <android/log.h>
#include <pthread.h>
#include <stdio.h>
#include <string.h>
#define APPNAME "MyApp"
#define EMPTY (0)
static jint *c_arrayPrincipal;
static jint **c_arrayMain;
static jint *rgbDataGlobal;
int startThread = 0;
int in = 0;
int inByte = 0;
int lengthglobal =0;
int rgbDataFinal[15000000];
jint lenVector = 0;
JNIEXPORT jint
Java_com_example_android_camera2basic_utils_Utils_setArrayByteYUV(JNIEnv* env, jobject thiz, jbyteArray arrY, jbyteArray arrU, jbyteArray arrV, jint indexP, jint width, jint height, jstring javaString){
const char *nativeString = (*env)->GetStringUTFChars(env, javaString, 0);
int w = width;
int h = height;
int sz = w * h;
int i;
int j;
int Y;
int Cr = 0;
int Cb = 0;
int pixPtr = 0;
int jDiv2 = 0;
int R = 0;
int G = 0;
int B = 0;
int cOff;
int ind = 0;
int nind = lenVector;
int p = 0;
jsize lenY = (*env)->GetArrayLength(env, arrY);
jsize lenU = (*env)->GetArrayLength(env, arrU);
jsize lenV = (*env)->GetArrayLength(env, arrV);
char arr[lenY];
int rgbData2[sz];
int counter =0;
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "lenY: number: = %d",lenY);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "lenU: number: = %d",lenU);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "lenV: number: = %d",lenV);
//file/
int lstr = 0;
int mychar = 0;
FILE *pfileY = NULL;
FILE *pfileU = NULL;
FILE *pfileV = NULL;
char filenameconY[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
char filenameconU[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
char filenameconV[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
//sprintf(filenamecon, "/storage/emulated/legacy/ls/myfile%04d.txt", indexP);
sprintf(filenameconY, "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt", indexP);
sprintf(filenameconU, "/data/data/com.example.android.camera2basic/ls/myfile%04d-u.txt", indexP);
sprintf(filenameconV, "/data/data/com.example.android.camera2basic/ls/myfile%04d-v.txt", indexP);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",1);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconY);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconU);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconV);
//pfilecon = fopen(filenamecon, "a");
jboolean isCopy;
jbyte* c_arrayY = (*env)->GetByteArrayElements(env, arrY, &isCopy);
jbyte* c_arrayU = (*env)->GetByteArrayElements(env, arrU, &isCopy);
jbyte* c_arrayV = (*env)->GetByteArrayElements(env, arrV, &isCopy);
//File
pfileY = fopen(filenameconY, "w");
pfileU = fopen(filenameconU, "w");
pfileV = fopen(filenameconV, "w");
if(pfileY == NULL)
{
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "Error null file: = %d",2);
}
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",2);
fwrite(c_arrayY , 1 , lenY+1 , pfileY );
fwrite(c_arrayU , 1 , lenU+1 , pfileU );
fwrite(c_arrayV , 1 , lenV+1 , pfileV );
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",3);
(*env)->ReleaseStringUTFChars(env, javaString, nativeString);
(*env)->DeleteLocalRef(env,javaString);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",4);
fclose(pfileY);
fclose(pfileU);
fclose(pfileV);
(*env)->ReleaseByteArrayElements(env, arrY, c_arrayY, 0);
(*env)->DeleteLocalRef(env,arrY);
(*env)->ReleaseByteArrayElements(env, arrU, c_arrayU, 0);
(*env)->DeleteLocalRef(env,arrU);
(*env)->ReleaseByteArrayElements(env, arrV, c_arrayV, 0);
(*env)->DeleteLocalRef(env,arrV);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",5);
return 0;
}
JNIEXPORT jintArray JNICALL
Java_com_example_android_camera2basic_utils_Utils_RenderProcessOther(JNIEnv * env, jobject obj, jint number, jint width, jint height, jint api)
{
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOtherYUV: api: = %d",api);
const char *nativeString = "";
int w = width;
int h = height;
int sz = w * h;
int i;
int j;
int Y;
int Cr = 0;
int Cb = 0;
int pixPtr = 0;
int jDiv2 = 0;
int R = 0;
int G = 0;
int B = 0;
int cOff;
int ind = 0;
int nind = lenVector;
int p = 0;
char arr[15000000];
int rgbData2[sz];
int counter =0;
int indexP =0;
char arrY[15000000];
char arrU[15000000];
char arrV[15000000];
int rgbDataFinalR[15000000];
if(api!=0) {
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOtherYUV: api IF: = %d",sz);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther2: number: = %d",1);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: number: = %d",2);
rgbDataGlobal = (jint*)calloc(lenVector, sizeof(jint));
int lstr = 0;
int mychar = 0;
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: number: = %d",3);
int nnumber = (int)number;
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: nnumber: = %d",4);
for(indexP=1; indexP<nnumber;indexP++){
//filename =
//printf(filename, "/storage/emulated/legacy/ls/myfile%d.txt", (int)indexP);
pixPtr = 0;
char filenameconY[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
char filenameconU[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
char filenameconV[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
//sprintf(filenamecon, "/storage/emulated/legacy/ls/myfile%04d.txt", indexP);
sprintf(filenameconY, "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt", indexP);
sprintf(filenameconU, "/data/data/com.example.android.camera2basic/ls/myfile%04d-u.txt", indexP);
sprintf(filenameconV, "/data/data/com.example.android.camera2basic/ls/myfile%04d-v.txt", indexP);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",1);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconY);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconU);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconV);
FILE* fileY = fopen(filenameconY, "r");
FILE* fileU = fopen(filenameconU, "r");
FILE* fileV = fopen(filenameconV, "r");
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: indexP 4: = %d",indexP);
int n = sizeof(arrY);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "sizeof: sizeof 4: = %d",n);
if(n !=0){
n = sizeof(arrY)/sizeof(arrY[0]);
int step = (int)api;
int size = w * h;
for (i = 0; i<h; i++)
{
for (j=0; j<w; j++)
{
float Y = arrY[i*step + j];
float U = arrU[ (int)(size + (i/2)*(step/2) + j/2) ];
float V = arrV[ (int)(size*1.25 + (i/2)*(step/2) + j/2)];
float R = Y + (int)(1.772f*V);
float G = Y - (int)(0.344f*V + 0.714f*U);
float B = Y + (int)(1.402f*U);
if (R < 0){
R = 0;
}
if (G < 0){
G = 0;
} if (B < 0){
B = 0;
}
if (R > 255 ){
R = 255;
}
if (G > 255) {
G = 255;
} if (B > 255) {
B = 255;
}
int rint = (int)R;
int gint = (int)G;
int bint = (int)B;
int rgbDataPixel = 0xff000000 + (bint << 16) + (gint << 8) + rint;
rgbDataFinalR[pixPtr] = (int)rgbDataPixel;
pixPtr++;
}
}
fclose(fileY);
fclose(fileU);
fclose(fileV);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther2: nnumber for close: = %d",indexP);
remove(filenameconY);
remove(filenameconU);
remove(filenameconV);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther2: nnumber for remove: = %d",4);
}
}
}else{
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOtherYUV: api ELSE: = %d",sz);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: number: = %d",1);
rgbDataGlobal = (jint*)calloc(lenVector, sizeof(jint));
int lstr = 0;
int mychar = 0;
char *filename = NULL;
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: number: = %d",1);
int nnumber = (int)number;
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: nnumber: = %d",1);
for(indexP=1; indexP<nnumber;indexP++){
char filenamecon[sizeof "/storage/emulated/legacy/ls/myfile0000.txt"];
sprintf(filenamecon, "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt", indexP);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: number: = %s",filenamecon);
FILE* file = fopen(filenamecon, "r");
if(file ==NULL){
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "no existe RenderProcessOther: number: = %s",filenamecon);
}
/* DEAD LINE */
/* DEAD LINE */
/* DEAD LINE */
fread(arr, lenVector,0 , file); // <------- DEAD LINE
///fgets(arr, lenVector, file)
/* DEAD LINE */
/* DEAD LINE */
/* DEAD LINE */
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: indexP 4: = %d",indexP);
int n = sizeof(arr);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "sizeof: sizeof 4: = %d",n);
if(n !=0){
n = sizeof(arr)/sizeof(arr[0]);
for(j = 0; j < h; j++) {
pixPtr = j * w;
jDiv2 = j >> 1;
for(i = 0; i < w; i++) {
counter++;
Y = arr[pixPtr];
if(Y < 0) Y += 255;
if((i & 0x1) != 1) {
cOff = sz + jDiv2 * w + (i >> 1) * 2;
Cb = arr[cOff];
if(Cb < 0) Cb += 127; else Cb -= 128;
Cr = arr[cOff + 1];
if(Cr < 0) Cr += 127; else Cr -= 128;
}
R = Y + Cr + (Cr >> 2) + (Cr >> 3) + (Cr >> 5);//1.406*~1.403
if(R < 0) R = 0; else if(R > 255) R = 255;
G = Y - (Cb >> 2) + (Cb >> 4) + (Cb >> 5) - (Cr >> 1) + (Cr >> 3) + (Cr >> 4) + (Cr >> 5);//
if(G < 0) G = 0; else if(G > 255) G = 255;
B = Y + Cb + (Cb >> 1) + (Cb >> 2) + (Cb >> 6);//1.765~1.770
if(B < 0) B = 0; else if(B > 255) B = 255;
if(indexP==0){
rgbDataFinal[pixPtr++] = 0xff000000 + (B << 16) + (G << 8) + R;
}else{
int pixPtrIndex = pixPtr++;
int rgbDataPixel = 0xff000000 + (B << 16) + (G << 8) + R;
int color = rgbDataPixel;
int r = color & 0xff;
int g = (color >> 8) & 0xff;
int b = (color >> 16) & 0xff;
int a = (color >> 24) & 0xff;
int color2 = rgbDataFinal[pixPtrIndex];
int r2 = color2 & 0xff;
int g2 = (color2 >> 8) & 0xff;
int b2 = (color2 >> 16) & 0xff;
int a2 = (color2 >> 24) & 0xff;
double ad = a - a2;
double rd = r - r2;
double gd = g - g2;
double bd = b- b2;
double ar = a2/a;
double rr = r2/r;
double gr = g2/g;
double br = b2/b;
if(a2<a|| r2<r|| g2<g || b2<b) {
double coeficiente = 0.5;
double d =(r - ((r - r2)*coeficiente));
int red1 = (int) d;
d =(g - ((g - g2)*coeficiente));
int green1 = (int)d;
d =(b - ((b - b2)*coeficiente));
int blue1 = (int)d;
int renderRGBDataPixel = 0xff000000 + (blue1 << 16) + (green1 << 8) + red1;
rgbDataFinal[pixPtrIndex] = renderRGBDataPixel;
}
}
}
}
fclose(file);
remove(filenamecon);
}
}
};
for (i = 0; i < sz; ++i) {
rgbDataGlobal[i]= rgbDataFinal[i];
}
jintArray jArray = (*env)->NewIntArray(env, sz);
if (jArray != NULL) {
jint u;
u =0;
(*env)->SetIntArrayRegion(env, jArray, 0, sz,rgbDataGlobal);
}
return jArray;
}
Android.mk
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := helloc
LOCAL_SRC_FILES := HelloC.c
LOCAL_LDLIBS := -llog -ldl -landroid
include $(BUILD_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := imageformat
LOCAL_SRC_FILES := ImageFormat.c
LOCAL_LDLIBS := -llog
include $(BUILD_SHARED_LIBRARY)
Application.mk
APP_ABI := armeabi armeabi-v7a mips x86
APP_PLATFORM := android-21
build.gradle
android {
compileSdkVersion 23
buildToolsVersion "24.0.0 rc3"
defaultConfig {
minSdkVersion 17
targetSdkVersion 23
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_7
targetCompatibility JavaVersion.VERSION_1_7
}
}

Android JNI reads the same path, but not the same value

static void setIntField(JNIEnv* env, jobject obj, const char* path,
jfieldID fieldID) {
const int SIZE = 128;
char buf[SIZE] = "\0";
jint value = 0;
if (readFromFile(path, buf, SIZE) > 0) {
value = atoi(buf);
}
env->SetIntField(obj, fieldID, value);
}
static int readFromFile(const char* path, char* buf, size_t size) {
if (!path)
return -1;
int fd = open(path, O_RDONLY, 0);
if (fd == -1) {
LOGE("Could not open '%s'", path);
return -1;
}
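/* note: read() returns ssize_t; storing it in size_t means a -1 error
   becomes a huge positive count instead of being detected */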
size_t count = read(fd, buf, size);
if (count > 0) {
count = (count < size) ? count : size - 1;
while (count > 0 && buf[count - 1] == '\n')
count--;
buf[count] = '\0';
} else {
buf[0] = '\0';
}
close(fd);
return count;
}
This is my Android JNI code. Why do I read the same path, but the value is not the same twice? I am reading this path: /sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq.
How can I fix it?
scaling_min_freq value is 51000. The first time I get 51000. The second time I get 150000.
