Encrypt and decrypt a UTF-8 string in Android NDK JNI - android

I wrote a simple string encrypt/decrypt that splits the string into two halves, increments (or decrements) the ASCII code of each character, and then swaps the right and left halves.
When I encrypt the text "this is test" it works fine, but encrypting UTF characters fails, e.g. with this string: "تست تست تست تست"
The encrypt code is:
JNIEXPORT jstring JNICALL Java_com_test_ndk_MainActivity_encrypt(JNIEnv* env, jobject thiz, jstring dec) {
    const char *nativeString = (*env)->GetStringUTFChars(env, dec, 0);
    char *newstr;
    char *left;
    char *right;
    int decLength = strlen(nativeString);
    int middle = decLength / 2;
    int i;
    newstr = substr(nativeString, 0, middle);
    int length = strlen(newstr);
    left = malloc(length + 1); /* +1 for the terminating '\0' */
    for (i = 0; i < length; i++) {
        left[i] = newstr[i] + 1;
    }
    left[length] = '\0';
    free(newstr);
    newstr = substr(nativeString, middle, decLength - middle);
    length = strlen(newstr);
    right = malloc(decLength + 1); /* large enough for right half + left half */
    for (i = 0; i < length; i++) {
        right[i] = newstr[i] - 1;
    }
    right[length] = '\0';
    free(newstr);
    strcat(right, left); /* swap halves: right first, then left */
    (*env)->ReleaseStringUTFChars(env, dec, nativeString);
    jstring result = (*env)->NewStringUTF(env, right);
    free(left);
    free(right);
    return result;
}
And the decrypt code is:
JNIEXPORT jstring JNICALL Java_com_test_ndk_MainActivity_decrypt(JNIEnv* env, jobject thiz, jstring enc) {
    const char *nativeString = (*env)->GetStringUTFChars(env, enc, 0);
    char *newstr;
    char *left;
    char *right;
    int encLength = strlen(nativeString);
    int middle = encLength / 2;
    int i;
    if (encLength % 2 != 0) {
        middle++;
    }
    newstr = substr(nativeString, 0, middle);
    int length = strlen(newstr);
    left = malloc(length + 1); /* +1 for the terminating '\0' */
    for (i = 0; i < length; i++) {
        left[i] = (char) ((int) newstr[i] + 1);
    }
    left[length] = '\0';
    free(newstr);
    newstr = substr(nativeString, middle, encLength - middle);
    length = strlen(newstr);
    right = malloc(encLength + 1); /* large enough for right half + left half */
    for (i = 0; i < length; i++) {
        right[i] = (char) ((int) newstr[i] - 1);
    }
    right[length] = '\0';
    free(newstr);
    strcat(right, left);
    (*env)->ReleaseStringUTFChars(env, enc, nativeString);
    jstring result = (*env)->NewStringUTF(env, right);
    free(left);
    free(right);
    return result;
}
The substr function:
char* substr(const char *source, unsigned int start, unsigned int len) {
    /* strndup's second argument is a length, not an end index */
    return strndup(source + start, len);
}
Does anyone have a solution?

UTF-8 is not trivial to manipulate. For your encode/decode, you can use GetStringChars() (or the more efficient, but also more restrictive, GetStringCritical()) and operate on the resulting 16-bit jchar array.
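For illustration, a minimal sketch of the encrypt side under that approach (the class and method names are taken from the question; the split-and-swap step is omitted, and naive shifting can still corrupt surrogate pairs, so treat it as a starting point, not a drop-in replacement):
JNIEXPORT jstring JNICALL Java_com_test_ndk_MainActivity_encrypt(JNIEnv *env, jobject thiz, jstring dec) {
    jsize len = (*env)->GetStringLength(env, dec);             /* length in UTF-16 units */
    const jchar *src = (*env)->GetStringChars(env, dec, NULL);
    jchar *buf = malloc(len * sizeof(jchar));
    if (buf == NULL) {
        (*env)->ReleaseStringChars(env, dec, src);
        return NULL;
    }
    for (jsize i = 0; i < len; i++) {
        buf[i] = (jchar) (src[i] + 1);  /* shift 16-bit code units, not UTF-8 bytes */
    }
    (*env)->ReleaseStringChars(env, dec, src);
    jstring out = (*env)->NewString(env, buf, len);  /* length-based: no NUL terminator needed */
    free(buf);
    return out;
}
The decrypt side mirrors this with src[i] - 1. Because the shifting happens on whole UTF-16 code units, Arabic text such as "تست" survives the round trip instead of being corrupted byte by byte.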

java.lang.UnsatisfiedLinkError: No implementation found for int [duplicate]

This question already has answers here:
Android NDK C++ JNI (no implementation found for native...)
(11 answers)
Closed 5 years ago.
I am running the YouTube WatchMe Android application project. I added some classes to my project and built it with the NDK, and I get an error like:
java.lang.UnsatisfiedLinkError: No implementation found for int com.ephronsystem.mobilizerapp.Ffmpeg.encodeVideoFrame(byte[]) (tried Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeVideoFrame and Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeVideoFrame___3B).
My code:
package com.ephronsystem.mobilizerapp;
public class Ffmpeg {
static {
System.loadLibrary("ffmpeg");
}
public static native boolean init(int width, int height, int audio_sample_rate, String rtmpUrl);
public static native void shutdown();
// Returns the size of the encoded frame.
public static native int encodeVideoFrame(byte[] yuv_image);
public static native int encodeAudioFrame(short[] audio_data, int length);
}
This is ffmpeg-jni.c
#include <android/log.h>
#include <string.h>
#include <jni.h>
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/opt.h"
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jboolean JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_init(
        JNIEnv *env, jobject thiz, jint width, jint height,
        jint audio_sample_rate, jstring rtmp_url);
JNIEXPORT void JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_shutdown(
        JNIEnv *env, jobject thiz);
JNIEXPORT jint JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeVideoFrame(
        JNIEnv *env, jobject thiz, jbyteArray yuv_image);
JNIEXPORT jint JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeAudioFrame(
        JNIEnv *env, jobject thiz, jshortArray audio_data, jint length);
#ifdef __cplusplus
}
#endif
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, "ffmpeg-jni", __VA_ARGS__)
#define URL_WRONLY 2
static AVFormatContext *fmt_context;
static AVStream *video_stream;
static AVStream *audio_stream;
static int pts = 0;
static int last_audio_pts = 0;
// Buffers for UV format conversion
static unsigned char *u_buf;
static unsigned char *v_buf;
static int enable_audio = 1;
static int64_t audio_samples_written = 0;
static int audio_sample_rate = 0;
// Stupid buffer for audio samples. Not even a proper ring buffer
#define AUDIO_MAX_BUF_SIZE 16384 // 2x what we get from Java
static short audio_buf[AUDIO_MAX_BUF_SIZE];
static int audio_buf_size = 0;
void AudioBuffer_Push(const short *audio, int num_samples) {
if (audio_buf_size >= AUDIO_MAX_BUF_SIZE - num_samples) {
LOGI("AUDIO BUFFER OVERFLOW: %i + %i > %i", audio_buf_size, num_samples,
AUDIO_MAX_BUF_SIZE);
return;
}
for (int i = 0; i < num_samples; i++) {
audio_buf[audio_buf_size++] = audio[i];
}
}
int AudioBuffer_Size() { return audio_buf_size; }
short *AudioBuffer_Get() { return audio_buf; }
void AudioBuffer_Pop(int num_samples) {
if (num_samples > audio_buf_size) {
LOGI("Audio buffer Pop WTF: %i vs %i", num_samples, audio_buf_size);
return;
}
memmove(audio_buf, audio_buf + num_samples, num_samples * sizeof(short));
audio_buf_size -= num_samples;
}
void AudioBuffer_Clear() {
memset(audio_buf, 0, sizeof(audio_buf));
audio_buf_size = 0;
}
static void log_callback(void *ptr, int level, const char *fmt, va_list vl) {
char x[2048];
vsnprintf(x, 2048, fmt, vl);
LOGI(x);
}
JNIEXPORT jboolean JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_init(JNIEnv *env, jobject thiz,
jint width, jint height,
jint audio_sample_rate_param,
jstring rtmp_url) {
avcodec_register_all();
av_register_all();
av_log_set_callback(log_callback);
fmt_context = avformat_alloc_context();
AVOutputFormat *ofmt = av_guess_format("flv", NULL, NULL);
if (ofmt) {
LOGI("av_guess_format returned %s", ofmt->long_name);
} else {
LOGI("av_guess_format fail");
return JNI_FALSE;
}
fmt_context->oformat = ofmt;
LOGI("creating video stream");
video_stream = av_new_stream(fmt_context, 0);
if (enable_audio) {
LOGI("creating audio stream");
audio_stream = av_new_stream(fmt_context, 1);
}
// Open Video Codec.
// ======================
AVCodec *video_codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!video_codec) {
LOGI("Did not find the video codec");
return JNI_FALSE; // leak!
} else {
LOGI("Video codec found!");
}
AVCodecContext *video_codec_ctx = video_stream->codec;
video_codec_ctx->codec_id = video_codec->id;
video_codec_ctx->codec_type = AVMEDIA_TYPE_VIDEO;
video_codec_ctx->level = 31;
video_codec_ctx->width = width;
video_codec_ctx->height = height;
video_codec_ctx->pix_fmt = PIX_FMT_YUV420P;
video_codec_ctx->rc_max_rate = 0;
video_codec_ctx->rc_buffer_size = 0;
video_codec_ctx->gop_size = 12;
video_codec_ctx->max_b_frames = 0;
video_codec_ctx->slices = 8;
video_codec_ctx->b_frame_strategy = 1;
video_codec_ctx->coder_type = 0;
video_codec_ctx->me_cmp = 1;
video_codec_ctx->me_range = 16;
video_codec_ctx->qmin = 10;
video_codec_ctx->qmax = 51;
video_codec_ctx->keyint_min = 25;
video_codec_ctx->refs = 3;
video_codec_ctx->trellis = 0;
video_codec_ctx->scenechange_threshold = 40;
video_codec_ctx->flags |= CODEC_FLAG_LOOP_FILTER;
video_codec_ctx->me_method = ME_HEX;
video_codec_ctx->me_subpel_quality = 6;
video_codec_ctx->i_quant_factor = 0.71;
video_codec_ctx->qcompress = 0.6;
video_codec_ctx->max_qdiff = 4;
video_codec_ctx->time_base.den = 10;
video_codec_ctx->time_base.num = 1;
video_codec_ctx->bit_rate = 3200 * 1000;
video_codec_ctx->bit_rate_tolerance = 0;
video_codec_ctx->flags2 |= 0x00000100;
fmt_context->bit_rate = 4000 * 1000;
av_opt_set(video_codec_ctx, "partitions", "i8x8,i4x4,p8x8,b8x8", 0);
av_opt_set_int(video_codec_ctx, "direct-pred", 1, 0);
av_opt_set_int(video_codec_ctx, "rc-lookahead", 0, 0);
av_opt_set_int(video_codec_ctx, "fast-pskip", 1, 0);
av_opt_set_int(video_codec_ctx, "mixed-refs", 1, 0);
av_opt_set_int(video_codec_ctx, "8x8dct", 0, 0);
av_opt_set_int(video_codec_ctx, "weightb", 0, 0);
if (fmt_context->oformat->flags & AVFMT_GLOBALHEADER)
video_codec_ctx->flags |= CODEC_FLAG_GLOBAL_HEADER;
LOGI("Opening video codec");
AVDictionary *vopts = NULL;
av_dict_set(&vopts, "profile", "main", 0);
//av_dict_set(&vopts, "vprofile", "main", 0);
av_dict_set(&vopts, "rc-lookahead", 0, 0);
av_dict_set(&vopts, "tune", "film", 0);
av_dict_set(&vopts, "preset", "ultrafast", 0);
av_opt_set(video_codec_ctx->priv_data, "tune", "film", 0);
av_opt_set(video_codec_ctx->priv_data, "preset", "ultrafast", 0);
av_opt_set(video_codec_ctx->priv_data, "tune", "film", 0);
int open_res = avcodec_open2(video_codec_ctx, video_codec, &vopts);
if (open_res < 0) {
LOGI("Error opening video codec: %i", open_res);
return JNI_FALSE; // leak!
}
// Open Audio Codec.
// ======================
if (enable_audio) {
AudioBuffer_Clear();
audio_sample_rate = audio_sample_rate_param;
AVCodec *audio_codec = avcodec_find_encoder(AV_CODEC_ID_AAC);
if (!audio_codec) {
LOGI("Did not find the audio codec");
return JNI_FALSE; // leak!
} else {
LOGI("Audio codec found!");
}
AVCodecContext *audio_codec_ctx = audio_stream->codec;
audio_codec_ctx->codec_id = audio_codec->id;
audio_codec_ctx->codec_type = AVMEDIA_TYPE_AUDIO;
audio_codec_ctx->bit_rate = 128000;
audio_codec_ctx->bit_rate_tolerance = 16000;
audio_codec_ctx->channels = 1;
audio_codec_ctx->profile = FF_PROFILE_AAC_LOW;
audio_codec_ctx->sample_fmt = AV_SAMPLE_FMT_FLT;
audio_codec_ctx->sample_rate = 44100;
LOGI("Opening audio codec");
AVDictionary *opts = NULL;
av_dict_set(&opts, "strict", "experimental", 0);
open_res = avcodec_open2(audio_codec_ctx, audio_codec, &opts);
LOGI("audio frame size: %i", audio_codec_ctx->frame_size);
if (open_res < 0) {
LOGI("Error opening audio codec: %i", open_res);
return JNI_FALSE; // leak!
}
}
const jbyte *url = (*env)->GetStringUTFChars(env, rtmp_url, NULL);
// Point to an output file
if (!(ofmt->flags & AVFMT_NOFILE)) {
if (avio_open(&fmt_context->pb, url, URL_WRONLY) < 0) {
LOGI("ERROR: Could not open file %s", url);
return JNI_FALSE; // leak!
}
}
(*env)->ReleaseStringUTFChars(env, rtmp_url, url);
LOGI("Writing output header.");
// Write file header
if (avformat_write_header(fmt_context, NULL) != 0) {
LOGI("ERROR: av_write_header failed");
return JNI_FALSE;
}
pts = 0;
last_audio_pts = 0;
audio_samples_written = 0;
// Initialize buffers for UV format conversion
int frame_size = video_codec_ctx->width * video_codec_ctx->height;
u_buf = (unsigned char *) av_malloc(frame_size / 4);
v_buf = (unsigned char *) av_malloc(frame_size / 4);
LOGI("ffmpeg encoding init done");
return JNI_TRUE;
}
JNIEXPORT void JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_shutdown(JNIEnv *env, jobject thiz) {
av_write_trailer(fmt_context);
avio_close(fmt_context->pb);
avcodec_close(video_stream->codec);
if (enable_audio) {
avcodec_close(audio_stream->codec);
}
av_free(fmt_context);
av_free(u_buf);
av_free(v_buf);
fmt_context = NULL;
u_buf = NULL;
v_buf = NULL;
}
JNIEXPORT jint JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeVideoFrame(JNIEnv *env, jobject thiz, jbyteArray yuv_image) {
int yuv_length = (*env)->GetArrayLength(env, yuv_image);
unsigned char *yuv_data = (*env)->GetByteArrayElements(env, yuv_image, 0);
AVCodecContext *video_codec_ctx = video_stream->codec;
//LOGI("Yuv size: %i w: %i h: %i", yuv_length, video_codec_ctx->width, video_codec_ctx->height);
int frame_size = video_codec_ctx->width * video_codec_ctx->height;
const unsigned char *uv = yuv_data + frame_size;
// Convert YUV from NV12 to I420. Y channel is the same so we don't touch it,
// we just have to deinterleave UV.
for (int i = 0; i < frame_size / 4; i++) {
v_buf[i] = uv[i * 2];
u_buf[i] = uv[i * 2 + 1];
}
AVFrame source;
memset(&source, 0, sizeof(AVFrame));
source.data[0] = yuv_data;
source.data[1] = u_buf;
source.data[2] = v_buf;
source.linesize[0] = video_codec_ctx->width;
source.linesize[1] = video_codec_ctx->width / 2;
source.linesize[2] = video_codec_ctx->width / 2;
// only for bitrate regulation. irrelevant for sync.
source.pts = pts;
pts++;
int out_length = frame_size + (frame_size / 2);
unsigned char *out = (unsigned char *) av_malloc(out_length);
int compressed_length = avcodec_encode_video(video_codec_ctx, out, out_length, &source);
(*env)->ReleaseByteArrayElements(env, yuv_image, yuv_data, 0);
// Write to file too
if (compressed_length > 0) {
AVPacket pkt;
av_init_packet(&pkt);
pkt.pts = last_audio_pts;
if (video_codec_ctx->coded_frame && video_codec_ctx->coded_frame->key_frame) {
pkt.flags |= 0x0001;
}
pkt.stream_index = video_stream->index;
pkt.data = out;
pkt.size = compressed_length;
if (av_interleaved_write_frame(fmt_context, &pkt) != 0) {
LOGI("Error writing video frame");
}
} else {
LOGI("??? compressed_length <= 0");
}
last_audio_pts++;
av_free(out);
return compressed_length;
}
JNIEXPORT jint JNICALL Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeAudioFrame(JNIEnv *env, jobject thiz, jshortArray audio_data, jint length) {
if (!enable_audio) {
return 0;
}
short *audio = (*env)->GetShortArrayElements(env, audio_data, 0);
//LOGI("java audio buffer size: %i", length);
AVCodecContext *audio_codec_ctx = audio_stream->codec;
unsigned char *out = av_malloc(128000);
AudioBuffer_Push(audio, length);
int total_compressed = 0;
while (AudioBuffer_Size() >= audio_codec_ctx->frame_size) {
AVPacket pkt;
av_init_packet(&pkt);
int compressed_length = avcodec_encode_audio(audio_codec_ctx, out, 128000, AudioBuffer_Get());
total_compressed += compressed_length;
audio_samples_written += audio_codec_ctx->frame_size;
int new_pts = (audio_samples_written * 1000) / audio_sample_rate;
if (compressed_length > 0) {
pkt.size = compressed_length;
pkt.pts = new_pts;
last_audio_pts = new_pts;
//LOGI("audio_samples_written: %i comp_length: %i pts: %i", (int)audio_samples_written, (int)compressed_length, (int)new_pts);
pkt.flags |= 0x0001;
pkt.stream_index = audio_stream->index;
pkt.data = out;
if (av_interleaved_write_frame(fmt_context, &pkt) != 0) {
LOGI("Error writing audio frame");
}
}
AudioBuffer_Pop(audio_codec_ctx->frame_size);
}
(*env)->ReleaseShortArrayElements(env, audio_data, audio, 0);
av_free(out);
return total_compressed;
}
This error generally occurs when the JVM cannot find your native library at execution time. Your native code must be compiled into a .so file and made available to the JVM at run time.
You may find more details on java.library.path and linking here.
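For reference, a sketch of the lookup contract involved (names taken from the question): the JVM resolves a native method by symbol name, so the library must both load at run time and export exactly that name.
#include <jni.h>

/* The JVM looks up Java_<package, dots as underscores>_<Class>_<method>;
 * for com.ephronsystem.mobilizerapp.Ffmpeg.shutdown() that is the name below.
 * If the file is ever compiled as C++, the definition must sit inside
 * extern "C" { ... } or the mangled name will never be found. */
JNIEXPORT void JNICALL
Java_com_ephronsystem_mobilizerapp_Ffmpeg_shutdown(JNIEnv *env, jobject thiz) {
    /* ... */
}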
In all of these methods you take jobject thiz as the second parameter, which means you are implementing them as non-static methods that receive a reference to the jobject they are invoked on.
Since your Java declarations are static native methods, try changing those parameters to jclass clazz to implement them as static methods.
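A minimal sketch of one adjusted signature, assuming the Java declarations stay static as shown above (the body is elided):
/* Static native method: the VM passes the Ffmpeg class object rather than
 * an instance, so the second parameter is jclass. */
JNIEXPORT jint JNICALL
Java_com_ephronsystem_mobilizerapp_Ffmpeg_encodeVideoFrame(JNIEnv *env,
                                                           jclass clazz,
                                                           jbyteArray yuv_image) {
    /* ... encode as before ... */
    return 0;
}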

"A problem occurred starting process 'command 'C:\*********\Sdk\ndk-bundle\ndk-build.cmd''" error in Android Studio

I am using yt-watchme-master for live-streaming videos from our mobile device to YouTube.
I used a third-party library: https://github.com/youtube/yt-watchme
It uses C++ code, which is why I installed the NDK in Android Studio, but errors still come up.
Can someone help me, please?
The entire code given is below.
(The posted ffmpeg-jni.c is the same listing as in the previous question, with two differences: every JNI function uses the Java_com_google_android_apps_watchme_ prefix instead of Java_com_ephronsystem_mobilizerapp_, and the encodeVideoFrame/encodeAudioFrame declarations were pasted with the return type and calling convention run together as "jintJNICALL" instead of "jint JNICALL".)

Android-NDK - fread

I am writing this question because I haven't seen it in any forum or help page. I have an app that performs two basic functions, and I log from C to the console step by step.
There is one line of code in my app, which I call the "DEAD LINE", that makes a whole function fail. The function doesn't fail at that line; it fails as soon as it is called.
Case 1 (the "DEAD LINE" is commented out):
The first function ("setArrayByteYUV") writes a buffer to a file, which works perfectly: the app passes the byte arrays to C, and C writes the files. It runs on Android 4.2, 5.0 and 6.0 (creates the file and writes the buffer) and writes all its logs to the console.
The second function ("RenderProcessOther") reads the buffer back from the previously written file. Please pay close attention: it also runs on Android 4.2, 5.0 and 6.0 (opens the file) and writes all its logs to the console.
Case 2 (the "DEAD LINE" is not commented out):
The first function (setArrayByteYUV) behaves exactly as before: it runs on Android 4.2, 5.0 and 6.0 (creates the file) and writes all its logs to the console.
The second function (RenderProcessOther) reads the buffer from the previously written file, but please pay close attention: on Android 4.2 it runs, reads the buffer from the file and finishes the process. On Android 5.0 and 6.0 it fails as soon as the function is called; it never reaches the "DEAD LINE". On 4.2 all logs are written to the console, but on 5.0 and 6.0 nothing is logged from this function at all, as if the function didn't exist.
In every case the code compiles; ndk-build never fails, whether the "DEAD LINE" is commented out or not.
I have tried fgets and fread and every variation, and it always behaves the same: on 4.2 it runs and reads the file; on 5.0 and 6.0 it fails whenever the "DEAD LINE" is not commented out.
Note: the write/read process itself has no runtime error, and it compiles fine.
Does my code need a "try"/"catch", an "if" validation, or something else to make the fread "DEAD LINE" work on 5.0 and 6.0?
The C code:
#include <jni.h>
#include <stdlib.h>
#include <android/log.h>
#include <pthread.h>
#include <stdio.h>
#include <string.h>
#define APPNAME "MyApp"
#define EMPTY (0)
static jint *c_arrayPrincipal;
static jint **c_arrayMain;
static jint *rgbDataGlobal;
int startThread = 0;
int in = 0;
int inByte = 0;
int lengthglobal =0;
int rgbDataFinal[15000000];
jint lenVector = 0;
JNIEXPORT jint JNICALL Java_com_example_android_camera2basic_utils_Utils_setArrayByteYUV(JNIEnv* env, jobject thiz, jbyteArray arrY, jbyteArray arrU, jbyteArray arrV, jint indexP, jint width, jint height, jstring javaString){
const char *nativeString = (*env)->GetStringUTFChars(env, javaString, 0);
int w = width;
int h = height;
int sz = w * h;
int i;
int j;
int Y;
int Cr = 0;
int Cb = 0;
int pixPtr = 0;
int jDiv2 = 0;
int R = 0;
int G = 0;
int B = 0;
int cOff;
int ind = 0;
int nind = lenVector;
int p = 0;
jsize lenY = (*env)->GetArrayLength(env, arrY);
jsize lenU = (*env)->GetArrayLength(env, arrU);
jsize lenV = (*env)->GetArrayLength(env, arrV);
char arr[lenY];
int rgbData2[sz];
int counter =0;
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "lenY: number: = %d",lenY);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "lenU: number: = %d",lenU);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "lenV: number: = %d",lenV);
//file/
int lstr = 0;
int mychar = 0;
FILE *pfileY = NULL;
FILE *pfileU = NULL;
FILE *pfileV = NULL;
char filenameconY[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
char filenameconU[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
char filenameconV[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
//sprintf(filenamecon, "/storage/emulated/legacy/ls/myfile%04d.txt", indexP);
sprintf(filenameconY, "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt", indexP);
sprintf(filenameconU, "/data/data/com.example.android.camera2basic/ls/myfile%04d-u.txt", indexP);
sprintf(filenameconV, "/data/data/com.example.android.camera2basic/ls/myfile%04d-v.txt", indexP);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",1);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconY);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconU);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconV);
//pfilecon = fopen(filenamecon, "a");
jboolean isCopy;
jbyte* c_arrayY = (*env)->GetByteArrayElements(env, arrY, &isCopy);
jbyte* c_arrayU = (*env)->GetByteArrayElements(env, arrU, &isCopy);
jbyte* c_arrayV = (*env)->GetByteArrayElements(env, arrV, &isCopy);
//File
pfileY = fopen(filenameconY, "w");
pfileU = fopen(filenameconU, "w");
pfileV = fopen(filenameconV, "w");
if(pfileY == NULL)
{
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "Error null file: = %d",2);
}
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",2);
fwrite(c_arrayY, 1, lenY, pfileY); /* write exactly lenY bytes; lenY+1 reads past the array */
fwrite(c_arrayU, 1, lenU, pfileU);
fwrite(c_arrayV, 1, lenV, pfileV);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",3);
(*env)->ReleaseStringUTFChars(env, javaString, nativeString);
(*env)->DeleteLocalRef(env,javaString);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",4);
fclose(pfileY);
fclose(pfileU);
fclose(pfileV);
(*env)->ReleaseByteArrayElements(env, arrY, c_arrayY, 0);
(*env)->DeleteLocalRef(env,arrY);
(*env)->ReleaseByteArrayElements(env, arrU, c_arrayU, 0);
(*env)->DeleteLocalRef(env,arrU);
(*env)->ReleaseByteArrayElements(env, arrV, c_arrayV, 0);
(*env)->DeleteLocalRef(env,arrV);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",5);
return 0;
}
JNIEXPORT jintArray JNICALL
Java_com_example_android_camera2basic_utils_Utils_RenderProcessOther(JNIEnv * env, jobject obj, jint number, jint width, jint height, jint api)
{
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOtherYUV: api: = %d",api);
const char *nativeString = "";
int w = width;
int h = height;
int sz = w * h;
int i;
int j;
int Y;
int Cr = 0;
int Cb = 0;
int pixPtr = 0;
int jDiv2 = 0;
int R = 0;
int G = 0;
int B = 0;
int cOff;
int ind = 0;
int nind = lenVector;
int p = 0;
char arr[15000000];
int rgbData2[sz];
int counter =0;
int indexP =0;
char arrY[15000000];
char arrU[15000000];
char arrV[15000000];
int rgbDataFinalR[15000000];
if(api!=0) {
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOtherYUV: api IF: = %d",sz);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther2: number: = %d",1);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: number: = %d",2);
rgbDataGlobal = (jint*)calloc(lenVector, sizeof(jint));
int lstr = 0;
int mychar = 0;
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: number: = %d",3);
int nnumber = (int)number;
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: nnumber: = %d",4);
for(indexP=1; indexP<nnumber;indexP++){
//filename =
//printf(filename, "/storage/emulated/legacy/ls/myfile%d.txt", (int)indexP);
pixPtr = 0;
char filenameconY[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
char filenameconU[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
char filenameconV[sizeof "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt"];
//sprintf(filenamecon, "/storage/emulated/legacy/ls/myfile%04d.txt", indexP);
sprintf(filenameconY, "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt", indexP);
sprintf(filenameconU, "/data/data/com.example.android.camera2basic/ls/myfile%04d-u.txt", indexP);
sprintf(filenameconV, "/data/data/com.example.android.camera2basic/ls/myfile%04d-v.txt", indexP);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %d",1);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconY);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconU);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "setArrayByte: number: = %s",filenameconV);
FILE* fileY = fopen(filenameconY, "r");
FILE* fileU = fopen(filenameconU, "r");
FILE* fileV = fopen(filenameconV, "r");
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: indexP 4: = %d",indexP);
int n = sizeof(arrY);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "sizeof: sizeof 4: = %d",n);
if(n !=0){
n = sizeof(arrY)/sizeof(arrY[0]);
int step = (int)api;
int size = w * h;
for (i = 0; i<h; i++)
{
for (j=0; j<w; j++)
{
float Y = arrY[i*step + j];
float U = arrU[ (int)(size + (i/2)*(step/2) + j/2) ];
float V = arrV[ (int)(size*1.25 + (i/2)*(step/2) + j/2)];
float R = Y + (int)(1.772f*V);
float G = Y - (int)(0.344f*V + 0.714f*U);
float B = Y + (int)(1.402f*U);
if (R < 0){
R = 0;
}
if (G < 0){
G = 0;
} if (B < 0){
B = 0;
}
if (R > 255 ){
R = 255;
}
if (G > 255) {
G = 255;
} if (B > 255) {
B = 255;
}
int rint = (int)R;
int gint = (int)G;
int bint = (int)B;
int rgbDataPixel = 0xff000000 + (bint << 16) + (gint << 8) + rint;
rgbDataFinalR[pixPtr] = (int)rgbDataPixel;
pixPtr++;
}
}
fclose(fileY);
fclose(fileU);
fclose(fileV);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther2: nnumber for close: = %d",indexP);
remove(filenameconY);
remove(filenameconU);
remove(filenameconV);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther2: nnumber for remove: = %d",4);
}
}
}else{
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOtherYUV: api ELSE: = %d",sz);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: number: = %d",1);
rgbDataGlobal = (jint*)calloc(lenVector, sizeof(jint));
int lstr = 0;
int mychar = 0;
char *filename = NULL;
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: number: = %d",1);
int nnumber = (int)number;
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: nnumber: = %d",1);
for(indexP=1; indexP<nnumber;indexP++){
char filenamecon[sizeof "/storage/emulated/legacy/ls/myfile0000.txt"];
sprintf(filenamecon, "/data/data/com.example.android.camera2basic/ls/myfile%04d-y.txt", indexP);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: number: = %s",filenamecon);
FILE* file = fopen(filenamecon, "r");
if(file ==NULL){
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "no existe RenderProcessOther: number: = %s",filenamecon);
}
/* DEAD LINE */
/* DEAD LINE */
/* DEAD LINE */
fread(arr, lenVector,0 , file); // <------- DEAD LINE
///fgets(arr, lenVector, file)
/* DEAD LINE */
/* DEAD LINE */
/* DEAD LINE */
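/* Note on the fread call above: the signature is
 *   size_t fread(void *ptr, size_t size, size_t nmemb, FILE *stream);
 * so fread(arr, lenVector, 0, file) requests zero items of size lenVector
 * and reads nothing; the intended call is presumably fread(arr, 1, lenVector, file).
 * Separately, arr/arrY/arrU/arrV/rgbDataFinalR are ~15 MB each of local
 * (stack) storage, which can overflow the thread stack; how and when that
 * fails can differ between Android versions. */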
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "RenderProcessOther: indexP 4: = %d",indexP);
int n = sizeof(arr);
__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "sizeof: sizeof 4: = %d",n);
if(n !=0){
n = sizeof(arr)/sizeof(arr[0]);
for(j = 0; j < h; j++) {
pixPtr = j * w;
jDiv2 = j >> 1;
for(i = 0; i < w; i++) {
counter++;
Y = arr[pixPtr];
if(Y < 0) Y += 255;
if((i & 0x1) != 1) {
cOff = sz + jDiv2 * w + (i >> 1) * 2;
Cb = arr[cOff];
if(Cb < 0) Cb += 127; else Cb -= 128;
Cr = arr[cOff + 1];
if(Cr < 0) Cr += 127; else Cr -= 128;
}
R = Y + Cr + (Cr >> 2) + (Cr >> 3) + (Cr >> 5);//1.406*~1.403
if(R < 0) R = 0; else if(R > 255) R = 255;
G = Y - (Cb >> 2) + (Cb >> 4) + (Cb >> 5) - (Cr >> 1) + (Cr >> 3) + (Cr >> 4) + (Cr >> 5);//
if(G < 0) G = 0; else if(G > 255) G = 255;
B = Y + Cb + (Cb >> 1) + (Cb >> 2) + (Cb >> 6);//1.765~1.770
if(B < 0) B = 0; else if(B > 255) B = 255;
if(indexP==0){
rgbDataFinal[pixPtr++] = 0xff000000 + (B << 16) + (G << 8) + R;
}else{
int pixPtrIndex = pixPtr++;
int rgbDataPixel = 0xff000000 + (B << 16) + (G << 8) + R;
int color = rgbDataPixel;
int r = color & 0xff;
int g = (color >> 8) & 0xff;
int b = (color >> 16) & 0xff;
int a = (color >> 24) & 0xff;
int color2 = rgbDataFinal[pixPtrIndex];
int r2 = color2 & 0xff;
int g2 = (color2 >> 8) & 0xff;
int b2 = (color2 >> 16) & 0xff;
int a2 = (color2 >> 24) & 0xff;
double ad = a - a2;
double rd = r - r2;
double gd = g - g2;
double bd = b- b2;
double ar = a2/a;
double rr = r2/r;
double gr = g2/g;
double br = b2/b;
if(a2<a|| r2<r|| g2<g || b2<b) {
double coeficiente = 0.5;
double d =(r - ((r - r2)*coeficiente));
int red1 = (int) d;
d =(g - ((g - g2)*coeficiente));
int green1 = (int)d;
d =(b - ((b - b2)*coeficiente));
int blue1 = (int)d;
int renderRGBDataPixel = 0xff000000 + (blue1 << 16) + (green1 << 8) + red1;
rgbDataFinal[pixPtrIndex] = renderRGBDataPixel;
}
}
}
}
fclose(file);
remove(filenamecon);
}
}
};
for (i = 0; i < sz; ++i) {
rgbDataGlobal[i]= rgbDataFinal[i];
}
jintArray jArray = (*env)->NewIntArray(env, sz);
if (jArray != NULL) {
jint u;
u =0;
(*env)->SetIntArrayRegion(env, jArray, 0, sz,rgbDataGlobal);
}
return jArray;
}
Android.mk
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := helloc
LOCAL_SRC_FILES := HelloC.c
LOCAL_LDLIBS := -llog -ldl -landroid
include $(BUILD_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := imageformat
LOCAL_SRC_FILES := ImageFormat.c
LOCAL_LDLIBS := -llog
include $(BUILD_SHARED_LIBRARY)
Application.mk
APP_ABI := armeabi armeabi-v7a mips x86
APP_PLATFORM := android-21
build.gradle
android {
compileSdkVersion 23
buildToolsVersion "24.0.0 rc3"
defaultConfig {
minSdkVersion 17
targetSdkVersion 23
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_7
targetCompatibility JavaVersion.VERSION_1_7
}
}
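For comparison, a minimal sketch (not the questioner's code; the function name, path handling and sizes are placeholders) of reading a file with fread's arguments in their documented order, using a heap buffer instead of multi-megabyte stack arrays:
#include <stdio.h>
#include <stdlib.h>

/* Read up to max_len bytes from path into a freshly allocated buffer.
 * Returns the number of bytes read, or -1 on error. Caller frees *out. */
static long read_file(const char *path, char **out, size_t max_len) {
    FILE *f = fopen(path, "rb");
    if (f == NULL) return -1;
    char *buf = malloc(max_len);          /* heap, not a 15 MB stack array */
    if (buf == NULL) { fclose(f); return -1; }
    size_t n = fread(buf, 1, max_len, f); /* size = 1 byte, nmemb = max_len */
    fclose(f);
    *out = buf;
    return (long) n;
}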

Android JNI reads the same path, but not the same value

// Forward declaration so setIntField can call readFromFile (defined below).
static int readFromFile(const char* path, char* buf, size_t size);

static void setIntField(JNIEnv* env, jobject obj, const char* path,
        jfieldID fieldID) {
    const int SIZE = 128;
    char buf[SIZE] = "\0";
    jint value = 0;
    if (readFromFile(path, buf, SIZE) > 0) {
        value = atoi(buf);
    }
    env->SetIntField(obj, fieldID, value);
}

static int readFromFile(const char* path, char* buf, size_t size) {
    if (!path)
        return -1;
    int fd = open(path, O_RDONLY, 0);
    if (fd == -1) {
        LOGE("Could not open '%s'", path);
        return -1;
    }
    // read() returns ssize_t; keeping it signed preserves the -1 error value.
    ssize_t count = read(fd, buf, size);
    if (count > 0) {
        count = (count < (ssize_t) size) ? count : (ssize_t) size - 1;
        while (count > 0 && buf[count - 1] == '\n')
            count--;
        buf[count] = '\0';
    } else {
        buf[0] = '\0';
    }
    close(fd);
    return (int) count;
}
This is my Android JNI code. Why do I read the same path, but the value is not the same twice? I am reading the path /sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq.
How do I fix it?
The scaling_min_freq value is 51000. The first time I get 51000; the second time I get 150000.
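As a quick diagnostic (a sketch reusing the readFromFile function and a LOGE macro as assumed in the snippet above), read the file twice back-to-back and log both raw buffers; if they differ, the sysfs value itself is changing between reads rather than the JNI code misbehaving:
/* Hypothetical helper: read the same sysfs path twice in a row. */
static void readTwice(const char* path) {
    char first[128] = "\0";
    char second[128] = "\0";
    readFromFile(path, first, sizeof(first));
    readFromFile(path, second, sizeof(second));
    LOGE("first='%s' second='%s'", first, second);
}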

Getting stray symbols in a string in the Android NDK

In the NDK I want to convert some ASCII values to a string. I get the result, but there are some stray symbols after the string.
This is the output I am getting:
sint#j8na8̀
My code is:
jstring Java_com_magsonwink_utils_security_En_invokeNativeFunction(
JNIEnv* env, jobject javaThis) {
int i = 0;
int a[3]= {
115,
105,
110,
};
char b[3];
for (i = 0; i < 3; i++) {
b[i] = (char) a[i];
}
jstring result = (*env)->NewStringUTF(env, b);
return result;
}
Don't you have to null-terminate the string? NewStringUTF expects a NUL-terminated (modified UTF-8) string, so with a 3-byte buffer and no terminator it keeps reading past the array until it happens to hit a zero byte; that is where the trailing symbols come from.
Try
jstring Java_com_magsonwink_utils_security_En_invokeNativeFunction(
JNIEnv* env, jobject javaThis) {
int i = 0;
int a[3]= {
115,
105,
110,
};
char b[4];
for (i = 0; i < 3; i++) {
b[i] = (char) a[i];
}
b[3] = 0;
jstring result = (*env)->NewStringUTF(env, b);
return result;
}
