Hi, I am trying to change the sample rate of the audio streams of a video file using FFmpeg, but I am not able to change the audio of the original file. So far I am able to read the audio and video streams separately and can show their sample rate, but I don't know how to apply this change permanently. Following is my Java code.
public class MainActivity extends Activity {
public static native float logFileInfo(String inputfilename);
static
{
System.loadLibrary("mylib");
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
TextView tv= new TextView(this);
String path= Environment.getExternalStorageDirectory().getPath();
path+="/test2_modified.mp4";
float x=logFileInfo(path);
String y=Float.toString(x);
tv.setText(y);
setContentView(tv);
}
}
and here is my native file:
JNIEXPORT jfloat JNICALL Java_ru_dzakhov_ffmpeg_test_MainActivity_logFileInfo
(JNIEnv * env,
jobject this,
jstring filename
)
{
AVFormatContext *pFormatCtx;
int i, videoStream, audioStream;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame;
AVPacket packet;
int frameFinished;
float aspect_ratio;
AVCodecContext *aCodecCtx;
AVCodec *aCodec;
av_register_all();
char *str = (*env)->GetStringUTFChars(env, filename, 0);
LOGI(str);
// Open video file
if(av_open_input_file(&pFormatCtx, str, NULL, 0, NULL)!=0)
return -1; // could not open the file
// Retrieve stream information
if(av_find_stream_info(pFormatCtx)<0)
return -1; // could not read stream information
LOGI("Separating");
// Find the first video stream
videoStream=-1;
audioStream=-1;
for(i=0; i<pFormatCtx->nb_streams; i++) {
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO &&
videoStream < 0) {
videoStream=i;
}
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO &&
audioStream < 0) {
audioStream=i;
}
}
if(videoStream==-1)
LOGI("Video stream is -1");
if(audioStream==-1)
LOGI("Audio stream is -1");
//pFormatCtx->streams[audioStream]->codec->sample_rate=16000;
aCodecCtx=pFormatCtx->streams[audioStream]->codec;
//aCodecCtx->sample_rate=16000;
jfloat sr=aCodecCtx->sample_rate;
return sr;
}
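For context (my addition, not part of the original question): assigning to aCodecCtx->sample_rate only changes the value FFmpeg reports; to change the rate permanently the audio has to be decoded, resampled, and re-encoded into a new file. A minimal sketch of the resampler setup with libswresample, assuming a target of 16000 Hz mono signed 16-bit (all three targets are assumptions):
#include "libswresample/swresample.h"
// input side comes from the opened stream, output side is the assumed target
SwrContext *swr = swr_alloc_set_opts(NULL,
        AV_CH_LAYOUT_MONO, AV_SAMPLE_FMT_S16, 16000,            /* output */
        aCodecCtx->channel_layout, aCodecCtx->sample_fmt,
        aCodecCtx->sample_rate,                                 /* input */
        0, NULL);
swr_init(swr);
// each decoded AVFrame is then pushed through swr_convert() and the
// converted samples are fed to an encoder/muxer that writes the new file
swr_free(&swr);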
Related
I'm trying to retrieve metadata in Android using FFmpeg, JNI and a Java FileDescriptor, and it isn't working. I know FFmpeg supports the pipe protocol, so I'm trying to emulate "cat test.mp3 | ffmpeg -i pipe:0" programmatically. I use the following code to get a FileDescriptor from an asset bundled with the Android application:
FileDescriptor fd = getContext().getAssets().openFd("test.mp3").getFileDescriptor();
setDataSource(fd, 0, 0x7ffffffffffffffL); // native function, shown below
Then, in my native (C++) code I get the FileDescriptor by calling:
static void wseemann_media_FFmpegMediaMetadataRetriever_setDataSource(JNIEnv *env, jobject thiz, jobject fileDescriptor, jlong offset, jlong length)
{
//...
int fd = jniGetFDFromFileDescriptor(env, fileDescriptor); // function contents show below
//...
}
// function contents
static int jniGetFDFromFileDescriptor(JNIEnv * env, jobject fileDescriptor) {
jint fd = -1;
jclass fdClass = env->FindClass("java/io/FileDescriptor");
if (fdClass != NULL) {
jfieldID fdClassDescriptorFieldID = env->GetFieldID(fdClass, "descriptor", "I");
if (fdClassDescriptorFieldID != NULL && fileDescriptor != NULL) {
fd = env->GetIntField(fileDescriptor, fdClassDescriptorFieldID);
}
}
return fd;
}
I then pass the file descriptor pipe number (in C) to FFmpeg:
char path[256] = "";
FILE *file = fdopen(fd, "rb");
if (file && (fseek(file, offset, SEEK_SET) == 0)) {
char str[20];
sprintf(str, "pipe:%d", fd);
strcat(path, str);
}
State *state = av_mallocz(sizeof(State));
state->pFormatCtx = NULL;
if (avformat_open_input(&state->pFormatCtx, path, NULL, &options) != 0) { // Note: path is in the format "pipe:<the FD #>"
printf("Metadata could not be retrieved\n");
*ps = NULL;
return FAILURE;
}
if (avformat_find_stream_info(state->pFormatCtx, NULL) < 0) {
printf("Metadata could not be retrieved\n");
avformat_close_input(&state->pFormatCtx);
*ps = NULL;
return FAILURE;
}
// Find the first audio and video stream
for (i = 0; i < state->pFormatCtx->nb_streams; i++) {
if (state->pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO && video_index < 0) {
video_index = i;
}
if (state->pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO && audio_index < 0) {
audio_index = i;
}
set_codec(state->pFormatCtx, i);
}
if (audio_index >= 0) {
stream_component_open(state, audio_index);
}
if (video_index >= 0) {
stream_component_open(state, video_index);
}
printf("Found metadata\n");
AVDictionaryEntry *tag = NULL;
while ((tag = av_dict_get(state->pFormatCtx->metadata, "", tag, AV_DICT_IGNORE_SUFFIX))) {
printf("Key %s: \n", tag->key);
printf("Value %s: \n", tag->value);
}
*ps = state;
return SUCCESS;
My issue is that avformat_open_input doesn't fail, but it also doesn't let me retrieve any metadata or frames. The same code works if I use a regular file URI (e.g. file://sdcard/test.mp3) as the path. What am I doing wrong? Thanks in advance.
Note: if you would like to look at all of the code, I'm trying to solve the issue in order to provide this functionality for my library, FFmpegMediaMetadataRetriever.
Java
AssetFileDescriptor afd = getContext().getAssets().openFd("test.mp3");
setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
C
void ***_setDataSource(JNIEnv *env, jobject thiz,
jobject fileDescriptor, jlong offset, jlong length)
{
int fd = jniGetFDFromFileDescriptor(env, fileDescriptor);
char path[20];
sprintf(path, "pipe:%d", fd);
State *state = av_mallocz(sizeof(State));
state->pFormatCtx = avformat_alloc_context();
state->pFormatCtx->skip_initial_bytes = offset;
state->pFormatCtx->iformat = av_find_input_format("mp3");
and now we can continue as usual:
if (avformat_open_input(&state->pFormatCtx, path, NULL, &options) != 0) {
printf("Metadata could not be retrieved\n");
*ps = NULL;
return FAILURE;
}
...
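For what it's worth, my reading of why this works (not stated in the original answer): the asset sits at an offset inside the APK, and pipe: reads are not seekable, so skip_initial_bytes tells FFmpeg to discard the leading bytes up to the asset, while forcing the mp3 input format avoids format probing that might otherwise try to seek.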
Even better, use <android/asset_manager.h>, like this:
Java
setDataSource(getContext().getAssets(), "test.mp3");
C
#include <android/asset_manager_jni.h>
void ***_setDataSource(JNIEnv *env, jobject thiz,
jobject assetManager, jstring assetName)
{
AAssetManager* mgr = AAssetManager_fromJava(env, assetManager);
const char *szAssetName = (*env)->GetStringUTFChars(env, assetName, NULL);
AAsset* asset = AAssetManager_open(mgr, szAssetName, AASSET_MODE_RANDOM);
(*env)->ReleaseStringUTFChars(env, assetName, szAssetName);
off_t offset, length;
int fd = AAsset_openFileDescriptor(asset, &offset, &length);
AAsset_close(asset);
Disclaimer: error checking was omitted for brevity, but resources are released correctly, except for fd. You must close(fd) when finished.
Post scriptum: note that some media formats, e.g. MP4, need a seekable protocol, and pipe: cannot help there. In that case, you may try sprintf(path, "/proc/self/fd/%d", fd); or use the custom saf: protocol.
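To illustrate the /proc fallback (a sketch under the assumption that fd and offset come from AAsset_openFileDescriptor as above), the path then points at a real, seekable file, and skip_initial_bytes positions FFmpeg at the asset:
char path[32];
sprintf(path, "/proc/self/fd/%d", fd);
AVFormatContext *fmt = avformat_alloc_context();
fmt->skip_initial_bytes = offset;  /* the asset starts at this offset inside the APK */
if (avformat_open_input(&fmt, path, NULL, NULL) != 0) {
    /* handle the error */
}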
Thanks a lot for this post.
It helped me a lot to integrate Android 10 scoped storage with FFmpeg using a FileDescriptor.
Here is the solution I'm using on Android 10:
Java
Uri uri = ContentUris.withAppendedId(
MediaStore.Audio.Media.EXTERNAL_CONTENT_URI,
trackId // Coming from `MediaStore.Audio.Media._ID`
);
ParcelFileDescriptor parcelFileDescriptor = getContentResolver().openFileDescriptor(
uri,
"r"
);
int pid = android.os.Process.myPid();
String path = "/proc/" + pid + "/fd/" + parcelFileDescriptor.dup().getFd();
loadFFmpeg(path); // Call native code
CPP
// Native code, `path` coming from Java `loadFFmpeg(String)`
avformat_open_input(&format, path, nullptr, nullptr);
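One caveat from me (not in the original post): the dup'ed descriptor stays open on the Java side, so release it once the native side is done:
Java
parcelFileDescriptor.close(); // after the native code has called avformat_close_input()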
OK, I spent a lot of time trying to feed media data to FFmpeg through an AssetFileDescriptor. Finally, I found that there may be a bug in mov.c: when it parses the trak atom, the corresponding skip_initial_bytes is not taken into account. I have tried to fix this problem.
For details please refer to FFmpegForAndroidAssetFileDescriptor; for a demo refer to WhatTheCodec.
FileDescriptor fd = getContext().getAssets().openFd("test.mp3").getFileDescriptor();
I think you should start with AssetFileDescriptor.
http://developer.android.com/reference/android/content/res/AssetFileDescriptor.html
I'm using the following C function to decode packets in Android (with JNI). When I play an MP3 file the code works fine; however, a WMA file results in choppy audio. I suspect the issue may be with the swr_convert function and the data_size I'm using, but I'm not sure. Does anyone know why this would be happening?
int decodeFrameFromPacket(AVPacket *aPacket) {
int n;
AVPacket *pkt = aPacket;
AVFrame *decoded_frame = NULL;
int got_frame = 0;
if (aPacket->stream_index == global_audio_state->audio_stream) {
if (!decoded_frame) {
if (!(decoded_frame = avcodec_alloc_frame())) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Could not allocate audio frame\n");
return -2;
}
}
if (avcodec_decode_audio4(global_audio_state->audio_st->codec, decoded_frame, &got_frame, aPacket) < 0) {
__android_log_print(ANDROID_LOG_INFO, TAG, "Error while decoding\n");
return -2;
}
int data_size = 0;
if (got_frame) {
/* if a frame has been decoded, output it */
data_size = av_samples_get_buffer_size(NULL, global_audio_state->audio_st->codec->channels,
decoded_frame->nb_samples,
global_audio_state->audio_st->codec->sample_fmt, 1);
}
swr_convert(global_audio_state->swr, (uint8_t **) &gAudioFrameRefBuffer, decoded_frame->nb_samples, (uint8_t const **) decoded_frame->data, decoded_frame->nb_samples);
avcodec_free_frame(&decoded_frame);
gAudioFrameDataLengthRefBuffer[0] = data_size;
return AUDIO_DATA_ID;
}
return 0;
}
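For what it's worth (my note, not from the thread): swr_convert may buffer samples internally, so the number of converted samples can differ from decoded_frame->nb_samples, and a data_size computed from the source format will not match what the converter actually wrote. A sketch of sizing the output from the converter itself, assuming out_rate, out_channels and out_fmt describe how swr was initialized (all three names are mine):
int64_t max_out = av_rescale_rnd(
        swr_get_delay(global_audio_state->swr, decoded_frame->sample_rate)
        + decoded_frame->nb_samples,
        out_rate, decoded_frame->sample_rate, AV_ROUND_UP);
int converted = swr_convert(global_audio_state->swr,
        (uint8_t **) &gAudioFrameRefBuffer, max_out,
        (uint8_t const **) decoded_frame->data, decoded_frame->nb_samples);
int out_bytes = converted * out_channels * av_get_bytes_per_sample(out_fmt);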
I am trying to solve a big problem but am stuck on a very small issue. I am trying to read the audio streams inside a video file with the help of FFmpeg, but the loop that should traverse the whole file of streams only runs a couple of times. I cannot figure out what the issue is, as others have used it very similarly.
Following is my code, please check:
JNIEXPORT jint JNICALL Java_ru_dzakhov_ffmpeg_test_MainActivity_logFileInfo
(JNIEnv * env,
jobject this,
jstring filename
)
{
AVFormatContext *pFormatCtx;
int i,j,k, videoStream, audioStream;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame;
AVPacket packet;
int frameFinished;
float aspect_ratio;
AVCodecContext *aCodecCtx;
AVCodec *aCodec;
//uint8_t inbuf[AUDIO_INBUF_SIZE + FF_INPUT_BUFFER_PADDING_SIZE];
j=0;
av_register_all();
char *str = (*env)->GetStringUTFChars(env, filename, 0);
LOGI(str);
// Open video file
if(av_open_input_file(&pFormatCtx, str, NULL, 0, NULL)!=0)
return -1; // could not open the file
// Retrieve stream information
if(av_find_stream_info(pFormatCtx)<0)
return -1; // could not read stream information
LOGI("Separating");
// Find the first video stream
videoStream=-1;
audioStream=-1;
for(i=0; i<pFormatCtx->nb_streams; i++) {
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO)
{
LOGI("Audio Stream");
audioStream=i;
}
}
av_write_header(pFormatCtx);
if(videoStream==-1)
LOGI("Video stream is -1");
if(audioStream==-1)
LOGI("Audio stream is -1");
return i;
}
You may be having an issue related to library loading and unloading and how that relates to repeated calls through JNI. Not sure from your symptom, but if you have no solution try reading:
here
and here
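Another thing that may help (my observation, not from the answer above): the loop iterates over pFormatCtx->nb_streams, which is the number of streams in the container, typically just two (one video, one audio), so running only a couple of times is expected. To walk the actual media data you loop over packets instead, roughly:
AVPacket packet;
while (av_read_frame(pFormatCtx, &packet) >= 0) {
    if (packet.stream_index == audioStream) {
        /* decode or process the audio packet here */
    }
    av_free_packet(&packet);
}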
I am new to Android and I am trying to read a file from the SD card using the NDK, send the read bytes to the SDK side, and write them back to the SD card from Java (for checking). I can successfully make the call from NDK to SDK with the bytes and write them to the SD card, but I could not open the file that was written to the SD card.
I am pasting my code below.
Native C file
#include<stdio.h>
#include<string.h>
#include <jni.h>
#include <stdlib.h>
#include <android/log.h>
#include "com_example_fileupload_NativeLib.h"
#define LOG_TAG "testjni"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
JavaVM *m_vm;
jobject obj;
jclass gFilePermInfoClass;
jmethodID gFilePermInfoClsConstructor;
jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
JNIEnv* env;
LOGI("JNI INIT");
return JNI_VERSION_1_6;
}
JNIEXPORT jint JNICALL Java_com_example_fileupload_NativeLib_loop
(JNIEnv *env, jobject obj)
{
LOGI("JNI work !");
jbyteArray data;
jbyteArray data_rem;
jclass clazz = (*env)->FindClass(env,"com/example/fileupload/NativeLib");
if (clazz == 0) {
LOGI("FindClass error");
return 2;
}
gFilePermInfoClass = (*env)->NewGlobalRef(env, clazz);
LOGI("JNI before jmethodID");
jmethodID javamethod = (*env)->GetMethodID(env,gFilePermInfoClass, "callFromCPP", "([B)V");
gFilePermInfoClsConstructor = javamethod; // jmethodID is not a jobject, so no global ref is needed
if (javamethod == 0) {
LOGI("GetMethodID error");
return 3;
}
LOGI("JNI before calling java ");
LOGI(" before File ");
FILE *fd = NULL;
char buff[1*1024*1024];
char *h;
unsigned int nread = 0;
unsigned int nread_remaining = 0;
int count=0;
memset(buff, '\0', sizeof(buff));
fd = fopen("/sdcard/20MB.zip","r+");
if(NULL == fd) // Check for error before using the handle
{
LOGI("\n File opening failed\n");
return 1;
}
fseek ( fd, 0 , SEEK_END );
int fileSize = ftell(fd);
fseek ( fd, 0 , SEEK_SET ); // rewind, otherwise every fread below reads nothing
LOGI("****SUDARSHAN C SIZE %d",fileSize);
nread = (1*1024*1024); // read in 1 MB chunks
if(nread_remaining = fileSize%nread)
{
count = fileSize/nread;
LOGI("***** IN IF rem %d",nread_remaining);
}
else{
count = fileSize/nread;
LOGI("***** IN ELSE COUNT %d",count);
}
data = (*env)->NewByteArray(env, nread);
data_rem = (*env)->NewByteArray(env, nread_remaining);
if(data == NULL){
LOGI("No memory could be allocated for buffer");
return -1;
}
LOGI("Before While in C ");
int count_test=0;
int i;
for(i=count;i>0;i--)
{
fread(buff, 1, nread, fd);
LOGI("INSIUDE WHILE SUDARSHAN **** %d",i);
(*env)->SetByteArrayRegion(env, data, 0, nread, buff);
(*env)->CallVoidMethod(env, obj, gFilePermInfoClsConstructor, data);
}
if(nread_remaining){
char buffer[nread_remaining];
fread(buffer, 1, nread_remaining, fd);
(*env)->SetByteArrayRegion(env, data_rem, 0, nread_remaining, buffer);
(*env)->CallVoidMethod(env, obj, gFilePermInfoClsConstructor, data_rem);
}
if(feof(fd))
{
printf("\n Seems like end of file was reached\n");
}
else if(ferror(fd))
{
printf("\n Some errors on this stream occurred\n");
}
// Reset the buffer with NULLs
memset(buff, '\0', sizeof(buff));
return (10+11);
}
NativeLib.java
package com.example.fileupload;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.os.Environment;
public class NativeLib {
public int count=0;
File someFile = null;
FileOutputStream fos = null;
File sdDir = new File(Environment.getExternalStorageDirectory().getPath());
public NativeLib() throws FileNotFoundException{
someFile = new File(sdDir.getPath() +"/22MB.zip");
fos = new FileOutputStream(someFile);
//fos.flush();fos.close();
}
static {
System.loadLibrary("com_example_fileupload_NativeLib");
}
/**
* Reads the file natively and streams its bytes back to Java via callFromCPP()
*/
public native int loop();
/**
* Returns Hello World string
* @throws IOException
*/
//public native String hello();
public void callFromCPP(byte[] buff)
{
count=count+buff.length;
try {
//count=count+buff.length;
System.out.println("************ Sudarshan Inside callfromcpp try "+count+" "+buff.length);
fos.write(buff);
} catch (IOException e) {
// TODO Auto-generated catch block
System.out.println("************ Sudarshan Inside callfromcpp catch "+e.toString());
e.printStackTrace();
}
// count=count+buff.length;
//System.out.println("************ Sudarshan Inside callfromcpp 1 "+buff.toString()+" "+count);
}
}
I am calling the native function loop() from my MainActivity.java, and the Java function called from native code is callFromCPP(byte[] buff); that is where I write the bytes returned from native code to the SD card (as 22MB.zip).
Please help.
Regards,
Deepak
I found the mistake I had made: there is a limit on how many bytes the "fread" function can read. I had given 1 MB (the nread value in my C file), which is too large, so I changed the value to 100 and it worked.
Regards,
Deepak
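A guess from my side (not verified): fread itself has no such limit; the more likely culprit is the 1 MB local array char buff[1*1024*1024], which lives on the thread stack and can overflow it. Allocating the buffer on the heap keeps large chunk sizes workable:
char *buff = malloc(nread);  /* heap allocation instead of a 1 MB stack array */
if (buff == NULL) {
    LOGI("buffer allocation failed");
    return -1;
}
/* ... fread/SetByteArrayRegion loop as before ... */
free(buff);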
I have integrated the FFmpeg library in my project and can already get information about media files. But now I have to play MP3 files using the AudioTrack class in Android with the FFmpeg library.
For this I have to pass a byte buffer to AudioTrack, but I don't know how to get the byte buffer from FFmpeg and use it with AudioTrack. I also want playback to start instantly, without delay.
Here is my audio track code in java :
AudioTrack track;
bufferSize = AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM);
//Play audio clip
track.play();
while(!stream_is_over){
//Copy the decoded raw buffer from native code to "buffer" .....
............
track.write(buffer, 0, readBytes);
}
Can anyone please give me working code to play MP3 files with AudioTrack? I have searched a lot but haven't found a correct answer.
I managed this by creating a buffer of the audio data and then playing it with the AudioTrack class on the fly. Now I'm trying to pause/stop the audio file, because stopping or pausing AudioTrack is not working.
Here is my code to pass the byte buffer to my Java class:
#include <assert.h>
#include <jni.h>
#include <string.h>
#include <android/log.h>
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#define LOG_TAG "mylib"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
#define AUDIO_INBUF_SIZE 20480
#define AUDIO_REFILL_THRESH 4096
void Java_ru_dzakhov_ffmpeg_test_MainActivity_createEngine(JNIEnv* env,
jclass clazz) {
avcodec_init();
av_register_all();
}
jstring Java_ru_dzakhov_ffmpeg_test_MainActivity_loadFile(JNIEnv* env,
jobject obj, jstring file, jbyteArray array) {
{
jboolean isfilenameCopy;
const char * filename = (*env)->GetStringUTFChars(env, file,
&isfilenameCopy);
int audioStreamIndex;
AVCodec *codec;
AVCodecContext *c = NULL;
AVFormatContext * pFormatCtx;
AVCodecContext * aCodecCtx;
int out_size, len, audioStream = -1, i, err;
FILE *f, *outfile;
uint8_t *outbuf;
uint8_t inbuf[AUDIO_INBUF_SIZE + FF_INPUT_BUFFER_PADDING_SIZE];
AVPacket avpkt;
jclass cls = (*env)->GetObjectClass(env, obj);
jmethodID play = (*env)->GetMethodID(env, cls, "playSound", "([BI)V"); //At the begining of your main function
LOGE("source file name is %s", filename);
avcodec_init();
av_register_all();
LOGE("Stage 1");
/* get format somthing of source file to AVFormatContext */
int lError;
if ((lError = av_open_input_file(&pFormatCtx, filename, NULL, 0, NULL))
!= 0) {
LOGE("Error open source file: %d", lError);
exit(1);
}
if ((lError = av_find_stream_info(pFormatCtx)) < 0) {
LOGE("Error find stream information: %d", lError);
exit(1);
}
LOGE("Stage 1.5");
LOGE("audio format: %s", pFormatCtx->iformat->name);
LOGE("audio bitrate: %d", pFormatCtx->bit_rate);
audioStreamIndex = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_AUDIO,
-1, -1, &codec, 0);
LOGE("audio codec: %s", codec->name);
/* get codec somthing of audio stream to AVCodecContext */
aCodecCtx = pFormatCtx->streams[audioStreamIndex]->codec;
if (avcodec_open(aCodecCtx, codec) < 0) {
LOGE("cannot open the audio codec!");
exit(1);
}
printf("Audio decoding\n");
LOGE("Stage 1.7");
LOGE("S");
codec = avcodec_find_decoder(aCodecCtx->codec_id);
LOGE("Stage 1.8");
if (!codec) {
LOGE("codec not found\n");
exit(1);
}
LOGE("Stage 2");
// c= avcodec_alloc_context();
LOGE("Stage 3");
/* open it */
if (avcodec_open(aCodecCtx, codec) < 0) {
LOGE("could upper");
fprintf(stderr, "could not open codec\n");
LOGE("could not open codec");
}
LOGE("Stage 4");
outbuf = malloc(AVCODEC_MAX_AUDIO_FRAME_SIZE);
f = fopen(filename, "rb");
if (!f) {
fprintf(stderr, "could not open %s\n", filename);
LOGE("could not open");
exit(1);
}
/* decode until eof */
avpkt.data = inbuf;
avpkt.size = fread(inbuf, 1, AUDIO_INBUF_SIZE, f);
LOGE("Stage 5");
while (avpkt.size > 0) {
// LOGE("Stage 6");
out_size = (AVCODEC_MAX_AUDIO_FRAME_SIZE / 3) * 2;
len = avcodec_decode_audio3(aCodecCtx, (int16_t *) outbuf,
&out_size, &avpkt);
LOGE("data_size %d len %d", out_size, len);
if (len < 0) {
fprintf(stderr, "Error while decoding\n");
LOGE("DECODING ERROR");
LOGE("DECODING ERROR %d", len);
exit(1);
}
// LOGE("Stage 7");
if (out_size > 0) {
/* if a frame has been decoded, output it */
// LOGE("Stage 8");
jbyte *bytes = (*env)->GetByteArrayElements(env, array, NULL);
memcpy(bytes, outbuf, out_size); //
(*env)->ReleaseByteArrayElements(env, array, bytes, 0);
(*env)->CallVoidMethod(env, obj, play, array, out_size);
// LOGE("DECODING ERROR5");
}
LOGE("Stage 9");
avpkt.size -= len;
avpkt.data += len;
if (avpkt.size < AUDIO_REFILL_THRESH) {
/* Refill the input buffer, to avoid trying to decode
* incomplete frames. Instead of this, one could also use
* a parser, or use a proper container format through
* libavformat. */
memmove(inbuf, avpkt.data, avpkt.size);
avpkt.data = inbuf;
len = fread(avpkt.data + avpkt.size, 1,
AUDIO_INBUF_SIZE - avpkt.size, f);
if (len > 0)
avpkt.size += len;
}
}
LOGE("Stage 12");
fclose(f);
free(outbuf);
avcodec_close(aCodecCtx);
av_close_input_file(pFormatCtx);
}
}
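For completeness, the Java side that CallVoidMethod targets could look roughly like this (a sketch; the method name and ([BI)V signature are taken from the GetMethodID call above, and track is assumed to be the AudioTrack from the question):
public void playSound(byte[] buf, int size) {
    track.write(buf, 0, size); // push the decoded PCM into the AudioTrack
}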
I have no clue about programming in Android, but Google just introduced a new low-level media API at I/O 2012.
Here's the link to the YouTube video: http://www.youtube.com/watch?v=YmCqJlzIUXs
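If it helps, that API shipped in Android 4.1 as MediaCodec/MediaExtractor; a minimal decoder setup looks roughly like this (path is assumed to be a local media file):
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(path);
extractor.selectTrack(0);
MediaFormat format = extractor.getTrackFormat(0);
MediaCodec codec = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
codec.configure(format, null, null, 0);
codec.start();
// then feed input buffers from the extractor and drain decoded output buffers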