FFmpeg error on decode - Android

I'm developing an Android app with libav and I'm trying to decode a 3gp file with the code below:
#define simbiLog(...) __android_log_print(ANDROID_LOG_DEBUG, "simbiose", __VA_ARGS__)
...
AVCodec *codec;
AVCodecContext *c = NULL;
int len;
FILE *infile, *outfile;
uint8_t inbuf[AUDIO_INBUF_SIZE + FF_INPUT_BUFFER_PADDING_SIZE];
AVPacket avpkt;
AVFrame *decoded_frame = NULL;
simbiLog("inbuf size: %d", sizeof(inbuf) / sizeof(inbuf[0]));
av_register_all();
av_init_packet(&avpkt);
codec = avcodec_find_decoder(AV_CODEC_ID_AMR_NB);
if (!codec) {
simbiLog("codec not found");
return ERROR;
}
c = avcodec_alloc_context3(codec);
if (!c) {
simbiLog("Could not allocate audio codec context");
return ERROR;
}
int open = avcodec_open2(c, codec, NULL);
if (open < 0) {
simbiLog("could not open codec %d", open);
return ERROR;
}
infile = fopen(inputPath, "rb");
if (!infile) {
simbiLog("could not open %s", inputPath);
return ERROR;
}
outfile = fopen(outputPath, "wb");
if (!outfile) {
simbiLog("could not open %s", outputPath);
return ERROR;
}
avpkt.data = inbuf;
avpkt.size = fread(inbuf, 1, AUDIO_INBUF_SIZE, infile);
int iterations = 0;
while (avpkt.size > 0) {
simbiLog("iteration %d", (++iterations));
simbiLog("avpkt.size %d avpkt.data %X", avpkt.size, avpkt.data);
int got_frame = 0;
if (!decoded_frame) {
if (!(decoded_frame = avcodec_alloc_frame())) {
simbiLog("out of memory");
return ERROR;
}
} else {
avcodec_get_frame_defaults(decoded_frame);
}
//the error happens below, but it doesn't occur the first time, only on the 4th loop iteration
len = avcodec_decode_audio4(c, decoded_frame, &got_frame, &avpkt);
if (len < 0) {
simbiLog("Error while decoding error %d frame %d duration %d", len, got_frame, avpkt.duration);
return ERROR;
} else {
simbiLog("Decoding length %d frame %d duration %d", len, got_frame, avpkt.duration);
}
if (got_frame) {
int data_size = av_samples_get_buffer_size(NULL, c->channels, decoded_frame->nb_samples, c->sample_fmt, 1);
size_t fwrite_size = fwrite(decoded_frame->data[0], 1, data_size, outfile);
simbiLog("fwrite returned %d", (int) fwrite_size);
}
avpkt.size -= len;
avpkt.data += len;
if (avpkt.size < AUDIO_REFILL_THRESH) {
memmove(inbuf, avpkt.data, avpkt.size);
avpkt.data = inbuf;
len = fread(avpkt.data + avpkt.size, 1, AUDIO_INBUF_SIZE - avpkt.size, infile);
if (len > 0)
avpkt.size += len;
simbiLog("fread returned %d", len);
}
}
fclose(outfile);
fclose(infile);
avcodec_close(c);
av_free(c);
av_free(decoded_frame);
but I'm getting the following log and error:
inbuf size: 20488
iteration 1
avpkt.size 3305 avpkt.data BEEED40C
Decoding length 13 frame 1 duration 0
fwrite returned 640
fread returned 0
iteration 2
avpkt.size 3292 avpkt.data BEEED40C
Decoding length 13 frame 1 duration 0
fwrite returned 640
fread returned 0
iteration 3
avpkt.size 3279 avpkt.data BEEED40C
Decoding length 14 frame 1 duration 0
fwrite returned 640
fread returned 0
iteration 4
avpkt.size 3265 avpkt.data BEEED40C
Error while decoding error -1052488119 frame 0 duration 0
The audio file I'm trying to decode:
$ avprobe blue.3gp
avprobe version 0.8.6-6:0.8.6-1ubuntu2, Copyright (c) 2007-2013 the Libav developers
built on Mar 30 2013 22:23:21 with gcc 4.7.2
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'blue.3gp':
Metadata:
major_brand : 3gp4
minor_version : 0
compatible_brands: isom3gp4
creation_time : 2013-09-19 18:53:38
Duration: 00:00:01.52, start: 0.000000, bitrate: 17 kb/s
Stream #0.0(eng): Audio: amrnb, 8000 Hz, 1 channels, flt, 12 kb/s
Metadata:
creation_time : 2013-09-19 18:53:38
thanks a lot!
EDITED
I read the following in the FFmpeg documentation for avcodec_decode_audio4:
#warning The input buffer, avpkt->data must be FF_INPUT_BUFFER_PADDING_SIZE larger than the actual read bytes because some optimized bitstream readers read 32 or 64 bits at once and could read over the end.
#note You might have to align the input buffer. The alignment requirements depend on the CPU and the decoder.
and I saw a solution here using posix_memalign; for Android I found a similar function called memalign, so I made this change:
removed:
uint8_t inbuf[AUDIO_INBUF_SIZE + FF_INPUT_BUFFER_PADDING_SIZE];
inserted:
int inbufSize = sizeof(uint8_t) * (AUDIO_INBUF_SIZE + FF_INPUT_BUFFER_PADDING_SIZE);
uint8_t *inbuf = memalign(FF_INPUT_BUFFER_PADDING_SIZE, inbufSize);
simbiLog("inbuf size: %d", inbufSize);
for (; inbufSize >= 0; inbufSize--)
simbiLog("inbuf position: %d index: %p", inbufSize, &inbuf[inbufSize]);
I'm getting the correct sequence of memory positions, but the error hasn't changed.
A piece of the output:
inbuf position: 37 index: 0x4e43d745
inbuf position: 36 index: 0x4e43d744
inbuf position: 35 index: 0x4e43d743
inbuf position: 34 index: 0x4e43d742
inbuf position: 33 index: 0x4e43d741
inbuf position: 32 index: 0x4e43d740
inbuf position: 31 index: 0x4e43d73f
inbuf position: 30 index: 0x4e43d73e
inbuf position: 29 index: 0x4e43d73d
inbuf position: 28 index: 0x4e43d73c
inbuf position: 27 index: 0x4e43d73b
inbuf position: 26 index: 0x4e43d73a
inbuf position: 25 index: 0x4e43d739
inbuf position: 24 index: 0x4e43d738
inbuf position: 23 index: 0x4e43d737
inbuf position: 22 index: 0x4e43d736
inbuf position: 21 index: 0x4e43d735
inbuf position: 20 index: 0x4e43d734
inbuf position: 19 index: 0x4e43d733

You are trying to decode without demuxing. A .3gp file is an MP4-family container, not a raw AMR bitstream, so the bytes you fread() contain container boxes and metadata interleaved with the audio rather than a clean sequence of AMR frames. Open the file with libavformat and let it hand you demuxed packets instead of feeding raw file chunks to avcodec_decode_audio4().
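In outline (a minimal sketch against the same old libav API the question uses; error handling is trimmed, and inputPath, outfile and ERROR are the question's own names):

AVFormatContext *fmt = NULL;
if (avformat_open_input(&fmt, inputPath, NULL, NULL) < 0) return ERROR;
if (avformat_find_stream_info(fmt, NULL) < 0) return ERROR;
AVCodec *codec = NULL;
int stream = av_find_best_stream(fmt, AVMEDIA_TYPE_AUDIO, -1, -1, &codec, 0);
if (stream < 0) return ERROR;
AVCodecContext *c = fmt->streams[stream]->codec; /* allocated by the demuxer */
if (avcodec_open2(c, codec, NULL) < 0) return ERROR;
AVPacket pkt;
AVFrame *frame = avcodec_alloc_frame();
while (av_read_frame(fmt, &pkt) >= 0) { /* one complete, demuxed packet */
    if (pkt.stream_index == stream) {
        int got_frame = 0;
        if (avcodec_decode_audio4(c, frame, &got_frame, &pkt) >= 0 && got_frame) {
            int data_size = av_samples_get_buffer_size(NULL, c->channels,
                    frame->nb_samples, c->sample_fmt, 1);
            fwrite(frame->data[0], 1, data_size, outfile);
        }
    }
    av_free_packet(&pkt);
}
av_free(frame);
avcodec_close(c);
avformat_close_input(&fmt);

This also removes the need for the manual inbuf refill logic: av_read_frame() returns properly padded, frame-aligned packets.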

Related

MediaCodec fails for jnilibs/arm64-v8a

I'm maintaining an Android video replay app through Android Studio. All aspects of the application work fine across multiple devices (Samsung S8, S10, S20, etc.) except for 64-bit devices, where the video decode fails. I'm looking for advice and/or an explanation as to what's causing the problem.
Below is a snippet of the code that is causing me grief, followed by the resultant logs. Basically the application works fine when run using jniLibs/armeabi, but fails when run using jniLibs/arm64-v8a. The decoder threads keep failing with IllegalArgumentException.
Code:
private class PlayerThread3 extends Thread {
private MediaCodec decoder;
private Surface surface;
public PlayerThread3(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
Log.d("PlayerThread3->Run", "Entry surface=" + surface.toString());
MediaFormat format = MediaFormat.createVideoFormat("video/avc", mSurfaceWidth, mSurfaceHeight);
try {
Log.d("PlayerThread3->Run", "MediaFormat=" + format.toString());
decoder = MediaCodec.createByCodecName("OMX.google.h264.decoder");
Log.d("PlayerThread3->Run", "MediaCodec=" + decoder.toString());
if (surface.isValid()) {
Log.d("PlayerThread3->Run", "surface.isValid=" + surface.toString());
decoder.configure(format, surface, null, 0);
} else {
Log.i("debug","decoder31");
Message msg = Message.obtain();
msg.what = 3;
msg.obj = "Failed to start live video(surface)";
handler.sendMessage(msg);
return;
}
} catch (IllegalArgumentException e) {
Log.d("PlayerThread3->Run", "IA surface=" + surface.toString());
e.printStackTrace();
Log.i("debug", "decoder32");
Message msg = Message.obtain();
msg.what = 3;
msg.obj = "Failed to start live video(arg)";
handler.sendMessage(msg);
return;
} catch (IOException e) {
e.printStackTrace();
Log.i("debug", "decoder33");
Message msg = Message.obtain();
msg.what = 3;
msg.obj = "Failed to start live video(IO)";
handler.sendMessage(msg);
return;
}
try {
decoder.start();
inputBuffers3 = decoder.getInputBuffers();
Message msg = Message.obtain();
msg.what = 3;
handler.sendMessage(msg);
while (preferences_isAlways.getBoolean("indexDecode3" + index, false)) {
if (decoder_array3 != null) {
offerDecoder(decoder_array3, decoder_array3.length);
decoder_array3 = null;
}
}
decoder.stop();
decoder.release();
} catch (IllegalStateException e) {
e.printStackTrace();
Log.i("debug", "decoder34");
Message msg = Message.obtain();
msg.what = 3;
msg.obj = "Failed to start live video(IS)";
handler.sendMessage(msg);
}
}
Log:
I/frameRate: 2
I/whichPartOfFrame: 0
I/s_frame_serial: 40
I/frame_packet_body: 1952
D/PlayerThread3->Run: Entry surface=Surface(name=null)/@0x68f7d48
D/PlayerThread3->Run: MediaFormat={mime=video/avc, width=536, height=437}
D/CCodec: allocate(c2.android.avc.decoder)
I/CCodec: Created component [c2.android.avc.decoder]
D/CCodecConfig: read media type: video/avc
D/ReflectedParamUpdater: extent() != 1 for single value type: algo.buffers.max-count.values
extent() != 1 for single value type: output.subscribed-indices.values
D/ReflectedParamUpdater: extent() != 1 for single value type: input.buffers.allocator-ids.values
extent() != 1 for single value type: output.buffers.allocator-ids.values
extent() != 1 for single value type: algo.buffers.allocator-ids.values
extent() != 1 for single value type: output.buffers.pool-ids.values
D/ReflectedParamUpdater: extent() != 1 for single value type: algo.buffers.pool-ids.values
D/ReflectedParamUpdater: ignored struct field coded.color-format.locations
D/CCodecConfig: ignoring local param raw.size (0xd2001800) as it is already supported
ignoring local param raw.color (0xd2001809) as it is already supported
D/ReflectedParamUpdater: ignored struct field raw.hdr-static-info.mastering
I/CCodecConfig: query failed after returning 12 values (BAD_INDEX)
D/CCodecConfig: c2 config is Dict {
c2::u32 coded.pl.level = 20496
c2::u32 coded.pl.profile = 20481
c2::u32 coded.vui.color.matrix = 0
c2::u32 coded.vui.color.primaries = 0
c2::u32 coded.vui.color.range = 2
c2::u32 coded.vui.color.transfer = 0
c2::u32 default.color.matrix = 0
c2::u32 default.color.primaries = 0
c2::u32 default.color.range = 0
c2::u32 default.color.transfer = 0
c2::u32 input.buffers.max-size.value = 57600
c2::u32 input.delay.value = 0
string input.media-type.value = "video/avc"
c2::u32 output.delay.value = 8
string output.media-type.value = "video/raw"
c2::u32 raw.color.matrix = 0
c2::u32 raw.color.primaries = 0
c2::u32 raw.color.range = 2
c2::u32 raw.color.transfer = 0
c2::u32 raw.max-size.height = 240
c2::u32 raw.max-size.width = 320
c2::u32 raw.pixel-format.value = 35
c2::i32 raw.rotation.flip = 0
c2::i32 raw.rotation.value = 0
c2::u32 raw.sar.height = 1
c2::u32 raw.sar.width = 1
c2::u32 raw.size.height = 240
c2::u32 raw.size.width = 320
c2::u32 ra
W/ColorUtils: expected specified color aspects (2:0:0:0)
D/PlayerThread3->Run: MediaCodec=android.media.MediaCodec@da3c906
surface.isValid=Surface(name=null)/@0x68f7d48
D/SurfaceUtils: connecting to surface 0x7645de8010, reason connectToSurface
I/MediaCodec: [c2.android.avc.decoder] setting surface generation to 27948035
D/SurfaceUtils: disconnecting from surface 0x7645de8010, reason connectToSurface(reconnect)
connecting to surface 0x7645de8010, reason connectToSurface(reconnect)
D/CCodecConfig: no c2 equivalents for native-window
config failed => CORRUPTED
Bad parameter value
D/CCodecConfig: c2 config is Dict {
c2::u32 coded.pl.level = 20496
c2::u32 coded.pl.profile = 20481
c2::u32 coded.vui.color.matrix = 0
c2::u32 coded.vui.color.primaries = 0
c2::u32 coded.vui.color.range = 2
c2::u32 coded.vui.color.transfer = 0
c2::u32 default.color.matrix = 0
c2::u32 default.color.primaries = 0
c2::u32 default.color.range = 0
c2::u32 default.color.transfer = 0
c2::u32 input.buffers.max-size.value = 57600
c2::u32 input.delay.value = 0
string input.media-type.value = "video/avc"
c2::u32 output.delay.value = 8
string output.media-type.value = "video/raw"
c2::u32 raw.color.matrix = 0
c2::u32 raw.color.primaries = 0
c2::u32 raw.color.range = 2
c2::u32 raw.color.transfer = 0
c2::u32 raw.max-size.height = 240
c2::u32 raw.max-size.width = 320
c2::u32 raw.pixel-format.value = 35
c2::i32 raw.rotation.flip = 0
c2::i32 raw.rotation.value = 0
c2::u32 raw.sar.height = 1
c2::u32 raw.sar.width = 1
c2::u32 raw.size.height = 240
c2::u32 raw.size.width = 536
c2::u32 ra
W/CCodec: failed to configure c2 params
E/MediaCodec: Codec reported err 0xffffffea, actionCode 0, while in state 3
D/SurfaceUtils: disconnecting from surface 0x7645de8010, reason disconnectFromSurface
E/MediaCodec: configure failed with err 0xffffffea, resetting...
I/MediaCodec: Codec shutdown complete
D/CCodec: allocate(c2.android.avc.decoder)
I/CCodec: Created component [c2.android.avc.decoder]
D/CCodecConfig: read media type: video/avc
D/ReflectedParamUpdater: extent() != 1 for single value type: algo.buffers.max-count.values
extent() != 1 for single value type: output.subscribed-indices.values
extent() != 1 for single value type: input.buffers.allocator-ids.values
extent() != 1 for single value type: output.buffers.allocator-ids.values
extent() != 1 for single value type: algo.buffers.allocator-ids.values
extent() != 1 for single value type: output.buffers.pool-ids.values
extent() != 1 for single value type: algo.buffers.pool-ids.values
D/ReflectedParamUpdater: ignored struct field coded.color-format.locations
I/frameRate: 2
I/whichPartOfFrame: 0
I/s_frame_serial: 41
I/frame_packet_body: 46
D/CCodecConfig: ignoring local param raw.size (0xd2001800) as it is already supported
ignoring local param raw.color (0xd2001809) as it is already supported
D/ReflectedParamUpdater: ignored struct field raw.hdr-static-info.mastering
I/CCodecConfig: query failed after returning 12 values (BAD_INDEX)
D/CCodecConfig: c2 config is Dict {
c2::u32 coded.pl.level = 20496
c2::u32 coded.pl.profile = 20481
c2::u32 coded.vui.color.matrix = 0
c2::u32 coded.vui.color.primaries = 0
c2::u32 coded.vui.color.range = 2
c2::u32 coded.vui.color.transfer = 0
c2::u32 default.color.matrix = 0
c2::u32 default.color.primaries = 0
c2::u32 default.color.range = 0
c2::u32 default.color.transfer = 0
c2::u32 input.buffers.max-size.value = 57600
c2::u32 input.delay.value = 0
string input.media-type.value = "video/avc"
c2::u32 output.delay.value = 8
string output.media-type.value = "video/raw"
c2::u32 raw.color.matrix = 0
c2::u32 raw.color.primaries = 0
c2::u32 raw.color.range = 2
c2::u32 raw.color.transfer = 0
c2::u32 raw.max-size.height = 240
c2::u32 raw.max-size.width = 320
c2::u32 raw.pixel-format.value = 35
c2::i32 raw.rotation.flip = 0
c2::i32 raw.rotation.value = 0
c2::u32 raw.sar.height = 1
c2::u32 raw.sar.width = 1
c2::u32 raw.size.height = 240
c2::u32 raw.size.width = 320
c2::u32 ra
W/ColorUtils: expected specified color aspects (2:0:0:0)
D/PlayerThread3->Run: IA surface=Surface(name=null)/@0x68f7d48
W/System.err: java.lang.IllegalArgumentException
at android.media.MediaCodec.native_configure(Native Method)
W/System.err: at android.media.MediaCodec.configure(MediaCodec.java:2023)
at android.media.MediaCodec.configure(MediaCodec.java:1951)
at au.com.FreedomVMS.iFreedomVMSpro.live.ShowFragment$PlayerThread3.run(ShowFragment.java:5789)
I/debug: decoder32
I/.iFreedomVMSpr: Compiler allocated 15MB to compile void au.com.FreedomVMS.iFreedomVMSpro.live.ShowFragment.handleSocket1(int, byte, java.lang.String, int, java.lang.String, java.lang.String)
I/frameRate: 2
I/whichPartOfFrame: 1
I/s_frame_serial: 42
I/frame_packet_body: 10240
Simple, problem solved: surface dimensions, width and height, have to be multiples of 16 under Android 10. The earlier versions of Android tested against don't appear to be worried by this constraint.
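A minimal sketch of the rounding (plain integer arithmetic, shown here in C; the same expression can be dropped into the Java code above before calling MediaFormat.createVideoFormat()):

/* Hypothetical helper: round a dimension up to the next multiple of 16. */
int align16(int x) {
    return (x + 15) & ~15; /* e.g. the 536 and 437 from the log -> 544 and 448 */
}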

Can't get image orientation from EXIF

Currently, I'm writing a function for our company's micro-library in Kotlin that reads the bytes of an image and returns its orientation in degrees.
I know that in API 24 we have ExifInterface and the ability to instantiate it from an InputStream, but the problem is that we need to support API 21, which doesn't have such a constructor.
The byte array that is passed to the getOrientation function always looks like this:
-1, -40, -1, -32, 0, 16, 74, 70, 73, 70, 0, 1, 1, 0, 0, 72, 0, 72, 0, 0, -1, -31, 8, 82, 69, 120, 105, 102, 0, 0, 77, 77, 0, 42, 0, 0, 0, 8, 0, 12, 1, 15, 0, 2, 0, 0, 0, 6, 0, 0, 0, -98, 1, 16, 0, 2, 0, 0, 0, 9, 0, 0, 0, -92, 1, 18, 0, 3, 0, 0, 0, 1, 0, 6, 0, 0, 1, 26, 0, 5, 0, 0, 0, 1, 0, 0, 0, -82, 1, 27, 0, 5, 0, 0, 0, 1, 0, 0, 0, -74, 1, 40, 0 and so on
It looks shifted, and this is the reason why I add 256 to each byte in the first line.
Here's the code I'm stuck on for now:
object Exif {
fun getOrientation(_bytes: ByteArray): Int {
val bytes = _bytes.map { b -> b.toInt() + 256 }
if (bytes[0] != 0xff && bytes[1] != 0xd8) {
return 0
}
val length = bytes.size
var offset = 2
while (offset < length) {
// TODO: extract all operations like the following
// into separate function
val marker = (bytes[offset] shl 8) or bytes[offset + 1]
offset += 2
if (marker == 0xffe1) {
offset += 2
val exifStr = (bytes[offset] shl 24) or (bytes[offset + 1] shl 16) or (bytes[offset + 2] shl 8) or bytes[offset + 3]
if (exifStr != 0x45786966) {
return 0
}
offset += 6
val little = (bytes[offset] shl 8) or bytes[offset + 1] == 0x4949
offset += 4
val inc = (bytes[offset] shl 24) or (bytes[offset + 1] shl 16) or (bytes[offset + 2] shl 8) or bytes[offset + 3]
offset += if (little) inc.reverseBytes() else inc
val tagsWOEndian = (bytes[offset] shl 8) or bytes[offset + 1]
val tags = if (little) tagsWOEndian.reverseBytes() else tagsWOEndian
offset += 2
for (idx in 0..tags) {
val off = offset + idx * 12
val orientWOEndian = (bytes[off] shl 8) or bytes[off + 1]
val orient = if (little) orientWOEndian.reverseBytes() else orientWOEndian
if (orient == 0x0112) {
when ((bytes[off + 8] shl 8) or bytes[off + 8 + 1]) {
1 -> return 0
3 -> return 180
6 -> return 90
8 -> return 270
}
}
}
} else if (marker and 0xff00 != 0xff00) {
break
} else {
offset += (bytes[offset] shl 8) or bytes[offset + 1]
}
}
return 0
}
}
fun Int.reverseBytes(): Int {
val v0 = ((this ushr 0) and 0xFF)
val v1 = ((this ushr 8) and 0xFF)
val v2 = ((this ushr 16) and 0xFF)
val v3 = ((this ushr 24) and 0xFF)
return (v0 shl 24) or (v1 shl 16) or (v2 shl 8) or (v3 shl 0)
}
Solved!
I ended up with the following solution:
fun getUint16(bytes: List<Int>, offset: Int, littleEndian: Boolean): Int {
val value = (bytes[offset] shl 8) or bytes[offset + 1]
return if (littleEndian) value.reverseBytes() else value
}
fun getUint32(bytes: List<Int>, offset: Int, littleEndian: Boolean): Int {
val value = (bytes[offset] shl 24) or (bytes[offset + 1] shl 16) or (bytes[offset + 2] shl 8) or bytes[offset + 3]
return if (littleEndian) value.reverseBytes() else value
}
fun Int.reverseBytes(): Int {
val v0 = ((this ushr 0) and 0xFF)
val v1 = ((this ushr 8) and 0xFF)
val v2 = ((this ushr 16) and 0xFF)
val v3 = ((this ushr 24) and 0xFF)
return (v0 shl 24) or (v1 shl 16) or (v2 shl 8) or (v3 shl 0)
}
object Exif {
fun getRotationDegrees(_bytes: ByteArray): Int {
val bytes = _bytes.take(64 * 1024).map { b -> b.toInt() and 0xff }
if (getUint16(bytes, 0, false) != 0xffd8) {
return 0
}
val length = bytes.size
var offset = 2
while (offset < length) {
val marker = getUint16(bytes, offset, false)
offset += 2
if (marker == 0xffe1) {
if (getUint32(bytes, offset + 2, false) != 0x45786966) {
return 0
}
offset += 2
val little = getUint16(bytes, offset + 6, false) == 0x4949
offset += 6
offset += getUint32(bytes, offset + 4, little)
val tags = getUint16(bytes, offset, little)
offset += 2
for (idx in 0..tags) {
if (getUint16(bytes, offset + (idx * 12), little) == 0x0112) {
when (getUint16(bytes, offset + (idx * 12) + 8, little)) {
1 -> return 0
3 -> return 180
6 -> return 90
8 -> return 270
}
}
}
} else if (marker and 0xff00 != 0xff00) {
break
} else {
offset += getUint16(bytes, offset, false)
}
}
return 0
}
}
Changes in comparison to the code posted in the question:
The transformation between the signed _bytes and the unsigned bytes was performed incorrectly. Now I take the first 65536 bytes, then transform them to unsigned by applying a bitwise AND to each byte instead of simply adding 256 (see the sketch after this list).
All operations for reading UInt16 and UInt32 values from the byte array were moved into separate functions.
Fixed the wrong offset increment inside the while loop.
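To see why masking with 0xff is right and adding 256 is not, here is a small standalone demonstration (written in C; the arithmetic is identical for Kotlin's signed Byte):

#include <stdio.h>
int main(void) {
    signed char bytes[] = { -1, -40, 74 }; /* 0xff, 0xd8, 'J' from the dump above */
    for (int i = 0; i < 3; i++) {
        int masked = bytes[i] & 0xff;  /* -1 -> 255, -40 -> 216, 74 -> 74  */
        int added  = bytes[i] + 256;   /* -1 -> 255, -40 -> 216, 74 -> 330 */
        printf("%4d -> masked %3d, +256 %3d\n", bytes[i], masked, added);
    }
    return 0;
}

Adding 256 only happens to work for negative values; for a positive byte it pushes the result out of the 0-255 range, corrupting every multi-byte value assembled from it.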

MediaMuxer writing H264 stream to mpg file

MediaMuxer has been driving me mad for two days & nights now :-(
The situation: I receive an H264-encoded 1280x720 video stream via UDP.
The H264 stream contains NALU 1 - slice - and NALU 5 - keyframe (NALU 5 is always preceded by NALU 7 - SPS - and NALU 8 - PPS).
This stream appears to be a stable 30fps with at least one NALU 5 keyframe per second. The bitrate is variable but less than 4Mbps.
MediaCodec successfully decodes the stream and renders it into a SurfaceView, so that part works well.
But now I need to save the H.264 into a local mpg file.
I set up a MediaMuxer with all MediaFormat information that I have, and feed it with the sample data from the stream.
Each sample contains one frame (NALU 1 or 5), and the first data sent to MediaMuxer is a keyframe (NALU 5). The presentation time is calculated from the frame number and the frame rate.
All involved methods are called from the same thread.
But the mpg file is never created. As you can see in the output below, the data in the ByteBuffer does start with NALU headers followed by varying sizes of data. MediaMuxer seems to "see" frames in the data, as it counts the frames.
So what is wrong here?
Minimum API is 21, and I have tested with a Samsung Galaxy S4 running stock Android 5 and a couple of devices running LineageOS Oreo and Nougat.
Here is the code to setup the MediaMuxer:
void setupMuxer(File f) throws IOException {
if (DEBUG) Log.d(TAG, "Setup Muxer: " + f.getAbsolutePath() +" can write: " + f.canWrite());
MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, decoderWidth, decoderHeight);
format.setInteger(MediaFormat.KEY_BIT_RATE, 4000000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 29);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
format.setByteBuffer("csd-0", ByteBuffer.wrap(sps)); // sps and pps have been retrieved from the stream's NAL 7/8
format.setByteBuffer("csd-1", ByteBuffer.wrap(pps));
format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 1920 * 1080);
muxer = new MediaMuxer(f.getPath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
videoTrack = muxer.addTrack(format);
muxer.start();
}
This method is called for each (complete) NALU 1 and NALU 5:
void muxFrame(ByteBuffer buf, int frame) {
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
bufferInfo.offset = buf.arrayOffset();
bufferInfo.size = buf.position() - bufferInfo.offset;
bufferInfo.flags = (buf.get(4) & 0x1f) == 5 ? MediaCodec.BUFFER_FLAG_KEY_FRAME : 0;
bufferInfo.presentationTimeUs = computePresentationTime(frame);
if (DEBUG)
Log.d(TAG, "muxFrame frame: " + frame + " size: " + bufferInfo.size + " NAL: " + (buf.get(4) & 0x1f) + " Flags: " + bufferInfo.flags + " PTS: " + bufferInfo.presentationTimeUs + " content: " + BitByteUtil.toByteString(buf.array(), buf.arrayOffset(), 8));
try {
muxer.writeSampleData(videoTrack, buf, bufferInfo);
} catch (Exception e) {
Log.w(TAG, "muxer failed", e);
} finally {
}
}
private static long computePresentationTime(int frameIndex) {
// multiply as long: frameIndex * 1000000 would overflow int after ~2147 frames (~71 s at 30 fps)
return 42 + frameIndex * 1000000L / FRAME_RATE;
}
Here is my output when MediaMuxer is stopped after it has consumed 100 NALUs:
05.651 D/VideoDecoderView: Setup Muxer: /storage/emulated/0/Pictures/test.mpg can write: true
05.656 I/MPEG4Writer: limits: 4294967295/0 bytes/us, bit rate: -1 bps and the estimated moov size 3317 bytes
06.263 D/VideoDecoderView: muxFrame frame: 2 size: 7257 NAL: 5 Flags: 1 PTS: 66708 content: 0:000 1:000 2:000 3:001 4:101 5:184 6:000 7:015
06.264 I/MPEG4Writer: setStartTimestampUs: 66708
06.264 I/MPEG4Writer: Earliest track starting time: 66708
06.308 D/VideoDecoderView: muxFrame frame: 3 size: 8998 NAL: 1 Flags: 0 PTS: 100042 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:034 7:020
06.342 D/VideoDecoderView: muxFrame frame: 4 size: 13664 NAL: 1 Flags: 0 PTS: 133375 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:066 7:020
06.375 D/VideoDecoderView: muxFrame frame: 5 size: 13674 NAL: 1 Flags: 0 PTS: 166708 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:098 7:020
06.409 D/VideoDecoderView: muxFrame frame: 6 size: 13772 NAL: 1 Flags: 0 PTS: 200042 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:130 7:020
06.483 D/VideoDecoderView: muxFrame frame: 7 size: 13707 NAL: 1 Flags: 0 PTS: 233375 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:162 7:020
06.520 D/VideoDecoderView: muxFrame frame: 8 size: 13778 NAL: 1 Flags: 0 PTS: 266708 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:194 7:020
06.555 D/VideoDecoderView: muxFrame frame: 9 size: 13743 NAL: 1 Flags: 0 PTS: 300042 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:226 7:020
06.575 D/VideoDecoderView: muxFrame frame: 10 size: 7338 NAL: 5 Flags: 1 PTS: 333375 content: 0:000 1:000 2:000 3:001 4:101 5:184 6:000 7:015
06.593 D/VideoDecoderView: muxFrame frame: 11 size: 9059 NAL: 1 Flags: 0 PTS: 366708 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:034 7:020
06.618 D/VideoDecoderView: muxFrame frame: 12 size: 13587 NAL: 1 Flags: 0 PTS: 400042 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:066 7:020
06.644 D/VideoDecoderView: muxFrame frame: 13 size: 13650 NAL: 1 Flags: 0 PTS: 433375 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:098 7:020
06.671 D/VideoDecoderView: muxFrame frame: 14 size: 13797 NAL: 1 Flags: 0 PTS: 466708 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:130 7:020
.... [snip]
09.620 D/VideoDecoderView: muxFrame frame: 97 size: 7212 NAL: 5 Flags: 1 PTS: 3233375 content: 0:000 1:000 2:000 3:001 4:101 5:184 6:000 7:015
09.661 D/VideoDecoderView: muxFrame frame: 98 size: 8814 NAL: 1 Flags: 0 PTS: 3266708 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:034 7:020
09.692 D/VideoDecoderView: muxFrame frame: 99 size: 13566 NAL: 1 Flags: 0 PTS: 3300042 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:066 7:020
09.737 D/VideoDecoderView: muxFrame frame: 100 size: 13733 NAL: 1 Flags: 0 PTS: 3333375 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:098 7:020
09.771 D/VideoDecoderView: muxFrame frame: 101 size: 13771 NAL: 1 Flags: 0 PTS: 3366708 content: 0:000 1:000 2:000 3:001 4:065 5:224 6:130 7:020
09.775 D/MPEG4Writer: Video track stopping. Stop source
09.775 I/MPEG4Writer: Received total/0-length (100/1) buffers and encoded 100 frames. - Video
09.775 D/MPEG4Writer: Video track source stopping
09.775 D/MPEG4Writer: Video track source stopped
09.775 D/MPEG4Writer: Video track stopped. Stop source
09.775 D/MPEG4Writer: Stopping writer thread
09.776 D/MPEG4Writer: 0 chunks are written in the last batch
09.779 D/MPEG4Writer: Writer thread stopped
09.780 I/MPEG4Writer: Ajust the moov start time from 66708 us -> 66708 us
09.780 D/MPEG4Writer: Video track stopping. Stop source
@greeble31: You are right. The first log entry clearly states "Pictures" and not "Videos".
I spent hours looking at this problem without noticing a simple cut&paste mistake in my preferences keys. How stupid is that!?
Note to self: coding two days & nights in a row is not heroic, just plain stupid.

Data transfer over wifi

I have created a hotspot/Wi-Fi Direct network on an Android device and connected iOS devices to this Wi-Fi.
Now I want to create an iOS app that will be able to send and receive data over Wi-Fi.
Is it possible? If yes, how can I do this?
Yes, it is possible. You can run a local server on Android; I recommend https://github.com/NanoHttpd/nanohttpd
Then you must get the IP address on the iOS device to make HTTP requests. There is a C method to get the route (gateway) IP address:
#include "IpHelper.h" // my header file
#include <stdio.h>
#include <netinet/in.h>
#include <stdlib.h>
#include <sys/sysctl.h>
#include <net/if.h>
#include <string.h>
#include <arpa/inet.h>
#if TARGET_IPHONE_SIMULATOR
#include <net/route.h>
#else
#include "route.h"
#endif
#define CTL_NET 4 /* network, see socket.h */
#if defined(BSD) || defined(__APPLE__)
#define ROUNDUP(a) \
((a) > 0 ? (1 + (((a) - 1) | (sizeof(long) - 1))) : sizeof(long))
int getdefaultgateway(in_addr_t * addr)
{
int mib[] = {CTL_NET, PF_ROUTE, 0, AF_INET,
NET_RT_FLAGS, RTF_GATEWAY};
size_t l;
char * buf, * p;
struct rt_msghdr * rt;
struct sockaddr * sa;
struct sockaddr * sa_tab[RTAX_MAX];
int i;
int r = -1;
if(sysctl(mib, sizeof(mib)/sizeof(int), 0, &l, 0, 0) < 0) {
return -1;
}
if(l>0) {
buf = malloc(l);
if(sysctl(mib, sizeof(mib)/sizeof(int), buf, &l, 0, 0) < 0) {
return -1;
}
for(p=buf; p<buf+l; p+=rt->rtm_msglen) {
rt = (struct rt_msghdr *)p;
sa = (struct sockaddr *)(rt + 1);
for(i=0; i<RTAX_MAX; i++) {
if(rt->rtm_addrs & (1 << i)) {
sa_tab[i] = sa;
sa = (struct sockaddr *)((char *)sa + ROUNDUP(sa->sa_len));
} else {
sa_tab[i] = NULL;
}
}
if( ((rt->rtm_addrs & (RTA_DST|RTA_GATEWAY)) == (RTA_DST|RTA_GATEWAY))
&& sa_tab[RTAX_DST]->sa_family == AF_INET
&& sa_tab[RTAX_GATEWAY]->sa_family == AF_INET) {
if(((struct sockaddr_in *)sa_tab[RTAX_DST])->sin_addr.s_addr == 0) {
char ifName[128];
if_indextoname(rt->rtm_index,ifName);
if(strcmp("en0",ifName)==0){
*addr = ((struct sockaddr_in *)(sa_tab[RTAX_GATEWAY]))->sin_addr.s_addr;
r = 0;
}
}
}
}
free(buf);
}
return r;
}
#endif
You also need to download and import route.h from the Apple open-source tree, here: https://opensource.apple.com/source/xnu/xnu-1456.1.26/bsd/net/route.h
In the end you are able to build the local HTTP URL: http:// + 'the IP you got before' + : + 'the port of the server you launched on Android'.
So now you are ready to make HTTP requests via Alamofire (or another library) to the local server, as sketched below.
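For example (a small sketch reusing getdefaultgateway() and the includes from the listing above; the port 8080 is an assumption - use whatever port NanoHttpd was started with on the Android side):

in_addr_t gw;
if (getdefaultgateway(&gw) == 0) {
    struct in_addr a;
    a.s_addr = gw;
    char url[64];
    /* e.g. "http://192.168.43.1:8080" when connected to an Android hotspot */
    snprintf(url, sizeof(url), "http://%s:8080", inet_ntoa(a));
}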
EDIT 1:
I found this code in one of my old projects; you can do the data transfer with CocoaAsyncSocket. It is written in Swift 3. I know that you want code written in ObjC, but I cannot find such code - sorry if I can't fully answer your question.
import CocoaAsyncSocket
import UIKit
enum TAG: Int {
case header = 1
case body = 2
}
class ViewController: UIViewController, NetServiceDelegate, NetServiceBrowserDelegate, GCDAsyncSocketDelegate {
var service : NetService!
var socket : GCDAsyncSocket!
var newSocket: GCDAsyncSocket!
var serviceBrowser = NetServiceBrowser()
var adresses: [Data]?
override func viewDidLoad() {
super.viewDidLoad()
startTalking()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
func parseHeader(data: NSData) -> UInt {
var out: UInt = 0
data.getBytes(&out, length: MemoryLayout<UInt>.size)
return out
}
func handleResponseBody(data: NSData) {
if let message = NSString(data: data as Data, encoding: String.Encoding.utf8.rawValue) {
print(message)
}
}
func sendText() {
if let data = "aaaaa".data(using: String.Encoding.utf8) {
var header = data.count
let headerData = NSData(bytes: &header, length: MemoryLayout<UInt>.size)
self.socket.write(headerData as Data, withTimeout: -1.0, tag: TAG.header.rawValue)
self.socket.write(data, withTimeout: -1.0, tag: TAG.body.rawValue)
}
}
func startTalking () {
self.socket = GCDAsyncSocket(delegate: self, delegateQueue: DispatchQueue.main)
self.socket.isIPv4PreferredOverIPv6 = false
do {
try self.socket.connect(toHost: "localhost", onPort: UInt16(8080), withTimeout: -1)
} catch let err {
print(err)
}
}
/*
* Delegates of NSNetService
**/
func netServiceBrowser(_ browser: NetServiceBrowser, didFindDomain domainString: String, moreComing: Bool) {
print(111)
}
func netService(_ sender: NetService, didNotResolve errorDict: [String : NSNumber]) {
print("aaaaaaa")
}
func netServiceDidPublish(_ sender: NetService) {
print("Bonjour service published. domain: \(sender.domain), type: \(sender.type), name: \(sender.name), port: \(sender.port)")
}
func netService(_ sender: NetService, didNotPublish errorDict: [String : NSNumber]) {
print("Unable to create socket. domain: \(sender.domain), type: \(sender.type), name: \(sender.name), port: \(sender.port), Error \(errorDict)")
}
func netService(_ sender: NetService, didAcceptConnectionWith inputStream: InputStream, outputStream: OutputStream) {
print("4")
}
/*
* END OF Delegates
**/
/*
* Delegates of GCDAsyncSokcket
**/
func socketDidSecure(_ sock: GCDAsyncSocket) {
print("3")
}
func socket(_ sock: GCDAsyncSocket, didConnectToHost host: String, port: UInt16) {
print("connected")
self.socket.readData(toLength: UInt(MemoryLayout<UInt64>.size), withTimeout: -1, tag: 0)
}
func socket(_ sock: GCDAsyncSocket, didAcceptNewSocket newSocket: GCDAsyncSocket) {
print("Did accept new socket")
self.newSocket = newSocket
self.newSocket.readData(toLength: UInt(MemoryLayout<UInt64>.size), withTimeout: -1.0, tag: 0)
print("Connected to " + self.service.name)
}
func socketDidDisconnect(_ sock: GCDAsyncSocket, withError err: Error?) {
print("Socket disconnected: error \(err)")
if self.socket == socket {
// print("Disconnected from " + self.service.name)
}
}
func socket(_ sock: GCDAsyncSocket, didRead data: Data, withTag tag: Int) {
if data.count == MemoryLayout<UInt>.size {
let bodyLength: UInt = self.parseHeader(data: data as NSData)
sock.readData(toLength: bodyLength, withTimeout: -1, tag: TAG.body.rawValue)
} else {
self.handleResponseBody(data: data as NSData)
sock.readData(toLength: UInt(MemoryLayout<UInt>.size), withTimeout: -1, tag: TAG.header.rawValue)
}
}
func socket(_ sock: GCDAsyncSocket, didWriteDataWithTag tag: Int) {
print("Write data with tag of \(tag)")
}
func socket(_ sock: GCDAsyncSocket, didReceive trust: SecTrust, completionHandler: @escaping (Bool) -> Void) {
completionHandler(true)
}
/*
* END OF Delegates
**/
}
EDIT 2:
To parse the IP into a human-readable string, here is an ObjC method for it:
+ (NSString *)getGatewayIP {
NSString *ipString = nil;
struct in_addr gatewayaddr;
int r = getdefaultgateway(&(gatewayaddr.s_addr));
if(r >= 0) {
ipString = [NSString stringWithFormat: @"%s", inet_ntoa(gatewayaddr)];
NSLog(@"default gateway : %@", ipString);
} else {
NSLog(@"getdefaultgateway() failed");
}
return ipString;
}

QtPositioning error in Qt 5.3 RC on Android

I’m experimenting with Qt Positioning in Qt 5.3 RC on Android.
This is a sample of my code, where I create the position and satellite info sources:
QGeoPositionInfoSource *source = QGeoPositionInfoSource::createDefaultSource(this);
if (source) {
QStringList posSourcesList = QGeoPositionInfoSource::availableSources();
qDebug() << "Position sources count: " << posSourcesList.count();
foreach (const QString &src, posSourcesList) {
qDebug() << "pos source in list: " << src;
}
source->startUpdates();
connect(source, SIGNAL(positionUpdated(QGeoPositionInfo)),
this, SLOT(positionUpdated(QGeoPositionInfo)));
}
//----------------------------------------------------------------------------------
QGeoSatelliteInfoSource *satelliteSource = QGeoSatelliteInfoSource::createDefaultSource(this);
if(satelliteSource)
{
QStringList sourcesList = QGeoSatelliteInfoSource::availableSources();
qDebug() << "Satellites sources count: " << sourcesList.count();
foreach (const QString &src, sourcesList) {
qDebug() << "source in list: " << src;
}
satelliteSource->startUpdates();
connect(satelliteSource, SIGNAL(satellitesInViewUpdated(QList<QGeoSatelliteInfo>)),
this, SLOT(satellitesInViewUpdated(QList<QGeoSatelliteInfo>)));
}
I get a segmentation fault with the following output:
D/Qt (16403): ../test_GPS/gpshandler.cpp:14 (GPSHandler::GPSHandler(QObject*)): Position sources count: 1
D/Qt (16403): ../test_GPS/gpshandler.cpp:16 (GPSHandler::GPSHandler(QObject*)): pos source in list: "android"
D/QtPositioning(16403): Regular updates using GPS
D/QtPositioning(16403): Regular updates using network
D/Qt (16403): ../test_GPS/gpshandler.cpp:37 (GPSHandler::GPSHandler(QObject*)): Satellites sources count: 1
D/Qt (16403): ../test_GPS/gpshandler.cpp:39 (GPSHandler::GPSHandler(QObject*)): source in list: "android"
F/Qt (16403): jnipositioning.cpp:496 (void satelliteUpdated(JNIEnv*, jobject, jobjectArray, jint, jboolean)): satelliteUpdated: source == 0
Here is the backtrace:
0 ?? /home/qtproj/build-test_GPS-Android_for_armeabi_v7a_GCC_4_8_Qt_5_3_0-Debug/libc.so 0x4010a8e8
1 abort /home/qtproj/build-test_GPS-Android_for_armeabi_v7a_GCC_4_8_Qt_5_3_0-Debug/libc.so 0x40108948
2 QMessageLogger::fatal(char const*, ...) const /opt/Qt-5.3.0_rc/5.3/android_armv7/lib/libQt5Core.so 0x75357ff6
3 satelliteUpdated(_JNIEnv*, _jobject*, _jobjectArray*, int, unsigned char) /opt/Qt-5.3.0_rc/5.3/android_armv7/plugins/position/libqtposition_android.so 0x751a8c08
4 ?? 0x40b3a910
5 ?? 0x40b3a910
When I create only QGeoPositionInfoSource or only QGeoSatelliteInfoSource, this error doesn't occur.
Any suggestions?
The same error is still present in 5.3.1. I don't have an explanation to offer, but the workaround here is to change the order of initialization of the sources:
QGeoSatelliteInfoSource
QGeoPositionInfoSource
So in your code, that would be:
QGeoSatelliteInfoSource *satelliteSource = QGeoSatelliteInfoSource::createDefaultSource(this);
if(satelliteSource) {
QStringList sourcesList = QGeoSatelliteInfoSource::availableSources();
qDebug() << "Satellites sources count: " << sourcesList.count();
foreach (const QString &src, sourcesList) {
qDebug() << "source in list: " << src;
}
satelliteSource->startUpdates();
connect(satelliteSource, SIGNAL(satellitesInViewUpdated(QList<QGeoSatelliteInfo>)),
this, SLOT(satellitesInViewUpdated(QList<QGeoSatelliteInfo>)));
}
QGeoPositionInfoSource *source = QGeoPositionInfoSource::createDefaultSource(this);
if (source) {
QStringList posSourcesList = QGeoPositionInfoSource::availableSources();
qDebug() << "Position sources count: " << posSourcesList.count();
foreach (const QString &src, posSourcesList) {
qDebug() << "pos source in list: " << src;
}
source->startUpdates();
connect(source, SIGNAL(positionUpdated(QGeoPositionInfo)),
this, SLOT(positionUpdated(QGeoPositionInfo)));
}
