Android UDP glitches cause VoIP stutters

I am debugging RTC video stutter issues on Android, and I have tried several different devices. To simplify the question: I just keep sending UDP packets at roughly 10 ms intervals from a Mac and receiving them on Android over a good Wi-Fi connection. I see big jitters (over 200 ms) almost every minute, sometimes over 600 ms, especially when I open and close a task manager. The problem does not reproduce when testing over localhost. Can this be fixed?
// Receive loop: timestamp each packet with the kernel receive time
// (SIOCGSTAMP) and log whenever the inter-packet gap exceeds 200 ms.
static int64_t pre_rev_ms = 0;
char buffer[2048];
struct sockaddr_in recv_addr;
socklen_t addr_len = sizeof(recv_addr);

while (1) {
    int s = recvfrom(socket_fd, buffer, sizeof(buffer), 0,
                     (struct sockaddr *)&recv_addr, &addr_len);
    if (s > 0) {
        struct timeval tv_ioctl;
        tv_ioctl.tv_sec = 0;
        tv_ioctl.tv_usec = 0;
        int error = ioctl(socket_fd, SIOCGSTAMP, &tv_ioctl);
        if (error == 0) {
            int64_t ms = tv_ioctl.tv_sec * 1000LL + tv_ioctl.tv_usec / 1000;
            if (pre_rev_ms == 0) {
                pre_rev_ms = ms;   // first packet: no previous timestamp yet
            }
            if (ms - pre_rev_ms > 200) {
                LOGV("Udp glitches\n");
            }
            pre_rev_ms = ms;
        }
    }
}
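For reference, the sender side is not shown in the question; here is a minimal editorial sketch of a matching sender (the destination address, port, and payload are placeholders, not from the original post) that emits one small UDP datagram roughly every 10 ms:

// Hypothetical Mac-side sender sketch; IP and port are placeholders.
#include <arpa/inet.h>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <sys/socket.h>
#include <unistd.h>

int main() {
    int fd = socket(AF_INET, SOCK_DGRAM, 0);
    if (fd < 0) { perror("socket"); return 1; }

    sockaddr_in dst;
    memset(&dst, 0, sizeof(dst));
    dst.sin_family = AF_INET;
    dst.sin_port = htons(50000);                // placeholder port
    inet_aton("192.168.1.100", &dst.sin_addr);  // placeholder Android IP

    char payload[64];
    for (uint32_t seq = 0;; seq++) {
        // Carry a sequence number so the receiver can tell drops from delays.
        int n = snprintf(payload, sizeof(payload), "%u", seq);
        sendto(fd, payload, n, 0, (struct sockaddr *)&dst, sizeof(dst));
        usleep(10 * 1000);  // ~10 ms interval, as described in the question
    }
}

Logging the kernel receive timestamp (as the loop above does via SIOCGSTAMP) against these sequence numbers makes it possible to distinguish scheduling delay on the Android side from genuine network jitter.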

Related

Looking for a pattern for working with serial communication

I am developing an Android application where I must send and receive data over a serial link, a UART to be precise.
My UART handling class is centered around this function:
fun receive(expectedLength: Int, timeout: Int): ByteArray {
    // println("Should receive $expectedLength bytes within $timeout ms")
    var readBytes = 0
    var ret: Int
    var readBuffer: ByteArray = byteArrayOf()
    var startMillis: Long = System.currentTimeMillis()
    while (readBytes < expectedLength &&
           (System.currentTimeMillis() - startMillis < timeout)) {
        if (uart.inputStream.available() == 0) {
            ret = -1
            Thread.sleep(10)
        } else {
            ret = uart.inputStream.read()
        }
        if (ret >= 0) {
            readBuffer = readBuffer.plus(ret.toByte())
            readBytes++
            // Reset the timeout window after every successfully read byte.
            startMillis = System.currentTimeMillis()
        }
    }
    return readBuffer
} // receive expected length with timeout
In real life there are several issues with this function. For instance, a "message" can be shorter or longer than expectedLength (perhaps I'm waiting for an acknowledgement/confirmation worth 10 bytes but receive an error of 6 bytes, or, even worse, the other way around), or it may take slightly longer to arrive.
All this translates into a cascade of errors throughout the application, where various services can only chalk it up to communication/hardware errors. Not even retrying is safe.
Is there an alternative approach?
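One direction often suggested for this kind of problem (an editorial sketch, not from the original thread) is to make the protocol self-describing instead of guessing expectedLength, e.g. by length-prefixing every frame. A minimal C++ sketch, assuming the UART is also reachable from native code as a plain blocking file descriptor uart_fd and assuming the protocol can be changed so every frame starts with a one-byte payload length (both assumptions, not shown in the question):

// Hypothetical framing sketch; uart_fd and the 1-byte length prefix are
// assumptions for illustration, not part of the original question.
#include <cstdint>
#include <unistd.h>

// Read exactly n bytes, looping over short reads; returns 0 on success.
static int read_exact(int uart_fd, uint8_t *dst, size_t n) {
    size_t got = 0;
    while (got < n) {
        ssize_t r = read(uart_fd, dst + got, n - got);
        if (r <= 0) return -1;  // error or EOF
        got += static_cast<size_t>(r);
    }
    return 0;
}

// Read one length-prefixed frame into buf (capacity cap); returns the payload
// length, or -1 on error. The receiver no longer has to guess the length, so
// a 6-byte error frame and a 10-byte acknowledgement both parse cleanly.
static int read_frame(int uart_fd, uint8_t *buf, size_t cap) {
    uint8_t len = 0;
    if (read_exact(uart_fd, &len, 1) != 0) return -1;
    if (len > cap) return -1;   // frame too large for the caller's buffer
    if (read_exact(uart_fd, buf, len) != 0) return -1;
    return len;
}

A per-frame timeout can then be layered on top (e.g. with poll() on uart_fd) without conflating "slow" messages with "short" ones.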

Android Socket Stuck every 20 seconds

I am using a TCP socket connection in an action game. We use lockstep synchronization, and the client receives 20-40 packets per second. It works fine when the game runs on a PC, but on Android devices the socket gets stuck every 20 seconds.
Server sends 5 packets per second
I have tried Unity3D's C# socket, an Android Java socket, and an Android native C socket, in blocking and non-blocking mode, with small/large (1 byte/100 bytes) data per packet, fewer/more (5/50) packets per second, and on a single thread or the main thread, on multiple Android devices; all of them show the same issue.
PS: It seems the 20-second period is tied to the device, not to my app or connection; that is, if the last stall happens at 1:00:00, the next will happen at 1:00:20, even if we reconnect or restart the app.
Android native C code:
extern "C" JNIEXPORT int JNICALL
Java_com_example_ymoon_sockettest_MainActivity_connect(JNIEnv *env, jobject /* this */)
{
__android_log_print(ANDROID_LOG_INFO, "CSocket", "Connecting...");
int sock = socket(AF_INET, SOCK_STREAM, 0);
if (sock < 0) return -1;
struct sockaddr_in server;
memset(&server, 0, sizeof(server));
server.sin_family = AF_INET;
server.sin_port = htons(12350);
server.sin_addr.s_addr = inet_addr("192.168.3.66");
if (connect(sock, (struct sockaddr*)&server, sizeof(server)) < 0)
{
close(sock);
return -2;
}
__android_log_print(ANDROID_LOG_INFO, "CSocket", "Connected");
char buf[100];
int t = -1;
int received = 0;
while (true)
{
int count = recv(sock, &buf, 100, 0);
if (count < 1) break;
received += count;
struct timeval tv;
gettimeofday(&tv, NULL);
int m = tv.tv_sec * 1000 + tv.tv_usec / 1000;
int diff = m - t;
t = m;
std::string p = t < 0 ? "" : diff < 50 ? "-" : diff < 100 ? "-|" : diff < 150 ? "--|" :
diff < 250 ? "---|" : diff < 500 ? "----|" : diff < 1000 ? "-----|" : "------|";
__android_log_print(diff > 500 ? ANDROID_LOG_ERROR : ANDROID_LOG_INFO,
"CSocket", "%i | %s %i", received, p.c_str(), diff);
}
close(sock);
return 0;
}
Am I doing something wrong ?
It is my first time to ask a question on stackoverflow, sorry for my bad English, any help or suggestion is appreciated, thanks.
Edit: added server code. I wrote a simple TCP server to test with (Windows platform):
int main()
{
    // Winsock requires explicit startup on Windows.
    WSADATA wsaData;
    if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0) return 0;
    addrinfo conf, *add = nullptr;
    memset(&conf, 0, sizeof(conf));
    conf.ai_flags = AI_PASSIVE;
    conf.ai_socktype = SOCK_STREAM;
    conf.ai_family = AF_INET;
    if (getaddrinfo(nullptr, "12350", &conf, &add)) return 0;
    SOCKET serverSock = socket(AF_INET, SOCK_STREAM, 0);
    char opt = 1;
    if (setsockopt(serverSock, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt)) == -1 ||
        setsockopt(serverSock, IPPROTO_TCP, TCP_NODELAY, &opt, sizeof(opt)) == -1 ||
        bind(serverSock, add->ai_addr, (int)add->ai_addrlen) == -1 ||
        listen(serverSock, SOMAXCONN) == -1)
    {
        closesocket(serverSock);   // Winsock sockets are closed with closesocket()
        return 0;
    }
    freeaddrinfo(add);
    printf("Listening....\n");
    sockaddr_storage incoming_addr;
    int size = sizeof(incoming_addr);
    SOCKET clientSock = accept(serverSock, (sockaddr*)&incoming_addr, &size);
    printf("Client connected\n");
    char buf[1] = { 0 };
    int sendCount = 0;
    while (true)
    {
        time_t t = time(nullptr);
        tm *lt = localtime(&t);
        printf("%02d:%02d:%02d Send to client %i\n", lt->tm_hour, lt->tm_min, lt->tm_sec, ++sendCount);
        if (send(clientSock, buf, 1, 0) < 1) break;
        Sleep(200);   // 5 packets per second
    }
    closesocket(clientSock);
    closesocket(serverSock);
    WSACleanup();
    return 0;
}
Edit: added a Wireshark capture image:
[Wireshark screenshot taken when the stall happens]
I have tested it (the code posted here) at my home and in another Wi-Fi environment, and it worked fine. The only difference is the Wi-Fi environment, so I think our company's Wi-Fi network settings may be causing this issue.
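An editorial aside on the measurement itself: gettimeofday() reports wall-clock time, which can jump if the device adjusts its clock while the test runs; a monotonic clock is safer for interval measurement. A minimal sketch:

// Sketch: measure inter-packet gaps with a monotonic clock so NTP or
// manual clock changes cannot masquerade as periodic network stalls.
#include <cstdint>
#include <ctime>

static int64_t now_ms() {
    timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);  // immune to wall-clock jumps
    return static_cast<int64_t>(ts.tv_sec) * 1000 + ts.tv_nsec / 1000000;
}

If the 20-second stalls still show up against a monotonic clock and in the Wireshark capture, that points at the network path (for example Wi-Fi power management or periodic scanning on the access point or client) rather than at the timing code.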

ASensorEventQueue_hasEvents returns -1 and ALooper_pollAll returns -4 on basic usage

I am trying to write an Android app which uses the Java SDK for the GUI and the NDK to draw things and read the linear acceleration sensor (based on the accelerometer, but excluding gravity). Basically, I want to know when the device is being shaken while my app is active.
I am using ASensorEventQueue_hasEvents and ALooper_pollAll in a fairly basic manner, I think, but they return -1 and -4 respectively. These are errors, but I have no clue what they mean or what I could be doing wrong. Any help would be welcome, thanks! Here are some code snippets:
To start listening for data from the linear acceleration sensor, I did:
//sensorManager = ASensorManager_getInstance();
sensorManager = ASensorManager_getInstanceForPackage(mypackage);
sensor = ASensorManager_getDefaultSensor(sensorManager, ASENSOR_TYPE_LINEAR_ACCELERATION);
if (sensor == nullptr) {
    return env->NewStringUTF("Linear accelerometer is not present");
}
looper = ALooper_prepare(0);
if (looper == nullptr) {
    return env->NewStringUTF("Could not get looper.");
}
eventQueue = ASensorManager_createEventQueue(sensorManager, looper,
                                             EVENT_TYPE_LINEAR_ACCELEROMETER,
                                             nullptr, nullptr);
if (eventQueue == nullptr) {
    return env->NewStringUTF("Could not create event queue for sensor");
}
//ASensorEventQueue_disableSensor(eventQueue, sensor);
int minDelay = ASensor_getMinDelay(sensor);
minDelay = std::max(minDelay, MAX_EVENT_REPORT_TIME);
int rc = ASensorEventQueue_registerSensor(eventQueue, sensor, minDelay, minDelay);
rc = ASensorEventQueue_setEventRate(eventQueue, sensor, minDelay);
return env->NewStringUTF("");
And my drawing loop looks like:
int rc = ASensorEventQueue_enableSensor(eventQueue, sensor);
std::vector<std::string> results;
while (!stopDrawing) {
    int event = 0;
    void *data = nullptr;
    int rc;
    rc = ASensorEventQueue_hasEvents(eventQueue); // testing
    rc = ALooper_pollAll(wait_timeout, nullptr, &event, &data);
    if (rc == ALOOPER_POLL_TIMEOUT) {
        waiting += wait_timeout;
        if (waiting > EVENT_LOOP_TIMEOUT) {
            // If no data has been received, we have been waiting for a while,
            // so assume the shaking has stopped
            ASensorEventQueue_disableSensor(eventQueue, sensor);
        }
    } else if (rc == ALOOPER_POLL_ERROR) {
        return env->NewStringUTF("A looper error occurred");
    } else if (rc >= 0 && event == EVENT_TYPE_LINEAR_ACCELEROMETER) {
        std::vector<ASensorEvent> events;
        events.resize(100);
        while (ASensorEventQueue_hasEvents(eventQueue) == 1) {
            ssize_t nbrEvents = ASensorEventQueue_getEvents(eventQueue, events.data(), events.size());
            if (nbrEvents < 0) {
                // an error has occurred
                return env->NewStringUTF("");
            }
            float x = 0;
            float y = 0;
            float z = 0;
            for (int i = 0; i < nbrEvents; i++) {
                x += events[i].acceleration.x;
                y += events[i].acceleration.y;
                z += events[i].acceleration.z;
            }
            graphics.updateAcceleration(x, y, z);
        }
    }
    graphics.updateUniformBuffer();
    graphics.drawFrame();
}
Note: the call to ASensorEventQueue_hasEvents right before ALooper_pollAll was only there so I could see what it returns, since the ALooper_pollAll call always returned an error. Running under the debugger, I see that hasEvents returns -1 and pollAll returns -4. This happens on SDK versions 24 and 26 (I haven't tried 27, and Vulkan is not supported below 24). Any ideas why?
I had the precise symptoms described here, and for me it turned out that I was preparing the looper on a different thread than the one I was polling on.
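A minimal editorial sketch of that fix (it reuses names from the question; the ident value is arbitrary): the looper must be prepared, the queue created, and the polling done all on the same thread, because ALooper_prepare attaches the looper to the calling thread:

// Sketch: keep prepare/create/poll on ONE thread. kIdent is an arbitrary
// identifier that ALooper_pollAll echoes back for this event queue.
void sensorThread(ASensorManager *sensorManager, const ASensor *sensor) {
    ALooper *looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);

    const int kIdent = 1;
    ASensorEventQueue *queue = ASensorManager_createEventQueue(
            sensorManager, looper, kIdent, nullptr, nullptr);
    ASensorEventQueue_enableSensor(queue, sensor);

    for (;;) {
        int ident = ALooper_pollAll(-1 /* block indefinitely */, nullptr, nullptr, nullptr);
        if (ident == kIdent) {
            ASensorEvent e;
            while (ASensorEventQueue_getEvents(queue, &e, 1) > 0) {
                // consume e.acceleration.x / .y / .z here
            }
        }
    }
}

Polling from a thread other than the one that prepared the looper is consistent with pollAll returning -4 (ALOOPER_POLL_ERROR) and hasEvents returning -1.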

libusb - How to select an alternate setting of a USB sound card for stereo recording?

I have a USB sound card with the following setup; it allows stereo recording at 48000 Hz, 2 channels, 16 bit, so I'm trying to set it up that way:
UsbConfiguration[mId=1,mName=null, mAttributes=160, mMaxPower=50,
mInterfaces=[
UsbInterface[mId=0,mAlternateSetting=0,mName=null,mClass=1,mSubclass=1,mProtocol=0,
mEndpoints=[]
UsbInterface[mId=1,mAlternateSetting=0,mName=null,mClass=1,mSubclass=2,mProtocol=0,
mEndpoints=[]
UsbInterface[mId=1,mAlternateSetting=1,mName=null,mClass=1,mSubclass=2,mProtocol=0,
mEndpoints=[UsbEndpoint[mAddress=4,mAttributes=9,mMaxPacketSize=384,mInterval=1]]
UsbInterface[mId=1,mAlternateSetting=2,mName=null,mClass=1,mSubclass=2,mProtocol=0,
mEndpoints=[UsbEndpoint[mAddress=4,mAttributes=9,mMaxPacketSize=576,mInterval=1]]
UsbInterface[mId=1,mAlternateSetting=3,mName=null,mClass=1,mSubclass=2,mProtocol=0,
mEndpoints=[UsbEndpoint[mAddress=4,mAttributes=9,mMaxPacketSize=192,mInterval=1]]
UsbInterface[mId=2,mAlternateSetting=0,mName=null,mClass=1,mSubclass=2,mProtocol=0,
mEndpoints=[]
UsbInterface[mId=2,mAlternateSetting=1,mName=null,mClass=1,mSubclass=2,mProtocol=0,
mEndpoints=[UsbEndpoint[mAddress=138,mAttributes=5,mMaxPacketSize=196,mInterval=1]]
UsbInterface[mId=2,mAlternateSetting=2,mName=null,mClass=1,mSubclass=2,mProtocol=0,
mEndpoints=[UsbEndpoint[mAddress=138,mAttributes=5,mMaxPacketSize=294,mInterval=1]]
UsbInterface[mId=2,mAlternateSetting=3,mName=null,mClass=1,mSubclass=2,mProtocol=0,
mEndpoints=[UsbEndpoint[mAddress=138,mAttributes=5,mMaxPacketSize=388,mInterval=1]]
UsbInterface[mId=2,mAlternateSetting=4,mName=null,mClass=1,mSubclass=2,mProtocol=0,
mEndpoints=[UsbEndpoint[mAddress=138,mAttributes=5,mMaxPacketSize=582,mInterval=1]]
UsbInterface[mId=3,mAlternateSetting=0,mName=null,mClass=3,mSubclass=0,mProtocol=0,
mEndpoints=[UsbEndpoint[mAddress=130,mAttributes=3,mMaxPacketSize=16,mInterval=16]
]
]
I'm trying to select and use the incoming interface with the alternate setting for stereo input, and to do the same with the interface for the stereo output.
For the input, I've tried to do it natively with the following code:
int AndroidUSBAudioIO_start(int sampleRate, int bufferSize, void *callback, void *clientData) {
    int rc = -1;
    if (androidUSBAudioIO == NULL) {
        androidUSBAudioIO = (AndroidUSBAudioIOInternals *) malloc(sizeof(AndroidUSBAudioIOInternals));
    }
    androidUSBAudioIO->samplerate = sampleRate;
    androidUSBAudioIO->buffersize = bufferSize;
    androidUSBAudioIO->callback = (audioUSBProcessingCallback *) callback;
    androidUSBAudioIO->clientData = clientData;
    androidUSBAudioIO->maruStream = 0;
    androidUSBAudioIO->isSetup = 0;
    androidUSBAudioIO->isPlaying = 0;
    rc = libusb_init(NULL);
    if (rc < 0) {
        // error not handled
    }
    androidUSBAudioIO->deviceHandle = libusb_open_device_with_vid_pid(NULL, VID, PID);
    if (!androidUSBAudioIO->deviceHandle) {
        rc = -1;
        goto out;
    }
    rc = libusb_reset_device(androidUSBAudioIO->deviceHandle);
    if (rc < 0) {
        goto out;
    }
    rc = libusb_set_configuration(androidUSBAudioIO->deviceHandle, 1);
    if (rc < 0) {
        // error not handled
    }
    rc = libusb_kernel_driver_active(androidUSBAudioIO->deviceHandle, IFACE_NUM);
    if (rc == 1) {
        rc = libusb_detach_kernel_driver(androidUSBAudioIO->deviceHandle, IFACE_NUM);
        if (rc < 0) {
            goto out;
        }
    }
    rc = libusb_claim_interface(androidUSBAudioIO->deviceHandle, IFACE_NUM);
    if (rc < 0) {
        goto out;
    }
    rc = libusb_set_interface_alt_setting(androidUSBAudioIO->deviceHandle, 1, 2);
    if (rc < 0) {
        printf("libusb_set_interface_alt_setting: %s.\n", libusb_error_name(rc));
        goto out;
    }
    ...
I'm getting the following error when setting the alternate interface:
Fatal signal 11 (SIGSEGV) at 0x0000001d (code=1), thread 10303
I also tried to do it from Java with the following code, upon receiving the permission to use the device:
UsbDeviceConnection mUsbDevConn = mUsbManager.openDevice(mAudioDevice);
int mReqType = 0x01; // standard request, recipient = interface
int mRequest = 0x0B; // SET_INTERFACE USB spec constant
int mValue = 0x02;   // alternate setting
int mIndex = 0x01;   // interface number
byte[] mBuffer = null;
int mLength = 0;
int mTimout = 1000;
mUsbDevConn.controlTransfer(UsbConstants.USB_DIR_OUT | mReqType, mRequest, mValue, mIndex, mBuffer, mLength, mTimout);
I'm getting the following error:
Error (status 6: **UNKNOWN**)
What am I missing?
I think it would be highly unusual for the libusb_set_interface_alt_setting call itself to cause the SIGSEGV. I would expect that either a prior call causes it, or the SIGSEGV is an indirect effect of the call: changing the alternate setting effectively starts the transfer of audio data, and if the buffers, other data structures, or the callback are not set up correctly, a SIGSEGV may result.
In your situation, I would put more debug messages in the code, including in the library and your callback to try to narrow down the last thing before the crash.
If the code was working for a "mono" device, have a look at what has changed in the move to "Stereo". Perhaps the data-packet size (buffers) needs to be larger.
As far as the Java version is concerned, error 6 may be related to the fact that you don't seem to be detaching any kernel drivers or claiming the interface before trying to change the alternate setting.
In the past I found it necessary to detach kernel drivers from each and every interface including HID interfaces to free up the allocated bus bandwidth before starting the audio.
Finally, if the free version of usbEffects (an Android app) works with this device, you can connect adb to the phone via Wi-Fi and run the app with the device connected to see the debug messages, which will tell you whether the requestType, request, etc. parameters are correct for this hardware.
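To illustrate the ordering this answer describes, here is an editorial sketch (the interface and alt-setting numbers are taken from the descriptor dump above, but treat them as placeholders for your device): detach kernel drivers from every interface first, then claim the audio-streaming interface, and only then select its alternate setting:

// Editorial sketch of the suggested order; numbers are placeholders.
// Detach kernel drivers from ALL interfaces (including HID) first, then
// claim the streaming interface, then select its alternate setting.
static int select_stereo_input(libusb_device_handle *h) {
    const int kNumInterfaces = 4;  // interfaces 0..3 in the dump above
    const int kStreamIface = 1;    // audio-streaming interface (mId=1)
    const int kAltSetting = 2;     // alt setting with the larger endpoint

    for (int i = 0; i < kNumInterfaces; i++) {
        if (libusb_kernel_driver_active(h, i) == 1) {
            int rc = libusb_detach_kernel_driver(h, i);
            if (rc < 0) return rc;
        }
    }
    int rc = libusb_claim_interface(h, kStreamIface);
    if (rc < 0) return rc;
    // Selecting the alt setting is what actually starts isochronous audio
    // bandwidth allocation, so buffers/callbacks must be ready before this.
    return libusb_set_interface_alt_setting(h, kStreamIface, kAltSetting);
}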

Android ad-hoc UDP broadcast hangs on send (NDK)

I'm trying to send a broadcast message in my ad-hoc network (192.168.2.x) using native C code.
But the app hangs on sendto(). By the way, the hang is not caused by the while loop that wraps it; as I can see in the debugger, it only calls sendto() once and then hangs.
Is there something wrong with my code?
int broadcast(char* msg){
    int bcast_sock;
    struct sockaddr_in their_addr; // connector's address information
    if((bcast_sock = socket(AF_INET, SOCK_DGRAM, 0)) == -1)
    {
        LOGE("ERROR BROADCASTING, socket wasn't created");
    }
    int broadcastEnable = 1;
    int ret = setsockopt(bcast_sock, SOL_SOCKET, SO_BROADCAST, &broadcastEnable, sizeof(broadcastEnable));
    if(ret < 0)
    {
        LOGE("ERROR BROADCASTING, couldn't set broadcast enable through socket options");
    }
    their_addr.sin_family = AF_INET;              // host byte order
    their_addr.sin_port = htons(BROADCAST_PORT);  // short, network byte order
    their_addr.sin_addr.s_addr = htonl(INADDR_BROADCAST);
    memset(&(their_addr.sin_zero), '\0', 8);      // zero the rest of the struct
    // Note: this retry loop spins at full speed while sendto() keeps failing.
    while(-1 == (int)sendto(bcast_sock, msg, strlen(msg), 0,
                            (struct sockaddr *)&their_addr, sizeof(struct sockaddr)));
    return 0;
}
A couple of notes:
These are of course rooted phones.
If I broadcast from Java, it works fine; I'm able to receive the broadcast, i.e.:
datagramSocket.setBroadcast(true);
int receiverPort = 4949;
sendPacket = new DatagramPacket(data, data.length, IPAddress, receiverPort);
Non-broadcast messages are also being sent and received fine using native code.
The CPU usage becomes ~95%.
I've also tried simply sending to 192.168.2.255.
Any ideas?
I solved it by changing the address conversion from:
their_addr.sin_addr.s_addr = htonl(INADDR_BROADCAST);
to
inet_aton(IP, &their_addr.sin_addr);
where IP = "192.168.2.1".
Thanks for the help, epx.
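Putting that fix together, here is an editorial sketch of the send path (the port is an assumed placeholder; the answer above used "192.168.2.1", while 192.168.2.255 would be the usual subnet-directed broadcast address for 192.168.2.x):

// Sketch of the fixed send path; address and port are illustrative.
static int broadcast_fixed(int bcast_sock, const char *msg) {
    sockaddr_in their_addr;
    memset(&their_addr, 0, sizeof(their_addr));   // zero the whole struct
    their_addr.sin_family = AF_INET;
    their_addr.sin_port = htons(4949);            // assumed broadcast port
    // Subnet-directed broadcast instead of the limited 255.255.255.255,
    // which is what the accepted fix above replaced.
    if (inet_aton("192.168.2.255", &their_addr.sin_addr) == 0) return -1;

    ssize_t sent = sendto(bcast_sock, msg, strlen(msg), 0,
                          (struct sockaddr *)&their_addr, sizeof(their_addr));
    return sent < 0 ? -1 : 0;  // surface the error instead of busy-retrying
}

Note that the original while(-1 == sendto(...)); retry loop would spin at full speed on persistent failure, which could explain the ~95% CPU usage observed.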
