Android ConnectionService: report outgoing call without showing native call UI?

I have developed a voice-calling app with the Android ConnectionService and Telecom framework.
I can show the native call screen when an incoming call notification arrives from Firebase, and everything works fine.
When I start an outgoing call, I just want to report the call to the ConnectionService so that, if a GSM call comes in, the caller is reported busy. With CallKit, reporting an outgoing call does not bring up the native CallKit screen over my app, but Android shows the native call UI.
Is it possible to keep the native screen hidden for outgoing calls?
My function to report an outgoing call is below:
public void startCall(String number, String uuidString) {
    UUID uuid = null;
    if (uuidString != null) {
        uuid = UUID.fromString(uuidString);
    }
    // TODO: allow a name to be passed in as well
    Log.d(TAG, "startCall number: " + number);
    Bundle extras = new Bundle();
    Uri uri = Uri.fromParts(PhoneAccount.SCHEME_TEL, number, null);
    Bundle callExtras = new Bundle();
    callExtras.putString(Constants.EXTRA_CALL_NUMBER, number);
    if (uuid != null) {
        callExtras.putString(Constants.EXTRA_CALL_UUID, uuid.toString());
    }
    extras.putParcelable(TelecomManager.EXTRA_PHONE_ACCOUNT_HANDLE, voipUtilties.handle);
    extras.putParcelable(TelecomManager.EXTRA_OUTGOING_CALL_EXTRAS, callExtras);
    if (ActivityCompat.checkSelfPermission(Application.context, Manifest.permission.CALL_PHONE) != PackageManager.PERMISSION_GRANTED) {
        Log.d("VOIP_pLuGIN", "No permission");
        return;
    } else {
        voipUtilties.telecomManager.placeCall(uri, extras);
    }
}
And the overridden onCreateOutgoingConnection in the Service class is like below.
@Override
public Connection onCreateOutgoingConnection(PhoneAccountHandle connectionManagerPhoneAccount, ConnectionRequest request) {
    Bundle extras = request.getExtras();
    Connection outgoingCallConnection = null;
    String number = request.getAddress().getSchemeSpecificPart();
    String extrasNumber = extras.getString(EXTRA_CALL_NUMBER);
    String displayName = extras.getString(EXTRA_CALLER_NAME);
    boolean isForeground = VoipConnectionService.isRunning(this.getApplicationContext());
    Log.d(TAG, "makeOutgoingCall:, number: " + number + ", displayName:" + displayName);
    // Wake up the application if needed
    if (!isForeground) {
        Log.d(TAG, "onCreateOutgoingConnection: Waking up application");
        //TODO:
    }
    outgoingCallConnection = createConnection(request);
    outgoingCallConnection.setDialing();
    outgoingCallConnection.setAudioModeIsVoip(true);
    outgoingCallConnection.setCallerDisplayName(displayName, TelecomManager.PRESENTATION_ALLOWED);
    // Weirdly, on some Samsung phones (A50, S9...) using `setInitialized` will not display the native UI
    // when making a call from the native Phone application. The call is still displayed correctly without it.
    if (!Build.MANUFACTURER.equalsIgnoreCase("Samsung")) {
        outgoingCallConnection.setInitialized();
    }
    Log.d(TAG, "onCreateOutgoingConnection: calling");
    final String uuid = outgoingCallConnection.getExtras().getString(EXTRA_CALL_UUID);
    setAllOthersOnHold(uuid);
    final String address = outgoingCallConnection.getExtras().getString(EXTRA_CALL_NUMBER);
    Log.d(TAG, "Created call's uuid: " + uuid);
    return outgoingCallConnection;
}
Any idea will be appreciated.

I was using this package for my project. Changing the PhoneAccount capability in VoipUtilities.kt to CAPABILITY_SELF_MANAGED instead of CAPABILITY_CALL_PROVIDER, and adding the code chunk below to the createConnection function in VoipConnectionService.java, solved my problem.
if ((phoneAccount.getCapabilities() & PhoneAccount.CAPABILITY_SELF_MANAGED) == PhoneAccount.CAPABILITY_SELF_MANAGED) {
    Log.d(TAG, "[VoiceConnectionService] PhoneAccount is SELF_MANAGED, so connection will be too");
    connection.setConnectionProperties(Connection.PROPERTY_SELF_MANAGED);
}
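For context, here is a minimal sketch of what registering a self-managed PhoneAccount could look like; this is not taken from the package, and the account id and label are placeholders, so adapt it to whatever VoipUtilities already does:
import android.content.ComponentName;
import android.content.Context;
import android.telecom.PhoneAccount;
import android.telecom.PhoneAccountHandle;
import android.telecom.TelecomManager;

public final class SelfManagedAccountRegistrar {
    // Registers a self-managed PhoneAccount (API 26+): Telecom still tracks the call
    // for busy handling, but leaves the call UI entirely to the app.
    public static PhoneAccountHandle register(Context context) {
        ComponentName service = new ComponentName(context, VoipConnectionService.class);
        PhoneAccountHandle handle = new PhoneAccountHandle(service, "voip_account_id"); // placeholder id
        PhoneAccount account = PhoneAccount.builder(handle, "My VoIP App") // placeholder label
                .setCapabilities(PhoneAccount.CAPABILITY_SELF_MANAGED)
                .addSupportedUriScheme(PhoneAccount.SCHEME_TEL)
                .build();
        TelecomManager telecom = (TelecomManager) context.getSystemService(Context.TELECOM_SERVICE);
        telecom.registerPhoneAccount(account);
        return handle;
    }
}
Because the account is self-managed, the system in-call UI is not shown for the app's own calls, which is the behaviour asked about above.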

Related

How to know typing status in 1:1 conversation in Smack 4.2 in Android?

I am building a chat application in Android. I can send and receive messages, but I don't know how to get the other user's status and the typing status of the conversation.
I have been reading about using ChatManagerListener, but it is deprecated in this version.
In Smack 4.2 the ChatManager class does not have methods to listen for chat states.
Hi, I used the chat states protocol for typing status. Please read this doc, it might be helpful: XEP-0085: Chat State Notifications.
On the Android side you need to implement the following code:
Message msg = (Message) stanza;
// ChatStateExtension below carries the "composing" (typing) state.
ChatStateExtension state = (ChatStateExtension) msg.getExtension("http://jabber.org/protocol/chatstates"); // jabber:x:event
// If state (ChatStateExtension) != null and is "composing", call the listener method, unless the message is an error.
if (state != null) {
    Log.d(AppConstants.ELEMENT, "ChatStateExtension : " + state.toXML());
    if (state.getElementName().equals("composing")) {
        if (msg.getType().equals(Message.Type.error)) {
            return;
        }
        if (iCallBackForTypingListener != null) {
            DelayInformation timestamp = (DelayInformation) msg.getExtension("delay", "urn:xmpp:delay");
            if (timestamp == null)
                timestamp = (DelayInformation) msg.getExtension("x", "jabber:x:delay");
            if (timestamp != null && timestamp.getReason().equalsIgnoreCase("Offline Storage")) { // return if the delay info is Offline Storage
                return;
            }
            // Update your typing listener.
            iCallBackForTypingListener.onTypingStanza(fromJID, typingSender);
        }
        // xmpp.updateChatState(fromJID, state.getElementName(), sender);
        return;
    } else if (state.getElementName().equals("paused")) {
        return;
    }
}
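For the sending side, here is a minimal sketch of how the "composing" state might be attached to an outgoing stanza. It assumes Smack 4.2 with the smack-extensions module on the classpath, and the connection and JID are placeholders:
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.smackx.chatstates.ChatState;
import org.jivesoftware.smackx.chatstates.packet.ChatStateExtension;

// Sends a "composing" chat-state notification (XEP-0085) to the given JID.
void sendComposing(XMPPConnection connection, String toJid)
        throws SmackException.NotConnectedException, InterruptedException {
    Message typing = new Message(toJid, Message.Type.chat);
    typing.addExtension(new ChatStateExtension(ChatState.composing));
    connection.sendStanza(typing);
}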

Identify Messages using Message API Android

I have probably a simple question, but it has me stumped. For my Android Wear application, I have two sensors working (step counter and heart rate). The Wear app sends these values back to the mobile application using the Message API. My step-count sendMessage() and heart-rate sendMessage() methods look the same. Here is my heart-rate sendMessage() method.
private void sendMessageToHandheld(final String message) {
    if (mGoogleApiClient == null)
        return;
    Log.d(LOG_TAG, "sending a message to handheld: " + message);
    // Use the API client to send the heartbeat value to our handheld.
    final PendingResult<NodeApi.GetConnectedNodesResult> nodes = Wearable.NodeApi.getConnectedNodes(mGoogleApiClient);
    nodes.setResultCallback(new ResultCallback<NodeApi.GetConnectedNodesResult>() {
        @Override
        public void onResult(NodeApi.GetConnectedNodesResult result) {
            final List<Node> nodes = result.getNodes();
            if (nodes != null) {
                for (int i = 0; i < nodes.size(); i++) {
                    final Node node = nodes.get(i);
                    Wearable.MessageApi.sendMessage(mGoogleApiClient, node.getId(), message, bytes);
                }
            }
        }
    });
}
The problem is that I am only using one onMessageReceived method on the mobile, so I can't differentiate between the step value and the heart-rate value coming in.
@Override
public void onMessageReceived(MessageEvent messageEvent) {
    super.onMessageReceived(messageEvent);
    Log.d(LOG_TAG, "received a message from wear: " + messageEvent.getPath());
    if (messageEvent.getPath().contains("HEARTBEAT")) {
        // Save the new heartbeat value.
        currentValue = Integer.parseInt(messageEvent.getPath());
        if (handler != null) {
            // If a handler is registered, send the value as a new message.
            Log.d(LOG_TAG, "received a heartbeat message from wear: " + messageEvent.getPath());
            handler.sendEmptyMessage(currentValue);
        }
    } else {
        // Save the new steps value.
        currentStepValue = Integer.parseInt(messageEvent.getPath());
        if (handler != null) {
            // If a handler is registered, send the value as a new message.
            Log.d(LOG_TAG, "received a step message from wear: " + messageEvent.getPath());
            handler.sendEmptyMessage(currentStepValue);
        }
    }
}
I tried passing a byte array into the heart-rate sendMessage() and other strings as flags so that I could tell the values apart, but no luck. Does anyone know the best way to go about this?
Any help would be appreciated.
Cheers
It seems you are sending the data inside the path attribute. That is not the correct use of this parameter.
Let's take a look at the MessageApi.sendMessage(GoogleApiClient client, String nodeId, String path, byte[] data) method.
What you want to do is use the String path to provide an identifier for your message; in your case that would be, for example, step_counter and heartbeat. This way you can identify it on the other side when you receive the message.
The sensor data should go into the data field. You can put it there raw, but a better way is to create a DataMap and then serialize it into a byte[]. That way you can easily enrich the data later.
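A minimal sketch of that approach; the paths "/heartbeat" and "/step_counter" and the key "value" are arbitrary names chosen here for illustration, not fixed by the API:
import com.google.android.gms.wearable.DataMap;
import com.google.android.gms.wearable.MessageEvent;
import com.google.android.gms.wearable.Wearable;

// Wear side: identify the message by its path and carry the value in the payload.
private void sendSensorValue(String nodeId, String path, int value) {
    DataMap dataMap = new DataMap();
    dataMap.putInt("value", value);
    Wearable.MessageApi.sendMessage(mGoogleApiClient, nodeId, path, dataMap.toByteArray());
}

// Handheld side: branch on the path, then deserialize the payload.
@Override
public void onMessageReceived(MessageEvent messageEvent) {
    int value = DataMap.fromByteArray(messageEvent.getData()).getInt("value");
    if ("/heartbeat".equals(messageEvent.getPath())) {
        // handle the heart-rate value
    } else if ("/step_counter".equals(messageEvent.getPath())) {
        // handle the step-count value
    }
}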

Problems when sending a continuous stream of data over BLE

I'm wondering if anybody can help me figure out what is causing the data I am sending to become corrupted.
My setup is currently an Arduino Pro Mini with an HM-10 Bluetooth module connected (I have also tried the HM-11 module) and an Android application to receive the Bluetooth data.
Module setup: http://letsmakerobots.com/node/38009
If I send data at big enough intervals the data is fine, but if I send it continuously I see messages getting mixed up and lost. To test this I send "$0.1,0.2,0.3,0.4,0.5" to the Android application from the Arduino; sometimes the stream of data sends fine, but other times it is quite scrambled. Please see the graphs below that demonstrate this:
Good case:
Bad case:
Arduino code:
String inputString = "";        // Holds the incoming data.
boolean stringComplete = false; // Determines if the string is complete.
boolean realtime = false;

void setup()
{
    Serial.begin(9600);
    delay(500);
    Serial.print("AT+START");
    delay(500);
}

void loop()
{
    if (stringComplete)
    {
        if (inputString.equals("rStart"))
        {
            Serial.println("$startACK");
            realtime = true;
        }
        else if (inputString.equals("stop"))
        {
            Serial.println("$stopACK");
            realtime = false;
        }
        else {
            Serial.print(inputString);
        }
        inputString = "";
        stringComplete = false;
    }
    if (realtime)
    {
        Serial.println("$0.1,0.2,0.3,0.4,0.5,0.6");
        delay(10);
    }
}

void serialEvent() {
    while (Serial.available())
    {
        // Get the new byte:
        char inChar = (char)Serial.read();
        if (inChar == '\n')
        {
            stringComplete = true;
        }
        else
        {
            inputString += inChar;
        }
    }
}
The Android side just receives the data and then parses it in an IntentService:
@Override
protected void onHandleIntent(Intent intent) {
    // Incoming command.
    String rawData = intent.getStringExtra(DataProcessingIntentService.REQUEST);
    // Append our new data to our data helper.
    Log.i(this.getClass().getName(), "Previous Raw: (" + DataProcessingHelper.getInstance().getData() + ")");
    DataProcessingHelper.getInstance().appendData(rawData);
    Log.i(this.getClass().getName(), "New Raw: (" + DataProcessingHelper.getInstance().getData() + ")");
    commandStartIndex = DataProcessingHelper.getInstance().getData().indexOf("$");
    commandEndIndex = DataProcessingHelper.getInstance().getData().indexOf("\n");
    // Set this as the data starting point.
    if (commandStartIndex != -1) {
        DataProcessingHelper.getInstance().offsetData(commandStartIndex);
    }
    // Ensure that a command has been found and that the end index is after the starting index.
    if (commandStartIndex != -1 && commandEndIndex > commandStartIndex) {
        // Remove the command structure from the command.
        command = DataProcessingHelper.getInstance().getData().substring(commandStartIndex + 1, commandEndIndex - 1); // Remove the \r\n end of the command.
        DataProcessingHelper.getInstance().offsetData(commandEndIndex + 1);
        if (command.length() > 1) {
            // Split the data out of the command.
            splitData = command.split(",");
            Log.i(this.getClass().getName(), "Broadcasting the processed data. (" + command + ")");
            // Broadcast data.
            Intent broadcastIntent = new Intent();
            broadcastIntent.setAction(DataProcessingIntentService.RESPONSE);
            broadcastIntent.addCategory(Intent.CATEGORY_DEFAULT);
            broadcastIntent.putExtra(DataProcessingIntentService.RESPONSE, splitData);
            sendBroadcast(broadcastIntent);
        } else {
            Log.e(this.getClass().getName(), "Command is less than 1 character long!");
        }
    }
}
Thank you for any help!
I have now figured out what was causing this problem. It appears that BLE only supports a maximum of 20 bytes per transaction (the usable payload at the default ATT MTU). The time between these transactions differs depending on the mechanism you are using. I'm currently using notifications, which means I can send at most 20 bytes every 7.5 milliseconds (the minimum connection interval); I have opted for 10 milliseconds to be safe. I will now need to look into breaking packets up into a maximum of 20 bytes to ensure no data corruption.
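As a rough illustration of that splitting (the real sender in this setup is the Arduino, so this Java version only sketches the idea of keeping each write within the 20-byte notification payload):
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

// Splits a framed message (e.g. "$0.1,0.2,0.3,0.4,0.5,0.6\n") into chunks that
// each fit into a single BLE notification (20 bytes at the default ATT MTU).
static List<byte[]> chunk(String message, int maxChunkSize) {
    byte[] payload = message.getBytes(StandardCharsets.US_ASCII);
    List<byte[]> chunks = new ArrayList<>();
    for (int offset = 0; offset < payload.length; offset += maxChunkSize) {
        int end = Math.min(payload.length, offset + maxChunkSize);
        byte[] part = new byte[end - offset];
        System.arraycopy(payload, offset, part, 0, part.length);
        chunks.add(part);
    }
    return chunks;
}
The receiver can then reassemble the stream using the '$' and '\n' framing that the IntentService above already parses.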

Issue in getting friend request using smack android in offline state

I am using the Smack API to develop a chat application in Android. I am facing an issue with receiving buddy requests that arrive while the user is offline. I receive buddy requests using the following code.
LoginScreen.connection.addPacketListener(new PacketListener() {
    public void processPacket(Packet packet) {
        final Presence newPresence = (Presence) packet;
        final Presence.Type presenceType = newPresence.getType();
        final String fromId = newPresence.getFrom();
        final String toId = newPresence.getTo();
        final RosterEntry newEntry = roster.getEntry(fromId);
        final String name = fromId.substring(0, fromId.indexOf("@"));
        if (presenceType == Presence.Type.subscribed) {
            Log.i("test-chat", "#####SUBSCRIBED#########");
        }
        if (presenceType == Presence.Type.subscribe) {
            // Adding the buddy request to the local DB.
        }
    }
}, new PacketFilter() {
    public boolean accept(Packet packet) {
        if (packet instanceof Presence) {
            Presence presence = (Presence) packet;
            if (presence.getType().equals(Presence.Type.subscribed)
                    || presence.getType().equals(Presence.Type.subscribe)
                    || presence.getType().equals(Presence.Type.unsubscribed)
                    || presence.getType().equals(Presence.Type.unsubscribe)
                    || presence.getType().equals(Presence.Type.available)
                    || presence.getType().equals(Presence.Type.unavailable)) {
                return true;
            }
        }
        return false;
    }
});
This works fine when the user is online. But suppose that while the user is offline another user sends them a buddy request; when the user comes back online I am unable to get that buddy request, because the listener is not called. The listener needs to be called in order to fetch all the buddy requests that arrived while the user was offline.
Please help.
Your code is fine. This happened to me as well. The actual problem is that you are registering the listener after the request has already been received.
Try registering the listener before you log the user in and you will see that the request is received even when it arrived while you were offline.
This will actually solve the problem. Try this, and if you still face any problem, please elaborate on your situation.
Hope this helps.
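As a sketch of that ordering (reusing the listener and filter from the question; connection, username, and password are whatever your LoginScreen already uses):
// Connect first, register the presence listener, then log in. Subscription
// requests the server stored while the user was offline are delivered right
// after login, so the listener registered here will receive them.
connection.connect();
connection.addPacketListener(presenceListener, presenceFilter); // same listener/filter as above
connection.login(username, password);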

Connecting to existing Google Chromecast Session from Android (for generic remote control)

I am creating a generic Chromecast remote-control app. Most of the guts of the app are already created and I've managed to get Chromecast volume control working (by connecting to a Chromecast device alongside another app that is casting, YouTube for example).
What I'm having difficulty with is performing other media commands such as play, pause, seek, etc.
Use case example:
1. User opens YouTube on their android device and starts casting a video.
2. User opens my app and connects to the same Chromecast device.
3. Volume control from my app (works now)
4. Media control (play, pause, etc) (does not yet work)
I found the Cast API reference that explains that you can sendMessage(ApiClient, namespace, message) with media commands; however, the "message" (JSON) requires the sessionId of the currently running application (YouTube in this case). I have tried the following, but the connection to the current application always fails; status.isSuccess() is always false:
Cast.CastApi
    .joinApplication(mApiClient)
    .setResultCallback(
        new ResultCallback<Cast.ApplicationConnectionResult>() {
            @Override
            public void onResult(Cast.ApplicationConnectionResult result) {
                Status status = result.getStatus();
                if (status.isSuccess()) {
                    ApplicationMetadata applicationMetadata = result.getApplicationMetadata();
                    sessionId = result.getSessionId();
                    String applicationStatus = result.getApplicationStatus();
                    boolean wasLaunched = result.getWasLaunched();
                    Log.i(TAG, "Joined Application with sessionId: " + sessionId
                            + " Application Status: " + applicationStatus);
                } else {
                    // teardown();
                    Log.e(TAG, "Could not join application: " + status.toString());
                }
            }
        });
Is it possible to get the sessionId of an already-running cast application from a generic remote-control app (like the one I am creating)? If so, am I right in my assumption that I can then perform media commands on the connected Chromecast device using something like this:
JSONObject message = new JSONObject();
message.put("mediaSessionId", sessionId);
message.put("requestId", 9999);
message.put("type", "PAUSE");
Cast.CastApi.sendMessage(mApiClient,
        "urn:x-cast:com.google.cast.media", message.toString());
Update:
I have tried the recommendations provided by @Ali Naddaf, but unfortunately they are not working. After creating mRemoteMediaPlayer in onCreate, I also call requestStatus(mApiClient) in the onConnected callback (in the ConnectionCallbacks). When I try to .play(mApiClient) I get an IllegalStateException stating that there is no current media session. I also tried doing joinApplication and in the callback called result.getSessionId(), which returns null.
A few comments and answers:
You can get the sessionId from the callback of launchApplication or joinApplication; in onResult(result), you can get it from result.getSessionId().
YouTube is still not on the official SDK, so YMMV; for apps using the official SDK, you should be able to use the above approach (most of it).
Why are you trying to set up a message yourself? Why not build a RemoteMediaPlayer and use the play/pause methods it provides? Whenever you are working with media playback through the official channel, always use the RemoteMediaPlayer (and don't forget to call requestStatus() on it after creating it).
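For reference, a minimal sketch of that RemoteMediaPlayer route, assuming an already-connected mApiClient (the variable names are illustrative, not from the question's code):
final RemoteMediaPlayer mRemoteMediaPlayer = new RemoteMediaPlayer();
try {
    // Attach the media channel so the player receives media status updates.
    Cast.CastApi.setMessageReceivedCallbacks(mApiClient,
            mRemoteMediaPlayer.getNamespace(), mRemoteMediaPlayer);
} catch (IOException e) {
    Log.e(TAG, "Failed to register the media channel", e);
}
// Ask the receiver for its current media status; until this completes there is
// no media session to control and play()/pause() throw IllegalStateException.
mRemoteMediaPlayer.requestStatus(mApiClient)
        .setResultCallback(new ResultCallback<RemoteMediaPlayer.MediaChannelResult>() {
            @Override
            public void onResult(RemoteMediaPlayer.MediaChannelResult result) {
                if (result.getStatus().isSuccess()) {
                    mRemoteMediaPlayer.pause(mApiClient); // or play(mApiClient), seek(...), etc.
                }
            }
        });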
Yes, it is possible. First you have to save the sessionId and the CastDevice device id, and when the app is removed from the background and opened again, check whether there is a saved sessionId and then call the line below.
Cast.CastApi.joinApplication(apiClient, APP_ID, sid).setResultCallback(connectionResultCallback);
If you get a success result, implement the further processing in the connectionResultCallback listener.
// Get the previously selected device.
@Override
public void onRouteAdded(MediaRouter router, MediaRouter.RouteInfo route) {
    // Log.d("Route Added", "onRouteAdded");
    /* if (router.getRoutes().size() > 1)
        Toast.makeText(homeScreenActivity, "'onRouteAdded :: " + router.getRoutes().size() + " -- " + router.getRoutes().get(1).isSelected(), Toast.LENGTH_SHORT).show();
    else
        Toast.makeText(homeScreenActivity, "'onRouteAdded :: " + router.getRoutes(), Toast.LENGTH_SHORT).show(); */
    if (router != null && router.getRoutes() != null && router.getRoutes().size() > 1) {
        // Show the button when a device is discovered.
        // Toast.makeText(homeScreenActivity, "'onRouteAdded :: " + router.getRoutes().size() + " -- " + router.getRoutes().get(1).isSelected(), Toast.LENGTH_SHORT).show();
        mMediaRouteButton.setVisibility(View.VISIBLE);
        titleLayout.setVisibility(View.GONE);
        castName.setVisibility(View.VISIBLE);
        selectedDevice = CastDevice.getFromBundle(route.getExtras());
        routeInfoArrayList = router.getRoutes();
        titleLayout.setVisibility(View.GONE);
        if (!isCastConnected) {
            String deid = MyPref.getInstance(homeScreenActivity).readPrefs(MyPref.CAST_DEVICE_ID);
            for (int i = 0; i < routeInfoArrayList.size(); i++) {
                if (routeInfoArrayList.get(i).getExtras() != null && CastDevice.getFromBundle(routeInfoArrayList.get(i).getExtras()).getDeviceId().equalsIgnoreCase(deid)) {
                    selectedDevice = CastDevice.getFromBundle(routeInfoArrayList.get(i).getExtras());
                    routeInfoArrayList.get(i).select();
                    ReSelectedDevice(selectedDevice, routeInfoArrayList.get(i).getName());
                    break;
                }
            }
        }
    }
}

// Reconnect the GoogleApiClient.
public void reConnectGoogleApiClient() {
    if (apiClient == null) {
        Cast.CastOptions apiOptions = new Cast.CastOptions.Builder(selectedDevice, castClientListener).build();
        apiClient = new GoogleApiClient.Builder(this)
                .addApi(Cast.API, apiOptions)
                .addConnectionCallbacks(reconnectionCallback)
                .addOnConnectionFailedListener(connectionFailedListener)
                .build();
        apiClient.connect();
    }
}

// Join the application.
private final GoogleApiClient.ConnectionCallbacks reconnectionCallback = new GoogleApiClient.ConnectionCallbacks() {
    @Override
    public void onConnected(Bundle bundle) {
        // Toast.makeText(homeScreenActivity, "" + isDeviceSelected(), Toast.LENGTH_SHORT).show();
        try {
            String sid = MyPref.getInstance(homeScreenActivity).readPrefs(MyPref.CAST_SESSION_ID);
            String deid = MyPref.getInstance(homeScreenActivity).readPrefs(MyPref.CAST_DEVICE_ID);
            if (sid != null && deid != null && sid.length() > 0 && deid.length() > 0)
                Cast.CastApi.joinApplication(apiClient, APP_ID, sid).setResultCallback(connectionResultCallback);
            isApiConnected = true;
        } catch (Exception e) {
        }
    }

    @Override
    public void onConnectionSuspended(int i) {
        isCastConnected = false;
        isApiConnected = false;
    }
};
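The connectionResultCallback referenced above is not shown in the answer; a hypothetical sketch of its shape might look like this (only an assumption of what it does, not the answerer's actual code):
private final ResultCallback<Cast.ApplicationConnectionResult> connectionResultCallback =
        new ResultCallback<Cast.ApplicationConnectionResult>() {
            @Override
            public void onResult(Cast.ApplicationConnectionResult result) {
                if (result.getStatus().isSuccess()) {
                    // Rejoined the saved session: remember the sessionId and re-attach
                    // any media channels (e.g. a RemoteMediaPlayer) here.
                    String sessionId = result.getSessionId();
                    isCastConnected = true;
                } else {
                    // The stored session is stale: clear the saved id and fall back to launchApplication.
                    isCastConnected = false;
                }
            }
        };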
