I'm using CastCompanionLibrary-Android and I'm trying to set a custom TextTrackStyle for the captions.
I'm setting this TextTrackStyle on the MediaInfo while creating it:
// set CC style
TextTrackStyle textTrackStyle = new TextTrackStyle();
textTrackStyle.setBackgroundColor(Color.parseColor("#FFFFFF"));
textTrackStyle.setForegroundColor(ContextCompat.getColor(mContext, R.color.blue));
MediaInfo mediaInfo = new MediaInfo.Builder(url)
        .setStreamDuration(movieVideoItem.getDuration())
        .setStreamType(MediaInfo.STREAM_TYPE_NONE)
        .setContentType(type)
        .setMetadata(mediaMetadata)
        .setMediaTracks(tracks)
        .setCustomData(customData)
        .setTextTrackStyle(textTrackStyle)
        .build();
But there is no visible result on the Chromecast side. I also tried changing the VideoCastManager method setTextTrackStyle:
public void setTextTrackStyle(TextTrackStyle style) {
    // CUSTOM TEXT TRACK STYLE HERE
    TextTrackStyle textTrackStyle = new TextTrackStyle();
    textTrackStyle.setBackgroundColor(Color.parseColor("#FF0000"));
    textTrackStyle.setForegroundColor(Color.parseColor("#0000FF"));
    mRemoteMediaPlayer.setTextTrackStyle(mApiClient, textTrackStyle)
            .setResultCallback(new ResultCallback<MediaChannelResult>() {
                @Override
                public void onResult(MediaChannelResult result) {
                    if (!result.getStatus().isSuccess()) {
                        onFailed(R.string.ccl_failed_to_set_track_style,
                                result.getStatus().getStatusCode());
                    }
                }
            });
    for (VideoCastConsumer consumer : mVideoConsumers) {
        try {
            consumer.onTextTrackStyleChanged(textTrackStyle);
        } catch (Exception e) {
            LOGE(TAG, "onTextTrackStyleChanged(): Failed to inform " + consumer, e);
        }
    }
}
But in this ResultCallback I'm getting error code 2103, which I looked up in the Google developers reference:
public static final int REPLACED
Status code indicating that the request's progress is no longer being tracked because another request of the same type has been made before the first request completed.
Constant Value: 2103
I don't understand what this error code means in my case, or what I'm doing wrong. The Chromecast receiver implementation should be able to handle a custom TextTrackStyle (at least for the embedded VTT type):
MediaManager.onMetadataLoaded = function (event) {
    .......
    console.log("### RESOLVED TEXT TRACK TYPE " + textTrackType);
    if (textTrackType == sampleplayer.TextTrackType.SIDE_LOADED_TTML &&
            event.message && event.message.activeTrackIds && event.message.media &&
            event.message.media.tracks) {
        _processTtmlCues(event.message.activeTrackIds, event.message.media.tracks);
    } else if (!textTrackType || textTrackType == sampleplayer.TextTrackType.SIDE_LOADED_UNSUPPORTED) {
        // If we do not have a textTrackType, check if the tracks are embedded
        _maybeLoadEmbeddedTracksMetadata(event);
    }
    MediaManager['onMetadataLoadedOrig'](event);
}
function _maybeLoadEmbeddedTracksMetadata(info) {
    if (!info.message || !info.message.media) {
        return;
    }
    var tracksInfo = _readInBandTracksInfo();
    if (tracksInfo) {
        textTrackType = sampleplayer.TextTrackType.EMBEDDED;
        tracksInfo.textTrackStyle = info.message.media.textTrackStyle;
        MediaManager.loadTracksInfo(tracksInfo);
    }
}
The result on the Chromecast is just white text on a black background.
// EDIT: Chromecast receiver log added
Here is the Chromecast receiver log, where I found a TextTrackStyle, but it is not the style I'm trying to set:
Received message: {"data":"{\"requestId\":4,\"type\":\"EDIT_TRACKS_INFO\",\"textTrackStyle\":{\"fontScale\":1,\"foregroundColor\":\"#4285F4FF\",\"backgroundColor\":\"#FFFFFFFF\"},\"mediaSessionId\":1}"
Those are different colors from the ones I'm actually sending from my Android sender app. I cannot see any TextTrackStyle inside the onLoad method at all, so I'm not sure whether the problem is on the Android side or on the Chromecast side.
I am integrating the Cast v3 SDK in my app. Notification controls and lock screen controls do not appear on the device which initiated the casting; if I connect from other devices, I can see the controls.
My CastOptionsProvider is as follows:
public class CastOptionsProvider implements OptionsProvider {
public static final String CUSTOM_NAMESPACE = "urn:x-cast:com.test.cast.player";
// @Override
// public CastOptions getCastOptions(Context context) {
// List<String> supportedNamespaces = new ArrayList<>();
// supportedNamespaces.add(CUSTOM_NAMESPACE);
// CastOptions castOptions = new CastOptions.Builder()
// .setReceiverApplicationId(context.getString(R.string.app_id))
// .setSupportedNamespaces(supportedNamespaces)
// .build();
// return castOptions;
// }
@Override
public CastOptions getCastOptions(Context context) {
List<String> supportedNamespaces = new ArrayList<>();
supportedNamespaces.add(CUSTOM_NAMESPACE);
NotificationOptions notificationOptions = new NotificationOptions.Builder()
.setActions(Arrays.asList(MediaIntentReceiver.ACTION_SKIP_NEXT,
MediaIntentReceiver.ACTION_TOGGLE_PLAYBACK,
MediaIntentReceiver.ACTION_STOP_CASTING), new int[]{1, 2})
.setTargetActivityClassName(CustomExpandedControlsActivity.class.getName())
.build();
CastMediaOptions mediaOptions = new CastMediaOptions.Builder()
.setImagePicker(new ImagePickerImpl())
.setNotificationOptions(notificationOptions)
.setExpandedControllerActivityClassName(CustomExpandedControlsActivity.class.getName())
.build();
return new CastOptions.Builder()
.setReceiverApplicationId(context.getString(R.string.app_id))
//.setSupportedNamespaces(supportedNamespaces)
.setCastMediaOptions(mediaOptions)
.build();
}
@Override
public List<SessionProvider> getAdditionalSessionProviders(Context appContext) {
return null;
}
private static class ImagePickerImpl extends ImagePicker {
@Override
public WebImage onPickImage(MediaMetadata mediaMetadata, int type) {
if ((mediaMetadata == null) || !mediaMetadata.hasImages()) {
return null;
}
List<WebImage> images = mediaMetadata.getImages();
if (images.size() == 1) {
return images.get(0);
} else {
if (type == ImagePicker.IMAGE_TYPE_MEDIA_ROUTE_CONTROLLER_DIALOG_BACKGROUND) {
return images.get(0);
} else {
return images.get(1);
}
}
}
}
}
Probably a bit late to help you, but I'll answer in case this helps others. I wrestled with this issue for several days. In the end the problem turned out to be in the custom receiver app we developed, not in the Android app.
The presence of the playback controls in the sender app (the Android side) depends on receiving exactly the right payload in the messages sent via the message bus in the media namespace (urn:x-cast:com.google.cast.media). So if your receiver app is not providing all of the correct data structures, or sending things in an unexpected sequence, the playback controls will not show up on the Android side.
To debug this you'll need to compare the logs from an app that works with the one that doesn't. You can see what messages are coming back to the Android sender by adding a listener for the media namespace channel:
public static final String MEDIA_NAMESPACE = "urn:x-cast:com.google.cast.media";

private Cast.MessageReceivedCallback messageReceivedCallback = new Cast.MessageReceivedCallback() {
    @Override
    public void onMessageReceived(CastDevice castDevice, String namespace, String message) {
        Log.d(TAG, "Received message (" + namespace + "): " + message);
    }
};

castSession.setMessageReceivedCallbacks(MEDIA_NAMESPACE, messageReceivedCallback);
In my case there were two problems. The receiver app was not sending the volume in the correct format, and we were not sending out an initial 'IDLE' message that included all of the correct media information. Any deviation from the expected message format can result in a parse error on the sender side that breaks the normal flow. If that happens you won't see any information for the loaded media or the playback controls when you begin casting. The first message sent out always seems to be an 'IDLE' message, and it looks like this in our app:
{
  "type": "MEDIA_STATUS",
  "status": [
    {
      "mediaSessionId": 1,
      "playbackRate": 1,
      "playerState": "IDLE",
      "currentTime": 0,
      "supportedMediaCommands": 15,
      "volume": {
        "level": 1,
        "muted": false
      },
      "media": {
        "contentId": "http://your.server/movie.mp4",
        "streamType": "BUFFERED",
        "contentType": "application/x-mpegurl",
        "metadata": {
          "metadataType": 1,
          "images": [
            {
              "url": "http://your.server/some.jpg",
              "width": 200,
              "height": 200
            }
          ],
          "title": "The Movie",
          "subtitle": "The Thing Worth Watching"
        },
        "duration": 0,
        "customData": {
          "description": "A very cool movie that you will probably want to see."
        }
      },
      "currentItemId": 1,
      "extendedStatus": {
        "playerState": "LOADING",
        "media": {
          "contentId": "http://your.server/movie.mp4",
          "streamType": "BUFFERED",
          "contentType": "application/x-mpegurl",
          "metadata": {
            "metadataType": 1,
            "images": [
              {
                "url": "http://your.server/some.jpg",
                "width": 200,
                "height": 200
              }
            ],
            "title": "The Movie",
            "subtitle": "The Thing Worth Watching"
          },
          "duration": 0,
          "customData": {
            "description": "A very cool movie that you will probably want to see."
          }
        }
      },
      "repeatMode": "REPEAT_OFF"
    }
  ],
  "requestId": 0
}
Your app might not be providing all of the same data, so your message will look a bit different, but you should make sure that all of the fields that you need are there and have the correct members and data types.
I have the following code to make requests to a REST API, using Xamarin and an Android device:
public class ApiBase
{
HttpClient m_HttpClient;
public ApiBase(string baseAddress, string username, string password)
{
if (!baseAddress.EndsWith("/"))
{
baseAddress += "/";
}
var handler = new HttpClientHandler();
if (handler.SupportsAutomaticDecompression)
{
handler.AutomaticDecompression = DecompressionMethods.GZip;
}
m_HttpClient = new HttpClient(handler);
m_HttpClient.BaseAddress = new Uri(baseAddress);
var credentialsString = Convert.ToBase64String(Encoding.UTF8.GetBytes(username + ":" + password));
m_HttpClient.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Basic", credentialsString);
m_HttpClient.Timeout = new TimeSpan(0, 0, 30);
}
protected async Task<XElement> HttpGetAsync(string method)
{
try
{
HttpResponseMessage response = await m_HttpClient.GetAsync(method);
if (response.IsSuccessStatusCode)
{
// the request was successful, parse the returned string as xml and return the XElement
var xml = await response.Content.ReadAsAsync<XElement>();
return xml;
}
// the request was not successful -> return null
else
{
return null;
}
}
// some exception occured -> return null
catch (Exception)
{
return null;
}
}
}
If I have it like this, the first and second calls to HttpGetAsync work perfectly, but from the third one on, GetAsync stalls and eventually throws an exception due to the timeout. I send these calls consecutively; no two of them run simultaneously, since the results of the previous call are needed to decide the next call.
I tried using the app Packet Capture to look at the requests and responses to find out whether I'm sending an incorrect request. But it looks like the request which fails in the end is never even sent.
Through experimentation I found out that everything works fine if I don't set AutomaticDecompression.
It also works fine if I change the HttpGetAsync method to this:
protected async Task<XElement> HttpGetAsync(string method)
{
try
{
// send the request
var response = await m_HttpClient.GetStringAsync(method);
if (string.IsNullOrEmpty(response))
{
return null;
}
var xml = XElement.Parse(response);
return xml;
}
// some exception occured -> return null
catch (Exception)
{
return null;
}
}
So basically I'm using m_HttpClient.GetStringAsync instead of m_HttpClient.GetAsync and changing the code around it to work with the different return type. If I do it like this, everything works without any problems.
Does anyone have an idea why GetAsync doesn't work properly (it doesn't seem to send the third request) with AutomaticDecompression, whereas GetStringAsync works flawlessly?
There are bug reports about this exact issue:
https://bugzilla.xamarin.com/show_bug.cgi?id=21477
The bug is marked as RESOLVED FIXED and the recommended action is to update to the latest stable build. But there are other (newer) bug reports about the same thing that are still open, e.g.:
https://bugzilla.xamarin.com/show_bug.cgi?id=34747
I made a workaround by implementing my own HttpClientHandler like so:
public class DecompressionHttpClientHandler : HttpClientHandler
{
    protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        request.Headers.AcceptEncoding.Add(new System.Net.Http.Headers.StringWithQualityHeaderValue("gzip"));
        var msg = await base.SendAsync(request, cancellationToken);
        if (msg.Content.Headers.ContentEncoding.Contains("gzip"))
        {
            var compressedStream = await msg.Content.ReadAsStreamAsync();
            var uncompressedStream = new System.IO.Compression.GZipStream(compressedStream, System.IO.Compression.CompressionMode.Decompress);
            msg.Content = new StreamContent(uncompressedStream);
        }
        return msg;
    }
}
Note that the code above is just an example and not a final solution. For example, the request will not be compressed and all headers will be stripped from the result. But you get the idea.
I have successfully been running WebRTC in my Android app for a while, using libjingle.so and PeerConnectionClient.java, etc., from Google's code library. However, I am now running into a problem where a user starts a connection as audio only (i.e., an audio call), but then toggles video on. I augmented the existing setVideoEnabled() in PeerConnectionClient as follows:
public void setVideoEnabled(final boolean enable) {
executor.execute(new Runnable() {
@Override
public void run() {
renderVideo = enable;
if (localVideoTrack != null) {
localVideoTrack.setEnabled(renderVideo);
} else {
if (renderVideo) {
//AC: create a video track
String cameraDeviceName = VideoCapturerAndroid.getDeviceName(0);
String frontCameraDeviceName =
VideoCapturerAndroid.getNameOfFrontFacingDevice();
if (numberOfCameras > 1 && frontCameraDeviceName != null) {
cameraDeviceName = frontCameraDeviceName;
}
Log.i(TAG, "Opening camera: " + cameraDeviceName);
videoCapturer = VideoCapturerAndroid.create(cameraDeviceName);
if (createVideoTrack(videoCapturer) != null) {
mediaStream.addTrack(localVideoTrack);
localVideoTrack.setEnabled(renderVideo);
peerConnection.addStream(mediaStream);
} else {
Log.d(TAG, "Local video track is still null");
}
} else {
Log.d(TAG, "Local video track is null");
}
}
if (remoteVideoTrack != null) {
remoteVideoTrack.setEnabled(renderVideo);
} else {
Log.d(TAG,"Remote video track is null");
}
}
});
}
This allows me to successfully see a local inset of the device's video camera, but it doesn't send the video to the remote client. I thought the peerConnection.addStream() call would do that, but perhaps I am missing something else?
To avoid building an external mechanism of communication between the peers, which would require an answer from the second peer before the new stream can be added, you can always start with an existing (but sometimes empty) video stream. Then it is just a matter of filling that stream with content when (and if) necessary.
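A minimal sketch of that idea, using the older libjingle-style API from the question; the factory, videoConstraints, mediaStream, executor and peerConnection fields are assumed to already exist in your PeerConnectionClient:
// Create and negotiate the video track up front, but leave it disabled.
private void createPlaceholderVideoTrack() {
    String cameraName = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    videoCapturer = VideoCapturerAndroid.create(cameraName);
    videoSource = factory.createVideoSource(videoCapturer, videoConstraints);
    localVideoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);
    // Disabled at first: the video m-line is part of the initial offer/answer,
    // so enabling it later does not require renegotiation.
    localVideoTrack.setEnabled(false);
    mediaStream.addTrack(localVideoTrack);
    peerConnection.addStream(mediaStream);
}

// Toggling video then simply enables or disables the already-negotiated tracks.
public void setVideoEnabled(final boolean enable) {
    executor.execute(new Runnable() {
        @Override
        public void run() {
            renderVideo = enable;
            if (localVideoTrack != null) {
                localVideoTrack.setEnabled(renderVideo);
            }
            if (remoteVideoTrack != null) {
                remoteVideoTrack.setEnabled(renderVideo);
            }
        }
    });
}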
I've written an Android Wear application which receives an image wrapped in an Asset from a phone app using the Data API. The app used to work fine and has not been changed in ages, but recently I started to find that the image passed from the phone app was failing to be rendered on the screen of the wearable. On investigation I found that one of the methods, getFdForAsset, was failing with a wearable status code of 4005, which means "asset unavailable". See https://developers.google.com/android/reference/com/google/android/gms/wearable/WearableStatusCodes
I process data events in a call to my wearable app's onDataChanged method like this:
public void onDataChanged(DataEventBuffer dataEvents) {
LOGD(TAG, "XXXX MainActivity.onDataChanged()");
final List<DataEvent> events = FreezableUtils.freezeIterable(dataEvents);
dataEvents.close();
LOGD(TAG, "onDataChanged data event count=" + events.size());
for (DataEvent event : events) {
if (event.getType() == DataEvent.TYPE_CHANGED) {
String path = event.getDataItem().getUri().getPath();
if (IMAGE_PATH.equals(path)) {
DataMapItem dataMapItem = DataMapItem.fromDataItem(event.getDataItem());
LOGD(TAG, "onDataChanged getting image asset");
Asset photo = dataMapItem.getDataMap()
.getAsset(IMAGE_KEY);
LOGD(TAG, "onDataChanged photo asset="+photo);
final String toi_name = dataMapItem.getDataMap().getString(GYBO_NAME);
final String toi_info = dataMapItem.getDataMap().getString(GYBO_INFO);
current_toi_name = toi_name;
current_toi_info = toi_info;
LOGD(TAG, "onDataChanged TOI name="+toi_name);
LOGD(TAG, "onDataChanged TOI info="+toi_info);
Bitmap bitmap = loadBitmapFromAsset(google_api_client, photo);
And then attempt to create a Bitmap from the Asset in this method:
private Bitmap loadBitmapFromAsset(GoogleApiClient apiClient, Asset asset) {
if (asset == null) {
throw new IllegalArgumentException("XXXX Asset must be non-null");
}
DataApi.GetFdForAssetResult result = Wearable.DataApi.getFdForAsset(
apiClient, asset).await();
if (result == null) {
Log.w(TAG, "XXXX getFdForAsset returned null");
return null;
}
if (result.getStatus().isSuccess()) {
Log.d(TAG, "XXXX loadBitmapFromAsset getFdForAsset was successful");
} else {
Log.d(TAG, "XXXX loadBitmapFromAsset getFdForAsset was not successful. Error="+result.getStatus().getStatusCode()+":"+result.getStatus().getStatusMessage());
// Seeing status code 4005 here which means Asset Unavailable
}
InputStream assetInputStream = result.getInputStream();
if (assetInputStream == null) {
Log.w(TAG, "XXXX Requested an unknown Asset");
result.release();
return null;
}
result.release();
return BitmapFactory.decodeStream(assetInputStream);
}
The Asset object itself is not null, so it's coming across from the mobile app OK. And the path of the data event is being correctly recognised as being one which contains an image.
Does anyone have any idea as to why I'm getting this result and how to resolve it?
Thanks
One important thing: the wearable module and the mobile module have to be signed with the same certificate; if you define it via your build.gradle, make sure it is the same for both. This affects transferring assets; other data was synced without issues even with different certificates.
I was recently fighting with this issue and found this was the cause of ASSET_UNAVAILABLE while adding a wear module to an existing app which had a custom debug signing certificate defined in build.gradle. I had to use this certificate for the wearable module as well for asset sync to work.
How are you sending the image? I found that if I used Asset.createFromUri(), it didn't work and gave me the ASSET_UNAVAILABLE error. But when I switched to Asset.createFromFd(), it worked.
Here's the code that worked for me:
private static Asset createAssetFromBitmap(String imagePath) throws FileNotFoundException {
    // creating from Uri doesn't work: gives an ASSET_UNAVAILABLE error
    //return Asset.createFromUri(Uri.parse(imagePath));
    final File file = new File(imagePath);
    final ParcelFileDescriptor fd = ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_ONLY);
    return Asset.createFromFd(fd);
}
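For context, here is a sketch of how such an asset is typically attached to a data item on the phone side with the legacy Wearable Data API. The sendImageAsset helper is hypothetical, and IMAGE_PATH and IMAGE_KEY are assumed to match the constants the question's onDataChanged() checks for:
// Hypothetical sender-side helper; IMAGE_PATH and IMAGE_KEY are assumptions
// meant to mirror the constants read on the wearable.
private void sendImageAsset(GoogleApiClient googleApiClient, Asset asset) {
    PutDataMapRequest dataMapRequest = PutDataMapRequest.create(IMAGE_PATH);
    dataMapRequest.getDataMap().putAsset(IMAGE_KEY, asset);
    // A changing value forces a change event even if the image bytes are identical.
    dataMapRequest.getDataMap().putLong("timestamp", System.currentTimeMillis());
    PutDataRequest request = dataMapRequest.asPutDataRequest();
    Wearable.DataApi.putDataItem(googleApiClient, request);
}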
I am creating a generic Chromecast remote control app. Most of the guts of the app are already created and I've managed to get Chromecast volume control working (by connecting to a Chromecast device alongside another app that is casting, YouTube for example).
What I'm having difficulty with is performing other media commands such as play, pause, seek, etc.
Use case example:
1. User opens YouTube on their android device and starts casting a video.
2. User opens my app and connects to the same Chromecast device.
3. Volume control from my app (works now)
4. Media control (play, pause, etc) (does not yet work)
I found the Cast API reference that explains that you can sendMessage(ApiClient, namespace, message) with media commands; however, the "message" (JSON) requires the sessionId of the currently running application (YouTube in this case). I have tried the following, but the connection to the current application always fails; status.isSuccess() is always false:
Cast.CastApi.joinApplication(mApiClient).setResultCallback(
        new ResultCallback<Cast.ApplicationConnectionResult>() {
            @Override
            public void onResult(Cast.ApplicationConnectionResult result) {
                Status status = result.getStatus();
                if (status.isSuccess()) {
                    ApplicationMetadata applicationMetadata = result.getApplicationMetadata();
                    sessionId = result.getSessionId();
                    String applicationStatus = result.getApplicationStatus();
                    boolean wasLaunched = result.getWasLaunched();
                    Log.i(TAG, "Joined Application with sessionId: " + sessionId
                            + " Application Status: " + applicationStatus);
                } else {
                    // teardown();
                    Log.e(TAG, "Could not join application: " + status.toString());
                }
            }
        });
Is it possible to get the sessionId of an already running cast application from a generic remote control app (like the one I am creating)? If so, am I right in my assumption that I can then perform media commands on the connected Chromecast device using something like this:
JSONObject message = new JSONObject();
message.put("mediaSessionId", sessionId);
message.put("requestId", 9999);
message.put("type", "PAUSE");
Cast.CastApi.sendMessage(mApiClient,
"urn:x-cast:com.google.cast.media", message.toString());
Update:
I have tried the recommendations provided by @Ali Naddaf but unfortunately they are not working. After creating mRemoteMediaPlayer in onCreate, I also do requestStatus(mApiClient) in the onConnected callback (in the ConnectionCallbacks). When I try to .play(mApiClient) I get an IllegalStateException stating that there is no current media session. Also, I tried doing joinApplication and in the callback called result.getSessionId(), which returns null.
A few comments and answers:
You can get the sessionId from the callback of launchApplication or joinApplication; in onResult(result), you can get it from result.getSessionId().
YouTube is still not on the official SDK, so YMMV; for apps using the official SDK, you should be able to use the above approach (most of it).
Why are you trying to set up the message yourself? Why not build a RemoteMediaPlayer and use the play/pause that it provides? Whenever you are working with media playback through the official channel, always use the RemoteMediaPlayer (don't forget to call requestStatus() on it after creating it).
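A minimal sketch of that approach, assuming an already-connected GoogleApiClient (mApiClient) like the one in the question, using the legacy Cast SDK classes:
// Let RemoteMediaPlayer handle the media namespace instead of hand-crafting JSON.
RemoteMediaPlayer mRemoteMediaPlayer = new RemoteMediaPlayer();
try {
    Cast.CastApi.setMessageReceivedCallbacks(mApiClient,
            mRemoteMediaPlayer.getNamespace(), mRemoteMediaPlayer);
} catch (IOException e) {
    Log.e(TAG, "Exception while creating the media channel", e);
}

// Ask the receiver for its current media status; without this there is no media
// session to act on and play()/pause() will throw IllegalStateException.
mRemoteMediaPlayer.requestStatus(mApiClient).setResultCallback(
        new ResultCallback<RemoteMediaPlayer.MediaChannelResult>() {
            @Override
            public void onResult(RemoteMediaPlayer.MediaChannelResult result) {
                if (result.getStatus().isSuccess()) {
                    // Now it is safe to issue transport commands.
                    mRemoteMediaPlayer.pause(mApiClient);
                    // ...or mRemoteMediaPlayer.play(mApiClient);
                }
            }
        });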
Yes, it is possible. First you have to save the sessionId and the CastDevice device id,
and when the app is removed from the background and opened again, check whether there is a saved sessionId and then call the line below.
Cast.CastApi.joinApplication(apiClient, APP_ID, sid).setResultCallback(connectionResultCallback);
If you get a success result, you then need to implement the further processing in the connectionResultCallback listener.
//Get selected device which you selected before
@Override
public void onRouteAdded(MediaRouter router, MediaRouter.RouteInfo route) {
// Log.d("Route Added", "onRouteAdded");
/* if (router.getRoutes().size() > 1)
Toast.makeText(homeScreenActivity, "'onRouteAdded :: " + router.getRoutes().size() + " -- " + router.getRoutes().get(1).isSelected(), Toast.LENGTH_SHORT).show();
else
Toast.makeText(homeScreenActivity, "'onRouteAdded :: " + router.getRoutes(), Toast.LENGTH_SHORT).show();*/
if (router != null && router.getRoutes() != null && router.getRoutes().size() > 1) {
// Show the button when a device is discovered.
// Toast.makeText(homeScreenActivity, "'onRouteAdded :: " + router.getRoutes().size() + " -- " + router.getRoutes().get(1).isSelected(), Toast.LENGTH_SHORT).show();
mMediaRouteButton.setVisibility(View.VISIBLE);
titleLayout.setVisibility(View.GONE);
castName.setVisibility(View.VISIBLE);
selectedDevice = CastDevice.getFromBundle(route.getExtras());
routeInfoArrayList = router.getRoutes();
titleLayout.setVisibility(View.GONE);
if (!isCastConnected) {
String deid = MyPref.getInstance(homeScreenActivity).readPrefs(MyPref.CAST_DEVICE_ID);
for (int i = 0; i < routeInfoArrayList.size(); i++) {
if (routeInfoArrayList.get(i).getExtras() != null && CastDevice.getFromBundle(routeInfoArrayList.get(i).getExtras()).getDeviceId().equalsIgnoreCase(deid)) {
selectedDevice = CastDevice.getFromBundle(routeInfoArrayList.get(i).getExtras());
routeInfoArrayList.get(i).select();
ReSelectedDevice(selectedDevice, routeInfoArrayList.get(i).getName());
break;
}
}
}
}
}
//Reconnect google Api Client
public void reConnectGoogleApiClient() {
if (apiClient == null) {
Cast.CastOptions apiOptions = new
Cast.CastOptions.Builder(selectedDevice, castClientListener).build();
apiClient = new GoogleApiClient.Builder(this)
.addApi(Cast.API, apiOptions)
.addConnectionCallbacks(reconnectionCallback)
.addOnConnectionFailedListener(connectionFailedListener)
.build();
apiClient.connect();
}
}
// join Application
private final GoogleApiClient.ConnectionCallbacks reconnectionCallback = new GoogleApiClient.ConnectionCallbacks() {
@Override
public void onConnected(Bundle bundle) {
// Toast.makeText(homeScreenActivity, "" + isDeviceSelected(), Toast.LENGTH_SHORT).show();
try {
String sid = MyPref.getInstance(homeScreenActivity).readPrefs(MyPref.CAST_SESSION_ID);
String deid = MyPref.getInstance(homeScreenActivity).readPrefs(MyPref.CAST_DEVICE_ID);
if (sid != null && deid != null && sid.length() > 0 && deid.length() > 0)
Cast.CastApi.joinApplication(apiClient, APP_ID, sid).setResultCallback(connectionResultCallback);
isApiConnected = true;
} catch (Exception e) {
}
}
@Override
public void onConnectionSuspended(int i) {
isCastConnected = false;
isApiConnected = false;
}
};
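The connectionResultCallback referenced above is not shown; here is a minimal sketch of what it could look like. Persisting the refreshed sessionId through a MyPref.writePrefs() helper is an assumption, mirroring how the rest of the code reads it:
// Hypothetical sketch of the result callback for joinApplication(); adjust to
// however your app actually stores the session id (writePrefs is assumed here).
private final ResultCallback<Cast.ApplicationConnectionResult> connectionResultCallback =
        new ResultCallback<Cast.ApplicationConnectionResult>() {
            @Override
            public void onResult(Cast.ApplicationConnectionResult result) {
                if (result.getStatus().isSuccess()) {
                    isCastConnected = true;
                    // Keep the (possibly refreshed) session id for the next reconnect.
                    MyPref.getInstance(homeScreenActivity)
                            .writePrefs(MyPref.CAST_SESSION_ID, result.getSessionId());
                    // From here you can attach a RemoteMediaPlayer and call requestStatus()
                    // to resume media control, as in the previous answer.
                } else {
                    isCastConnected = false;
                }
            }
        };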