I have implemented the ExoPlayer in my application using the example from the Codelab : https://codelabs.developers.google.com/codelabs/exoplayer-intro/#3, along with the example from https://medium.com/google-exoplayer/playing-ads-with-exoplayer-and-ima-868dfd767ea, the only difference is that I use AdsMediaSource instead of the deprecated ImaAdsMediaSource.
My Implementation is this:
class HostVideoFullFragment : Fragment(), AdsMediaSource.MediaSourceFactory {

    /** Content types this factory can build sources for (queried by [AdsMediaSource]). */
    override fun getSupportedTypes() = intArrayOf(C.TYPE_DASH, C.TYPE_HLS, C.TYPE_OTHER)

    /**
     * Builds the right [MediaSource] for [uri] from its inferred content type.
     * Called by [AdsMediaSource] for both the content URI and the ad URIs.
     *
     * @throws IllegalStateException for unsupported content types.
     */
    override fun createMediaSource(uri: Uri?, handler: Handler?, listener: MediaSourceEventListener?): MediaSource {
        // FIX: annotations must be written with '@' — the '#' was a paste
        // artifact and did not compile.
        @C.ContentType val type = Util.inferContentType(uri)
        return when (type) {
            C.TYPE_DASH -> {
                DashMediaSource.Factory(
                    DefaultDashChunkSource.Factory(mediaDataSourceFactory),
                    manifestDataSourceFactory)
                    .createMediaSource(uri, handler, listener)
            }
            C.TYPE_HLS -> {
                HlsMediaSource.Factory(mediaDataSourceFactory)
                    .createMediaSource(uri, handler, listener)
            }
            C.TYPE_OTHER -> {
                ExtractorMediaSource.Factory(mediaDataSourceFactory)
                    .createMediaSource(uri, handler, listener)
            }
            else -> throw IllegalStateException("Unsupported type for createMediaSource: $type")
        }
    }

    // Playback state that must survive player release/re-creation.
    private var player: SimpleExoPlayer? = null
    private lateinit var playerView: SimpleExoPlayerView
    private lateinit var binding: FragmentHostVideoFullBinding
    private var playbackPosition: Long = 0
    private var currentWindow: Int = 0
    private var playWhenReady = true
    // Set when playback failed; a seek while in this state updates the resume position.
    private var inErrorState: Boolean = false
    private lateinit var adsLoader: ImaAdsLoader
    private lateinit var manifestDataSourceFactory: DataSource.Factory
    private lateinit var mediaDataSourceFactory: DataSource.Factory

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Initialize the IMA ads loader with the sample VMAP ad tag.
        adsLoader = ImaAdsLoader(activity as Context, Uri.parse("https://pubads.g.doubleclick.net/gampad/ads?sz=640x480&iu=/124319096/external/ad_rule_samples&ciu_szs=300x250&ad_rule=1&impl=s&gdfp_req=1&env=vp&output=vmap&unviewed_position_start=1&cust_params=deployment%3Ddevsite%26sample_ar%3Dpremidpost&cmsid=496&vid=short_onecue&correlator="))
        manifestDataSourceFactory = DefaultDataSourceFactory(
            context, Util.getUserAgent(context, "BUO-APP")) //TODO change the applicationName with the right application name
        mediaDataSourceFactory = DefaultDataSourceFactory(
            context,
            Util.getUserAgent(context, "BUO-APP"), //TODO change the applicationName with the right application name
            DefaultBandwidthMeter())
    }

    /**
     * Creates the player if needed, binds it to the view, and prepares the
     * ad-wrapped media source, resuming from a previously saved position.
     */
    private fun initializePlayer() {
        // The player can go back to null when we release resources, so re-create on demand.
        if (player == null) {
            player = ExoPlayerFactory.newSimpleInstance(DefaultRenderersFactory(activity as Context),
                DefaultTrackSelector())
        }
        val uri = Uri.parse(videoURl)
        val mediaSourceWithAds = buildMediaSourceWithAds(uri)
        // Bind the view from the xml to the SimpleExoPlayer instance.
        playerView.player = player
        // Add the listener that listens for errors.
        player?.addListener(PlayerEventListener())
        // FIX: after clearResumePosition() the window is C.INDEX_UNSET and
        // seeking to it is invalid. Only seek when a real resume position
        // exists, and keep it across prepare() (resetPosition = false), as in
        // the official ExoPlayer demo.
        val haveResumePosition = currentWindow != C.INDEX_UNSET
        if (haveResumePosition) {
            player?.seekTo(currentWindow, playbackPosition)
        }
        player?.prepare(mediaSourceWithAds, !haveResumePosition, false)
        player?.playWhenReady = playWhenReady
        // We got here without an error, therefore clear the error state.
        inErrorState = false
        // Re-update the retry button since this call may come from a retry click.
        updateRetryButton()
    }

    /** Listens for player events: discontinuities while in error, and playback errors. */
    private inner class PlayerEventListener : Player.DefaultEventListener() {

        /** Saves the current window/position so a retry can resume from there. */
        fun updateResumePosition() {
            player?.let {
                // Use the non-null receiver captured by let instead of player!!.
                currentWindow = it.currentWindowIndex
                playbackPosition = Math.max(0, it.contentPosition)
            }
        }

        override fun onPlayerStateChanged(playWhenReady: Boolean, playbackState: Int) {
            //The player state has ended
            //TODO check if there is going to be a UI change here
            // if (playbackState == Player.STATE_ENDED) {
            //    showControls()
            // }
            // updateButtonVisibilities()
        }

        // FIX: annotation must use '@', not '#' (paste artifact).
        override fun onPositionDiscontinuity(@Player.DiscontinuityReason reason: Int) {
            if (inErrorState) {
                // This will only occur if the user has performed a seek whilst in the error state. Update
                // the resume position so that if the user then retries, playback will resume from the
                // position to which they seek.
                updateResumePosition()
            }
        }

        /** Maps playback errors to a user-facing message and updates error state. */
        override fun onPlayerError(e: ExoPlaybackException?) {
            var errorString: String? = null
            // Check what the error was so we can show the corresponding problem.
            if (e?.type == ExoPlaybackException.TYPE_RENDERER) {
                val cause = e.rendererException
                if (cause is MediaCodecRenderer.DecoderInitializationException) {
                    // Special case for decoder initialization failures.
                    errorString = if (cause.decoderName == null) {
                        when {
                            cause.cause is MediaCodecUtil.DecoderQueryException -> getString(R.string.error_querying_decoders)
                            cause.secureDecoderRequired -> getString(R.string.error_no_secure_decoder,
                                cause.mimeType)
                            else -> getString(R.string.error_no_decoder,
                                cause.mimeType)
                        }
                    } else {
                        getString(R.string.error_instantiating_decoder,
                            cause.decoderName)
                    }
                }
            }
            if (errorString != null) {
                // Show the toast with the proper error.
                Toast.makeText(activity as Context, errorString, Toast.LENGTH_LONG).show()
            }
            inErrorState = true
            if (isBehindLiveWindow(e)) {
                // The live window moved past our position: restart from the default position.
                clearResumePosition()
                initializePlayer()
            } else {
                updateResumePosition()
                updateRetryButton()
            }
        }
    }

    /** Clears the resume position after an unrecoverable error. */
    private fun clearResumePosition() {
        currentWindow = C.INDEX_UNSET
        playbackPosition = C.TIME_UNSET
    }

    /**
     * True when [e] is a source error caused (at any depth) by falling behind
     * the live window — needed for multi-window/live support.
     */
    private fun isBehindLiveWindow(e: ExoPlaybackException?): Boolean {
        if (e?.type != ExoPlaybackException.TYPE_SOURCE) {
            return false
        }
        var cause: Throwable? = e.sourceException
        while (cause != null) {
            if (cause is BehindLiveWindowException) {
                return true
            }
            cause = cause.cause
        }
        return false
    }

    /**
     * Wraps the plain content source for [uri] in an [AdsMediaSource] that
     * renders ads from [adsLoader] over the player view's overlay.
     */
    private fun buildMediaSourceWithAds(uri: Uri): MediaSource {
        /*
         * This content media source is the video itself, without the ads.
         * DefaultHttpDataSourceFactory only needs a user-agent string; the
         * default DefaultExtractorFactory supports almost all non-adaptive
         * audio/video formats on Android.
         * */
        val contentMediaSource = ExtractorMediaSource.Factory(
            DefaultHttpDataSourceFactory("BUO-APP")).createMediaSource(uri) //TODO change the user agent
        return AdsMediaSource(
            contentMediaSource,
            /* adMediaSourceFactory= */ this,
            adsLoader,
            playerView.overlayFrameLayout,
            /* eventListener= */ null, null)
    }

    override fun onStart() {
        super.onStart()
        // API 24+ supports multi-window: grab the player as early as onStart.
        if (Util.SDK_INT > 23) {
            initializePlayer()
        }
    }

    override fun onResume() {
        super.onResume()
        hideSystemUi()
        /*
         * Starting with API level 24 Android supports multiple windows.
         * As our app can be visible but not active in split window mode, we need to initialize the player in onStart.
         * Before API level 24 we wait as long as possible until we grab resources, so we wait until onResume before initializing the player.
         * */
        if ((Util.SDK_INT <= 23 || player == null)) {
            initializePlayer()
        }
    }
}
The ad never shows, and when it does it shows a rendering error E/ExoPlayerImplInternal: Renderer error. which never allows the video to show. I've run the examples from the IMA ads https://developers.google.com/interactive-media-ads/docs/sdks/android/ example code and it doesn't work either. Has anyone implemented the ExoPlayer successfully with the latest ExoPlayer library version?
Please Help. Thanks!
When on an emulator, be sure to enable gpu rendering on the virtual device
The problem is that the emulator can not render videos. Therefore it wasn't showing the ads or the video. Run the app on a phone and it will work
Related
I am using USB to communicate audio and video from some external hardware.
Video is displayed on a Surface as long as the app is in foreground but audio should keep playing even when in background.
Using threads doesn't seem to work; when the app goes to the background the audio starts to stutter and sound very bad. I believe I need to use a service — am I wrong?
But from all the online documentation i can't figure out, what is the best type of service (or any) to use for such long running application and the best way to communicate the large amount of data between its Activity & Surface?
I looked into plain services, WorkManagers, ForegroundService, ServiceSocket and many others
Here is the current code i am using to run the streamed audio from the USB. It is using a coroutine and as said, audio starts to stutter when app in background
/**
 * Plays raw audio packets streamed from USB hardware through an AudioTrack.
 *
 * Packets arrive via [write] into a fixed-size ring buffer; a single consumer
 * coroutine drains them in [startThread]. Each packet carries an 8-byte header
 * that is skipped before the PCM payload is written to the AudioTrack.
 *
 * NOTE(review): readIndex/writeIndex/dataList are mutated from multiple
 * coroutines on Dispatchers.IO with no synchronization beyond the channel
 * hand-off — verify this cannot race. Also, running this in a plain
 * CoroutineScope gives the process no foreground priority, which is
 * presumably why audio stutters when the app is backgrounded.
 */
class AudioTrackPlayer {
// Hand-off channel: write() sends one token per queued packet; the consumer
// coroutine wakes on each token.
private val channel = Channel<Boolean>(capacity = 15)
// Ring buffer of raw packets (header + PCM data).
private var dataList = mutableListOf<ByteArray>()
// Read/write cursors into the ring buffer; both wrap at DATA_LIST_SIZE.
private var readIndex = 0
private var writeIndex = 0
private val scope = CoroutineScope(Job() + Dispatchers.IO)
private var audio: AudioTrack? = null
// Size of the last PCM payload written (packet size minus 8-byte header).
var lastSize = 0
var isPlaying = false
companion object {
// Ring-buffer capacity; cursors wrap back to 0 at this value.
private const val DATA_LIST_SIZE = 11
}
init {
// Start the single consumer loop for the lifetime of this player.
scope.launch {
startThread()
}
}
// Configures the AudioTrack for the given channel count and sample rate.
// (Body elided in the original snippet.)
fun config(channels: Int, sampleRate: Int):Boolean {
//......
}
// Consumer loop: waits for a token per packet, then writes the packet's PCM
// payload (skipping the 8-byte header) to the AudioTrack.
private suspend fun startThread() = withContext(Dispatchers.IO) {
var data:ByteArray
var isPlayingLast = false
var finalByteArray:ByteArray
channel.consumeEach {
if (audio != null) {
if (isPlaying) {
if (readIndex != writeIndex) {
isPlayingLast = true
data = dataList[readIndex]
// Payload size excludes the 8-byte packet header.
lastSize = data.size - 8
if (audio?.playState != AudioTrack.PLAYSTATE_PLAYING) {
audio?.play()
}
audio?.write(data, 8, lastSize)
readIndex++
if (readIndex == DATA_LIST_SIZE) {
readIndex = 0
}
}
} else {
// Playback just stopped: pad with silence (double the last payload
// size), then stop/flush and resync the cursors.
if (isPlayingLast) {
isPlayingLast = false
finalByteArray = ByteArray(lastSize * 2)
if (audio?.playState != AudioTrack.PLAYSTATE_PLAYING) {
audio!!.play()
}
audio!!.write(finalByteArray, 0, finalByteArray.size)
audio!!.stop()
audio!!.flush()
writeIndex = readIndex
}
}
}
}
}
// Producer: queues a packet at the write cursor and signals the consumer.
// Packets are silently dropped while isPlaying is false.
fun write(data: ByteArray) {
scope.launch {
if (isPlaying) {
dataList.add(writeIndex, data)
writeIndex++
if (writeIndex == DATA_LIST_SIZE) {
writeIndex = 0
}
channel.send(true)
}
}
}
// Starts playback. (Body elided in the original snippet.)
fun play() {
//...
}
// Stops playback. (Body elided in the original snippet.)
fun stop() {
//....
}
}
I am using ExoPlayer to create my own music player. I am also adding the option to download the track, but I have a problem when I try to download the track that I am playing. I add a notification to the download to check its progress, and it appears but the download doesn't even start. What I think is that there might be some kind of conflict between the buffering cache and the download, since they are stored in the same folder.
To download the tracks I do the following:
/**
 * Queues a download for [track] unless it is already cached.
 *
 * Checks the cache asynchronously via getIfTrackIsCached; for uncached tracks
 * it builds a progressive DownloadRequest keyed by track.id and hands it to
 * ExoPlayerDownloadService (foreground = false). The track title is attached
 * as UTF-8 request data — presumably for display in notifications; verify
 * against the service.
 */
override fun addDownloadTrack(track: Track) {
getIfTrackIsCached.run({ isCached ->
if (!isCached) {
// Title bytes travel with the request as opaque custom data.
val data = Util.toByteArray(track.title.byteInputStream())
val downloadRequest =
DownloadRequest(track.id, DownloadRequest.TYPE_PROGRESSIVE, Uri.parse(track.href), Collections.emptyList(), track.id, data)
DownloadService.sendAddDownload(context, ExoPlayerDownloadService::class.java, downloadRequest, false)
}
}, ::onError, GetIfTrackIsCached.Params(track.id))
}
This is the DownloadService:
/**
 * Foreground [DownloadService] driving the injected [DownloadManager] and
 * surfacing progress, completion, and failure notifications.
 */
class ExoPlayerDownloadService : DownloadService(
    FOREGROUND_NOTIFICATION_ID,
    DEFAULT_FOREGROUND_NOTIFICATION_UPDATE_INTERVAL,
    Constants.CHANNEL_DOWNLOAD_ID,
    R.string.exo_download_notification_channel_name) {

    private val manager: DownloadManager by inject()
    // Publishes cache-state changes so players can switch to the local copy.
    private val channelIsCached: ChannelPublisher<CachedMedia> by inject(PUBLISHER_IS_CACHED)
    private val notificationHelper: DownloadNotificationHelper by inject()
    // Terminal notifications get fresh, increasing ids so they never replace
    // the foreground progress notification.
    private var nextNotificationId: Int = FOREGROUND_NOTIFICATION_ID

    override fun onCreate() {
        super.onCreate()
        // Configure parallelism only before the manager has initialized.
        if (!manager.isInitialized) {
            manager.maxParallelDownloads = MAX_PARALLEL_DOWNLOADS
        }
    }

    override fun getDownloadManager(): DownloadManager = manager

    /** Builds the sticky progress notification listing every active download. */
    override fun getForegroundNotification(downloads: MutableList<Download>?): Notification {
        var text = ""
        var index = 1
        // One numbered line per download (numbering only when there are several).
        downloads?.forEach { text += "${if (downloads.size > 1) "${index++} - " else ""}${Util.fromUtf8Bytes(it.request.data)}\n" }
        return notificationHelper.buildProgressNotification(R.drawable.ic_stat_downloading, null, text, downloads)
    }

    override fun getScheduler(): Scheduler? = null

    /** Posts a terminal notification and publishes cache state on completion. */
    override fun onDownloadChanged(download: Download?) {
        val notification = when (download?.state) {
            Download.STATE_COMPLETED -> {
                channelIsCached.publish(CachedMedia(download.request.id, true))
                notificationHelper.buildDownloadCompletedNotification(R.drawable.ic_stat_download_complete, null, Util.fromUtf8Bytes(download.request.data))
            }
            Download.STATE_FAILED ->
                notificationHelper.buildDownloadFailedNotification(R.drawable.ic_stat_download_failed, null, Util.fromUtf8Bytes(download.request.data))
            else -> null
        }
        // FIX: qualified `this` must use '@', not '#' (paste artifact).
        notification?.let { NotificationUtil.setNotification(this@ExoPlayerDownloadService, ++nextNotificationId, it) }
    }

    companion object {
        private const val MAX_PARALLEL_DOWNLOADS = 3
        private const val FOREGROUND_NOTIFICATION_ID = 2000
    }
}
And to create the cache I use this:
SimpleCache(File(androidContext().cacheDir, CACHE_MEDIA_FOLDER), NoOpCacheEvictor(), get<DatabaseProvider>())
How can I avoid conflicts between buffering cache and downloaded files?
I had this issue also, and found the solution!
The downloading documentation states
The CacheDataSource.Factory should be configured as read-only to avoid downloading that content as well during playback.
To do this you must call setCacheWriteDataSinkFactory(null) on your CacheDataSource.Factory object.
This will prevent the stream from writing to the cache, allowing the downloader to write as expected.
I am using interactive video broadcasting in my app.
I am attaching class in which I am using live streaming.
I am getting the audio issue when I go back from the live streaming screen to the previous screen. I still listen to the audio of the host.
previously I was using leave channel method and destroying rtc client object, but after implementing this when I go back from streaming class then it closes all users screen who are using this app because of leave channel method. after that, I removed this option from my on destroy method.
Now I am using disable audio method which disables the audio but when I open live streaming class it doesn't enable audio. Enable audio method is not working I also used the mute audio local stream method and rtc handler on user mute audio method.
I am getting error--
"LiveStreamingActivity has leaked IntentReceiver io.agora.rtc.internal.AudioRoutingController$HeadsetBroadcastReceiver#101a7a7
that was originally registered here. Are you missing a call to
unregisterReceiver()? android.app.IntentReceiverLeaked: Activity
com.allin.activities.home.homeActivities.LiveStreamingActivity has
leaked IntentReceiver
io.agora.rtc.internal.AudioRoutingController$HeadsetBroadcastReceiver#101a7a7
that was originally registered here. Are you missing a call to
unregisterReceiver()?"
Receiver is registering in SDK and exception is coming inside the SDK that is jar file I can't edit.
Please help me resolve this issue, as I need to publish the app on the Play Store.
// First attempt: leaving the channel and destroying the engine here stopped
// the stream on every other device too, so those calls stay commented out.
override fun onDestroy() {
    /* if (mRtcEngine != null) {
        leaveChannel()
        RtcEngine.destroy(mRtcEngine)
        mRtcEngine = null
    }*/
    // Second attempt: just disable audio so this user stops hearing the host.
    // FIX: the original explanation was pasted across uncommented lines,
    // which broke compilation; it is folded into comments here.
    if (mRtcEngine != null) {
        mRtcEngine!!.disableAudio()
    }
    super.onDestroy()
}
// When returning to the live-streaming screen everything re-initializes, but
// audio was still not audible after enableAudio() (the reported symptom).
override fun onResume() {
    super.onResume()
    Log.e("resume", "resume")
    // Safe call replaces the explicit null check + '!!' — same behavior.
    mRtcEngine?.enableAudio()
    // mRtcEngine!!.resumeAudio()
}
code I am using
// Agora RTC engine and event-handler initialization --------------------------
private var mRtcEngine: RtcEngine? = null

private var mRtcEventHandler = object : IRtcEngineEventHandler() {

    // FIX: annotations use '@', not '#' (paste artifact).
    @SuppressLint("LongLogTag")
    override fun onFirstRemoteVideoDecoded(uid: Int, width: Int,
        height: Int, elapsed: Int) {
    }

    /** Broadcaster went offline: tear down the renderer and engine, then finish. */
    override fun onUserOffline(uid: Int, reason: Int) {
        runOnUiThread {
            // reason == 0 means the user went offline by logging out.
            try {
                if (mUid == uid) {
                    if (surfaceView?.parent != null)
                        (surfaceView?.parent as ViewGroup).removeAllViews()
                    if (mRtcEngine != null) {
                        leaveChannel()
                        // FIX: RtcEngine.destroy() is the static, parameterless
                        // call in the Agora SDK (see the accepted advice below).
                        RtcEngine.destroy()
                        mRtcEngine = null
                    }
                    setResult(IntentConstants.REQUEST_CODE_LIVE_STREAMING)
                    finish()
                }
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
    }

    override fun onUserMuteVideo(uid: Int, muted: Boolean) {
        runOnUiThread {
            // onRemoteUserVideoMuted(uid, muted);
            Log.e("video","muted")
        }
    }

    override fun onAudioQuality(uid: Int, quality: Int, delay:
        Short, lost: Short) {
        super.onAudioQuality(uid, quality, delay, lost)
        Log.e("", "")
    }

    /** Remote user joined: remember the uid and attach the remote video view. */
    override fun onUserJoined(uid: Int, elapsed: Int) {
        // super.onUserJoined(uid, elapsed)
        mUid = uid
        runOnUiThread {
            try {
                setupRemoteVideo(mUid!!)
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
        Log.e("differnt_uid----", mUid.toString())
    }
}
// Lazily bootstraps the RTC engine: create and configure it only when no
// engine instance exists yet.
private fun initAgoraEngineAndJoinChannel() {
    if (mRtcEngine != null) return
    initializeAgoraEngine()
    setupVideoProfile()
}
// Creates the RtcEngine instance with the app id and event handler.
// FIX: the '@Throws' annotation was pasted as '#Throws' and did not compile;
// also removed the unused local holding RtcEngine.getSdkVersion().
@Throws(Exception::class)
private fun initializeAgoraEngine() {
    try {
        mRtcEngine = RtcEngine.create(baseContext, AgoraConstants.APPLICATION_ID, mRtcEventHandler)
    } catch (e: Exception) {
        // Log.e(LOG_TAG, Log.getStackTraceString(e));
        throw RuntimeException("NEED TO check rtc sdk init fatal error\n" + Log.getStackTraceString(e))
    }
}
// Configures the engine for live-broadcast audience mode and joins the channel.
// FIX: '@Throws' was pasted as '#Throws'.
@Throws(Exception::class)
private fun setupVideoProfile() {
    //mRtcEngine?.muteAllRemoteAudioStreams(true)
    // mLogger.log("channelName account = " + channelName + ",uid = " + 0);
    mRtcEngine?.enableVideo()
    //mRtcEngine.clearVideoCompositingLayout();
    mRtcEngine?.enableLocalVideo(false)      // audience never publishes video
    mRtcEngine?.setEnableSpeakerphone(false)
    mRtcEngine?.muteLocalAudioStream(true)   // audience never publishes audio
    // NOTE(review): passing a channel-profile constant to setVideoProfile
    // looks wrong — confirm the intended video-profile constant.
    mRtcEngine?.setVideoProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING, true)
    // FIX: per the Agora docs, channel profile and client role must be set
    // BEFORE joinChannel(); the original joined first, so the role/profile
    // could go unapplied for the session.
    mRtcEngine?.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING)
    mRtcEngine?.setClientRole(Constants.CLIENT_ROLE_AUDIENCE,"")
    joinChannel()
    val speaker = mRtcEngine?.isSpeakerphoneEnabled
    val camerafocus = mRtcEngine?.isCameraAutoFocusFaceModeSupported
    Log.e("", "")
}
// Attaches the remote broadcaster's video to the container and configures
// audio routing based on whether a wired headset is connected.
// FIX: '@Throws' and 'this@LiveStreamingActivity' were pasted with '#'.
@Throws(Exception::class)
private fun setupRemoteVideo(uid: Int) {
    val container = findViewById<FrameLayout>(R.id.fl_video_container)
    // Only ever attach a single remote renderer.
    if (container.childCount >= 1) {
        return
    }
    surfaceView = RtcEngine.CreateRendererView(baseContext)
    container.addView(surfaceView)
    mRtcEngine?.setupRemoteVideo(VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid))
    mRtcEngine?.setRemoteVideoStreamType(uid, 1)
    mRtcEngine?.setCameraAutoFocusFaceModeEnabled(false)
    mRtcEngine?.muteRemoteAudioStream(uid, false)
    mRtcEngine?.adjustPlaybackSignalVolume(0)
    // mRtcEngine.setVideoProfile(Constants.VIDEO_PROFILE_180P, false); // Earlier than 2.3.0
    surfaceView?.tag = uid // for mark purpose
    val audioManager: AudioManager =
        this@LiveStreamingActivity.getSystemService(Context.AUDIO_SERVICE) as AudioManager
    //audioManager.mode = AudioManager.MODE_IN_CALL
    val isConnected: Boolean = audioManager.isWiredHeadsetOn
    if (isConnected) {
        // Headset route: speakerphone off, in-ear monitoring on.
        /* audioManager.isSpeakerphoneOn = false
        audioManager.isWiredHeadsetOn = true*/
        mRtcEngine?.setEnableSpeakerphone(false)
        mRtcEngine?.setDefaultAudioRoutetoSpeakerphone(false)
        mRtcEngine?.setSpeakerphoneVolume(0)
        mRtcEngine?.enableInEarMonitoring(true)
        // Sets the in-ear monitoring volume to 200% of original volume.
        mRtcEngine?.setInEarMonitoringVolume(200)
        mRtcEngine?.adjustPlaybackSignalVolume(200)
    } else {
        // Speaker route: speakerphone on, in-ear monitoring off.
        /* audioManager.isSpeakerphoneOn = true
        audioManager.isWiredHeadsetOn = false*/
        mRtcEngine?.setEnableSpeakerphone(true)
        mRtcEngine?.setDefaultAudioRoutetoSpeakerphone(true)
        mRtcEngine?.setSpeakerphoneVolume(50)
        mRtcEngine?.adjustPlaybackSignalVolume(50)
        mRtcEngine?.enableInEarMonitoring(false)
        mRtcEngine?.setInEarMonitoringVolume(0)
    }
    Log.e("", "")
}
// Joins the configured channel.
// FIX: '@Throws' was pasted as '#Throws'.
@Throws(Exception::class)
private fun joinChannel() {
    // Null token only works when the App Certificate is disabled; uid 0 lets
    // the SDK generate a uid for us.
    mRtcEngine?.joinChannel(
        null,
        AgoraConstants.CHANNEL_NAME,
        "Extra Optional Data",
        0
    ) // if you do not specify the uid, we will generate the uid for you
}
// Leaves the current channel.
// FIX: '@Throws' was pasted as '#Throws'.
@Throws(Exception::class)
private fun leaveChannel() {
    // NOTE(review): '!!' throws if the engine was already destroyed; callers
    // in this snippet guard with a null check before invoking this.
    mRtcEngine!!.leaveChannel()
}
I think first you want to put setupRemoteVideo in onFirstRemoteVideoDecoded callback instead of the onUserJoined callback. Also, in the onDestroy callback, you should call RtcEngine.destroy() instead of RtcEngine.destroy(mRtcEngine).
I try to cast youtube video from my android app to chromecast or smart tv through miracast.
But I can cast only source url for video like https://media.w3.org/2010/05/sintel/trailer.mp4
How can I cast web page with youtube or vimeo url video?
I know that YouTube app can cast video to some smart tv without chromecast and it looks like YouTube TV page. For example here https://www.youtube.com/watch?v=x5ImUYDjocY
I try to use Presentation API to set WebView in it:
/**
 * Presentation shown on a secondary (cast) display that loads a YouTube page
 * into a JavaScript-enabled WebView.
 *
 * FIX: '@TargetApi' was pasted as '#TargetApi'.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
class CastPresentation constructor(outerContext: Context?, display: Display?) : Presentation(outerContext, display) {
    override fun onCreate(savedInstanceState: Bundle?) {
        // FIX: call through to super BEFORE setting the content view, matching
        // the platform Presentation sample; the original called it last.
        super.onCreate(savedInstanceState)
        val wv = WebView(context)
        wv.settings.javaScriptEnabled = true
        wv.webChromeClient = WebChromeClient()
        wv.loadUrl("https://www.youtube.com/watch?v=DxGLn_Cu5l0")
        setContentView(wv)
    }
}
But it has no effect. I don't understand how to use it.
This is how I use it:
/**
 * Bridges MediaRouter route discovery/selection to a [RemotePlaybackClient]
 * for remote playback and a [CastPresentation] on the route's display.
 *
 * FIX: every '@' in annotations and qualified `this` references was pasted as
 * '#' and did not compile; restored throughout.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
class CastDelegate constructor(private val activity: AppCompatActivity) {

    private var mediaRouter: MediaRouter? = null
    private var mediaRouteSelector: MediaRouteSelector? = null
    // Currently selected route and its playback client.
    private var route: MediaRouter.RouteInfo? = null
    private var remotePlaybackClient: RemotePlaybackClient? = null
    private var presentation: Presentation? = null

    // Reacts to route (un)selection and presentation-display changes.
    private val mediaRouterCallback = object : MediaRouter.Callback() {

        override fun onRouteSelected(router: MediaRouter, route: MediaRouter.RouteInfo) {
            Timber.d("CastDelegate --> onRouteSelected: route=$route")
            if (route.supportsControlCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK)) {
                // Stop local playback (if necessary)
                // ...
                // Save the new route
                this@CastDelegate.route = route
                // Attach a new playback client
                remotePlaybackClient = RemotePlaybackClient(activity, this@CastDelegate.route)
                // Start remote playback (if necessary)
                // ...
                updatePresentation()
                // Plays a hard-coded sample; a plain media URL is required here,
                // not a YouTube/Vimeo page URL.
                val uri = Uri.parse("https://media.w3.org/2010/05/sintel/trailer.mp4")
                remotePlaybackClient?.play(uri, null, null, 0, null, object: RemotePlaybackClient.ItemActionCallback() {
                    override fun onResult(data: Bundle?, sessionId: String?, sessionStatus: MediaSessionStatus?, itemId: String?, itemStatus: MediaItemStatus?) {
                        super.onResult(data, sessionId, sessionStatus, itemId, itemStatus)
                    }
                })
            }
        }

        override fun onRouteUnselected(router: MediaRouter, route: MediaRouter.RouteInfo, reason: Int) {
            Timber.d("CastDelegate --> onRouteUnselected: route=$route")
            if (route.supportsControlCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK)) {
                // Changed route: tear down previous client
                this@CastDelegate.route?.also {
                    remotePlaybackClient?.release()
                    remotePlaybackClient = null
                }
                // Save the new route
                this@CastDelegate.route = route
                updatePresentation()
                when (reason) {
                    MediaRouter.UNSELECT_REASON_ROUTE_CHANGED -> {
                        // Resume local playback (if necessary)
                        // ...
                    }
                }
            }
        }

        override fun onRoutePresentationDisplayChanged(router: MediaRouter?, route: MediaRouter.RouteInfo?) {
            updatePresentation()
        }
    }

    fun onCreate() {
        // Get the media router service.
        mediaRouter = MediaRouter.getInstance(activity)
        // Create a route selector for the type of routes your app supports.
        mediaRouteSelector = MediaRouteSelector.Builder()
            // These are the framework-supported intents
            .addControlCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK)
            .build()
        // val selectedRoute = mediaRouter?.selectedRoute ?: return
        // val presentationDisplay = selectedRoute.presentationDisplay ?: return
        // presentation = CastPresentation(activity, presentationDisplay)
        // presentation?.show()
    }

    fun onStart() {
        mediaRouteSelector?.also { selector ->
            mediaRouter?.addCallback(selector, mediaRouterCallback,
                MediaRouter.CALLBACK_FLAG_REQUEST_DISCOVERY)
        }
        updatePresentation()
    }

    fun onStop() {
        mediaRouter?.removeCallback(mediaRouterCallback)
        presentation?.dismiss()
        presentation = null
    }

    fun onCreateOptionsMenu(menu: Menu?, inflater: MenuInflater?) {
        // Attach the MediaRouteSelector to the menu item
        val mediaRouteMenuItem = menu?.findItem(R.id.media_route_menu_item)
        val mediaRouteActionProvider = MenuItemCompat.getActionProvider(mediaRouteMenuItem) as MediaRouteActionProvider
        // Attach the MediaRouteSelector that you built in onCreate()
        mediaRouteSelector?.also(mediaRouteActionProvider::setRouteSelector)
    }

    /** Syncs the presentation with the currently selected route's display. */
    private fun updatePresentation() {
        // Get the current route and its presentation display.
        val selectedRoute = mediaRouter?.selectedRoute
        val presentationDisplay = selectedRoute?.presentationDisplay
        // Dismiss the current presentation if the display has changed.
        if (presentation?.display != presentationDisplay) {
            Timber.d("CastDelegate --> Dismissing presentation because the current route no longer " + "has a presentation display.")
            presentation?.dismiss()
            presentation = null
        }
        // Show a new presentation if needed.
        if (presentation == null && presentationDisplay != null) {
            Timber.d("CastDelegate --> Showing presentation on display: $presentationDisplay")
            presentation = CastPresentation(activity, presentationDisplay)
            try {
                presentation?.show()
            } catch (ex: WindowManager.InvalidDisplayException) {
                Timber.d("CastDelegate --> Couldn't show presentation! Display was removed in the meantime.", ex)
                presentation = null
            }
        }
    }
}
As a result now playing video https://media.w3.org/2010/05/sintel/trailer.mp4 from
remotePlaybackClient?.play(...)
I have the following code for music recognition. I am using intent service to do all the music recognition in the service. I have done all the basic steps like adding all the permissions required and adding the ACRCloud android SDK in the project.
/**
 * IntentService that identifies the currently playing song via the ACRCloud
 * SDK and reports results through [SongIdentificationCallback].
 *
 * NOTE(review): holding a presenter-backed callback inside a Service risks
 * leaking the activity — see the discussion following this snippet.
 */
class SongIdentifyService(discoverPresenter : DiscoverPresenter? = null) : IACRCloudListener , IntentService("SongIdentifyService") {

    private val callback : SongIdentificationCallback? = discoverPresenter
    private val mClient : ACRCloudClient by lazy { ACRCloudClient() }
    private val mConfig : ACRCloudConfig by lazy { ACRCloudConfig() }
    // True once the client accepted our config.
    private var initState : Boolean = false
    // True while a recognition is in flight.
    private var mProcessing : Boolean = false

    override fun onHandleIntent(intent: Intent?) {
        Log.d("SongIdentifyService", "onHandeIntent called" )
        setUpConfig()
        addConfigToClient()
        if (callback != null) {
            startIdentification(callback)
        }
    }

    /** Fills the ACRCloud config with credentials and remote recognition mode. */
    public fun setUpConfig(){
        Log.d("SongIdentifyService", "setupConfig called")
        // FIX: qualified `this` must use '@', not '#' (paste artifact).
        this.mConfig.acrcloudListener = this@SongIdentifyService
        this.mConfig.host = "some-host"
        this.mConfig.accessKey = "some-accesskey"
        this.mConfig.accessSecret = "some-secret"
        this.mConfig.protocol = ACRCloudConfig.ACRCloudNetworkProtocol.PROTOCOL_HTTP // PROTOCOL_HTTPS
        this.mConfig.reqMode = ACRCloudConfig.ACRCloudRecMode.REC_MODE_REMOTE
    }

    /** Starts identifying/discovering the song that is currently playing. */
    fun startIdentification(callback: SongIdentificationCallback)
    {
        Log.d("SongIdentifyService", "startIdentification called")
        if(!initState)
        {
            Log.d("AcrCloudImplementation", "init error")
        }
        if(!mProcessing) {
            mProcessing = true
            if (!mClient.startRecognize()) {
                mProcessing = false
                Log.d("AcrCloudImplementation" , "start error")
            }
        }
    }

    /** Stops the in-flight identification, keeping the recorded audio. */
    fun stopIdentification()
    {
        Log.d("SongIdentifyService", "stopIdentification called")
        if(mProcessing)
        {
            mClient.stopRecordToRecognize()
        }
        mProcessing = false
    }

    /** Cancels the in-flight identification entirely. */
    fun cancelListeningToIdentifySong()
    {
        if(mProcessing)
        {
            mProcessing = false
            mClient.cancel()
        }
    }

    /** Hands the config to the client and enables 3s of pre-recording on success. */
    fun addConfigToClient(){
        Log.d("SongIdentifyService", "addConfigToClient called")
        this.initState = this.mClient.initWithConfig(this.mConfig)
        if(this.initState)
        {
            this.mClient.startPreRecord(3000)
        }
    }

    /** SDK callback: parses the JSON result and dispatches by status code. */
    override fun onResult(result: String?) {
        Log.d("SongIdentifyService", "onResult called")
        // FIX: Log.d requires a non-null message; `result` is nullable here.
        Log.d("SongIdentifyService", result ?: "null")
        mClient.cancel()
        mProcessing = false
        // FIX: parse into a new name instead of shadowing the parameter.
        val parsed = Gson().fromJson(result, SongIdentificationResult :: class.java)
        when (parsed.status.code) {
            3000 -> callback!!.onOfflineError()   // offline / network failure
            1001 -> callback!!.onSongNotFound()   // no match in the catalog
            0 -> callback!!.onSongFound(MusicDataMapper().convertFromDataModel(parsed))
            else -> callback!!.onGenericError()
        }
    }

    override fun onVolumeChanged(p0: Double) {
        TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
    }

    /** Outcome callbacks for a single identification attempt. */
    interface SongIdentificationCallback {
        // Called when the user is offline and music identification failed
        fun onOfflineError()
        // Called when a generic error occurs and music identification failed
        fun onGenericError()
        // Called when music identification completed but couldn't identify the song
        fun onSongNotFound()
        // Called when identification completed and a matching song was found
        fun onSongFound(song: Song)
    }
}
Now when I am starting the service I am getting the following error:
I checked the implementation of the ACRCloudClient and it extends the Android Activity class. ACRCloudClient also uses shared preferences (that's why I am getting a null pointer exception).
Since keeping a reference to an activity in a service is not a good idea, it's best to implement the above code in the activity. All the recognition work is done in a separate thread anyway inside the ACRCloudClient class, so there is no point in creating another service for it.