Android CameraX ViewPort

I use this code for CameraX binding. When I run the app for the first time, everything works fine. But when I close the app and start it again, I get "ViewPort is NULL". I don't understand why viewPort is null. What is wrong with my code?
@OptIn(markerClass = androidx.camera.lifecycle.ExperimentalUseCaseGroupLifecycle.class)
private void bindAllCameraUseCases() {
if (cameraProvider != null) {
// As required by CameraX API, unbinds all use cases before trying to re-bind any of them.
cameraProvider.unbindAll();
createPreviewUseCase();
createImageCaptureUseCase();
createAnalysisUseCase();
if (previewUseCase != null && analysisUseCase != null && imageCaptureUseCase != null) {
ViewPort viewPort = ((PreviewView)findViewById(R.id.preview_view)).getViewPort();
if (viewPort != null) {
@OptIn(markerClass = androidx.camera.core.ExperimentalUseCaseGroup.class)
UseCaseGroup useCaseGroup = new UseCaseGroup.Builder()
.addUseCase(previewUseCase)
.addUseCase(analysisUseCase)
.addUseCase(imageCaptureUseCase)
.setViewPort(viewPort)
.build();
camera = cameraProvider.bindToLifecycle(/* lifecycleOwner= */ this, cameraSelector, useCaseGroup);
LiveData<Integer> torchStateObserver = camera.getCameraInfo().getTorchState();
torchStateObserver.observe(this, new Observer<Integer>() {
@Override
public void onChanged(Integer state) {
torchState = state;
}
});
} else {
Toast.makeText(this, "VIEWPORT is NULL", Toast.LENGTH_SHORT).show();
}
}
}
}
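A likely explanation, offered as an assumption since the question doesn't show where bindAllCameraUseCases() is called: PreviewView.getViewPort() returns null until the view has been measured and laid out and its display rotation can be retrieved, so calling it too early in the lifecycle (for example straight from onCreate()) can race with layout. A minimal sketch that defers binding until after the first layout pass:
PreviewView previewView = findViewById(R.id.preview_view);
// post() runs after the view has completed a layout pass, at which
// point getViewPort() should no longer return null.
previewView.post(this::bindAllCameraUseCases);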

Related

Android webrtc still running after app exited with back button

I'm new to WebRTC. I'm using the AWS WebRTC demo with the Android Navigation Component. When I exit the app with the back button, I can see that WebRTC is still running; I get the following log:
EglRenderer: cameraSurfaceViewDropping frame - No surface
In my fragment, the onStop method is as follows:
Thread.setDefaultUncaughtExceptionHandler(null)
if (rootEglBase != null) {
rootEglBase!!.release()
rootEglBase = null
}
if (remoteView != null) {
remoteView!!.release()
remoteView = null
}
if (localPeer != null) {
localPeer!!.dispose()
localPeer = null
}
if (videoSource != null) {
videoSource!!.dispose()
videoSource = null
}
if (videoCapturer != null) {
try {
videoCapturer?.stopCapture()
videoCapturer?.dispose()
} catch (e: InterruptedException) {
Timber.e("Failed to stop webrtc video capture. $e ")
}
videoCapturer = null
}
if (client != null) {
this.client!!.disconnect()
this.client = null
}
peerConnectionFoundMap.clear()
pendingIceCandidatesMap.clear()
However, I only see the problem when I exit the app with the back button; if I kill the app, I don't get the log. Has anyone experienced this?
Thank you.
This is the way you should destroy your WebRTC session in onDestroy() or onStop():
if (videoCaptureAndroid != null) {
videoCaptureAndroid?.stopCapture()
videoCaptureAndroid = null
}
if (localPeer != null) {
localPeer?.close()
localPeer = null
}
if (videoSource != null) {
videoSource?.dispose()
videoSource = null
}
if (audioSource != null) {
audioSource?.dispose()
audioSource = null
}
if (localAudioTrack != null) {
localAudioTrack?.dispose()
localAudioTrack = null
}
if (currentRemoteMediaStream != null) {
currentRemoteMediaStream?.dispose()
currentRemoteMediaStream = null
}
if (localVideoView != null) {
localVideoView?.release()
localVideoView = null
}
if (remoteVideoView != null) {
remoteVideoView?.release()
remoteVideoView = null
}
rootEglBase?.release()

How to move object from Anchor to Anchor?

My use case is:
tap on the screen and save the point as the starting anchor
tap on the screen a second time and save the point as the end anchor
push a button that moves the object from the starting anchor to the end anchor
I've built my own node that uses an ObjectAnimator, similar to the solar system sample. My only problem is that I don't know how to determine the start and end points for the evaluator. My first thought was to take the x, y, z from the Pose of the start and end anchors:
Vector3 start = new Vector3(startAnchor.getPose().tx(), startAnchor.getPose().ty(), startAnchor.getPose().tz());
Vector3 end = new Vector3(endAnchor.getPose().tx(), endAnchor.getPose().ty(), endAnchor.getPose().tz());
…
movingAnimation.setObjectValues(start, end);
movingAnimation.setPropertyName("localPosition");
movingAnimation.setEvaluator(new Vector3Evaluator());
but when I do that, the animation runs from completely different places.
I haven't found any reference to built-in tools for such an operation.
I'm using Sceneform.
So the question is: how do I make a fluent animation (a simple slide is enough) from anchor A to anchor B?
I did this in the HelloSceneform sample. I created the first AnchorNode and added the "andy" node as a child. On the next tap, I created the end-position AnchorNode and started the animation to move to that position.
The thing to remember is that if you are using the positions of objects with different parents, you want to use worldPosition rather than localPosition.
private void onPlaneTap(HitResult hitResult, Plane plane, MotionEvent motionEvent) {
if (andyRenderable == null) {
return;
}
// Create the Anchor.
Anchor anchor = hitResult.createAnchor();
// Create the starting position.
if (startNode == null) {
startNode = new AnchorNode(anchor);
startNode.setParent(arFragment.getArSceneView().getScene());
// Create the andy node and add it to the anchor.
andy = new Node();
andy.setParent(startNode);
andy.setRenderable(andyRenderable);
} else {
// Create the end position and start the animation.
endNode = new AnchorNode(anchor);
endNode.setParent(arFragment.getArSceneView().getScene());
startWalking();
}
}
private void startWalking() {
objectAnimation = new ObjectAnimator();
objectAnimation.setAutoCancel(true);
objectAnimation.setTarget(andy);
// All the positions should be world positions
// The first position is the start, and the second is the end.
objectAnimation.setObjectValues(andy.getWorldPosition(), endNode.getWorldPosition());
// Use setWorldPosition to position andy.
objectAnimation.setPropertyName("worldPosition");
// The Vector3Evaluator interpolates between two Vector3 values and
// returns the next one. The default is to lerp.
objectAnimation.setEvaluator(new Vector3Evaluator());
// This makes the animation linear (smooth and uniform).
objectAnimation.setInterpolator(new LinearInterpolator());
// Duration in ms of the animation.
objectAnimation.setDuration(500);
objectAnimation.start();
}
/**
* This is an example activity that uses the Sceneform UX package to make common AR tasks easier.
*/
public class MainActivity extends AppCompatActivity {
private static final String TAG = MainActivity.class.getSimpleName();
private static final double MIN_OPENGL_VERSION = 3.1;
Session mSession;
private ArFragment arFragment;
private ArSceneView arSceneView;
private ModelRenderable andyRenderable;
private boolean shouldConfigureSession = false;
private boolean modelAdded = false;
private ObjectAnimator objectAnimation;
private TransformableNode andy;
private AnchorNode endNode;
private GestureDetector trackableGestureDetector;
/**
 * Returns false and displays an error message if Sceneform cannot run on this
 * device, true if it can. Sceneform requires Android N on the device as well as
 * OpenGL ES 3.1 capabilities. Finishes the activity if Sceneform cannot run.
 */
public static boolean checkIsSupportedDeviceOrFinish(final Activity activity) {
if (Build.VERSION.SDK_INT < VERSION_CODES.N) {
Log.e(TAG, "Sceneform requires Android N or later");
Toast.makeText(activity, "Sceneform requires Android N or later", Toast.LENGTH_LONG).show();
activity.finish();
return false;
}
String openGlVersionString =
((ActivityManager) activity.getSystemService(Context.ACTIVITY_SERVICE))
.getDeviceConfigurationInfo()
.getGlEsVersion();
if (Double.parseDouble(openGlVersionString) < MIN_OPENGL_VERSION) {
Log.e(TAG, "Sceneform requires OpenGL ES 3.1 later");
Toast.makeText(activity, "Sceneform requires OpenGL ES 3.1 or later", Toast.LENGTH_LONG)
.show();
activity.finish();
return false;
}
return true;
}
@Override
@SuppressWarnings({"AndroidApiChecker", "FutureReturnValueIgnored"})
// CompletableFuture requires api level 24
// FutureReturnValueIgnored is not valid
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (!checkIsSupportedDeviceOrFinish(this)) {
return;
}
setContentView(R.layout.activity_main);
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.READ_EXTERNAL_STORAGE}, 105);
arFragment = (ArFragment) getSupportFragmentManager().findFragmentById(R.id.ux_fragment);
if (arFragment != null) {
arFragment.getPlaneDiscoveryController().hide();
arFragment.getPlaneDiscoveryController().setInstructionView(null);
}
arSceneView = arFragment.getArSceneView();
arSceneView.getScene().addOnUpdateListener((this::onUpdateFrame));
arFragment.getArSceneView().getScene().addOnPeekTouchListener(this::handleOnTouch);
this.trackableGestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
public boolean onSingleTapUp(MotionEvent e) {
onSingleTap(e);
return true;
}
public boolean onDown(MotionEvent e) {
return true;
}
});
// When you build a Renderable, Sceneform loads its resources in the background while returning
// a CompletableFuture. Call thenAccept(), handle(), or check isDone() before calling get().
File file = new File(Environment.getExternalStorageDirectory(), "model.sfb");
Uri photoURI = Uri.fromFile(file);
Callable callable = () -> (InputStream) new FileInputStream(file);
FutureTask task = new FutureTask<>(callable);
new Thread(task).start();
ModelRenderable.builder()
.setSource(this, R.raw.model) //.setSource(this, callable)
.build()
.thenAccept(renderable -> andyRenderable = renderable)
.exceptionally(
throwable -> {
Toast toast =
Toast.makeText(this, "Unable to load andy renderable", Toast.LENGTH_LONG);
toast.setGravity(Gravity.CENTER, 0, 0);
toast.show();
return null;
});
arFragment.setOnTapArPlaneListener(
(HitResult hitResult, Plane plane, MotionEvent motionEvent) -> {
if (andyRenderable == null) {
return;
}
if (modelAdded) {
endNode = new AnchorNode(hitResult.createAnchor());
endNode.setParent(arFragment.getArSceneView().getScene());
startWalking();
}
});
}
private void handleOnTouch(HitTestResult hitTestResult, MotionEvent motionEvent) {
// First call ArFragment's listener to handle TransformableNodes.
arFragment.onPeekTouch(hitTestResult, motionEvent);
// Check for touching a Sceneform node
if (hitTestResult.getNode() != null) {
return;
}
// Otherwise call gesture detector.
trackableGestureDetector.onTouchEvent(motionEvent);
}
private void onSingleTap(MotionEvent motionEvent) {
Frame frame = arFragment.getArSceneView().getArFrame();
if (frame != null && motionEvent != null && frame.getCamera().getTrackingState() == TrackingState.TRACKING) {
for (HitResult hit : frame.hitTest(motionEvent)) {
Trackable trackable = hit.getTrackable();
if (trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose())) {
Plane plane = (Plane) trackable;
endNode = new AnchorNode(plane.createAnchor(plane.getCenterPose()));
endNode.setParent(arFragment.getArSceneView().getScene());
startWalking();
// Handle plane hits.
break;
} else if (trackable instanceof Point) {
// Handle point hits
Point point = (Point) trackable;
endNode = new AnchorNode(point.createAnchor(hit.getHitPose()));
endNode.setParent(arFragment.getArSceneView().getScene());
startWalking();
} else if (trackable instanceof AugmentedImage) {
// Handle image hits.
AugmentedImage image = (AugmentedImage) trackable;
endNode = new AnchorNode(image.createAnchor(image.getCenterPose()));
endNode.setParent(arFragment.getArSceneView().getScene());
startWalking();
}
}
}
}
private void startWalking() {
objectAnimation = new ObjectAnimator();
objectAnimation.setAutoCancel(true);
objectAnimation.setTarget(andy);
// All the positions should be world positions
// The first position is the start, and the second is the end.
objectAnimation.setObjectValues(andy.getWorldPosition(), endNode.getWorldPosition());
// Use setWorldPosition to position andy.
objectAnimation.setPropertyName("worldPosition");
// The Vector3Evaluator is used to evaluator 2 vector3 and return the next
// vector3. The default is to use lerp.
objectAnimation.setEvaluator(new Vector3Evaluator());
// This makes the animation linear (smooth and uniform).
objectAnimation.setInterpolator(new LinearInterpolator());
// Duration in ms of the animation.
objectAnimation.setDuration(500);
objectAnimation.start();
}
private void configureSession() {
Config config = new Config(mSession);
if (!setupAugmentedImageDb(config)) {
Toast.makeText(this, "Could not setup augmented", Toast.LENGTH_SHORT).show();
}
config.setUpdateMode(Config.UpdateMode.LATEST_CAMERA_IMAGE);
mSession.configure(config);
}
@Override
public void onPause() {
super.onPause();
if (mSession != null) {
// Note that the order matters - GLSurfaceView is paused first so that it does not try
// to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
// still call session.update() and get a SessionPausedException.
arSceneView.pause();
mSession.pause();
}
}
@Override
protected void onResume() {
super.onResume();
if (mSession == null) {
String message = null;
Exception exception = null;
try {
mSession = new Session(this);
} catch (UnavailableArcoreNotInstalledException e) {
message = "Please install ARCore";
exception = e;
} catch (UnavailableApkTooOldException e) {
message = "Please update ARCore";
exception = e;
} catch (UnavailableSdkTooOldException e) {
message = "Please update this app";
exception = e;
} catch (Exception e) {
message = "This device does not support AR";
exception = e;
}
if (message != null) {
Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
Log.e(TAG, "Exception creating session", exception);
return;
}
shouldConfigureSession = true;
}
if (shouldConfigureSession) {
configureSession();
shouldConfigureSession = false;
arSceneView.setupSession(mSession);
}
}
private void onUpdateFrame(FrameTime frameTime) {
Frame frame = arSceneView.getArFrame();
Collection<AugmentedImage> updatedAugmentedImages =
frame.getUpdatedTrackables(AugmentedImage.class);
Log.d("size----", String.valueOf(updatedAugmentedImages.size()));
for (AugmentedImage augmentedImage : updatedAugmentedImages) {
if (augmentedImage.getTrackingState() == TrackingState.TRACKING) {
// Check camera image matches our reference image
if (augmentedImage.getName().contains("car")) {
if (!modelAdded) {
modelAdded = true;
Anchor anchor = augmentedImage.createAnchor(augmentedImage.getCenterPose());
AnchorNode anchorNode = new AnchorNode(anchor);
anchorNode.setParent(arFragment.getArSceneView().getScene());
// Create the transformable andy and add it to the anchor.
andy = new TransformableNode(arFragment.getTransformationSystem());
andy.setParent(anchorNode);
andy.setRenderable(andyRenderable);
andy.select();
}
}
}
}
}
private boolean setupAugmentedImageDb(Config config) {
AugmentedImageDatabase augmentedImageDatabase;
Bitmap augmentedImageBitmap = loadAugmentedImage();
if (augmentedImageBitmap == null) {
return false;
}
augmentedImageDatabase = new AugmentedImageDatabase(mSession);
augmentedImageDatabase.addImage("car", augmentedImageBitmap);
config.setAugmentedImageDatabase(augmentedImageDatabase);
return true;
}
private Bitmap loadAugmentedImage() {
try (InputStream is = getAssets().open("car.jpeg")) {
return BitmapFactory.decodeStream(is);
} catch (IOException e) {
Log.e(TAG, "IO exception loading augmented image bitmap.", e);
}
return null;
}
}
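A footnote on the question's original attempt: tx()/ty()/tz() from an Anchor's Pose are coordinates in ARCore's world space, while an ObjectAnimator driving "localPosition" interprets its values relative to the node's parent, which is why the motion started from the wrong place. A small hypothetical helper (the name animateToWorldPosition is mine, not from the sample) that keeps both endpoints in world space:
// Animates `node` from its current world position to the world position
// of `target` (e.g. an AnchorNode created from the end anchor).
private ObjectAnimator animateToWorldPosition(Node node, Node target) {
    ObjectAnimator animator = ObjectAnimator.ofObject(
            node,                       // object to animate
            "worldPosition",            // resolved via node.setWorldPosition(Vector3)
            new Vector3Evaluator(),     // lerps between two Vector3 values
            node.getWorldPosition(),    // start
            target.getWorldPosition()); // end
    animator.setInterpolator(new LinearInterpolator());
    animator.setDuration(500);
    animator.start();
    return animator;
}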

Efficient loading of images and videos first frame in recyclerview

Working on a media browser type of application where I read files from USB with custom drivers. Files are exposed by NanoHTTPServer. Not an important point here, but just for information.
The following code logic works pretty well, but it looks hacky and starts struggling with fast scrolling.
Is there any way to load video frames faster and avoid the hangs and delays? Are there any flaws in this code? I am using RxJava.
@Override
public void onBindViewHolder(BasicHolder holder,final int position) {
...
if(multiMedia.isImage()){
Glide.with(context)
.load(multiMedia.getUrl())
.crossFade()
.into(holder.imageThumbnail);
}else{
Bitmap bitmap = mThumbnailsCache.get(multiMedia.getUrl()+THUMBNAIL_AT_PERCENT);
if(bitmap == null || multiMedia.getLength() == 0){
//check for already running task
if(holder.imageThumbnail.getTag(R.string.tag_for_video) == null){
loadVideoThumbnail(holder,multiMedia,position);
}
}else {
holder.imageThumbnail.setImageBitmap(bitmap);
}
}
....
// this method triggers an RxJava task on the IO scheduler and applies the emitted results
private void loadVideoThumbnail(final BasicHolder holder, final MultiMedia multiMedia,final int position){
final SoftReference<BasicHolder> mHolderRef = new SoftReference<>(holder);
final Subscription sub = extractFrameAndDuration(multiMedia.getUrl(), THUMBNAIL_AT_PERCENT)
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(emittedObject -> {
if (emittedObject == null || mHolderRef.get() == null) return;
if (mHolderRef.get().getLayoutPosition() != position) {
notifyItemChanged(position);
return;
}
if (emittedObject instanceof Bitmap) {
ImageView imageThumbnail = mHolderRef.get().imageThumbnail;
if (imageThumbnail.getTag(R.string.tag_for_video) != null) {
imageThumbnail.setTag(R.string.tag_for_video, null);
imageThumbnail.setImageBitmap((Bitmap) emittedObject);
}
} else {
multiMedia.setLength((long)emittedObject);
TextView textInfo = mHolderRef.get().textInfo;
if (textInfo.getTag() == Filetype.MOVIE) {
mHolderRef.get().viewHighlight.setData(multiMedia.getSavedClips(),multiMedia.getLength());
textInfo.setText(VideoUtils.getTotalClipDuration((long) emittedObject, App.context));
}
}
}, Throwable::printStackTrace);
if(reqSubQueue.size() > 8){
//cancel previous requests
reqSubQueue.pop().unsubscribe();
}
reqSubQueue.add(sub);
holder.imageThumbnail.setTag(R.string.tag_for_video,sub);
}
...
// this method extracts the duration and a thumbnail frame from a video URL
private Observable<Object> extractFrameAndDuration(String url, final long atPercent){
return Observable.create(subscriber -> {
final MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
subscriber.add(new Subscription() {
@Override
public void unsubscribe() {
//this will actually cancel loading
Observable.fromCallable(() -> {
mediaMetadataRetriever.release();
return true;
}).subscribeOn(Schedulers.io())
.subscribe();
}
@Override
public boolean isUnsubscribed() {
return false;
}
});
try{
mediaMetadataRetriever.setDataSource(url, new HashMap<>());
String durationString = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
final long duration = Long.parseLong(TextUtils.isEmpty(durationString)? "0": durationString);
//cache duration
clipsDurations.put(url,duration);
subscriber.onNext(duration);
long timeUs = (long) (duration / 100.0 * atPercent);
if(timeUs > duration) timeUs = duration;
Bitmap bitmap = mThumbnailsCache.get(url+THUMBNAIL_AT_PERCENT);
if(bitmap == null){
//1 milli sec = 1000 microseconds
bitmap = MediaUtils.getFrameFromVideoAt(mediaMetadataRetriever, timeUs * 1000);
if(bitmap != null){
bitmap = MediaUtils.resize(bitmap,thumbSize,thumbSize);
mThumbnailsCache.put(url+THUMBNAIL_AT_PERCENT,bitmap);
subscriber.onNext(bitmap);
}
}else {
subscriber.onNext(bitmap);
}
}
catch (Exception e){
e.printStackTrace();
}
finally{
if (mediaMetadataRetriever != null){
mediaMetadataRetriever.release();
}
}
//remove item from queue
subscriber.onCompleted();
reqSubQueue.remove(subscriber);
});
}
All requests get canceled in Activity.onStop. reqSubQueue is an ArrayDeque.
Edit: the method description has been added.
I am sure some pro developers will spot the flaws.
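One alternative worth considering, under the assumption that you can move to Glide 4 and that the server supports range requests: Glide can extract and cache video frames from a URL itself, which gives you cancellation on view recycling for free instead of the hand-rolled MediaMetadataRetriever pipeline. The main trade-off is that RequestOptions.frameOf() takes a fixed timestamp rather than a percentage of the duration (THUMBNAIL_FRAME_MICROS below is a hypothetical constant):
// In onBindViewHolder: Glide fetches, decodes and caches the frame, and
// cancels the request automatically when the ImageView is rebound.
RequestOptions options = RequestOptions
        .frameOf(THUMBNAIL_FRAME_MICROS) // frame time in microseconds
        .centerCrop();
Glide.with(context)
        .load(multiMedia.getUrl())
        .apply(options)
        .into(holder.imageThumbnail);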

Ideas to refactor code with many Lists

I need a little help getting ideas for refactoring my code; I can't see any options beyond what's already done. I would like to add the objects without using so many lists (and if conditions).
Here is my code. If anyone can help, I'd appreciate it. Thanks.
@ViewById
BannerHomeViewPager place1, place2, place3, place4, place5, place6, place7,
place8, place9;
The lists:
private List<HomeItem> allHomeItems = new ArrayList<HomeItem>(),
placeItems1 = new ArrayList<HomeItem>(),
placeItems2 = new ArrayList<HomeItem>(),
placeItems3 = new ArrayList<HomeItem>(),
placeItems4 = new ArrayList<HomeItem>(),
placeItems5 = new ArrayList<HomeItem>(),
placeItems6 = new ArrayList<HomeItem>(),
placeItems7 = new ArrayList<HomeItem>(),
placeItems8 = new ArrayList<HomeItem>(),
placeItems9 = new ArrayList<HomeItem>();
1) Items mocked, ok.
2)
@UiThread
void updateUI() {
if (allHomeItems != null && allHomeItems.size() > 0) {
for (HomeItem item : allHomeItems) {
if (item.getPlacement().contains("1")) {
placeItems1.add(item);
} else if (item.getPlacement().contains("2")) {
placeItems2.add(item);
} else if (item.getPlacement().contains("3")) {
placeItems3.add(item);
} else if (item.getPlacement().contains("4")) {
placeItems4.add(item);
} else if (item.getPlacement().contains("5")) {
placeItems5.add(item);
} else if (item.getPlacement().contains("6")) {
placeItems6.add(item);
} else if (item.getPlacement().contains("7")) {
placeItems7.add(item);
} else if (item.getPlacement().contains("8")) {
placeItems8.add(item);
} else {
placeItems9.add(item);
}
}
}
setupAdapters();
}
3) setupAdapters()
private void setupAdapters() {
if (place1 != null)
place1.update(placeItems1);
if (place2 != null)
place2.update(placeItems2);
if (place3 != null)
place3.update(placeItems3);
if (place4 != null)
place4.update(placeItems4);
if (place5 != null)
place5.update(placeItems5);
if (place6 != null)
place6.update(placeItems6);
if (place7 != null)
place7.update(placeItems7);
if (place8 != null)
place8.update(placeItems8);
if (place9 != null)
place9.update(placeItems9);
}
As per @DanielBo's answer:
private Map<String, ArrayList<HomeItem>> placeItems = new HashMap<String,ArrayList<HomeItem>>();
void updateUI() {
if (allHomeItems != null && allHomeItems.size() > 0) {
for (HomeItem item : allHomeItems) {
if(!placeItems.containsKey(item.getPlacement())){
placeItems.put(item.getPlacement(), new ArrayList<HomeItem>());
}
placeItems.get(item.getPlacement()).add(item);
}
}
setupAdapters();
}
But why are you using so many views? I really can't say whether this is correct because I don't know the purpose, but I can't imagine a good use for so many list views in the same layout...
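The same map idea can remove the repetition on the view side too. A sketch assuming the placement strings are exactly "1" through "9" (the original code uses contains(), so if getPlacement() merely contains the digit, the keys need normalizing first):
private Map<String, BannerHomeViewPager> placeViews = new HashMap<>();

private void initPlaceViews() {
    placeViews.put("1", place1);
    placeViews.put("2", place2);
    // ... and so on up to place9
}

private void setupAdapters() {
    for (Map.Entry<String, BannerHomeViewPager> entry : placeViews.entrySet()) {
        BannerHomeViewPager view = entry.getValue();
        List<HomeItem> items = placeItems.get(entry.getKey());
        if (view != null && items != null) {
            view.update(items);
        }
    }
}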

Chromecast SDK (Android) - is there a way to check whether the media playing on the cast device has finished playing?

Is there an onFinished listener of some sort? Or do we have to compare the current stream position against the duration of the track?
It's not pretty, but you can make this call:
if (mRemoteMediaPlayer.getMediaStatus().getPlayerState() == MediaStatus.PLAYER_STATE_IDLE
&& mRemoteMediaPlayer.getMediaStatus().getIdleReason() == MediaStatus.IDLE_REASON_FINISHED) {
...
}
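Since getMediaStatus() can itself return null between status updates, a null-safe version of the same check, wrapped in a hypothetical helper, might look like:
// True only when the receiver went idle because the stream played to the
// end (as opposed to STOP, an error, or an interruption).
private boolean hasFinishedPlaying(RemoteMediaPlayer player) {
    MediaStatus status = player.getMediaStatus();
    return status != null
            && status.getPlayerState() == MediaStatus.PLAYER_STATE_IDLE
            && status.getIdleReason() == MediaStatus.IDLE_REASON_FINISHED;
}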
Prem,
There is currently no callback to register for such an event. One alternative (and not-so-pretty) approach is the following: on the receiver, listen for the "ended" event of the media element and send an event back to the sender through a private channel. Another approach is what you suggested: check the position against the duration. When the SDK graduates to general availability, better and cleaner approaches will be available to accomplish what you want.
Here is a solution:
You just need one more variable, mIdleReason.
1) Initialize mIdleReason:
public int mIdleReason = MediaStatus.IDLE_REASON_NONE;
2) Update the value in loadMedia:
public void loadMedia(String url, MediaMetadata movieMetadata, CastSession castSession, boolean autoPlay, long position) {
if (castSession == null || !castSession.isConnected()) {
return;
}
MediaInfo mediaInfo = new MediaInfo.Builder(url)
.setStreamType(MediaInfo.STREAM_TYPE_BUFFERED)
.setContentType("videos/m3u8")
.setMetadata(movieMetadata)
.build();
mRemoteMediaClient = castSession.getRemoteMediaClient();
mRemoteMediaClient.addListener(mRemoteMediaClientListener);
mRemoteMediaClient.load(mediaInfo, autoPlay, position);
mIdleReason = MediaStatus.IDLE_REASON_NONE;
}
3) Update the value in onStatusUpdated:
private RemoteMediaClient.Listener mRemoteMediaClientListener = new RemoteMediaClient.Listener() {
@Override
public void onStatusUpdated() {
if (mRemoteMediaClient == null || mediaPlayerListener == null) {
return;
}
MediaStatus mediaStatus = mRemoteMediaClient.getMediaStatus();
if (mediaStatus != null) {
int playerStatus = mediaStatus.getPlayerState();
Log.d("PlayerState", "onStatusUpdated() called, progress= "+mSeekBar.getProgress() +", stream duration= "+ mRemoteMediaClient.getStreamDuration()+" mSeekBar.getProgress() == mRemoteMediaClient.getStreamDuration()="+(mSeekBar.getProgress() == mRemoteMediaClient.getStreamDuration()));
Log.d("PlayerState", "onStatusUpdated() called playerStatus="+playerStatus+", idleReason="+mediaStatus.getIdleReason());
if (playerStatus == MediaStatus.PLAYER_STATE_PLAYING) {
mediaPlayerListener.playing();
mIdleReason = MediaStatus.IDLE_REASON_FINISHED;
} else if (playerStatus == MediaStatus.PLAYER_STATE_BUFFERING) {
mediaPlayerListener.buffering();
mIdleReason = MediaStatus.IDLE_REASON_FINISHED;
} else if (playerStatus == MediaStatus.PLAYER_STATE_PAUSED) {
mediaPlayerListener.paused();
} else if (playerStatus == MediaStatus.PLAYER_STATE_IDLE && mediaStatus.getIdleReason() == MediaStatus.IDLE_REASON_INTERRUPTED) {
mediaPlayerListener.error();
} else if (playerStatus == MediaStatus.PLAYER_STATE_IDLE && mediaStatus.getIdleReason() == MediaStatus.IDLE_REASON_ERROR) {
mediaPlayerListener.error();
} else if (playerStatus == MediaStatus.PLAYER_STATE_IDLE && mediaStatus.getIdleReason() == MediaStatus.IDLE_REASON_FINISHED && mIdleReason == MediaStatus.IDLE_REASON_FINISHED) {
mediaPlayerListener.played();
}
}
}
@Override
public void onMetadataUpdated() {
Log.d("", "onMetadataUpdated: ");
}
@Override
public void onQueueStatusUpdated() {
Log.d("", "onQueueStatusUpdated: ");
}
@Override
public void onPreloadStatusUpdated() {
Log.d("", "onPreloadStatusUpdated: ");
}
@Override
public void onSendingRemoteMediaRequest() {
Log.d("", "onSendingRemoteMediaRequest: ");
}
@Override
public void onAdBreakStatusUpdated() {
Log.d("", "onAdBreakStatusUpdated: ");
}
};
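One caveat on the snippet above, noted as an assumption about your SDK version: RemoteMediaClient.Listener (attached via addListener() in loadMedia()) is deprecated in recent versions of the Cast framework in favor of RemoteMediaClient.Callback. The registration then becomes:
// registerCallback() replaces addListener() in newer play-services-cast-framework releases.
mRemoteMediaClient.registerCallback(new RemoteMediaClient.Callback() {
    @Override
    public void onStatusUpdated() {
        // same finished-detection logic as in onStatusUpdated() above
    }
});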
