Playing HLS with ExoPlayer - android

I'm trying to create a very simple view by extending SurfaceView and rendering the ExoPlayer video to its surface. I want to support HLS and ONLY HLS. I seem to get audio consistently, but I never see video rendered to the screen. I'm curious as to what I could be doing wrong.
The only error that I am seeing in my Logcat is the following:
E/OMXMaster﹕ A component of name 'OMX.qcom.audio.decoder.aac' already exists, ignoring this one.
Below is my code.
private static final int BUFFER_SEGMENT_SIZE = 256 * 1024;
private static final int BUFFER_SEGMENTS = 64;
private ExoPlayer mExoPlayer;
private Handler mHandler;
private AudioCapabilitiesReceiver mAudioCapabilitiesReceiver;
private AudioCapabilities mAudioCapabilities;
private ManifestFetcher<HlsPlaylist> playlistFetcher;
private String mUserAgent;
String url = "http://solutions.brightcove.com/bcls/assets/videos/Great-Blue-Heron.m3u8";
public ExoPlayerView(Context context) {
super(context);
}
public ExoPlayerView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public ExoPlayerView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
@Override
public void init() {
mHandler = new Handler();
mUserAgent = Util.getUserAgent(getContext(), "CX Video Player");
HlsPlaylistParser parser = new HlsPlaylistParser();
playlistFetcher = new ManifestFetcher<>(url, new DefaultUriDataSource(getContext(), mUserAgent),
parser);
mAudioCapabilitiesReceiver = new AudioCapabilitiesReceiver(getContext(), this);
mAudioCapabilitiesReceiver.register();
}
@Override
public void play() {
mExoPlayer.setPlayWhenReady(true);
}
@Override
public void stop() {
mExoPlayer.stop();
release();
}
@Override
public void pause() {
mExoPlayer.setPlayWhenReady(false);
}
@Override
public void seekTo(long timeMillis) {
mExoPlayer.seekTo(timeMillis);
}
@Override
public long getCurrentPosition() {
return mExoPlayer.getCurrentPosition();
}
@Override
public boolean isPlaying() {
return false;
}
@Override
public void playNext() {
}
@Override
public boolean isPlayingLastVideo() {
return false;
}
@Override
public int getDuration() {
return (int)mExoPlayer.getDuration();
}
@Override
public void addVideo(Uri uri) {
}
@Override
public void addVideos(List<Uri> uris) {
}
@Override
public void release() {
mAudioCapabilitiesReceiver.unregister();
mExoPlayer.release();
}
@Override
public void onSingleManifest(HlsPlaylist hlsPlaylist) {
final int numRenderers = 2;
LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
int[] variantIndices = null;
if (hlsPlaylist instanceof HlsMasterPlaylist) {
HlsMasterPlaylist masterPlaylist = (HlsMasterPlaylist) hlsPlaylist;
try {
variantIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
getContext(), masterPlaylist.variants, null, false);
} catch (MediaCodecUtil.DecoderQueryException e) {
e.printStackTrace();
return;
}
if (variantIndices.length == 0) {
return;
}
}
DataSource dataSource = new DefaultUriDataSource(getContext(), bandwidthMeter, mUserAgent);
HlsChunkSource hlsChunkSource = new HlsChunkSource(dataSource, url, hlsPlaylist, bandwidthMeter,
variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE, mAudioCapabilities);
HlsSampleSource hlsSampleSource = new HlsSampleSource(hlsChunkSource, loadControl, BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE);
// Build the track renderers
TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(hlsSampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING);
TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(hlsSampleSource);
// Build the ExoPlayer and start playback
mExoPlayer = ExoPlayer.Factory.newInstance(numRenderers);
mExoPlayer.prepare(videoRenderer, audioRenderer);
// Pass the surface to the video renderer.
mExoPlayer.sendMessage(videoRenderer, MediaCodecVideoTrackRenderer.MSG_SET_SURFACE, getHolder().getSurface());
mExoPlayer.setPlayWhenReady(true);
}
@Override
public void onSingleManifestError(IOException e) {
}
@Override
public void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities) {
mAudioCapabilities = audioCapabilities;
playlistFetcher.singleLoad(mHandler.getLooper(), this);
}
}

So I found my issue. After going back through the demo code, I noticed that the demo SurfaceView was wrapped in com.google.android.exoplayer.AspectRatioFrameLayout. After wrapping my SurfaceView in this same layout, the video magically started playing.
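For reference, here is a minimal sketch of doing that wrap programmatically (the XML route is simply nesting the SurfaceView inside com.google.android.exoplayer.AspectRatioFrameLayout). The VideoSurfaceWrapper class and its setVideoSize helper below are my own illustrative names, not part of the question's code; the aspect-ratio formula mirrors what the ExoPlayer demo uses:
import android.content.Context;
import android.view.SurfaceView;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import com.google.android.exoplayer.AspectRatioFrameLayout;

// Illustrative helper: wrap the SurfaceView and keep the frame's aspect ratio in sync.
final class VideoSurfaceWrapper {
    final AspectRatioFrameLayout videoFrame;
    final SurfaceView surfaceView;

    VideoSurfaceWrapper(Context context) {
        videoFrame = new AspectRatioFrameLayout(context);
        surfaceView = new SurfaceView(context);
        videoFrame.addView(surfaceView, new FrameLayout.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
    }

    // Call with the values reported for the selected video track,
    // e.g. from MediaCodecVideoTrackRenderer's onVideoSizeChanged callback.
    void setVideoSize(int width, int height, float pixelWidthHeightRatio) {
        videoFrame.setAspectRatio(height == 0 ? 1f : (width * pixelWidthHeightRatio) / height);
    }
}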

I had a similar situation where I could hear the audio but saw no video, just a black screen. It turned out I was sending the set-surface message:
player.sendMessage(videoRenderer, MediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);
before the video renderer was actually initialised and ready, which is why I could only hear audio.
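A sketch of the ordering that avoids this, assuming ExoPlayer v1's blockingSendMessage and a valid Surface from the SurfaceHolder (the SurfacePusher helper below is my own name, modeled loosely on the demo app, not the library's API):
import android.view.Surface;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;

// Illustrative helper: only push the surface once the renderer has been passed to
// prepare() and the Surface itself is valid (e.g. call this from surfaceCreated()).
final class SurfacePusher {
    static void pushSurface(ExoPlayer player, TrackRenderer videoRenderer,
                            Surface surface, boolean blocking) {
        if (surface == null || !surface.isValid()) {
            return; // nothing to attach yet; try again when the surface is created
        }
        if (blocking) {
            // Blocks until the renderer has handled the message (useful when clearing a surface).
            player.blockingSendMessage(videoRenderer,
                    MediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);
        } else {
            player.sendMessage(videoRenderer,
                    MediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);
        }
    }
}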

Related

How to correctly implement android Lifecycle with the architecture component

I'm new to the Architecture Components. I have created a ViewModel class and implemented LifecycleObserver as per the Architecture Components. Inside the ViewModel class I have overridden the run() method of the Runnable interface and call it recursively, with a delay, using a Handler; inside it the images array value changes and is wrapped in LiveData.
MainActivity observes the changes to the images array and, as the value changes, an ImageView is bound to the value from the images array. MainActivity is also the lifecycle owner.
Now I want to implement a LifecycleObserver so that when I pause my application the run() method also pauses, and when I resume the application it starts from where it paused.
I tried doing this by creating pause and resume methods around the handler, but it didn't work.
Help me with this.
MainActivity Class
private ImageViewModel imageViewModel;
private ImageView imageView;
private ProgressBar progressBar;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
showDialogueBox();
}
private void showDialogueBox() {
final Dialog dialog = new Dialog(MainActivity.this);
dialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
dialog.setContentView(R.layout.dialogue_box);
dialog.setTitle("custom");
Button button =(Button)dialog.findViewById(R.id.button1);
button.setEnabled(true);
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
imageViewModel = ViewModelProviders.of(MainActivity.this).get(ImageViewModel.class);
subscribe();
dialog.cancel();
}
});
dialog.show();
}
private void subscribe() {
final Observer<Integer> imageTimeObserver = new Observer<Integer>() {
@Override
public void onChanged(@Nullable Integer integer) {
imageView = findViewById(R.id.imageView);
imageView.setImageResource(integer);
getLifecycle().addObserver(new BrainalyseComponent(MainActivity.this,getApplicationContext(),imageViewModel));
}
};
final Observer<Integer> progressbarTimeObserver = new Observer<Integer>() {
@Override
public void onChanged(@Nullable Integer integer) {
progressBar = findViewById(R.id.progressBar);
progressBar.setProgress(integer);
}
};
imageViewModel.getImage().observe(this,imageTimeObserver);
imageViewModel.getProgressbarStatus().observe(this,progressbarTimeObserver);
}
ViewModel class
private int imagesIndex;
private int delay;
public Handler handler;
private MutableLiveData<Integer> imageLiveData = new MutableLiveData<>();
private MutableLiveData<Integer> progressbarLiveData = new MutableLiveData<>();
private int progressBarStatus;
private HashMap<Integer,Integer> imagesAndDelay;
private int images[] =
{
R.drawable.food_1,
R.drawable.food_2,
R.drawable.food_3,
R.drawable.food_4,
R.drawable.food_5,
R.drawable.food_6,
R.drawable.food_7,
R.drawable.food_8,
R.drawable.food_9,
R.drawable.food_10
};
public ImageViewModel(){
imagesIndex = 0;
progressBarStatus = 0;
delay = 2;//to be changed as delay will be discussed
handler = new Handler();
imagesAndDelay = new HashMap<>();
shuffleImages();
runnable.run();
}
private void shuffleImages() {
Random random = new Random();
for (int i = 0; i < images.length; i++) {
int j = random.nextInt(images.length);
int temp = images[i];
images[i] = images[j];
images[j] = temp;
}
}
public Runnable runnable = new Runnable() {
@Override
public void run() {
if (imagesIndex<images.length){
progressBarStatus += 100/images.length;
progressbarLiveData.postValue(progressBarStatus);
imageLiveData.postValue(images[imagesIndex]);
imagesAndDelay.put(images[imagesIndex],delay);
imagesIndex++;
delay += 2;
}else {
stopTask();
return;
}
handler.postDelayed(runnable,2000);
}
};
public Thread newThread = new Thread(){
public void run(){
if (imagesIndex<images.length){
progressBarStatus += 100/images.length;
progressbarLiveData.postValue(progressBarStatus);
imageLiveData.postValue(images[imagesIndex]);
imagesAndDelay.put(images[imagesIndex],delay);
imagesIndex++;
delay += 2;
}else {
stopTask();
return;
}
try {
sleep(2000);
} catch (InterruptedException e) {
e.printStackTrace();
}finally {
run();
}
}
};
public LiveData<Integer> getImage(){
return imageLiveData;
}
public LiveData<Integer> getProgressbarStatus(){
return progressbarLiveData;
}
private void stopTask() {
delay = 2;
progressBarStatus = 0;
imagesIndex = 0;
Utility.setImagesAndDelay(imagesAndDelay);
handler.removeCallbacks(runnable);
}
LifecycleObserver
private Context mContext;
private static final String LOG_TAG = BrainalyseComponent.class.getSimpleName();
private ImageView imageView;
private ProgressBar progressBar;
private Integer integer;
LifecycleOwner lifecycleOwner;
private ImageViewModel imageViewModel;
public BrainalyseComponent(LifecycleOwner lifecycleOwner, Context context, ImageViewModel imageViewModel) {
this.mContext = context;
this.imageView = imageView;
this.imageViewModel = imageViewModel;
this.integer = integer;
this.progressBar = progressBar;
this.lifecycleOwner = lifecycleOwner;
}
@OnLifecycleEvent(Lifecycle.Event.ON_RESUME)
public void onResume(){
Log.d(LOG_TAG,"on resume of app");
}
@OnLifecycleEvent(Lifecycle.Event.ON_PAUSE)
public void onPause(){
}
You're missing the observer registration, and you'll need to remove the observer at some point (in onDestroy, for example):
public BrainalyseComponent(LifecycleOwner lifecycleOwner, Context context, ImageViewModel imageViewModel) {
this.mContext = context;
this.imageView = imageView;
this.imageViewModel = imageViewModel;
this.integer = integer;
this.progressBar = progressBar;
this.lifecycleOwner = lifecycleOwner;
lifecycleOwner.getLifecycle().addObserver(this);
}
@OnLifecycleEvent(Lifecycle.Event.ON_DESTROY)
public void onDestroy(){
lifecycleOwner.getLifecycle().removeObserver(this);
}
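Building on that, a minimal sketch of actually pausing and resuming the loop. The pauseTask()/resumeTask() helpers below are hypothetical additions to the question's ImageViewModel, not existing methods; they reuse its handler and runnable fields:
// Hypothetical additions to ImageViewModel: stop future ticks on pause, re-post on resume.
public void pauseTask() {
    handler.removeCallbacks(runnable);   // imagesIndex, delay, progressBarStatus keep their values
}

public void resumeTask() {
    handler.post(runnable);              // continues from where it left off
}

// In the LifecycleObserver (BrainalyseComponent), forward the lifecycle events:
@OnLifecycleEvent(Lifecycle.Event.ON_PAUSE)
public void onPause() {
    imageViewModel.pauseTask();
}

@OnLifecycleEvent(Lifecycle.Event.ON_RESUME)
public void onResume() {
    imageViewModel.resumeTask();
}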

How to use Picasso to load into BackgroundManager drawable?

I am making an Android TV app. I want to load images from URLs into BackgroundManager drawables using Picasso.
In Glide, it is done using the following code:
Glide.with(getActivity())
.load(uri)
.centerCrop()
.error(mDefaultBackground)
.into(new SimpleTarget<GlideDrawable>(width, height) {
@Override
public void onResourceReady(GlideDrawable resource,
GlideAnimation<? super GlideDrawable> glideAnimation) {
mBackgroundManager.setDrawable(resource);
}
});
How do I do the same using Picasso?
Here is the solution I found:
public class PicassoBackgroundManager {
private static final String TAG = PicassoBackgroundManager.class.getSimpleName();
private static int BACKGROUND_UPDATE_DELAY = 500;
private final int DEFAULT_BACKGROUND_RES_ID = R.drawable.default_background;
private static Drawable mDefaultBackground;
// Handler attached with main thread
private final Handler mHandler = new Handler(Looper.getMainLooper());
private Activity mActivity;
private BackgroundManager mBackgroundManager = null;
private DisplayMetrics mMetrics;
private URI mBackgroundURI;
private PicassoBackgroundManagerTarget mBackgroundTarget;
Timer mBackgroundTimer; // null when no UpdateBackgroundTask is running.
public PicassoBackgroundManager (Activity activity) {
mActivity = activity;
mDefaultBackground = activity.getDrawable(DEFAULT_BACKGROUND_RES_ID);
mBackgroundManager = BackgroundManager.getInstance(activity);
mBackgroundManager.attach(activity.getWindow());
mBackgroundTarget = new PicassoBackgroundManagerTarget(mBackgroundManager);
mMetrics = new DisplayMetrics();
activity.getWindowManager().getDefaultDisplay().getMetrics(mMetrics);
}
/**
* if UpdateBackgroundTask is already running, cancel this task and start new task.
*/
private void startBackgroundTimer() {
if (mBackgroundTimer != null) {
mBackgroundTimer.cancel();
}
mBackgroundTimer = new Timer();
/* set delay time to reduce too much background image loading process */
mBackgroundTimer.schedule(new UpdateBackgroundTask(), BACKGROUND_UPDATE_DELAY);
}
private class UpdateBackgroundTask extends TimerTask {
@Override
public void run() {
/* Here is TimerTask thread, not UI thread */
mHandler.post(new Runnable() {
@Override
public void run() {
/* Here is main (UI) thread */
if (mBackgroundURI != null) {
updateBackground(mBackgroundURI);
}
}
});
}
}
public void updateBackgroundWithDelay(String url) {
try {
URI uri = new URI(url);
updateBackgroundWithDelay(uri);
} catch (URISyntaxException e) {
/* skip updating background */
Log.e(TAG, e.toString());
}
}
/**
* updateBackground with delay
* delay time is measured in other Timer task thread.
* @param uri
*/
public void updateBackgroundWithDelay(URI uri) {
mBackgroundURI = uri;
startBackgroundTimer();
}
private void updateBackground(URI uri) {
try {
Picasso.with(mActivity)
.load(uri.toString())
.resize(mMetrics.widthPixels, mMetrics.heightPixels)
.centerCrop()
.error(mDefaultBackground)
.into(mBackgroundTarget);
} catch (Exception e) {
Log.e(TAG, e.toString());
}
}
/**
* Copied from AOSP sample code.
* Inner class
* Picasso target for updating default_background images
*/
public class PicassoBackgroundManagerTarget implements Target {
BackgroundManager mBackgroundManager;
public PicassoBackgroundManagerTarget(BackgroundManager backgroundManager) {
this.mBackgroundManager = backgroundManager;
}
@Override
public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom loadedFrom) {
this.mBackgroundManager.setBitmap(bitmap);
}
@Override
public void onBitmapFailed(Drawable drawable) {
this.mBackgroundManager.setDrawable(drawable);
}
@Override
public void onPrepareLoad(Drawable drawable) {
// Do nothing, default_background manager has its own transitions
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
PicassoBackgroundManagerTarget that = (PicassoBackgroundManagerTarget) o;
if (!mBackgroundManager.equals(that.mBackgroundManager))
return false;
return true;
}
@Override
public int hashCode() {
return mBackgroundManager.hashCode();
}
}
}
Then in your activity or fragment, use the snippet below:
PicassoBackgroundManager picassoBackgroundManager = new PicassoBackgroundManager(getActivity());
picassoBackgroundManager.updateBackgroundWithDelay("some.image.url");
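If you don't need the delay/timer machinery, a shorter variant is to hand Picasso a Target directly. This is only a sketch using the same Picasso 2.x API as the answer above; mBackgroundManager and mDefaultBackground are assumed to be fields like those in the answer, and the Target must itself be kept as a field because Picasso holds targets weakly:
// Keep a field reference so Picasso's weak reference to the Target isn't garbage collected.
private final Target mBackgroundTarget = new Target() {
    @Override
    public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) {
        mBackgroundManager.setBitmap(bitmap);
    }

    @Override
    public void onBitmapFailed(Drawable errorDrawable) {
        mBackgroundManager.setDrawable(errorDrawable);
    }

    @Override
    public void onPrepareLoad(Drawable placeHolderDrawable) {
        // BackgroundManager handles its own transitions.
    }
};

private void loadBackground(String url, int width, int height) {
    Picasso.with(getActivity())
            .load(url)
            .resize(width, height)
            .centerCrop()
            .error(mDefaultBackground)
            .into(mBackgroundTarget);
}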

How to get user ID or info in onAuthenticationSucceeded method for android fingerprint

I am implementing Android fingerprint authentication. I want to know which user, previously enrolled on the device, is authenticating. Is there any information about that user in the FingerprintManager.AuthenticationResult argument of the onAuthenticationSucceeded method?
I am using this sample.
This is my class, which extends FingerprintManager.AuthenticationCallback:
public class FingerprintUiHelper extends FingerprintManager.AuthenticationCallback {
private static final long ERROR_TIMEOUT_MILLIS = 1600;
private static final long SUCCESS_DELAY_MILLIS = 1300;
private final FingerprintManager mFingerprintManager;
private final ImageView mIcon;
private final TextView mErrorTextView;
private final Callback mCallback;
private CancellationSignal mCancellationSignal;
private boolean mSelfCancelled;
/**
* Constructor for {@link FingerprintUiHelper}.
*/
FingerprintUiHelper(FingerprintManager fingerprintManager,
ImageView icon, TextView errorTextView, Callback callback) {
mFingerprintManager = fingerprintManager;
mIcon = icon;
mErrorTextView = errorTextView;
mCallback = callback;
}
public boolean isFingerprintAuthAvailable() {
// The line below prevents the false positive inspection from Android Studio
// noinspection ResourceType
return mFingerprintManager.isHardwareDetected()
&& mFingerprintManager.hasEnrolledFingerprints();
}
public void startListening(FingerprintManager.CryptoObject cryptoObject) {
if (!isFingerprintAuthAvailable()) {
return;
}
mCancellationSignal = new CancellationSignal();
mSelfCancelled = false;
// The line below prevents the false positive inspection from Android Studio
// noinspection ResourceType
mFingerprintManager
.authenticate(cryptoObject, mCancellationSignal, 0 /* flags */, this, null);
mIcon.setImageResource(R.drawable.ic_fp_40px);
}
public void stopListening() {
if (mCancellationSignal != null) {
mSelfCancelled = true;
mCancellationSignal.cancel();
mCancellationSignal = null;
}
}
@Override
public void onAuthenticationError(int errMsgId, CharSequence errString) {
if (!mSelfCancelled) {
showError(errString);
mIcon.postDelayed(new Runnable() {
@Override
public void run() {
mCallback.onError();
}
}, ERROR_TIMEOUT_MILLIS);
}
}
@Override
public void onAuthenticationHelp(int helpMsgId, CharSequence helpString) {
showError(helpString);
}
@Override
public void onAuthenticationFailed() {
showError(mIcon.getResources().getString(
R.string.fingerprint_not_recognized));
}
@Override
public void onAuthenticationSucceeded(FingerprintManager.AuthenticationResult result) {
mErrorTextView.removeCallbacks(mResetErrorTextRunnable);
mIcon.setImageResource(R.drawable.ic_fingerprint_success);
mErrorTextView.setTextColor(
mErrorTextView.getResources().getColor(R.color.success_color, null));
mErrorTextView.setText(
mErrorTextView.getResources().getString(R.string.fingerprint_success));
mIcon.postDelayed(new Runnable() {
@Override
public void run() {
mCallback.onAuthenticated();
}
}, SUCCESS_DELAY_MILLIS);
}
private void showError(CharSequence error) {
mIcon.setImageResource(R.drawable.ic_fingerprint_error);
mErrorTextView.setText(error);
mErrorTextView.setTextColor(
mErrorTextView.getResources().getColor(R.color.warning_color, null));
mErrorTextView.removeCallbacks(mResetErrorTextRunnable);
mErrorTextView.postDelayed(mResetErrorTextRunnable, ERROR_TIMEOUT_MILLIS);
}
private Runnable mResetErrorTextRunnable = new Runnable() {
@Override
public void run() {
mErrorTextView.setTextColor(
mErrorTextView.getResources().getColor(R.color.hint_color, null));
mErrorTextView.setText(
mErrorTextView.getResources().getString(R.string.fingerprint_hint));
mIcon.setImageResource(R.drawable.ic_fp_40px);
}
};
public interface Callback {
void onAuthenticated();
void onError();
}
}

I don't know how to change the "ok glass" phrase in Google Glass

I want to make a Glass application with offline voice recognition and without the "ok glass" trigger.
What I want to know is how to change "ok glass" to another word (something like "start").
I looked at the decompiled source of GlassHome.apk and GlassVoice.apk.
I learned that the "ok glass" setting is related to VoiceInputHelper and voice_label_ok_glass in strings.xml,
so I tried changing every occurrence of the string "ok glass" to "nice" (my temporary guard phrase) in strings.xml,
but when I said any word (like "hahaha" or "kakaka"), everything I said was recognized as my guard phrase ("nice") by VoiceService.
What should I do to change "ok glass" to my guard phrase and have it work correctly?
(P.S. Sorry for my bad English; I hope you understand what the question means.)
Here is my code (I tried to set the VoiceConfig to "nice"):
public class MainActivity extends GlassActivity implements VoiceListener {
public static final String TEST_SERVICE_EXTRAS_KEY = "serviceExtras";
private ImageView gradientView;
private GuardHintAnimator guardHintAnimator;
private TextView guardPhraseView;
private boolean isRunning = false;
private final FormattingLogger logger = FormattingLoggers.getLogger(this);
private VoiceConfig onWindowFocusChangedRecoverConfig;
private VoiceConfig voiceConfig;
@VisibleForTesting
VoiceInputHelper voiceInputHelper;
private IVoiceMenuDialog voiceMenuDialog;
public FormattingLogger getLogger()
{
return this.logger;
}
public boolean isRunning()
{
return this.isRunning;
}
@Override
protected void onCreateInternal(Bundle bundle) {
super.onCreateInternal(bundle);
this.voiceInputHelper = new VoiceInputHelper(this, new DelegatingVoiceListener(this)
{
public VoiceConfig onVoiceCommand(VoiceCommand paramAnonymousVoiceCommand)
{
if ((!MainActivity.this.hasWindowFocus()) && (!MainActivity.this.isMessageShowing()))
{
MainActivity.this.logger.d("Ignoring voice command because we don't have window focus.", new Object[0]);
return null;
}
Log.d("listener",paramAnonymousVoiceCommand.toString());
//return super.onVoiceCommand(paramAnonymousVoiceCommand);
return null;
}
}, getVoiceServiceExtras());
}
protected void onPauseInternal()
{
this.isRunning = false;
super.onPauseInternal();
closeVoiceMenu();
this.voiceInputHelper.setVoiceConfig(VoiceConfig.OFF);
this.voiceInputHelper.unregisterGrammarLoaders();
}
public void closeVoiceMenu()
{
if (this.voiceMenuDialog != null)
{
this.voiceMenuDialog.dismiss(false);
this.voiceMenuDialog = null;
}
}
public void onPrepareVoiceMenu(VoiceMenuDialog paramVoiceMenuDialog) {}
public boolean onResampledAudioData(byte[] paramArrayOfByte, int paramInt1, int paramInt2)
{
return false;
}
protected void onResumeInternal()
{
this.isRunning = true;
super.onResumeInternal();
this.voiceInputHelper.registerGrammarLoaders();
this.voiceInputHelper.setWantAudioData(shouldProvideAudioData());
NetworkUtil.checkNetwork();
VoiceConfig localVoiceConfig = new VoiceConfig();
String[] arrayOfString = new String[1];
arrayOfString[0] = "nice";
localVoiceConfig = localVoiceConfig.setCustomPhrases(arrayOfString).setShouldSaveAudio(true);
voiceInputHelper.setVoiceConfig(localVoiceConfig);
}
public boolean isVoiceMenuShowing()
{
return (this.voiceMenuDialog != null) && (this.voiceMenuDialog.isShowing());
}
public VoiceConfig onVoiceCommand(VoiceCommand paramVoiceCommand)
{
Log.d("hhh",paramVoiceCommand.toString());
this.logger.w("Unrecognized voice command: %s", new Object[] { paramVoiceCommand });
return null;
}
protected Bundle getVoiceServiceExtras()
{
Bundle localBundle = new Bundle();
/* if (getIntent().hasExtra("serviceExtras"))
{
localBundle.putAll(getIntent().getBundleExtra("serviceExtras"));
}*/
return localBundle;
}
public void setVoiceConfig(VoiceConfig paramVoiceConfig)
{
this.voiceConfig = paramVoiceConfig;
if (paramVoiceConfig != null) {
this.voiceInputHelper.setVoiceConfig(this.voiceConfig);
}
}
public boolean shouldProvideAudioData()
{
return false;
}
public void onVoiceConfigChanged(VoiceConfig paramVoiceConfig, boolean paramBoolean) {}
}
DelegatingVoiceListener :
class DelegatingVoiceListener implements VoiceListener
{
private final VoiceListener delegate;
DelegatingVoiceListener(VoiceListener paramVoiceListener)
{
this.delegate = paramVoiceListener;
}
public FormattingLogger getLogger()
{
return this.delegate.getLogger();
}
public boolean isRunning()
{
return this.delegate.isRunning();
}
public boolean onResampledAudioData(byte[] paramArrayOfByte, int paramInt1, int paramInt2)
{
return this.delegate.onResampledAudioData(paramArrayOfByte, paramInt1, paramInt2);
}
public VoiceConfig onVoiceCommand(VoiceCommand paramVoiceCommand)
{
return this.delegate.onVoiceCommand(paramVoiceCommand);
}
public void onVoiceConfigChanged(VoiceConfig paramVoiceConfig, boolean paramBoolean)
{
this.delegate.onVoiceConfigChanged(paramVoiceConfig, paramBoolean);
}
}
You need to request special permissions in your manifest to implement unlisted voice commands. Go here. However, I doubt you can change the 'ok glass' voice command. You can still try if you really want to.

Android sound pool function

Is it possible to check the status of SoundPool? I want to perform some action when a sound starts and stops, like MediaPlayer's isPlaying() function. Does SoundPool have this type of functionality?
Unfortunately it looks like the SoundPool API doesn't provide that functionality; it lets you start and stop a sound but doesn't have a method to check its status. In one of my apps I basically hacked around it by maintaining my own boolean that I set true when I started the sound, then set false when I stopped it or enough time had passed.
// your code to start the sound here, e.g. soundPool.play(...)
long soundStartTime = System.currentTimeMillis();
boolean soundPlaying = true;
// ... other code here ...
if (System.currentTimeMillis() - soundStartTime > SOUND_LENGTH_MILLIS) {
    soundPlaying = false;
    // also set it to false at the beginning of a level and whenever you stop the sound manually
}
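If you want that bookkeeping in one place, here is a small self-contained sketch of the same idea; TrackedSoundPool is my own wrapper name (not part of the SoundPool API), and it assumes you know or have measured each clip's length:
import android.content.Context;
import android.media.AudioManager;
import android.media.SoundPool;
import android.os.SystemClock;

// Illustrative wrapper: SoundPool itself has no isPlaying(), so track it manually.
public final class TrackedSoundPool {
    private final SoundPool soundPool = new SoundPool(4, AudioManager.STREAM_MUSIC, 0);
    private long startedAtMillis = -1;
    private long clipLengthMillis;
    private int lastStreamId;

    public int load(Context context, int resId) {
        return soundPool.load(context, resId, 1);
    }

    public void play(int soundId, long clipLengthMillis) {
        this.clipLengthMillis = clipLengthMillis;       // caller supplies the clip duration
        startedAtMillis = SystemClock.elapsedRealtime();
        lastStreamId = soundPool.play(soundId, 1f, 1f, 1, 0, 1f);
    }

    public void stop() {
        soundPool.stop(lastStreamId);
        startedAtMillis = -1;                           // manual stop: no longer playing
    }

    public boolean isPlaying() {
        return startedAtMillis >= 0
                && SystemClock.elapsedRealtime() - startedAtMillis < clipLengthMillis;
    }
}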
Refer to this code.
SoundPoolManager class:
package com.demosoft.music;
import android.media.SoundPool;
import android.media.JetPlayer;
class SoundPoolEvent
{
public SoundPoolEvent(int eventType,int eventSound)
{
this.eventType = eventType;
this.eventSound = eventSound;
}
public int eventType;
public int eventSound;
public static final int SOUND_PLAY=0;
public static final int SOUND_STOP=1;
public static final int SOUND_MUSIC_PLAY=2;
public static final int SOUND_MUSIC_PAUSE=3;
public static final int SOUND_MUSIC_STOP=4;
public static final int SOUND_MUSIC_RESUME=5;
}
class SoundStatus
{
public SoundStatus()
{
}
public static final int STATUS_LOOPING_NOT_STARTED=0;
public static final int STATUS_LOOPING_PAUSED=1;
public static final int STATUS_LOOPING_PLAYING=2;
}
public class SoundPoolManager implements Sound
{
SoundPoolManager(android.content.Context context)
{
this.context = context;
soundEvents = new java.util.LinkedList<SoundPoolEvent>();
sounds = new java.util.HashMap<Integer, Boolean>();
handles = new java.util.HashMap<Integer, Integer>();
streamIds = new java.util.HashMap<Integer, Integer>();
isRunning = false;
finished = false;
this.musicPlayer =JetPlayer.getJetPlayer();
this.musicPlayer.loadJetFile(context.getResources().openRawResourceFd(R.raw.notify));
byte segmentId = 0;
this.musicPlayer.queueJetSegment(0, -1, -1, 0, 0, segmentId++);
}
public void addSound(int resid, boolean isLooping)
{
sounds.put(resid, new Boolean(isLooping));
}
public void startSound()
{
this.soundPool = new android.media.SoundPool(this.sounds.size(),android.media.AudioManager.STREAM_MUSIC,100);
java.util.Iterator<Integer> iterator = sounds.keySet().iterator();
while(iterator.hasNext())
{
int soundid = iterator.next().intValue();
int soundhandle = this.soundPool.load(this.context, soundid, 1);
handles.put(new Integer(soundid), new Integer(soundhandle));
}
}
public void stopSound()
{
try
{
java.util.Iterator<Integer> iterator = sounds.keySet().iterator();
while(iterator.hasNext())
{
int soundid = iterator.next().intValue();
this.soundPool.pause( this.handles.get(soundid).intValue());
this.soundPool.stop(this.handles.get(soundid).intValue());
}
}
catch(Exception e)
{
}
finally
{
try
{
this.musicPlayer.pause();
}
catch(Exception e1)
{
}
try
{
this.musicPlayer.release();
}
catch(Exception e2)
{
}
try
{
this.soundPool.release();
}
catch(Exception e3)
{
}
}
}
public int currentPlayer;
private boolean isRunning;
private java.util.HashMap<Integer, Boolean> sounds;
private java.util.HashMap<Integer, Integer> handles;
private java.util.HashMap<Integer, Integer> streamIds;
private android.content.Context context;
private java.util.LinkedList<SoundPoolEvent> soundEvents;
private java.util.HashMap<Integer, android.media.MediaPlayer> mediaPlayers;
public void stopSound(int resid)
{
}
public void playSound(int resid)
{
if(soundEvents!=null)
{
try
{
android.media.AudioManager mgr = (android.media.AudioManager) context.getSystemService(android.content.Context.AUDIO_SERVICE);
int streamVolume = mgr.getStreamVolume(android.media.AudioManager.STREAM_MUSIC);
int streamID = soundPool.play(handles.get( resid).intValue(), streamVolume, streamVolume, 1, 0, 1.0f);
int maxvolume = mgr.getStreamMaxVolume(android.media.AudioManager.STREAM_MUSIC);
mgr.setStreamVolume(android.media.AudioManager.STREAM_MUSIC, maxvolume, 0);
this.streamIds.put(resid, streamID);
}
catch(Exception e)
{
}
}
}
public void startMusic(int resid)
{
this.musicPlayer.play();
}
public void stopMusic(int resid)
{
this.musicPlayer.pause();
}
public void pauseMusic(int resid)
{
this.musicPlayer.pause();
}
public void resumeMusic(int resid)
{
this.musicPlayer.play();
}
SoundPool soundPool;
JetPlayer musicPlayer;
boolean finished = false;
}
And add the Sound interface to the same package:
package com.demosoft.music;
public interface Sound {
public void addSound(int resid, boolean isLooping);
public void startSound();
public void stopSound();
public void stopSound(int resid);
public void playSound(int resid);
public void startMusic(int resid);
public void stopMusic(int resid);
public void pauseMusic(int resid);
public void resumeMusic(int resid);
}
And you can use it in different ways:
1. By creating a SoundPoolManager instance directly.
2. By using a Sound interface reference.
SoundPoolManager m = new SoundPoolManager(context);
m.addSound(R.raw.demo, false);
m.addSound(R.raw.soft,true);
m.startSound();
m.playSound(R.raw.demo);
m.startMusic(R.raw.soft);
or
private Sound soundManager = new SoundPoolManager(context);
soundManager.playSound(R.raw.demo);
public synchronized void stopMusic()
{
    soundManager.stopSound();
}
