I have a Xamarin app that was not meant to handle Android's TalkBack functionality, because for TalkBack to work well the app has to be built in a specific way.
My app is a little older, and I simply can't redo the whole thing.
So, what is happening?
My Xamarin app is made with non-native libs that are not supported by TalkBack. So, when the user turns on TalkBack, the app effectively stops receiving DPAD events, since they are handled by the system's Accessibility Service.
That service gets the events and tries to handle them within my app, but since my components are non-native, the system does not recognize them and the DPAD presses are wasted, hence the illusion that the DPAD is not working.
So, what do you have to do if you just want to handle the DPAD events (and nothing else) yourself with TalkBack on?
The answer to this post contains the code that implements the following behavior:
1. TalkBack won't be able to 'talk' about your components
2. The DPAD events will be handled by an Accessibility Delegate
3. A virtual DPAD will handle the navigation
4. The green rectangle used for focus will be disabled, since you won't need it anyway
5. The app will look exactly the same with TalkBack on and off
This post was made for educational purposes, since I had a hard time coming up with the solution, and I hope the next person finds it helpful.
The first step is to create a class that inherits from AccessibilityDelegateCompat in order to create our own accessibility delegate.
class MyAccessibilityHelper : AccessibilityDelegateCompat
{
const string Tag = "MyAccessibilityHelper";
const int ROOT_NODE = -1;
const int INVALID_NODE = -1000;
const string NODE_CLASS_NAME = "My_Node";
public const int NODE_UP = 1;
public const int NODE_LEFT = 2;
public const int NODE_CENTER = 3;
public const int NODE_RIGHT = 4;
public const int NODE_DOWN = 5;
private class MyAccessibilityProvider : AccessibilityNodeProviderCompat
{
private readonly MyAccessibilityHelper mHelper;
public MyAccessibilityProvider(MyAccessibilityHelper helper)
{
mHelper = helper;
}
public override bool PerformAction(int virtualViewId, int action, Bundle arguments)
{
return mHelper.PerformNodeAction(virtualViewId, action, arguments);
}
public override AccessibilityNodeInfoCompat CreateAccessibilityNodeInfo(int virtualViewId)
{
var node = mHelper.CreateNode(virtualViewId);
return AccessibilityNodeInfoCompat.Obtain(node);
}
}
private readonly View mView;
private readonly MyAccessibilityProvider mProvider;
private Dictionary<int, Rect> mRects = new Dictionary<int, Rect>();
private int mAccessibilityFocusIndex = INVALID_NODE;
public MyAccessibilityHelper(View view)
{
mView = view;
mProvider = new MyAccessibilityProvider(this);
}
public override AccessibilityNodeProviderCompat GetAccessibilityNodeProvider(View host)
{
return mProvider;
}
public override void SendAccessibilityEvent(View host, int eventType)
{
Android.Util.Log.Debug(Tag, "SendAccessibilityEvent: host={0} eventType={1}", host, eventType);
base.SendAccessibilityEvent(host, eventType);
}
public void AddRect(int id, Rect rect)
{
mRects.Add(id, rect);
}
public AccessibilityNodeInfoCompat CreateNode(int virtualViewId)
{
var node = AccessibilityNodeInfoCompat.Obtain(mView);
if (virtualViewId == ROOT_NODE)
{
node.ContentDescription = "Root node";
ViewCompat.OnInitializeAccessibilityNodeInfo(mView, node);
foreach (var r in mRects)
{
node.AddChild(mView, r.Key);
}
}
else
{
node.ContentDescription = "";
node.ClassName = NODE_CLASS_NAME;
node.Enabled = true;
node.Focusable = true;
var r = mRects[virtualViewId];
node.SetBoundsInParent(r);
int[] offset = new int[2];
mView.GetLocationOnScreen(offset);
node.SetBoundsInScreen(new Rect(offset[0] + r.Left, offset[1] + r.Top, offset[0] + r.Right, offset[1] + r.Bottom));
node.PackageName = mView.Context.PackageName;
node.SetSource(mView, virtualViewId);
node.SetParent(mView);
node.VisibleToUser = true;
if (virtualViewId == mAccessibilityFocusIndex)
{
node.AccessibilityFocused = true;
node.AddAction(AccessibilityNodeInfoCompat.ActionClearAccessibilityFocus);
}
else
{
node.AccessibilityFocused = false;
node.AddAction(AccessibilityNodeInfoCompat.ActionAccessibilityFocus);
}
}
return node;
}
private AccessibilityEvent CreateEvent(int virtualViewId, EventTypes eventType)
{
var e = AccessibilityEvent.Obtain(eventType);
if (virtualViewId == ROOT_NODE)
{
ViewCompat.OnInitializeAccessibilityEvent(mView, e);
}
else
{
var record = AccessibilityEventCompat.AsRecord(e);
record.Enabled = true;
record.SetSource(mView, virtualViewId);
record.ClassName = NODE_CLASS_NAME;
e.PackageName = mView.Context.PackageName;
}
return e;
}
public bool SendEventForVirtualView(int virtualViewId, EventTypes eventType)
{
if (mView.Parent == null)
return false;
var e = CreateEvent(virtualViewId, eventType);
return ViewParentCompat.RequestSendAccessibilityEvent(mView.Parent, mView, e);
}
public bool PerformNodeAction(int virtualViewId, int action, Bundle arguments)
{
if (virtualViewId == ROOT_NODE)
{
return ViewCompat.PerformAccessibilityAction(mView, action, arguments);
}
else
{
switch (action)
{
case AccessibilityNodeInfoCompat.ActionAccessibilityFocus:
if (virtualViewId != mAccessibilityFocusIndex)
{
if (mAccessibilityFocusIndex != INVALID_NODE)
{
SendEventForVirtualView(mAccessibilityFocusIndex, EventTypes.ViewAccessibilityFocusCleared);
}
mAccessibilityFocusIndex = virtualViewId;
mView.Invalidate();
SendEventForVirtualView(virtualViewId, EventTypes.ViewAccessibilityFocused);
// virtual key event
switch (virtualViewId)
{
case NODE_UP:
HandleDpadEvent(Keycode.DpadUp);
break;
case NODE_LEFT:
HandleDpadEvent(Keycode.DpadLeft);
break;
case NODE_RIGHT:
HandleDpadEvent(Keycode.DpadRight);
break;
case NODE_DOWN:
HandleDpadEvent(Keycode.DpadDown);
break;
}
// refocus center
SendEventForVirtualView(NODE_CENTER, EventTypes.ViewAccessibilityFocused);
return true;
}
break;
case AccessibilityNodeInfoCompat.ActionClearAccessibilityFocus:
mView.RequestFocus();
if (virtualViewId == mAccessibilityFocusIndex)
{
mAccessibilityFocusIndex = INVALID_NODE;
mView.Invalidate();
SendEventForVirtualView(virtualViewId, EventTypes.ViewAccessibilityFocusCleared);
return true;
}
break;
}
}
return false;
}
private void HandleDpadEvent(Keycode keycode)
{
//Here you know what DPAD was pressed
//You can create your own key event and send it to your app
//This code depends on your own application, and I wont be providing the code
//Note, it is important to handle both, the KeyDOWN and the KeyUP event for it to work
}
}
Since the code is a bit large, I'll just explain the crucial parts.
Once TalkBack is active, the dictionary (from our view below) will be used to create a virtual node tree for our virtual DPAD. With that in mind, the function PerformNodeAction will be the most important one.
It handles the actions once a virtual node has been focused by the accessibility system, based on the provided id of the virtual element. There are two parts: the first one is the ROOT_NODE, which is the view itself that contains our virtual DPAD and can mostly be ignored; the second part is where the handling is done.
The second part is where the actions ActionAccessibilityFocus and ActionClearAccessibilityFocus are handled. Both are important, but the first one is where we can finally handle our virtual DPAD.
What is done here is that, with the virtual ID provided from the dictionary, we know which DPAD node was selected (virtualViewId). Based on the selected DPAD node, we can perform the action we want in the HandleDpadEvent function. What is important to notice is that after we handle the selected DPAD's event, we refocus the CENTER node in order to be ready to handle the next button press. This is very important, since you don't want to find yourself in a situation where you press DOWN and then UP, only for the virtual DPAD to focus the CENTER pad instead of registering the UP press.
So, I'll repeat myself: the refocusing of the CENTER pad after the previous DPAD event has been handled needs to be done so that we know EXACTLY where we will be when the next DPAD button is pressed!
There is one function that I won't post here, since its code is very specific to my app: HandleDpadEvent. In it you must create a key-down and a key-up event and send them to your main activity, where OnKeyDown/OnKeyUp will be triggered. Once you do that, the delegate is done.
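For illustration only, here is a minimal sketch of what such a function could look like. This is not the original implementation; it assumes the view's Context is your main activity and simply dispatches a synthetic key press to it (types from Android.App, Android.OS and Android.Views):
private void HandleDpadEvent(Keycode keycode)
{
    // Sketch only: forward the virtual DPAD press to the hosting activity so that
    // its OnKeyDown/OnKeyUp overrides fire just as they would with TalkBack off.
    var activity = mView.Context as Activity;
    if (activity == null)
        return;
    long now = SystemClock.UptimeMillis();
    // Both the DOWN and the UP event must be dispatched for the press to count as a full key press.
    activity.DispatchKeyEvent(new KeyEvent(now, now, KeyEventActions.Down, keycode, 0));
    activity.DispatchKeyEvent(new KeyEvent(now, now, KeyEventActions.Up, keycode, 0));
}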
And once the Delegate is done, we have to make our view like this:
/**
* SimplestCustomView
*/
public class AccessibilityHelperView : View
{
private MyAccessibilityHelper mHelper;
Dictionary<int, Rect> virtualIdRectMap = new Dictionary<int, Rect>();
public AccessibilityHelperView(Context context) :
base(context)
{
Init();
}
public AccessibilityHelperView(Context context, IAttributeSet attrs) :
base(context, attrs)
{
Init();
}
public AccessibilityHelperView(Context context, IAttributeSet attrs, int defStyle) :
base(context, attrs, defStyle)
{
Init();
}
public void Init()
{
this.SetFocusable(ViewFocusability.Focusable);
this.Focusable = true;
this.FocusedByDefault = true;
setRectangle();
mHelper = new MyAccessibilityHelper(this);
ViewCompat.SetAccessibilityDelegate(this, mHelper);
foreach (var r in virtualIdRectMap)
{
mHelper.AddRect(r.Key, r.Value);
}
}
private void setRectangle()
{
virtualIdRectMap.Add(MyAccessibilityHelper.NODE_CENTER, new Rect(1, 1, 2, 2));
virtualIdRectMap.Add(MyAccessibilityHelper.NODE_LEFT, new Rect(0, 1, 1, 2));
virtualIdRectMap.Add(MyAccessibilityHelper.NODE_UP, new Rect(1, 0, 2, 1));
virtualIdRectMap.Add(MyAccessibilityHelper.NODE_RIGHT, new Rect(2, 1, 3, 2));
virtualIdRectMap.Add(MyAccessibilityHelper.NODE_DOWN, new Rect(1, 2, 2, 3));
}
protected override void OnDraw(Canvas canvas)
{
base.OnDraw(canvas);
}
}
What is there to notice about that view?
The node pads are sized in pixels and sit in the top-left corner of your app.
They are set to that single-pixel size because TalkBack would otherwise highlight the first node pad added to the dictionary with a green rectangle (that's standard TalkBack behavior).
All the rectangles in the view are added to a dictionary that will be used in our own accessibility delegate. Note that the CENTER pad was added first and will therefore be the one focused by default once TalkBack is activated.
The Init function
The Init function is crucial: this is where we configure our view and set some TalkBack parameters necessary for our virtual DPAD to be recognized by the system's own Accessibility Service.
It is also where our accessibility delegate is initialized, along with the dictionary of all the created DPAD nodes.
OK, so far we have made a delegate and a view. I placed them both in the same file so they can see each other, but that is not a must.
So what now? We must add the AccessibilityHelperView to our app, in the MainActivity.cs file:
AccessibilityHelperView mAccessibilityHelperView;
In the OnCreate function, you can add the following code to initialize the view:
mAccessibilityHelperView = new AccessibilityHelperView(this);
In the OnResume function, you can check whether TalkBack is on or off and, based on the result, add or remove the mAccessibilityHelperView from your mBackgroundLayout (AddView and RemoveView).
The OnResume function should look like this:
if (TalkbackEnabled && !_isVirtualDPadShown)
{
mBackgroundLayout.AddView(mAccessibilityHelperView);
_isVirtualDPadShown = true;
}
else if (!TalkbackEnabled && _isVirtualDPadShown)
{
mBackgroundLayout.RemoveView(mAccessibilityHelperView);
_isVirtualDPadShown = false;
}
The TalkbackEnabled property is a local one that checks whether the TalkBack service is on or off, like this:
public bool TalkbackEnabled
{
get
{
AccessibilityManager am = MyApp.Instance.GetSystemService(Context.AccessibilityService) as AccessibilityManager;
if (am == null) return false;
String TALKBACK_SETTING_ACTIVITY_NAME = "com.android.talkback.TalkBackPreferencesActivity";
var serviceList = am.GetEnabledAccessibilityServiceList(FeedbackFlags.AllMask);
foreach (AccessibilityServiceInfo serviceInfo in serviceList)
{
String name = serviceInfo.SettingsActivityName;
if (name.Equals(TALKBACK_SETTING_ACTIVITY_NAME))
{
Log.Debug(LogArea, "Talkback is active");
return true;
}
}
Log.Debug(LogArea, "Talkback is inactive");
return false;
}
}
That should be all you need to make it work.
Hope I could help you out.
I have a strange issue with the onQueryTextChange method of SearchView in Android. My problem is that when specific text, e.g. "Al a", has been entered (i.e. right after entering the letter 'a' in the search field), the SearchView closes by itself. But if I enter "Al A", i.e. capitalizing the 'a', there is no issue. Similarly, the same issue persists with "Al b", "Al c", etc. At first I thought it had something to do with escape sequences, but that's not true. I am clueless, please help.
private void search(SearchView searchView) {
searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
@Override
public boolean onQueryTextSubmit(String query) {
return false;
}
@Override
public boolean onQueryTextChange(String newText) {
System.out.println("text entered="+newText);
if (condition!= null) {
if (newText.length()!=0) { //if newText is not empty
} else {
isFiltersActive = false;
}
ListAdapter.getFilter().filter(newText);
} else
isFiltersActive = false;
return true;
}
});
searchView.setOnCloseListener(new SearchView.OnCloseListener() {
@Override
public boolean onClose() {
return false; // onClose must return a value; false keeps the default close behavior
}
});
}
onQueryTextSubmit:
Called when the user submits the query. This could be due to a key
press on the keyboard or due to pressing a submit button. The listener
can override the standard behavior by returning true to indicate that
it has handled the submit request. Otherwise return false to let the
SearchView handle the submission by launching any associated intent.
Return true from the onQueryTextSubmit function.
To check whether a String is empty or not, use
TextUtils.isEmpty(), which returns true if the string is null or 0-length.
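If you happen to be using the Xamarin.Android (C#) bindings rather than Java, the same two changes look roughly like this. This is only a sketch: listAdapter and isFiltersActive are placeholders standing in for the question's own fields, and setting Handled = true is the binding's equivalent of returning true from the listener:
searchView.QueryTextSubmit += (sender, e) =>
{
    // Equivalent of returning true from onQueryTextSubmit.
    e.Handled = true;
};
searchView.QueryTextChange += (sender, e) =>
{
    // TextUtils.IsEmpty returns true if the string is null or 0-length.
    if (!TextUtils.IsEmpty(e.NewText))
        listAdapter.Filter.InvokeFilter(e.NewText);
    else
        isFiltersActive = false;
    // Equivalent of returning true from onQueryTextChange.
    e.Handled = true;
};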
<CC.CustomEditText
android:id="@+id/receptionIdentityArticle"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="8dp"
android:theme="@style/ExtendProTheme"
android:maxLength="20"
style="@style/ExtendProTheme.EditText"
android:layout_below="@+id/suppliersSearchInputLabel"
local:MvxBind="Text ArticleSearchClause, Mode=TwoWay; EnterCommand SearchArticlesCommand; Error Errors['ArticleSearchClause']; Click OnSearchClickCommand" />
So I have a CustomEditText that differs from a plain EditText by subscribing to two events:
this.KeyPress += OnEnterKeyPressed;
this.FocusChange += OnFocusChange;
My problem is that the Click command is triggered only the second time I click on the EditText. The first time it just gets focused; then, when I click it a second time, the Click command triggers. I guess that's how it's supposed to work, but I would like to catch the first click on the EditText. Maybe another event gets triggered, but I could not find documentation listing all the possible bindings on an EditText. Any ideas how I can catch the first click on an EditText?
You can use the Touch event instead of Click to get the event to fire on the first click. Unfortunately, the behavior you described is normal for Android (even though confusing) and isn't related to MvvmCross.
As @hankide said, use the Touch event instead. You will need to create a custom binding. I happen to have just dealt with this, so here it is:
public class MvxViewTouchBinding
: MvxAndroidTargetBinding
{
private readonly View _view;
private IMvxCommand _command;
public MvxViewTouchBinding(View view) : base(view)
{
_view = view;
_view.Touch += ViewOnTouch;
}
private void ViewOnTouch(object sender, View.TouchEventArgs eventArgs)
{
eventArgs.Handled = false;
if (_command != null)
{
_command.Execute();
}
}
public override void SetValue(object value)
{
_command = (IMvxCommand)value;
}
protected override void Dispose(bool isDisposing)
{
if (isDisposing)
{
_view.Touch -= ViewOnTouch;
}
base.Dispose(isDisposing);
}
protected override void SetValueImpl(object target, object value)
{
}
public override Type TargetType
{
get { return typeof(IMvxCommand); }
}
public override MvxBindingMode DefaultMode
{
get { return MvxBindingMode.OneWay; }
}
}
and in your Setup.cs put
protected override void FillTargetFactories(MvvmCross.Binding.Bindings.Target.Construction.IMvxTargetBindingFactoryRegistry registry)
{
base.FillTargetFactories(registry);
registry.RegisterCustomBindingFactory<View>("Touch",
view => new MvxViewTouchBinding(view));
}
Then you can bind to Touch instead of Click, e.g. local:MvxBind="Touch YourCommand" in your layout (where YourCommand is whatever IMvxCommand your view model exposes).
I have an Entry and a Button:
<StackLayout>
<CustomViews:ChatEntryView x:Name="ChatEntry" />
<Button Text="Send" Command="SendCommand"/>
</StackLayout>
What I wanted to achieve here is that when the user starts typing something in the Entry control and then presses the button, it should not hide the keyboard (or make the Entry lose focus).
The ChatEntryView here is actually just a custom view that inherits from the Entry control, and here is what I did inside:
1.) Added an Unfocused handler
Unfocused += ChatEntryView_Unfocused;
void ChatEntryView_Unfocused(object sender, FocusEventArgs e)
{
this.Focus();
}
2.) Tried Handling on PropertyChanged
protected override void OnPropertyChanged(string propertyName = null)
{
this.Focus();
base.OnPropertyChanged(propertyName);
}
3.) Tried Handling on PropertyChanging
protected override void OnPropertyChanging(string propertyName = null)
{
this.Focus();
base.OnPropertyChanging(propertyName);
}
But none of the three methods seem to work. I was able to do a workaround on iOS by making a custom renderer, and it's not very neat (by actually interfacing with Control.ShouldEndEditing on iOS).
But my problem now is on Android, as I don't know exactly how to do this on Android, and there's no Control.ShouldEndEditing equivalent that I can work with.
What happens with the handlers above is that the keyboard for the entry view still loses focus and then immediately gets focused again, which is very odd.
The keyboard pushes down (loses focus) and then pushes up (forced focus).
I know it's too late to answer this question, but it might be helpful for someone else. I added this code to MainActivity; it might not be a neat solution, but it works for me:
private bool _lieAboutCurrentFocus;
public override bool DispatchTouchEvent(MotionEvent ev)
{
var focused = CurrentFocus;
bool customEntryRendererFocused = focused != null && focused.Parent is CustomEntryRenderer_Droid;
_lieAboutCurrentFocus = customEntryRendererFocused;
var result = base.DispatchTouchEvent(ev);
_lieAboutCurrentFocus = false;
return result;
}
public override View CurrentFocus
{
get
{
if (_lieAboutCurrentFocus)
{
return null;
}
return base.CurrentFocus;
}
}
I have an Android WebView which displays some links, e.g. Link1Text and Link2Text. Now I would like to retrieve Link1Text and Link2Text when I long-press these links. I have a context menu implemented in the code, and I could successfully get the link URLs (http://link1.html, http://link2.html) using the HitTestResult getExtra() method, but how can I get those link texts? FYI, I require those link texts for implementing a "Copy link text" option in the context menu.
To get the text of an anchor link:
I. Hook a touchstart listener to every web page in the onPageFinished() callback of WebViewClient via evaluateJavascript, like:
//Javascripts to evaluate in onPageFinished
const w=window;
w.addEventListener('touchstart',wrappedOnDownFunc);
function wrappedOnDownFunc(e){
if(e.touches.length==1){
w._touchtarget = e.touches[0].target;
}
console.log('hey touched something ' +w._touchtarget);
}
Note that we've saved the touch target.
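For completeness, here is a rough sketch of how that script could be injected from onPageFinished. It is shown with the C#/Xamarin.Android WebViewClient binding (the class name TouchTargetWebViewClient is just an example); the equivalent Java calls are onPageFinished() and evaluateJavascript():
class TouchTargetWebViewClient : WebViewClient
{
    // The same touchstart listener as above, kept as a string so it can be injected into each page.
    const string TouchTargetScript =
        "window.addEventListener('touchstart', function (e) {" +
        "  if (e.touches.length == 1) { window._touchtarget = e.touches[0].target; }" +
        "});";

    public override void OnPageFinished(WebView view, string url)
    {
        base.OnPageFinished(view, url);
        // Inject the listener so the long-press handler can read window._touchtarget later.
        view.EvaluateJavascript(TouchTargetScript, null);
    }
}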
II. Then implement an OnLongClickListener for the WebView. Use evaluateJavascript again when a link object is long-pressed:
@Override
public boolean onLongClick(View v) {
WebView.HitTestResult result = ((WebView)v).getHitTestResult();
if (null == result) return false;
int type = result.getType();
switch (type) {
case WebView.HitTestResult.SRC_ANCHOR_TYPE:
if(result.getExtra()!=null){
((WebView)v).evaluateJavascript("window._touchtarget?window._touchtarget.innerText:''", new ValueCallback<String>() {
@Override
public void onReceiveValue(String value) {
System.out.println("hey received link text : "+value);
}
});
}
return true;
}
return false;
}
What's more, we can even choose to select the text of the anchor element! Actually, this is one of the options that the Samsung browser offers when you long-press an <a> tag.
To achieve this, we still need that recorded touch target. Besides, we need two new JavaScript methods:
function selectTouchtarget(){
var tt = w._touchtarget;
if(tt){
w._touchtarget_href = tt.getAttribute("href");
tt.removeAttribute("href");
var sel = w.getSelection();
var range = document.createRange();
range.selectNodeContents(tt);
sel.removeAllRanges();
sel.addRange(range);
}
}
function restoreTouchtarget(){
var tt = w._touchtarget;
if(tt){
tt.setAttribute("href", w._touchtarget_href);
}
}
Finally, in the onLongClick listener, instead of just fetching the innerText, we programmatically set the selection, trigger the action-mode bar, and restore the removed href attribute of our touch target.
case WebViewmy.HitTestResult.SRC_ANCHOR_TYPE:
if(result.getExtra()!=null){
WebViewmy mWebView = ((WebViewmy)v);
mWebView.evaluateJavascript("selectTouchtarget()", new ValueCallback<String>() {
@Override
public void onReceiveValue(String value) {
/* bring in action mode by a fake click on the programmatically selected text. */
MotionEvent te = MotionEvent.obtain(0, 0, MotionEvent.ACTION_DOWN, mWebView.lastX, mWebView.lastY, 0);
mWebView.dispatchTouchEvent(te);
te.setAction(MotionEvent.ACTION_UP);
mWebView.dispatchTouchEvent(te);
te.recycle();
// If this is not delayed for a while, or the href attribute is not removed, the above code would click into
// the anchor element instead of selecting its text.
/* restore href attribute */
mWebView.postDelayed(() -> mWebView.evaluateJavascript("restoreTouchtarget()", null), 100);
}
});
}
return true;
In my case, I've extended WebView as WebViewmy to record the last touched positions, lastX and lastY, in the onTouchEvent method.
Unfortunately, a clear, official way to do this is not available. There are two APIs (selectText and copySelection), pending API council approval, that may help with this, but they are not available at the moment.