Commit dae7e6be authored by David Tseng's avatar David Tseng Committed by Commit Bot

More accurately reflect touched items when touch exploring

This is the first step in a series of changes to improve touch interaction with ChromeVox.

This change:
- introduces a new earcon when entering a touch anchor
- introduces a new earcon when touching a blank area of the screen (defined as an anchor that is neither a touch leaf nor a touch object)
- when moving from a touch anchor to a non-touch anchor, immediately stop speech so the user knows they have left. This will help avoid confusion about what's currently focused or not when hovering a finger. This might also be familiar (e.g. iOS behaves this way).

Note for the last point, we will have to solve the issue of really small touch targets. One potential solution (not for this change) is to come up with a way to do "fuzzy" hit testing.

Change-Id: I262c664bc0d86aa6e8ce96cf1e263db7a8f184cc
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2088510
Commit-Queue: David Tseng <dtseng@chromium.org>
Reviewed-by: default avatarDominic Mazzoni <dmazzoni@chromium.org>
Cr-Commit-Position: refs/heads/master@{#747708}
parent a65d1637
......@@ -54,6 +54,9 @@ DesktopAutomationHandler = class extends BaseAutomationHandler {
/** @private {number?} */
this.delayedAttributeOutputId_;
/** @private {!Date} */
this.lastHoverExit_ = new Date();
this.addListener_(EventType.ALERT, this.onAlert);
this.addListener_(EventType.BLUR, this.onBlur);
this.addListener_(
......@@ -153,7 +156,7 @@ DesktopAutomationHandler = class extends BaseAutomationHandler {
let targetLeaf = null;
let targetObject = null;
while (target && target != target.root) {
if (!targetObject && AutomationPredicate.object(target)) {
if (!targetObject && AutomationPredicate.touchObject(target)) {
targetObject = target;
}
if (AutomationPredicate.touchLeaf(target)) {
......@@ -164,6 +167,19 @@ DesktopAutomationHandler = class extends BaseAutomationHandler {
target = targetLeaf || targetObject;
if (!target) {
// This clears the anchor point in the TouchExplorationController (so
// things like double tap won't be directed to the previous target). It
// also ensures if a user touch explores back to the previous range, it
// will be announced again.
ChromeVoxState.instance.setCurrentRange(null);
// Play an earcon to let the user know they're in the middle of nowhere.
if ((new Date() - this.lastHoverExit_) >
DesktopAutomationHandler.MIN_HOVER_EXIT_SOUND_DELAY_MS) {
ChromeVoxState.instance.nextEarcons_.engine_.onTouchExitAnchor();
this.lastHoverExit_ = new Date();
}
chrome.tts.stop();
return;
}
......@@ -176,6 +192,7 @@ DesktopAutomationHandler = class extends BaseAutomationHandler {
this.textEditHandler_ = null;
}
ChromeVoxState.instance.nextEarcons_.engine_.onTouchEnterAnchor();
Output.forceModeForNextSpeechUtterance(QueueMode.FLUSH);
this.onEventDefault(
new CustomAutomationEvent(evt.type, target, evt.eventFrom));
......@@ -663,6 +680,9 @@ DesktopAutomationHandler.ATTRIBUTE_DELAY_MS = 1500;
*/
DesktopAutomationHandler.announceActions = false;
/** @const {number} */
DesktopAutomationHandler.MIN_HOVER_EXIT_SOUND_DELAY_MS = 500;
/**
* Global instance.
......
......@@ -381,6 +381,34 @@ EarconEngine = class {
this.play('selection_reverse');
}
onTouchEnterAnchor() {
this.play('static', {gain: this.clickVolume});
const freq1 = 220 * Math.pow(EarconEngine.HALF_STEP, 6);
this.generateSinusoidal({
attack: 0.0,
decay: 0.01,
dur: 0.03,
gain: 0.5,
freq: freq1,
overtones: 1,
overtoneFactor: 0.8
});
}
onTouchExitAnchor() {
this.play('static', {gain: this.clickVolume});
const freq1 = 220 * Math.pow(EarconEngine.HALF_STEP, 13);
this.generateSinusoidal({
attack: 0.00001,
decay: 0.01,
dur: 0.1,
gain: 0.3,
freq: freq1,
overtones: 1,
overtoneFactor: 0.1
});
}
/**
* Generate a synthesized musical note based on a sum of sinusoidals shaped
* by an envelope, controlled by a number of properties.
......
......@@ -122,7 +122,7 @@ AutomationPredicate = class {
* @return {boolean}
*/
static touchLeaf(node) {
return !node.firstChild || node.role == Role.BUTTON ||
return !!(!node.firstChild && node.name) || node.role == Role.BUTTON ||
node.role == Role.POP_UP_BUTTON || node.role == Role.SLIDER ||
node.role == Role.TEXT_FIELD ||
(node.role == Role.MENU_ITEM && !hasActionableDescendant(node));
......@@ -238,6 +238,20 @@ AutomationPredicate = class {
node.role != Role.INLINE_TEXT_BOX));
}
/**
* Matches against nodes visited during touch exploration.
* @param {!AutomationNode} node
* @return {boolean}
*/
static touchObject(node) {
// Exclude large objects such as containers.
if (AutomationPredicate.container(node)) {
return false;
}
return AutomationPredicate.object(node);
}
/**
* @param {!AutomationNode} first
* @param {!AutomationNode} second
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment