Commit 1f3da583 authored by dtseng, committed by Commit bot

Use category flush for focus events.

At times, we receive multiple focus events. This is technically fine, but it can cause a lot of queued-up chatter.

In the linked bug, we read a focus event from a previous tab. Ideally, that focus event shouldn't be fired at all (as it once was not), but ChromeVox should be robust enough not to over-speak in this case.

Note that in braille, this results in a "shuffling" effect where one focus event flashes briefly before the expected event comes through.

BUG=646126

CQ_INCLUDE_TRYBOTS=master.tryserver.chromium.linux:closure_compilation

Review-Url: https://codereview.chromium.org/2331253002
Cr-Commit-Position: refs/heads/master@{#419240}
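
In queue-mode terms, the fix moves focus announcements from a hard flush to a category flush, so a stale focus event only displaces other pending focus speech instead of clearing (or piling onto) everything else. The sketch below is a toy model of that behavior, not ChromeVox's actual TTS engine; the ToySpeechQueue class, its property names, and the 'nav' category string are invented for illustration, while the QueueMode values mirror cvox.QueueMode.

// Illustrative sketch only (not the real ChromeVox TTS queue): a toy speech
// queue showing how the three queue modes behave, and why CATEGORY_FLUSH
// keeps repeated focus events from stacking up chatter while leaving
// unrelated queued speech alone.
var QueueMode = {FLUSH: 0, QUEUE: 1, CATEGORY_FLUSH: 2};

/** @constructor */
function ToySpeechQueue() {
  /** @private {!Array<{textString: string, category: string}>} */
  this.pending_ = [];
}

/**
 * @param {string} textString
 * @param {number} queueMode One of QueueMode.
 * @param {{category: (string|undefined)}=} opt_properties
 */
ToySpeechQueue.prototype.speak = function(textString, queueMode,
                                          opt_properties) {
  var category = (opt_properties && opt_properties.category) || '';
  if (queueMode == QueueMode.FLUSH) {
    // Drop everything that hasn't been spoken yet.
    this.pending_ = [];
  } else if (queueMode == QueueMode.CATEGORY_FLUSH) {
    // Drop only pending utterances from the same category, e.g. a stale
    // focus announcement that a newer focus event supersedes.
    this.pending_ = this.pending_.filter(function(u) {
      return u.category != category;
    });
  }
  this.pending_.push({textString: textString, category: category});
};

// Two focus events in quick succession: with CATEGORY_FLUSH the first
// pending announcement is replaced by the second instead of both queueing.
var queue = new ToySpeechQueue();
queue.speak('Old tab, button', QueueMode.CATEGORY_FLUSH, {category: 'nav'});
queue.speak('New tab, button', QueueMode.CATEGORY_FLUSH, {category: 'nav'});
console.log(queue.pending_.length);  // 1
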
parent 55b05c7a
@@ -127,10 +127,10 @@ Background = function() {
   cvox.ExtensionBridge.addMessageListener(this.onMessage_);

-  /** @type {!BackgroundKeyboardHandler} */
+  /** @type {!BackgroundKeyboardHandler} @private */
   this.keyboardHandler_ = new BackgroundKeyboardHandler();

-  /** @type {!LiveRegions} */
+  /** @type {!LiveRegions} @private */
   this.liveRegions_ = new LiveRegions(this);

   /** @type {boolean} @private */
@@ -139,6 +139,7 @@ Background = function() {
   /**
    * Stores the mode as computed the last time a current range was set.
    * @type {?ChromeVoxMode}
+   * @private
    */
   this.mode_ = null;
@@ -519,6 +520,7 @@ Background.prototype = {
    * 1. a url is blacklisted for Classic.
    * 2. the current range is not within web content.
    * @param {string} url
+   * @return {boolean}
    */
  isWhitelistedForCompat_: function(url) {
    return this.isBlacklistedForClassic_(url) || (this.getCurrentRange() &&
@@ -626,7 +628,7 @@ Background.prototype = {
      } else if (action == 'onCommand') {
        CommandHandler.onCommand(msg['command']);
      } else if (action == 'flushNextUtterance') {
-        Output.flushNextSpeechUtterance();
+        Output.forceModeForNextSpeechUtterance(cvox.QueueMode.FLUSH);
      }
      break;
  }
......
@@ -176,7 +176,7 @@ DesktopAutomationHandler.prototype = {
   * @param {!AutomationEvent} evt
   */
  onEventWithFlushedOutput: function(evt) {
-    Output.flushNextSpeechUtterance();
+    Output.forceModeForNextSpeechUtterance(cvox.QueueMode.FLUSH);
    this.onEventDefault(evt);
  },
@@ -187,7 +187,7 @@ DesktopAutomationHandler.prototype = {
    if (ChromeVoxState.instance.currentRange &&
        evt.target == ChromeVoxState.instance.currentRange.start.node)
      return;
-    Output.flushNextSpeechUtterance();
+    Output.forceModeForNextSpeechUtterance(cvox.QueueMode.FLUSH);
    this.onEventDefault(evt);
  },
@@ -251,12 +251,10 @@ DesktopAutomationHandler.prototype = {
    this.createTextEditHandlerIfNeeded_(evt.target);

-    // Since we queue output mostly for live regions support and there isn't a
-    // reliable way to know if this focus event resulted from a user's explicit
-    // action, only flush when the focused node is not web content.
-    if (node.root.role == RoleType.desktop)
-      Output.flushNextSpeechUtterance();
+    // Category flush speech triggered by events with no source. This includes
+    // views.
+    if (evt.eventFrom == '')
+      Output.forceModeForNextSpeechUtterance(cvox.QueueMode.CATEGORY_FLUSH);

    this.onEventDefault(new chrome.automation.AutomationEvent(
        EventType.focus, node, evt.eventFrom));
  },
@@ -427,7 +425,7 @@ DesktopAutomationHandler.prototype = {
      var override = evt.target.role == RoleType.menuItem ||
          (evt.target.root == focus.root &&
          focus.root.role == RoleType.desktop);
-      Output.flushNextSpeechUtterance();
+      Output.forceModeForNextSpeechUtterance(cvox.QueueMode.FLUSH);
      if (override || AutomationUtil.isDescendantOf(evt.target, focus))
        this.onEventDefault(evt);
    }.bind(this));
......
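
The core behavioral change is in the focus handler above: instead of forcing a full flush whenever the focused node sits in the desktop tree, the handler requests a category flush whenever the event has no source (evt.eventFrom == ''), which covers focus moves coming from views. Below is a hedged restatement of just that decision as a standalone helper; the helper name and the 'user' sample value are hypothetical, and the real check lives inline in the handler shown above.

// Hedged sketch: the queue-mode decision for focus events, pulled out of the
// handler for clarity.
var QueueMode = {FLUSH: 0, QUEUE: 1, CATEGORY_FLUSH: 2};

/**
 * Picks a queue-mode override for a focus event, if any.
 * @param {{eventFrom: string}} evt A focus AutomationEvent; only |eventFrom|
 *     matters here.
 * @return {number|undefined} A QueueMode to force for the next utterance, or
 *     undefined to leave the normal queueing behavior alone.
 */
function queueModeForFocus(evt) {
  // Events with no source include focus changes coming from views; category
  // flush them so a stale focus announcement gets replaced rather than spoken
  // in full before the expected one.
  if (evt.eventFrom == '')
    return QueueMode.CATEGORY_FLUSH;
  return undefined;
}

console.log(queueModeForFocus({eventFrom: ''}));      // 2 (CATEGORY_FLUSH)
console.log(queueModeForFocus({eventFrom: 'user'}));  // undefined
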
@@ -112,7 +112,7 @@ ISearch.prototype = {
 * @implements {ISearchHandler}
 */
ISearchUI = function(input) {
-  /** @type {ChromeVoxState} */
+  /** @type {ChromeVoxState} @private */
  this.background_ =
      chrome.extension.getBackgroundPage()['ChromeVoxState']['instance'];
  this.iSearch_ = new ISearch(this.background_.currentRange.start.node);
@@ -204,7 +204,7 @@ ISearchUI.prototype = {
   * @private
   */
  output_: function(node) {
-    Output.flushNextSpeechUtterance();
+    Output.forceModeForNextSpeechUtterance(cvox.QueueMode.FLUSH);
    var o = new Output().withRichSpeechAndBraille(
        cursors.Range.fromNode(node), null, Output.EventType.NAVIGATE).go();
......
@@ -38,7 +38,7 @@ BackgroundKeyboardHandler.prototype = {
      evt.preventDefault();
      evt.stopPropagation();
    }
-    Output.flushNextSpeechUtterance();
+    Output.forceModeForNextSpeechUtterance(cvox.QueueMode.FLUSH);
    return false;
  },
......
@@ -58,13 +58,13 @@ var RoleType = chrome.automation.RoleType;
  */
 Output = function() {
   // TODO(dtseng): Include braille specific rules.
-  /** @type {!Array<!Spannable>} */
+  /** @type {!Array<!Spannable>} @private */
   this.speechBuffer_ = [];
-  /** @type {!Array<!Spannable>} */
+  /** @type {!Array<!Spannable>} @private */
   this.brailleBuffer_ = [];
-  /** @type {!Array<!Object>} */
+  /** @type {!Array<!Object>} @private */
   this.locations_ = [];
-  /** @type {function(?)} */
+  /** @type {function(?)} @private */
   this.speechEndCallback_;

   /**
@@ -688,19 +688,21 @@ Output.EventType = {
 };

 /**
- * If true, the next speech utterance will flush instead of the normal
+ * If set, the next speech utterance will use this value instead of the normal
  * queueing mode.
- * @type {boolean}
+ * @type {cvox.QueueMode|undefined}
  * @private
  */
-Output.flushNextSpeechUtterance_ = false;
+Output.forceModeForNextSpeechUtterance_;

 /**
- * Calling this will make the next speech utterance flush even if it would
- * normally queue or do a category flush.
+ * Calling this will make the next speech utterance use |mode| even if it would
+ * normally queue or do a category flush. This differs from the |withQueueMode|
+ * instance method as it can apply to future output.
+ * @param {cvox.QueueMode} mode
  */
-Output.flushNextSpeechUtterance = function() {
-  Output.flushNextSpeechUtterance_ = true;
+Output.forceModeForNextSpeechUtterance = function(mode) {
+  Output.forceModeForNextSpeechUtterance_ = mode;
 };

 Output.prototype = {
@@ -907,6 +909,7 @@ Output.prototype = {
   /**
    * Triggers callback for a speech event.
    * @param {function()} callback
+   * @return {Output}
    */
   onSpeechEnd: function(callback) {
     this.speechEndCallback_ = function(opt_cleanupOnly) {
@@ -923,9 +926,10 @@ Output.prototype = {
     // Speech.
     var queueMode = this.queueMode_;
     this.speechBuffer_.forEach(function(buff, i, a) {
-      if (Output.flushNextSpeechUtterance_ && buff.length > 0) {
-        queueMode = cvox.QueueMode.FLUSH;
-        Output.flushNextSpeechUtterance_ = false;
+      if (Output.forceModeForNextSpeechUtterance_ !== undefined &&
+          buff.length > 0) {
+        queueMode = Output.forceModeForNextSpeechUtterance_;
+        Output.forceModeForNextSpeechUtterance_ = undefined;
       }

       var speechProps = {};
......
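
For reference, a hedged usage sketch of the renamed static API, pieced together from the hunks above; the |node| variable and the surrounding call site are hypothetical.

// Force the next spoken output to category-flush rather than fully flush.
Output.forceModeForNextSpeechUtterance(cvox.QueueMode.CATEGORY_FLUSH);

// Any subsequent Output, e.g. the navigation pattern used by ISearchUI above,
// picks up the forced mode for its first non-empty speech buffer in go(),
// which then resets Output.forceModeForNextSpeechUtterance_ to undefined so
// the override applies to exactly one utterance.
new Output()
    .withRichSpeechAndBraille(
        cursors.Range.fromNode(node), null, Output.EventType.NAVIGATE)
    .go();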