Commit 5701d705 authored by Akihiro Ota, committed by Commit Bot

ChromeVox: Queue phonetic speech and delay output.

Phonetic speech would get interrupted by other speech output in some
cases (usually by a hint).
This change queues and delays phonetic and hint speech, so that
phonetic speech doesn't get dropped.

Change-Id: I1e1a79129a490b34de381405f7a978d1d6cca74d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1801749
Reviewed-by: David Tseng <dtseng@chromium.org>
Commit-Queue: Akihiro Ota <akihiroota@chromium.org>
Cr-Commit-Position: refs/heads/master@{#700427}
parent dc0fbc80
...@@ -84,6 +84,8 @@ void SpeechMonitor::Speak(int utterance_id, ...@@ -84,6 +84,8 @@ void SpeechMonitor::Speak(int utterance_id,
utterance_id, content::TTS_EVENT_END, static_cast<int>(utterance.size()), utterance_id, content::TTS_EVENT_END, static_cast<int>(utterance.size()),
0, std::string()); 0, std::string());
std::move(on_speak_finished).Run(true); std::move(on_speak_finished).Run(true);
delay_for_last_utterance_MS_ = CalculateUtteranceDelayMS();
time_of_last_utterance_ = std::chrono::steady_clock::now();
} }
bool SpeechMonitor::StopSpeaking() { bool SpeechMonitor::StopSpeaking() {
...@@ -138,4 +140,16 @@ void SpeechMonitor::SetError(const std::string& error) { ...@@ -138,4 +140,16 @@ void SpeechMonitor::SetError(const std::string& error) {
error_ = error; error_ = error;
} }
// Returns the number of milliseconds elapsed since |time_of_last_utterance_|
// was last recorded (i.e. since the previous call to Speak()).
double SpeechMonitor::CalculateUtteranceDelayMS() {
  std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now();
  // duration<double, std::milli> converts directly to fractional
  // milliseconds; no manual seconds * 1000 conversion needed.
  return std::chrono::duration<double, std::milli>(now -
                                                   time_of_last_utterance_)
      .count();
}
// Returns the delay, in milliseconds, recorded for the most recent utterance:
// the time that elapsed between the previous Speak() call and the one that
// produced that utterance (captured in Speak() via CalculateUtteranceDelayMS).
double SpeechMonitor::GetDelayForLastUtteranceMS() {
return delay_for_last_utterance_MS_;
}
} // namespace chromeos } // namespace chromeos
...@@ -5,6 +5,8 @@ ...@@ -5,6 +5,8 @@
#ifndef CHROME_BROWSER_CHROMEOS_ACCESSIBILITY_SPEECH_MONITOR_H_ #ifndef CHROME_BROWSER_CHROMEOS_ACCESSIBILITY_SPEECH_MONITOR_H_
#define CHROME_BROWSER_CHROMEOS_ACCESSIBILITY_SPEECH_MONITOR_H_ #define CHROME_BROWSER_CHROMEOS_ACCESSIBILITY_SPEECH_MONITOR_H_
#include <chrono>
#include "base/containers/circular_deque.h" #include "base/containers/circular_deque.h"
#include "base/macros.h" #include "base/macros.h"
#include "base/memory/ref_counted.h" #include "base/memory/ref_counted.h"
...@@ -46,6 +48,9 @@ class SpeechMonitor : public content::TtsPlatform { ...@@ -46,6 +48,9 @@ class SpeechMonitor : public content::TtsPlatform {
// Blocks until StopSpeaking() is called on TtsController. // Blocks until StopSpeaking() is called on TtsController.
void BlockUntilStop(); void BlockUntilStop();
// Delayed utterances.
double GetDelayForLastUtteranceMS();
private: private:
// TtsPlatform implementation. // TtsPlatform implementation.
bool PlatformImplAvailable() override; bool PlatformImplAvailable() override;
...@@ -74,6 +79,14 @@ class SpeechMonitor : public content::TtsPlatform { ...@@ -74,6 +79,14 @@ class SpeechMonitor : public content::TtsPlatform {
bool did_stop_ = false; bool did_stop_ = false;
std::string error_; std::string error_;
// Delayed utterances.
// Calculates the milliseconds elapsed since the last call to Speak().
double CalculateUtteranceDelayMS();
// Stores the milliseconds elapsed since the last call to Speak().
double delay_for_last_utterance_MS_;
// Stores the last time Speak() was called.
std::chrono::steady_clock::time_point time_of_last_utterance_;
DISALLOW_COPY_AND_ASSIGN(SpeechMonitor); DISALLOW_COPY_AND_ASSIGN(SpeechMonitor);
}; };
......
...@@ -57,6 +57,10 @@ ...@@ -57,6 +57,10 @@
#include "ui/base/test/ui_controls.h" #include "ui/base/test/ui_controls.h"
#include "ui/views/widget/widget.h" #include "ui/views/widget/widget.h"
namespace {
// Minimum delay, in milliseconds, expected before phonetic speech and hints
// are spoken. Matches cvox.TtsBackground.HINT_DELAY_MS_ (1000 ms).
constexpr double kExpectedPhoneticSpeechAndHintDelayMS = 1000;
}  // namespace
namespace chromeos { namespace chromeos {
LoggedInSpokenFeedbackTest::LoggedInSpokenFeedbackTest() LoggedInSpokenFeedbackTest::LoggedInSpokenFeedbackTest()
...@@ -828,4 +832,56 @@ IN_PROC_BROWSER_TEST_F(OobeSpokenFeedbackTest, DISABLED_SpokenFeedbackInOobe) { ...@@ -828,4 +832,56 @@ IN_PROC_BROWSER_TEST_F(OobeSpokenFeedbackTest, DISABLED_SpokenFeedbackInOobe) {
"Combo box * of *")); "Combo box * of *"));
} }
// Verifies that moving by character produces the letter, then its (delayed)
// phonetic disambiguation, then the (delayed) activation hint.
IN_PROC_BROWSER_TEST_P(SpokenFeedbackTest,
                       MoveByCharacterPhoneticSpeechAndHints) {
  EnableChromeVox();
  ui_test_utils::NavigateToURL(
      browser(), GURL("data:text/html,<button autofocus>Click me</button>"));
  EXPECT_EQ("Web Content", speech_monitor_.GetNextUtterance());
  EXPECT_EQ("Click me", speech_monitor_.GetNextUtterance());
  EXPECT_EQ("Button", speech_monitor_.GetNextUtterance());
  EXPECT_EQ("Press Search plus Space to activate.",
            speech_monitor_.GetNextUtterance());

  // Move by character through the button. For each letter, assert that the
  // phonetic utterance and the trailing hint are both delayed by at least
  // the expected amount. EXPECT_GE (rather than EXPECT_TRUE(a >= b)) keeps
  // the actual delay in the failure message.
  const struct {
    const char* letter;
    const char* phonetic;
  } kExpectations[] = {
      {"L", "lima"}, {"I", "india"}, {"C", "charlie"}, {"K", "kilo"}};
  for (const auto& expectation : kExpectations) {
    SendKeyPressWithSearchAndShift(ui::VKEY_RIGHT);
    EXPECT_EQ(expectation.letter, speech_monitor_.GetNextUtterance());
    EXPECT_EQ(expectation.phonetic, speech_monitor_.GetNextUtterance());
    EXPECT_GE(speech_monitor_.GetDelayForLastUtteranceMS(),
              kExpectedPhoneticSpeechAndHintDelayMS);
    EXPECT_EQ("Press Search plus Space to activate.",
              speech_monitor_.GetNextUtterance());
    EXPECT_GE(speech_monitor_.GetDelayForLastUtteranceMS(),
              kExpectedPhoneticSpeechAndHintDelayMS);
  }
}
} // namespace chromeos } // namespace chromeos
...@@ -1168,9 +1168,11 @@ Output.prototype = { ...@@ -1168,9 +1168,11 @@ Output.prototype = {
* @param {!Array<Spannable>} buff Buffer to receive rendered output. * @param {!Array<Spannable>} buff Buffer to receive rendered output.
* @param {!OutputRulesStr} ruleStr * @param {!OutputRulesStr} ruleStr
* @param {!AutomationNode=} opt_prevNode * @param {!AutomationNode=} opt_prevNode
* @param {!Object<string,(string|number|boolean|Function|Array<string>)>=}
* opt_properties
* @private * @private
*/ */
format_: function(node, format, buff, ruleStr, opt_prevNode) { format_: function(node, format, buff, ruleStr, opt_prevNode, opt_properties) {
var tokens = []; var tokens = [];
var args = null; var args = null;
...@@ -1182,7 +1184,7 @@ Output.prototype = { ...@@ -1182,7 +1184,7 @@ Output.prototype = {
tokens = [format]; tokens = [format];
} }
var speechProps = null; var speechProps = opt_properties || null;
tokens.forEach(function(token) { tokens.forEach(function(token) {
// Ignore empty tokens. // Ignore empty tokens.
if (!token) if (!token)
...@@ -1642,7 +1644,8 @@ Output.prototype = { ...@@ -1642,7 +1644,8 @@ Output.prototype = {
} else if (prefix == '@') { } else if (prefix == '@') {
ruleStr.write(' @'); ruleStr.write(' @');
if (this.formatOptions_.auralStyle) { if (this.formatOptions_.auralStyle) {
speechProps = new Output.SpeechProperties(); if (!speechProps)
speechProps = new Output.SpeechProperties();
speechProps['relativePitch'] = -0.2; speechProps['relativePitch'] = -0.2;
} }
var isPluralized = (token[0] == '@'); var isPluralized = (token[0] == '@');
...@@ -2106,6 +2109,10 @@ Output.prototype = { ...@@ -2106,6 +2109,10 @@ Output.prototype = {
return; return;
} }
// Hints should be delayed.
var hintProperties = new Output.SpeechProperties();
hintProperties['delay'] = true;
ruleStr.write('hint_: '); ruleStr.write('hint_: ');
if (EventSourceState.get() == EventSourceType.TOUCH_GESTURE) { if (EventSourceState.get() == EventSourceType.TOUCH_GESTURE) {
if (node.state[StateType.EDITABLE]) { if (node.state[StateType.EDITABLE]) {
...@@ -2113,44 +2120,56 @@ Output.prototype = { ...@@ -2113,44 +2120,56 @@ Output.prototype = {
node, node,
node.state[StateType.FOCUSED] ? '@hint_is_editing' : node.state[StateType.FOCUSED] ? '@hint_is_editing' :
'@hint_double_tap_to_edit', '@hint_double_tap_to_edit',
buff, ruleStr); buff, ruleStr, undefined, hintProperties);
return; return;
} }
var isWithinVirtualKeyboard = AutomationUtil.getAncestors(node).find( var isWithinVirtualKeyboard = AutomationUtil.getAncestors(node).find(
(n) => n.role == RoleType.KEYBOARD); (n) => n.role == RoleType.KEYBOARD);
if (node.defaultActionVerb != 'none' && !isWithinVirtualKeyboard) if (node.defaultActionVerb != 'none' && !isWithinVirtualKeyboard)
this.format_(node, '@hint_double_tap', buff, ruleStr); this.format_(
node, '@hint_double_tap', buff, ruleStr, undefined, hintProperties);
var enteredVirtualKeyboard = var enteredVirtualKeyboard =
uniqueAncestors.find((n) => n.role == RoleType.KEYBOARD); uniqueAncestors.find((n) => n.role == RoleType.KEYBOARD);
if (enteredVirtualKeyboard) if (enteredVirtualKeyboard)
this.format_(node, '@hint_touch_type', buff, ruleStr); this.format_(
node, '@hint_touch_type', buff, ruleStr, undefined, hintProperties);
return; return;
} }
if (node.state[StateType.EDITABLE] && cvox.ChromeVox.isStickyPrefOn) if (node.state[StateType.EDITABLE] && cvox.ChromeVox.isStickyPrefOn)
this.format_(node, '@sticky_mode_enabled', buff, ruleStr); this.format_(
node, '@sticky_mode_enabled', buff, ruleStr, undefined,
hintProperties);
if (node.state[StateType.EDITABLE] && node.state[StateType.FOCUSED] && if (node.state[StateType.EDITABLE] && node.state[StateType.FOCUSED] &&
!this.formatOptions_.braille) { !this.formatOptions_.braille) {
if (node.state[StateType.MULTILINE] || if (node.state[StateType.MULTILINE] ||
node.state[StateType.RICHLY_EDITABLE]) node.state[StateType.RICHLY_EDITABLE])
this.format_(node, '@hint_search_within_text_field', buff, ruleStr); this.format_(
node, '@hint_search_within_text_field', buff, ruleStr, undefined,
hintProperties);
} }
if (AutomationPredicate.checkable(node)) if (AutomationPredicate.checkable(node))
this.format_(node, '@hint_checkable', buff, ruleStr); this.format_(
node, '@hint_checkable', buff, ruleStr, undefined, hintProperties);
else if (AutomationPredicate.clickable(node)) else if (AutomationPredicate.clickable(node))
this.format_(node, '@hint_clickable', buff, ruleStr); this.format_(
node, '@hint_clickable', buff, ruleStr, undefined, hintProperties);
if (node.autoComplete == 'list' || node.autoComplete == 'both' || if (node.autoComplete == 'list' || node.autoComplete == 'both' ||
node.state[StateType.AUTOFILL_AVAILABLE]) { node.state[StateType.AUTOFILL_AVAILABLE]) {
this.format_(node, '@hint_autocomplete_list', buff, ruleStr); this.format_(
node, '@hint_autocomplete_list', buff, ruleStr, undefined,
hintProperties);
} }
if (node.autoComplete == 'inline' || node.autoComplete == 'both') if (node.autoComplete == 'inline' || node.autoComplete == 'both')
this.format_(node, '@hint_autocomplete_inline', buff, ruleStr); this.format_(
node, '@hint_autocomplete_inline', buff, ruleStr, undefined,
hintProperties);
if (node.accessKey) { if (node.accessKey) {
this.append_(buff, Msgs.getMsg('access_key', [node.accessKey])); this.append_(buff, Msgs.getMsg('access_key', [node.accessKey]));
ruleStr.write(Msgs.getMsg('access_key', [node.accessKey])); ruleStr.write(Msgs.getMsg('access_key', [node.accessKey]));
...@@ -2159,14 +2178,17 @@ Output.prototype = { ...@@ -2159,14 +2178,17 @@ Output.prototype = {
// Ancestry based hints. // Ancestry based hints.
if (uniqueAncestors.find( if (uniqueAncestors.find(
/** @type {function(?) : boolean} */ (AutomationPredicate.table))) /** @type {function(?) : boolean} */ (AutomationPredicate.table)))
this.format_(node, '@hint_table', buff, ruleStr); this.format_(
node, '@hint_table', buff, ruleStr, undefined, hintProperties);
if (uniqueAncestors.find(/** @type {function(?) : boolean} */ ( if (uniqueAncestors.find(/** @type {function(?) : boolean} */ (
AutomationPredicate.roles([RoleType.MENU, RoleType.MENU_BAR])))) AutomationPredicate.roles([RoleType.MENU, RoleType.MENU_BAR]))))
this.format_(node, '@hint_menu', buff, ruleStr); this.format_(
node, '@hint_menu', buff, ruleStr, undefined, hintProperties);
if (uniqueAncestors.find(/** @type {function(?) : boolean} */ (function(n) { if (uniqueAncestors.find(/** @type {function(?) : boolean} */ (function(n) {
return !!n.details; return !!n.details;
}))) })))
this.format_(node, '@hint_details', buff, ruleStr); this.format_(
node, '@hint_details', buff, ruleStr, undefined, hintProperties);
}, },
/** /**
......
...@@ -119,7 +119,8 @@ TEST_F('ChromeVoxOutputE2ETest', 'Links', function() { ...@@ -119,7 +119,8 @@ TEST_F('ChromeVoxOutputE2ETest', 'Links', function() {
// Link earcon (based on the name). // Link earcon (based on the name).
{value: {earconId: 'LINK'}, start: 0, end: 10}, {value: {earconId: 'LINK'}, start: 0, end: 10},
{value: 'role', start: 11, end: 15} {value: 'role', start: 11, end: 15},
{value: {'delay': true}, start:16, end:16}
]}, o.speechOutputForTest); ]}, o.speechOutputForTest);
checkBrailleOutput( checkBrailleOutput(
'Click here lnk', 'Click here lnk',
...@@ -137,7 +138,8 @@ TEST_F('ChromeVoxOutputE2ETest', 'Checkbox', function() { ...@@ -137,7 +138,8 @@ TEST_F('ChromeVoxOutputE2ETest', 'Checkbox', function() {
checkSpeechOutput('|Check box|Not checked|Press Search+Space to toggle.', checkSpeechOutput('|Check box|Not checked|Press Search+Space to toggle.',
[ [
{value: new Output.EarconAction('CHECK_OFF'), start: 0, end: 0}, {value: new Output.EarconAction('CHECK_OFF'), start: 0, end: 0},
{value: 'role', start: 1, end: 10} {value: 'role', start: 1, end: 10},
{value: {'delay': true}, start:23, end:23}
], ],
o); o);
checkBrailleOutput( checkBrailleOutput(
...@@ -710,7 +712,8 @@ TEST_F('ChromeVoxOutputE2ETest', 'ToggleButton', function() { ...@@ -710,7 +712,8 @@ TEST_F('ChromeVoxOutputE2ETest', 'ToggleButton', function() {
spans_: [ spans_: [
{value: {earconId: 'CHECK_ON'}, start: 0, end: 0}, {value: {earconId: 'CHECK_ON'}, start: 0, end: 0},
{value: 'name', start: 1, end:10}, {value: 'name', start: 1, end:10},
{value: 'role', start: 11, end: 24} {value: 'role', start: 11, end: 24},
{value: {'delay':true}, start:33, end:33}
]}, o.speechOutputForTest); ]}, o.speechOutputForTest);
assertEquals('Subscribe tgl btn =', o.brailleOutputForTest.string_); assertEquals('Subscribe tgl btn =', o.brailleOutputForTest.string_);
}); });
......
...@@ -98,7 +98,7 @@ cvox.TtsBackground = function() { ...@@ -98,7 +98,7 @@ cvox.TtsBackground = function() {
this.retainPunctuation_ = [';', '?', '!', '\'']; this.retainPunctuation_ = [';', '?', '!', '\''];
/** /**
* The id of a callback returned from setTimeout. * The id of a callback returned by setTimeout.
* @type {number|undefined} * @type {number|undefined}
*/ */
this.timeoutId_; this.timeoutId_;
...@@ -183,13 +183,12 @@ goog.inherits(cvox.TtsBackground, cvox.ChromeTtsBase); ...@@ -183,13 +183,12 @@ goog.inherits(cvox.TtsBackground, cvox.ChromeTtsBase);
/** /**
* The amount of time to wait before speaking a phonetic word for a * The amount of time to wait before speaking a hint.
* letter.
* @type {number} * @type {number}
* @private * @private
* @const * @const
*/ */
cvox.TtsBackground.PHONETIC_DELAY_MS_ = 1000; cvox.TtsBackground.HINT_DELAY_MS_ = 1000;
/** /**
* The list of properties allowed to be passed to the chrome.tts.speak API. * The list of properties allowed to be passed to the chrome.tts.speak API.
...@@ -286,6 +285,9 @@ cvox.TtsBackground.prototype.speak = function( ...@@ -286,6 +285,9 @@ cvox.TtsBackground.prototype.speak = function(
var utterance = new cvox.Utterance(textString, mergedProperties); var utterance = new cvox.Utterance(textString, mergedProperties);
this.speakUsingQueue_(utterance, queueMode); this.speakUsingQueue_(utterance, queueMode);
// Attempt to queue phonetic speech with property['delay']. This ensures that
// phonetic hints are delayed when we process them.
this.pronouncePhonetically_(textString, properties);
return this; return this;
}; };
...@@ -304,6 +306,8 @@ cvox.TtsBackground.prototype.speakUsingQueue_ = function(utterance, queueMode) { ...@@ -304,6 +306,8 @@ cvox.TtsBackground.prototype.speakUsingQueue_ = function(utterance, queueMode) {
(new PanelCommand(PanelCommandType.CLEAR_SPEECH)).send(); (new PanelCommand(PanelCommandType.CLEAR_SPEECH)).send();
if (this.shouldCancel_(this.currentUtterance_, utterance, queueMode)) { if (this.shouldCancel_(this.currentUtterance_, utterance, queueMode)) {
// Clear timeout in case currentUtterance_ is a delayed utterance.
this.clearTimeout_();
this.cancelUtterance_(this.currentUtterance_); this.cancelUtterance_(this.currentUtterance_);
this.currentUtterance_ = null; this.currentUtterance_ = null;
} }
...@@ -346,6 +350,21 @@ cvox.TtsBackground.prototype.startSpeakingNextItemInQueue_ = function() { ...@@ -346,6 +350,21 @@ cvox.TtsBackground.prototype.startSpeakingNextItemInQueue_ = function() {
return; return;
} }
// Clear timeout for delayed utterances (hints and phonetic speech).
this.clearTimeout_();
// Check top of utteranceQueue for delayed utterance.
if (this.utteranceQueue_[0].properties['delay']) {
// Remove 'delay' property and set a timeout to process this utterance after
// the delay has passed.
delete this.utteranceQueue_[0].properties['delay'];
this.timeoutId_ = setTimeout(
() => this.startSpeakingNextItemInQueue_(),
cvox.TtsBackground.HINT_DELAY_MS_);
return;
}
this.currentUtterance_ = this.utteranceQueue_.shift(); this.currentUtterance_ = this.utteranceQueue_.shift();
var utterance = this.currentUtterance_; var utterance = this.currentUtterance_;
var utteranceId = utterance.id; var utteranceId = utterance.id;
...@@ -574,14 +593,6 @@ cvox.TtsBackground.prototype.preprocess = function(text, properties) { ...@@ -574,14 +593,6 @@ cvox.TtsBackground.prototype.preprocess = function(text, properties) {
} }
text = text.replace(pE.regexp, this.createPunctuationReplace_(pE.clear)); text = text.replace(pE.regexp, this.createPunctuationReplace_(pE.clear));
// Try pronouncing phonetically for single characters. Cancel previous calls
// to pronouncePhonetically_ if we fail to pronounce on this invokation or if
// this text is math which should never be pronounced phonetically.
if (properties.math || !properties['phoneticCharacters'] ||
!this.pronouncePhonetically_(text, properties)) {
this.clearTimeout_();
}
// Remove all whitespace from the beginning and end, and collapse all // Remove all whitespace from the beginning and end, and collapse all
// inner strings of whitespace to a single space. // inner strings of whitespace to a single space.
text = text.replace(/\s+/g, ' ').replace(/^\s+|\s+$/g, ''); text = text.replace(/\s+/g, ' ').replace(/^\s+|\s+$/g, '');
...@@ -666,28 +677,28 @@ cvox.TtsBackground.prototype.createPunctuationReplace_ = function(clear) { ...@@ -666,28 +677,28 @@ cvox.TtsBackground.prototype.createPunctuationReplace_ = function(clear) {
/** /**
* Pronounces single letters phonetically after some timeout. * Queues phonetic disambiguation for characters if disambiguation is found.
* @param {string} text The text. * @param {string} text The text for which we want to get phonetic data.
* @param {Object} properties Speech properties to use for this utterance. * @param {Object} properties Speech properties to use for this utterance.
* @return {boolean} True if the text resulted in speech.
* @private * @private
*/ */
cvox.TtsBackground.prototype.pronouncePhonetically_ = function( cvox.TtsBackground.prototype.pronouncePhonetically_ = function(
text, properties) { text, properties) {
// Math should never be pronounced phonetically.
if (properties.math)
return;
text = text.toLowerCase(); text = text.toLowerCase();
// If undefined language, use the UI language of the browser as a best guess. // If undefined language, use the UI language of the browser as a best guess.
if (!properties['lang']) if (!properties['lang'])
properties['lang'] = chrome.i18n.getUILanguage(); properties['lang'] = chrome.i18n.getUILanguage();
text = PhoneticData.getPhoneticDisambiguation(properties['lang'], text);
if (text) { var phoneticText =
this.clearTimeout_(); PhoneticData.getPhoneticDisambiguation(properties['lang'], text);
var self = this; if (phoneticText) {
this.timeoutId_ = setTimeout(function() { properties['delay'] = true;
self.speak(text, cvox.QueueMode.QUEUE, properties); this.speak(phoneticText, cvox.QueueMode.QUEUE, properties);
}, cvox.TtsBackground.PHONETIC_DELAY_MS_);
return true;
} }
return false;
}; };
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment