Commit 1a38192b authored by Akihiro Ota, committed by Commit Bot

ChromeVox: Port earcons lesson.

This change:
1. Adds the earcons lesson to the interactive tutorial. The
functionality for this lives in buildEarconLesson() in i_tutorial.js.
2. Adds a test for the lesson.
3. Changes how we wait for the tutorial, to reduce flakiness: we now
wait for all lesson content to finish loading before performing
commands and making assertions. (A standalone sketch of this handshake
follows the commit header below.)

Test: Manually verify on-device that earcons are played when text is
focused. This change also includes an automated test.
Change-Id: I89d2f3c050a74b8d35df47091d88ba443105dadb
Fixed: 1124078
AX-Relnotes: N/A
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2426968
Commit-Queue: Akihiro Ota <akihiroota@chromium.org>
Reviewed-by: Dominic Mazzoni <dmazzoni@chromium.org>
Cr-Commit-Position: refs/heads/master@{#813035}
parent 62676f1b
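Note: item 3 of the commit message describes a counting handshake between the lessons and the tutorial container. A minimal standalone sketch of that pattern follows; the function and parameter names here are illustrative, not part of the change:

// Each lesson fires 'lessonready' once its content is stamped; the host
// counts the events and announces 'readyfortesting' only after every
// expected lesson has reported in.
function signalWhenAllLessonsReady(host, expectedCount) {
  let loaded = 0;
  host.addEventListener('lessonready', () => {
    loaded++;
    if (loaded === expectedCount) {
      host.dispatchEvent(
          new CustomEvent('readyfortesting', {composed: true}));
    }
  });
}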
@@ -90,6 +90,8 @@ Polymer({
numLessons: {type: Number, value: 0},
numLoadedLessons: {type: Number, value: 0},
activeScreen: {type: String, observer: 'onActiveScreenChanged'},
interactiveMode: {type: Boolean, value: false},
@@ -436,14 +438,13 @@ Polymer({
{
title: 'Sounds',
-      content: [
-        'ChromeVox uses sounds to give you essential and additional ' +
-            'information. You can use these sounds to navigate more ' +
-            'quickly by learning what each sound means. Once you get ' +
-            'more comfortable, you can turn off verbose descriptions in ' +
-            'speech and rely on them for essential information about the ' +
-            'page. Here is a complete list of sounds and what they mean',
-      ],
+      content:
+          ['ChromeVox uses sounds to give you essential and additional ' +
+           'information. You can use these sounds to navigate more ' +
+           'quickly by learning what each sound means. Once you get ' +
+           'more comfortable, you can turn off verbose descriptions in ' +
+           'speech and rely on them for essential information about the ' +
+           'page. Here is a complete list of sounds and what they mean'],
medium: InteractionMedium.KEYBOARD,
curriculums: [Curriculum.SOUNDS_AND_SETTINGS]
},
@@ -488,8 +489,16 @@ Polymer({
/** @override */
ready() {
-    document.addEventListener('keydown', this.onKeyDown.bind(this));
+    this.hideAllScreens();
+    document.addEventListener('keydown', this.onKeyDown.bind(this));
this.addEventListener('lessonready', () => {
this.numLoadedLessons += 1;
if (this.numLoadedLessons === this.lessonData.length) {
this.buildEarconLesson();
this.dispatchEvent(
new CustomEvent('readyfortesting', {composed: true}));
}
});
this.$.lessonTemplate.addEventListener('dom-change', (evt) => {
// Executes once all lessons have been added to the dom.
this.show();
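Note: the events in this change are dispatched with {composed: true} so they can be observed from outside the component's shadow tree; by default an event stops at the shadow-root boundary. A self-contained sketch of the behavior (bubbles is included to show the general case of reaching ancestor listeners, and is not part of this change):

// A composed, bubbling event escapes the shadow root; without composed,
// the listener on `host` would never fire.
const host = document.createElement('div');
const shadow = host.attachShadow({mode: 'open'});
const inner = document.createElement('span');
shadow.appendChild(inner);
host.addEventListener('lessonready', () => console.log('seen outside'));
inner.dispatchEvent(
    new CustomEvent('lessonready', {bubbles: true, composed: true}));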
@@ -968,5 +977,45 @@ Polymer({
// Queue lesson content so it is read after the lesson title.
this.requestSpeech(text, QueueMode.QUEUE);
}
},
/**
* @private
* @suppress {undefinedVars|missingProperties} For referencing
* EarconDescription and Msgs, which are defined on the Panel window.
*/
buildEarconLesson() {
// Find earcon lesson.
let earconLesson;
const elements = this.$.lessonContainer.children;
for (const element of elements) {
if (element.is === 'tutorial-lesson' && element.title === 'Sounds') {
earconLesson = element;
}
}
if (!earconLesson) {
throw new Error('Could not find the earcon lesson.');
}
// Add text and listeners.
for (const earconId in EarconDescription) {
const msgid = EarconDescription[earconId];
const earconElement = document.createElement('p');
earconElement.innerText = Msgs.getMsg(msgid);
earconElement.setAttribute('tabindex', -1);
earconElement.addEventListener(
'focus', this.requestEarcon.bind(this, earconId));
earconLesson.contentDiv.appendChild(earconElement);
}
},
/**
* @param {string} earconId
* @private
*/
requestEarcon(earconId) {
this.dispatchEvent(
new CustomEvent('requestearcon', {composed: true, detail: {earconId}}));
}
});
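Note: buildEarconLesson() keeps the lesson itself free of audio code; each generated paragraph only re-dispatches a request that the embedding Panel fulfills. A condensed sketch of that delegation, with a hypothetical playSound callback standing in for the real earcon engine:

// One focusable line per sound; focusing it asks the host to play it.
// tabindex="-1" makes the element focusable from script (and by ChromeVox)
// without adding it to the Tab order.
function addEarconLine(container, earconId, label, playSound) {
  const p = document.createElement('p');
  p.innerText = label;
  p.setAttribute('tabindex', '-1');
  p.addEventListener('focus', () => playSound(earconId));
  container.appendChild(p);
}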
@@ -97,7 +97,8 @@ a {
<h1 id="title" tabindex="-1">[[ title ]]</h1>
</template>
<div id="content">
<template is="dom-repeat" items="[[ content ]]" as="text">
<template id="contentTemplate" is="dom-repeat" items="[[ content ]]"
as="text">
<p tabindex="-1">[[ text ]]</p>
</template>
</div>
@@ -51,6 +51,11 @@ export const TutorialLesson = Polymer({
/** @override */
ready() {
this.$.contentTemplate.addEventListener('dom-change', (evt) => {
this.dispatchEvent(new CustomEvent('lessonready', {composed: true}));
});
if (this.practiceFile) {
this.populatePracticeContent();
for (const evt of this.events) {
@@ -258,4 +263,9 @@ export const TutorialLesson = Polymer({
return false;
},
/** @return {Element} */
get contentDiv() {
return this.$.content;
}
});
\ No newline at end of file
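Note: the per-lesson half of the handshake hangs off Polymer's dom-repeat, which fires a dom-change event once it finishes stamping its items; the lesson translates that into the public 'lessonready' event counted in i_tutorial.js. Reduced to a sketch (the wrapper function is illustrative):

// Forward Polymer's "finished rendering" signal as an event that
// listeners outside the lesson can count.
function forwardLessonReady(template, lesson) {
  template.addEventListener('dom-change', () => {
    lesson.dispatchEvent(new CustomEvent('lessonready', {composed: true}));
  });
}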
@@ -8,6 +8,7 @@
goog.provide('Panel');
goog.require('AbstractEarcons');
goog.require('AnnotationsUI');
goog.require('BackgroundKeyboardHandler');
goog.require('BrailleCommandData');
@@ -187,6 +188,9 @@ Panel = class {
'enable-experimental-accessibility-chromevox-tutorial', (enabled) => {
Panel.iTutorialEnabled_ = enabled;
});
/** @private {boolean} */
Panel.iTutorialReadyForTesting_ = false;
}
/**
@@ -1230,6 +1234,14 @@ Panel = class {
chrome.extension.getBackgroundPage()['CommandHandler'];
commandHandler.onCommand('fullyDescribe');
});
$('i-tutorial').addEventListener('requestearcon', (evt) => {
const earconId = evt.detail.earconId;
chrome.extension
.getBackgroundPage()['ChromeVox']['earcons']['playEarcon'](earconId);
});
$('i-tutorial').addEventListener('readyfortesting', () => {
Panel.iTutorialReadyForTesting_ = true;
});
}
/**
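Note: the Panel fulfills 'requestearcon' by reaching into the extension's background page, which hosts the live ChromeVox earcon engine; the bracketed property accesses keep the Closure Compiler from renaming symbols that are only defined in that other window. The handler from the diff, restated with a comment (assuming the background page exposes ChromeVox.earcons.playEarcon, as the change does):

// Bridge: a UI event in the panel becomes audio playback in the
// background page, where the earcon engine actually lives.
$('i-tutorial').addEventListener('requestearcon', (evt) => {
  const background = chrome.extension.getBackgroundPage();
  background['ChromeVox']['earcons']['playEarcon'](evt.detail.earconId);
});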
@@ -82,8 +82,16 @@ ChromeVoxTutorialTest = class extends ChromeVoxNextE2ETest {
if (mutation.type === 'childList') {
for (const node of mutation.addedNodes) {
if (node.id === 'i-tutorial-container') {
-              // Resolve once the tutorial has been added to the document.
-              resolve();
+              // Once the tutorial has been added to the document, we need
+              // to wait for the lesson templates to load.
+              const panel = this.getPanel();
+              if (panel.iTutorialReadyForTesting_) {
+                resolve();
+              } else {
+                panel.iTutorial.addEventListener('readyfortesting', () => {
+                  resolve();
+                });
+              }
observer.disconnect();
}
}
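Note: the test-side wait guards against the classic subscribe-after-fire race: if the lessons finished loading before the MutationObserver callback ran, the one-shot 'readyfortesting' event is already gone, so the ready flag is consulted first. The same pattern as a reusable promise (function and parameter names are illustrative):

// Resolve immediately if the one-shot event already fired; otherwise wait.
function whenReadyForTesting(target, alreadyReady) {
  return new Promise((resolve) => {
    if (alreadyReady()) {
      resolve();
      return;
    }
    target.addEventListener('readyfortesting', () => resolve(), {once: true});
  });
}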
@@ -445,3 +453,33 @@ TEST_F('ChromeVoxTutorialTest', 'AutoReadLesson', function() {
.replay();
});
});
// Tests for correct speech and earcons on the earcons lesson.
TEST_F('ChromeVoxTutorialTest', 'EarconLesson', function() {
const mockFeedback = this.createMockFeedback();
this.runWithLoadedTree(this.simpleDoc, async function(root) {
await this.launchAndWaitForTutorial();
const tutorial = this.getPanel().iTutorial;
const nextObjectAndExpectSpeechAndEarcon = (speech, earcon) => {
mockFeedback.call(doCmd('nextObject'))
.expectSpeech(speech)
.expectEarcon(earcon);
};
mockFeedback.expectSpeech('Choose your tutorial experience')
.call(() => {
// Show the lesson.
tutorial.curriculum = 'sounds_and_settings';
tutorial.showLesson(0);
})
.expectSpeech('Sounds')
.call(doCmd('nextObject'))
.expectSpeech(new RegExp(
'ChromeVox uses sounds to give you essential and additional ' +
'information.'));
nextObjectAndExpectSpeechAndEarcon('A modal alert', Earcon.ALERT_MODAL);
nextObjectAndExpectSpeechAndEarcon(
'A non modal alert', Earcon.ALERT_NONMODAL);
nextObjectAndExpectSpeechAndEarcon('A button', Earcon.BUTTON);
mockFeedback.replay();
});
});
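Note: mockFeedback records expectations and executes nothing until replay(); the nextObjectAndExpectSpeechAndEarcon helper just keeps that chain readable when the same command/expectation triple repeats. A toy model of the record-then-replay style, not the real MockFeedback API:

// Queue steps now, execute them in order later.
function makeReplayQueue() {
  const steps = [];
  return {
    call(fn) { steps.push(fn); return this; },
    async replay() { for (const step of steps) await step(); }
  };
}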