Commit 07d9144f authored by Brandon Wylie, committed by Commit Bot

Hook up the Assistant consent dialog

Connects buttons to their actions, writes to prefs, and displays the
consent dialog when needed.

mocks: https://www.figma.com/file/b1CT6jM3mj0MdBvgQicy4b
dd: https://docs.google.com/document/d/1ZnmJUYFBEiIx8TjQPxD18e7NAeJa6yIBamAJyR-ggOk
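
In short, voice search is now gated on a stored consent decision. A simplified
sketch of the new flow, condensed from the VoiceRecognitionHandler change in
this CL (no API surface beyond what the diff below adds):

// Condensed from startAGSAForAssistantVoiceSearch() below.
if (mAssistantVoiceSearchService.canRequestAssistantVoiceSearch()
        && mAssistantVoiceSearchService.needsEnabledCheck()) {
    // First eligible voice search: hide the keyboard and ask for consent.
    mDelegate.clearOmniboxFocus();
    AssistantVoiceSearchConsentUi.show(windowAndroid,
            SharedPreferencesManager.getInstance(), new SettingsLauncherImpl(),
            (useAssistant) -> {
                // true: launch AGSA; false: fall back to system voice search.
            });
}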

Bug: 1117271
Change-Id: I90dd40c285d9246cfe5b48c62ffdee093c591086
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2531356
Commit-Queue: Brandon Wylie <wylieb@chromium.org>
Reviewed-by: Ted Choc <tedchoc@chromium.org>
Cr-Commit-Position: refs/heads/master@{#826849}
parent 0533f99a
......@@ -350,6 +350,7 @@ chrome_test_java_sources = [
"javatests/src/org/chromium/chrome/browser/omnibox/suggestions/entity/EntitySuggestionProcessorUnitTest.java",
"javatests/src/org/chromium/chrome/browser/omnibox/suggestions/mostvisited/MostVisitedTilesTest.java",
"javatests/src/org/chromium/chrome/browser/omnibox/suggestions/tiles/TileSuggestionProcessorUnitTest.java",
"javatests/src/org/chromium/chrome/browser/omnibox/voice/AssistantVoiceSearchConsentUiRenderTest.java",
"javatests/src/org/chromium/chrome/browser/omnibox/voice/AssistantVoiceSearchConsentUiTest.java",
"javatests/src/org/chromium/chrome/browser/omnibox/voice/VoiceRecognitionHandlerTest.java",
"javatests/src/org/chromium/chrome/browser/page_info/ConnectionInfoViewTest.java",
......
......@@ -4,14 +4,23 @@
package org.chromium.chrome.browser.omnibox.voice;
import static org.chromium.chrome.browser.preferences.ChromePreferenceKeys.ASSISTANT_VOICE_SEARCH_ENABLED;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import androidx.annotation.IntDef;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import org.chromium.base.Callback;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.autofill_assistant.AutofillAssistantPreferenceFragment;
import org.chromium.chrome.browser.preferences.SharedPreferencesManager;
import org.chromium.chrome.browser.settings.SettingsLauncher;
import org.chromium.components.browser_ui.bottomsheet.BottomSheetContent;
import org.chromium.components.browser_ui.bottomsheet.BottomSheetController;
import org.chromium.components.browser_ui.bottomsheet.BottomSheetControllerProvider;
......@@ -19,34 +28,69 @@ import org.chromium.components.browser_ui.bottomsheet.BottomSheetObserver;
import org.chromium.components.browser_ui.bottomsheet.EmptyBottomSheetObserver;
import org.chromium.ui.base.WindowAndroid;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* The consent ui shown to users when Chrome attempts to use Assistant voice search for the
* first time.
*/
class AssistantVoiceSearchConsentUi implements BottomSheetContent {
class AssistantVoiceSearchConsentUi
implements BottomSheetContent, WindowAndroid.ActivityStateObserver {
private static final String CONSENT_OUTCOME_HISTOGRAM = "Assistant.VoiceSearch.ConsentOutcome";
/**
* Show the consent ui to the user.
* @param windowAndroid The current {@link WindowAndroid} for the app.
* @param completionCallback A Runnable to be invoked if the user is continuing with the
* requested voice search
* @param sharedPreferencesManager The {@link SharedPreferencesManager} to read/write prefs.
* @param settingsLauncher The {@link SettingsLauncher}, used to launch settings.
* @param completionCallback A callback invoked with the resulting consent state once the
* user has responded to (or dismissed) the dialog.
*/
static void show(WindowAndroid windowAndroid, Runnable completionCallback) {
static void show(WindowAndroid windowAndroid, SharedPreferencesManager sharedPreferencesManager,
SettingsLauncher settingsLauncher, Callback<Boolean> completionCallback) {
// TODO(wylieb): Inject BottomSheetController into this class properly.
AssistantVoiceSearchConsentUi consentUi =
new AssistantVoiceSearchConsentUi(windowAndroid.getContext().get(),
BottomSheetControllerProvider.from(windowAndroid));
AssistantVoiceSearchConsentUi consentUi = new AssistantVoiceSearchConsentUi(windowAndroid,
windowAndroid.getContext().get(), sharedPreferencesManager, settingsLauncher,
BottomSheetControllerProvider.from(windowAndroid));
consentUi.show(completionCallback);
}
private BottomSheetController mBottomSheetController;
private BottomSheetObserver mBottomSheetObserver;
@IntDef({ConsentOutcome.ACCEPTED_VIA_BUTTON, ConsentOutcome.ACCEPTED_VIA_SETTINGS,
ConsentOutcome.REJECTED_VIA_BUTTON, ConsentOutcome.REJECTED_VIA_SETTINGS,
ConsentOutcome.REJECTED_VIA_DISMISS, ConsentOutcome.MAX_VALUE})
@Retention(RetentionPolicy.SOURCE)
@interface ConsentOutcome {
int ACCEPTED_VIA_BUTTON = 0;
int ACCEPTED_VIA_SETTINGS = 1;
int REJECTED_VIA_BUTTON = 2;
int REJECTED_VIA_SETTINGS = 3;
int REJECTED_VIA_DISMISS = 4;
// STOP: When updating this, also update values in enums.xml.
int MAX_VALUE = 5;
}
private final WindowAndroid mWindowAndroid;
private final Context mContext;
private final SharedPreferencesManager mSharedPreferencesManager;
private final SettingsLauncher mSettingsLauncher;
private final BottomSheetController mBottomSheetController;
private final BottomSheetObserver mBottomSheetObserver;
private View mContentView;
private @Nullable Runnable mCompletionCallback;
private @Nullable Callback<Boolean> mCompletionCallback;
private AssistantVoiceSearchConsentUi(
Context context, BottomSheetController bottomSheetController) {
@VisibleForTesting
AssistantVoiceSearchConsentUi(WindowAndroid windowAndroid, Context context,
SharedPreferencesManager sharedPreferencesManager, SettingsLauncher settingsLauncher,
BottomSheetController bottomSheetController) {
mContext = context;
mSharedPreferencesManager = sharedPreferencesManager;
mSettingsLauncher = settingsLauncher;
mBottomSheetController = bottomSheetController;
mWindowAndroid = windowAndroid;
mWindowAndroid.addActivityStateObserver(this);
mContentView = LayoutInflater.from(context).inflate(
R.layout.assistant_voice_search_consent_ui, /* root= */ null);
......@@ -58,11 +102,11 @@ class AssistantVoiceSearchConsentUi implements BottomSheetContent {
|| reason == BottomSheetController.StateChangeReason.BACK_PRESS) {
// The user dismissed the dialog without pressing a button.
onConsentRejected();
// TODO(wylieb): Record metrics here.
RecordHistogram.recordEnumeratedHistogram(CONSENT_OUTCOME_HISTOGRAM,
ConsentOutcome.REJECTED_VIA_DISMISS, ConsentOutcome.MAX_VALUE);
}
mCompletionCallback.run();
mCompletionCallback = null;
mBottomSheetController.removeObserver(mBottomSheetObserver);
mCompletionCallback.onResult(mSharedPreferencesManager.readBoolean(
ASSISTANT_VOICE_SEARCH_ENABLED, /* default= */ false));
}
};
......@@ -89,32 +133,60 @@ class AssistantVoiceSearchConsentUi implements BottomSheetContent {
* @param completionCallback Callback invoked with the resulting consent state for the
* requested voice search.
*/
private void show(@NonNull Runnable completionCallback) {
@VisibleForTesting
void show(@NonNull Callback<Boolean> completionCallback) {
assert mCompletionCallback == null;
assert !mBottomSheetController.isSheetOpen();
mCompletionCallback = completionCallback;
mBottomSheetController.requestShowContent(this, /* animate= */ true);
mBottomSheetController.addObserver(mBottomSheetObserver);
// TODO(wylieb): Record metrics here.
if (!mBottomSheetController.requestShowContent(this, /* animate= */ true)) {
mBottomSheetController.hideContent(
this, /* animate= */ false, BottomSheetController.StateChangeReason.NONE);
completionCallback.onResult(mSharedPreferencesManager.readBoolean(
ASSISTANT_VOICE_SEARCH_ENABLED, /* default= */ false));
destroy();
} else {
mBottomSheetController.addObserver(mBottomSheetObserver);
}
}
private void onConsentAccepted() {
// TODO(wylieb): Implement this.
// TODO(wylieb): Record metrics here.
mSharedPreferencesManager.writeBoolean(ASSISTANT_VOICE_SEARCH_ENABLED, true);
RecordHistogram.recordEnumeratedHistogram(CONSENT_OUTCOME_HISTOGRAM,
ConsentOutcome.ACCEPTED_VIA_BUTTON, ConsentOutcome.MAX_VALUE);
}
private void onConsentRejected() {
// TODO(wylieb): Implement this.
// TODO(wylieb): Record metrics here.
mSharedPreferencesManager.writeBoolean(ASSISTANT_VOICE_SEARCH_ENABLED, false);
RecordHistogram.recordEnumeratedHistogram(CONSENT_OUTCOME_HISTOGRAM,
ConsentOutcome.REJECTED_VIA_BUTTON, ConsentOutcome.MAX_VALUE);
}
/** Open a page to learn more about the consent dialog. */
private void openLearnMore() {
// TODO(wylieb): Implement this.
// TODO(wylieb): Record metrics here.
mSettingsLauncher.launchSettingsActivity(
mContext, AutofillAssistantPreferenceFragment.class, /* fragmentArgs= */ null);
}
// WindowAndroid.ActivityStateObserver implementation.
@Override
public void onActivityResumed() {
// It's possible the user clicked through "learn more" and enabled/disabled it via settings.
if (!mSharedPreferencesManager.contains(ASSISTANT_VOICE_SEARCH_ENABLED)) return;
RecordHistogram.recordEnumeratedHistogram(CONSENT_OUTCOME_HISTOGRAM,
mSharedPreferencesManager.readBoolean(
ASSISTANT_VOICE_SEARCH_ENABLED, /* default= */ false)
? ConsentOutcome.ACCEPTED_VIA_SETTINGS
: ConsentOutcome.REJECTED_VIA_SETTINGS,
ConsentOutcome.MAX_VALUE);
mBottomSheetController.hideContent(this, /* animate= */ true,
BottomSheetController.StateChangeReason.INTERACTION_COMPLETE);
}
@Override
public void onActivityPaused() {}
// BottomSheetContent implementation.
@Override
......@@ -133,7 +205,11 @@ class AssistantVoiceSearchConsentUi implements BottomSheetContent {
}
@Override
public void destroy() {}
public void destroy() {
mCompletionCallback = null;
mBottomSheetController.removeObserver(mBottomSheetObserver);
mWindowAndroid.removeActivityStateObserver(AssistantVoiceSearchConsentUi.this);
}
@Override
public @ContentPriority int getPriority() {
......
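
For testing, the @VisibleForTesting constructor and show() above allow the sheet to be
driven directly. A minimal sketch mirroring the instrumentation test added later in this
CL; the log tag is illustrative only:

AssistantVoiceSearchConsentUi consentUi = new AssistantVoiceSearchConsentUi(
        windowAndroid, context, SharedPreferencesManager.getInstance(),
        new SettingsLauncherImpl(), bottomSheetController);
// The callback receives the final value of ASSISTANT_VOICE_SEARCH_ENABLED,
// which defaults to false when the sheet is dismissed without a choice.
consentUi.show(result -> Log.d("AVSConsentUi", "Consent result: " + result));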
......@@ -5,6 +5,7 @@
package org.chromium.chrome.browser.omnibox.voice;
import static org.chromium.chrome.browser.preferences.ChromePreferenceKeys.ASSISTANT_LAST_VERSION;
import static org.chromium.chrome.browser.preferences.ChromePreferenceKeys.ASSISTANT_VOICE_SEARCH_ENABLED;
import static org.chromium.chrome.browser.preferences.ChromePreferenceKeys.ASSISTANT_VOICE_SEARCH_SUPPORTED;
import android.content.Context;
......@@ -162,12 +163,18 @@ public class AssistantVoiceSearchService implements TemplateUrlService.TemplateU
}
}
/** @return Whether the user has yet to be given a chance to enable the feature. */
public boolean needsEnabledCheck() {
return !mSharedPrefsManager.contains(ASSISTANT_VOICE_SEARCH_ENABLED);
}
/**
* Checks if the client should use Assistant for voice search. It's
* Checks if the client is eligible to use Assistant for voice search. It's
* {@link canRequestAssistantVoiceSearch} with an additional check for experiment groups.
*/
public boolean shouldRequestAssistantVoiceSearch() {
return mIsAssistantVoiceSearchEnabled && canRequestAssistantVoiceSearch();
return mIsAssistantVoiceSearchEnabled && canRequestAssistantVoiceSearch()
&& isEnabledByPreference();
}
/** Checks if the client meets the requirements to use Assistant for voice search. */
......@@ -195,6 +202,15 @@ public class AssistantVoiceSearchService implements TemplateUrlService.TemplateU
return AppCompatResources.getColorStateList(context, id);
}
/**
* @return Whether the user has enabled the feature; ensure {@link needsEnabledCheck} is
* called first.
*/
private boolean isEnabledByPreference() {
return mSharedPrefsManager.readBoolean(
ASSISTANT_VOICE_SEARCH_ENABLED, /* default= */ false);
}
/** Does an expensive content provider read to determine if AGSA supports Assistant. */
private void checkIfAssistantEnabled() {
final String currentAgsaVersion = mGsaState.getAgsaVersionName();
......
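
Taken together, callers should consult needsEnabledCheck() before
shouldRequestAssistantVoiceSearch(), since the latter now also reads the consent pref
(default false). A caller-side sketch, assuming an AssistantVoiceSearchService named
service:

if (service.needsEnabledCheck()) {
    // No consent decision recorded yet; show AssistantVoiceSearchConsentUi first.
} else if (service.shouldRequestAssistantVoiceSearch()) {
    // Experiment enabled, client eligible, and the user consented: use Assistant.
} else {
    // Otherwise use the system voice recognizer.
}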
......@@ -17,12 +17,15 @@ import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import org.chromium.base.Log;
import org.chromium.base.ThreadUtils;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.omnibox.LocationBarDataProvider;
import org.chromium.chrome.browser.omnibox.suggestions.AutocompleteCoordinator;
import org.chromium.chrome.browser.preferences.SharedPreferencesManager;
import org.chromium.chrome.browser.search_engines.TemplateUrlServiceFactory;
import org.chromium.chrome.browser.settings.SettingsLauncherImpl;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.util.VoiceRecognitionUtil;
import org.chromium.content_public.browser.NavigationHandle;
......@@ -40,6 +43,8 @@ import java.util.List;
* Class containing functionality related to voice search.
*/
public class VoiceRecognitionHandler {
private static final String TAG = "VoiceRecognition";
// The minimum confidence threshold that will result in navigating directly to a voice search
// response (as opposed to treating it like a typed string in the Omnibox).
@VisibleForTesting
......@@ -114,6 +119,9 @@ public class VoiceRecognitionHandler {
* @return The current {@link WindowAndroid}.
*/
WindowAndroid getWindowAndroid();
/** Clears omnibox focus so the consent dialog can be displayed when the keyboard is shown. */
void clearOmniboxFocus();
}
/**
......@@ -354,32 +362,12 @@ public class VoiceRecognitionHandler {
Activity activity = windowAndroid.getActivity().get();
if (activity == null) return;
if (mAssistantVoiceSearchService != null) {
// Report the client's eligibility for Assistant voice search.
mAssistantVoiceSearchService.reportUserEligibility();
if (mAssistantVoiceSearchService.shouldRequestAssistantVoiceSearch()) {
startAGSAForAssistantVoiceSearch(windowAndroid, source);
return;
}
}
// Check if we need to request audio permissions. If we do, a permissions prompt will
// appear and startVoiceRecognition will be called again.
if (!ensureAudioPermissionGranted(windowAndroid, source)) return;
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(
RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
activity.getComponentName().flattenToString());
intent.putExtra(RecognizerIntent.EXTRA_WEB_SEARCH_ONLY, true);
if (startAGSAForAssistantVoiceSearch(activity, windowAndroid, source)) return;
if (!showSpeechRecognitionIntent(windowAndroid, intent, source)) {
// Requery whether or not the recognition intent can be handled.
isRecognitionIntentPresent(false);
mDelegate.updateMicButtonState();
recordVoiceSearchFailureEventSource(source);
if (!startSystemForVoiceSearch(activity, windowAndroid, source)) {
// TODO(wylieb): Emit a histogram here to identify how many users attempt to use
// voice search but fail completely.
Log.w(TAG, "Couldn't find suitable provider for voice searching");
}
}
......@@ -418,15 +406,75 @@ public class VoiceRecognitionHandler {
return false;
}
/** Start AGSA to fulfill the current voice search. */
private void startAGSAForAssistantVoiceSearch(
WindowAndroid windowAndroid, @VoiceInteractionSource int source) {
/** Start the system-provided service to fulfill the current voice search. */
private boolean startSystemForVoiceSearch(
Activity activity, WindowAndroid windowAndroid, @VoiceInteractionSource int source) {
// Check if we need to request audio permissions. If we do, a permissions prompt will
// appear and startVoiceRecognition will be called again.
if (!ensureAudioPermissionGranted(windowAndroid, source)) return false;
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(
RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
activity.getComponentName().flattenToString());
intent.putExtra(RecognizerIntent.EXTRA_WEB_SEARCH_ONLY, true);
if (!showSpeechRecognitionIntent(windowAndroid, intent, source)) {
// Requery whether or not the recognition intent can be handled.
isRecognitionIntentPresent(false);
mDelegate.updateMicButtonState();
recordVoiceSearchFailureEventSource(source);
return false;
}
return true;
}
/**
* Start AGSA to fulfill the current voice search.
*
* @return Whether AGSA was actually started; when false, we should fall back to
* {@link startSystemForVoiceSearch}.
*/
private boolean startAGSAForAssistantVoiceSearch(
Activity activity, WindowAndroid windowAndroid, @VoiceInteractionSource int source) {
if (mAssistantVoiceSearchService == null) return false;
if (mAssistantVoiceSearchService.canRequestAssistantVoiceSearch()
&& mAssistantVoiceSearchService.needsEnabledCheck()) {
mDelegate.clearOmniboxFocus();
AssistantVoiceSearchConsentUi.show(windowAndroid,
SharedPreferencesManager.getInstance(), new SettingsLauncherImpl(),
(useAssistant) -> {
if (useAssistant) {
if (!startAGSAForAssistantVoiceSearch(
activity, windowAndroid, source)) {
// Fall back to system voice search.
startSystemForVoiceSearch(activity, windowAndroid, source);
}
} else {
startSystemForVoiceSearch(activity, windowAndroid, source);
}
});
return true;
}
// Report the client's eligibility for Assistant voice search.
mAssistantVoiceSearchService.reportUserEligibility();
if (!mAssistantVoiceSearchService.shouldRequestAssistantVoiceSearch()) return false;
Intent intent = mAssistantVoiceSearchService.getAssistantVoiceSearchIntent();
if (!showSpeechRecognitionIntent(windowAndroid, intent, source)) {
mDelegate.updateMicButtonState();
recordVoiceSearchFailureEventSource(source);
return false;
}
return true;
}
/**
......
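
The net result is a two-stage fallback: try Assistant voice search (possibly after
showing the consent sheet), then the system recognizer. Equivalent to the
startVoiceRecognition() logic above, condensed:

if (!startAGSAForAssistantVoiceSearch(activity, windowAndroid, source)
        && !startSystemForVoiceSearch(activity, windowAndroid, source)) {
    // Neither Assistant nor the system recognizer could handle the request.
    Log.w(TAG, "Couldn't find suitable provider for voice searching");
}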
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.omnibox.voice;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import androidx.test.filters.MediumTest;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.base.test.UiThreadTest;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.Feature;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.flags.ChromeSwitches;
import org.chromium.chrome.test.ChromeJUnit4ClassRunner;
import org.chromium.chrome.test.util.ChromeRenderTestRule;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
import org.chromium.ui.test.util.DummyUiActivityTestCase;
import java.io.IOException;
/** Render tests for AssistantVoiceSearchConsentUi. */
@RunWith(ChromeJUnit4ClassRunner.class)
@CommandLineFlags.Add({ChromeSwitches.DISABLE_FIRST_RUN_EXPERIENCE})
public class AssistantVoiceSearchConsentUiRenderTest extends DummyUiActivityTestCase {
@Rule
public ChromeRenderTestRule mRenderTestRule =
ChromeRenderTestRule.Builder.withPublicCorpus().build();
private ViewGroup mParentView;
private LinearLayout mContentView;
@Override
public void setUpTest() throws Exception {
super.setUpTest();
TestThreadUtils.runOnUiThreadBlocking(() -> {
getActivity().setContentView(R.layout.assistant_voice_search_consent_ui);
});
}
@Test
@UiThreadTest
@MediumTest
@Feature({"RenderTest"})
public void testShow() throws IOException {
mRenderTestRule.render(
getActivity().findViewById(R.id.avs_consent_ui), "avs_consent_ui_ntp");
}
}
\ No newline at end of file
......@@ -4,82 +4,156 @@
package org.chromium.chrome.browser.omnibox.voice;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.assertion.ViewAssertions.matches;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withEffectiveVisibility;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static androidx.test.espresso.matcher.ViewMatchers.withText;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.is;
import static org.chromium.chrome.test.util.ViewUtils.waitForView;
import static org.chromium.chrome.browser.preferences.ChromePreferenceKeys.ASSISTANT_VOICE_SEARCH_ENABLED;
import androidx.test.espresso.matcher.ViewMatchers;
import androidx.test.filters.MediumTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoRule;
import org.chromium.base.Callback;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.Feature;
import org.chromium.base.test.util.Criteria;
import org.chromium.base.test.util.CriteriaHelper;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.ChromeTabbedActivity;
import org.chromium.chrome.browser.flags.ChromeSwitches;
import org.chromium.chrome.browser.preferences.SharedPreferencesManager;
import org.chromium.chrome.browser.settings.SettingsLauncherImpl;
import org.chromium.chrome.test.ChromeJUnit4ClassRunner;
import org.chromium.chrome.test.ChromeTabbedActivityTestRule;
import org.chromium.chrome.test.util.ChromeRenderTestRule;
import org.chromium.components.embedder_support.util.UrlConstants;
import org.chromium.components.browser_ui.bottomsheet.BottomSheetController;
import org.chromium.components.browser_ui.bottomsheet.BottomSheetTestSupport;
import org.chromium.content_public.browser.test.util.ClickUtils;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
import java.io.IOException;
/** Tests for AssistantVoiceSearchConsentUi. */
@RunWith(ChromeJUnit4ClassRunner.class)
@CommandLineFlags.Add({ChromeSwitches.DISABLE_FIRST_RUN_EXPERIENCE})
// TODO(wylieb): Batch these tests if possible.
// TODO(wylieb): Batch these tests.
public class AssistantVoiceSearchConsentUiTest {
@Rule
public ChromeRenderTestRule mRenderTestRule =
ChromeRenderTestRule.Builder.withPublicCorpus().build();
@Rule
public ChromeTabbedActivityTestRule mActivityTestRule = new ChromeTabbedActivityTestRule();
@Rule
public MockitoRule mMockitoRule = MockitoJUnit.rule();
final SharedPreferencesManager mSharedPreferencesManager =
SharedPreferencesManager.getInstance();
@Mock
Callback<Boolean> mCallback;
AssistantVoiceSearchConsentUi mAssistantVoiceSearchConsentUi;
BottomSheetController mBottomSheetController;
BottomSheetTestSupport mBottomSheetTestSupport;
@Before
public void setUp() {
mActivityTestRule.startMainActivityOnBlankPage();
ChromeTabbedActivity cta = mActivityTestRule.getActivity();
mBottomSheetController = cta.getRootUiCoordinatorForTesting().getBottomSheetController();
mBottomSheetTestSupport = new BottomSheetTestSupport(mBottomSheetController);
mAssistantVoiceSearchConsentUi = new AssistantVoiceSearchConsentUi(cta.getWindowAndroid(),
cta, mSharedPreferencesManager, new SettingsLauncherImpl(), mBottomSheetController);
}
@After
public void tearDown() {
mSharedPreferencesManager.removeKey(ASSISTANT_VOICE_SEARCH_ENABLED);
}
private void showConsentUi() {
TestThreadUtils.runOnUiThreadBlocking(() -> {
mAssistantVoiceSearchConsentUi.show(mCallback);
mBottomSheetTestSupport.endAllAnimations();
});
}
@Test
@MediumTest
@Feature({"RenderTest"})
public void testShow() throws IOException {
ChromeTabbedActivity cta = mActivityTestRule.getActivity();
public void testDialogInteractivity_AcceptButton() {
showConsentUi();
TestThreadUtils.runOnUiThreadBlocking(() -> {
ClickUtils.clickButton(mAssistantVoiceSearchConsentUi.getContentView().findViewById(
R.id.button_primary));
mBottomSheetTestSupport.endAllAnimations();
});
CriteriaHelper.pollUiThread(() -> {
Criteria.checkThat(mSharedPreferencesManager.readBoolean(
ASSISTANT_VOICE_SEARCH_ENABLED, /* default= */ false),
is(true));
});
Mockito.verify(mCallback, Mockito.timeout(1000)).onResult(true);
}
@Test
@MediumTest
public void testDialogInteractivity_RejectButton() {
showConsentUi();
mActivityTestRule.loadUrl(UrlConstants.NTP_URL);
TestThreadUtils.runOnUiThreadBlocking(() -> {
AssistantVoiceSearchConsentUi.show(cta.getWindowAndroid(), () -> {});
waitForView(allOf(withId(R.id.avs_consent_ui), isDisplayed()));
ClickUtils.clickButton(mAssistantVoiceSearchConsentUi.getContentView().findViewById(
R.id.button_secondary));
mBottomSheetTestSupport.endAllAnimations();
});
mRenderTestRule.render(cta.findViewById(R.id.avs_consent_ui), "avs_consent_ui_ntp");
CriteriaHelper.pollUiThread(() -> {
Criteria.checkThat(mSharedPreferencesManager.readBoolean(
ASSISTANT_VOICE_SEARCH_ENABLED, /* default= */ true),
is(false));
});
Mockito.verify(mCallback, Mockito.timeout(1000)).onResult(false);
}
@Test
@MediumTest
public void testHide_DialogButtons() {
ChromeTabbedActivity cta = mActivityTestRule.getActivity();
public void testDialogInteractivity_LearnMoreButton() {
showConsentUi();
TestThreadUtils.runOnUiThreadBlocking(() -> {
AssistantVoiceSearchConsentUi.show(cta.getWindowAndroid(), () -> {});
waitForView(allOf(withId(R.id.avs_consent_ui), isDisplayed()));
ClickUtils.clickButton(cta.findViewById(R.id.button_primary));
waitForView(allOf(withId(R.id.avs_consent_ui),
withEffectiveVisibility(ViewMatchers.Visibility.GONE)));
AssistantVoiceSearchConsentUi.show(cta.getWindowAndroid(), () -> {});
ClickUtils.clickButton(cta.findViewById(R.id.button_secondary));
waitForView(allOf(withId(R.id.avs_consent_ui),
withEffectiveVisibility(ViewMatchers.Visibility.GONE)));
ClickUtils.clickButton(mAssistantVoiceSearchConsentUi.getContentView().findViewById(
R.id.avs_consent_ui_learn_more));
mBottomSheetTestSupport.endAllAnimations();
});
onView(withText(mActivityTestRule.getActivity().getResources().getString(
R.string.avs_setting_category_title)))
.check(matches(isDisplayed()));
Mockito.verify(mCallback, Mockito.times(0)).onResult(/* meaningless value */ true);
}
@Test
@MediumTest
public void testDialogInteractivity_BackButton() {
showConsentUi();
TestThreadUtils.runOnUiThreadBlocking(() -> { mBottomSheetTestSupport.handleBackPress(); });
CriteriaHelper.pollUiThread(() -> {
Criteria.checkThat(mSharedPreferencesManager.readBoolean(
ASSISTANT_VOICE_SEARCH_ENABLED, /* default= */ true),
is(false));
});
Mockito.verify(mCallback).onResult(false);
}
}
\ No newline at end of file
......@@ -356,6 +356,9 @@ public class VoiceRecognitionHandlerTest {
return mWindowAndroid;
}
@Override
public void clearOmniboxFocus() {}
public boolean updatedMicButtonState() {
return mUpdatedMicButtonState;
}
......@@ -509,6 +512,7 @@ public class VoiceRecognitionHandlerTest {
});
doReturn(false).when(mAssistantVoiceSearchService).shouldRequestAssistantVoiceSearch();
doReturn(false).when(mAssistantVoiceSearchService).needsEnabledCheck();
doReturn(mIntent).when(mAssistantVoiceSearchService).getAssistantVoiceSearchIntent();
mHandler.setAssistantVoiceSearchService(mAssistantVoiceSearchService);
}
......
......@@ -8,6 +8,7 @@ import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.chromium.chrome.browser.preferences.ChromePreferenceKeys.ASSISTANT_LAST_VERSION;
import static org.chromium.chrome.browser.preferences.ChromePreferenceKeys.ASSISTANT_VOICE_SEARCH_ENABLED;
import static org.chromium.chrome.browser.preferences.ChromePreferenceKeys.ASSISTANT_VOICE_SEARCH_SUPPORTED;
import android.app.Activity;
......@@ -95,6 +96,7 @@ public class AssistantVoiceSearchServiceUnitTest {
doReturn(true).when(mGsaState).canAgsaHandleIntent(any());
doReturn(true).when(mGsaState).agsaSupportsAssistantVoiceSearch();
doReturn(true).when(mGsaState).doesGsaAccountMatchChrome();
mSharedPreferencesManager.writeBoolean(ASSISTANT_VOICE_SEARCH_ENABLED, true);
mAssistantVoiceSearchService = new AssistantVoiceSearchService(mContext, mExternalAuthUtils,
mTemplateUrlService, mGsaState, null, mSharedPreferencesManager);
......@@ -113,6 +115,13 @@ public class AssistantVoiceSearchServiceUnitTest {
Assert.assertTrue(mAssistantVoiceSearchService.shouldRequestAssistantVoiceSearch());
}
@Test
@Feature("OmniboxAssistantVoiceSearch")
public void testStartVoiceRecognition_StartsAssistantVoiceSearch_DisabledByPref() {
mSharedPreferencesManager.writeBoolean(ASSISTANT_VOICE_SEARCH_ENABLED, false);
Assert.assertFalse(mAssistantVoiceSearchService.shouldRequestAssistantVoiceSearch());
}
@Test
@Feature("OmniboxAssistantVoiceSearch")
public void testStartVoiceRecognition_StartsAssistantVoiceSearch_ChromeNotSigned() {
......