Commit 0cae0f63 authored by Brandon Wylie, committed by Commit Bot

Add hl parameter to voice queries when the language is available

Bug: 1106862
Change-Id: Icfa00b176f54701caf7e6eadb4c6f4f749d40c5c
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2348016
Commit-Queue: Brandon Wylie <wylieb@chromium.org>
Reviewed-by: David Trainor <dtrainor@chromium.org>
Reviewed-by: Sky Malice <skym@chromium.org>
Cr-Commit-Position: refs/heads/master@{#800376}
parent a883e341
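
For context, a minimal sketch of the behavior this change introduces: when voice recognition returns a language tag, it is appended to the Google voice search URL as an hl query parameter. The class and method names below (VoiceQueryUrlSketch, appendLanguageParam) are illustrative only and not part of this change; the real logic is inlined in VoiceRecognitionHandler in the diff that follows.

// Illustrative sketch only -- names are hypothetical, logic mirrors the change below.
final class VoiceQueryUrlSketch {
    /**
     * Appends an hl= parameter to a voice search URL when a language tag is known.
     * Mirrors the assertions in the actual change: the URL must already carry at
     * least one query parameter and must not contain a fragment.
     */
    static String appendLanguageParam(String url, String language /* nullable */) {
        if (language == null) return url;
        assert url.contains("?") : "URL must contain at least one URL param.";
        assert !url.contains("#") : "URL must not contain a fragment.";
        return url + "&hl=" + language;
    }

    public static void main(String[] args) {
        // Example: a recognized query with the IETF language tag "en-us".
        System.out.println(appendLanguageParam(
                "https://www.google.com/search?q=testing", "en-us"));
        // Prints: https://www.google.com/search?q=testing&hl=en-us
    }
}
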
......@@ -14,6 +14,7 @@ import android.speech.RecognizerIntent;
import android.text.TextUtils;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import org.chromium.base.ThreadUtils;
......@@ -42,6 +43,10 @@ public class VoiceRecognitionHandler {
// response (as opposed to treating it like a typed string in the Omnibox).
@VisibleForTesting
public static final float VOICE_SEARCH_CONFIDENCE_NAVIGATE_THRESHOLD = 0.9f;
// Extra containing the languages for the returned voice transcriptions (ArrayList<String>).
// These languages are only returned for queries handled by Assistant.
@VisibleForTesting
static final String VOICE_QUERY_RESULT_LANGUAGES = "android.speech.extra.LANGUAGE";
private final Delegate mDelegate;
private Long mQueryStartTimeMs;
......@@ -112,15 +117,23 @@ public class VoiceRecognitionHandler {
public static class VoiceResult {
private final String mMatch;
private final float mConfidence;
@Nullable
private final String mLanguage;
public VoiceResult(String match, float confidence) {
this(match, confidence, null);
}
/**
* Creates an instance of a VoiceResult.
* @param match The text match from the voice recognition.
* @param confidence The confidence value of the recognition, ranging from 0.0 to 1.0.
* @param language The language of the returned query.
*/
public VoiceResult(String match, float confidence) {
public VoiceResult(String match, float confidence, @Nullable String language) {
mMatch = match;
mConfidence = confidence;
mLanguage = language;
}
/**
......@@ -136,6 +149,13 @@ public class VoiceRecognitionHandler {
public float getConfidence() {
return mConfidence;
}
/**
* @return The IETF language tag for this result, or null if no language was returned.
*/
public @Nullable String getLanguage() {
return mLanguage;
}
}
public VoiceRecognitionHandler(Delegate delegate) {
......@@ -245,6 +265,16 @@ public class VoiceRecognitionHandler {
url = TemplateUrlServiceFactory.get()
.getUrlForVoiceSearchQuery(topResultQuery)
.getSpec();
// If a language was returned to us from voice recognition, then use it. Currently,
// this is only returned when Google is the search engine. Since Google always has
// the query as a URL parameter, appending this param is always safe.
if (topResult.getLanguage() != null) {
// TODO(crbug.com/1117271): Cleanup these assertions when Assistant launches.
assert url.contains("?") : "URL must contain at least one URL param.";
assert !url.contains("#") : "URL must not contain a fragment.";
url += "&hl=" + topResult.getLanguage();
}
}
// Since voice was used, we need to let the frame know that there was a user gesture.
......@@ -271,9 +301,12 @@ public class VoiceRecognitionHandler {
ArrayList<String> strings = extras.getStringArrayList(RecognizerIntent.EXTRA_RESULTS);
float[] confidences = extras.getFloatArray(RecognizerIntent.EXTRA_CONFIDENCE_SCORES);
ArrayList<String> languages = extras.getStringArrayList(VOICE_QUERY_RESULT_LANGUAGES);
if (strings == null || confidences == null) return null;
if (strings.size() != confidences.length) return null;
// The languages list is optional, so only check its size when it's non-null.
if (languages != null && languages.size() != strings.size()) return null;
AutocompleteCoordinator autocompleteCoordinator = mDelegate.getAutocompleteCoordinator();
assert autocompleteCoordinator != null;
......@@ -288,8 +321,9 @@ public class VoiceRecognitionHandler {
// the voice engine.
String culledString = strings.get(i).replaceAll(" ", "");
String url = autocompleteCoordinator.qualifyPartialURLQuery(culledString);
results.add(
new VoiceResult(url == null ? strings.get(i) : culledString, confidences[i]));
String language = languages == null ? null : languages.get(i);
results.add(new VoiceResult(
url == null ? strings.get(i) : culledString, confidences[i], language));
}
return results;
}
......
......@@ -16,6 +16,7 @@ import android.speech.RecognizerIntent;
import android.view.ViewGroup;
import androidx.annotation.ColorRes;
import androidx.annotation.Nullable;
import androidx.test.filters.SmallTest;
import org.junit.After;
......@@ -297,6 +298,7 @@ public class VoiceRecognitionHandlerTest {
* Test implementation of {@link VoiceRecognitionHandler.Delegate}.
*/
private class TestDelegate implements VoiceRecognitionHandler.Delegate {
private String mUrl;
private boolean mUpdatedMicButtonState;
private AutocompleteCoordinator mAutocompleteCoordinator;
......@@ -309,7 +311,9 @@ public class VoiceRecognitionHandlerTest {
}
@Override
public void loadUrlFromVoice(String url) {}
public void loadUrlFromVoice(String url) {
mUrl = url;
}
@Override
public void updateMicButtonState() {
......@@ -337,6 +341,10 @@ public class VoiceRecognitionHandlerTest {
public boolean updatedMicButtonState() {
return mUpdatedMicButtonState;
}
public String getUrl() {
return mUrl;
}
}
/**
......@@ -707,6 +715,29 @@ public class VoiceRecognitionHandlerTest {
});
}
@Test
@SmallTest
public void testCallback_successWithLanguages() {
// Needs to run on the UI thread because we use the TemplateUrlService on success.
TestThreadUtils.runOnUiThreadBlocking(() -> {
mWindowAndroid.setVoiceResults(createDummyBundle("testing",
VoiceRecognitionHandler.VOICE_SEARCH_CONFIDENCE_NAVIGATE_THRESHOLD, "en-us"));
startVoiceRecognition(VoiceInteractionSource.OMNIBOX);
Assert.assertEquals(
VoiceInteractionSource.OMNIBOX, mHandler.getVoiceSearchStartEventSource());
Assert.assertEquals(
VoiceInteractionSource.OMNIBOX, mHandler.getVoiceSearchFinishEventSource());
Assert.assertEquals(true, mHandler.getVoiceSearchResult());
Assert.assertTrue(VoiceRecognitionHandler.VOICE_SEARCH_CONFIDENCE_NAVIGATE_THRESHOLD
== mHandler.getVoiceConfidenceValue());
assertVoiceResultsAreEqual(mAutocompleteVoiceResults, new String[] {"testing"},
new float[] {
VoiceRecognitionHandler.VOICE_SEARCH_CONFIDENCE_NAVIGATE_THRESHOLD},
new String[] {"en-us"});
Assert.assertTrue(mDelegate.getUrl().contains("&hl=en-us"));
});
}
@Test
@SmallTest
public void testParseResults_EmptyBundle() {
......@@ -720,6 +751,8 @@ public class VoiceRecognitionHandlerTest {
createDummyBundle(new String[] {"blah"}, new float[] {0f, 1f})));
Assert.assertNull(mHandler.convertBundleToVoiceResults(
createDummyBundle(new String[] {"blah", "foo"}, new float[] {7f})));
Assert.assertNull(mHandler.convertBundleToVoiceResults(createDummyBundle(
new String[] {"blah", "foo"}, new float[] {7f, 1f}, new String[] {"foo"})));
}
@Test
......@@ -773,29 +806,55 @@ public class VoiceRecognitionHandlerTest {
}
private static Bundle createDummyBundle(String text, float confidence) {
return createDummyBundle(new String[] {text}, new float[] {confidence});
return createDummyBundle(new String[] {text}, new float[] {confidence}, null);
}
private static Bundle createDummyBundle(
String text, float confidence, @Nullable String language) {
return createDummyBundle(new String[] {text}, new float[] {confidence},
language == null ? null : new String[] {language});
}
private static Bundle createDummyBundle(String[] texts, float[] confidences) {
return createDummyBundle(texts, confidences, null);
}
private static Bundle createDummyBundle(
String[] texts, float[] confidences, @Nullable String[] languages) {
Bundle b = new Bundle();
b.putStringArrayList(
RecognizerIntent.EXTRA_RESULTS, new ArrayList<String>(Arrays.asList(texts)));
b.putFloatArray(RecognizerIntent.EXTRA_CONFIDENCE_SCORES, confidences);
if (languages != null) {
b.putStringArrayList(VoiceRecognitionHandler.VOICE_QUERY_RESULT_LANGUAGES,
new ArrayList<String>(Arrays.asList(languages)));
}
return b;
}
private static void assertVoiceResultsAreEqual(
List<VoiceResult> results, String[] texts, float[] confidences) {
assertVoiceResultsAreEqual(results, texts, confidences, null);
}
private static void assertVoiceResultsAreEqual(
List<VoiceResult> results, String[] texts, float[] confidences, String[] languages) {
Assert.assertTrue("Invalid array sizes",
results.size() == texts.length && texts.length == confidences.length);
if (languages != null) {
Assert.assertTrue("Invalid array sizes", confidences.length == languages.length);
}
for (int i = 0; i < texts.length; ++i) {
VoiceResult result = results.get(i);
Assert.assertEquals("Match text is not equal", texts[i], result.getMatch());
Assert.assertEquals(
"Confidence is not equal", confidences[i], result.getConfidence(), 0);
if (languages != null) {
Assert.assertEquals("Languages not equal", result.getLanguage(), languages[i]);
}
}
}
}