Commit e247d795 authored by David Black, committed by Commit Bot

Fix crash in Assistant that occurs on LibAssistant error.

We expect the lifecycle of a query to be:
- Pend: occurs on interaction start
- Commit: occurs on query finalize
- Activate: occurs on query response received

When LibAssistant errored out, due to connectivity loss for example,
the order of events delivered caused us to miss stages in the query
lifecycle.

Now we maintain the expected lifecycle and show an error message
to the user when this occurs.
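
For context, here is a minimal, self-contained sketch of that lifecycle and of
how the fix keeps it intact on the error path. The QueryLifecycle class, its
Stage enum, and the error string are hypothetical stand-ins (not the real
AssistantInteractionModel API or localized string); only the ordering of the
stages and the "commit the pending query, then surface an error message"
behavior mirror the actual change below.

```cpp
// Illustrative only: hypothetical QueryLifecycle type, not the real
// AssistantInteractionModel. It shows the invariant the fix enforces:
// Pend -> Commit -> Activate, with an error message added on the error path.
#include <iostream>
#include <string>
#include <vector>

class QueryLifecycle {
 public:
  enum class Stage { kNone, kPending, kCommitted, kActive };

  // Pend: occurs on interaction start.
  void Pend() { stage_ = Stage::kPending; }

  // Commit: occurs on query finalize.
  void Commit() {
    if (stage_ == Stage::kPending)
      stage_ = Stage::kCommitted;
  }

  // Activate: occurs on query response received.
  void Activate() { stage_ = Stage::kActive; }

  // Sketch of the fix: when TTS starts (the last event of an interaction),
  // commit a still-pending query so no lifecycle stage is skipped, and append
  // an error message when the interaction failed. The real change uses
  // l10n_util::GetStringUTF8(IDS_ASH_ASSISTANT_ERROR_GENERIC) for the text;
  // the literal below is a placeholder.
  void OnTtsStarted(bool due_to_error) {
    if (stage_ == Stage::kPending)
      Commit();
    if (due_to_error)
      response_.push_back("Something went wrong. Please try again.");
    Activate();
  }

  const std::vector<std::string>& response() const { return response_; }

 private:
  Stage stage_ = Stage::kNone;
  std::vector<std::string> response_;
};

int main() {
  QueryLifecycle query;
  query.Pend();  // Interaction started.
  // LibAssistant errors out before the query was ever committed.
  query.OnTtsStarted(/*due_to_error=*/true);
  for (const auto& line : query.response())
    std::cout << line << "\n";
}
```

Even when the error arrives while the query is still pending, the sketch ends
with a committed query and a visible error message, which is the behavior the
crash fix restores.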

Bug: b:112600782, b:112056476
Change-Id: Ib01860564d88326110db285e09d15470bed58598
Reviewed-on: https://chromium-review.googlesource.com/1176976
Commit-Queue: David Black <dmblack@google.com>
Reviewed-by: Xiaohui Chen <xiaohuic@chromium.org>
Reviewed-by: Oliver Chang <ochang@chromium.org>
Cr-Commit-Position: refs/heads/master@{#584490}
parent b7aa302b
@@ -14,9 +14,11 @@
 #include "ash/assistant/util/deep_link_util.h"
 #include "ash/public/interfaces/voice_interaction_controller.mojom.h"
 #include "ash/shell.h"
+#include "ash/strings/grit/ash_strings.h"
 #include "ash/voice_interaction/voice_interaction_controller.h"
 #include "base/optional.h"
 #include "base/strings/utf_string_conversions.h"
+#include "ui/base/l10n/l10n_util.h"
 
 namespace ash {
@@ -158,6 +160,14 @@ void AssistantInteractionController::OnInteractionStarted(
   if (is_voice_interaction) {
     assistant_interaction_model_.SetInputModality(InputModality::kVoice);
     assistant_interaction_model_.SetMicState(MicState::kOpen);
+
+    // When a voice interaction is initiated by hotword, we haven't yet set a
+    // pending query so this is our earliest opportunity.
+    if (assistant_interaction_model_.pending_query().type() ==
+        AssistantQueryType::kNull) {
+      assistant_interaction_model_.SetPendingQuery(
+          std::make_unique<AssistantVoiceQuery>());
+    }
   } else {
     // TODO(b/112000321): It should not be possible to reach this code without
     // having previously pended a query. It does currently happen, however, in
@@ -275,12 +285,7 @@ void AssistantInteractionController::OnTextResponse(
       std::make_unique<AssistantTextElement>(response));
 }
 
-void AssistantInteractionController::OnSpeechRecognitionStarted() {
-  assistant_interaction_model_.SetInputModality(InputModality::kVoice);
-  assistant_interaction_model_.SetMicState(MicState::kOpen);
-  assistant_interaction_model_.SetPendingQuery(
-      std::make_unique<AssistantVoiceQuery>());
-}
+void AssistantInteractionController::OnSpeechRecognitionStarted() {}
 
 void AssistantInteractionController::OnSpeechRecognitionIntermediateResult(
     const std::string& high_confidence_text,
@@ -312,12 +317,31 @@ void AssistantInteractionController::OnSpeechLevelUpdated(float speech_level) {
   assistant_interaction_model_.SetSpeechLevel(speech_level);
 }
 
-void AssistantInteractionController::OnTtsStarted() {
+void AssistantInteractionController::OnTtsStarted(bool due_to_error) {
   if (assistant_interaction_model_.interaction_state() !=
       InteractionState::kActive) {
     return;
   }
 
+  // Commit the pending query in whatever state it's in. In most cases the
+  // pending query is already committed, but we must always commit the pending
+  // query before finalizing a pending result.
+  if (assistant_interaction_model_.pending_query().type() !=
+      AssistantQueryType::kNull) {
+    assistant_interaction_model_.CommitPendingQuery();
+  }
+
+  if (due_to_error) {
+    // In the case of an error occurring during a voice interaction, this is our
+    // earliest indication that the mic has closed.
+    assistant_interaction_model_.SetMicState(MicState::kClosed);
+
+    // Add an error message to the response.
+    assistant_interaction_model_.pending_response()->AddUiElement(
+        std::make_unique<AssistantTextElement>(
+            l10n_util::GetStringUTF8(IDS_ASH_ASSISTANT_ERROR_GENERIC)));
+  }
+
   // We have an agreement with the server that TTS will always be the last part
   // of an interaction to be processed. To be timely in updating UI, we use
   // this as an opportunity to finalize the Assistant response and update the
...
@@ -87,7 +87,7 @@ class AssistantInteractionController
   void OnSpeechRecognitionEndOfUtterance() override;
   void OnSpeechRecognitionFinalResult(const std::string& final_result) override;
   void OnSpeechLevelUpdated(float speech_level) override;
-  void OnTtsStarted() override;
+  void OnTtsStarted(bool due_to_error) override;
 
   // DialogPlateObserver:
   void OnDialogPlateButtonPressed(DialogPlateButtonId id) override;
...
@@ -391,7 +391,7 @@ void AssistantManagerServiceImpl::OnRespondingStarted(bool is_error_response) {
       FROM_HERE,
       base::BindOnce(
           &AssistantManagerServiceImpl::OnRespondingStartedOnMainThread,
-          weak_factory_.GetWeakPtr()));
+          weak_factory_.GetWeakPtr(), is_error_response));
 }
 
 void AssistantManagerServiceImpl::OnSpeechLevelUpdated(
@@ -804,8 +804,10 @@ void AssistantManagerServiceImpl::OnRecognitionStateChangedOnMainThread(
   }
 }
 
-void AssistantManagerServiceImpl::OnRespondingStartedOnMainThread() {
-  interaction_subscribers_.ForAllPtrs([](auto* ptr) { ptr->OnTtsStarted(); });
+void AssistantManagerServiceImpl::OnRespondingStartedOnMainThread(
+    bool is_error_response) {
+  interaction_subscribers_.ForAllPtrs(
+      [is_error_response](auto* ptr) { ptr->OnTtsStarted(is_error_response); });
 }
 
 void AssistantManagerServiceImpl::OnSpeechLevelUpdatedOnMainThread(
...
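
One detail of the plumbing above worth calling out: OnRespondingStarted() is
invoked off the main thread (hence the post to ...OnMainThread), so
is_error_response is bound into the callback that runs on the main thread and
is then fanned out to subscribers as OnTtsStarted(is_error_response). The
plain-C++ sketch below illustrates that pattern; a std::function queue stands
in for Chromium's task runner and base::BindOnce, and the types are
illustrative, not the real API.

```cpp
// Illustrative only: std::function + std::queue stand in for Chromium's task
// runner and base::BindOnce. The point is that the error flag is captured by
// value into the task that later runs on the main thread.
#include <functional>
#include <iostream>
#include <queue>
#include <vector>

struct Subscriber {
  void OnTtsStarted(bool due_to_error) {
    std::cout << "OnTtsStarted(due_to_error=" << std::boolalpha << due_to_error
              << ")\n";
  }
};

int main() {
  std::queue<std::function<void()>> main_thread_tasks;  // fake main-thread queue
  std::vector<Subscriber> subscribers(2);

  // "Callback thread" side: capture |is_error_response| by value when posting.
  bool is_error_response = true;
  main_thread_tasks.push([&subscribers, is_error_response] {
    for (auto& subscriber : subscribers)
      subscriber.OnTtsStarted(is_error_response);
  });

  // "Main thread" side: run the posted task; it sees the captured flag.
  while (!main_thread_tasks.empty()) {
    main_thread_tasks.front()();
    main_thread_tasks.pop();
  }
}
```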
@@ -169,7 +169,7 @@ class AssistantManagerServiceImpl
       assistant_client::ConversationStateListener::RecognitionState state,
       const assistant_client::ConversationStateListener::RecognitionResult&
           recognition_result);
-  void OnRespondingStartedOnMainThread();
+  void OnRespondingStartedOnMainThread(bool is_error_response);
   void OnSpeechLevelUpdatedOnMainThread(const float speech_level);
   void OnModifySettingsAction(const std::string& modify_setting_args_proto);
...
@@ -92,8 +92,9 @@ interface AssistantInteractionSubscriber
   // Assistant got an instantaneous speech level update in dB.
   OnSpeechLevelUpdated(float speech_level);
 
-  // Assistant has started speaking.
-  OnTtsStarted();
+  // Assistant has started speaking. When TTS is started due to an error that
+  // occurred during the interaction, |due_to_error| is true.
+  OnTtsStarted(bool due_to_error);
 };
 
 // Subscribes to assistant's notification event. These events are server
...
@@ -111,7 +111,7 @@ class Service : public service_manager::Service,
   void OnSpeechRecognitionFinalResult(
       const std::string& final_result) override{};
   void OnSpeechLevelUpdated(float speech_level) override{};
-  void OnTtsStarted() override{};
+  void OnTtsStarted(bool due_to_error) override{};
 
   void BindAssistantSettingsManager(
       mojom::AssistantSettingsManagerRequest request);
...