Commit 4b48f2de authored by Hongchan Choi, committed by Commit Bot

Move Autoplay-related code from BaseAudioContext to AudioContext

This CL splits the Autoplay-related code out of BaseAudioContext (BAC)
and moves it into AudioContext (AC). It simply changes where the code
lives, without changing any logic.

Note that the code around the resume() promise resolvers has not been
modified in this CL. That code is not directly related to autoplay and
needs a bit of rewriting of its own.

Bug: 851608
Test: AudioContextAutoplayTest
Change-Id: I2e0b2b10fc56e86e4724635aba0fe452c73c54da
Reviewed-on: https://chromium-review.googlesource.com/1097988
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Raymond Toy <rtoy@chromium.org>
Commit-Queue: Hongchan Choi <hongchan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#567082}
parent 4b840ba0
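In outline, the CL makes the base class's two autoplay hooks virtual and moves the autoplay state into the realtime subclass. A condensed C++ sketch of the resulting class shapes, for orientation only (it elides the real members, macros, and headers shown in the diff below):

// Condensed, illustrative sketch of the split; the actual declarations
// live in base_audio_context.h, audio_context.h, and
// offline_audio_context.h in the diff below.
class BaseAudioContext {
 protected:
  // Virtual (previously non-virtual) so AudioContext can record the
  // autoplay status before the base class tears down.
  virtual void Uninitialize();

 public:
  // Called by AudioScheduledSourceNode.start() for the Autoplay metric.
  virtual void MaybeRecordStartAttempt() = 0;
};

class AudioContext : public BaseAudioContext {
 protected:
  void Uninitialize() final;  // RecordAutoplayStatus(), then the base version.

 public:
  void MaybeRecordStartAttempt() final;

 private:
  // Autoplay state, moved here from BaseAudioContext. (Initialized via the
  // constructor init list in the real code; default-initialized here.)
  bool user_gesture_required_ = false;
  base::Optional<AutoplayStatus> autoplay_status_;
};

class OfflineAudioContext final : public BaseAudioContext {
 public:
  // OfflineAudioContext is not affected by Autoplay, so this does nothing.
  void MaybeRecordStartAttempt() final {}
};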
@@ -305,10 +305,10 @@ jumbo_source_set("unit_tests") {
"serviceworkers/web_embedded_worker_impl_test.cc",
"wake_lock/screen_wake_lock_test.cc",
"webaudio/audio_basic_processor_handler_test.cc",
"webaudio/audio_context_autoplay_test.cc",
"webaudio/audio_context_test.cc",
"webaudio/audio_worklet_global_scope_test.cc",
"webaudio/audio_worklet_thread_test.cc",
"webaudio/base_audio_context_test.cc",
"webaudio/convolver_node_test.cc",
"webaudio/dynamics_compressor_node_test.cc",
"webaudio/script_processor_node_test.cc",
......
@@ -4,6 +4,7 @@
#include "third_party/blink/renderer/modules/webaudio/audio_context.h"
#include "build/build_config.h"
#include "third_party/blink/public/platform/web_audio_latency_hint.h"
#include "third_party/blink/renderer/bindings/core/v8/exception_state.h"
#include "third_party/blink/renderer/bindings/core/v8/script_promise_resolver.h"
@@ -109,12 +110,39 @@ AudioContext* AudioContext::Create(Document& document,
AudioContext::AudioContext(Document& document,
const WebAudioLatencyHint& latency_hint)
: BaseAudioContext(&document, kRealtimeContext),
context_id_(g_context_id++) {
context_id_(g_context_id++),
user_gesture_required_(false) {
destination_node_ = DefaultAudioDestinationNode::Create(this, latency_hint);
switch (GetAutoplayPolicy()) {
case AutoplayPolicy::Type::kNoUserGestureRequired:
break;
case AutoplayPolicy::Type::kUserGestureRequired:
case AutoplayPolicy::Type::kUserGestureRequiredForCrossOrigin:
if (document.GetFrame() &&
document.GetFrame()->IsCrossOriginSubframe()) {
autoplay_status_ = AutoplayStatus::kAutoplayStatusFailed;
user_gesture_required_ = true;
}
break;
case AutoplayPolicy::Type::kDocumentUserActivationRequired:
autoplay_status_ = AutoplayStatus::kAutoplayStatusFailed;
user_gesture_required_ = true;
break;
}
Initialize();
}
void AudioContext::Uninitialize() {
DCHECK(IsMainThread());
RecordAutoplayStatus();
BaseAudioContext::Uninitialize();
}
AudioContext::~AudioContext() {
DCHECK(!autoplay_status_.has_value());
#if DEBUG_AUDIONODE_REFERENCES
fprintf(stderr, "[%16p]: AudioContext::~AudioContext(): %u\n", this,
context_id_);
@@ -274,4 +302,108 @@ double AudioContext::baseLatency() const {
return FramesPerBuffer() / static_cast<double>(sampleRate());
}
void AudioContext::MaybeRecordStartAttempt() {
if (!user_gesture_required_ || !AreAutoplayRequirementsFulfilled())
return;
DCHECK(!autoplay_status_.has_value() ||
autoplay_status_ != AutoplayStatus::kAutoplayStatusSucceeded);
autoplay_status_ = AutoplayStatus::kAutoplayStatusFailedWithStart;
}
AutoplayPolicy::Type AudioContext::GetAutoplayPolicy() const {
if (RuntimeEnabledFeatures::AutoplayIgnoresWebAudioEnabled()) {
// When ignored, the policy is different on Android compared to Desktop.
#if defined(OS_ANDROID)
return AutoplayPolicy::Type::kUserGestureRequired;
#else
// Force no user gesture required on desktop.
return AutoplayPolicy::Type::kNoUserGestureRequired;
#endif
}
Document* document = GetDocument();
DCHECK(document);
return AutoplayPolicy::GetAutoplayPolicyForDocument(*document);
}
bool AudioContext::AreAutoplayRequirementsFulfilled() const {
switch (GetAutoplayPolicy()) {
case AutoplayPolicy::Type::kNoUserGestureRequired:
return true;
case AutoplayPolicy::Type::kUserGestureRequired:
case AutoplayPolicy::Type::kUserGestureRequiredForCrossOrigin:
return Frame::HasTransientUserActivation(
GetDocument() ? GetDocument()->GetFrame() : nullptr);
case AutoplayPolicy::Type::kDocumentUserActivationRequired:
return AutoplayPolicy::IsDocumentAllowedToPlay(*GetDocument());
}
NOTREACHED();
return false;
}
void AudioContext::MaybeUnlockUserGesture() {
if (!user_gesture_required_ || !AreAutoplayRequirementsFulfilled())
return;
DCHECK(!autoplay_status_.has_value() ||
autoplay_status_ != AutoplayStatus::kAutoplayStatusSucceeded);
user_gesture_required_ = false;
autoplay_status_ = AutoplayStatus::kAutoplayStatusSucceeded;
}
bool AudioContext::IsAllowedToStart() const {
if (!user_gesture_required_)
return true;
Document* document = ToDocument(GetExecutionContext());
DCHECK(document);
switch (GetAutoplayPolicy()) {
case AutoplayPolicy::Type::kNoUserGestureRequired:
NOTREACHED();
break;
case AutoplayPolicy::Type::kUserGestureRequired:
case AutoplayPolicy::Type::kUserGestureRequiredForCrossOrigin:
DCHECK(document->GetFrame() &&
document->GetFrame()->IsCrossOriginSubframe());
document->AddConsoleMessage(ConsoleMessage::Create(
kOtherMessageSource, kWarningMessageLevel,
"The AudioContext was not allowed to start. It must be resumed (or "
"created) from a user gesture event handler. https://goo.gl/7K7WLu"));
break;
case AutoplayPolicy::Type::kDocumentUserActivationRequired:
document->AddConsoleMessage(ConsoleMessage::Create(
kOtherMessageSource, kWarningMessageLevel,
"The AudioContext was not allowed to start. It must be resumed (or "
"created) after a user gesture on the page. https://goo.gl/7K7WLu"));
break;
}
return false;
}
void AudioContext::RecordAutoplayStatus() {
if (!autoplay_status_.has_value())
return;
DEFINE_STATIC_LOCAL(
EnumerationHistogram, autoplay_histogram,
("WebAudio.Autoplay", AutoplayStatus::kAutoplayStatusCount));
DEFINE_STATIC_LOCAL(
EnumerationHistogram, cross_origin_autoplay_histogram,
("WebAudio.Autoplay.CrossOrigin", AutoplayStatus::kAutoplayStatusCount));
autoplay_histogram.Count(autoplay_status_.value());
if (GetDocument()->GetFrame() &&
GetDocument()->GetFrame()->IsCrossOriginSubframe()) {
cross_origin_autoplay_histogram.Count(autoplay_status_.value());
}
autoplay_status_.reset();
}
} // namespace blink
@@ -7,6 +7,7 @@
#include "third_party/blink/renderer/bindings/core/v8/script_promise.h"
#include "third_party/blink/renderer/bindings/core/v8/script_promise_resolver.h"
#include "third_party/blink/renderer/core/html/media/autoplay_policy.h"
#include "third_party/blink/renderer/modules/webaudio/audio_context_options.h"
#include "third_party/blink/renderer/modules/webaudio/base_audio_context.h"
#include "third_party/blink/renderer/platform/heap/handle.h"
@@ -44,17 +45,59 @@ class MODULES_EXPORT AudioContext : public BaseAudioContext {
void getOutputTimestamp(ScriptState*, AudioTimestamp&);
double baseLatency() const;
// For metrics purposes, records when start() is called on an
// AudioScheduledSourceHandler or an AudioBufferSourceHandler without a user
// gesture while the AudioContext requires a user gesture.
void MaybeRecordStartAttempt() final;
protected:
AudioContext(Document&, const WebAudioLatencyHint&);
void Uninitialize() final;
void DidClose() final;
private:
friend class AudioContextAutoplayTest;
// Do not change the order of this enum; it is used for metrics.
enum AutoplayStatus {
// The AudioContext failed to activate because of user gesture requirements.
kAutoplayStatusFailed = 0,
// Same as AutoplayStatusFailed but start() on a node was called with a user
// gesture.
kAutoplayStatusFailedWithStart = 1,
// The AudioContext had user gesture requirements and was able to activate
// with a user gesture.
kAutoplayStatusSucceeded = 2,
// Keep at the end.
kAutoplayStatusCount
};
// Returns the AutoplayPolicy currently applying to this instance.
AutoplayPolicy::Type GetAutoplayPolicy() const;
// Returns whether the autoplay requirements are fulfilled.
bool AreAutoplayRequirementsFulfilled() const;
// Unlocks the user gesture requirement, if any, when a user gesture is
// being processed.
void MaybeUnlockUserGesture();
// Returns whether the AudioContext is allowed to start rendering.
bool IsAllowedToStart() const;
// Record the current autoplay status and clear it.
void RecordAutoplayStatus();
void StopRendering();
unsigned context_id_;
Member<ScriptPromiseResolver> close_resolver_;
// Whether a user gesture is required to start this AudioContext.
bool user_gesture_required_;
base::Optional<AutoplayStatus> autoplay_status_;
};
} // namespace blink
......
@@ -34,7 +34,6 @@
#include "third_party/blink/renderer/core/dom/document.h"
#include "third_party/blink/renderer/core/dom/dom_exception.h"
#include "third_party/blink/renderer/core/frame/settings.h"
#include "third_party/blink/renderer/core/html/media/autoplay_policy.h"
#include "third_party/blink/renderer/core/html/media/html_media_element.h"
#include "third_party/blink/renderer/core/inspector/console_message.h"
#include "third_party/blink/renderer/core/inspector/console_types.h"
@@ -94,7 +93,6 @@ BaseAudioContext::BaseAudioContext(Document* document,
is_cleared_(false),
is_resolving_resume_promises_(false),
has_posted_cleanup_task_(false),
user_gesture_required_(false),
connection_count_(0),
deferred_task_handler_(DeferredTaskHandler::Create()),
context_state_(kSuspended),
@@ -103,41 +101,13 @@ BaseAudioContext::BaseAudioContext(Document* document,
periodic_wave_square_(nullptr),
periodic_wave_sawtooth_(nullptr),
periodic_wave_triangle_(nullptr),
output_position_() {
switch (context_type) {
case kRealtimeContext:
switch (GetAutoplayPolicy()) {
case AutoplayPolicy::Type::kNoUserGestureRequired:
break;
case AutoplayPolicy::Type::kUserGestureRequired:
case AutoplayPolicy::Type::kUserGestureRequiredForCrossOrigin:
if (document->GetFrame() &&
document->GetFrame()->IsCrossOriginSubframe()) {
autoplay_status_ = AutoplayStatus::kAutoplayStatusFailed;
user_gesture_required_ = true;
}
break;
case AutoplayPolicy::Type::kDocumentUserActivationRequired:
autoplay_status_ = AutoplayStatus::kAutoplayStatusFailed;
user_gesture_required_ = true;
break;
}
break;
case kOfflineContext:
// Nothing needed for offline context
break;
default:
NOTREACHED();
break;
}
}
output_position_() {}
BaseAudioContext::~BaseAudioContext() {
GetDeferredTaskHandler().ContextWillBeDestroyed();
DCHECK(!active_source_nodes_.size());
DCHECK(!is_resolving_resume_promises_);
DCHECK(!resume_resolvers_.size());
DCHECK(!autoplay_status_.has_value());
}
void BaseAudioContext::Initialize() {
@@ -187,8 +157,6 @@ void BaseAudioContext::Uninitialize() {
DCHECK(listener_);
listener_->WaitForHRTFDatabaseLoaderThreadCompletion();
RecordAutoplayStatus();
Clear();
}
@@ -607,15 +575,6 @@ PeriodicWave* BaseAudioContext::GetPeriodicWave(int type) {
}
}
void BaseAudioContext::MaybeRecordStartAttempt() {
if (!user_gesture_required_ || !AreAutoplayRequirementsFulfilled())
return;
DCHECK(!autoplay_status_.has_value() ||
autoplay_status_ != AutoplayStatus::kAutoplayStatusSucceeded);
autoplay_status_ = AutoplayStatus::kAutoplayStatusFailedWithStart;
}
String BaseAudioContext::state() const {
// These strings had better match the strings for AudioContextState in
// AudioContext.idl.
@@ -682,38 +641,6 @@ Document* BaseAudioContext::GetDocument() const {
return ToDocument(GetExecutionContext());
}
AutoplayPolicy::Type BaseAudioContext::GetAutoplayPolicy() const {
if (RuntimeEnabledFeatures::AutoplayIgnoresWebAudioEnabled()) {
// When ignored, the policy is different on Android compared to Desktop.
#if defined(OS_ANDROID)
return AutoplayPolicy::Type::kUserGestureRequired;
#else
// Force no user gesture required on desktop.
return AutoplayPolicy::Type::kNoUserGestureRequired;
#endif
}
Document* document = GetDocument();
DCHECK(document);
return AutoplayPolicy::GetAutoplayPolicyForDocument(*document);
}
bool BaseAudioContext::AreAutoplayRequirementsFulfilled() const {
switch (GetAutoplayPolicy()) {
case AutoplayPolicy::Type::kNoUserGestureRequired:
return true;
case AutoplayPolicy::Type::kUserGestureRequired:
case AutoplayPolicy::Type::kUserGestureRequiredForCrossOrigin:
return Frame::HasTransientUserActivation(
GetDocument() ? GetDocument()->GetFrame() : nullptr);
case AutoplayPolicy::Type::kDocumentUserActivationRequired:
return AutoplayPolicy::IsDocumentAllowedToPlay(*GetDocument());
}
NOTREACHED();
return false;
}
void BaseAudioContext::NotifySourceNodeStartedProcessing(AudioNode* node) {
DCHECK(IsMainThread());
GraphAutoLocker locker(this);
@@ -886,48 +813,6 @@ void BaseAudioContext::RejectPendingDecodeAudioDataResolvers() {
decode_audio_resolvers_.clear();
}
void BaseAudioContext::MaybeUnlockUserGesture() {
if (!user_gesture_required_ || !AreAutoplayRequirementsFulfilled())
return;
DCHECK(!autoplay_status_.has_value() ||
autoplay_status_ != AutoplayStatus::kAutoplayStatusSucceeded);
user_gesture_required_ = false;
autoplay_status_ = AutoplayStatus::kAutoplayStatusSucceeded;
}
bool BaseAudioContext::IsAllowedToStart() const {
if (!user_gesture_required_)
return true;
Document* document = ToDocument(GetExecutionContext());
DCHECK(document);
switch (GetAutoplayPolicy()) {
case AutoplayPolicy::Type::kNoUserGestureRequired:
NOTREACHED();
break;
case AutoplayPolicy::Type::kUserGestureRequired:
case AutoplayPolicy::Type::kUserGestureRequiredForCrossOrigin:
DCHECK(document->GetFrame() &&
document->GetFrame()->IsCrossOriginSubframe());
document->AddConsoleMessage(ConsoleMessage::Create(
kOtherMessageSource, kWarningMessageLevel,
"The AudioContext was not allowed to start. It must be resumed (or "
"created) from a user gesture event handler."));
break;
case AutoplayPolicy::Type::kDocumentUserActivationRequired:
document->AddConsoleMessage(ConsoleMessage::Create(
kOtherMessageSource, kWarningMessageLevel,
"The AudioContext was not allowed to start. It must be resume (or "
"created) after a user gesture on the page. https://goo.gl/7K7WLu"));
break;
}
return false;
}
AudioIOPosition BaseAudioContext::OutputPosition() {
DCHECK(IsMainThread());
GraphAutoLocker locker(this);
@@ -950,27 +835,6 @@ void BaseAudioContext::RejectPendingResolvers() {
RejectPendingDecodeAudioDataResolvers();
}
void BaseAudioContext::RecordAutoplayStatus() {
if (!autoplay_status_.has_value())
return;
DEFINE_STATIC_LOCAL(
EnumerationHistogram, autoplay_histogram,
("WebAudio.Autoplay", AutoplayStatus::kAutoplayStatusCount));
DEFINE_STATIC_LOCAL(
EnumerationHistogram, cross_origin_autoplay_histogram,
("WebAudio.Autoplay.CrossOrigin", AutoplayStatus::kAutoplayStatusCount));
autoplay_histogram.Count(autoplay_status_.value());
if (GetDocument()->GetFrame() &&
GetDocument()->GetFrame()->IsCrossOriginSubframe()) {
cross_origin_autoplay_histogram.Count(autoplay_status_.value());
}
autoplay_status_.reset();
}
const AtomicString& BaseAudioContext::InterfaceName() const {
return EventTargetNames::AudioContext;
}
@@ -986,7 +850,6 @@ void BaseAudioContext::StartRendering() {
// set the state.
DCHECK(IsMainThread());
DCHECK(destination_node_);
DCHECK(IsAllowedToStart());
if (context_state_ == kSuspended) {
destination()->GetAudioDestinationHandler().StartRendering();
......
@@ -34,7 +34,6 @@
#include "third_party/blink/renderer/bindings/modules/v8/v8_decode_success_callback.h"
#include "third_party/blink/renderer/core/dom/events/event_listener.h"
#include "third_party/blink/renderer/core/dom/pausable_object.h"
#include "third_party/blink/renderer/core/html/media/autoplay_policy.h"
#include "third_party/blink/renderer/core/typed_arrays/array_buffer_view_helpers.h"
#include "third_party/blink/renderer/core/typed_arrays/dom_typed_array.h"
#include "third_party/blink/renderer/modules/event_target_modules.h"
@@ -309,9 +308,6 @@ class MODULES_EXPORT BaseAudioContext
DEFINE_ATTRIBUTE_EVENT_LISTENER(statechange);
// Start the AudioContext. `isAllowedToStart()` MUST be called
// before. This does NOT set the context state to running. The
// caller must set the state AFTER calling startRendering.
void StartRendering();
void NotifyStateChange();
@@ -328,10 +324,8 @@ class MODULES_EXPORT BaseAudioContext
// initialized internally if necessary.
PeriodicWave* GetPeriodicWave(int type);
// For metrics purposes, records when start() is called on an
// AudioScheduledSourceHandler or an AudioBufferSourceHandler without a user
// gesture while the AudioContext requires a user gesture.
void MaybeRecordStartAttempt();
// Called by AudioScheduledSourceNode.start() for the Autoplay metric.
virtual void MaybeRecordStartAttempt() = 0;
// AudioWorklet IDL
AudioWorklet* audioWorklet() const;
@@ -359,7 +353,7 @@ class MODULES_EXPORT BaseAudioContext
explicit BaseAudioContext(Document*, enum ContextType);
void Initialize();
void Uninitialize();
virtual void Uninitialize();
void SetContextState(AudioContextState);
@@ -386,33 +380,13 @@ class MODULES_EXPORT BaseAudioContext
void RejectPendingDecodeAudioDataResolvers();
// Unlocks the user gesture requirement, if any, when a user gesture is
// being processed.
void MaybeUnlockUserGesture();
// Returns whether the AudioContext is allowed to start rendering.
bool IsAllowedToStart() const;
AudioIOPosition OutputPosition();
// Returns the Document with which the instance is associated.
Document* GetDocument() const;
private:
friend class BaseAudioContextAutoplayTest;
friend class DISABLED_BaseAudioContextAutoplayTest;
// Do not change the order of this enum; it is used for metrics.
enum AutoplayStatus {
// The AudioContext failed to activate because of user gesture requirements.
kAutoplayStatusFailed = 0,
// Same as AutoplayStatusFailed but start() on a node was called with a user
// gesture.
kAutoplayStatusFailedWithStart = 1,
// The AudioContext had user gesture requirements and was able to activate
// with a user gesture.
kAutoplayStatusSucceeded = 2,
// Keep at the end.
kAutoplayStatusCount
};
friend class AudioContextAutoplayTest;
bool is_cleared_;
void Clear();
@@ -421,15 +395,6 @@ class MODULES_EXPORT BaseAudioContext
// haven't finished playing. Make sure to release them here.
void ReleaseActiveSourceNodes();
// Returns the Document with which the instance is associated.
Document* GetDocument() const;
// Returns the AutoplayPolicy currently applying to this instance.
AutoplayPolicy::Type GetAutoplayPolicy() const;
// Returns whether the autoplay requirements are fulfilled.
bool AreAutoplayRequirementsFulfilled() const;
// Listener for the PannerNodes
Member<AudioListener> listener_;
@@ -472,9 +437,6 @@ class MODULES_EXPORT BaseAudioContext
// resolvers.
virtual void RejectPendingResolvers();
// Record the current autoplay status and clear it.
void RecordAutoplayStatus();
// True if we're in the process of resolving promises for resume(). Resolving
// can take some time and the audio context process loop is very fast, so we
// don't want to call resolve an excessive number of times.
@@ -485,9 +447,6 @@ class MODULES_EXPORT BaseAudioContext
// thread. Cleared by the main thread task once it has run.
bool has_posted_cleanup_task_;
// Whether a user gesture is required to start this AudioContext.
bool user_gesture_required_;
unsigned connection_count_;
// Graph locking.
@@ -520,7 +479,6 @@ class MODULES_EXPORT BaseAudioContext
// It is somewhat arbitrary and could be increased if necessary.
enum { kMaxNumberOfChannels = 32 };
base::Optional<AutoplayStatus> autoplay_status_;
AudioIOPosition output_position_;
Member<AudioWorklet> audio_worklet_;
......
@@ -85,6 +85,9 @@ class MODULES_EXPORT OfflineAudioContext final : public BaseAudioContext {
// from the map (m_scheduledSuspends) and resolved.
void ResolveSuspendOnMainThread(size_t);
// OfflineAudioContext is not affected by Autoplay, so this MUST do nothing.
void MaybeRecordStartAttempt() final {}
// The HashMap with 'zero' key is needed because |currentSampleFrame| can be
// zero.
using SuspendMap = HeapHashMap<size_t,
......
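For orientation, the comment added above StartRendering() in base_audio_context.h spells out the caller contract that replaces the removed DCHECK. A hypothetical caller sketch follows; StartContextIfAllowed is an invented name, and the real resume()/start() call sites are outside this CL:

// Hypothetical caller (illustrative only) honoring the StartRendering()
// contract: check IsAllowedToStart() first, set the state afterwards.
void AudioContext::StartContextIfAllowed() {  // invented helper, not in the CL
  // Drops the user-gesture requirement if a gesture currently satisfies
  // the autoplay policy (records kAutoplayStatusSucceeded).
  MaybeUnlockUserGesture();

  // Emits the console warning and bails out while the context is blocked.
  if (!IsAllowedToStart())
    return;

  StartRendering();           // does NOT set the context state to running
  SetContextState(kRunning);  // the caller sets the state AFTER StartRendering
}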