Commit a1f63b8a authored by Antonio Gomes's avatar Antonio Gomes Committed by Commit Bot

Factor SourceType enum out of AudioDeviceFactory class

.. and move it to Blink exposed API, so it can be used by both
content and blink code while audio_device_factory.cc|h is not Onion
souped.

Eventually, the enum can be either moved back to AudioDeviceFactory
class, if it ends up being part of the Blink API, or left as is,
given that there are no plans to Onion soup the chromecast/media/audio code.

This is a preparation CL for execution phase 3.3.10 on the
design document [1].

[1] https://docs.google.com/document/d/1rHJGi1U72qZsOGIctIf7GKRaY8d7BgtVW8MaMYumzYY

BUG=704136
R=guidou@chromium.org, haraken@chromium.org

Change-Id: I1b57f4b1493994fb1438df23a34cdb02c2d42208
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1677387
Reviewed-by: Kenneth MacKay <kmackay@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Commit-Queue: Antonio Gomes <tonikitoo@igalia.com>
Cr-Commit-Position: refs/heads/master@{#672593}
parent 49f07d0d
......@@ -16,6 +16,7 @@ include_rules = [
"+ui/gfx/geometry",
"+ui/gfx/overlay_transform.h",
"+services/service_manager/public",
"+third_party/blink/public/platform/audio/web_audio_device_source_type.h",
"+third_party/widevine/cdm/buildflags.h",
"+third_party/widevine/cdm/widevine_cdm_common.h",
]
......@@ -104,7 +104,7 @@ CastAudioDeviceFactory::CreateFinalAudioRendererSink(
scoped_refptr<::media::AudioRendererSink>
CastAudioDeviceFactory::CreateAudioRendererSink(
content::AudioDeviceFactory::SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const ::media::AudioSinkParameters& params) {
// Use default implementation.
......@@ -113,7 +113,7 @@ CastAudioDeviceFactory::CreateAudioRendererSink(
scoped_refptr<::media::SwitchableAudioRendererSink>
CastAudioDeviceFactory::CreateSwitchableAudioRendererSink(
content::AudioDeviceFactory::SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const ::media::AudioSinkParameters& params) {
return base::MakeRefCounted<NonSwitchableAudioRendererSink>(NewOutputDevice(
......@@ -129,4 +129,4 @@ CastAudioDeviceFactory::CreateAudioCapturerSource(
}
} // namespace media
} // namespace chromecast
\ No newline at end of file
} // namespace chromecast
......@@ -10,6 +10,7 @@
#include "base/memory/ref_counted.h"
#include "content/renderer/media/audio/audio_device_factory.h"
#include "media/audio/audio_sink_parameters.h"
#include "third_party/blink/public/platform/audio/web_audio_device_source_type.h"
namespace media {
class AudioCapturerSource;
......@@ -31,13 +32,13 @@ class CastAudioDeviceFactory : public content::AudioDeviceFactory {
base::TimeDelta auth_timeout) override;
scoped_refptr<::media::AudioRendererSink> CreateAudioRendererSink(
content::AudioDeviceFactory::SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const ::media::AudioSinkParameters& params) override;
scoped_refptr<::media::SwitchableAudioRendererSink>
CreateSwitchableAudioRendererSink(
content::AudioDeviceFactory::SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const ::media::AudioSinkParameters& params) override;
......
......@@ -66,13 +66,13 @@ scoped_refptr<media::AudioOutputDevice> NewOutputDevice(
// This is where we decide which audio will go to mixers and which one to
// AudioOutputDevice directly.
bool IsMixable(AudioDeviceFactory::SourceType source_type) {
bool IsMixable(blink::WebAudioDeviceSourceType source_type) {
// Media element must ALWAYS go through mixer.
return source_type == AudioDeviceFactory::kSourceMediaElement;
return source_type == blink::WebAudioDeviceSourceType::kMediaElement;
}
scoped_refptr<media::SwitchableAudioRendererSink> NewMixableSink(
AudioDeviceFactory::SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const media::AudioSinkParameters& params) {
RenderThreadImpl* render_thread = RenderThreadImpl::current();
......@@ -87,19 +87,19 @@ scoped_refptr<media::SwitchableAudioRendererSink> NewMixableSink(
} // namespace
media::AudioLatency::LatencyType AudioDeviceFactory::GetSourceLatencyType(
AudioDeviceFactory::SourceType source) {
blink::WebAudioDeviceSourceType source) {
switch (source) {
case AudioDeviceFactory::kSourceWebAudioInteractive:
case blink::WebAudioDeviceSourceType::kWebAudioInteractive:
return media::AudioLatency::LATENCY_INTERACTIVE;
case AudioDeviceFactory::kSourceNone:
case AudioDeviceFactory::kSourceWebRtc:
case AudioDeviceFactory::kSourceNonRtcAudioTrack:
case AudioDeviceFactory::kSourceWebAudioBalanced:
case blink::WebAudioDeviceSourceType::kNone:
case blink::WebAudioDeviceSourceType::kWebRtc:
case blink::WebAudioDeviceSourceType::kNonRtcAudioTrack:
case blink::WebAudioDeviceSourceType::kWebAudioBalanced:
return media::AudioLatency::LATENCY_RTC;
case AudioDeviceFactory::kSourceMediaElement:
case AudioDeviceFactory::kSourceWebAudioPlayback:
case blink::WebAudioDeviceSourceType::kMediaElement:
case blink::WebAudioDeviceSourceType::kWebAudioPlayback:
return media::AudioLatency::LATENCY_PLAYBACK;
case AudioDeviceFactory::kSourceWebAudioExact:
case blink::WebAudioDeviceSourceType::kWebAudioExact:
return media::AudioLatency::LATENCY_EXACT_MS;
}
NOTREACHED();
......@@ -118,7 +118,7 @@ AudioDeviceFactory::NewAudioRendererMixerSink(
// static
scoped_refptr<media::AudioRendererSink>
AudioDeviceFactory::NewAudioRendererSink(
SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const media::AudioSinkParameters& params) {
if (factory_) {
......@@ -144,7 +144,7 @@ AudioDeviceFactory::NewAudioRendererSink(
// static
scoped_refptr<media::SwitchableAudioRendererSink>
AudioDeviceFactory::NewSwitchableAudioRendererSink(
SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const media::AudioSinkParameters& params) {
if (factory_) {
......@@ -199,7 +199,7 @@ media::OutputDeviceInfo AudioDeviceFactory::GetOutputDeviceInfo(
{base::TaskPriority::BEST_EFFORT,
base::TaskShutdownBehavior::CONTINUE_ON_SHUTDOWN}),
base::BindRepeating(&AudioDeviceFactory::NewAudioRendererSink,
AudioDeviceFactory::kSourceNone),
blink::WebAudioDeviceSourceType::kNone),
kDeleteTimeout);
return cache->GetSinkInfo(render_frame_id, params.session_id,
params.device_id);
......
......@@ -15,6 +15,7 @@
#include "media/audio/audio_source_parameters.h"
#include "media/base/audio_latency.h"
#include "media/base/output_device_info.h"
#include "third_party/blink/public/platform/audio/web_audio_device_source_type.h"
namespace media {
class AudioRendererSink;
......@@ -31,25 +32,9 @@ namespace content {
// AudioCapturerSourceFactory.
class CONTENT_EXPORT AudioDeviceFactory {
public:
// Types of audio sources. Each source can have individual mixing and/or
// latency requirements for output. The source is specified by the client when
// requesting output sink from the factory, and the factory creates the output
// sink basing on those requirements.
enum SourceType {
kSourceNone = 0,
kSourceMediaElement,
kSourceWebRtc,
kSourceNonRtcAudioTrack,
kSourceWebAudioInteractive,
kSourceWebAudioBalanced,
kSourceWebAudioPlayback,
kSourceWebAudioExact,
kSourceLast = kSourceWebAudioExact // Only used for validation of format.
};
// Maps the source type to the audio latency it requires.
static media::AudioLatency::LatencyType GetSourceLatencyType(
SourceType source);
blink::WebAudioDeviceSourceType source);
// Creates a sink for AudioRendererMixer. |render_frame_id| refers to the
// RenderFrame containing the entity producing the audio. Note: These sinks do
......@@ -66,7 +51,7 @@ class CONTENT_EXPORT AudioDeviceFactory {
// TODO(olka): merge it with NewRestartableOutputDevice() as soon as
// AudioOutputDevice is fixed to be restartable.
static scoped_refptr<media::AudioRendererSink> NewAudioRendererSink(
SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const media::AudioSinkParameters& params);
......@@ -74,7 +59,7 @@ class CONTENT_EXPORT AudioDeviceFactory {
// Basing on |source_type| and build configuration, audio played out through
// the sink goes to AOD directly or can be mixed with other audio before that.
static scoped_refptr<media::SwitchableAudioRendererSink>
NewSwitchableAudioRendererSink(SourceType source_type,
NewSwitchableAudioRendererSink(blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const media::AudioSinkParameters& params);
......@@ -109,13 +94,13 @@ class CONTENT_EXPORT AudioDeviceFactory {
base::TimeDelta auth_timeout) = 0;
virtual scoped_refptr<media::AudioRendererSink> CreateAudioRendererSink(
SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const media::AudioSinkParameters& params) = 0;
virtual scoped_refptr<media::SwitchableAudioRendererSink>
CreateSwitchableAudioRendererSink(
SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const media::AudioSinkParameters& params) = 0;
......
......@@ -53,12 +53,12 @@ class MockAudioDeviceFactory : public AudioDeviceFactory {
base::TimeDelta auth_timeout));
MOCK_METHOD3(CreateAudioRendererSink,
scoped_refptr<media::AudioRendererSink>(
SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const media::AudioSinkParameters& params));
MOCK_METHOD3(CreateSwitchableAudioRendererSink,
scoped_refptr<media::SwitchableAudioRendererSink>(
SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const media::AudioSinkParameters& params));
......
......@@ -287,7 +287,7 @@ blink::WebMediaPlayer* MediaFactory::CreateMediaPlayer(
scoped_refptr<media::SwitchableAudioRendererSink> audio_renderer_sink =
AudioDeviceFactory::NewSwitchableAudioRendererSink(
AudioDeviceFactory::kSourceMediaElement,
blink::WebAudioDeviceSourceType::kMediaElement,
render_frame_->GetRoutingID(),
media::AudioSinkParameters(0, sink_id.Utf8()));
......
......@@ -19,6 +19,7 @@
#include "media/base/audio_timestamp_helper.h"
#include "media/base/limits.h"
#include "media/base/silent_sink_suspender.h"
#include "third_party/blink/public/platform/audio/web_audio_device_source_type.h"
#include "third_party/blink/public/web/web_local_frame.h"
#include "third_party/blink/public/web/web_view.h"
......@@ -32,20 +33,20 @@ namespace content {
namespace {
AudioDeviceFactory::SourceType GetLatencyHintSourceType(
blink::WebAudioDeviceSourceType GetLatencyHintSourceType(
WebAudioLatencyHint::AudioContextLatencyCategory latency_category) {
switch (latency_category) {
case WebAudioLatencyHint::kCategoryInteractive:
return AudioDeviceFactory::kSourceWebAudioInteractive;
return blink::WebAudioDeviceSourceType::kWebAudioInteractive;
case WebAudioLatencyHint::kCategoryBalanced:
return AudioDeviceFactory::kSourceWebAudioBalanced;
return blink::WebAudioDeviceSourceType::kWebAudioBalanced;
case WebAudioLatencyHint::kCategoryPlayback:
return AudioDeviceFactory::kSourceWebAudioPlayback;
return blink::WebAudioDeviceSourceType::kWebAudioPlayback;
case WebAudioLatencyHint::kCategoryExact:
return AudioDeviceFactory::kSourceWebAudioExact;
return blink::WebAudioDeviceSourceType::kWebAudioExact;
}
NOTREACHED();
return AudioDeviceFactory::kSourceWebAudioInteractive;
return blink::WebAudioDeviceSourceType::kWebAudioInteractive;
}
int GetOutputBufferSize(const blink::WebAudioLatencyHint& latency_hint,
......
......@@ -95,12 +95,12 @@ class RendererWebAudioDeviceImplTest
base::TimeDelta));
MOCK_METHOD3(CreateSwitchableAudioRendererSink,
scoped_refptr<media::SwitchableAudioRendererSink>(
SourceType,
blink::WebAudioDeviceSourceType,
int,
const media::AudioSinkParameters&));
scoped_refptr<media::AudioRendererSink> CreateAudioRendererSink(
SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const media::AudioSinkParameters& params) override {
scoped_refptr<media::MockAudioRendererSink> mock_sink =
......
......@@ -150,8 +150,8 @@ void TrackAudioRenderer::Start() {
// ...and |sink_| will get audio data from us.
DCHECK(!sink_);
sink_ = AudioDeviceFactory::NewAudioRendererSink(
AudioDeviceFactory::kSourceNonRtcAudioTrack, playout_render_frame_id_,
{session_id_, output_device_id_});
blink::WebAudioDeviceSourceType::kNonRtcAudioTrack,
playout_render_frame_id_, {session_id_, output_device_id_});
base::AutoLock auto_lock(thread_lock_);
prior_elapsed_render_time_ = base::TimeDelta();
......@@ -247,8 +247,8 @@ void TrackAudioRenderer::SwitchOutputDevice(
scoped_refptr<media::AudioRendererSink> new_sink =
AudioDeviceFactory::NewAudioRendererSink(
AudioDeviceFactory::kSourceNonRtcAudioTrack, playout_render_frame_id_,
{session_id_, device_id});
blink::WebAudioDeviceSourceType::kNonRtcAudioTrack,
playout_render_frame_id_, {session_id_, device_id});
media::OutputDeviceStatus new_sink_status =
new_sink->GetOutputDeviceInfo().device_status();
......@@ -318,7 +318,7 @@ void TrackAudioRenderer::MaybeStartSink() {
// Specify the latency info to be passed to the browser side.
sink_params.set_latency_tag(AudioDeviceFactory::GetSourceLatencyType(
AudioDeviceFactory::kSourceNonRtcAudioTrack));
blink::WebAudioDeviceSourceType::kNonRtcAudioTrack));
sink_->Initialize(sink_params, this);
sink_->Start();
......@@ -348,8 +348,8 @@ void TrackAudioRenderer::ReconfigureSink(const media::AudioParameters& params) {
sink_->Stop();
sink_started_ = false;
sink_ = AudioDeviceFactory::NewAudioRendererSink(
AudioDeviceFactory::kSourceNonRtcAudioTrack, playout_render_frame_id_,
{session_id_, output_device_id_});
blink::WebAudioDeviceSourceType::kNonRtcAudioTrack,
playout_render_frame_id_, {session_id_, output_device_id_});
MaybeStartSink();
}
......
......@@ -190,7 +190,8 @@ bool WebRtcAudioRenderer::Initialize(WebRtcAudioRendererSource* source) {
media::AudioSinkParameters sink_params(session_id_, output_device_id_);
sink_params.processing_id = source->GetAudioProcessingId();
sink_ = AudioDeviceFactory::NewAudioRendererSink(
AudioDeviceFactory::kSourceWebRtc, source_render_frame_id_, sink_params);
blink::WebAudioDeviceSourceType::kWebRtc, source_render_frame_id_,
sink_params);
media::OutputDeviceStatus sink_status =
sink_->GetOutputDeviceInfo().device_status();
......@@ -378,7 +379,7 @@ void WebRtcAudioRenderer::SwitchOutputDevice(
sink_params.processing_id = source_->GetAudioProcessingId();
scoped_refptr<media::AudioRendererSink> new_sink =
AudioDeviceFactory::NewAudioRendererSink(
AudioDeviceFactory::kSourceWebRtc, source_render_frame_id_,
blink::WebAudioDeviceSourceType::kWebRtc, source_render_frame_id_,
sink_params);
media::OutputDeviceStatus status =
new_sink->GetOutputDeviceInfo().device_status();
......@@ -693,7 +694,7 @@ void WebRtcAudioRenderer::PrepareSink() {
// Specify the latency info to be passed to the browser side.
new_sink_params.set_latency_tag(AudioDeviceFactory::GetSourceLatencyType(
AudioDeviceFactory::AudioDeviceFactory::kSourceWebRtc));
blink::WebAudioDeviceSourceType::kWebRtc));
sink_->Initialize(new_sink_params, this);
}
......
......@@ -19,6 +19,7 @@
#include "media/base/mock_audio_renderer_sink.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/audio/web_audio_device_source_type.h"
#include "third_party/blink/public/platform/modules/mediastream/web_media_stream_audio_renderer.h"
#include "third_party/blink/public/platform/scheduler/test/renderer_scheduler_test_support.h"
#include "third_party/blink/public/platform/web_media_stream.h"
......@@ -79,9 +80,9 @@ class WebRtcAudioRendererTest : public testing::Test,
renderer_ = new WebRtcAudioRenderer(
blink::scheduler::GetSingleThreadTaskRunnerForTesting(), stream_, 1, 1,
device_id);
EXPECT_CALL(
*this, MockCreateAudioRendererSink(AudioDeviceFactory::kSourceWebRtc, _,
_, device_id, _));
EXPECT_CALL(*this, MockCreateAudioRendererSink(
blink::WebAudioDeviceSourceType::kWebRtc, _, _,
device_id, _));
EXPECT_CALL(*source_.get(), SetOutputDeviceForAec(device_id));
EXPECT_TRUE(renderer_->Initialize(source_.get()));
......@@ -98,18 +99,18 @@ class WebRtcAudioRendererTest : public testing::Test,
base::TimeDelta));
MOCK_METHOD3(CreateSwitchableAudioRendererSink,
scoped_refptr<media::SwitchableAudioRendererSink>(
SourceType,
blink::WebAudioDeviceSourceType,
int,
const media::AudioSinkParameters&));
MOCK_METHOD5(MockCreateAudioRendererSink,
void(SourceType,
void(blink::WebAudioDeviceSourceType,
int,
int,
const std::string&,
const base::Optional<base::UnguessableToken>&));
scoped_refptr<media::AudioRendererSink> CreateAudioRendererSink(
SourceType source_type,
blink::WebAudioDeviceSourceType source_type,
int render_frame_id,
const media::AudioSinkParameters& params) override {
mock_sink_ = new media::MockAudioRendererSink(
......@@ -250,7 +251,7 @@ TEST_F(WebRtcAudioRendererTest, SwitchOutputDevice) {
EXPECT_CALL(*mock_sink_.get(), Stop());
EXPECT_CALL(*this, MockCreateAudioRendererSink(
AudioDeviceFactory::kSourceWebRtc, _, _,
blink::WebAudioDeviceSourceType::kWebRtc, _, _,
kOtherOutputDeviceId, kAudioProcessingId));
EXPECT_CALL(*source_.get(), AudioRendererThreadStopped());
EXPECT_CALL(*source_.get(), SetOutputDeviceForAec(kOtherOutputDeviceId));
......@@ -277,7 +278,7 @@ TEST_F(WebRtcAudioRendererTest, SwitchOutputDeviceInvalidDevice) {
renderer_proxy_->Start();
EXPECT_CALL(*this, MockCreateAudioRendererSink(
AudioDeviceFactory::kSourceWebRtc, _, _,
blink::WebAudioDeviceSourceType::kWebRtc, _, _,
kInvalidOutputDeviceId, kAudioProcessingId));
EXPECT_CALL(*this, MockSwitchDeviceCallback(
media::OUTPUT_DEVICE_STATUS_ERROR_INTERNAL));
......@@ -301,7 +302,7 @@ TEST_F(WebRtcAudioRendererTest, InitializeWithInvalidDevice) {
kInvalidOutputDeviceId);
EXPECT_CALL(*this, MockCreateAudioRendererSink(
AudioDeviceFactory::kSourceWebRtc, _, _,
blink::WebAudioDeviceSourceType::kWebRtc, _, _,
kInvalidOutputDeviceId, kAudioProcessingId));
EXPECT_FALSE(renderer_->Initialize(source_.get()));
......
......@@ -120,6 +120,7 @@ source_set("test_headers") {
# the browser- and renderer-side components.
source_set("blink_headers") {
sources = [
"platform/audio/web_audio_device_source_type.h",
"platform/blame_context.h",
"platform/code_cache_loader.h",
"platform/file_path_conversion.h",
......
file://content/renderer/media/OWNERS
# COMPONENT: Internals>Media
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef THIRD_PARTY_BLINK_PUBLIC_PLATFORM_AUDIO_WEB_AUDIO_DEVICE_SOURCE_TYPE_H_
#define THIRD_PARTY_BLINK_PUBLIC_PLATFORM_AUDIO_WEB_AUDIO_DEVICE_SOURCE_TYPE_H_

namespace blink {

// Types of audio sources. Each source can have individual mixing and/or
// latency requirements for output. The source is specified by the client when
// requesting an output sink from the factory, and the factory creates the
// output sink based on those requirements.
//
// Exposed in the Blink public API so that both content and Blink code (and
// embedders such as chromecast) can share it while AudioDeviceFactory still
// lives in //content.
//
// TODO(crbug.com/704136): This enum originally belonged to the
// AudioDeviceFactory class (currently in //content). Move it back to the
// aforementioned class when audio_device_factory.cc|h gets Onion souped.
enum class WebAudioDeviceSourceType {
  kNone = 0,
  kMediaElement,
  kWebRtc,
  kNonRtcAudioTrack,
  kWebAudioInteractive,
  kWebAudioBalanced,
  kWebAudioPlayback,
  kWebAudioExact,
  kLast = kWebAudioExact  // Only used for validation of format.
};

}  // namespace blink

#endif  // THIRD_PARTY_BLINK_PUBLIC_PLATFORM_AUDIO_WEB_AUDIO_DEVICE_SOURCE_TYPE_H_
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment