Commit 0d166660 authored by Tommi, committed by Commit Bot

Allow <audio> elements to start rendering audio without waiting for video.

Video frames might never arrive. Waiting is appropriate for a <video> tag,
but not for <audio>.

TBR=hubbe@chromium.org

Bug: 738379
Change-Id: I3f6c886c32b06f6def8024ca585179ca81a6ef29
Reviewed-on: https://chromium-review.googlesource.com/575998
Commit-Queue: Tommi <tommi@chromium.org>
Reviewed-by: Jochen Eisinger <jochen@chromium.org>
Reviewed-by: Henrik Boström <hbos@chromium.org>
Reviewed-by: Mounir Lamouri <mlamouri@chromium.org>
Cr-Commit-Position: refs/heads/master@{#488645}
parent e868c489
......@@ -247,19 +247,23 @@ void WebMediaPlayerMS::Load(LoadType load_type,
RenderFrame* const frame = RenderFrame::FromWebFrame(frame_);
int routing_id = MSG_ROUTING_NONE;
GURL url = source.IsURL() ? GURL(source.GetAsURL()) : GURL();
if (frame) {
// Report UMA and RAPPOR metrics.
GURL url = source.IsURL() ? GURL(source.GetAsURL()) : GURL();
media::ReportMetrics(load_type, url, frame_->GetSecurityOrigin(),
media_log_.get());
audio_renderer_ = renderer_factory_->GetAudioRenderer(
web_stream, frame->GetRoutingID(), initial_audio_output_device_id_,
initial_security_origin_);
if (!audio_renderer_)
WebRtcLogMessage("Warning: Failed to instantiate audio renderer.");
routing_id = frame->GetRoutingID();
}
audio_renderer_ = renderer_factory_->GetAudioRenderer(
web_stream, routing_id, initial_audio_output_device_id_,
initial_security_origin_);
if (!audio_renderer_)
WebRtcLogMessage("Warning: Failed to instantiate audio renderer.");
if (!video_frame_provider_ && !audio_renderer_) {
SetNetworkState(WebMediaPlayer::kNetworkStateNetworkError);
return;
......@@ -271,7 +275,13 @@ void WebMediaPlayerMS::Load(LoadType load_type,
}
if (video_frame_provider_)
video_frame_provider_->Start();
if (audio_renderer_ && !video_frame_provider_) {
// When associated with an <audio> element, we don't want to wait for the
// first video frame to become available as we do for <video> elements
// (<audio> elements can also be assigned video tracks).
// For more details, see crbug.com/738379
if (audio_renderer_ &&
(client_->IsAudioElement() || !video_frame_provider_)) {
// This is audio-only mode.
SetReadyState(WebMediaPlayer::kReadyStateHaveMetadata);
SetReadyState(WebMediaPlayer::kReadyStateHaveEnoughData);
......
......@@ -202,6 +202,33 @@ class MockMediaStreamVideoRenderer : public MediaStreamVideoRenderer {
base::TimeDelta delay_till_next_generated_frame_;
};
// No-op audio renderer used by the WebMediaPlayerMS tests. Every control
// method is an empty stub; it exists only so that the player sees a non-null
// audio renderer (e.g. for the audio-only / <audio>-element load paths).
class MockMediaStreamAudioRenderer : public MediaStreamAudioRenderer {
public:
MockMediaStreamAudioRenderer() {}
void Start() override {}
void Stop() override {}
void Play() override {}
void Pause() override {}
void SetVolume(float volume) override {}
// Returns a default-constructed info; tests don't inspect it.
media::OutputDeviceInfo GetOutputDeviceInfo() override {
return media::OutputDeviceInfo();
}
void SwitchOutputDevice(
const std::string& device_id,
const url::Origin& security_origin,
const media::OutputDeviceStatusCB& callback) override {}
// Always reports time zero; no real rendering happens.
base::TimeDelta GetCurrentRenderTime() const override {
return base::TimeDelta();
}
bool IsLocalRenderer() const override { return true; }
protected:
// Protected dtor: instances are ref-counted and deleted via Release().
~MockMediaStreamAudioRenderer() override {}
};
void MockMediaStreamVideoRenderer::Start() {
started_ = true;
paused_ = false;
......@@ -356,13 +383,26 @@ class MockRenderFactory : public MediaStreamRendererFactory {
int render_frame_id,
const std::string& device_id,
const url::Origin& security_origin) override {
return nullptr;
return audio_renderer_;
}
void set_audio_renderer(scoped_refptr<MediaStreamAudioRenderer> renderer) {
audio_renderer_ = std::move(renderer);
}
void set_support_video_renderer(bool support) {
DCHECK(!provider_);
support_video_renderer_ = support;
}
bool support_video_renderer() const { return support_video_renderer_; }
private:
const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
scoped_refptr<MediaStreamVideoRenderer> provider_;
ReusableMessageLoopEvent* const message_loop_controller_;
bool support_video_renderer_ = true;
scoped_refptr<MediaStreamAudioRenderer> audio_renderer_;
};
scoped_refptr<MediaStreamVideoRenderer> MockRenderFactory::GetVideoRenderer(
......@@ -373,6 +413,9 @@ scoped_refptr<MediaStreamVideoRenderer> MockRenderFactory::GetVideoRenderer(
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
const scoped_refptr<base::TaskRunner>& worker_task_runner,
media::GpuVideoAcceleratorFactories* gpu_factories) {
if (!support_video_renderer_)
return nullptr;
provider_ = new MockMediaStreamVideoRenderer(task_runner_,
message_loop_controller_, error_cb, repaint_cb);
......@@ -475,6 +518,7 @@ class WebMediaPlayerMSTest
return blink::WebMediaPlayer::TrackId();
}
bool HasNativeControls() override { return false; }
bool IsAudioElement() override { return is_audio_element_; }
blink::WebMediaPlayer::DisplayType DisplayType() const override {
return blink::WebMediaPlayer::DisplayType::kInline;
}
......@@ -507,6 +551,7 @@ class WebMediaPlayerMSTest
WebMediaPlayerMSCompositor* compositor_;
ReusableMessageLoopEvent message_loop_controller_;
blink::WebLayer* web_layer_;
bool is_audio_element_ = false;
private:
// Main function trying to ask WebMediaPlayerMS to submit a frame for
......@@ -534,9 +579,12 @@ MockMediaStreamVideoRenderer* WebMediaPlayerMSTest::LoadAndGetFrameProvider(
EXPECT_TRUE(!!compositor_);
compositor_->SetAlgorithmEnabledForTesting(algorithm_enabled);
MockMediaStreamVideoRenderer* const provider = render_factory_->provider();
EXPECT_TRUE(!!provider);
EXPECT_TRUE(provider->Started());
MockMediaStreamVideoRenderer* provider = nullptr;
if (render_factory_->support_video_renderer()) {
provider = render_factory_->provider();
EXPECT_TRUE(!!provider);
EXPECT_TRUE(provider->Started());
}
testing::Mock::VerifyAndClearExpectations(this);
return provider;
......@@ -621,6 +669,62 @@ void WebMediaPlayerMSTest::SizeChanged() {
CheckSizeChanged(frame_size);
}
// Verifies that a video load that never receives frames does NOT advance the
// ready state: neither HaveMetadata nor HaveEnoughData may be reported.
TEST_F(WebMediaPlayerMSTest, NoDataDuringLoadForVideo) {
// Expect zero ready-state transitions while no frames arrive.
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata))
.Times(0);
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveEnoughData))
.Times(0);
LoadAndGetFrameProvider(true);
// Pump the message loop so any (erroneous) state change would fire.
message_loop_controller_.RunAndWaitForStatus(
media::PipelineStatus::PIPELINE_OK);
testing::Mock::VerifyAndClearExpectations(this);
// Expected during teardown when the player releases its web layer.
EXPECT_CALL(*this, DoSetWebLayer(false));
}
// When the client is an <audio> element (is_audio_element_ == true), the
// player must reach HaveMetadata/HaveEnoughData without waiting for a video
// frame, even though a video renderer exists. See crbug.com/738379.
TEST_F(WebMediaPlayerMSTest, NoWaitForFrameForAudio) {
is_audio_element_ = true;
// Provide an audio renderer so the audio-only readiness path can trigger.
scoped_refptr<MediaStreamAudioRenderer> audio_renderer(
new MockMediaStreamAudioRenderer());
render_factory_->set_audio_renderer(audio_renderer);
EXPECT_CALL(*this, DoNetworkStateChanged(
blink::WebMediaPlayer::kNetworkStateLoading));
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveNothing));
// These two must fire during Load() even though no video frame arrives.
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata));
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveEnoughData));
player_->Load(blink::WebMediaPlayer::kLoadTypeURL,
blink::WebMediaPlayerSource(),
blink::WebMediaPlayer::kCORSModeUnspecified);
message_loop_controller_.RunAndWaitForStatus(
media::PipelineStatus::PIPELINE_OK);
testing::Mock::VerifyAndClearExpectations(this);
// Expected during teardown when the player releases its web layer.
EXPECT_CALL(*this, DoSetWebLayer(false));
}
// With no video renderer at all (audio-only stream), the player must still
// reach HaveMetadata/HaveEnoughData based on the audio renderer alone.
TEST_F(WebMediaPlayerMSTest, NoWaitForFrameForAudioOnly) {
// Make the factory return a null video renderer and a mock audio renderer.
render_factory_->set_support_video_renderer(false);
scoped_refptr<MediaStreamAudioRenderer> audio_renderer(
new MockMediaStreamAudioRenderer());
render_factory_->set_audio_renderer(audio_renderer);
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveMetadata));
EXPECT_CALL(*this, DoReadyStateChanged(
blink::WebMediaPlayer::kReadyStateHaveEnoughData));
LoadAndGetFrameProvider(true);
// Expected during teardown when the player releases its web layer.
EXPECT_CALL(*this, DoSetWebLayer(false));
}
TEST_F(WebMediaPlayerMSTest, Playing_Normal) {
// This test sends a bunch of normal frames with increasing timestamps
// and verifies that they are produced by WebMediaPlayerMS in appropriate
......
......@@ -105,6 +105,7 @@ class DummyWebMediaPlayerClient : public blink::WebMediaPlayerClient {
return blink::WebMediaPlayer::TrackId();
}
bool HasNativeControls() override { return false; }
bool IsAudioElement() override { return false; }
blink::WebMediaPlayer::DisplayType DisplayType() const override {
return blink::WebMediaPlayer::DisplayType::kInline;
}
......
......@@ -4096,6 +4096,10 @@ bool HTMLMediaElement::HasNativeControls() {
return ShouldShowControls(RecordMetricsBehavior::kDoRecord);
}
// WebMediaPlayerClient override: true iff this element is an HTML <audio>
// element. Lets the media player skip waiting for a first video frame for
// <audio> (see crbug.com/738379).
bool HTMLMediaElement::IsAudioElement() {
return IsHTMLAudioElement();
}
WebMediaPlayer::DisplayType HTMLMediaElement::DisplayType() const {
return IsFullscreen() ? WebMediaPlayer::DisplayType::kFullscreen
: WebMediaPlayer::DisplayType::kInline;
......
......@@ -414,6 +414,7 @@ class CORE_EXPORT HTMLMediaElement
bool IsAutoplayingMuted() final;
void ActivateViewportIntersectionMonitoring(bool) final;
bool HasNativeControls() final;
bool IsAudioElement() final;
WebMediaPlayer::DisplayType DisplayType() const override;
WebRemotePlaybackClient* RemotePlaybackClient() final {
return remote_playback_client_;
......
......@@ -124,6 +124,9 @@ class BLINK_PLATFORM_EXPORT WebMediaPlayerClient {
// that the controls are currently visible.
virtual bool HasNativeControls() = 0;
// Returns true iff the client represents an HTML <audio> element.
virtual bool IsAudioElement() = 0;
// Returns the current display type of the media element.
virtual WebMediaPlayer::DisplayType DisplayType() const = 0;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment