Commit 0d2b1013 authored by Emircan Uysaler, committed by Commit Bot

Wait for a frame before loadedmetadata event is fired in WebMediaPlayerMS

This CL moves frame checks from WebMediaPlayerMS to WebMediaPlayerMSCompositor
so that they happen after |current_frame_| is updated via SetCurrentFrame().

Bug: 894317
Change-Id: I93ea3160cee18fb012d3fd6c7c32fb5fe935868f
Reviewed-on: https://chromium-review.googlesource.com/c/1289751
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: Emircan Uysaler <emircan@chromium.org>
Commit-Queue: Emircan Uysaler <emircan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#601160}
parent 5d214c23
......@@ -81,10 +81,7 @@ class WebMediaPlayerMS::FrameDeliverer {
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
scoped_refptr<base::TaskRunner> worker_task_runner,
media::GpuVideoAcceleratorFactories* gpu_factories)
: last_frame_opaque_(true),
last_frame_rotation_(media::VIDEO_ROTATION_0),
received_first_frame_(false),
main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
: main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
player_(player),
enqueue_frame_cb_(enqueue_frame_cb),
media_task_runner_(media_task_runner),
......@@ -174,43 +171,18 @@ class WebMediaPlayerMS::FrameDeliverer {
void EnqueueFrame(const scoped_refptr<media::VideoFrame>& frame) {
DCHECK(io_thread_checker_.CalledOnValidThread());
bool tracing_enabled = false;
TRACE_EVENT_CATEGORY_GROUP_ENABLED("media", &tracing_enabled);
if (tracing_enabled) {
base::TimeTicks render_time;
if (frame->metadata()->GetTimeTicks(
media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
TRACE_EVENT1("media", "EnqueueFrame", "Ideal Render Instant",
render_time.ToInternalValue());
} else {
TRACE_EVENT0("media", "EnqueueFrame");
}
}
const bool is_opaque = media::IsOpaque(frame->format());
media::VideoRotation video_rotation = media::VIDEO_ROTATION_0;
ignore_result(frame->metadata()->GetRotation(
media::VideoFrameMetadata::ROTATION, &video_rotation));
if (!received_first_frame_) {
received_first_frame_ = true;
last_frame_opaque_ = is_opaque;
last_frame_rotation_ = video_rotation;
main_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&WebMediaPlayerMS::OnFirstFrameReceived,
player_, video_rotation, is_opaque));
} else {
if (last_frame_opaque_ != is_opaque) {
last_frame_opaque_ = is_opaque;
main_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&WebMediaPlayerMS::OnOpacityChanged,
player_, is_opaque));
}
if (last_frame_rotation_ != video_rotation) {
last_frame_rotation_ = video_rotation;
main_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&WebMediaPlayerMS::OnRotationChanged,
player_, video_rotation, is_opaque));
{
bool tracing_enabled = false;
TRACE_EVENT_CATEGORY_GROUP_ENABLED("media", &tracing_enabled);
if (tracing_enabled) {
base::TimeTicks render_time;
if (frame->metadata()->GetTimeTicks(
media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
TRACE_EVENT1("media", "EnqueueFrame", "Ideal Render Instant",
render_time.ToInternalValue());
} else {
TRACE_EVENT0("media", "EnqueueFrame");
}
}
}
......@@ -234,9 +206,6 @@ class WebMediaPlayerMS::FrameDeliverer {
weak_factory_for_pool_.InvalidateWeakPtrs();
}
bool last_frame_opaque_;
media::VideoRotation last_frame_rotation_;
bool received_first_frame_;
bool render_frame_suspended_ = false;
const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
......@@ -1112,7 +1081,7 @@ void WebMediaPlayerMS::OnFirstFrameReceived(media::VideoRotation video_rotation,
bridge_ = std::move(create_bridge_callback_)
.Run(this, compositor_->GetUpdateSubmissionStateCallback());
bridge_->CreateSurfaceLayer();
bridge_->SetContentsOpaque(opaque_);
bridge_->SetContentsOpaque(is_opaque);
compositor_task_runner_->PostTask(
FROM_HERE,
......@@ -1123,11 +1092,13 @@ void WebMediaPlayerMS::OnFirstFrameReceived(media::VideoRotation video_rotation,
media::BindToCurrentLoop(base::BindRepeating(
&WebMediaPlayerMS::OnFrameSinkDestroyed, AsWeakPtr()))));
}
OnRotationChanged(video_rotation);
OnOpacityChanged(is_opaque);
SetReadyState(WebMediaPlayer::kReadyStateHaveMetadata);
SetReadyState(WebMediaPlayer::kReadyStateHaveEnoughData);
OnRotationChanged(video_rotation, is_opaque);
TriggerResize();
ResetCanvasCache();
}
void WebMediaPlayerMS::OnOpacityChanged(bool is_opaque) {
......@@ -1135,42 +1106,28 @@ void WebMediaPlayerMS::OnOpacityChanged(bool is_opaque) {
DCHECK(thread_checker_.CalledOnValidThread());
opaque_ = is_opaque;
if (!bridge_) {
// Opacity can be changed during the session without resetting
// |video_layer_|.
video_layer_->SetContentsOpaque(opaque_);
} else {
DCHECK(bridge_);
bridge_->SetContentsOpaque(opaque_);
compositor_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&WebMediaPlayerMSCompositor::UpdateIsOpaque,
compositor_, opaque_));
}
}
void WebMediaPlayerMS::OnRotationChanged(media::VideoRotation video_rotation,
bool is_opaque) {
void WebMediaPlayerMS::OnRotationChanged(media::VideoRotation video_rotation) {
DVLOG(1) << __func__;
DCHECK(thread_checker_.CalledOnValidThread());
video_rotation_ = video_rotation;
opaque_ = is_opaque;
if (!bridge_) {
// Keep the old |video_layer_| alive until SetCcLayer() is called with a new
// pointer, as it may use the pointer from the last call.
auto new_video_layer =
cc::VideoLayer::Create(compositor_.get(), video_rotation);
new_video_layer->SetContentsOpaque(is_opaque);
get_client()->SetCcLayer(new_video_layer.get());
video_layer_ = std::move(new_video_layer);
} else {
compositor_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&WebMediaPlayerMSCompositor::UpdateRotation,
compositor_, video_rotation));
}
}
......
......@@ -190,6 +190,11 @@ class CONTENT_EXPORT WebMediaPlayerMS
void OnPictureInPictureModeEnded() override;
void OnPictureInPictureControlClicked(const std::string& control_id) override;
void OnFirstFrameReceived(media::VideoRotation video_rotation,
bool is_opaque);
void OnOpacityChanged(bool is_opaque);
void OnRotationChanged(media::VideoRotation video_rotation);
bool CopyVideoTextureToPlatformTexture(
gpu::gles2::GLES2Interface* gl,
unsigned target,
......@@ -247,11 +252,6 @@ class CONTENT_EXPORT WebMediaPlayerMS
// invalid.
void OnFrameSinkDestroyed();
void OnFirstFrameReceived(media::VideoRotation video_rotation,
bool is_opaque);
void OnOpacityChanged(bool is_opaque);
void OnRotationChanged(media::VideoRotation video_rotation, bool is_opaque);
bool IsInPictureInPicture() const;
// Need repaint due to state change.
......
......@@ -227,21 +227,11 @@ void WebMediaPlayerMSCompositor::EnableSubmission(
video_frame_provider_client_ = submitter_.get();
}
void WebMediaPlayerMSCompositor::UpdateRotation(media::VideoRotation rotation) {
DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
submitter_->SetRotation(rotation);
}
void WebMediaPlayerMSCompositor::SetForceSubmit(bool force_submit) {
DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
submitter_->SetForceSubmit(force_submit);
}
void WebMediaPlayerMSCompositor::UpdateIsOpaque(bool is_opaque) {
DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
submitter_->SetIsOpaque(is_opaque);
}
gfx::Size WebMediaPlayerMSCompositor::GetCurrentSize() {
DCHECK(thread_checker_.CalledOnValidThread());
base::AutoLock auto_lock(current_frame_lock_);
......@@ -355,22 +345,25 @@ bool WebMediaPlayerMSCompositor::UpdateCurrentFrame(
if (rendering_frame_buffer_)
RenderUsingAlgorithm(deadline_min, deadline_max);
bool tracing_or_dcheck_enabled = false;
TRACE_EVENT_CATEGORY_GROUP_ENABLED("media", &tracing_or_dcheck_enabled);
{
bool tracing_or_dcheck_enabled = false;
TRACE_EVENT_CATEGORY_GROUP_ENABLED("media", &tracing_or_dcheck_enabled);
#if DCHECK_IS_ON()
tracing_or_dcheck_enabled = true;
tracing_or_dcheck_enabled = true;
#endif // DCHECK_IS_ON()
if (tracing_or_dcheck_enabled) {
base::TimeTicks render_time;
if (!current_frame_->metadata()->GetTimeTicks(
media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
DCHECK(!rendering_frame_buffer_)
<< "VideoFrames need REFERENCE_TIME to use "
"sophisticated video rendering algorithm.";
if (tracing_or_dcheck_enabled) {
base::TimeTicks render_time;
if (!current_frame_->metadata()->GetTimeTicks(
media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
DCHECK(!rendering_frame_buffer_)
<< "VideoFrames need REFERENCE_TIME to use "
"sophisticated video rendering algorithm.";
}
TRACE_EVENT_END2("media", "UpdateCurrentFrame", "Ideal Render Instant",
render_time.ToInternalValue(), "Serial", serial_);
}
TRACE_EVENT_END2("media", "UpdateCurrentFrame", "Ideal Render Instant",
render_time.ToInternalValue(), "Serial", serial_);
}
return !current_frame_rendered_;
}
......@@ -522,10 +515,45 @@ void WebMediaPlayerMSCompositor::SetCurrentFrame(
++dropped_frame_count_;
current_frame_rendered_ = false;
const bool size_changed = !current_frame_ || current_frame_->natural_size() !=
frame->natural_size();
scoped_refptr<media::VideoFrame> old_frame = std::move(current_frame_);
current_frame_ = frame;
if (size_changed) {
CheckForFrameChanges(old_frame, frame);
}
void WebMediaPlayerMSCompositor::CheckForFrameChanges(
const scoped_refptr<media::VideoFrame>& old_frame,
const scoped_refptr<media::VideoFrame>& new_frame) {
DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
const bool new_frame_is_opaque = media::IsOpaque(new_frame->format());
media::VideoRotation new_frame_video_rotation = media::VIDEO_ROTATION_0;
ignore_result(new_frame->metadata()->GetRotation(
media::VideoFrameMetadata::ROTATION, &new_frame_video_rotation));
if (!old_frame) {
main_message_loop_->task_runner()->PostTask(
FROM_HERE,
base::BindOnce(&WebMediaPlayerMS::OnFirstFrameReceived, player_,
new_frame_video_rotation, new_frame_is_opaque));
return;
}
media::VideoRotation old_frame_video_rotation = media::VIDEO_ROTATION_0;
ignore_result(old_frame->metadata()->GetRotation(
media::VideoFrameMetadata::ROTATION, &old_frame_video_rotation));
if (new_frame_video_rotation != old_frame_video_rotation) {
main_message_loop_->task_runner()->PostTask(
FROM_HERE, base::BindOnce(&WebMediaPlayerMS::OnRotationChanged, player_,
new_frame_video_rotation));
if (submitter_)
submitter_->SetRotation(new_frame_video_rotation);
}
if (new_frame_is_opaque != media::IsOpaque(old_frame->format())) {
main_message_loop_->task_runner()->PostTask(
FROM_HERE, base::BindOnce(&WebMediaPlayerMS::OnOpacityChanged, player_,
new_frame_is_opaque));
if (submitter_)
submitter_->SetIsOpaque(new_frame_is_opaque);
}
if (old_frame->natural_size() != new_frame->natural_size()) {
main_message_loop_->task_runner()->PostTask(
FROM_HERE, base::BindOnce(&WebMediaPlayerMS::TriggerResize, player_));
}
......
......@@ -94,14 +94,8 @@ class CONTENT_EXPORT WebMediaPlayerMSCompositor
bool is_opaque,
blink::WebFrameSinkDestroyedCallback frame_sink_destroyed_callback);
// Updates the rotation information for frames given to |submitter_|.
void UpdateRotation(media::VideoRotation rotation);
// Notifies the |submitter_| that the frames must be submitted.
void SetForceSubmit(bool);
// Updates the opacity information for frames given to |submitter_|.
void UpdateIsOpaque(bool);
void SetForceSubmit(bool force_submit);
// VideoFrameProvider implementation.
void SetVideoFrameProviderClient(
......@@ -161,6 +155,10 @@ class CONTENT_EXPORT WebMediaPlayerMSCompositor
// Update |current_frame_| and |dropped_frame_count_|
void SetCurrentFrame(const scoped_refptr<media::VideoFrame>& frame);
// Following the update to |current_frame_|, this will check for changes that
// require updating video layer.
void CheckForFrameChanges(const scoped_refptr<media::VideoFrame>& old_frame,
const scoped_refptr<media::VideoFrame>& new_frame);
void StartRenderingInternal();
void StopRenderingInternal();
......
......@@ -739,6 +739,12 @@ void WebMediaPlayerMSTest::NetworkStateChanged() {
void WebMediaPlayerMSTest::ReadyStateChanged() {
blink::WebMediaPlayer::ReadyState state = player_->GetReadyState();
DoReadyStateChanged(state);
if (state == blink::WebMediaPlayer::ReadyState::kReadyStateHaveMetadata &&
!player_->HasAudio()) {
const auto& size = player_->NaturalSize();
EXPECT_GT(size.width, 0);
EXPECT_GT(size.height, 0);
}
if (state == blink::WebMediaPlayer::ReadyState::kReadyStateHaveEnoughData)
player_->Play();
}
......@@ -1080,7 +1086,7 @@ TEST_P(WebMediaPlayerMSTest, RotationChange) {
MockMediaStreamVideoRenderer* provider = LoadAndGetFrameProvider(true);
const int kTestBrake = static_cast<int>(FrameType::TEST_BRAKE);
static int tokens[] = {0, 33, kTestBrake};
int tokens[] = {0, kTestBrake};
std::vector<int> timestamps(tokens, tokens + sizeof(tokens) / sizeof(int));
provider->QueueFrames(timestamps, false, false, 17, media::VIDEO_ROTATION_90);
if (enable_surface_layer_for_video_) {
......@@ -1104,6 +1110,8 @@ TEST_P(WebMediaPlayerMSTest, RotationChange) {
EXPECT_EQ(kStandardWidth, natural_size.height);
// Change rotation.
tokens[0] = 33;
timestamps = std::vector<int>(tokens, tokens + arraysize(tokens));
provider->QueueFrames(timestamps, false, false, 17, media::VIDEO_ROTATION_0);
if (enable_surface_layer_for_video_) {
EXPECT_CALL(*submitter_ptr_, SetRotation(media::VIDEO_ROTATION_0));
......@@ -1135,9 +1143,10 @@ TEST_P(WebMediaPlayerMSTest, OpacityChange) {
// Push one opaque frame.
const int kTestBrake = static_cast<int>(FrameType::TEST_BRAKE);
static int tokens[] = {0, kTestBrake};
int tokens[] = {0, kTestBrake};
std::vector<int> timestamps(tokens, tokens + arraysize(tokens));
provider->QueueFrames(timestamps, true);
if (enable_surface_layer_for_video_) {
EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
EXPECT_CALL(*submitter_ptr_, StartRendering());
......@@ -1153,29 +1162,33 @@ TEST_P(WebMediaPlayerMSTest, OpacityChange) {
CheckSizeChanged(gfx::Size(kStandardWidth, kStandardHeight)));
message_loop_controller_.RunAndWaitForStatus(
media::PipelineStatus::PIPELINE_OK);
if (!enable_surface_layer_for_video_) {
ASSERT_TRUE(layer_ != nullptr);
EXPECT_TRUE(layer_->contents_opaque());
}
// Push one transparent frame.
tokens[0] = 33;
timestamps = std::vector<int>(tokens, tokens + arraysize(tokens));
provider->QueueFrames(timestamps, false);
if (enable_surface_layer_for_video_) {
EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(false));
EXPECT_CALL(*submitter_ptr_, SetIsOpaque(false));
}
provider->QueueFrames(timestamps, false);
message_loop_controller_.RunAndWaitForStatus(
media::PipelineStatus::PIPELINE_OK);
if (!enable_surface_layer_for_video_)
if (!enable_surface_layer_for_video_) {
EXPECT_FALSE(layer_->contents_opaque());
}
// Push another transparent frame.
tokens[0] = 66;
timestamps = std::vector<int>(tokens, tokens + arraysize(tokens));
provider->QueueFrames(timestamps, true);
if (enable_surface_layer_for_video_) {
EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(true));
EXPECT_CALL(*submitter_ptr_, SetIsOpaque(true));
}
// Push another opaque frame.
provider->QueueFrames(timestamps, true);
message_loop_controller_.RunAndWaitForStatus(
media::PipelineStatus::PIPELINE_OK);
if (!enable_surface_layer_for_video_)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment