Commit 0d2b1013 authored by Emircan Uysaler, committed by Commit Bot

Wait for a frame before loadedmetadata event is fired in WebMediaPlayerMS

This CL moves frame checks from WebMediaPlayerMS to WebMediaPlayerMSCompositor
so that they happen after |current_frame_| is updated via SetCurrentFrame().

Bug: 894317
Change-Id: I93ea3160cee18fb012d3fd6c7c32fb5fe935868f
Reviewed-on: https://chromium-review.googlesource.com/c/1289751
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: Emircan Uysaler <emircan@chromium.org>
Commit-Queue: Emircan Uysaler <emircan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#601160}
parent 5d214c23
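
For orientation, the following is a minimal, self-contained sketch of the frame-change flow this CL introduces. It is not the Chromium code: media::VideoFrame, the task runners, the submitter, and the ready-state plumbing are replaced by stand-in types (FakeFrame, FakePlayer, FakeCompositor), and everything runs on one thread. Only the ordering matters: |current_frame_| is updated first, and the first-frame / rotation / opacity / size notifications are derived afterwards by comparing the old and new frames, mirroring SetCurrentFrame() and CheckForFrameChanges() in the diff below.

// Simplified model of the flow introduced by this CL (stand-in types,
// single-threaded). In the real code the notifications are posted to the
// main thread and the ready state is only advanced once the compositor
// already holds a frame.
#include <iostream>
#include <memory>
#include <utility>

struct FakeFrame {        // stand-in for media::VideoFrame
  int rotation = 0;       // stand-in for media::VideoRotation
  bool opaque = true;
  int width = 320;
  int height = 240;
};

class FakePlayer {        // stand-in for WebMediaPlayerMS
 public:
  void OnFirstFrameReceived(int rotation, bool opaque) {
    std::cout << "first frame: rotation=" << rotation
              << " opaque=" << opaque << "\n";
    // Safe point to report kReadyStateHaveMetadata: a frame with a valid
    // natural size is already available to the compositor.
  }
  void OnRotationChanged(int rotation) {
    std::cout << "rotation changed: " << rotation << "\n";
  }
  void OnOpacityChanged(bool opaque) {
    std::cout << "opacity changed: " << opaque << "\n";
  }
  void TriggerResize() { std::cout << "resize\n"; }
};

class FakeCompositor {    // stand-in for WebMediaPlayerMSCompositor
 public:
  explicit FakeCompositor(FakePlayer* player) : player_(player) {}

  // Mirrors SetCurrentFrame(): swap in the new frame first, then compare it
  // against the previous one.
  void SetCurrentFrame(std::shared_ptr<FakeFrame> frame) {
    std::shared_ptr<FakeFrame> old_frame = std::move(current_frame_);
    current_frame_ = frame;
    CheckForFrameChanges(old_frame, frame);
  }

 private:
  // Mirrors CheckForFrameChanges(): derive notifications from the old/new
  // frame pair; in Chromium these calls are posted to the main thread.
  void CheckForFrameChanges(const std::shared_ptr<FakeFrame>& old_frame,
                            const std::shared_ptr<FakeFrame>& new_frame) {
    if (!old_frame) {
      player_->OnFirstFrameReceived(new_frame->rotation, new_frame->opaque);
      return;
    }
    if (new_frame->rotation != old_frame->rotation)
      player_->OnRotationChanged(new_frame->rotation);
    if (new_frame->opaque != old_frame->opaque)
      player_->OnOpacityChanged(new_frame->opaque);
    if (new_frame->width != old_frame->width ||
        new_frame->height != old_frame->height)
      player_->TriggerResize();
  }

  FakePlayer* player_;
  std::shared_ptr<FakeFrame> current_frame_;
};

int main() {
  FakePlayer player;
  FakeCompositor compositor(&player);
  compositor.SetCurrentFrame(std::make_shared<FakeFrame>());  // first frame
  auto rotated = std::make_shared<FakeFrame>();
  rotated->rotation = 90;
  compositor.SetCurrentFrame(rotated);  // rotation change only
  return 0;
}

This ordering is what lets the player wait for an actual frame before firing loadedmetadata: the first-frame notification can only originate from a frame the compositor already holds.
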
@@ -81,10 +81,7 @@ class WebMediaPlayerMS::FrameDeliverer {
       scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
       scoped_refptr<base::TaskRunner> worker_task_runner,
       media::GpuVideoAcceleratorFactories* gpu_factories)
-      : last_frame_opaque_(true),
-        last_frame_rotation_(media::VIDEO_ROTATION_0),
-        received_first_frame_(false),
-        main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
+      : main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
         player_(player),
         enqueue_frame_cb_(enqueue_frame_cb),
         media_task_runner_(media_task_runner),
@@ -174,43 +171,18 @@ class WebMediaPlayerMS::FrameDeliverer {
   void EnqueueFrame(const scoped_refptr<media::VideoFrame>& frame) {
     DCHECK(io_thread_checker_.CalledOnValidThread());
-    bool tracing_enabled = false;
-    TRACE_EVENT_CATEGORY_GROUP_ENABLED("media", &tracing_enabled);
-    if (tracing_enabled) {
-      base::TimeTicks render_time;
-      if (frame->metadata()->GetTimeTicks(
-              media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
-        TRACE_EVENT1("media", "EnqueueFrame", "Ideal Render Instant",
-                     render_time.ToInternalValue());
-      } else {
-        TRACE_EVENT0("media", "EnqueueFrame");
-      }
-    }
-
-    const bool is_opaque = media::IsOpaque(frame->format());
-    media::VideoRotation video_rotation = media::VIDEO_ROTATION_0;
-    ignore_result(frame->metadata()->GetRotation(
-        media::VideoFrameMetadata::ROTATION, &video_rotation));
-
-    if (!received_first_frame_) {
-      received_first_frame_ = true;
-      last_frame_opaque_ = is_opaque;
-      last_frame_rotation_ = video_rotation;
-      main_task_runner_->PostTask(
-          FROM_HERE, base::BindOnce(&WebMediaPlayerMS::OnFirstFrameReceived,
-                                    player_, video_rotation, is_opaque));
-    } else {
-      if (last_frame_opaque_ != is_opaque) {
-        last_frame_opaque_ = is_opaque;
-        main_task_runner_->PostTask(
-            FROM_HERE, base::BindOnce(&WebMediaPlayerMS::OnOpacityChanged,
-                                      player_, is_opaque));
-      }
-      if (last_frame_rotation_ != video_rotation) {
-        last_frame_rotation_ = video_rotation;
-        main_task_runner_->PostTask(
-            FROM_HERE, base::BindOnce(&WebMediaPlayerMS::OnRotationChanged,
-                                      player_, video_rotation, is_opaque));
-      }
-    }
+    {
+      bool tracing_enabled = false;
+      TRACE_EVENT_CATEGORY_GROUP_ENABLED("media", &tracing_enabled);
+      if (tracing_enabled) {
+        base::TimeTicks render_time;
+        if (frame->metadata()->GetTimeTicks(
+                media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
+          TRACE_EVENT1("media", "EnqueueFrame", "Ideal Render Instant",
+                       render_time.ToInternalValue());
+        } else {
+          TRACE_EVENT0("media", "EnqueueFrame");
+        }
+      }
+    }
@@ -234,9 +206,6 @@ class WebMediaPlayerMS::FrameDeliverer {
     weak_factory_for_pool_.InvalidateWeakPtrs();
   }
 
-  bool last_frame_opaque_;
-  media::VideoRotation last_frame_rotation_;
-  bool received_first_frame_;
   bool render_frame_suspended_ = false;
 
   const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
@@ -1112,7 +1081,7 @@ void WebMediaPlayerMS::OnFirstFrameReceived(media::VideoRotation video_rotation,
     bridge_ = std::move(create_bridge_callback_)
                   .Run(this, compositor_->GetUpdateSubmissionStateCallback());
     bridge_->CreateSurfaceLayer();
-    bridge_->SetContentsOpaque(opaque_);
+    bridge_->SetContentsOpaque(is_opaque);
     compositor_task_runner_->PostTask(
         FROM_HERE,
@@ -1123,11 +1092,13 @@ void WebMediaPlayerMS::OnFirstFrameReceived(media::VideoRotation video_rotation,
             media::BindToCurrentLoop(base::BindRepeating(
                 &WebMediaPlayerMS::OnFrameSinkDestroyed, AsWeakPtr()))));
   }
+  OnRotationChanged(video_rotation);
+  OnOpacityChanged(is_opaque);
 
   SetReadyState(WebMediaPlayer::kReadyStateHaveMetadata);
   SetReadyState(WebMediaPlayer::kReadyStateHaveEnoughData);
 
-  OnRotationChanged(video_rotation, is_opaque);
+  TriggerResize();
+  ResetCanvasCache();
 }
 
 void WebMediaPlayerMS::OnOpacityChanged(bool is_opaque) {
@@ -1135,42 +1106,28 @@ void WebMediaPlayerMS::OnOpacityChanged(bool is_opaque) {
   DCHECK(thread_checker_.CalledOnValidThread());
 
   opaque_ = is_opaque;
   if (!bridge_) {
     // Opacity can be changed during the session without resetting
     // |video_layer_|.
     video_layer_->SetContentsOpaque(opaque_);
   } else {
     DCHECK(bridge_);
     bridge_->SetContentsOpaque(opaque_);
-    compositor_task_runner_->PostTask(
-        FROM_HERE, base::BindOnce(&WebMediaPlayerMSCompositor::UpdateIsOpaque,
-                                  compositor_, opaque_));
   }
 }
 
-void WebMediaPlayerMS::OnRotationChanged(media::VideoRotation video_rotation,
-                                         bool is_opaque) {
+void WebMediaPlayerMS::OnRotationChanged(media::VideoRotation video_rotation) {
   DVLOG(1) << __func__;
   DCHECK(thread_checker_.CalledOnValidThread());
 
   video_rotation_ = video_rotation;
-  opaque_ = is_opaque;
 
   if (!bridge_) {
     // Keep the old |video_layer_| alive until SetCcLayer() is called with a new
     // pointer, as it may use the pointer from the last call.
     auto new_video_layer =
         cc::VideoLayer::Create(compositor_.get(), video_rotation);
-    new_video_layer->SetContentsOpaque(is_opaque);
     get_client()->SetCcLayer(new_video_layer.get());
     video_layer_ = std::move(new_video_layer);
-  } else {
-    compositor_task_runner_->PostTask(
-        FROM_HERE, base::BindOnce(&WebMediaPlayerMSCompositor::UpdateRotation,
-                                  compositor_, video_rotation));
   }
 }
...
@@ -190,6 +190,11 @@ class CONTENT_EXPORT WebMediaPlayerMS
   void OnPictureInPictureModeEnded() override;
   void OnPictureInPictureControlClicked(const std::string& control_id) override;
 
+  void OnFirstFrameReceived(media::VideoRotation video_rotation,
+                            bool is_opaque);
+  void OnOpacityChanged(bool is_opaque);
+  void OnRotationChanged(media::VideoRotation video_rotation);
+
   bool CopyVideoTextureToPlatformTexture(
       gpu::gles2::GLES2Interface* gl,
       unsigned target,
@@ -247,11 +252,6 @@ class CONTENT_EXPORT WebMediaPlayerMS
   // invalid.
   void OnFrameSinkDestroyed();
 
-  void OnFirstFrameReceived(media::VideoRotation video_rotation,
-                            bool is_opaque);
-  void OnOpacityChanged(bool is_opaque);
-  void OnRotationChanged(media::VideoRotation video_rotation, bool is_opaque);
-
   bool IsInPictureInPicture() const;
 
   // Need repaint due to state change.
...
@@ -227,21 +227,11 @@ void WebMediaPlayerMSCompositor::EnableSubmission(
   video_frame_provider_client_ = submitter_.get();
 }
 
-void WebMediaPlayerMSCompositor::UpdateRotation(media::VideoRotation rotation) {
-  DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
-  submitter_->SetRotation(rotation);
-}
-
 void WebMediaPlayerMSCompositor::SetForceSubmit(bool force_submit) {
   DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
   submitter_->SetForceSubmit(force_submit);
 }
 
-void WebMediaPlayerMSCompositor::UpdateIsOpaque(bool is_opaque) {
-  DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
-  submitter_->SetIsOpaque(is_opaque);
-}
-
 gfx::Size WebMediaPlayerMSCompositor::GetCurrentSize() {
   DCHECK(thread_checker_.CalledOnValidThread());
   base::AutoLock auto_lock(current_frame_lock_);
@@ -355,22 +345,25 @@ bool WebMediaPlayerMSCompositor::UpdateCurrentFrame(
   if (rendering_frame_buffer_)
     RenderUsingAlgorithm(deadline_min, deadline_max);
 
-  bool tracing_or_dcheck_enabled = false;
-  TRACE_EVENT_CATEGORY_GROUP_ENABLED("media", &tracing_or_dcheck_enabled);
-#if DCHECK_IS_ON()
-  tracing_or_dcheck_enabled = true;
-#endif  // DCHECK_IS_ON()
-  if (tracing_or_dcheck_enabled) {
-    base::TimeTicks render_time;
-    if (!current_frame_->metadata()->GetTimeTicks(
-            media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
-      DCHECK(!rendering_frame_buffer_)
-          << "VideoFrames need REFERENCE_TIME to use "
-             "sophisticated video rendering algorithm.";
-    }
-    TRACE_EVENT_END2("media", "UpdateCurrentFrame", "Ideal Render Instant",
-                     render_time.ToInternalValue(), "Serial", serial_);
-  }
+  {
+    bool tracing_or_dcheck_enabled = false;
+    TRACE_EVENT_CATEGORY_GROUP_ENABLED("media", &tracing_or_dcheck_enabled);
+#if DCHECK_IS_ON()
+    tracing_or_dcheck_enabled = true;
+#endif  // DCHECK_IS_ON()
+    if (tracing_or_dcheck_enabled) {
+      base::TimeTicks render_time;
+      if (!current_frame_->metadata()->GetTimeTicks(
+              media::VideoFrameMetadata::REFERENCE_TIME, &render_time)) {
+        DCHECK(!rendering_frame_buffer_)
+            << "VideoFrames need REFERENCE_TIME to use "
+               "sophisticated video rendering algorithm.";
+      }
+      TRACE_EVENT_END2("media", "UpdateCurrentFrame", "Ideal Render Instant",
+                       render_time.ToInternalValue(), "Serial", serial_);
+    }
+  }
   return !current_frame_rendered_;
 }
@@ -522,10 +515,45 @@ void WebMediaPlayerMSCompositor::SetCurrentFrame(
     ++dropped_frame_count_;
   current_frame_rendered_ = false;
 
-  const bool size_changed = !current_frame_ || current_frame_->natural_size() !=
-                                                   frame->natural_size();
+  scoped_refptr<media::VideoFrame> old_frame = std::move(current_frame_);
   current_frame_ = frame;
-  if (size_changed) {
+  CheckForFrameChanges(old_frame, frame);
+}
+
+void WebMediaPlayerMSCompositor::CheckForFrameChanges(
+    const scoped_refptr<media::VideoFrame>& old_frame,
+    const scoped_refptr<media::VideoFrame>& new_frame) {
+  DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
+
+  const bool new_frame_is_opaque = media::IsOpaque(new_frame->format());
+  media::VideoRotation new_frame_video_rotation = media::VIDEO_ROTATION_0;
+  ignore_result(new_frame->metadata()->GetRotation(
+      media::VideoFrameMetadata::ROTATION, &new_frame_video_rotation));
+
+  if (!old_frame) {
+    main_message_loop_->task_runner()->PostTask(
+        FROM_HERE,
+        base::BindOnce(&WebMediaPlayerMS::OnFirstFrameReceived, player_,
+                       new_frame_video_rotation, new_frame_is_opaque));
+    return;
+  }
+
+  media::VideoRotation old_frame_video_rotation = media::VIDEO_ROTATION_0;
+  ignore_result(old_frame->metadata()->GetRotation(
+      media::VideoFrameMetadata::ROTATION, &old_frame_video_rotation));
+
+  if (new_frame_video_rotation != old_frame_video_rotation) {
+    main_message_loop_->task_runner()->PostTask(
+        FROM_HERE, base::BindOnce(&WebMediaPlayerMS::OnRotationChanged, player_,
+                                  new_frame_video_rotation));
+    if (submitter_)
+      submitter_->SetRotation(new_frame_video_rotation);
+  }
+
+  if (new_frame_is_opaque != media::IsOpaque(old_frame->format())) {
+    main_message_loop_->task_runner()->PostTask(
+        FROM_HERE, base::BindOnce(&WebMediaPlayerMS::OnOpacityChanged, player_,
+                                  new_frame_is_opaque));
+    if (submitter_)
+      submitter_->SetIsOpaque(new_frame_is_opaque);
+  }
+
+  if (old_frame->natural_size() != new_frame->natural_size()) {
     main_message_loop_->task_runner()->PostTask(
         FROM_HERE, base::BindOnce(&WebMediaPlayerMS::TriggerResize, player_));
   }
...
@@ -94,14 +94,8 @@ class CONTENT_EXPORT WebMediaPlayerMSCompositor
       bool is_opaque,
       blink::WebFrameSinkDestroyedCallback frame_sink_destroyed_callback);
 
-  // Updates the rotation information for frames given to |submitter_|.
-  void UpdateRotation(media::VideoRotation rotation);
-
   // Notifies the |submitter_| that the frames must be submitted.
-  void SetForceSubmit(bool);
-
-  // Updates the opacity information for frames given to |submitter_|.
-  void UpdateIsOpaque(bool);
+  void SetForceSubmit(bool force_submit);
 
   // VideoFrameProvider implementation.
   void SetVideoFrameProviderClient(
@@ -161,6 +155,10 @@ class CONTENT_EXPORT WebMediaPlayerMSCompositor
   // Update |current_frame_| and |dropped_frame_count_|
   void SetCurrentFrame(const scoped_refptr<media::VideoFrame>& frame);
 
+  // Following the update to |current_frame_|, this will check for changes that
+  // require updating video layer.
+  void CheckForFrameChanges(const scoped_refptr<media::VideoFrame>& old_frame,
+                            const scoped_refptr<media::VideoFrame>& new_frame);
+
   void StartRenderingInternal();
   void StopRenderingInternal();
...
@@ -739,6 +739,12 @@ void WebMediaPlayerMSTest::NetworkStateChanged() {
 void WebMediaPlayerMSTest::ReadyStateChanged() {
   blink::WebMediaPlayer::ReadyState state = player_->GetReadyState();
   DoReadyStateChanged(state);
+  if (state == blink::WebMediaPlayer::ReadyState::kReadyStateHaveMetadata &&
+      !player_->HasAudio()) {
+    const auto& size = player_->NaturalSize();
+    EXPECT_GT(size.width, 0);
+    EXPECT_GT(size.height, 0);
+  }
   if (state == blink::WebMediaPlayer::ReadyState::kReadyStateHaveEnoughData)
     player_->Play();
 }
@@ -1080,7 +1086,7 @@ TEST_P(WebMediaPlayerMSTest, RotationChange) {
   MockMediaStreamVideoRenderer* provider = LoadAndGetFrameProvider(true);
 
   const int kTestBrake = static_cast<int>(FrameType::TEST_BRAKE);
-  static int tokens[] = {0, 33, kTestBrake};
+  int tokens[] = {0, kTestBrake};
   std::vector<int> timestamps(tokens, tokens + sizeof(tokens) / sizeof(int));
   provider->QueueFrames(timestamps, false, false, 17, media::VIDEO_ROTATION_90);
   if (enable_surface_layer_for_video_) {
@@ -1104,6 +1110,8 @@ TEST_P(WebMediaPlayerMSTest, RotationChange) {
   EXPECT_EQ(kStandardWidth, natural_size.height);
 
   // Change rotation.
+  tokens[0] = 33;
+  timestamps = std::vector<int>(tokens, tokens + arraysize(tokens));
   provider->QueueFrames(timestamps, false, false, 17, media::VIDEO_ROTATION_0);
   if (enable_surface_layer_for_video_) {
     EXPECT_CALL(*submitter_ptr_, SetRotation(media::VIDEO_ROTATION_0));
@@ -1135,9 +1143,10 @@ TEST_P(WebMediaPlayerMSTest, OpacityChange) {
   // Push one opaque frame.
   const int kTestBrake = static_cast<int>(FrameType::TEST_BRAKE);
-  static int tokens[] = {0, kTestBrake};
+  int tokens[] = {0, kTestBrake};
   std::vector<int> timestamps(tokens, tokens + arraysize(tokens));
   provider->QueueFrames(timestamps, true);
   if (enable_surface_layer_for_video_) {
     EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
     EXPECT_CALL(*submitter_ptr_, StartRendering());
@@ -1153,29 +1162,33 @@ TEST_P(WebMediaPlayerMSTest, OpacityChange) {
           CheckSizeChanged(gfx::Size(kStandardWidth, kStandardHeight)));
   message_loop_controller_.RunAndWaitForStatus(
       media::PipelineStatus::PIPELINE_OK);
   if (!enable_surface_layer_for_video_) {
     ASSERT_TRUE(layer_ != nullptr);
     EXPECT_TRUE(layer_->contents_opaque());
   }
 
   // Push one transparent frame.
+  tokens[0] = 33;
+  timestamps = std::vector<int>(tokens, tokens + arraysize(tokens));
+  provider->QueueFrames(timestamps, false);
   if (enable_surface_layer_for_video_) {
     EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(false));
     EXPECT_CALL(*submitter_ptr_, SetIsOpaque(false));
   }
-  provider->QueueFrames(timestamps, false);
   message_loop_controller_.RunAndWaitForStatus(
       media::PipelineStatus::PIPELINE_OK);
-  if (!enable_surface_layer_for_video_)
+  if (!enable_surface_layer_for_video_) {
     EXPECT_FALSE(layer_->contents_opaque());
+  }
 
+  // Push another transparent frame.
+  tokens[0] = 66;
+  timestamps = std::vector<int>(tokens, tokens + arraysize(tokens));
+  provider->QueueFrames(timestamps, true);
   if (enable_surface_layer_for_video_) {
     EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(true));
     EXPECT_CALL(*submitter_ptr_, SetIsOpaque(true));
   }
-  // Push another opaque frame.
-  provider->QueueFrames(timestamps, true);
   message_loop_controller_.RunAndWaitForStatus(
       media::PipelineStatus::PIPELINE_OK);
   if (!enable_surface_layer_for_video_)
...