Commit ccd1147a authored by Xida Chen, committed by Commit Bot

Revert "Switch to using an explicit ended signal for HTMLMediaElement."

This reverts commit e11c77ab.

Reason for revert:
Suspected of causing a layout test failure:
https://ci.chromium.org/p/chromium/builders/ci/linux-trusty-rel/7513

Original change's description:
> Switch to using an explicit ended signal for HTMLMediaElement.
> 
> We've long used now > dur as an implicit signal, but with poorly muxed
> media this can be unreliable. Since EndedPlayback() is used for a few
> important signals, we should ensure it's always right. As such, switch
> to using the explicit signal known by the WebMediaPlayer.
> 
> This has been the source of a lot of flakiness over the years and we've
> been thinking about doing this since 2014.
> 
> The only tricky part is in handling seeks to the end of file correctly;
> our pipeline previously required that play be called before we would
> trigger a real pipeline ended event. Instead we relied on HTMLME just
> declaring the playback ended at now >= dur. Now we will "render" end
> of stream even if playback hasn't started if we have nothing to play.
> 
> Bug: 409280, 1035472
> Test: Updated unittests, all existing tests pass.
> 
> Change-Id: I1fbea34e8ac9e772d5b3573cd57df5d8c065e9c2
> Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2021292
> Reviewed-by: Guido Urdaneta <guidou@chromium.org>
> Reviewed-by: Raymond Toy <rtoy@chromium.org>
> Reviewed-by: Mounir Lamouri <mlamouri@chromium.org>
> Reviewed-by: John Rummell <jrummell@chromium.org>
> Commit-Queue: Dale Curtis <dalecurtis@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#737397}

TBR=dalecurtis@chromium.org,jrummell@chromium.org,mlamouri@chromium.org,rtoy@chromium.org,guidou@chromium.org

Change-Id: I99a7000fa2bc33db0b9abc3b257bc56f0778fb2c
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: 409280, 1035472
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2033541
Reviewed-by: Xida Chen <xidachen@chromium.org>
Commit-Queue: Xida Chen <xidachen@chromium.org>
Cr-Commit-Position: refs/heads/master@{#737411}
parent a16fbb09
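For readers skimming the revert, the original change's description above contrasts the implicit "now >= dur" ended check with an explicit ended signal from the player. A minimal standalone sketch of the two approaches follows; the struct and field names (duration, current_time, player_ended) are illustrative stand-ins, not the actual HTMLMediaElement or WebMediaPlayer members.

// Illustrative sketch only; names are hypothetical, not Chromium's.
#include <cmath>
#include <cstdio>
#include <limits>

struct MediaState {
  double duration = std::numeric_limits<double>::quiet_NaN();  // NaN until metadata is known
  double current_time = 0.0;
  bool player_ended = false;  // explicit end-of-stream signal from the pipeline
};

// Implicit signal (the behavior this revert restores): ended once the clock
// reaches the reported duration. With poorly muxed media, current_time can
// stall just short of duration, so this can misreport the ended state.
bool EndedPlaybackImplicit(const MediaState& s) {
  if (std::isnan(s.duration) || std::isinf(s.duration))
    return false;  // unknown or infinite duration never ends
  return s.duration > 0 && s.current_time >= s.duration;
}

// Explicit signal (the approach the reverted change introduced): trust the
// pipeline's own end-of-stream notification instead of comparing timestamps.
bool EndedPlaybackExplicit(const MediaState& s) {
  return s.player_ended;
}

int main() {
  MediaState s;
  s.duration = 10.0;
  s.current_time = 9.97;   // playback clock stops slightly short of duration
  s.player_ended = true;   // demuxer/renderers have already hit end of stream
  std::printf("implicit: %d, explicit: %d\n", EndedPlaybackImplicit(s),
              EndedPlaybackExplicit(s));
}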
@@ -1089,12 +1089,13 @@ base::TimeDelta WebMediaPlayerImpl::GetCurrentTimeInternal() const {
 double WebMediaPlayerImpl::CurrentTime() const {
   DCHECK(main_task_runner_->BelongsToCurrentThread());
   DCHECK_NE(ready_state_, WebMediaPlayer::kReadyStateHaveNothing);
-  return GetCurrentTimeInternal().InSecondsF();
-}
-
-bool WebMediaPlayerImpl::IsEnded() const {
-  DCHECK(main_task_runner_->BelongsToCurrentThread());
-  return ended_;
+
+  // TODO(scherkus): Replace with an explicit ended signal to HTMLMediaElement,
+  // see http://crbug.com/409280
+  // Note: Duration() may be infinity.
+  return (ended_ && !std::isinf(Duration()))
+             ? Duration()
+             : GetCurrentTimeInternal().InSecondsF();
 }
 
 WebMediaPlayer::NetworkState WebMediaPlayerImpl::GetNetworkState() const {
...
@@ -167,7 +167,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
   double Duration() const override;
   virtual double timelineOffset() const;
   double CurrentTime() const override;
-  bool IsEnded() const override;
 
   bool PausedWhenHidden() const override;
...
@@ -149,8 +149,6 @@ class MEDIA_EXPORT VideoRendererAlgorithm {
     return frame_queue_.back().end_time;
   }
 
-  const VideoFrame& last_frame() const { return *frame_queue_.back().frame; }
-
   // Current render interval.
   base::TimeDelta render_interval() const { return render_interval_; }
...
@@ -822,14 +822,9 @@ void AudioRendererImpl::DecodedAudioReady(AudioDecoderStream::Status status,
 bool AudioRendererImpl::HandleDecodedBuffer_Locked(
     scoped_refptr<AudioBuffer> buffer) {
   lock_.AssertAcquired();
-
-  bool should_render_end_of_stream = false;
   if (buffer->end_of_stream()) {
     received_end_of_stream_ = true;
     algorithm_->MarkEndOfStream();
-
-    // We received no audio to play before EOS, so enter the ended state.
-    if (first_packet_timestamp_ == kNoTimestamp)
-      should_render_end_of_stream = true;
   } else {
     if (buffer->IsBitstreamFormat() && state_ == kPlaying) {
       if (IsBeforeStartTime(*buffer))
@@ -896,13 +891,6 @@ bool AudioRendererImpl::HandleDecodedBuffer_Locked(
   if (received_end_of_stream_ || algorithm_->IsQueueAdequateForPlayback()) {
     if (buffering_state_ == BUFFERING_HAVE_NOTHING)
       SetBufferingState_Locked(BUFFERING_HAVE_ENOUGH);
-
-    // This must be done after SetBufferingState_Locked() to ensure the
-    // proper state transitions for higher levels.
-    if (should_render_end_of_stream) {
-      task_runner_->PostTask(
-          FROM_HERE, base::BindOnce(&AudioRendererImpl::OnPlaybackEnded,
-                                    weak_factory_.GetWeakPtr()));
-    }
     return false;
   }
   return true;
...
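For context on the AudioRendererImpl hunk above: the removed block is the original change's handling of an end-of-stream that arrives before any audio, where the ended state is signaled right after buffering reaches HAVE_ENOUGH rather than waiting for playback to start. A simplified, self-contained sketch of that decision follows; the class and types are hypothetical, and only the member names mirror the diff.

// Simplified sketch of the removed logic; not the real AudioRendererImpl API.
#include <cstdio>
#include <optional>

struct Buffer {
  bool end_of_stream = false;
  double timestamp = 0.0;
};

class TinyAudioRenderer {
 public:
  // Mirrors the removed decision: returns true when the ended signal should be
  // posted immediately because end of stream arrived before any audio.
  bool HandleDecodedBuffer(const Buffer& buffer) {
    bool should_render_end_of_stream = false;
    if (buffer.end_of_stream) {
      received_end_of_stream_ = true;
      // We received no audio to play before EOS, so enter the ended state.
      if (!first_packet_timestamp_)
        should_render_end_of_stream = true;
    } else if (!first_packet_timestamp_) {
      first_packet_timestamp_ = buffer.timestamp;
    }
    return should_render_end_of_stream;
  }

 private:
  bool received_end_of_stream_ = false;
  std::optional<double> first_packet_timestamp_;
};

int main() {
  TinyAudioRenderer renderer;
  Buffer eos;
  eos.end_of_stream = true;
  // An EOS buffer with no preceding audio should trigger the ended signal.
  std::printf("render ended immediately: %d\n",
              renderer.HandleDecodedBuffer(eos));
}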
@@ -78,10 +78,6 @@ ACTION_P(EnterPendingDecoderInitStateAction, test) {
   test->EnterPendingDecoderInitState(std::move(arg2));
 }
 
-ACTION_P(AssertNotYetEnded, test) {
-  ASSERT_FALSE(test->ended());
-}
-
 class AudioRendererImplTest : public ::testing::Test, public RendererClient {
  public:
   std::vector<std::unique_ptr<AudioDecoder>> CreateAudioDecoderForTest() {
@@ -1135,17 +1131,20 @@ TEST_F(AudioRendererImplTest, RenderingDelayDoesNotOverflow) {
 
 TEST_F(AudioRendererImplTest, ImmediateEndOfStream) {
   Initialize();
-
-  StartTicking();
-  renderer_->SetMediaTime(base::TimeDelta());
-  renderer_->StartPlaying();
-  WaitForPendingRead();
-
-  // The buffering state change must occur before the ended signal.
-  EXPECT_CALL(*this, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH,
-                                            BUFFERING_CHANGE_REASON_UNKNOWN))
-      .WillOnce(AssertNotYetEnded(this));
-  DeliverEndOfStream();
-
+  {
+    SCOPED_TRACE("Preroll()");
+    renderer_->StartPlaying();
+    WaitForPendingRead();
+    EXPECT_CALL(*this, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH,
+                                              BUFFERING_CHANGE_REASON_UNKNOWN));
+    DeliverEndOfStream();
+  }
+  StartTicking();
+
+  // Read a single frame. We shouldn't be able to satisfy it.
+  EXPECT_FALSE(ended());
+  EXPECT_FALSE(ConsumeBufferedData(OutputFrames(1)));
+  base::RunLoop().RunUntilIdle();
+
   EXPECT_TRUE(ended());
 }
...
@@ -569,6 +569,9 @@ void VideoRendererImpl::FrameReady(VideoDecoderStream::Status status,
   // Attempt to purge bad frames in case of underflow or backgrounding.
   RemoveFramesForUnderflowOrBackgroundRendering();
 
+  // We may have removed all frames above and have reached end of stream.
+  MaybeFireEndedCallback_Locked(time_progressing_);
+
   // Update any statistics since the last call.
   UpdateStats_Locked();
@@ -595,10 +598,6 @@ void VideoRendererImpl::FrameReady(VideoDecoderStream::Status status,
   if (buffering_state_ == BUFFERING_HAVE_NOTHING && HaveEnoughData_Locked())
     TransitionToHaveEnough_Locked();
 
-  // We may have removed all frames above and have reached end of stream. This
-  // must happen after the buffering state change has been signaled.
-  MaybeFireEndedCallback_Locked(time_progressing_);
-
   // Always request more decoded video if we have capacity.
   AttemptRead_Locked();
 }
@@ -802,12 +801,8 @@ void VideoRendererImpl::MaybeFireEndedCallback_Locked(bool time_progressing) {
   if (!received_end_of_stream_ || rendered_end_of_stream_)
     return;
 
-  const bool have_frames_after_start_time =
-      algorithm_->frames_queued() &&
-      !IsBeforeStartTime(algorithm_->last_frame());
-
   // Don't fire ended if time isn't moving and we have frames.
-  if (!time_progressing && have_frames_after_start_time)
+  if (!time_progressing && algorithm_->frames_queued())
     return;
@@ -820,9 +815,6 @@ void VideoRendererImpl::MaybeFireEndedCallback_Locked(bool time_progressing) {
   } else if (algorithm_->frames_queued() == 1u &&
              algorithm_->average_frame_duration().is_zero()) {
     should_render_end_of_stream = true;
-  } else if (algorithm_->frames_queued() == 1u &&
-             algorithm_->render_interval().is_zero()) {
-    should_render_end_of_stream = true;
   } else if (algorithm_->frames_queued() == 1u &&
              algorithm_->effective_frames_queued() == 1) {
     const auto end_delay =
...
@@ -416,13 +416,9 @@ TEST_F(VideoRendererImplTest, InitializeAndEndOfStream) {
   {
     SCOPED_TRACE("Waiting for BUFFERING_HAVE_ENOUGH");
     WaitableMessageLoopEvent event;
-    {
-      // Buffering state changes must happen before end of stream.
-      testing::InSequence in_sequence;
-      EXPECT_CALL(mock_cb_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH, _))
-          .WillOnce(RunClosure(event.GetClosure()));
-      EXPECT_CALL(mock_cb_, OnEnded());
-    }
+    EXPECT_CALL(mock_cb_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH, _))
+        .WillOnce(RunClosure(event.GetClosure()));
+    EXPECT_CALL(mock_cb_, OnEnded());
     SatisfyPendingDecodeWithEndOfStream();
     event.RunAndWait();
   }
@@ -432,37 +428,6 @@ TEST_F(VideoRendererImplTest, InitializeAndEndOfStream) {
   Destroy();
 }
 
-TEST_F(VideoRendererImplTest, InitializeAndEndOfStreamOneStaleFrame) {
-  Initialize();
-  StartPlayingFrom(10000);
-  QueueFrames("0");
-  QueueFrame(DecodeStatus::OK, VideoFrame::CreateEOSFrame());
-  WaitForPendingDecode();
-  {
-    SCOPED_TRACE("Waiting for BUFFERING_HAVE_ENOUGH");
-    WaitableMessageLoopEvent event;
-    EXPECT_CALL(mock_cb_, OnVideoNaturalSizeChange(_)).Times(1);
-    EXPECT_CALL(mock_cb_, FrameReceived(HasTimestampMatcher(0)));
-    EXPECT_CALL(mock_cb_, OnStatisticsUpdate(_)).Times(AnyNumber());
-    EXPECT_CALL(mock_cb_, OnVideoOpacityChange(_)).Times(1);
-    {
-      // Buffering state changes must happen before end of stream.
-      testing::InSequence in_sequence;
-      EXPECT_CALL(mock_cb_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH, _))
-          .WillOnce(RunClosure(event.GetClosure()));
-      EXPECT_CALL(mock_cb_, OnEnded());
-    }
-    SatisfyPendingDecode();
-    event.RunAndWait();
-  }
-
-  // Firing a time state changed to true should be ignored...
-  renderer_->OnTimeProgressing();
-  EXPECT_FALSE(null_video_sink_->is_started());
-
-  Destroy();
-}
-
 TEST_F(VideoRendererImplTest, ReinitializeForAnotherStream) {
   Initialize();
   StartPlayingFrom(0);
...
@@ -190,7 +190,6 @@ class WebMediaPlayer {
   virtual bool Seeking() const = 0;
   virtual double Duration() const = 0;
   virtual double CurrentTime() const = 0;
-  virtual bool IsEnded() const = 0;
   virtual bool PausedWhenHidden() const { return false; }
...
@@ -145,7 +145,6 @@ class BLINK_MODULES_EXPORT WebMediaPlayerMS
   bool Seeking() const override;
   double Duration() const override;
   double CurrentTime() const override;
-  bool IsEnded() const override;
 
   // Internal states of loading and network.
   WebMediaPlayer::NetworkState GetNetworkState() const override;
...
@@ -3289,14 +3289,17 @@ void HTMLMediaElement::TimeChanged() {
   // 4.8.12.9 steps 12-14. Needed if no ReadyState change is associated with the
   // seek.
   if (seeking_ && ready_state_ >= kHaveCurrentData &&
-      !GetWebMediaPlayer()->Seeking()) {
+      !GetWebMediaPlayer()->Seeking())
     FinishSeek();
-  }
+
+  double now = CurrentPlaybackPosition();
+  double dur = duration();
 
   // When the current playback position reaches the end of the media resource
   // when the direction of playback is forwards, then the user agent must follow
   // these steps:
-  if (EndedPlayback(LoopCondition::kIgnored)) {
+  if (!std::isnan(dur) && dur && now >= dur &&
+      GetDirectionOfPlayback() == kForward) {
     // If the media element has a loop attribute specified
     if (Loop()) {
       // then seek to the earliest possible position of the media resource and
@@ -3463,9 +3466,9 @@ bool HTMLMediaElement::CouldPlayIfEnoughData() const {
 }
 
 bool HTMLMediaElement::EndedPlayback(LoopCondition loop_condition) const {
+  double dur = duration();
+
   // If we have infinite duration, we'll never have played for long enough to
   // have ended playback.
-  const double dur = duration();
-
   if (std::isnan(dur) || dur == std::numeric_limits<double>::infinity())
     return false;
@@ -3476,13 +3479,20 @@ bool HTMLMediaElement::EndedPlayback(LoopCondition loop_condition) const {
   if (ready_state_ < kHaveMetadata)
     return false;
 
-  DCHECK_EQ(GetDirectionOfPlayback(), kForward);
-  if (auto* wmp = GetWebMediaPlayer()) {
-    return wmp->IsEnded() &&
+  // and the current playback position is the end of the media resource and the
+  // direction of playback is forwards, Either the media element does not have a
+  // loop attribute specified,
+  double now = CurrentPlaybackPosition();
+  if (GetDirectionOfPlayback() == kForward) {
+    return dur > 0 && now >= dur &&
            (loop_condition == LoopCondition::kIgnored || !Loop());
   }
 
-  return false;
+  // or the current playback position is the earliest possible position and the
+  // direction of playback is backwards
+  DCHECK_EQ(GetDirectionOfPlayback(), kBackward);
+  return now <= EarliestPossiblePosition();
 }
...
@@ -39,7 +39,6 @@ class MockWebMediaPlayer : public EmptyWebMediaPlayer {
   MOCK_CONST_METHOD0(HasVideo, bool());
   MOCK_CONST_METHOD0(Duration, double());
   MOCK_CONST_METHOD0(CurrentTime, double());
-  MOCK_CONST_METHOD0(IsEnded, bool());
   MOCK_CONST_METHOD0(GetNetworkState, NetworkState());
   MOCK_CONST_METHOD0(WouldTaintOrigin, bool());
   MOCK_METHOD1(SetLatencyHint, void(double));
@@ -272,8 +271,7 @@ TEST_P(HTMLMediaElementTest, CouldPlayIfEnoughDataRespondsToEnded) {
   MockWebMediaPlayer* mock_wmpi =
       reinterpret_cast<MockWebMediaPlayer*>(Media()->GetWebMediaPlayer());
-  ASSERT_NE(mock_wmpi, nullptr);
-  EXPECT_CALL(*mock_wmpi, IsEnded()).WillRepeatedly(Return(false));
+  EXPECT_NE(mock_wmpi, nullptr);
   EXPECT_TRUE(CouldPlayIfEnoughData());
 
   // Playback can only end once the ready state is above kHaveMetadata.
@@ -286,7 +284,6 @@ TEST_P(HTMLMediaElementTest, CouldPlayIfEnoughDataRespondsToEnded) {
   testing::Mock::VerifyAndClearExpectations(mock_wmpi);
   EXPECT_CALL(*mock_wmpi, CurrentTime())
       .WillRepeatedly(Return(Media()->duration()));
-  EXPECT_CALL(*mock_wmpi, IsEnded()).WillRepeatedly(Return(true));
   EXPECT_FALSE(CouldPlayIfEnoughData());
   EXPECT_TRUE(Media()->ended());
 }
...
@@ -66,7 +66,6 @@ class MockWebMediaPlayer : public WebMediaPlayer {
   bool Seeking() const override { return false; }
   double Duration() const override { return 0.0; }
   double CurrentTime() const override { return 0.0; }
-  bool IsEnded() const override { return false; }
   NetworkState GetNetworkState() const override { return kNetworkStateEmpty; }
   ReadyState GetReadyState() const override { return kReadyStateHaveNothing; }
   SurfaceLayerMode GetVideoSurfaceLayerMode() const override {
...
@@ -783,11 +783,6 @@ double WebMediaPlayerMS::CurrentTime() const {
   return 0.0;
 }
 
-bool WebMediaPlayerMS::IsEnded() const {
-  // MediaStreams never end.
-  return false;
-}
-
 WebMediaPlayer::NetworkState WebMediaPlayerMS::GetNetworkState() const {
   DVLOG(2) << __func__ << ", state:" << network_state_;
   DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
...
@@ -46,7 +46,6 @@ class EmptyWebMediaPlayer : public WebMediaPlayer {
   bool Seeking() const override { return false; }
   double Duration() const override { return 0.0; }
   double CurrentTime() const override { return 0.0; }
-  bool IsEnded() const override { return false; }
   NetworkState GetNetworkState() const override { return kNetworkStateIdle; }
   ReadyState GetReadyState() const override { return kReadyStateHaveNothing; }
   WebString GetErrorMessage() const override;
...
@@ -17,36 +17,33 @@ async_test(function(t) {
     assert_false(video.paused);
     //Starting seek to duration by setting video.currentTime to video.duration.
     video.currentTime = video.duration;
-    testCommonAttributes({'seeking': true, 'ended': false});
+    testCommonAttributes(true);
     return watcher.wait_for("seeking");
   })).then(t.step_func(function() {
-    testCommonAttributes({'seeking': true, 'ended': false});
+    testCommonAttributes(true);
     assert_false(video.paused);
     return watcher.wait_for("timeupdate");
   })).then(t.step_func(function() {
-    // Exactly when 'video.ended' will become true may vary between the
-    // start of the seek until the pause for ended.
-    testCommonAttributes({'seeking': false});
+    testCommonAttributes(false);
     return watcher.wait_for("seeked");
   })).then(t.step_func(function() {
-    testCommonAttributes({'seeking': false});
+    testCommonAttributes(false);
     return watcher.wait_for("timeupdate");
   })).then(t.step_func(function() {
-    testCommonAttributes({'seeking': false});
+    testCommonAttributes(false);
     return watcher.wait_for("pause");
   })).then(t.step_func(function() {
-    assert_true(video.paused, "should be paused after seeking");
-    testCommonAttributes({'seeking': false, 'ended': true});
+    assert_true(video.paused);
+    testCommonAttributes(false);
     return watcher.wait_for("ended");
   })).then(t.step_func_done(function() {
-    testCommonAttributes({'seeking': false, 'ended': true});
-    assert_true(video.paused, "should be paused upon ended");
+    testCommonAttributes(false);
+    assert_true(video.paused);
   }));
 
-  function testCommonAttributes(testExpectations) {
-    assert_equals(video.seeking, testExpectations.seeking);
-    if ('ended' in testExpectations)
-      assert_equals(video.ended, testExpectations.ended);
+  function testCommonAttributes(seekingExpected) {
+    assert_equals(video.seeking, seekingExpected);
+    assert_true(video.ended);
     assert_equals(video.currentTime, video.duration);
   }
...