Commit b4a829e9 authored by Mina Almasry, committed by Commit Bot

[Chromecast] Add test for Starting in Paused state

Bug: b/110736115
Test: Run on device
Change-Id: I3679b50e77dc6410a26dc8d1d44acb71d589e23f
Reviewed-on: https://chromium-review.googlesource.com/1115868
Commit-Queue: Mina Almasry <almasrymina@chromium.org>
Reviewed-by: Kenneth MacKay <kmackay@chromium.org>
Cr-Commit-Position: refs/heads/master@{#572344}
parent 5ef15c7b
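
In this test fixture, "starting in the paused state" is expressed by scheduling a pause whose delay is zero: MonitorLoop() then pauses playback as soon as media time reaches 0, and OnPauseCompleted() verifies that the reported media, audio, and video times stay within kPausePtsSlackMs of the pause point. A minimal sketch of that flow, using the helpers as they appear in the added test below (this is an illustration of the pattern, not additional test code; AddPause(delay, length) is assumed to append a PauseInfo to pause_pattern_):

    // Sketch: schedule a pause at media time 0 so playback starts paused.
    set_sync_type(MediaPipelineDeviceParams::kModeSyncPts);
    AddPause(base::TimeDelta() /* pause immediately */,
             base::TimeDelta::FromSeconds(1) /* stay paused for 1 second */);
    ConfigureForFile("bear.mp4");
    Start();  // playback begins and is paused as soon as media time hits 0
    base::RunLoop().Run();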
@@ -271,7 +271,7 @@ class AudioVideoPipelineDeviceTest : public testing::Test {
   // Pause settings
   std::vector<PauseInfo> pause_pattern_;
-  int pause_pattern_idx_;
+  size_t pause_pattern_idx_;

   DISALLOW_COPY_AND_ASSIGN(AudioVideoPipelineDeviceTest);
 };
@@ -828,7 +828,7 @@ void AudioVideoPipelineDeviceTest::MonitorLoop() {
 #if defined(ENABLE_VIDEO_WITH_MIXED_AUDIO)
   // Do AV sync checks.
   MediaPipelineBackendForMixer* backend_for_mixer =
-      reinterpret_cast<MediaPipelineBackendForMixer*>(backend_.get());
+      static_cast<MediaPipelineBackendForMixer*>(backend_.get());
   DCHECK(backend_for_mixer);

   int64_t playback_start_time =
@@ -842,7 +842,7 @@ void AudioVideoPipelineDeviceTest::MonitorLoop() {
     EXPECT_TRUE(
         backend_for_mixer->video_decoder()->GetCurrentPts(&timestamp, &vpts))
         << "Getting VPTS failed at current time="
-        << " current time=" << backend_for_mixer->MonotonicClockNow()
+        << backend_for_mixer->MonotonicClockNow()
         << " playback should have started at=" << playback_start_time;

     // Check video started at the correct time.
@@ -856,7 +856,7 @@ void AudioVideoPipelineDeviceTest::MonitorLoop() {
   }
 #endif

-  if (!pause_pattern_.empty() &&
+  if (!pause_pattern_.empty() && pause_pattern_idx_ < pause_pattern_.size() &&
       pause_pattern_[pause_pattern_idx_].delay >= base::TimeDelta() &&
       media_time >= pause_time_ + pause_pattern_[pause_pattern_idx_].delay) {
     // Do Pause
@@ -893,8 +893,42 @@ void AudioVideoPipelineDeviceTest::OnPauseCompleted() {
   EXPECT_LT(media_time,
             pause_time_ + base::TimeDelta::FromMilliseconds(kPausePtsSlackMs));

+#if defined(ENABLE_VIDEO_WITH_MIXED_AUDIO)
+  // Do AV sync checks.
+  MediaPipelineBackendForMixer* backend_for_mixer =
+      static_cast<MediaPipelineBackendForMixer*>(backend_.get());
+  DCHECK(backend_for_mixer);
+
+  int64_t playback_start_time =
+      backend_for_mixer->GetPlaybackStartTimeForTesting();
+
+  if (backend_for_mixer->audio_decoder() &&
+      backend_for_mixer->video_decoder() &&
+      backend_for_mixer->MonotonicClockNow() > playback_start_time + 50000) {
+    // Check the audio time.
+    base::TimeDelta audio_time = base::TimeDelta::FromMicroseconds(
+        backend_for_mixer->audio_decoder()->GetCurrentPts());
+    EXPECT_LT(audio_time, pause_time_ + base::TimeDelta::FromMilliseconds(
+                              kPausePtsSlackMs));
+
+    // Check the video time.
+    int64_t timestamp = 0;
+    int64_t pts = 0;
+    EXPECT_TRUE(
+        backend_for_mixer->video_decoder()->GetCurrentPts(&timestamp, &pts))
+        << "Getting VPTS failed at current time="
+        << backend_for_mixer->MonotonicClockNow()
+        << " playback should have started at=" << playback_start_time;
+
+    base::TimeDelta video_time = base::TimeDelta::FromMicroseconds(pts);
+    EXPECT_LT(video_time, pause_time_ + base::TimeDelta::FromMilliseconds(
+                              kPausePtsSlackMs));
+  }
+#endif
+
   pause_time_ = media_time;
-  pause_pattern_idx_ = (pause_pattern_idx_ + 1) % pause_pattern_.size();
+  ++pause_pattern_idx_;

   VLOG(2) << "Pause complete, restarting media clock";
   RunPlaybackChecks();
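
Taken together, the bounds check added to MonitorLoop() and the switch from a modulo wrap to a plain increment mean the pause pattern is now applied once rather than cyclically: after the last scheduled pause completes, pause_pattern_idx_ moves past the end of pause_pattern_ and the new guard simply stops scheduling further pauses. A small standalone sketch of the non-wrapping guard, with simplified stand-in types rather than the Chromium code itself:

    // Simplified stand-in for the test's PauseInfo/pause_pattern_ bookkeeping.
    #include <cstddef>
    #include <vector>

    struct PauseInfo {
      int delay_ms;   // wait this long after the previous pause
      int length_ms;  // how long to stay paused
    };

    // Returns true if the next pause in the (non-cyclic) pattern is due.
    bool ShouldPauseNow(const std::vector<PauseInfo>& pattern, size_t idx,
                        int media_time_ms, int last_pause_ms) {
      return !pattern.empty() && idx < pattern.size() &&
             media_time_ms >= last_pause_ms + pattern[idx].delay_ms;
    }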
@@ -1206,5 +1240,14 @@ TEST_F(AudioVideoPipelineDeviceTest, Mp4Playback) {
   base::RunLoop().Run();
 }

+TEST_F(AudioVideoPipelineDeviceTest, Mp4PlaybackStartsPaused) {
+  set_sync_type(MediaPipelineDeviceParams::kModeSyncPts);
+  AddPause(base::TimeDelta(), base::TimeDelta::FromSeconds(1));
+
+  ConfigureForFile("bear.mp4");
+  Start();
+  base::RunLoop().Run();
+}
+
 }  // namespace media
 }  // namespace chromecast