Commit ed62626f authored by Markus Handell, committed by Commit Bot

MediaStreamRemoteVideoSource: implement encoded output.

This change implements encoded output support for
MediaStreamRemoteVideoSource.

Change-Id: I645c97533e6d8c37c49450abc71c8916772d083f
Bug: 1013590
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1935115
Commit-Queue: Markus Handell <handellm@google.com>
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Cr-Commit-Position: refs/heads/master@{#720925}
parent 12db8dbd
...@@ -23,7 +23,7 @@ ...@@ -23,7 +23,7 @@
#include "third_party/blink/renderer/platform/wtf/functional.h" #include "third_party/blink/renderer/platform/wtf/functional.h"
#include "third_party/blink/renderer/platform/wtf/thread_safe_ref_counted.h" #include "third_party/blink/renderer/platform/wtf/thread_safe_ref_counted.h"
#include "third_party/webrtc/api/video/i420_buffer.h" #include "third_party/webrtc/api/video/i420_buffer.h"
#include "third_party/webrtc/api/video/video_sink_interface.h" #include "third_party/webrtc/api/video/recordable_encoded_frame.h"
#include "third_party/webrtc/rtc_base/time_utils.h" // for TimeMicros #include "third_party/webrtc/rtc_base/time_utils.h" // for TimeMicros
namespace WTF { namespace WTF {
...@@ -43,15 +43,68 @@ struct CrossThreadCopier<scoped_refptr<webrtc::VideoFrameBuffer>> ...@@ -43,15 +43,68 @@ struct CrossThreadCopier<scoped_refptr<webrtc::VideoFrameBuffer>>
namespace blink { namespace blink {
namespace {
// Adapts a webrtc::RecordableEncodedFrame to Blink's EncodedVideoFrame
// interface. All metadata is copied at construction time; only a reference to
// the (ref-counted) encoded payload buffer is retained, so the wrapper can
// safely outlive the source frame.
class WebRtcEncodedVideoFrame : public EncodedVideoFrame {
 public:
  explicit WebRtcEncodedVideoFrame(const webrtc::RecordableEncodedFrame& frame)
      : buffer_(frame.encoded_buffer()),
        codec_(FromWebRtcVideoCodec(frame.codec())),
        is_key_frame_(frame.is_key_frame()),
        resolution_(frame.resolution().width, frame.resolution().height) {
    if (frame.color_space()) {
      color_space_ = WebRtcToMediaVideoColorSpace(*frame.color_space());
    }
  }

  base::span<const uint8_t> Data() const override {
    // encoded_buffer() may legally return null (e.g. test doubles such as
    // TestEncodedVideoFrame); return an empty span instead of dereferencing
    // a null buffer.
    if (!buffer_)
      return {};
    return base::make_span(buffer_->data(), buffer_->size());
  }

  media::VideoCodec Codec() const override { return codec_; }

  bool IsKeyFrame() const override { return is_key_frame_; }

  base::Optional<media::VideoColorSpace> ColorSpace() const override {
    return color_space_;
  }

  gfx::Size Resolution() const override { return resolution_; }

 private:
  // Maps the webrtc codec enum onto media::VideoCodec; anything not handled
  // explicitly is reported as unknown.
  static media::VideoCodec FromWebRtcVideoCodec(webrtc::VideoCodecType codec) {
    switch (codec) {
      case webrtc::kVideoCodecVP8:
        return media::kCodecVP8;
      case webrtc::kVideoCodecVP9:
        return media::kCodecVP9;
      case webrtc::kVideoCodecH264:
        return media::kCodecH264;
      default:
        return media::kUnknownVideoCodec;
    }
  }

  rtc::scoped_refptr<const webrtc::EncodedImageBufferInterface> buffer_;
  media::VideoCodec codec_;
  bool is_key_frame_;
  // Unset when the source frame carried no color space information.
  base::Optional<media::VideoColorSpace> color_space_;
  gfx::Size resolution_;
};
} // namespace
// Internal class used for receiving frames from the webrtc track on a // Internal class used for receiving frames from the webrtc track on a
// libjingle thread and forward it to the IO-thread. // libjingle thread and forward it to the IO-thread.
class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate
: public WTF::ThreadSafeRefCounted<RemoteVideoSourceDelegate>, : public WTF::ThreadSafeRefCounted<RemoteVideoSourceDelegate>,
public rtc::VideoSinkInterface<webrtc::VideoFrame> { public rtc::VideoSinkInterface<webrtc::VideoFrame>,
public rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame> {
public: public:
RemoteVideoSourceDelegate( RemoteVideoSourceDelegate(
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner, scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
const VideoCaptureDeliverFrameCB& new_frame_callback); VideoCaptureDeliverFrameCB new_frame_callback,
EncodedVideoFrameCB encoded_frame_callback);
protected: protected:
friend class WTF::ThreadSafeRefCounted<RemoteVideoSourceDelegate>; friend class WTF::ThreadSafeRefCounted<RemoteVideoSourceDelegate>;
...@@ -62,33 +115,54 @@ class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate ...@@ -62,33 +115,54 @@ class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate
// thread. // thread.
void OnFrame(const webrtc::VideoFrame& frame) override; void OnFrame(const webrtc::VideoFrame& frame) override;
// VideoSinkInterface<webrtc::RecordableEncodedFrame>
void OnFrame(const webrtc::RecordableEncodedFrame& frame) override;
void DoRenderFrameOnIOThread(scoped_refptr<media::VideoFrame> video_frame); void DoRenderFrameOnIOThread(scoped_refptr<media::VideoFrame> video_frame);
private: private:
void OnEncodedVideoFrameOnIO(scoped_refptr<EncodedVideoFrame> frame,
base::TimeTicks estimated_capture_time);
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_; scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
// |frame_callback_| is accessed on the IO thread. // |frame_callback_| is accessed on the IO thread.
VideoCaptureDeliverFrameCB frame_callback_; VideoCaptureDeliverFrameCB frame_callback_;
// |encoded_frame_callback_| is accessed on the IO thread.
EncodedVideoFrameCB encoded_frame_callback_;
// Timestamp of the first received frame. // Timestamp of the first received frame.
base::TimeDelta start_timestamp_; base::TimeDelta start_timestamp_;
// WebRTC Chromium timestamp diff // WebRTC Chromium timestamp diff
const base::TimeDelta time_diff_; const base::TimeDelta time_diff_;
// Timestamp of the first received encoded frame.
base::TimeDelta start_timestamp_encoded_;
// WebRTC Chromium timestamp diff
const base::TimeDelta time_diff_encoded_;
}; };
MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate:: MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::
RemoteVideoSourceDelegate( RemoteVideoSourceDelegate(
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner, scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
const VideoCaptureDeliverFrameCB& new_frame_callback) VideoCaptureDeliverFrameCB new_frame_callback,
EncodedVideoFrameCB encoded_frame_callback)
: io_task_runner_(io_task_runner), : io_task_runner_(io_task_runner),
frame_callback_(new_frame_callback), frame_callback_(std::move(new_frame_callback)),
encoded_frame_callback_(std::move(encoded_frame_callback)),
start_timestamp_(media::kNoTimestamp), start_timestamp_(media::kNoTimestamp),
// TODO(qiangchen): There can be two differences between clocks: 1) // TODO(qiangchen): There can be two differences between clocks: 1)
// the offset, 2) the rate (i.e., one clock runs faster than the other). // the offset, 2) the rate (i.e., one clock runs faster than the other).
// See http://crbug/516700 // See http://crbug/516700
time_diff_(base::TimeTicks::Now() - base::TimeTicks() - time_diff_(base::TimeTicks::Now() - base::TimeTicks() -
base::TimeDelta::FromMicroseconds(rtc::TimeMicros())) {} base::TimeDelta::FromMicroseconds(rtc::TimeMicros())),
start_timestamp_encoded_(media::kNoTimestamp),
time_diff_encoded_(base::TimeTicks::Now() - base::TimeTicks() -
base::TimeDelta::FromMicroseconds(rtc::TimeMicros())) {
}
MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate:: MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::
~RemoteVideoSourceDelegate() {} ~RemoteVideoSourceDelegate() {}
...@@ -230,6 +304,34 @@ void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate:: ...@@ -230,6 +304,34 @@ void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::
frame_callback_.Run(std::move(video_frame), base::TimeTicks()); frame_callback_.Run(std::move(video_frame), base::TimeTicks());
} }
// Receives an encoded frame on a webrtc thread and forwards it to the IO
// thread. A render time of 0 us is treated as a "render immediately"
// sentinel: the current time is used as the capture estimate. Otherwise the
// webrtc render timestamp is translated into the Chromium clock domain by
// adding |time_diff_encoded_| (the clock offset sampled at construction).
void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame(
    const webrtc::RecordableEncodedFrame& frame) {
  const bool render_immediately = frame.render_time().us() == 0;
  const base::TimeTicks current_time = base::TimeTicks::Now();
  const base::TimeDelta incoming_timestamp =
      render_immediately
          ? current_time - base::TimeTicks()
          : base::TimeDelta::FromMicroseconds(frame.render_time().us());
  const base::TimeTicks render_time =
      render_immediately
          ? base::TimeTicks() + incoming_timestamp
          : base::TimeTicks() + incoming_timestamp + time_diff_encoded_;
  // Hop to the IO thread. The delegate is kept alive by the ref-counted
  // binding, and the frame payload/metadata by the WebRtcEncodedVideoFrame
  // wrapper created here (the webrtc frame itself is not retained).
  PostCrossThreadTask(
      *io_task_runner_, FROM_HERE,
      CrossThreadBindOnce(&RemoteVideoSourceDelegate::OnEncodedVideoFrameOnIO,
                          WrapRefCounted(this),
                          base::MakeRefCounted<WebRtcEncodedVideoFrame>(frame),
                          render_time));
}
// Runs on the IO thread: delivers the wrapped encoded frame to the
// encoded-frame callback registered at construction.
void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::
    OnEncodedVideoFrameOnIO(scoped_refptr<EncodedVideoFrame> frame,
                            base::TimeTicks estimated_capture_time) {
  DCHECK(io_task_runner_->BelongsToCurrentThread());
  encoded_frame_callback_.Run(std::move(frame), estimated_capture_time);
}
MediaStreamRemoteVideoSource::MediaStreamRemoteVideoSource( MediaStreamRemoteVideoSource::MediaStreamRemoteVideoSource(
std::unique_ptr<TrackObserver> observer) std::unique_ptr<TrackObserver> observer)
: observer_(std::move(observer)) { : observer_(std::move(observer)) {
...@@ -251,11 +353,12 @@ void MediaStreamRemoteVideoSource::OnSourceTerminated() { ...@@ -251,11 +353,12 @@ void MediaStreamRemoteVideoSource::OnSourceTerminated() {
void MediaStreamRemoteVideoSource::StartSourceImpl( void MediaStreamRemoteVideoSource::StartSourceImpl(
VideoCaptureDeliverFrameCB frame_callback, VideoCaptureDeliverFrameCB frame_callback,
EncodedVideoFrameCB /*encoded_frame_callback*/) { EncodedVideoFrameCB encoded_frame_callback) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_); DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(!delegate_.get()); DCHECK(!delegate_.get());
delegate_ = base::MakeRefCounted<RemoteVideoSourceDelegate>( delegate_ = base::MakeRefCounted<RemoteVideoSourceDelegate>(
io_task_runner(), std::move(frame_callback)); io_task_runner(), std::move(frame_callback),
std::move(encoded_frame_callback));
scoped_refptr<webrtc::VideoTrackInterface> video_track( scoped_refptr<webrtc::VideoTrackInterface> video_track(
static_cast<webrtc::VideoTrackInterface*>(observer_->track().get())); static_cast<webrtc::VideoTrackInterface*>(observer_->track().get()));
video_track->AddOrUpdateSink(delegate_.get(), rtc::VideoSinkWants()); video_track->AddOrUpdateSink(delegate_.get(), rtc::VideoSinkWants());
...@@ -283,6 +386,11 @@ MediaStreamRemoteVideoSource::SinkInterfaceForTesting() { ...@@ -283,6 +386,11 @@ MediaStreamRemoteVideoSource::SinkInterfaceForTesting() {
return delegate_.get(); return delegate_.get();
} }
// Test-only accessor exposing the delegate as an encoded-frame sink, so tests
// can inject webrtc::RecordableEncodedFrame instances directly.
rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>*
MediaStreamRemoteVideoSource::EncodedSinkInterfaceForTesting() {
  return delegate_.get();
}
void MediaStreamRemoteVideoSource::OnChanged( void MediaStreamRemoteVideoSource::OnChanged(
webrtc::MediaStreamTrackInterface::TrackState state) { webrtc::MediaStreamTrackInterface::TrackState state) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_); DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
...@@ -299,4 +407,37 @@ void MediaStreamRemoteVideoSource::OnChanged( ...@@ -299,4 +407,37 @@ void MediaStreamRemoteVideoSource::OnChanged(
} }
} }
bool MediaStreamRemoteVideoSource::SupportsEncodedOutput() const {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
scoped_refptr<webrtc::VideoTrackInterface> video_track(
static_cast<webrtc::VideoTrackInterface*>(observer_->track().get()));
return video_track->GetSource()->SupportsEncodedOutput();
}
void MediaStreamRemoteVideoSource::RequestRefreshFrame() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
scoped_refptr<webrtc::VideoTrackInterface> video_track(
static_cast<webrtc::VideoTrackInterface*>(observer_->track().get()));
if (video_track->GetSource()) {
video_track->GetSource()->GenerateKeyFrame();
}
}
// Registers the delegate as an encoded sink on the webrtc source when the
// first encoded consumer attaches.
void MediaStreamRemoteVideoSource::OnEncodedSinkEnabled() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // Mirror OnEncodedSinkDisabled(): the webrtc track may already be gone
  // (e.g. source terminated), in which case there is nothing to attach to.
  if (!observer_->track()) {
    return;
  }
  scoped_refptr<webrtc::VideoTrackInterface> video_track(
      static_cast<webrtc::VideoTrackInterface*>(observer_->track().get()));
  video_track->GetSource()->AddEncodedSink(delegate_.get());
}
void MediaStreamRemoteVideoSource::OnEncodedSinkDisabled() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
if (!observer_->track()) {
return;
}
scoped_refptr<webrtc::VideoTrackInterface> video_track(
static_cast<webrtc::VideoTrackInterface*>(observer_->track().get()));
video_track->GetSource()->RemoveEncodedSink(delegate_.get());
}
} // namespace blink } // namespace blink
...@@ -34,16 +34,23 @@ class MODULES_EXPORT MediaStreamRemoteVideoSource ...@@ -34,16 +34,23 @@ class MODULES_EXPORT MediaStreamRemoteVideoSource
// the webrtc::MediaStreamTrackInterface instance held by |observer_|. // the webrtc::MediaStreamTrackInterface instance held by |observer_|.
void OnSourceTerminated(); void OnSourceTerminated();
// MediaStreamVideoSource overrides.
bool SupportsEncodedOutput() const override;
void RequestRefreshFrame() override;
protected: protected:
// Implements MediaStreamVideoSource. // Implements MediaStreamVideoSource.
void StartSourceImpl(VideoCaptureDeliverFrameCB frame_callback, void StartSourceImpl(VideoCaptureDeliverFrameCB frame_callback,
EncodedVideoFrameCB encoded_frame_callback) override; EncodedVideoFrameCB encoded_frame_callback) override;
void StopSourceImpl() override; void StopSourceImpl() override;
void OnEncodedSinkEnabled() override;
void OnEncodedSinkDisabled() override;
// Used by tests to test that a frame can be received and that the // Used by tests to test that a frame can be received and that the
// MediaStreamRemoteVideoSource behaves as expected. // MediaStreamRemoteVideoSource behaves as expected.
rtc::VideoSinkInterface<webrtc::VideoFrame>* SinkInterfaceForTesting(); rtc::VideoSinkInterface<webrtc::VideoFrame>* SinkInterfaceForTesting();
rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>*
EncodedSinkInterfaceForTesting();
private: private:
void OnChanged(webrtc::MediaStreamTrackInterface::TrackState state); void OnChanged(webrtc::MediaStreamTrackInterface::TrackState state);
......
...@@ -15,6 +15,7 @@ ...@@ -15,6 +15,7 @@
#include "testing/gtest/include/gtest/gtest.h" #include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/mojom/mediastream/media_stream.mojom-blink.h" #include "third_party/blink/public/mojom/mediastream/media_stream.mojom-blink.h"
#include "third_party/blink/public/platform/scheduler/test/renderer_scheduler_test_support.h" #include "third_party/blink/public/platform/scheduler/test/renderer_scheduler_test_support.h"
#include "third_party/blink/public/web/modules/mediastream/media_stream_video_source.h"
#include "third_party/blink/public/web/modules/mediastream/media_stream_video_track.h" #include "third_party/blink/public/web/modules/mediastream/media_stream_video_track.h"
#include "third_party/blink/public/web/web_heap.h" #include "third_party/blink/public/web/web_heap.h"
#include "third_party/blink/renderer/modules/mediastream/mock_media_stream_video_sink.h" #include "third_party/blink/renderer/modules/mediastream/mock_media_stream_video_sink.h"
...@@ -39,6 +40,7 @@ class MediaStreamRemoteVideoSourceUnderTest ...@@ -39,6 +40,7 @@ class MediaStreamRemoteVideoSourceUnderTest
explicit MediaStreamRemoteVideoSourceUnderTest( explicit MediaStreamRemoteVideoSourceUnderTest(
std::unique_ptr<blink::TrackObserver> observer) std::unique_ptr<blink::TrackObserver> observer)
: MediaStreamRemoteVideoSource(std::move(observer)) {} : MediaStreamRemoteVideoSource(std::move(observer)) {}
using MediaStreamRemoteVideoSource::EncodedSinkInterfaceForTesting;
using MediaStreamRemoteVideoSource::SinkInterfaceForTesting; using MediaStreamRemoteVideoSource::SinkInterfaceForTesting;
using MediaStreamRemoteVideoSource::StartSourceImpl; using MediaStreamRemoteVideoSource::StartSourceImpl;
}; };
...@@ -47,7 +49,10 @@ class MediaStreamRemoteVideoSourceTest : public ::testing::Test { ...@@ -47,7 +49,10 @@ class MediaStreamRemoteVideoSourceTest : public ::testing::Test {
public: public:
MediaStreamRemoteVideoSourceTest() MediaStreamRemoteVideoSourceTest()
: mock_factory_(new blink::MockPeerConnectionDependencyFactory()), : mock_factory_(new blink::MockPeerConnectionDependencyFactory()),
webrtc_video_track_(blink::MockWebRtcVideoTrack::Create("test")), webrtc_video_source_(blink::MockWebRtcVideoTrackSource::Create(
/*supports_encoded_output=*/true)),
webrtc_video_track_(
blink::MockWebRtcVideoTrack::Create("test", webrtc_video_source_)),
remote_source_(nullptr), remote_source_(nullptr),
number_of_successful_track_starts_(0), number_of_successful_track_starts_(0),
number_of_failed_track_starts_(0) {} number_of_failed_track_starts_(0) {}
...@@ -147,6 +152,7 @@ class MediaStreamRemoteVideoSourceTest : public ::testing::Test { ...@@ -147,6 +152,7 @@ class MediaStreamRemoteVideoSourceTest : public ::testing::Test {
ScopedTestingPlatformSupport<IOTaskRunnerTestingPlatformSupport> platform_; ScopedTestingPlatformSupport<IOTaskRunnerTestingPlatformSupport> platform_;
std::unique_ptr<blink::MockPeerConnectionDependencyFactory> mock_factory_; std::unique_ptr<blink::MockPeerConnectionDependencyFactory> mock_factory_;
scoped_refptr<webrtc::VideoTrackSourceInterface> webrtc_video_source_;
scoped_refptr<webrtc::VideoTrackInterface> webrtc_video_track_; scoped_refptr<webrtc::VideoTrackInterface> webrtc_video_track_;
// |remote_source_| is owned by |web_source_|. // |remote_source_| is owned by |web_source_|.
MediaStreamRemoteVideoSourceUnderTest* remote_source_; MediaStreamRemoteVideoSourceUnderTest* remote_source_;
...@@ -234,4 +240,38 @@ TEST_F(MediaStreamRemoteVideoSourceTest, PreservesColorSpace) { ...@@ -234,4 +240,38 @@ TEST_F(MediaStreamRemoteVideoSourceTest, PreservesColorSpace) {
track->RemoveSink(&sink); track->RemoveSink(&sink);
} }
// Minimal webrtc::RecordableEncodedFrame stub used to drive the encoded
// frame path in tests. Note that encoded_buffer() returns nullptr, so
// consumers of this frame must tolerate a missing payload.
class TestEncodedVideoFrame : public webrtc::RecordableEncodedFrame {
 public:
  rtc::scoped_refptr<const webrtc::EncodedImageBufferInterface> encoded_buffer()
      const override {
    return nullptr;
  }

  absl::optional<webrtc::ColorSpace> color_space() const override {
    return absl::nullopt;
  }

  webrtc::VideoCodecType codec() const override {
    return webrtc::kVideoCodecVP8;
  }

  bool is_key_frame() const override { return true; }

  EncodedResolution resolution() const override {
    return EncodedResolution{0, 0};
  }

  // 0 is the "render immediately" sentinel understood by the remote source
  // delegate.
  webrtc::Timestamp render_time() const override {
    return webrtc::Timestamp::ms(0);
  }
};
// Verifies that an encoded frame injected at the webrtc sink interface is
// forwarded across the IO thread to an attached encoded sink.
TEST_F(MediaStreamRemoteVideoSourceTest, ForwardsEncodedVideoFrames) {
  std::unique_ptr<blink::MediaStreamVideoTrack> track(CreateTrack());
  blink::MockMediaStreamVideoSink sink;
  track->AddEncodedSink(&sink, sink.GetDeliverEncodedVideoFrameCB());
  base::RunLoop run_loop;
  base::RepeatingClosure quit_closure = run_loop.QuitClosure();
  // Delivery happens asynchronously on the IO thread; quit the run loop when
  // the sink observes the frame.
  EXPECT_CALL(sink, OnEncodedVideoFrame())
      .WillOnce(RunClosure(std::move(quit_closure)));
  source()->EncodedSinkInterfaceForTesting()->OnFrame(TestEncodedVideoFrame());
  run_loop.Run();
  track->RemoveEncodedSink(&sink);
}
} // namespace blink } // namespace blink
...@@ -192,8 +192,9 @@ MockWebRtcVideoTrack::MockWebRtcVideoTrack( ...@@ -192,8 +192,9 @@ MockWebRtcVideoTrack::MockWebRtcVideoTrack(
MockWebRtcVideoTrack::~MockWebRtcVideoTrack() {} MockWebRtcVideoTrack::~MockWebRtcVideoTrack() {}
scoped_refptr<MockWebRtcVideoTrack> MockWebRtcVideoTrack::Create( scoped_refptr<MockWebRtcVideoTrack> MockWebRtcVideoTrack::Create(
const std::string& id) { const std::string& id,
return new rtc::RefCountedObject<MockWebRtcVideoTrack>(id, nullptr); scoped_refptr<webrtc::VideoTrackSourceInterface> source) {
return new rtc::RefCountedObject<MockWebRtcVideoTrack>(id, source.get());
} }
void MockWebRtcVideoTrack::AddOrUpdateSink( void MockWebRtcVideoTrack::AddOrUpdateSink(
...@@ -251,6 +252,62 @@ void MockWebRtcVideoTrack::SetEnded() { ...@@ -251,6 +252,62 @@ void MockWebRtcVideoTrack::SetEnded() {
o->OnChanged(); o->OnChanged();
} }
// static
scoped_refptr<MockWebRtcVideoTrackSource> MockWebRtcVideoTrackSource::Create(
    bool supports_encoded_output) {
  return new rtc::RefCountedObject<MockWebRtcVideoTrackSource>(
      supports_encoded_output);
}

MockWebRtcVideoTrackSource::MockWebRtcVideoTrackSource(
    bool supports_encoded_output)
    : supports_encoded_output_(supports_encoded_output) {}

bool MockWebRtcVideoTrackSource::is_screencast() const {
  return false;
}

absl::optional<bool> MockWebRtcVideoTrackSource::needs_denoising() const {
  return absl::nullopt;
}

// Stats are not simulated by this mock; always reports failure.
bool MockWebRtcVideoTrackSource::GetStats(Stats* stats) {
  return false;
}

bool MockWebRtcVideoTrackSource::SupportsEncodedOutput() const {
  return supports_encoded_output_;
}

// Key frame generation is a no-op in the mock.
void MockWebRtcVideoTrackSource::GenerateKeyFrame() {}

// Encoded sink registration is accepted but ignored; tests inject frames
// directly at the delegate instead.
void MockWebRtcVideoTrackSource::AddEncodedSink(
    rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) {}

void MockWebRtcVideoTrackSource::RemoveEncodedSink(
    rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) {}

void MockWebRtcVideoTrackSource::RegisterObserver(
    webrtc::ObserverInterface* observer) {}

void MockWebRtcVideoTrackSource::UnregisterObserver(
    webrtc::ObserverInterface* observer) {}

webrtc::MediaSourceInterface::SourceState MockWebRtcVideoTrackSource::state()
    const {
  return webrtc::MediaSourceInterface::kLive;
}

// NOTE(review): remote() reuses |supports_encoded_output_| instead of a
// dedicated flag. This looks deliberate (encoded output is only exercised
// with remote sources in these tests), but confirm the coupling is intended
// rather than introducing a separate |remote_| parameter.
bool MockWebRtcVideoTrackSource::remote() const {
  return supports_encoded_output_;
}

void MockWebRtcVideoTrackSource::AddOrUpdateSink(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
    const rtc::VideoSinkWants& wants) {}

void MockWebRtcVideoTrackSource::RemoveSink(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {}
class MockSessionDescription : public SessionDescriptionInterface { class MockSessionDescription : public SessionDescriptionInterface {
public: public:
MockSessionDescription(const std::string& type, const std::string& sdp) MockSessionDescription(const std::string& type, const std::string& sdp)
......
...@@ -65,9 +65,37 @@ class MockWebRtcAudioTrack : public webrtc::AudioTrackInterface { ...@@ -65,9 +65,37 @@ class MockWebRtcAudioTrack : public webrtc::AudioTrackInterface {
ObserverSet observers_; ObserverSet observers_;
}; };
class MockWebRtcVideoTrackSource : public webrtc::VideoTrackSourceInterface {
public:
static scoped_refptr<MockWebRtcVideoTrackSource> Create(
bool supports_encoded_output);
MockWebRtcVideoTrackSource(bool supports_encoded_output);
void RegisterObserver(webrtc::ObserverInterface* observer) override;
void UnregisterObserver(webrtc::ObserverInterface* observer) override;
SourceState state() const override;
bool remote() const override;
void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
bool is_screencast() const override;
absl::optional<bool> needs_denoising() const override;
bool GetStats(Stats* stats) override;
bool SupportsEncodedOutput() const override;
void GenerateKeyFrame() override;
void AddEncodedSink(
rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override;
void RemoveEncodedSink(
rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override;
private:
bool supports_encoded_output_;
};
class MockWebRtcVideoTrack : public webrtc::VideoTrackInterface { class MockWebRtcVideoTrack : public webrtc::VideoTrackInterface {
public: public:
static scoped_refptr<MockWebRtcVideoTrack> Create(const std::string& id); static scoped_refptr<MockWebRtcVideoTrack> Create(
const std::string& id,
scoped_refptr<webrtc::VideoTrackSourceInterface> source = nullptr);
MockWebRtcVideoTrack(const std::string& id, MockWebRtcVideoTrack(const std::string& id,
webrtc::VideoTrackSourceInterface* source); webrtc::VideoTrackSourceInterface* source);
void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink, void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment