Commit 16ce5566 authored by perkj@chromium.org's avatar perkj@chromium.org

MediaStream VideoTrack implementation.

This implementation only uses Chrome video classes for local rendering.
A libjingle representation is created once it is connected to a PeerConnection.


BUG=334241

Review URL: https://codereview.chromium.org/155853002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@255846 0039d316-1c4b-4281-b951-d872f2087c98
parent 5526e7cd
......@@ -559,6 +559,7 @@
'renderer/media/cache_util_unittest.cc',
'renderer/media/crypto/key_systems_unittest.cc',
'renderer/media/media_stream_video_source_unittest.cc',
'renderer/media/media_stream_video_track_unittest.cc',
'renderer/media/mock_media_stream_video_source.cc',
'renderer/media/mock_media_stream_video_source.h',
'renderer/media/test_response_generator.cc',
......
......@@ -48,9 +48,4 @@ void MediaStreamAudioSource::AddTrack(
callback.Run(this, result);
}
void MediaStreamAudioSource::RemoveTrack(
const blink::WebMediaStreamTrack& track) {
NOTIMPLEMENTED();
}
} // namespace content
......@@ -24,10 +24,9 @@ class CONTENT_EXPORT MediaStreamAudioSource
MediaStreamAudioSource();
virtual ~MediaStreamAudioSource();
virtual void AddTrack(const blink::WebMediaStreamTrack& track,
const blink::WebMediaConstraints& constraints,
const ConstraintsCallback& callback) OVERRIDE;
virtual void RemoveTrack(const blink::WebMediaStreamTrack& track) OVERRIDE;
void AddTrack(const blink::WebMediaStreamTrack& track,
const blink::WebMediaConstraints& constraints,
const ConstraintsCallback& callback);
void SetLocalAudioSource(webrtc::AudioSourceInterface* source) {
local_audio_source_ = source;
......
......@@ -14,7 +14,9 @@
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_dependency_factory.h"
#include "content/renderer/media/media_stream_source.h"
#include "content/renderer/media/media_stream_video_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebMediaStreamCenterClient.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
......@@ -46,17 +48,19 @@ void CreateNativeVideoMediaStreamTrack(
DCHECK(track.extraData() == NULL);
blink::WebMediaStreamSource source = track.source();
DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo);
if (!source.extraData()) {
MediaStreamVideoSource* native_source =
MediaStreamVideoSource::GetVideoSource(source);
if (!native_source) {
// TODO(perkj): Implement support for sources from
// remote MediaStreams.
NOTIMPLEMENTED();
return;
}
MediaStreamTrack* native_track = new MediaStreamVideoTrack(factory);
native_track->SetEnabled(track.isEnabled());
blink::WebMediaStreamTrack writable_track(track);
writable_track.setExtraData(native_track);
writable_track.setExtraData(
new MediaStreamVideoTrack(native_source, source.constraints(),
MediaStreamVideoSource::ConstraintsCallback(),
track.isEnabled(), factory));
}
void CreateNativeMediaStreamTrack(const blink::WebMediaStreamTrack& track,
......
......@@ -728,7 +728,7 @@ void MediaStreamDependencyFactory::AddNativeTrackToBlinkTrack(
if (track.source().type() == blink::WebMediaStreamSource::TypeVideo) {
DVLOG(1) << "AddNativeTrackToBlinkTrack() video";
track.setExtraData(new MediaStreamVideoTrack(
track.setExtraData(new WebRtcMediaStreamVideoTrack(
static_cast<webrtc::VideoTrackInterface*>(native_track)));
} else {
DVLOG(1) << "AddNativeTrackToBlinkTrack() audio";
......
......@@ -100,13 +100,17 @@ class MediaStreamDependencyFactoryTest : public ::testing::Test {
audio_track_vector[i].setExtraData(native_track);
}
blink::WebMediaConstraints constraints;
constraints.initialize();
blink::WebVector<blink::WebMediaStreamTrack> video_track_vector(
video_sources.size());
for (size_t i = 0; i < video_track_vector.size(); ++i) {
video_track_vector[i].initialize(video_sources[i].id(),
video_sources[i]);
video_track_vector[i].setExtraData(
new MediaStreamVideoTrack(dependency_factory_.get()));
MediaStreamVideoSource* native_source =
MediaStreamVideoSource::GetVideoSource(video_sources[i]);
video_track_vector[i] = MediaStreamVideoTrack::CreateVideoTrack(
native_source, constraints,
MediaStreamVideoSource::ConstraintsCallback(), true,
dependency_factory_.get());
}
stream_desc.initialize("media stream", audio_track_vector,
......
......@@ -478,10 +478,9 @@ void MediaStreamImpl::CreateVideoTracks(
constraints,
request->frame,
&webkit_source);
(*webkit_tracks)[i].initialize(webkit_source);
(*webkit_tracks)[i].setExtraData(new MediaStreamVideoTrack(
dependency_factory_));
request->StartTrack((*webkit_tracks)[i], constraints);
(*webkit_tracks)[i] =
request->CreateAndStartVideoTrack(webkit_source, constraints,
dependency_factory_);
}
}
......@@ -522,7 +521,7 @@ void MediaStreamImpl::CreateAudioTracks(
request->frame,
&webkit_source);
(*webkit_tracks)[i].initialize(webkit_source);
request->StartTrack((*webkit_tracks)[i], constraints);
request->StartAudioTrack((*webkit_tracks)[i], constraints);
}
}
......@@ -834,11 +833,12 @@ MediaStreamImpl::UserMediaRequestInfo::~UserMediaRequestInfo() {
DVLOG(1) << "~UserMediaRequestInfo";
}
void MediaStreamImpl::UserMediaRequestInfo::StartTrack(
void MediaStreamImpl::UserMediaRequestInfo::StartAudioTrack(
const blink::WebMediaStreamTrack& track,
const blink::WebMediaConstraints& constraints) {
MediaStreamSource* native_source =
static_cast <MediaStreamSource*>(track.source().extraData());
DCHECK(track.source().type() == blink::WebMediaStreamSource::TypeAudio);
MediaStreamAudioSource* native_source =
static_cast <MediaStreamAudioSource*>(track.source().extraData());
DCHECK(native_source);
sources_.push_back(track.source());
......@@ -849,6 +849,24 @@ void MediaStreamImpl::UserMediaRequestInfo::StartTrack(
AsWeakPtr()));
}
blink::WebMediaStreamTrack
MediaStreamImpl::UserMediaRequestInfo::CreateAndStartVideoTrack(
const blink::WebMediaStreamSource& source,
const blink::WebMediaConstraints& constraints,
MediaStreamDependencyFactory* factory) {
DCHECK(source.type() == blink::WebMediaStreamSource::TypeVideo);
MediaStreamVideoSource* native_source =
MediaStreamVideoSource::GetVideoSource(source);
DCHECK(native_source);
sources_.push_back(source);
sources_waiting_for_callback_.push_back(native_source);
return MediaStreamVideoTrack::CreateVideoTrack(
native_source, constraints, base::Bind(
&MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted,
AsWeakPtr()),
true, factory);
}
void MediaStreamImpl::UserMediaRequestInfo::CallbackOnTracksStarted(
const ResourcesReady& callback) {
DCHECK(ready_callback_.is_null());
......
......@@ -146,8 +146,13 @@ class CONTENT_EXPORT MediaStreamImpl
blink::WebMediaStream web_stream;
blink::WebUserMediaRequest request;
void StartTrack(const blink::WebMediaStreamTrack& track,
const blink::WebMediaConstraints& constraints);
void StartAudioTrack(const blink::WebMediaStreamTrack& track,
const blink::WebMediaConstraints& constraints);
blink::WebMediaStreamTrack CreateAndStartVideoTrack(
const blink::WebMediaStreamSource& source,
const blink::WebMediaConstraints& constraints,
MediaStreamDependencyFactory* factory);
// Triggers |callback| when all sources used in this request have either
// successfully started, or a source has failed to start.
......
......@@ -30,11 +30,6 @@ class CONTENT_EXPORT MediaStreamSource
MediaStreamSource();
virtual ~MediaStreamSource();
virtual void AddTrack(const blink::WebMediaStreamTrack& track,
const blink::WebMediaConstraints& constraints,
const ConstraintsCallback& callback) = 0;
virtual void RemoveTrack(const blink::WebMediaStreamTrack& track) = 0;
// Returns device information about a source that has been created by a
// JavaScript call to GetUserMedia, e.g., a camera or microphone.
const StreamDeviceInfo& device_info() const {
......
......@@ -32,6 +32,7 @@ class CONTENT_EXPORT MediaStreamTrack
static MediaStreamTrack* GetTrack(
const blink::WebMediaStreamTrack& track);
// If a subclass overrides this method it has to call the base class.
virtual void SetEnabled(bool enabled);
virtual webrtc::AudioTrackInterface* GetAudioAdapter();
......
......@@ -11,6 +11,7 @@
#include "base/logging.h"
#include "base/strings/string_number_conversions.h"
#include "content/renderer/media/media_stream_dependency_factory.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
namespace content {
......@@ -276,6 +277,12 @@ void ReleaseOriginalFrame(
} // anonymous namespace
// static
MediaStreamVideoSource* MediaStreamVideoSource::GetVideoSource(
const blink::WebMediaStreamSource& source) {
return static_cast<MediaStreamVideoSource*>(source.extraData());
}
MediaStreamVideoSource::MediaStreamVideoSource(
MediaStreamDependencyFactory* factory)
: state_(NEW),
......@@ -288,12 +295,17 @@ MediaStreamVideoSource::~MediaStreamVideoSource() {
}
void MediaStreamVideoSource::AddTrack(
const blink::WebMediaStreamTrack& track,
MediaStreamVideoTrack* track,
const blink::WebMediaConstraints& constraints,
const ConstraintsCallback& callback) {
DCHECK(CalledOnValidThread());
requested_constraints_.push_back(RequestedConstraints(constraints,
callback));
DCHECK(std::find(tracks_.begin(), tracks_.end(),
track) == tracks_.end());
tracks_.push_back(track);
requested_constraints_.push_back(
RequestedConstraints(constraints, callback));
switch (state_) {
case NEW: {
// Tab capture and Screen capture needs the maximum requested height
......@@ -312,7 +324,7 @@ void MediaStreamVideoSource::AddTrack(
}
case STARTING:
case RETRIEVING_CAPABILITIES: {
// The |callback| will be triggered once the delegate has started or
// The |callback| will be triggered once the source has started or
// the capabilities have been retrieved.
break;
}
......@@ -324,9 +336,11 @@ void MediaStreamVideoSource::AddTrack(
}
}
void MediaStreamVideoSource::RemoveTrack(
const blink::WebMediaStreamTrack& track) {
// TODO(ronghuawu): What should be done here? Do we really need RemoveTrack?
void MediaStreamVideoSource::RemoveTrack(MediaStreamVideoTrack* video_track) {
std::vector<MediaStreamVideoTrack*>::iterator it =
std::find(tracks_.begin(), tracks_.end(), video_track);
DCHECK(it != tracks_.end());
tracks_.erase(it);
}
void MediaStreamVideoSource::InitAdapter() {
......@@ -356,6 +370,7 @@ void MediaStreamVideoSource::DoStopSource() {
DVLOG(3) << "DoStopSource()";
StopSourceImpl();
state_ = ENDED;
SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded);
}
void MediaStreamVideoSource::DeliverVideoFrame(
......@@ -395,6 +410,11 @@ void MediaStreamVideoSource::DeliverVideoFrame(
capture_adapter_) {
capture_adapter_->OnFrameCaptured(video_frame);
}
for (std::vector<MediaStreamVideoTrack*>::iterator it = tracks_.begin();
it != tracks_.end(); ++it) {
(*it)->OnVideoFrame(video_frame);
}
}
void MediaStreamVideoSource::OnSupportedFormats(
......@@ -474,8 +494,9 @@ void MediaStreamVideoSource::FinalizeAddTrack() {
callbacks.swap(requested_constraints_);
for (std::vector<RequestedConstraints>::iterator it = callbacks.begin();
it != callbacks.end(); ++it) {
bool success = state_ == STARTED &&
!FilterFormats(it->constraints, formats).empty();
!FilterFormats(it->constraints, formats).empty();
DVLOG(3) << "FinalizeAddTrack() success " << success;
if (!it->callback.is_null())
it->callback.Run(this, success);
......@@ -487,7 +508,10 @@ void MediaStreamVideoSource::SetReadyState(
if (!owner().isNull()) {
owner().setReadyState(state);
}
// TODO(perkj): Notify all registered tracks.
for (std::vector<MediaStreamVideoTrack*>::iterator it = tracks_.begin();
it != tracks_.end(); ++it) {
(*it)->OnReadyStateChanged(state);
}
}
MediaStreamVideoSource::RequestedConstraints::RequestedConstraints(
......
......@@ -25,6 +25,7 @@ class VideoFrame;
namespace content {
class MediaStreamDependencyFactory;
class MediaStreamVideoTrack;
class WebRtcVideoCapturerAdapter;
// MediaStreamVideoSource is an interface used for sending video frames to a
......@@ -49,11 +50,15 @@ class CONTENT_EXPORT MediaStreamVideoSource
explicit MediaStreamVideoSource(MediaStreamDependencyFactory* factory);
virtual ~MediaStreamVideoSource();
// Returns the MediaStreamVideoSource object owned by |source|.
static MediaStreamVideoSource* GetVideoSource(
const blink::WebMediaStreamSource& source);
// Puts |track| in the registered tracks list.
virtual void AddTrack(const blink::WebMediaStreamTrack& track,
const blink::WebMediaConstraints& constraints,
const ConstraintsCallback& callback) OVERRIDE;
virtual void RemoveTrack(const blink::WebMediaStreamTrack& track) OVERRIDE;
void AddTrack(MediaStreamVideoTrack* track,
const blink::WebMediaConstraints& constraints,
const ConstraintsCallback& callback);
void RemoveTrack(MediaStreamVideoTrack* track);
// TODO(ronghuawu): Remove webrtc::VideoSourceInterface from the public
// interface of this class.
......@@ -158,6 +163,9 @@ class CONTENT_EXPORT MediaStreamVideoSource
media::VideoCaptureFormats supported_formats_;
// Tracks that currently are receiving video frames.
std::vector<MediaStreamVideoTrack*> tracks_;
// TODO(perkj): The below classes use webrtc/libjingle types. The goal is to
// get rid of them as far as possible.
MediaStreamDependencyFactory* factory_;
......
......@@ -8,6 +8,7 @@
#include "base/strings/string_number_conversions.h"
#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream_video_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/mock_media_stream_dependency_factory.h"
#include "content/renderer/media/mock_media_stream_video_source.h"
#include "media/base/video_frame.h"
......@@ -79,18 +80,14 @@ class MediaStreamVideoSourceTest
blink::WebMediaStreamTrack CreateTrack(
const std::string& id,
const blink::WebMediaConstraints& constraints) {
blink::WebMediaStreamTrack track;
track.initialize(base::UTF8ToUTF16(id), webkit_source_);
MediaStreamVideoSource* source =
static_cast<MediaStreamVideoSource*>(track.source().extraData());
source->AddTrack(track,
constraints,
base::Bind(
&MediaStreamVideoSourceTest::OnConstraintsApplied,
base::Unretained(this)));
return track;
bool enabled = true;
MediaStreamDependencyFactory* factory = NULL;
return MediaStreamVideoTrack::CreateVideoTrack(
mock_source_, constraints,
base::Bind(
&MediaStreamVideoSourceTest::OnConstraintsApplied,
base::Unretained(this)),
enabled, factory);
}
blink::WebMediaStreamTrack CreateTrackAndStartSource(
......@@ -177,7 +174,7 @@ class MediaStreamVideoSourceTest
TEST_F(MediaStreamVideoSourceTest, AddTrackAndStartSource) {
blink::WebMediaConstraints constraints;
constraints.initialize();
blink::WebMediaStreamTrack track = CreateTrack("123", constraints);
CreateTrack("123", constraints);
mock_source()->CompleteGetSupportedFormats();
mock_source()->StartMockedSource();
EXPECT_EQ(1, NumberOfSuccessConstraintsCallbacks());
......@@ -197,18 +194,18 @@ TEST_F(MediaStreamVideoSourceTest, AddTwoTracksBeforeSourceStarts) {
TEST_F(MediaStreamVideoSourceTest, AddTrackAfterSourceStarts) {
blink::WebMediaConstraints constraints;
constraints.initialize();
blink::WebMediaStreamTrack track1 = CreateTrack("123", constraints);
CreateTrack("123", constraints);
mock_source()->CompleteGetSupportedFormats();
mock_source()->StartMockedSource();
EXPECT_EQ(1, NumberOfSuccessConstraintsCallbacks());
blink::WebMediaStreamTrack track2 = CreateTrack("123", constraints);
CreateTrack("123", constraints);
EXPECT_EQ(2, NumberOfSuccessConstraintsCallbacks());
}
TEST_F(MediaStreamVideoSourceTest, AddTrackAndFailToStartSource) {
blink::WebMediaConstraints constraints;
constraints.initialize();
blink::WebMediaStreamTrack track = CreateTrack("123", constraints);
CreateTrack("123", constraints);
mock_source()->CompleteGetSupportedFormats();
mock_source()->FailToStartMockedSource();
EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
......@@ -217,8 +214,8 @@ TEST_F(MediaStreamVideoSourceTest, AddTrackAndFailToStartSource) {
TEST_F(MediaStreamVideoSourceTest, AddTwoTracksBeforeGetSupportedFormats) {
blink::WebMediaConstraints constraints;
constraints.initialize();
blink::WebMediaStreamTrack track1 = CreateTrack("123", constraints);
blink::WebMediaStreamTrack track2 = CreateTrack("123", constraints);
CreateTrack("123", constraints);
CreateTrack("123", constraints);
mock_source()->CompleteGetSupportedFormats();
mock_source()->StartMockedSource();
EXPECT_EQ(2, NumberOfSuccessConstraintsCallbacks());
......@@ -262,7 +259,7 @@ TEST_F(MediaStreamVideoSourceTest, MandatoryAspectRatio4To3) {
CreateTrackAndStartSource(factory.CreateConstraints(), 640, 480, 30);
}
// Test that ApplyConstraints fail if the mandatory aspect ratio
// Test that AddTrack fails if the mandatory aspect ratio
// is set higher than supported.
TEST_F(MediaStreamVideoSourceTest, MandatoryAspectRatioTooHigh) {
ConstraintsFactory factory;
......@@ -309,7 +306,6 @@ TEST_F(MediaStreamVideoSourceTest, DefaultCapability) {
TEST_F(MediaStreamVideoSourceTest, InvalidMandatoryConstraint) {
ConstraintsFactory factory;
factory.AddMandatory("weird key", 640);
CreateTrack("123", factory.CreateConstraints());
mock_source()->CompleteGetSupportedFormats();
EXPECT_EQ(1, NumberOfFailedConstraintsCallbacks());
......
......@@ -5,55 +5,80 @@
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/media_stream_dependency_factory.h"
#include "content/renderer/media/media_stream_video_source.h"
#include "content/renderer/media/webrtc/webrtc_video_sink_adapter.h"
namespace content {
// Wrapper which allows to use std::find_if() when adding and removing
// sinks to/from |sinks_|.
struct SinkWrapper {
explicit SinkWrapper(MediaStreamVideoSink* sink) : sink_(sink) {}
bool operator()(
const WebRtcVideoSinkAdapter* owner) {
return owner->sink() == sink_;
}
MediaStreamVideoSink* sink_;
};
// Empty method used for keeping a reference to the original media::VideoFrame
// in RTCVideoRenderer::OnVideoFrame if a color conversion between I420 and
// YV12 is needed.
static void ReleaseOriginalFrame(
const scoped_refptr<media::VideoFrame>& frame) {
}
MediaStreamVideoTrack::MediaStreamVideoTrack(
webrtc::VideoTrackInterface* track)
: MediaStreamTrack(track, false),
factory_(NULL) {
//static
blink::WebMediaStreamTrack MediaStreamVideoTrack::CreateVideoTrack(
MediaStreamVideoSource* source,
const blink::WebMediaConstraints& constraints,
const MediaStreamVideoSource::ConstraintsCallback& callback,
bool enabled,
MediaStreamDependencyFactory* factory) {
blink::WebMediaStreamTrack track;
track.initialize(source->owner());
track.setExtraData(new MediaStreamVideoTrack(source,
constraints,
callback,
enabled,
factory));
return track;
}
// static
MediaStreamVideoTrack* MediaStreamVideoTrack::GetVideoTrack(
const blink::WebMediaStreamTrack& track) {
return static_cast<MediaStreamVideoTrack*>(track.extraData());
}
MediaStreamVideoTrack::MediaStreamVideoTrack(
MediaStreamVideoSource* source,
const blink::WebMediaConstraints& constraints,
const MediaStreamVideoSource::ConstraintsCallback& callback,
bool enabled,
MediaStreamDependencyFactory* factory)
: MediaStreamTrack(NULL, true),
enabled_(enabled),
source_(source),
factory_(factory) {
DCHECK(factory_);
// TODO(perkj): source can be NULL if this is actually a remote video track.
// Remove as soon as we only have one implementation of video tracks.
if (source)
source->AddTrack(this, constraints, callback);
}
MediaStreamVideoTrack::~MediaStreamVideoTrack() {
DCHECK(sinks_.empty());
// TODO(perkj): source can be NULL if this is actually a remote video track.
// Remove as soon as we only have one implementation of video tracks.
if (source_)
source_->RemoveTrack(this);
}
void MediaStreamVideoTrack::AddSink(MediaStreamVideoSink* sink) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(std::find_if(sinks_.begin(), sinks_.end(),
SinkWrapper(sink)) == sinks_.end());
sinks_.push_back(new WebRtcVideoSinkAdapter(GetVideoAdapter(), sink));
DCHECK(std::find(sinks_.begin(), sinks_.end(), sink) == sinks_.end());
sinks_.push_back(sink);
}
void MediaStreamVideoTrack::RemoveSink(MediaStreamVideoSink* sink) {
DCHECK(thread_checker_.CalledOnValidThread());
ScopedVector<WebRtcVideoSinkAdapter>::iterator it =
std::find_if(sinks_.begin(), sinks_.end(), SinkWrapper(sink));
std::vector<MediaStreamVideoSink*>::iterator it =
std::find(sinks_.begin(), sinks_.end(), sink);
DCHECK(it != sinks_.end());
sinks_.erase(it);
}
webrtc::VideoTrackInterface* MediaStreamVideoTrack::GetVideoAdapter() {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK_EQ(owner().source().type(), blink::WebMediaStreamSource::TypeVideo);
if (!track_.get()) {
MediaStreamVideoSource* source =
......@@ -67,4 +92,95 @@ webrtc::VideoTrackInterface* MediaStreamVideoTrack::GetVideoAdapter() {
return static_cast<webrtc::VideoTrackInterface*>(track_.get());
}
void MediaStreamVideoTrack::SetEnabled(bool enabled) {
DCHECK(thread_checker_.CalledOnValidThread());
enabled_ = enabled;
MediaStreamTrack::SetEnabled(enabled);
for (std::vector<MediaStreamVideoSink*>::iterator it = sinks_.begin();
it != sinks_.end(); ++it) {
(*it)->OnEnabledChanged(enabled);
}
}
void MediaStreamVideoTrack::OnVideoFrame(
const scoped_refptr<media::VideoFrame>& frame) {
DCHECK(thread_checker_.CalledOnValidThread());
if (!enabled_)
return;
scoped_refptr<media::VideoFrame> video_frame = frame;
if (frame->format() == media::VideoFrame::I420) {
// Rendering do not support I420 but video capture use I420.
// The only difference between YV12 and I420 is the order of U and V plane.
// To solve that the I420 frame is simply wrapped in an YV12 video frame.
// crbug/341452.
video_frame = media::VideoFrame::WrapExternalYuvData(
media::VideoFrame::YV12,
frame->coded_size(),
frame->visible_rect(),
frame->natural_size(),
frame->stride(media::VideoFrame::kYPlane),
frame->stride(media::VideoFrame::kUPlane),
frame->stride(media::VideoFrame::kVPlane),
frame->data(media::VideoFrame::kYPlane),
frame->data(media::VideoFrame::kUPlane),
frame->data(media::VideoFrame::kVPlane),
frame->GetTimestamp(),
base::Bind(&ReleaseOriginalFrame, frame));
}
for (std::vector<MediaStreamVideoSink*>::iterator it = sinks_.begin();
it != sinks_.end(); ++it) {
(*it)->OnVideoFrame(video_frame);
}
}
void MediaStreamVideoTrack::OnReadyStateChanged(
blink::WebMediaStreamSource::ReadyState state) {
DCHECK(thread_checker_.CalledOnValidThread());
for (std::vector<MediaStreamVideoSink*>::iterator it = sinks_.begin();
it != sinks_.end(); ++it) {
(*it)->OnReadyStateChanged(state);
}
}
// Wrapper which allows to use std::find_if() when adding and removing
// sinks to/from |sinks_|.
struct SinkWrapper {
explicit SinkWrapper(MediaStreamVideoSink* sink) : sink_(sink) {}
bool operator()(
const WebRtcVideoSinkAdapter* owner) {
return owner->sink() == sink_;
}
MediaStreamVideoSink* sink_;
};
WebRtcMediaStreamVideoTrack::WebRtcMediaStreamVideoTrack(
webrtc::VideoTrackInterface* track)
: MediaStreamVideoTrack(NULL,
blink::WebMediaConstraints(),
MediaStreamVideoSource::ConstraintsCallback(),
track->enabled(),
NULL) {
track_ = track;
}
WebRtcMediaStreamVideoTrack::~WebRtcMediaStreamVideoTrack() {
}
void WebRtcMediaStreamVideoTrack::AddSink(MediaStreamVideoSink* sink) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(std::find_if(sinks_.begin(), sinks_.end(),
SinkWrapper(sink)) == sinks_.end());
sinks_.push_back(new WebRtcVideoSinkAdapter(GetVideoAdapter(), sink));
}
void WebRtcMediaStreamVideoTrack::RemoveSink(MediaStreamVideoSink* sink) {
DCHECK(thread_checker_.CalledOnValidThread());
ScopedVector<WebRtcVideoSinkAdapter>::iterator it =
std::find_if(sinks_.begin(), sinks_.end(), SinkWrapper(sink));
DCHECK(it != sinks_.end());
sinks_.erase(it);
}
} // namespace content
......@@ -5,12 +5,15 @@
#ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_VIDEO_TRACK_H_
#define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_VIDEO_TRACK_H_
#include <vector>
#include "base/compiler_specific.h"
#include "base/memory/scoped_vector.h"
#include "base/threading/thread_checker.h"
#include "content/common/content_export.h"
#include "content/public/renderer/media_stream_video_sink.h"
#include "content/renderer/media/media_stream_track.h"
#include "content/renderer/media/media_stream_video_source.h"
namespace webrtc {
class VideoTrackInterface;
......@@ -24,23 +27,58 @@ class WebRtcVideoSinkAdapter;
// MediaStreamVideoTrack is a video specific representation of a
// blink::WebMediaStreamTrack in content. It is owned by the blink object
// and can be retrieved from a blink object using
// WebMediaStreamTrack::extraData()
// WebMediaStreamTrack::extraData() or MediaStreamVideoTrack::GetVideoTrack.
class CONTENT_EXPORT MediaStreamVideoTrack : public MediaStreamTrack {
public:
// Help method to create a blink::WebMediaStreamTrack and a
// MediaStreamVideoTrack instance. The MediaStreamVideoTrack object is owned
// by the blink object in its WebMediaStreamTrack::ExtraData member.
// |callback| is triggered if the track is added to the source
// successfully and will receive video frames that match |constraints|
// or if the source fails to provide video frames.
// If |enabled| is true, sinks added to the track will
// receive video frames when the source deliver frames to the track.
static blink::WebMediaStreamTrack CreateVideoTrack(
MediaStreamVideoSource* source,
const blink::WebMediaConstraints& constraints,
const MediaStreamVideoSource::ConstraintsCallback& callback,
bool enabled,
MediaStreamDependencyFactory* factory);
static MediaStreamVideoTrack* GetVideoTrack(
const blink::WebMediaStreamTrack& track);
// Constructor for local video tracks.
explicit MediaStreamVideoTrack(MediaStreamDependencyFactory* factory);
// Constructor for remote video tracks.
explicit MediaStreamVideoTrack(webrtc::VideoTrackInterface* track);
MediaStreamVideoTrack(
MediaStreamVideoSource* source,
const blink::WebMediaConstraints& constraints,
const MediaStreamVideoSource::ConstraintsCallback& callback,
bool enabled,
MediaStreamDependencyFactory* factory);
virtual ~MediaStreamVideoTrack();
void AddSink(MediaStreamVideoSink* sink);
void RemoveSink(MediaStreamVideoSink* sink);
virtual void AddSink(MediaStreamVideoSink* sink);
virtual void RemoveSink(MediaStreamVideoSink* sink);
// TODO(perkj): GetVideoAdapter is webrtc specific. Move GetVideoAdapter to
// where the track is added to a RTCPeerConnection. crbug/323223.
virtual webrtc::VideoTrackInterface* GetVideoAdapter() OVERRIDE;
virtual void SetEnabled(bool enabled) OVERRIDE;
private:
void OnVideoFrame(const scoped_refptr<media::VideoFrame>& frame);
void OnReadyStateChanged(blink::WebMediaStreamSource::ReadyState state);
protected:
// Used to DCHECK that we are called on the correct thread.
base::ThreadChecker thread_checker_;
ScopedVector<WebRtcVideoSinkAdapter> sinks_;
private:
bool enabled_;
std::vector<MediaStreamVideoSink*> sinks_;
// Weak ref to the source this tracks is connected to. |source_| is owned
// by the blink::WebMediaStreamSource and is guaranteed to outlive the
// track.
MediaStreamVideoSource* source_;
// Weak ref to a MediaStreamDependencyFactory, owned by the RenderThread.
// It's valid for the lifetime of RenderThread.
......@@ -49,6 +87,26 @@ class CONTENT_EXPORT MediaStreamVideoTrack : public MediaStreamTrack {
DISALLOW_COPY_AND_ASSIGN(MediaStreamVideoTrack);
};
// WebRtcMediaStreamVideoTrack is a content representation of a video track
// received on a PeerConnection.
// TODO(perkj): Replace WebRtcMediaStreamVideoTrack with a remote
// MediaStreamVideoSource class so that all tracks are MediaStreamVideoTracks
// and new tracks can be cloned from the original remote video track.
// crbug/334243.
class CONTENT_EXPORT WebRtcMediaStreamVideoTrack
: public MediaStreamVideoTrack {
public:
explicit WebRtcMediaStreamVideoTrack(webrtc::VideoTrackInterface* track);
virtual ~WebRtcMediaStreamVideoTrack();
virtual void AddSink(MediaStreamVideoSink* sink) OVERRIDE;
virtual void RemoveSink(MediaStreamVideoSink* sink) OVERRIDE;
private:
ScopedVector<WebRtcVideoSinkAdapter> sinks_;
DISALLOW_COPY_AND_ASSIGN(WebRtcMediaStreamVideoTrack);
};
} // namespace content
#endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_VIDEO_TRACK_H_
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/mock_media_stream_dependency_factory.h"
#include "content/renderer/media/mock_media_stream_video_source.h"
#include "media/base/video_frame.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace content {
class MockVideoSink : public MediaStreamVideoSink {
public:
MockVideoSink()
: number_of_frames_(0), enabled_(true),
state_(blink::WebMediaStreamSource::ReadyStateLive) {
}
virtual void OnVideoFrame(
const scoped_refptr<media::VideoFrame>& frame) OVERRIDE {
++number_of_frames_;
}
virtual void OnReadyStateChanged(
blink::WebMediaStreamSource::ReadyState state) OVERRIDE {
state_ = state;
}
virtual void OnEnabledChanged(bool enabled) OVERRIDE {
enabled_ = enabled;
}
int number_of_frames() const { return number_of_frames_; }
bool enabled() const { return enabled_; }
blink::WebMediaStreamSource::ReadyState state() const { return state_; }
private:
int number_of_frames_;
bool enabled_;
blink::WebMediaStreamSource::ReadyState state_;
};
class MediaStreamVideoTrackTest : public ::testing::Test {
public:
MediaStreamVideoTrackTest()
: mock_source_(new MockMediaStreamVideoSource(&factory_, false)) {
webkit_source_.initialize(base::UTF8ToUTF16("dummy_source_id"),
blink::WebMediaStreamSource::TypeVideo,
base::UTF8ToUTF16("dummy_source_name"));
webkit_source_.setExtraData(mock_source_);
}
protected:
// Create a track that's associated with |mock_source_|.
blink::WebMediaStreamTrack CreateTrack() {
blink::WebMediaConstraints constraints;
constraints.initialize();
bool enabled = true;
blink::WebMediaStreamTrack track =
MediaStreamVideoTrack::CreateVideoTrack(
mock_source_, constraints,
MediaStreamSource::ConstraintsCallback(), enabled, &factory_);
mock_source_->StartMockedSource();
return track;
}
MockMediaStreamVideoSource* mock_source() { return mock_source_; }
private:
MockMediaStreamDependencyFactory factory_;
blink::WebMediaStreamSource webkit_source_;
// |mock_source_| is owned by |webkit_source_|.
MockMediaStreamVideoSource* mock_source_;
};
TEST_F(MediaStreamVideoTrackTest, GetAdapter) {
blink::WebMediaStreamTrack track = CreateTrack();
MediaStreamVideoTrack* video_track =
MediaStreamVideoTrack::GetVideoTrack(track);
EXPECT_TRUE(video_track->GetVideoAdapter() != NULL);
}
TEST_F(MediaStreamVideoTrackTest, AddAndRemoveSink) {
MockVideoSink sink;
blink::WebMediaStreamTrack track = CreateTrack();
MediaStreamVideoSink::AddToVideoTrack(&sink, track);
MediaStreamVideoTrack* video_track =
MediaStreamVideoTrack::GetVideoTrack(track);
scoped_refptr<media::VideoFrame> frame =
media::VideoFrame::CreateBlackFrame(
gfx::Size(MediaStreamVideoSource::kDefaultWidth,
MediaStreamVideoSource::kDefaultHeight));
video_track->OnVideoFrame(frame);
EXPECT_EQ(1, sink.number_of_frames());
video_track->OnVideoFrame(frame);
EXPECT_EQ(2, sink.number_of_frames());
MediaStreamVideoSink::RemoveFromVideoTrack(&sink, track);
video_track->OnVideoFrame(frame);
EXPECT_EQ(2, sink.number_of_frames());
}
TEST_F(MediaStreamVideoTrackTest, SetEnabled) {
MockVideoSink sink;
blink::WebMediaStreamTrack track = CreateTrack();
MediaStreamVideoSink::AddToVideoTrack(&sink, track);
MediaStreamVideoTrack* video_track =
MediaStreamVideoTrack::GetVideoTrack(track);
scoped_refptr<media::VideoFrame> frame =
media::VideoFrame::CreateBlackFrame(
gfx::Size(MediaStreamVideoSource::kDefaultWidth,
MediaStreamVideoSource::kDefaultHeight));
video_track->OnVideoFrame(frame);
EXPECT_EQ(1, sink.number_of_frames());
video_track->SetEnabled(false);
EXPECT_FALSE(sink.enabled());
video_track->OnVideoFrame(frame);
EXPECT_EQ(1, sink.number_of_frames());
video_track->SetEnabled(true);
EXPECT_TRUE(sink.enabled());
video_track->OnVideoFrame(frame);
EXPECT_EQ(2, sink.number_of_frames());
MediaStreamVideoSink::RemoveFromVideoTrack(&sink, track);
}
TEST_F(MediaStreamVideoTrackTest, SourceStopped) {
MockVideoSink sink;
blink::WebMediaStreamTrack track = CreateTrack();
MediaStreamVideoSink::AddToVideoTrack(&sink, track);
EXPECT_EQ(blink::WebMediaStreamSource::ReadyStateLive, sink.state());
mock_source()->StopSource();
EXPECT_EQ(blink::WebMediaStreamSource::ReadyStateEnded, sink.state());
MediaStreamVideoSink::RemoveFromVideoTrack(&sink, track);
}
} // namespace content
......@@ -230,9 +230,11 @@ class RTCPeerConnectionHandlerTest : public ::testing::Test {
video_source.initialize(blink::WebString::fromUTF8(video_track_label),
blink::WebMediaStreamSource::TypeVideo,
blink::WebString::fromUTF8("video_track"));
video_source.setExtraData(
MockMediaStreamVideoSource* native_video_source =
new MockMediaStreamVideoSource(mock_dependency_factory_.get(),
false));
false);
video_source.setExtraData(native_video_source);
blink::WebVector<blink::WebMediaStreamTrack> audio_tracks(
static_cast<size_t>(1));
......@@ -244,9 +246,12 @@ class RTCPeerConnectionHandlerTest : public ::testing::Test {
true));
blink::WebVector<blink::WebMediaStreamTrack> video_tracks(
static_cast<size_t>(1));
video_tracks[0].initialize(video_source.id(), video_source);
video_tracks[0].setExtraData(
new MediaStreamVideoTrack(mock_dependency_factory_.get()));
blink::WebMediaConstraints constraints;
constraints.initialize();
video_tracks[0] = MediaStreamVideoTrack::CreateVideoTrack(
native_video_source, constraints,
MediaStreamVideoSource::ConstraintsCallback(), true,
mock_dependency_factory_.get());
blink::WebMediaStream local_stream;
local_stream.initialize(base::UTF8ToUTF16(stream_label), audio_tracks,
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment