Commit 21b27f9c authored by perkj@chromium.org

Added implementation of RemoteMediaStreams.

This implements an observer of a remote webrtc MediaStream and propagates changes to a WebKit MediaStream.
It propagates the addition and removal of remote tracks on existing MediaStreams, as well as when a remote track has ended.
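In outline, one observer is kept per remote stream, and each webrtc OnChanged() callback is handled by diffing the stream's current tracks against the tracks already mirrored to WebKit. Below is a minimal, self-contained sketch of that diff-and-notify idea using stand-in types and names (not the Chromium code itself; the real classes are RemoteMediaStreamImpl and RemoteMediaStreamTrackObserver in the diff further down):

// Illustrative stand-in for RemoteMediaStreamImpl::OnChanged(): diff the
// remote stream's current track ids against the ids already mirrored to the
// WebKit-side stream, then propagate removals and additions.
#include <set>
#include <string>
#include <vector>

class RemoteStreamMirrorSketch {
 public:
  // Would be driven by webrtc::ObserverInterface::OnChanged(); |current_ids|
  // stands in for webrtc_stream_->GetAudioTracks()/GetVideoTracks().
  void OnChanged(const std::vector<std::string>& current_ids) {
    std::set<std::string> current(current_ids.begin(), current_ids.end());
    // Tracks gone from the remote stream: webkit_stream_.removeTrack(...).
    for (std::set<std::string>::iterator it = mirrored_.begin();
         it != mirrored_.end();) {
      if (current.count(*it) == 0)
        mirrored_.erase(it++);
      else
        ++it;
    }
    // Tracks newly present remotely: webkit_stream_.addTrack(...) plus a
    // per-track observer so a later kEnded state can be forwarded as
    // WebMediaStreamSource::ReadyStateEnded.
    for (std::set<std::string>::const_iterator it = current.begin();
         it != current.end(); ++it) {
      if (mirrored_.insert(*it).second) {
        // New track detected; notify the WebKit MediaStream here.
      }
    }
  }

 private:
  // Track ids currently exposed on the WebKit MediaStream.
  std::set<std::string> mirrored_;
};

The actual implementation in this change additionally keeps a RemoteMediaStreamTrackObserver per track so that remote track state changes (kLive/kEnded) update the ready state of the corresponding WebKit source.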

https://code.google.com/p/webrtc/issues/detail?id=872

BUG=233514

Review URL: https://chromiumcodereview.appspot.com/14200016

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@195186 0039d316-1c4b-4281-b951-d872f2087c98
parent f2413b9d
@@ -167,5 +167,20 @@ IN_PROC_BROWSER_TEST_F(WebrtcBrowserTest, CallWithNewVideoMediaStream) {
ExpectTitle("OK");
}
// This test will make a PeerConnection-based call and send a new Video
// MediaStream that has been created based on a MediaStream created with
// getUserMedia. When video is flowing, the VideoTrack is removed and an
// AudioTrack is added instead.
// TODO(phoglund): This test is manual since not all buildbots have an audio
// input.
IN_PROC_BROWSER_TEST_F(WebrtcBrowserTest, MANUAL_CallAndModifyStream) {
GURL url(test_server()->GetURL("files/media/peerconnection-call.html"));
NavigateToURL(shell(), url);
EXPECT_TRUE(
ExecuteJavascript("callWithNewVideoMediaStreamLaterSwitchToAudio();"));
ExpectTitle("OK");
}
} // namespace content
@@ -145,6 +145,8 @@
'renderer/media/renderer_gpu_video_decoder_factories.h',
'renderer/media/renderer_webaudiodevice_impl.cc',
'renderer/media/renderer_webaudiodevice_impl.h',
'renderer/media/remote_media_stream_impl.cc',
'renderer/media/remote_media_stream_impl.h',
'renderer/media/rtc_video_renderer.cc',
'renderer/media/rtc_video_renderer.h',
'renderer/media/stream_texture_factory_impl_android.cc',
......
@@ -39,14 +39,19 @@ static typename V::iterator FindTrack(V* vector,
class MockMediaStream : public webrtc::MediaStreamInterface {
public:
explicit MockMediaStream(const std::string& label)
: label_(label),
observer_(NULL) {
}
virtual bool AddTrack(AudioTrackInterface* track) OVERRIDE {
audio_track_vector_.push_back(track);
if (observer_)
observer_->OnChanged();
return true;
}
virtual bool AddTrack(VideoTrackInterface* track) OVERRIDE {
video_track_vector_.push_back(track);
if (observer_)
observer_->OnChanged();
return true;
}
virtual bool RemoveTrack(AudioTrackInterface* track) OVERRIDE {
@@ -55,6 +60,8 @@ class MockMediaStream : public webrtc::MediaStreamInterface {
if (it == audio_track_vector_.end())
return false;
audio_track_vector_.erase(it);
if (observer_)
observer_->OnChanged();
return true;
}
virtual bool RemoveTrack(VideoTrackInterface* track) OVERRIDE {
@@ -63,6 +70,8 @@ class MockMediaStream : public webrtc::MediaStreamInterface {
if (it == video_track_vector_.end())
return false;
video_track_vector_.erase(it);
if (observer_)
observer_->OnChanged();
return true;
}
virtual std::string label() const OVERRIDE { return label_; }
@@ -83,10 +92,12 @@ class MockMediaStream : public webrtc::MediaStreamInterface {
return it == video_track_vector_.end() ? NULL : *it;
}
virtual void RegisterObserver(ObserverInterface* observer) OVERRIDE {
DCHECK(!observer_);
observer_ = observer;
}
virtual void UnregisterObserver(ObserverInterface* observer) OVERRIDE {
DCHECK(observer_ == observer);
observer_ = NULL;
}
protected:
@@ -96,6 +107,7 @@ class MockMediaStream : public webrtc::MediaStreamInterface {
std::string label_;
AudioTrackVector audio_track_vector_;
VideoTrackVector video_track_vector_;
webrtc::ObserverInterface* observer_;
};
MockAudioSource::MockAudioSource(
@@ -191,7 +203,9 @@ MockLocalVideoTrack::MockLocalVideoTrack(std::string id,
webrtc::VideoSourceInterface* source)
: enabled_(false),
id_(id),
state_(MediaStreamTrackInterface::kLive),
source_(source),
observer_(NULL) {
}
MockLocalVideoTrack::~MockLocalVideoTrack() {}
@@ -219,8 +233,7 @@ std::string MockLocalVideoTrack::id() const { return id_; }
bool MockLocalVideoTrack::enabled() const { return enabled_; }
MockLocalVideoTrack::TrackState MockLocalVideoTrack::state() const {
return state_;
}
bool MockLocalVideoTrack::set_enabled(bool enable) {
@@ -229,16 +242,19 @@ bool MockLocalVideoTrack::set_enabled(bool enable) {
}
bool MockLocalVideoTrack::set_state(TrackState new_state) {
state_ = new_state;
if (observer_)
observer_->OnChanged();
return true;
}
void MockLocalVideoTrack::RegisterObserver(ObserverInterface* observer) {
observer_ = observer;
}
void MockLocalVideoTrack::UnregisterObserver(ObserverInterface* observer) {
DCHECK(observer_ == observer);
observer_ = NULL;
}
VideoSourceInterface* MockLocalVideoTrack::GetSource() const {
@@ -254,9 +270,8 @@ std::string MockLocalAudioTrack::id() const { return id_; }
bool MockLocalAudioTrack::enabled() const { return enabled_; }
MockLocalAudioTrack::TrackState MockLocalAudioTrack::state() const {
return state_;
}
bool MockLocalAudioTrack::set_enabled(bool enable) {
@@ -265,16 +280,19 @@ bool MockLocalAudioTrack::set_enabled(bool enable) {
}
bool MockLocalAudioTrack::set_state(TrackState new_state) {
state_ = new_state;
if (observer_)
observer_->OnChanged();
return true;
}
void MockLocalAudioTrack::RegisterObserver(ObserverInterface* observer) {
observer_ = observer;
}
void MockLocalAudioTrack::UnregisterObserver(ObserverInterface* observer) {
DCHECK(observer_ == observer);
observer_ = NULL;
}
AudioSourceInterface* MockLocalAudioTrack::GetSource() const {
......
@@ -35,13 +35,14 @@ class MockVideoSource : public webrtc::VideoSourceInterface {
virtual ~MockVideoSource();
private:
webrtc::ObserverInterface* observer_;
MediaSourceInterface::SourceState state_;
};
class MockAudioSource : public webrtc::AudioSourceInterface {
public:
explicit MockAudioSource(
const webrtc::MediaConstraintsInterface* constraints);
virtual void RegisterObserver(webrtc::ObserverInterface* observer) OVERRIDE;
virtual void UnregisterObserver(webrtc::ObserverInterface* observer) OVERRIDE;
@@ -95,14 +96,18 @@ class MockLocalVideoTrack : public webrtc::VideoTrackInterface {
private:
bool enabled_;
std::string id_;
TrackState state_;
scoped_refptr<webrtc::VideoSourceInterface> source_;
webrtc::ObserverInterface* observer_;
};
class MockLocalAudioTrack : public webrtc::AudioTrackInterface {
public:
explicit MockLocalAudioTrack(const std::string& id)
: enabled_(false),
id_(id),
state_(MediaStreamTrackInterface::kLive),
observer_(NULL) {
}
virtual std::string kind() const OVERRIDE;
virtual std::string id() const OVERRIDE;
@@ -120,6 +125,8 @@ class MockLocalAudioTrack : public webrtc::AudioTrackInterface {
private:
bool enabled_;
std::string id_;
TrackState state_;
webrtc::ObserverInterface* observer_;
};
// A mock factory for creating different objects for
......
@@ -51,43 +51,6 @@ void PeerConnectionHandlerBase::RemoveStream(
DCHECK(native_stream);
}
WebKit::WebMediaStream
PeerConnectionHandlerBase::CreateRemoteWebKitMediaStream(
webrtc::MediaStreamInterface* stream) {
webrtc::AudioTrackVector audio_tracks = stream->GetAudioTracks();
webrtc::VideoTrackVector video_tracks = stream->GetVideoTracks();
WebKit::WebVector<WebKit::WebMediaStreamSource> audio_source_vector(
audio_tracks.size());
WebKit::WebVector<WebKit::WebMediaStreamSource> video_source_vector(
video_tracks.size());
// Add audio tracks.
size_t i = 0;
for (; i < audio_tracks.size(); ++i) {
webrtc::AudioTrackInterface* audio_track = audio_tracks[i];
DCHECK(audio_track);
audio_source_vector[i].initialize(
UTF8ToUTF16(audio_track->id()),
WebKit::WebMediaStreamSource::TypeAudio,
UTF8ToUTF16(audio_track->id()));
}
// Add video tracks.
for (i = 0; i < video_tracks.size(); ++i) {
webrtc::VideoTrackInterface* video_track = video_tracks[i];
DCHECK(video_track);
video_source_vector[i].initialize(
UTF8ToUTF16(video_track->id()),
WebKit::WebMediaStreamSource::TypeVideo,
UTF8ToUTF16(video_track->id()));
}
WebKit::WebMediaStream descriptor;
descriptor.initialize(UTF8ToUTF16(stream->label()),
audio_source_vector, video_source_vector);
descriptor.setExtraData(new MediaStreamExtraData(stream, false));
return descriptor;
}
webrtc::MediaStreamTrackInterface*
PeerConnectionHandlerBase::GetNativeMediaStreamTrack(
const WebKit::WebMediaStream& stream,
......
@@ -18,6 +18,7 @@
namespace content {
class MediaStreamDependencyFactory;
class RemoteMediaStreamImpl;
// PeerConnectionHandlerBase is the base class of a delegate for the
// PeerConnection API messages going between WebKit and native
@@ -35,8 +36,6 @@ class CONTENT_EXPORT PeerConnectionHandlerBase
bool AddStream(const WebKit::WebMediaStream& stream,
const webrtc::MediaConstraintsInterface* constraints);
void RemoveStream(const WebKit::WebMediaStream& stream);
webrtc::MediaStreamTrackInterface* GetNativeMediaStreamTrack(
const WebKit::WebMediaStream& stream,
const WebKit::WebMediaStreamTrack& component);
@@ -50,7 +49,7 @@ class CONTENT_EXPORT PeerConnectionHandlerBase
scoped_refptr<webrtc::PeerConnectionInterface> native_peer_connection_;
typedef std::map<webrtc::MediaStreamInterface*,
content::RemoteMediaStreamImpl*> RemoteStreamMap;
RemoteStreamMap remote_streams_;
// The message loop we are created on and on which to make calls to WebKit.
......
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/renderer/media/remote_media_stream_impl.h"
#include <string>
#include "base/logging.h"
#include "base/utf_string_conversions.h"
#include "content/renderer/media/media_stream_extra_data.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebString.h"
namespace content {
// RemoteMediaStreamTrackObserver is responsible for listening for change
// notifications on a remote webrtc MediaStreamTrack and notifying WebKit.
class RemoteMediaStreamTrackObserver
: NON_EXPORTED_BASE(public webrtc::ObserverInterface),
NON_EXPORTED_BASE(public base::NonThreadSafe) {
public:
RemoteMediaStreamTrackObserver(
webrtc::MediaStreamTrackInterface* webrtc_track,
const WebKit::WebMediaStreamTrack& webkit_track);
virtual ~RemoteMediaStreamTrackObserver();
webrtc::MediaStreamTrackInterface* observered_track() {
return webrtc_track_;
}
const WebKit::WebMediaStreamTrack& webkit_track() { return webkit_track_; }
private:
// webrtc::ObserverInterface implementation.
virtual void OnChanged() OVERRIDE;
webrtc::MediaStreamTrackInterface::TrackState state_;
scoped_refptr<webrtc::MediaStreamTrackInterface> webrtc_track_;
WebKit::WebMediaStreamTrack webkit_track_;
DISALLOW_COPY_AND_ASSIGN(RemoteMediaStreamTrackObserver);
};
} // namespace content
namespace {
void InitializeWebkitTrack(webrtc::MediaStreamTrackInterface* track,
WebKit::WebMediaStreamTrack* webkit_track,
WebKit::WebMediaStreamSource::Type type) {
WebKit::WebMediaStreamSource webkit_source;
WebKit::WebString webkit_track_id(UTF8ToUTF16(track->id()));
webkit_source.initialize(webkit_track_id, type, webkit_track_id);
webkit_track->initialize(webkit_track_id, webkit_source);
}
content::RemoteMediaStreamTrackObserver* FindTrackObserver(
webrtc::MediaStreamTrackInterface* track,
const ScopedVector<content::RemoteMediaStreamTrackObserver>& observers) {
ScopedVector<content::RemoteMediaStreamTrackObserver>::const_iterator it =
observers.begin();
for (; it != observers.end(); ++it) {
if ((*it)->observered_track() == track)
return *it;
}
return NULL;
}
} // namespace anonymous
namespace content {
RemoteMediaStreamTrackObserver::RemoteMediaStreamTrackObserver(
webrtc::MediaStreamTrackInterface* webrtc_track,
const WebKit::WebMediaStreamTrack& webkit_track)
: state_(webrtc_track->state()),
webrtc_track_(webrtc_track),
webkit_track_(webkit_track) {
webrtc_track->RegisterObserver(this);
}
RemoteMediaStreamTrackObserver::~RemoteMediaStreamTrackObserver() {
webrtc_track_->UnregisterObserver(this);
}
void RemoteMediaStreamTrackObserver::OnChanged() {
DCHECK(CalledOnValidThread());
webrtc::MediaStreamTrackInterface::TrackState state = webrtc_track_->state();
if (state == state_)
return;
state_ = state;
switch (state) {
case webrtc::MediaStreamTrackInterface::kInitializing:
// Ignore the kInitializing state since there is no match in
// WebMediaStreamSource::ReadyState.
break;
case webrtc::MediaStreamTrackInterface::kLive:
webkit_track_.source().setReadyState(
WebKit::WebMediaStreamSource::ReadyStateLive);
break;
case webrtc::MediaStreamTrackInterface::kEnded:
webkit_track_.source().setReadyState(
WebKit::WebMediaStreamSource::ReadyStateEnded);
break;
default:
NOTREACHED();
break;
}
}
RemoteMediaStreamImpl::RemoteMediaStreamImpl(
webrtc::MediaStreamInterface* webrtc_stream)
: webrtc_stream_(webrtc_stream) {
webrtc_stream_->RegisterObserver(this);
webrtc::AudioTrackVector webrtc_audio_tracks =
webrtc_stream_->GetAudioTracks();
WebKit::WebVector<WebKit::WebMediaStreamTrack> webkit_audio_tracks(
webrtc_audio_tracks.size());
// Initialize WebKit audio tracks.
size_t i = 0;
for (; i < webrtc_audio_tracks.size(); ++i) {
webrtc::AudioTrackInterface* audio_track = webrtc_audio_tracks[i];
DCHECK(audio_track);
InitializeWebkitTrack(audio_track, &webkit_audio_tracks[i],
WebKit::WebMediaStreamSource::TypeAudio);
audio_track_observers_.push_back(
new RemoteMediaStreamTrackObserver(audio_track,
webkit_audio_tracks[i]));
}
// Initialize WebKit video tracks.
webrtc::VideoTrackVector webrtc_video_tracks =
webrtc_stream_->GetVideoTracks();
WebKit::WebVector<WebKit::WebMediaStreamTrack> webkit_video_tracks(
webrtc_video_tracks.size());
for (i = 0; i < webrtc_video_tracks.size(); ++i) {
webrtc::VideoTrackInterface* video_track = webrtc_video_tracks[i];
DCHECK(video_track);
InitializeWebkitTrack(video_track, &webkit_video_tracks[i],
WebKit::WebMediaStreamSource::TypeVideo);
video_track_observers_.push_back(
new RemoteMediaStreamTrackObserver(video_track,
webkit_video_tracks[i]));
}
webkit_stream_.initialize(UTF8ToUTF16(webrtc_stream->label()),
webkit_audio_tracks, webkit_video_tracks);
webkit_stream_.setExtraData(new MediaStreamExtraData(webrtc_stream, false));
}
RemoteMediaStreamImpl::~RemoteMediaStreamImpl() {
webrtc_stream_->UnregisterObserver(this);
}
void RemoteMediaStreamImpl::OnChanged() {
// Find removed audio tracks.
ScopedVector<RemoteMediaStreamTrackObserver>::iterator audio_it =
audio_track_observers_.begin();
while (audio_it != audio_track_observers_.end()) {
std::string track_id = (*audio_it)->observered_track()->id();
if (webrtc_stream_->FindAudioTrack(track_id) == NULL) {
webkit_stream_.removeTrack((*audio_it)->webkit_track());
audio_it = audio_track_observers_.erase(audio_it);
} else {
++audio_it;
}
}
// Find removed video tracks.
ScopedVector<RemoteMediaStreamTrackObserver>::iterator video_it =
video_track_observers_.begin();
while (video_it != video_track_observers_.end()) {
std::string track_id = (*video_it)->observered_track()->id();
if (webrtc_stream_->FindVideoTrack(track_id) == NULL) {
webkit_stream_.removeTrack((*video_it)->webkit_track());
video_it = video_track_observers_.erase(video_it);
} else {
++video_it;
}
}
// Find added audio tracks.
webrtc::AudioTrackVector webrtc_audio_tracks =
webrtc_stream_->GetAudioTracks();
for (webrtc::AudioTrackVector::iterator it = webrtc_audio_tracks.begin();
it != webrtc_audio_tracks.end(); ++it) {
if (!FindTrackObserver(*it, audio_track_observers_)) {
WebKit::WebMediaStreamTrack new_track;
InitializeWebkitTrack(*it, &new_track,
WebKit::WebMediaStreamSource::TypeAudio);
audio_track_observers_.push_back(
new RemoteMediaStreamTrackObserver(*it, new_track));
webkit_stream_.addTrack(new_track);
}
}
// Find added video tracks.
webrtc::VideoTrackVector webrtc_video_tracks =
webrtc_stream_->GetVideoTracks();
for (webrtc::VideoTrackVector::iterator it = webrtc_video_tracks.begin();
it != webrtc_video_tracks.end(); ++it) {
if (!FindTrackObserver(*it, video_track_observers_)) {
WebKit::WebMediaStreamTrack new_track;
InitializeWebkitTrack(*it, &new_track,
WebKit::WebMediaStreamSource::TypeVideo);
video_track_observers_.push_back(
new RemoteMediaStreamTrackObserver(*it, new_track));
webkit_stream_.addTrack(new_track);
}
}
}
} // namespace content
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_RENDERER_MEDIA_REMOTE_MEDIA_STREAM_IMPL_H_
#define CONTENT_RENDERER_MEDIA_REMOTE_MEDIA_STREAM_IMPL_H_
#include "base/basictypes.h"
#include "base/compiler_specific.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_vector.h"
#include "base/threading/non_thread_safe.h"
#include "content/common/content_export.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStream.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStreamSource.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStreamTrack.h"
namespace content {
class RemoteMediaStreamTrackObserver;
// RemoteMediaStreamImpl serves as a container and glue between remote webrtc
// MediaStreams and WebKit MediaStreams. For each remote MediaStream received
// on a PeerConnection a RemoteMediaStreamImpl instance is created and
// owned by RtcPeerConnection.
class CONTENT_EXPORT RemoteMediaStreamImpl
: NON_EXPORTED_BASE(public webrtc::ObserverInterface),
NON_EXPORTED_BASE(public base::NonThreadSafe) {
public:
explicit RemoteMediaStreamImpl(
webrtc::MediaStreamInterface* webrtc_stream);
virtual ~RemoteMediaStreamImpl();
const WebKit::WebMediaStream& webkit_stream() { return webkit_stream_; }
private:
// webrtc::ObserverInterface implementation.
virtual void OnChanged() OVERRIDE;
scoped_refptr<webrtc::MediaStreamInterface> webrtc_stream_;
ScopedVector<RemoteMediaStreamTrackObserver> audio_track_observers_;
ScopedVector<RemoteMediaStreamTrackObserver> video_track_observers_;
WebKit::WebMediaStream webkit_stream_;
DISALLOW_COPY_AND_ASSIGN(RemoteMediaStreamImpl);
};
} // namespace content
#endif // CONTENT_RENDERER_MEDIA_REMOTE_MEDIA_STREAM_IMPL_H_
@@ -10,9 +10,11 @@
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/stl_util.h"
#include "base/utf_string_conversions.h"
#include "content/renderer/media/media_stream_dependency_factory.h"
#include "content/renderer/media/peer_connection_tracker.h"
#include "content/renderer/media/remote_media_stream_impl.h"
#include "content/renderer/media/rtc_data_channel_handler.h"
#include "content/renderer/media/rtc_dtmf_sender_handler.h"
#include "content/renderer/media/rtc_media_constraints.h"
@@ -323,6 +325,7 @@ RTCPeerConnectionHandler::RTCPeerConnectionHandler(
RTCPeerConnectionHandler::~RTCPeerConnectionHandler() {
if (peer_connection_tracker_)
peer_connection_tracker_->UnregisterPeerConnection(this);
STLDeleteValues(&remote_streams_);
}
void RTCPeerConnectionHandler::associateWithFrame(WebKit::WebFrame* frame) {
@@ -660,17 +663,19 @@ void RTCPeerConnectionHandler::OnAddStream(
webrtc::MediaStreamInterface* stream_interface) {
DCHECK(stream_interface);
DCHECK(remote_streams_.find(stream_interface) == remote_streams_.end());
RemoteMediaStreamImpl* remote_stream =
new RemoteMediaStreamImpl(stream_interface);
remote_streams_.insert(
std::pair<webrtc::MediaStreamInterface*, RemoteMediaStreamImpl*> (
stream_interface, remote_stream));
if (peer_connection_tracker_)
peer_connection_tracker_->TrackAddStream(
this, remote_stream->webkit_stream(),
PeerConnectionTracker::SOURCE_REMOTE);
client_->didAddRemoteStream(remote_stream->webkit_stream());
}
void RTCPeerConnectionHandler::OnRemoveStream(
@@ -681,15 +686,17 @@ void RTCPeerConnectionHandler::OnRemoveStream(
NOTREACHED() << "Stream not found";
return;
}
scoped_ptr<RemoteMediaStreamImpl> remote_stream(it->second);
const WebKit::WebMediaStream& webkit_stream = remote_stream->webkit_stream();
DCHECK(!webkit_stream.isNull());
remote_streams_.erase(it);
if (peer_connection_tracker_)
peer_connection_tracker_->TrackRemoveStream(
this, webkit_stream, PeerConnectionTracker::SOURCE_REMOTE);
client_->didRemoveRemoteStream(webkit_stream);
}
void RTCPeerConnectionHandler::OnIceCandidate(
......
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include <string>
#include <vector>
#include "base/memory/scoped_ptr.h"
#include "base/utf_string_conversions.h"
@@ -654,6 +655,106 @@ TEST_F(RTCPeerConnectionHandlerTest, OnAddAndOnRemoveStream) {
pc_handler_->OnRemoveStream(remote_stream);
}
// This tests that WebKit is notified about remote track state changes.
TEST_F(RTCPeerConnectionHandlerTest, RemoteTrackState) {
std::string remote_stream_label("remote_stream");
scoped_refptr<webrtc::MediaStreamInterface> remote_stream(
AddRemoteMockMediaStream(remote_stream_label, "video", "audio"));
testing::InSequence sequence;
EXPECT_CALL(*mock_client_.get(), didAddRemoteStream(
testing::Property(&WebKit::WebMediaStream::label,
UTF8ToUTF16(remote_stream_label))));
pc_handler_->OnAddStream(remote_stream);
const WebKit::WebMediaStream& webkit_stream = mock_client_->remote_stream();
WebKit::WebVector<WebKit::WebMediaStreamTrack> audio_tracks;
webkit_stream.audioTracks(audio_tracks);
EXPECT_EQ(WebKit::WebMediaStreamSource::ReadyStateLive,
audio_tracks[0].source().readyState());
WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks;
webkit_stream.videoTracks(video_tracks);
EXPECT_EQ(WebKit::WebMediaStreamSource::ReadyStateLive,
video_tracks[0].source().readyState());
remote_stream->GetAudioTracks()[0]->set_state(
webrtc::MediaStreamTrackInterface::kEnded);
EXPECT_EQ(WebKit::WebMediaStreamSource::ReadyStateEnded,
audio_tracks[0].source().readyState());
remote_stream->GetVideoTracks()[0]->set_state(
webrtc::MediaStreamTrackInterface::kEnded);
EXPECT_EQ(WebKit::WebMediaStreamSource::ReadyStateEnded,
video_tracks[0].source().readyState());
}
// TODO(perkj): Enable this test once https://codereview.chromium.org/14314003
// is landed.
TEST_F(RTCPeerConnectionHandlerTest,
DISABLED_RemoveAndAddAudioTrackFromRemoteStream) {
std::string remote_stream_label("remote_stream");
scoped_refptr<webrtc::MediaStreamInterface> remote_stream(
AddRemoteMockMediaStream(remote_stream_label, "video", "audio"));
EXPECT_CALL(*mock_client_.get(), didAddRemoteStream(
testing::Property(&WebKit::WebMediaStream::label,
UTF8ToUTF16(remote_stream_label))));
pc_handler_->OnAddStream(remote_stream);
const WebKit::WebMediaStream& webkit_stream = mock_client_->remote_stream();
WebKit::WebVector<WebKit::WebMediaStreamTrack> audio_tracks;
webkit_stream.audioTracks(audio_tracks);
EXPECT_EQ(1u, audio_tracks.size());
// Remove the Webrtc audio track from the Webrtc MediaStream.
scoped_refptr<webrtc::AudioTrackInterface> webrtc_track =
remote_stream->GetAudioTracks()[0].get();
remote_stream->RemoveTrack(webrtc_track);
WebKit::WebVector<WebKit::WebMediaStreamTrack> modified_audio_tracks1;
webkit_stream.audioTracks(modified_audio_tracks1);
EXPECT_EQ(0u, modified_audio_tracks1.size());
// Add the WebRtc audio track again.
remote_stream->AddTrack(webrtc_track);
WebKit::WebVector<WebKit::WebMediaStreamTrack> modified_audio_tracks2;
webkit_stream.audioTracks(modified_audio_tracks2);
EXPECT_EQ(1u, modified_audio_tracks2.size());
}
// TODO(perkj): Enable this test once https://codereview.chromium.org/14314003
// is landed.
TEST_F(RTCPeerConnectionHandlerTest,
DISABLED_RemoveAndAddVideoTrackFromRemoteStream) {
std::string remote_stream_label("remote_stream");
scoped_refptr<webrtc::MediaStreamInterface> remote_stream(
AddRemoteMockMediaStream(remote_stream_label, "video", "video"));
EXPECT_CALL(*mock_client_.get(), didAddRemoteStream(
testing::Property(&WebKit::WebMediaStream::label,
UTF8ToUTF16(remote_stream_label))));
pc_handler_->OnAddStream(remote_stream);
const WebKit::WebMediaStream& webkit_stream = mock_client_->remote_stream();
WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks;
webkit_stream.videoTracks(video_tracks);
EXPECT_EQ(1u, video_tracks.size());
// Remove the Webrtc video track from the Webrtc MediaStream.
scoped_refptr<webrtc::VideoTrackInterface> webrtc_track =
remote_stream->GetVideoTracks()[0].get();
remote_stream->RemoveTrack(webrtc_track);
WebKit::WebVector<WebKit::WebMediaStreamTrack> modified_video_tracks1;
webkit_stream.videoTracks(modified_video_tracks1);
EXPECT_EQ(0u, modified_video_tracks1.size());
// Add the WebRtc video track again.
remote_stream->AddTrack(webrtc_track);
WebKit::WebVector<WebKit::WebMediaStreamTrack> modified_video_tracks2;
webkit_stream.videoTracks(modified_video_tracks2);
EXPECT_EQ(1u, modified_video_tracks2.size());
}
TEST_F(RTCPeerConnectionHandlerTest, OnIceCandidate) {
testing::InSequence sequence;
EXPECT_CALL(*mock_tracker_.get(),
......
@@ -13,8 +13,8 @@
var gLocalStream = null;
var gSentTones = '';
setAllEventsOccuredHandler(function() {
document.title = 'OK';
});
// Test that we can setup call with an audio and video track.
@@ -43,7 +43,7 @@
createConnections(null);
gTestWithoutMsidAndBundle = true;
navigator.webkitGetUserMedia({audio:true, video:true},
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
waitForVideo('remote-view-1');
waitForVideo('remote-view-2');
}
@@ -103,8 +103,8 @@
// Do the DTMF test after we have received video.
detectVideoIn('remote-view-2', onCallEstablished);
}
// Test call with a new Video MediaStream that has been created based on a
// stream generated by getUserMedia.
function callWithNewVideoMediaStream() {
createConnections(null);
@@ -114,6 +114,69 @@
waitForVideo('remote-view-2');
}
// Test call with a new Video MediaStream that has been created based on a
// stream generated by getUserMedia. When Video is flowing, an audio track
// is added to the sent stream and the video track is removed. This
// is to test that adding and removing of remote tracks on an existing
// mediastream works.
function callWithNewVideoMediaStreamLaterSwitchToAudio() {
createConnections(null);
navigator.webkitGetUserMedia({audio:true, video:true},
createNewVideoStreamAndAddToBothConnections, printGetUserMediaError);
waitForVideo('remote-view-1');
waitForVideo('remote-view-2');
// Set an event handler for when video is playing.
setAllEventsOccuredHandler(function() {
// Add an audio track to the local stream and remove the video track and
// then renegotiate. But first - setup the expectations.
local_stream = gFirstConnection.getLocalStreams()[0];
remote_stream_1 = gFirstConnection.getRemoteStreams()[0];
// Add an expected event that onaddtrack will be called on the remote
// mediastream received on gFirstConnection when the audio track is
// received.
addExpectedEvent();
remote_stream_1.onaddtrack = function(){
expectEquals(remote_stream_1.getAudioTracks()[0].id,
local_stream.getAudioTracks()[0].id);
eventOccured();
}
// Add an expectation that the received video track is removed from
// gFirstConnection.
addExpectedEvent();
remote_stream_1.onremovetrack = function() {
eventOccured();
}
// Add an expected event that onaddtrack will be called on the remote
// mediastream received on gSecondConnection when the audio track is
// received.
remote_stream_2 = gSecondConnection.getRemoteStreams()[0];
addExpectedEvent();
remote_stream_2.onaddtrack = function() {
expectEquals(remote_stream_2.getAudioTracks()[0].id,
local_stream.getAudioTracks()[0].id);
eventOccured();
}
// Add an expectation that the received video track is removed from
// gSecondConnection.
addExpectedEvent();
remote_stream_2.onremovetrack = function() {
eventOccured();
}
// When all the above events have occurred, the test passes.
setAllEventsOccuredHandler(function() { document.title = 'OK'; });
local_stream.addTrack(gLocalStream.getAudioTracks()[0]);
local_stream.removeTrack(local_stream.getVideoTracks()[0]);
negotiate();
}); // End of setAllEventsOccuredHandler.
}
// This function is used for setting up a test that:
// 1. Creates a data channel on |gFirstConnection| and sends data to
// |gSecondConnection|.
@@ -159,7 +222,7 @@
secondDataChannel.send(sendDataString);
}
}
}
function onToneChange(tone) {
gSentTones += tone.tone;
@@ -207,14 +270,17 @@
gFirstConnection.addStream(localStream);
negotiate();
}
// Called if getUserMedia succeeds when we want to send a modified
// MediaStream. A new MediaStream is created and the video track from
// |localStream| is added.
function createNewVideoStreamAndAddToBothConnections(localStream) {
displayAndRemember(localStream);
var new_stream = new webkitMediaStream();
new_stream.addTrack(localStream.getVideoTracks()[0]);
gFirstConnection.addStream(new_stream);
gSecondConnection.addStream(new_stream);
negotiate();
}
function negotiate() {
@@ -288,7 +354,7 @@
var remoteVideo = $(target);
remoteVideo.src = remoteStreamUrl;
}
</script>
</head>
<body>
......