Commit 4449d976 authored by hubbe's avatar hubbe Committed by Commit bot

Cast: Javascript bindings for cast receiver

This adds javascript bindings for the functions in cast_receiver_session, allowing
a cast_receiver to be initiated from a whitelisted javascript extension.

Also fixes a crashing bug in udp_transport.cc.

Review URL: https://codereview.chromium.org/938903003

Cr-Commit-Position: refs/heads/master@{#319175}
parent 87f25665
......@@ -162,6 +162,10 @@
"dependencies": ["permission:cast.streaming"],
"contexts": ["blessed_extension"]
},
"cast.streaming.receiverSession": {
"dependencies": ["permission:cast.streaming"],
"contexts": ["blessed_extension"]
},
"cast.streaming.session": {
"dependencies": ["permission:cast.streaming"],
"contexts": ["blessed_extension"]
......
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// The <code>chrome.cast.streaming.receiverSession</code> API creates a Cast
// receiver session and adds the resulting audio and video tracks to a
// MediaStream.
namespace cast.streaming.receiverSession {
// The UDP socket address and port.
dictionary IPEndPoint {
DOMString address;
long port;
};
// RTP receiver parameters.
dictionary RtpReceiverParams {
// Maximum latency in milliseconds. This parameter controls the logic
// of flow control. Implementation can adjust latency adaptively and
// tries to keep it under this threshold. A larger value allows smoother
// playback at the cost of higher latency.
long maxLatency;
// Name of the codec used to decode the incoming stream.
DOMString codecName;
// Synchronization source identifier for incoming data.
long senderSsrc;
// The SSRC used to send RTCP reports back to the sender.
long receiverSsrc;
// RTP time units per second, defaults to 48000 for audio
// and 90000 for video.
long? rtpTimebase;
// 32 bytes hex-encoded AES key.
DOMString? aesKey;
// 32 bytes hex-encoded AES IV (Initialization vector) mask.
DOMString? aesIvMask;
};
// Called when a receiver error occurs; |error| describes the failure.
callback ErrorCallback = void (DOMString error);
interface Functions {
// Creates a Cast receiver session which receives data from a UDP
// socket. The receiver will decode the incoming data into an audio
// and a video track which will be added to the provided media stream.
// The |audioParams| and |videoParams| are generally provided by the
// sender through some other messaging channel.
//
// |audioParams| : Audio stream parameters.
// |videoParams| : Video stream parameters.
// |localEndpoint| : Local IP and port to bind to.
// |maxWidth| : Maximum video width.
// |maxHeight| : Maximum video height.
// |maxFrameRate| : Max video frame rate.
// |mediaStreamURL| : URL of MediaStream to add the audio and video to.
// |error_callback| : Invoked with a message if the receiver fails.
// |transport_options| : Optional transport settings.
[nocompile] static void createAndBind(
RtpReceiverParams audioParams,
RtpReceiverParams videoParams,
IPEndPoint localEndpoint,
long maxWidth,
long maxHeight,
double maxFrameRate,
DOMString mediaStreamURL,
ErrorCallback error_callback,
optional object transport_options);
};
};
......@@ -120,6 +120,7 @@
],
'webrtc_schema_files': [
'cast_streaming_receiver_session.idl',
'cast_streaming_rtp_stream.idl',
'cast_streaming_session.idl',
'cast_streaming_udp_transport.idl',
......
include_rules = [
"+media/audio",
]
......@@ -21,6 +21,23 @@ class BinaryValue;
class DictionaryValue;
}
namespace blink {
class WebMediaStream;
}
namespace net {
class IPEndPoint;
}
namespace media {
class AudioCapturerSource;
class AudioParameters;
class VideoCapturerSource;
namespace cast {
struct FrameReceiverConfig;
}
}
namespace extensions {
// Native code that handles chrome.webrtc custom bindings.
......@@ -52,6 +69,8 @@ class CastStreamingNativeHandler : public ObjectBackedNativeHandler {
const v8::FunctionCallbackInfo<v8::Value>& args);
void StopCastUdpTransport(
const v8::FunctionCallbackInfo<v8::Value>& args);
void StartCastRtpReceiver(
const v8::FunctionCallbackInfo<v8::Value>& args);
void ToggleLogging(const v8::FunctionCallbackInfo<v8::Value>& args);
void GetRawEvents(const v8::FunctionCallbackInfo<v8::Value>& args);
......@@ -67,6 +86,20 @@ class CastStreamingNativeHandler : public ObjectBackedNativeHandler {
void CallStopCallback(int stream_id);
void CallErrorCallback(int stream_id, const std::string& message);
// Callback called after a cast receiver has been started. Adds the
// output audio/video streams to the MediaStream specified by |url|.
void AddTracksToMediaStream(
const std::string& url,
const media::AudioParameters& params,
scoped_refptr<media::AudioCapturerSource> audio,
scoped_ptr<media::VideoCapturerSource> video);
// |function| is a javascript function that will take |error_message| as
// an argument. Called when something goes wrong in a cast receiver.
void CallReceiverErrorCallback(
v8::CopyablePersistentTraits<v8::Function>::CopyablePersistent function,
const std::string& error_message);
void CallGetRawEventsCallback(int transport_id,
scoped_ptr<base::BinaryValue> raw_events);
void CallGetStatsCallback(int transport_id,
......@@ -77,6 +110,19 @@ class CastStreamingNativeHandler : public ObjectBackedNativeHandler {
CastRtpStream* GetRtpStreamOrThrow(int stream_id) const;
CastUdpTransport* GetUdpTransportOrThrow(int transport_id) const;
// Fills out a media::cast::FrameReceiverConfig from the v8
// equivalent. (cast.streaming.receiverSession.RtpReceiverParams)
// Returns true if everything was ok, raises a v8 exception and
// returns false if anything went wrong.
bool FrameReceiverConfigFromArg(
v8::Isolate* isolate,
const v8::Handle<v8::Value>& arg,
media::cast::FrameReceiverConfig* config);
bool IPEndPointFromArg(v8::Isolate* isolate,
const v8::Handle<v8::Value>& arg,
net::IPEndPoint* ip_endpoint);
int last_transport_id_;
typedef std::map<int, linked_ptr<CastRtpStream> > RtpStreamMap;
......
......@@ -214,6 +214,9 @@ void ChromeExtensionsDispatcherDelegate::PopulateSourceMap(
source_map->RegisterSource(
"cast.streaming.udpTransport",
IDR_CAST_STREAMING_UDP_TRANSPORT_CUSTOM_BINDINGS_JS);
source_map->RegisterSource(
"cast.streaming.receiverSession",
IDR_CAST_STREAMING_RECEIVER_SESSION_CUSTOM_BINDINGS_JS);
#endif
source_map->RegisterSource("webstore", IDR_WEBSTORE_CUSTOM_BINDINGS_JS);
......
......@@ -8,6 +8,7 @@
#include "chrome/renderer/media/cast_receiver_audio_valve.h"
#include "content/public/renderer/render_thread.h"
#include "media/base/audio_capturer_source.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_capturer_source.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
......@@ -71,7 +72,8 @@ void CastReceiverSession::Start(
const net::IPEndPoint& remote_endpoint,
scoped_ptr<base::DictionaryValue> options,
const media::VideoCaptureFormat& capture_format,
const StartCB& start_callback) {
const StartCB& start_callback,
const CastReceiverSessionDelegate::ErrorCallback& error_callback) {
audio_config_ = audio_config;
video_config_ = video_config;
format_ = capture_format;
......@@ -84,7 +86,8 @@ void CastReceiverSession::Start(
local_endpoint,
remote_endpoint,
base::Passed(&options),
format_));
format_,
media::BindToCurrentLoop(error_callback)));
scoped_refptr<media::AudioCapturerSource> audio(
new CastReceiverSession::AudioCapturerSource(this));
scoped_ptr<media::VideoCapturerSource> video(
......
......@@ -49,7 +49,8 @@ class CastReceiverSession : public base::RefCounted<CastReceiverSession> {
const net::IPEndPoint& remote_endpoint,
scoped_ptr<base::DictionaryValue> options,
const media::VideoCaptureFormat& capture_format,
const StartCB& start_callback);
const StartCB& start_callback,
const CastReceiverSessionDelegate::ErrorCallback& error_callback);
private:
class VideoCapturerSource;
......
......@@ -24,12 +24,14 @@ void CastReceiverSessionDelegate::Start(
const net::IPEndPoint& local_endpoint,
const net::IPEndPoint& remote_endpoint,
scoped_ptr<base::DictionaryValue> options,
const media::VideoCaptureFormat& format) {
const media::VideoCaptureFormat& format,
const ErrorCallback& error_callback) {
format_ = format;
DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
CastSessionDelegateBase::StartUDP(local_endpoint,
remote_endpoint,
options.Pass());
options.Pass(),
error_callback);
cast_receiver_ = media::cast::CastReceiver::Create(cast_environment_,
audio_config,
video_config,
......
......@@ -13,6 +13,8 @@
class CastReceiverSessionDelegate : public CastSessionDelegateBase {
public:
typedef base::Callback<void(const std::string&)> ErrorCallback;
CastReceiverSessionDelegate();
~CastReceiverSessionDelegate() override;
......@@ -26,7 +28,8 @@ class CastReceiverSessionDelegate : public CastSessionDelegateBase {
const net::IPEndPoint& local_endpoint,
const net::IPEndPoint& remote_endpoint,
scoped_ptr<base::DictionaryValue> options,
const media::VideoCaptureFormat& format);
const media::VideoCaptureFormat& format,
const ErrorCallback& error_callback);
void StartAudio(scoped_refptr<CastReceiverAudioValve> audio_valve);
......
......@@ -85,7 +85,8 @@ void CastSession::StartVideo(const media::cast::VideoSenderConfig& config,
}
void CastSession::StartUDP(const net::IPEndPoint& remote_endpoint,
scoped_ptr<base::DictionaryValue> options) {
scoped_ptr<base::DictionaryValue> options,
const ErrorCallback& error_callback) {
io_message_loop_proxy_->PostTask(
FROM_HERE,
base::Bind(
......@@ -93,7 +94,8 @@ void CastSession::StartUDP(const net::IPEndPoint& remote_endpoint,
base::Unretained(delegate_.get()),
net::IPEndPoint(),
remote_endpoint,
base::Passed(&options)));
base::Passed(&options),
media::BindToCurrentLoop(error_callback)));
}
void CastSession::ToggleLogging(bool is_audio, bool enable) {
......
......@@ -71,7 +71,8 @@ class CastSession : public base::RefCounted<CastSession> {
// udp transport.
// Must be called before initialization of audio or video.
void StartUDP(const net::IPEndPoint& remote_endpoint,
scoped_ptr<base::DictionaryValue> options);
scoped_ptr<base::DictionaryValue> options,
const ErrorCallback& error_callback);
// Creates or destroys event subscriber for the audio or video stream.
// |is_audio|: true if the event subscriber is for audio. Video otherwise.
......
......@@ -45,7 +45,8 @@ CastSessionDelegateBase::~CastSessionDelegateBase() {
void CastSessionDelegateBase::StartUDP(
const net::IPEndPoint& local_endpoint,
const net::IPEndPoint& remote_endpoint,
scoped_ptr<base::DictionaryValue> options) {
scoped_ptr<base::DictionaryValue> options,
const ErrorCallback& error_callback) {
DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
// CastSender uses the renderer's IO thread as the main thread. This reduces
......@@ -66,15 +67,30 @@ void CastSessionDelegateBase::StartUDP(
base::Bind(&CastSessionDelegateBase::ReceivePacket,
base::Unretained(this)),
base::Bind(&CastSessionDelegateBase::StatusNotificationCB,
base::Unretained(this)),
base::Unretained(this), error_callback),
base::Bind(&CastSessionDelegateBase::LogRawEvents,
base::Unretained(this))));
}
void CastSessionDelegateBase::StatusNotificationCB(
media::cast::CastTransportStatus unused_status) {
const ErrorCallback& error_callback,
media::cast::CastTransportStatus status) {
DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
// TODO(hubbe): Call javascript UDPTransport error function.
std::string error_message;
switch (status) {
case media::cast::TRANSPORT_AUDIO_UNINITIALIZED:
case media::cast::TRANSPORT_VIDEO_UNINITIALIZED:
case media::cast::TRANSPORT_AUDIO_INITIALIZED:
case media::cast::TRANSPORT_VIDEO_INITIALIZED:
return; // Not errors, do nothing.
case media::cast::TRANSPORT_INVALID_CRYPTO_CONFIG:
error_callback.Run("Invalid encrypt/decrypt configuration.");
break;
case media::cast::TRANSPORT_SOCKET_ERROR:
error_callback.Run("Socket error.");
break;
}
}
CastSessionDelegate::CastSessionDelegate()
......@@ -132,11 +148,13 @@ void CastSessionDelegate::StartVideo(
void CastSessionDelegate::StartUDP(
const net::IPEndPoint& local_endpoint,
const net::IPEndPoint& remote_endpoint,
scoped_ptr<base::DictionaryValue> options) {
scoped_ptr<base::DictionaryValue> options,
const ErrorCallback& error_callback) {
DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
CastSessionDelegateBase::StartUDP(local_endpoint,
remote_endpoint,
options.Pass());
options.Pass(),
error_callback);
event_subscribers_.reset(
new media::cast::RawEventSubscriberBundle(cast_environment_));
......
......@@ -43,6 +43,8 @@ class CastTransportSender;
// CastReceiverSessionDelegate.
class CastSessionDelegateBase {
public:
typedef base::Callback<void(const std::string&)> ErrorCallback;
CastSessionDelegateBase();
virtual ~CastSessionDelegateBase();
......@@ -51,10 +53,12 @@ class CastSessionDelegateBase {
// Must be called before initialization of audio or video.
void StartUDP(const net::IPEndPoint& local_endpoint,
const net::IPEndPoint& remote_endpoint,
scoped_ptr<base::DictionaryValue> options);
scoped_ptr<base::DictionaryValue> options,
const ErrorCallback& error_callback);
protected:
void StatusNotificationCB(
const ErrorCallback& error_callback,
media::cast::CastTransportStatus status);
virtual void ReceivePacket(scoped_ptr<media::cast::Packet> packet) = 0;
......@@ -85,14 +89,14 @@ class CastSessionDelegate : public CastSessionDelegateBase {
media::cast::VideoFrameInput>&)> VideoFrameInputAvailableCallback;
typedef base::Callback<void(scoped_ptr<base::BinaryValue>)> EventLogsCallback;
typedef base::Callback<void(scoped_ptr<base::DictionaryValue>)> StatsCallback;
typedef base::Callback<void(const std::string&)> ErrorCallback;
CastSessionDelegate();
~CastSessionDelegate() override;
void StartUDP(const net::IPEndPoint& local_endpoint,
const net::IPEndPoint& remote_endpoint,
scoped_ptr<base::DictionaryValue> options);
scoped_ptr<base::DictionaryValue> options,
const ErrorCallback& error_callback);
// After calling StartAudio() or StartVideo() encoding of that media will
// begin as soon as data is delivered to its sink, if the second method is
......
......@@ -17,12 +17,15 @@ CastUdpTransport::CastUdpTransport(
CastUdpTransport::~CastUdpTransport() {
}
void CastUdpTransport::SetDestination(const net::IPEndPoint& remote_address) {
void CastUdpTransport::SetDestination(
const net::IPEndPoint& remote_address,
const CastSessionDelegate::ErrorCallback& error_callback) {
DVLOG(1) << "CastUdpTransport::SetDestination = "
<< remote_address.ToString();
remote_address_ = remote_address;
cast_session_->StartUDP(remote_address,
make_scoped_ptr(options_->DeepCopy()));
make_scoped_ptr(options_->DeepCopy()),
error_callback);
}
void CastUdpTransport::SetOptions(scoped_ptr<base::DictionaryValue> options) {
......
......@@ -8,6 +8,7 @@
#include "base/basictypes.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "chrome/renderer/media/cast_session_delegate.h"
#include "net/base/ip_endpoint.h"
namespace base {
......@@ -25,7 +26,8 @@ class CastUdpTransport {
virtual ~CastUdpTransport();
// Specify the remote IP address and port.
void SetDestination(const net::IPEndPoint& remote_address);
void SetDestination(const net::IPEndPoint& remote_address,
const CastSessionDelegate::ErrorCallback& error_callback);
// Set options.
void SetOptions(scoped_ptr<base::DictionaryValue> options);
......
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Custom binding for the Cast Streaming ReceiverSession API.
// Forwards createAndBind() calls to the native StartCastRtpReceiver handler.
var binding = require('binding').Binding.create(
    'cast.streaming.receiverSession');
var natives = requireNative('cast_streaming_natives');
binding.registerCustomHook(function(bindingsAPI, extensionId) {
  var apiFunctions = bindingsAPI.apiFunctions;
  // Parameter order must match the IDL definition of createAndBind().
  // (Fixes the misspelled "weidgth" parameter and opaque one-letter names.)
  apiFunctions.setHandleRequest('createAndBind',
      function(audioParams, videoParams, localEndpoint, maxWidth, maxHeight,
               maxFrameRate, mediaStreamUrl, errorCallback, transportOptions) {
        natives.StartCastRtpReceiver(
            audioParams, videoParams, localEndpoint, maxWidth, maxHeight,
            maxFrameRate, mediaStreamUrl, errorCallback, transportOptions);
      });
});
exports.binding = binding.generate();
......@@ -39,6 +39,7 @@
<include name="IDR_CAST_STREAMING_RTP_STREAM_CUSTOM_BINDINGS_JS" file="extensions\cast_streaming_rtp_stream_custom_bindings.js" type="BINDATA" />
<include name="IDR_CAST_STREAMING_SESSION_CUSTOM_BINDINGS_JS" file="extensions\cast_streaming_session_custom_bindings.js" type="BINDATA" />
<include name="IDR_CAST_STREAMING_UDP_TRANSPORT_CUSTOM_BINDINGS_JS" file="extensions\cast_streaming_udp_transport_custom_bindings.js" type="BINDATA" />
<include name="IDR_CAST_STREAMING_RECEIVER_SESSION_CUSTOM_BINDINGS_JS" file="extensions\cast_streaming_receiver_session_custom_bindings.js" type="BINDATA" />
<include name="IDR_CHROME_DIRECT_SETTING_JS"
file="extensions\chrome_direct_setting.js" type="BINDATA" />
<include name="IDR_CHROME_SETTING_JS" file="extensions\chrome_setting.js" type="BINDATA" />
......
......@@ -588,6 +588,8 @@
'renderer/render_widget_fullscreen_pepper.h',
],
'public_renderer_webrtc_sources': [
'public/renderer/media_stream_api.cc',
'public/renderer/media_stream_api.h',
'public/renderer/media_stream_audio_sink.cc',
'public/renderer/media_stream_audio_sink.h',
'public/renderer/media_stream_sink.h',
......
......@@ -49,6 +49,7 @@ source_set("renderer_sources") {
rebase_path(content_renderer_gypi_values.public_renderer_webrtc_sources,
".",
"//content")
deps += [ "//third_party/webrtc" ]
}
if (enable_plugins) {
......
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/public/renderer/media_stream_api.h"
#include "base/base64.h"
#include "base/callback.h"
#include "base/rand_util.h"
#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream_audio_source.h"
#include "content/renderer/media/media_stream_video_capturer_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "media/base/audio_capturer_source.h"
#include "media/base/video_capturer_source.h"
#include "third_party/WebKit/public/platform/WebMediaDeviceInfo.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebMediaStreamRegistry.h"
#include "url/gurl.h"
namespace content {
namespace {
// Builds a random identifier for a new media track: 64 random bytes,
// base64-encoded, converted to a UTF-16 WebString.
blink::WebString MakeTrackId() {
  const std::string random_bytes = base::RandBytesAsString(64);
  std::string encoded_id;
  base::Base64Encode(random_bytes, &encoded_id);
  return base::UTF8ToUTF16(encoded_id);
}
} // namespace
// Adds a video track backed by |source| to the MediaStream registered at
// |media_stream_url|. Logs and returns false if no stream is registered
// under that URL; returns true once the track has been added.
bool AddVideoTrackToMediaStream(
scoped_ptr<media::VideoCapturerSource> source,
bool is_remote,
bool is_readonly,
const std::string& media_stream_url) {
// Look up the WebMediaStream previously registered under this URL.
blink::WebMediaStream stream =
blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(
GURL(media_stream_url));
if (stream.isNull()) {
LOG(ERROR) << "Stream not found";
return false;
}
// The same random base64 id is used as both source id and name below.
blink::WebString track_id = MakeTrackId();
blink::WebMediaStreamSource webkit_source;
scoped_ptr<MediaStreamVideoSource> media_stream_source(
new MediaStreamVideoCapturerSource(
MediaStreamSource::SourceStoppedCallback(),
source.Pass()));
webkit_source.initialize(
track_id,
blink::WebMediaStreamSource::TypeVideo,
track_id,
is_remote,
is_readonly);
// NOTE(review): setExtraData() gets the raw pointer while release() below
// passes ownership to CreateVideoTrack — presumably the created track owns
// the source for the lifetime of |webkit_source|; confirm against the
// MediaStreamVideoTrack contract.
webkit_source.setExtraData(media_stream_source.get());
blink::WebMediaConstraints constraints;
constraints.initialize();
// Final |true| argument — presumably "track enabled"; TODO confirm.
stream.addTrack(MediaStreamVideoTrack::CreateVideoTrack(
media_stream_source.release(),
constraints,
MediaStreamVideoSource::ConstraintsCallback(),
true));
return true;
}
// Adds an audio track backed by |source| (producing data in format |params|)
// to the MediaStream registered at |media_stream_url|. Logs and returns
// false if no stream is registered under that URL; returns true once the
// track has been added.
bool AddAudioTrackToMediaStream(
scoped_refptr<media::AudioCapturerSource> source,
const media::AudioParameters& params,
bool is_remote,
bool is_readonly,
const std::string& media_stream_url) {
DCHECK(params.IsValid()) << params.AsHumanReadableString();
// Look up the WebMediaStream previously registered under this URL.
blink::WebMediaStream stream =
blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(
GURL(media_stream_url));
if (stream.isNull()) {
LOG(ERROR) << "Stream not found";
return false;
}
blink::WebMediaStreamSource webkit_source;
// The same random base64 id is used as both source id and name below.
blink::WebString track_id = MakeTrackId();
webkit_source.initialize(
track_id,
blink::WebMediaStreamSource::TypeAudio,
track_id,
is_remote,
is_readonly);
// -1 and a default StreamDeviceInfo mark this as not tied to a real
// capture device — TODO confirm these sentinel values' meaning.
MediaStreamAudioSource* audio_source(
new MediaStreamAudioSource(
-1,
StreamDeviceInfo(),
MediaStreamSource::SourceStoppedCallback(),
RenderThreadImpl::current()->GetPeerConnectionDependencyFactory()));
blink::WebMediaConstraints constraints;
constraints.initialize();
scoped_refptr<WebRtcAudioCapturer> capturer(
WebRtcAudioCapturer::CreateCapturer(
-1,
StreamDeviceInfo(),
constraints,
nullptr,
audio_source));
// Route data from |source| through the capturer into the audio source.
capturer->SetCapturerSource(source, params);
audio_source->SetAudioCapturer(capturer);
// NOTE(review): |audio_source| is a raw pointer handed to setExtraData() —
// presumably |webkit_source| takes ownership; confirm, else this leaks.
webkit_source.setExtraData(audio_source);
blink::WebMediaStreamTrack web_media_audio_track;
web_media_audio_track.initialize(webkit_source);
RenderThreadImpl::current()->GetPeerConnectionDependencyFactory()->
CreateLocalAudioTrack(web_media_audio_track);
stream.addTrack(web_media_audio_track);
return true;
}
} // namespace content
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CONTENT_PUBLIC_RENDERER_MEDIA_STREAM_API_H_
#define CONTENT_PUBLIC_RENDERER_MEDIA_STREAM_API_H_

#include <string>

#include "content/common/content_export.h"
#include "media/base/audio_capturer_source.h"
#include "media/base/video_capturer_source.h"

namespace blink {
class WebMediaStreamSource;
}

// Forward declaration must live in lowercase "media" — the original
// "namespace Media" declared an unrelated Media::AudioParameters and never
// matched the media::AudioParameters used in the signatures below.
namespace media {
class AudioParameters;
}

namespace content {

// These two methods will initialize a WebMediaStreamSource object to take
// data from the provided audio or video capturer source.
// |is_remote| should be true if the source of the data is not a local device.
// |is_readonly| should be true if the format of the data cannot be changed by
// MediaTrackConstraints.
// Both return true on success and false when no MediaStream is registered
// under |media_stream_url|.
CONTENT_EXPORT bool AddVideoTrackToMediaStream(
    scoped_ptr<media::VideoCapturerSource> source,
    bool is_remote,
    bool is_readonly,
    const std::string& media_stream_url);

CONTENT_EXPORT bool AddAudioTrackToMediaStream(
    scoped_refptr<media::AudioCapturerSource> source,
    const media::AudioParameters& params,
    bool is_remote,
    bool is_readonly,
    const std::string& media_stream_url);

}  // namespace content

#endif  // CONTENT_PUBLIC_RENDERER_MEDIA_STREAM_API_H_
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment