Commit f4b0da13 authored by mikhal@google.com's avatar mikhal@google.com

Cast: Refactoring Cast APIs

Main changes:
1. Adding InitializeAudio and InitializeVideo to CastSender.
2. Separating FrameInput to audio and video which enables initializing each separately.
3. Changing the CastSender and CastReceiver Create functions to return a scoped_ptr.

These changes better align Cast with the Chromium pipeline.

BUG=346822
R=hclam@chromium.org

Review URL: https://codereview.chromium.org/163553006

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@255954 0039d316-1c4b-4281-b951-d872f2087c98
parent 232f3511
...@@ -155,7 +155,8 @@ class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>, ...@@ -155,7 +155,8 @@ class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
// Attach this sink to MediaStreamTrack. This method call must // Attach this sink to MediaStreamTrack. This method call must
// be made on the render thread. Incoming data can then be // be made on the render thread. Incoming data can then be
// passed to media::cast::FrameInput on any thread. // passed to media::cast::FrameInput on any thread.
void AddToTrack(const scoped_refptr<media::cast::FrameInput>& frame_input) { void AddToTrack(
const scoped_refptr<media::cast::VideoFrameInput>& frame_input) {
DCHECK(render_thread_task_runner_->BelongsToCurrentThread()); DCHECK(render_thread_task_runner_->BelongsToCurrentThread());
frame_input_ = frame_input; frame_input_ = frame_input;
...@@ -167,7 +168,7 @@ class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>, ...@@ -167,7 +168,7 @@ class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
private: private:
blink::WebMediaStreamTrack track_; blink::WebMediaStreamTrack track_;
scoped_refptr<media::cast::FrameInput> frame_input_; scoped_refptr<media::cast::VideoFrameInput> frame_input_;
bool sink_added_; bool sink_added_;
CastRtpStream::ErrorCallback error_callback_; CastRtpStream::ErrorCallback error_callback_;
scoped_refptr<base::SingleThreadTaskRunner> render_thread_task_runner_; scoped_refptr<base::SingleThreadTaskRunner> render_thread_task_runner_;
...@@ -301,7 +302,8 @@ class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>, ...@@ -301,7 +302,8 @@ class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
} }
// See CastVideoSink for details. // See CastVideoSink for details.
void AddToTrack(const scoped_refptr<media::cast::FrameInput>& frame_input) { void AddToTrack(
const scoped_refptr<media::cast::AudioFrameInput>& frame_input) {
DCHECK(render_thread_task_runner_->BelongsToCurrentThread()); DCHECK(render_thread_task_runner_->BelongsToCurrentThread());
frame_input_ = frame_input; frame_input_ = frame_input;
if (!sink_added_) { if (!sink_added_) {
...@@ -316,7 +318,7 @@ class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>, ...@@ -316,7 +318,7 @@ class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
private: private:
blink::WebMediaStreamTrack track_; blink::WebMediaStreamTrack track_;
scoped_refptr<media::cast::FrameInput> frame_input_; scoped_refptr<media::cast::AudioFrameInput> frame_input_;
bool sink_added_; bool sink_added_;
CastRtpStream::ErrorCallback error_callback_; CastRtpStream::ErrorCallback error_callback_;
base::WeakPtrFactory<CastAudioSink> weak_factory_; base::WeakPtrFactory<CastAudioSink> weak_factory_;
...@@ -348,26 +350,19 @@ CastRtpPayloadParams::CastRtpPayloadParams() ...@@ -348,26 +350,19 @@ CastRtpPayloadParams::CastRtpPayloadParams()
min_bitrate(0), min_bitrate(0),
channels(0), channels(0),
width(0), width(0),
height(0) { height(0) {}
}
CastRtpPayloadParams::~CastRtpPayloadParams() { CastRtpPayloadParams::~CastRtpPayloadParams() {}
}
CastRtpParams::CastRtpParams() { CastRtpParams::CastRtpParams() {}
}
CastRtpParams::~CastRtpParams() { CastRtpParams::~CastRtpParams() {}
}
CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track, CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track,
const scoped_refptr<CastSession>& session) const scoped_refptr<CastSession>& session)
: track_(track), : track_(track), cast_session_(session), weak_factory_(this) {}
cast_session_(session),
weak_factory_(this) {}
CastRtpStream::~CastRtpStream() { CastRtpStream::~CastRtpStream() {}
}
std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() { std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() {
if (IsAudio()) if (IsAudio())
...@@ -376,9 +371,7 @@ std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() { ...@@ -376,9 +371,7 @@ std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() {
return SupportedVideoParams(); return SupportedVideoParams();
} }
CastRtpParams CastRtpStream::GetParams() { CastRtpParams CastRtpStream::GetParams() { return params_; }
return params_;
}
void CastRtpStream::Start(const CastRtpParams& params, void CastRtpStream::Start(const CastRtpParams& params,
const base::Closure& start_callback, const base::Closure& start_callback,
...@@ -404,8 +397,7 @@ void CastRtpStream::Start(const CastRtpParams& params, ...@@ -404,8 +397,7 @@ void CastRtpStream::Start(const CastRtpParams& params,
params.payload.clock_rate)); params.payload.clock_rate));
cast_session_->StartAudio( cast_session_->StartAudio(
config, config,
base::Bind(&CastAudioSink::AddToTrack, base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr()));
audio_sink_->AsWeakPtr()));
start_callback.Run(); start_callback.Run();
} else { } else {
VideoSenderConfig config; VideoSenderConfig config;
...@@ -420,8 +412,7 @@ void CastRtpStream::Start(const CastRtpParams& params, ...@@ -420,8 +412,7 @@ void CastRtpStream::Start(const CastRtpParams& params,
weak_factory_.GetWeakPtr())))); weak_factory_.GetWeakPtr()))));
cast_session_->StartVideo( cast_session_->StartVideo(
config, config,
base::Bind(&CastVideoSink::AddToTrack, base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr()));
video_sink_->AsWeakPtr()));
start_callback.Run(); start_callback.Run();
} }
} }
......
...@@ -24,7 +24,7 @@ CastSession::~CastSession() { ...@@ -24,7 +24,7 @@ CastSession::~CastSession() {
} }
void CastSession::StartAudio(const media::cast::AudioSenderConfig& config, void CastSession::StartAudio(const media::cast::AudioSenderConfig& config,
const FrameInputAvailableCallback& callback) { const AudioFrameInputAvailableCallback& callback) {
DCHECK(content::RenderThread::Get() DCHECK(content::RenderThread::Get()
->GetMessageLoop() ->GetMessageLoop()
->message_loop_proxy() ->message_loop_proxy()
...@@ -39,7 +39,7 @@ void CastSession::StartAudio(const media::cast::AudioSenderConfig& config, ...@@ -39,7 +39,7 @@ void CastSession::StartAudio(const media::cast::AudioSenderConfig& config,
} }
void CastSession::StartVideo(const media::cast::VideoSenderConfig& config, void CastSession::StartVideo(const media::cast::VideoSenderConfig& config,
const FrameInputAvailableCallback& callback) { const VideoFrameInputAvailableCallback& callback) {
DCHECK(content::RenderThread::Get() DCHECK(content::RenderThread::Get()
->GetMessageLoop() ->GetMessageLoop()
->message_loop_proxy() ->message_loop_proxy()
......
...@@ -21,7 +21,8 @@ class MessageLoopProxy; ...@@ -21,7 +21,8 @@ class MessageLoopProxy;
namespace media { namespace media {
class VideoFrame; class VideoFrame;
namespace cast { namespace cast {
class FrameInput; class AudioFrameInput;
class VideoFrameInput;
struct AudioSenderConfig; struct AudioSenderConfig;
struct VideoSenderConfig; struct VideoSenderConfig;
} // namespace cast } // namespace cast
...@@ -38,9 +39,10 @@ class CastSessionDelegate; ...@@ -38,9 +39,10 @@ class CastSessionDelegate;
// CastSessionDelegate on the IO thread. // CastSessionDelegate on the IO thread.
class CastSession : public base::RefCounted<CastSession> { class CastSession : public base::RefCounted<CastSession> {
public: public:
typedef typedef base::Callback<void(const scoped_refptr<
base::Callback<void(const scoped_refptr<media::cast::FrameInput>&)> media::cast::AudioFrameInput>&)> AudioFrameInputAvailableCallback;
FrameInputAvailableCallback; typedef base::Callback<void(const scoped_refptr<
media::cast::VideoFrameInput>&)> VideoFrameInputAvailableCallback;
typedef base::Callback<void(const std::vector<char>&)> SendPacketCallback; typedef base::Callback<void(const std::vector<char>&)> SendPacketCallback;
typedef base::Callback<void(scoped_ptr<std::string>)> EventLogsCallback; typedef base::Callback<void(scoped_ptr<std::string>)> EventLogsCallback;
typedef base::Callback<void(scoped_ptr<base::DictionaryValue>)> StatsCallback; typedef base::Callback<void(scoped_ptr<base::DictionaryValue>)> StatsCallback;
...@@ -53,9 +55,9 @@ class CastSession : public base::RefCounted<CastSession> { ...@@ -53,9 +55,9 @@ class CastSession : public base::RefCounted<CastSession> {
// media::cast::FrameInput will be given through the callback. The // media::cast::FrameInput will be given through the callback. The
// callback will be made on the main thread. // callback will be made on the main thread.
void StartAudio(const media::cast::AudioSenderConfig& config, void StartAudio(const media::cast::AudioSenderConfig& config,
const FrameInputAvailableCallback& callback); const AudioFrameInputAvailableCallback& callback);
void StartVideo(const media::cast::VideoSenderConfig& config, void StartVideo(const media::cast::VideoSenderConfig& config,
const FrameInputAvailableCallback& callback); const VideoFrameInputAvailableCallback& callback);
void StartUDP(const net::IPEndPoint& local_endpoint, void StartUDP(const net::IPEndPoint& local_endpoint,
const net::IPEndPoint& remote_endpoint); const net::IPEndPoint& remote_endpoint);
......
...@@ -43,8 +43,7 @@ const int kMaxAudioEventEntries = kMaxSerializedBytes / 75; ...@@ -43,8 +43,7 @@ const int kMaxAudioEventEntries = kMaxSerializedBytes / 75;
} // namespace } // namespace
CastSessionDelegate::CastSessionDelegate() CastSessionDelegate::CastSessionDelegate()
: transport_configured_(false), : io_message_loop_proxy_(
io_message_loop_proxy_(
content::RenderThread::Get()->GetIOMessageLoopProxy()), content::RenderThread::Get()->GetIOMessageLoopProxy()),
weak_factory_(this) { weak_factory_(this) {
DCHECK(io_message_loop_proxy_); DCHECK(io_message_loop_proxy_);
...@@ -63,56 +62,80 @@ CastSessionDelegate::~CastSessionDelegate() { ...@@ -63,56 +62,80 @@ CastSessionDelegate::~CastSessionDelegate() {
} }
} }
void CastSessionDelegate::Initialize(
const media::cast::CastLoggingConfig& logging_config) {
if (cast_environment_)
return; // Already initialized.
// CastSender uses the renderer's IO thread as the main thread. This reduces
// thread hopping for incoming video frames and outgoing network packets.
// There's no need to decode so no thread assigned for decoding.
cast_environment_ = new CastEnvironment(
scoped_ptr<base::TickClock>(new base::DefaultTickClock()).Pass(),
base::MessageLoopProxy::current(),
g_cast_threads.Get().GetAudioEncodeMessageLoopProxy(),
NULL,
g_cast_threads.Get().GetVideoEncodeMessageLoopProxy(),
NULL,
base::MessageLoopProxy::current(),
logging_config);
}
void CastSessionDelegate::StartAudio( void CastSessionDelegate::StartAudio(
const AudioSenderConfig& config, const AudioSenderConfig& config,
const FrameInputAvailableCallback& callback) { const AudioFrameInputAvailableCallback& callback) {
DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
audio_config_.reset(new AudioSenderConfig(config)); audio_frame_input_available_callback_ = callback;
video_frame_input_available_callback_ = callback; media::cast::transport::CastTransportAudioConfig transport_config;
StartSendingInternal(); transport_config.base.ssrc = config.sender_ssrc;
transport_config.codec = config.codec;
transport_config.base.rtp_config = config.rtp_config;
transport_config.frequency = config.frequency;
transport_config.channels = config.channels;
cast_transport_->InitializeAudio(transport_config);
cast_sender_->InitializeAudio(
config,
base::Bind(&CastSessionDelegate::InitializationResult,
weak_factory_.GetWeakPtr()));
} }
void CastSessionDelegate::StartVideo( void CastSessionDelegate::StartVideo(
const VideoSenderConfig& config, const VideoSenderConfig& config,
const FrameInputAvailableCallback& callback) { const VideoFrameInputAvailableCallback& callback) {
DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
audio_frame_input_available_callback_ = callback; video_frame_input_available_callback_ = callback;
video_config_.reset(new VideoSenderConfig(config)); media::cast::transport::CastTransportVideoConfig transport_config;
StartSendingInternal(); transport_config.base.ssrc = config.sender_ssrc;
transport_config.codec = config.codec;
transport_config.base.rtp_config = config.rtp_config;
cast_transport_->InitializeVideo(transport_config);
// TODO(mikhal): Pass in a valid GpuVideoAcceleratorFactories to support
// hardware video encoding.
cast_sender_->InitializeVideo(
config,
base::Bind(&CastSessionDelegate::InitializationResult,
weak_factory_.GetWeakPtr()),
NULL /* GPU*/);
} }
void CastSessionDelegate::StartUDP(const net::IPEndPoint& local_endpoint, void CastSessionDelegate::StartUDP(const net::IPEndPoint& local_endpoint,
const net::IPEndPoint& remote_endpoint) { const net::IPEndPoint& remote_endpoint) {
DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
transport_configured_ = true;
local_endpoint_ = local_endpoint; // CastSender uses the renderer's IO thread as the main thread. This reduces
remote_endpoint_ = remote_endpoint; // thread hopping for incoming video frames and outgoing network packets.
StartSendingInternal(); // There's no need to decode so no thread assigned for decoding.
cast_environment_ = new CastEnvironment(
scoped_ptr<base::TickClock>(new base::DefaultTickClock()).Pass(),
base::MessageLoopProxy::current(),
g_cast_threads.Get().GetAudioEncodeMessageLoopProxy(),
NULL,
g_cast_threads.Get().GetVideoEncodeMessageLoopProxy(),
NULL,
base::MessageLoopProxy::current(),
media::cast::GetLoggingConfigWithRawEventsAndStatsEnabled());
// Logging: enable raw events and stats collection.
media::cast::CastLoggingConfig logging_config =
media::cast::GetLoggingConfigWithRawEventsAndStatsEnabled();
// Rationale for using unretained: The callback cannot be called after the
// destruction of CastTransportSenderIPC, and they both share the same thread.
cast_transport_.reset(new CastTransportSenderIPC(
local_endpoint,
remote_endpoint,
base::Bind(&CastSessionDelegate::StatusNotificationCB,
base::Unretained(this)),
logging_config,
base::Bind(&CastSessionDelegate::LogRawEvents, base::Unretained(this))));
cast_sender_ = CastSender::Create(cast_environment_, cast_transport_.get());
cast_transport_->SetPacketReceiver(cast_sender_->packet_receiver());
} }
void CastSessionDelegate::ToggleLogging(bool is_audio, void CastSessionDelegate::ToggleLogging(bool is_audio, bool enable) {
bool enable) {
DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
if (enable) { if (enable) {
if (is_audio) { if (is_audio) {
...@@ -148,11 +171,12 @@ void CastSessionDelegate::ToggleLogging(bool is_audio, ...@@ -148,11 +171,12 @@ void CastSessionDelegate::ToggleLogging(bool is_audio,
} }
void CastSessionDelegate::GetEventLogsAndReset( void CastSessionDelegate::GetEventLogsAndReset(
bool is_audio, const EventLogsCallback& callback) { bool is_audio,
const EventLogsCallback& callback) {
DCHECK(io_message_loop_proxy_->BelongsToCurrentThread()); DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
media::cast::EncodingEventSubscriber* subscriber = is_audio ? media::cast::EncodingEventSubscriber* subscriber =
audio_event_subscriber_.get() : video_event_subscriber_.get(); is_audio ? audio_event_subscriber_.get() : video_event_subscriber_.get();
if (!subscriber) { if (!subscriber) {
callback.Run(make_scoped_ptr(new std::string).Pass()); callback.Run(make_scoped_ptr(new std::string).Pass());
return; return;
...@@ -202,73 +226,20 @@ void CastSessionDelegate::StatusNotificationCB( ...@@ -202,73 +226,20 @@ void CastSessionDelegate::StatusNotificationCB(
// TODO(hubbe): Call javascript UDPTransport error function. // TODO(hubbe): Call javascript UDPTransport error function.
} }
void CastSessionDelegate::StartSendingInternal() {
DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
// No transport, wait.
if (!transport_configured_)
return;
// No audio or video, wait.
if (!audio_config_ || !video_config_)
return;
// Logging: enable raw events and stats collection.
media::cast::CastLoggingConfig logging_config =
media::cast::GetLoggingConfigWithRawEventsAndStatsEnabled();
Initialize(logging_config);
// Rationale for using unretained: The callback cannot be called after the
// destruction of CastTransportSenderIPC, and they both share the same thread.
cast_transport_.reset(new CastTransportSenderIPC(
local_endpoint_,
remote_endpoint_,
base::Bind(&CastSessionDelegate::StatusNotificationCB,
base::Unretained(this)),
logging_config,
base::Bind(&CastSessionDelegate::LogRawEvents,
base::Unretained(this))));
// TODO(hubbe): set config.aes_key and config.aes_iv_mask.
if (audio_config_) {
media::cast::transport::CastTransportAudioConfig config;
config.base.ssrc = audio_config_->sender_ssrc;
config.codec = audio_config_->codec;
config.base.rtp_config = audio_config_->rtp_config;
config.frequency = audio_config_->frequency;
config.channels = audio_config_->channels;
cast_transport_->InitializeAudio(config);
}
if (video_config_) {
media::cast::transport::CastTransportVideoConfig config;
config.base.ssrc = video_config_->sender_ssrc;
config.codec = video_config_->codec;
config.base.rtp_config = video_config_->rtp_config;
cast_transport_->InitializeVideo(config);
}
cast_sender_.reset(CastSender::CreateCastSender(
cast_environment_,
audio_config_.get(),
video_config_.get(),
NULL, // GPU.
base::Bind(&CastSessionDelegate::InitializationResult,
weak_factory_.GetWeakPtr()),
cast_transport_.get()));
cast_transport_->SetPacketReceiver(cast_sender_->packet_receiver());
}
void CastSessionDelegate::InitializationResult( void CastSessionDelegate::InitializationResult(
media::cast::CastInitializationStatus result) const { media::cast::CastInitializationStatus result) const {
DCHECK(cast_sender_); DCHECK(cast_sender_);
// TODO(pwestin): handle the error codes. // TODO(pwestin): handle the error codes.
if (result == media::cast::STATUS_INITIALIZED) { if (result == media::cast::STATUS_AUDIO_INITIALIZED) {
if (!audio_frame_input_available_callback_.is_null()) { if (!audio_frame_input_available_callback_.is_null()) {
audio_frame_input_available_callback_.Run(cast_sender_->frame_input()); audio_frame_input_available_callback_.Run(
cast_sender_->audio_frame_input());
} }
} else if (result == media::cast::STATUS_VIDEO_INITIALIZED) {
if (!video_frame_input_available_callback_.is_null()) { if (!video_frame_input_available_callback_.is_null()) {
video_frame_input_available_callback_.Run(cast_sender_->frame_input()); video_frame_input_available_callback_.Run(
cast_sender_->video_frame_input());
} }
} }
} }
......
...@@ -44,25 +44,32 @@ class CastTransportSender; ...@@ -44,25 +44,32 @@ class CastTransportSender;
// thread. All methods are accessible only on the IO thread. // thread. All methods are accessible only on the IO thread.
class CastSessionDelegate { class CastSessionDelegate {
public: public:
typedef base::Callback<void(const scoped_refptr<media::cast::FrameInput>&)> typedef base::Callback<void(const scoped_refptr<
FrameInputAvailableCallback; media::cast::AudioFrameInput>&)> AudioFrameInputAvailableCallback;
typedef base::Callback<void(const scoped_refptr<
media::cast::VideoFrameInput>&)> VideoFrameInputAvailableCallback;
typedef base::Callback<void(scoped_ptr<std::string>)> EventLogsCallback; typedef base::Callback<void(scoped_ptr<std::string>)> EventLogsCallback;
typedef base::Callback<void(scoped_ptr<base::DictionaryValue>)> StatsCallback; typedef base::Callback<void(scoped_ptr<base::DictionaryValue>)> StatsCallback;
CastSessionDelegate(); CastSessionDelegate();
virtual ~CastSessionDelegate(); virtual ~CastSessionDelegate();
// This will start the session by configuring and creating the Cast transport
// and the Cast sender.
// Must be called before initialization of audio or video.
void StartUDP(const net::IPEndPoint& local_endpoint,
const net::IPEndPoint& remote_endpoint);
// After calling StartAudio() or StartVideo() encoding of that media will // After calling StartAudio() or StartVideo() encoding of that media will
// begin as soon as data is delivered to its sink, if the second method is // begin as soon as data is delivered to its sink, if the second method is
// called the first media will be restarted. It is strongly recommended not to // called the first media will be restarted. It is strongly recommended not to
// deliver any data between calling the two methods. // deliver any data between calling the two methods.
// It's OK to call only one of the two methods. // It's OK to call only one of the two methods.
// StartUDP must be called before these methods.
void StartAudio(const media::cast::AudioSenderConfig& config, void StartAudio(const media::cast::AudioSenderConfig& config,
const FrameInputAvailableCallback& callback); const AudioFrameInputAvailableCallback& callback);
void StartVideo(const media::cast::VideoSenderConfig& config, void StartVideo(const media::cast::VideoSenderConfig& config,
const FrameInputAvailableCallback& callback); const VideoFrameInputAvailableCallback& callback);
void StartUDP(const net::IPEndPoint& local_endpoint,
const net::IPEndPoint& remote_endpoint);
void ToggleLogging(bool is_audio, bool enable); void ToggleLogging(bool is_audio, bool enable);
void GetEventLogsAndReset(bool is_audio, const EventLogsCallback& callback); void GetEventLogsAndReset(bool is_audio, const EventLogsCallback& callback);
...@@ -75,13 +82,6 @@ class CastSessionDelegate { ...@@ -75,13 +82,6 @@ class CastSessionDelegate {
void InitializationResult(media::cast::CastInitializationStatus result) const; void InitializationResult(media::cast::CastInitializationStatus result) const;
private: private:
// Start encoding threads and initialize the CastEnvironment.
void Initialize(const media::cast::CastLoggingConfig& logging_config);
// Configure CastSender. It is ready to accept audio / video frames after
// receiving a successful call to InitializationResult.
void StartSendingInternal();
void StatusNotificationCB( void StatusNotificationCB(
media::cast::transport::CastTransportStatus status); media::cast::transport::CastTransportStatus status);
...@@ -93,16 +93,8 @@ class CastSessionDelegate { ...@@ -93,16 +93,8 @@ class CastSessionDelegate {
scoped_ptr<media::cast::CastSender> cast_sender_; scoped_ptr<media::cast::CastSender> cast_sender_;
scoped_ptr<media::cast::transport::CastTransportSender> cast_transport_; scoped_ptr<media::cast::transport::CastTransportSender> cast_transport_;
// Configuration for audio and video. AudioFrameInputAvailableCallback audio_frame_input_available_callback_;
scoped_ptr<media::cast::AudioSenderConfig> audio_config_; VideoFrameInputAvailableCallback video_frame_input_available_callback_;
scoped_ptr<media::cast::VideoSenderConfig> video_config_;
FrameInputAvailableCallback audio_frame_input_available_callback_;
FrameInputAvailableCallback video_frame_input_available_callback_;
net::IPEndPoint local_endpoint_;
net::IPEndPoint remote_endpoint_;
bool transport_configured_;
scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber_; scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber_;
scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber_; scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber_;
......
...@@ -52,16 +52,16 @@ class AudioEncoder::ImplBase : public base::SupportsWeakPtr<ImplBase> { ...@@ -52,16 +52,16 @@ class AudioEncoder::ImplBase : public base::SupportsWeakPtr<ImplBase> {
sampling_rate % 100 != 0 || sampling_rate % 100 != 0 ||
samples_per_10ms_ * num_channels_ > samples_per_10ms_ * num_channels_ >
transport::EncodedAudioFrame::kMaxNumberOfSamples) { transport::EncodedAudioFrame::kMaxNumberOfSamples) {
initialization_status_ = STATUS_INVALID_AUDIO_CONFIGURATION; cast_initialization_cb_ = STATUS_INVALID_AUDIO_CONFIGURATION;
} else { } else {
initialization_status_ = STATUS_INITIALIZED; cast_initialization_cb_ = STATUS_AUDIO_INITIALIZED;
} }
} }
virtual ~ImplBase() {} virtual ~ImplBase() {}
CastInitializationStatus InitializationResult() const { CastInitializationStatus InitializationResult() const {
return initialization_status_; return cast_initialization_cb_;
} }
void LogAudioFrameEvent(uint32 rtp_timestamp, void LogAudioFrameEvent(uint32 rtp_timestamp,
...@@ -156,7 +156,7 @@ class AudioEncoder::ImplBase : public base::SupportsWeakPtr<ImplBase> { ...@@ -156,7 +156,7 @@ class AudioEncoder::ImplBase : public base::SupportsWeakPtr<ImplBase> {
const int num_channels_; const int num_channels_;
const int samples_per_10ms_; const int samples_per_10ms_;
const FrameEncodedCallback callback_; const FrameEncodedCallback callback_;
CastInitializationStatus initialization_status_; CastInitializationStatus cast_initialization_cb_;
private: private:
// In the case where a call to EncodeAudio() cannot completely fill the // In the case where a call to EncodeAudio() cannot completely fill the
...@@ -192,7 +192,7 @@ class AudioEncoder::OpusImpl : public AudioEncoder::ImplBase { ...@@ -192,7 +192,7 @@ class AudioEncoder::OpusImpl : public AudioEncoder::ImplBase {
encoder_memory_(new uint8[opus_encoder_get_size(num_channels)]), encoder_memory_(new uint8[opus_encoder_get_size(num_channels)]),
opus_encoder_(reinterpret_cast<OpusEncoder*>(encoder_memory_.get())), opus_encoder_(reinterpret_cast<OpusEncoder*>(encoder_memory_.get())),
buffer_(new float[num_channels * samples_per_10ms_]) { buffer_(new float[num_channels * samples_per_10ms_]) {
if (ImplBase::initialization_status_ != STATUS_INITIALIZED) { if (ImplBase::cast_initialization_cb_ != STATUS_AUDIO_INITIALIZED) {
return; return;
} }
...@@ -316,7 +316,6 @@ AudioEncoder::AudioEncoder( ...@@ -316,7 +316,6 @@ AudioEncoder::AudioEncoder(
// Note: It doesn't matter which thread constructs AudioEncoder, just so long // Note: It doesn't matter which thread constructs AudioEncoder, just so long
// as all calls to InsertAudio() are by the same thread. // as all calls to InsertAudio() are by the same thread.
insert_thread_checker_.DetachFromThread(); insert_thread_checker_.DetachFromThread();
switch (audio_config.codec) { switch (audio_config.codec) {
case transport::kOpus: case transport::kOpus:
impl_.reset(new OpusImpl(cast_environment, impl_.reset(new OpusImpl(cast_environment,
...@@ -340,6 +339,7 @@ AudioEncoder::AudioEncoder( ...@@ -340,6 +339,7 @@ AudioEncoder::AudioEncoder(
AudioEncoder::~AudioEncoder() {} AudioEncoder::~AudioEncoder() {}
CastInitializationStatus AudioEncoder::InitializationResult() const { CastInitializationStatus AudioEncoder::InitializationResult() const {
DCHECK(insert_thread_checker_.CalledOnValidThread());
if (impl_) { if (impl_) {
return impl_->InitializationResult(); return impl_->InitializationResult();
} }
......
...@@ -113,7 +113,7 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment, ...@@ -113,7 +113,7 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
audio_config.incoming_feedback_ssrc, audio_config.incoming_feedback_ssrc,
audio_config.rtcp_c_name), audio_config.rtcp_c_name),
timers_initialized_(false), timers_initialized_(false),
initialization_status_(STATUS_INITIALIZED), cast_initialization_cb_(STATUS_AUDIO_UNINITIALIZED),
weak_factory_(this) { weak_factory_(this) {
rtcp_.SetCastReceiverEventHistorySize(kReceiverRtcpEventHistorySize); rtcp_.SetCastReceiverEventHistorySize(kReceiverRtcpEventHistorySize);
if (!audio_config.use_external_encoder) { if (!audio_config.use_external_encoder) {
...@@ -122,7 +122,7 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment, ...@@ -122,7 +122,7 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
audio_config, audio_config,
base::Bind(&AudioSender::SendEncodedAudioFrame, base::Bind(&AudioSender::SendEncodedAudioFrame,
weak_factory_.GetWeakPtr())); weak_factory_.GetWeakPtr()));
initialization_status_ = audio_encoder_->InitializationResult(); cast_initialization_cb_ = audio_encoder_->InitializationResult();
} }
} }
...@@ -141,7 +141,6 @@ void AudioSender::InsertAudio(const AudioBus* audio_bus, ...@@ -141,7 +141,6 @@ void AudioSender::InsertAudio(const AudioBus* audio_bus,
const base::Closure& done_callback) { const base::Closure& done_callback) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(audio_encoder_.get()) << "Invalid internal state"; DCHECK(audio_encoder_.get()) << "Invalid internal state";
audio_encoder_->InsertAudio(audio_bus, recorded_time, done_callback); audio_encoder_->InsertAudio(audio_bus, recorded_time, done_callback);
} }
......
...@@ -39,7 +39,7 @@ class AudioSender : public base::NonThreadSafe, ...@@ -39,7 +39,7 @@ class AudioSender : public base::NonThreadSafe,
virtual ~AudioSender(); virtual ~AudioSender();
CastInitializationStatus InitializationResult() const { CastInitializationStatus InitializationResult() const {
return initialization_status_; return cast_initialization_cb_;
} }
// The |audio_bus| must be valid until the |done_callback| is called. // The |audio_bus| must be valid until the |done_callback| is called.
...@@ -85,7 +85,7 @@ class AudioSender : public base::NonThreadSafe, ...@@ -85,7 +85,7 @@ class AudioSender : public base::NonThreadSafe,
scoped_ptr<LocalRtcpAudioSenderFeedback> rtcp_feedback_; scoped_ptr<LocalRtcpAudioSenderFeedback> rtcp_feedback_;
Rtcp rtcp_; Rtcp rtcp_;
bool timers_initialized_; bool timers_initialized_;
CastInitializationStatus initialization_status_; CastInitializationStatus cast_initialization_cb_;
DISALLOW_COPY_AND_ASSIGN(AudioSender); DISALLOW_COPY_AND_ASSIGN(AudioSender);
}; };
......
...@@ -134,17 +134,6 @@ struct PcmAudioFrame { ...@@ -134,17 +134,6 @@ struct PcmAudioFrame {
typedef transport::Packet Packet; typedef transport::Packet Packet;
typedef transport::PacketList PacketList; typedef transport::PacketList PacketList;
enum CastInitializationStatus {
STATUS_INITIALIZED,
STATUS_INVALID_CAST_ENVIRONMENT,
STATUS_INVALID_CRYPTO_CONFIGURATION,
STATUS_UNSUPPORTED_AUDIO_CODEC,
STATUS_INVALID_AUDIO_CONFIGURATION,
STATUS_INVALID_VIDEO_CONFIGURATION,
STATUS_GPU_ACCELERATION_NOT_SUPPORTED,
STATUS_GPU_ACCELERATION_ERROR,
};
typedef base::Callback<void(CastInitializationStatus)> typedef base::Callback<void(CastInitializationStatus)>
CastInitializationCallback; CastInitializationCallback;
......
...@@ -29,6 +29,20 @@ const int kStartRttMs = 20; ...@@ -29,6 +29,20 @@ const int kStartRttMs = 20;
const int64 kCastMessageUpdateIntervalMs = 33; const int64 kCastMessageUpdateIntervalMs = 33;
const int64 kNackRepeatIntervalMs = 30; const int64 kNackRepeatIntervalMs = 30;
enum CastInitializationStatus {
STATUS_AUDIO_UNINITIALIZED,
STATUS_VIDEO_UNINITIALIZED,
STATUS_AUDIO_INITIALIZED,
STATUS_VIDEO_INITIALIZED,
STATUS_INVALID_CAST_ENVIRONMENT,
STATUS_INVALID_CRYPTO_CONFIGURATION,
STATUS_UNSUPPORTED_AUDIO_CODEC,
STATUS_INVALID_AUDIO_CONFIGURATION,
STATUS_INVALID_VIDEO_CONFIGURATION,
STATUS_GPU_ACCELERATION_NOT_SUPPORTED,
STATUS_GPU_ACCELERATION_ERROR,
};
enum DefaultSettings { enum DefaultSettings {
kDefaultAudioEncoderBitrate = 0, // This means "auto," and may mean VBR. kDefaultAudioEncoderBitrate = 0, // This means "auto," and may mean VBR.
kDefaultAudioSamplingRate = 48000, kDefaultAudioSamplingRate = 48000,
......
...@@ -68,7 +68,7 @@ class FrameReceiver : public base::RefCountedThreadSafe<FrameReceiver> { ...@@ -68,7 +68,7 @@ class FrameReceiver : public base::RefCountedThreadSafe<FrameReceiver> {
// This Class is thread safe. // This Class is thread safe.
class CastReceiver { class CastReceiver {
public: public:
static CastReceiver* CreateCastReceiver( static scoped_ptr<CastReceiver> Create(
scoped_refptr<CastEnvironment> cast_environment, scoped_refptr<CastEnvironment> cast_environment,
const AudioReceiverConfig& audio_config, const AudioReceiverConfig& audio_config,
const VideoReceiverConfig& video_config, const VideoReceiverConfig& video_config,
......
...@@ -82,13 +82,13 @@ class LocalFrameReceiver : public FrameReceiver { ...@@ -82,13 +82,13 @@ class LocalFrameReceiver : public FrameReceiver {
VideoReceiver* video_receiver_; VideoReceiver* video_receiver_;
}; };
CastReceiver* CastReceiver::CreateCastReceiver( scoped_ptr<CastReceiver> CastReceiver::Create(
scoped_refptr<CastEnvironment> cast_environment, scoped_refptr<CastEnvironment> cast_environment,
const AudioReceiverConfig& audio_config, const AudioReceiverConfig& audio_config,
const VideoReceiverConfig& video_config, const VideoReceiverConfig& video_config,
transport::PacketSender* const packet_sender) { transport::PacketSender* const packet_sender) {
return new CastReceiverImpl( return scoped_ptr<CastReceiver>(new CastReceiverImpl(
cast_environment, audio_config, video_config, packet_sender); cast_environment, audio_config, video_config, packet_sender));
} }
CastReceiverImpl::CastReceiverImpl( CastReceiverImpl::CastReceiverImpl(
......
...@@ -2,11 +2,10 @@ ...@@ -2,11 +2,10 @@
// Use of this source code is governed by a BSD-style license that can be // Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file. // found in the LICENSE file.
// //
// This is the main interface for the cast sender. All configuration are done // This is the main interface for the cast sender.
// at creation.
// //
// The FrameInput and PacketReciever interfaces should normally be accessed from // The AudioFrameInput, VideoFrameInput and PacketReciever interfaces should
// the IO thread. However they are allowed to be called from any thread. // be accessed from the main thread.
#ifndef MEDIA_CAST_CAST_SENDER_H_ #ifndef MEDIA_CAST_CAST_SENDER_H_
#define MEDIA_CAST_CAST_SENDER_H_ #define MEDIA_CAST_CAST_SENDER_H_
...@@ -23,23 +22,32 @@ ...@@ -23,23 +22,32 @@
namespace media { namespace media {
class AudioBus; class AudioBus;
class GpuVideoAcceleratorFactories;
class VideoFrame; class VideoFrame;
}
namespace media {
namespace cast { namespace cast {
class AudioSender;
class VideoSender;
// This Class is thread safe. class VideoFrameInput : public base::RefCountedThreadSafe<VideoFrameInput> {
class FrameInput : public base::RefCountedThreadSafe<FrameInput> {
public: public:
// The video_frame must be valid until the callback is called. // Insert video frames into Cast sender. Frames will be encoded, packetized
// The callback is called from the main cast thread as soon as // and sent to the network.
// the encoder is done with the frame; it does not mean that the encoded frame
// has been sent out.
virtual void InsertRawVideoFrame( virtual void InsertRawVideoFrame(
const scoped_refptr<media::VideoFrame>& video_frame, const scoped_refptr<media::VideoFrame>& video_frame,
const base::TimeTicks& capture_time) = 0; const base::TimeTicks& capture_time) = 0;
protected:
virtual ~VideoFrameInput() {}
private:
friend class base::RefCountedThreadSafe<VideoFrameInput>;
};
class AudioFrameInput : public base::RefCountedThreadSafe<AudioFrameInput> {
public:
// Insert audio frames into Cast sender. Frames will be encoded, packetized
// and sent to the network.
// The |audio_bus| must be valid until the |done_callback| is called. // The |audio_bus| must be valid until the |done_callback| is called.
// The callback is called from the main cast thread as soon as the encoder is // The callback is called from the main cast thread as soon as the encoder is
// done with |audio_bus|; it does not mean that the encoded data has been // done with |audio_bus|; it does not mean that the encoded data has been
...@@ -49,36 +57,44 @@ class FrameInput : public base::RefCountedThreadSafe<FrameInput> { ...@@ -49,36 +57,44 @@ class FrameInput : public base::RefCountedThreadSafe<FrameInput> {
const base::Closure& done_callback) = 0; const base::Closure& done_callback) = 0;
protected: protected:
virtual ~FrameInput() {} virtual ~AudioFrameInput() {}
private: private:
friend class base::RefCountedThreadSafe<FrameInput>; friend class base::RefCountedThreadSafe<AudioFrameInput>;
}; };
// This Class is thread safe. // The provided CastTransportSender and the CastSender should be called from the
// The provided CastTransportSender object will always be called from the main // main thread.
// cast thread.
// At least one of AudioSenderConfig and VideoSenderConfig have to be provided.
class CastSender { class CastSender {
public: public:
static CastSender* CreateCastSender( static scoped_ptr<CastSender> Create(
scoped_refptr<CastEnvironment> cast_environment, scoped_refptr<CastEnvironment> cast_environment,
const AudioSenderConfig* audio_config,
const VideoSenderConfig* video_config,
const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories,
const CastInitializationCallback& cast_initialization,
transport::CastTransportSender* const transport_sender); transport::CastTransportSender* const transport_sender);
virtual ~CastSender() {} virtual ~CastSender() {}
// All audio and video frames for the session should be inserted to this // All video frames for the session should be inserted to this object.
// object. virtual scoped_refptr<VideoFrameInput> video_frame_input() = 0;
// Can be called from any thread.
virtual scoped_refptr<FrameInput> frame_input() = 0; // All audio frames for the session should be inserted to this object.
virtual scoped_refptr<AudioFrameInput> audio_frame_input() = 0;
// All RTCP packets for the session should be inserted to this object. // All RTCP packets for the session should be inserted to this object.
// Can be called from any thread. // This function and the callback must be called on the main thread.
virtual transport::PacketReceiverCallback packet_receiver() = 0; virtual transport::PacketReceiverCallback packet_receiver() = 0;
// Initialize the audio stack. Must be called in order to send audio frames.
// Status of the initialization will be returned on cast_initialization_cb.
virtual void InitializeAudio(
const AudioSenderConfig& audio_config,
const CastInitializationCallback& cast_initialization_cb) = 0;
// Initialize the video stack. Must be called in order to send video frames.
// Status of the initialization will be returned on cast_initialization_cb.
virtual void InitializeVideo(
const VideoSenderConfig& video_config,
const CastInitializationCallback& cast_initialization_cb,
const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories) = 0;
}; };
} // namespace cast } // namespace cast
......
...@@ -12,17 +12,13 @@ ...@@ -12,17 +12,13 @@
namespace media { namespace media {
namespace cast { namespace cast {
// The LocalFrameInput class posts all incoming frames; audio and video to the // The LocalVideoFrameInput class posts all incoming video frames to the main
// main cast thread for processing. // cast thread for processing.
// This make the cast sender interface thread safe. class LocalVideoFrameInput : public VideoFrameInput {
class LocalFrameInput : public FrameInput {
public: public:
LocalFrameInput(scoped_refptr<CastEnvironment> cast_environment, LocalVideoFrameInput(scoped_refptr<CastEnvironment> cast_environment,
base::WeakPtr<AudioSender> audio_sender, base::WeakPtr<VideoSender> video_sender)
base::WeakPtr<VideoSender> video_sender) : cast_environment_(cast_environment), video_sender_(video_sender) {}
: cast_environment_(cast_environment),
audio_sender_(audio_sender),
video_sender_(video_sender) {}
virtual void InsertRawVideoFrame( virtual void InsertRawVideoFrame(
const scoped_refptr<media::VideoFrame>& video_frame, const scoped_refptr<media::VideoFrame>& video_frame,
...@@ -35,6 +31,26 @@ class LocalFrameInput : public FrameInput { ...@@ -35,6 +31,26 @@ class LocalFrameInput : public FrameInput {
capture_time)); capture_time));
} }
protected:
virtual ~LocalVideoFrameInput() {}
private:
friend class base::RefCountedThreadSafe<LocalVideoFrameInput>;
scoped_refptr<CastEnvironment> cast_environment_;
base::WeakPtr<VideoSender> video_sender_;
DISALLOW_COPY_AND_ASSIGN(LocalVideoFrameInput);
};
// The LocalAudioFrameInput class posts all incoming audio frames to the main
// cast thread for processing. Therefore frames can be inserted from any thread.
class LocalAudioFrameInput : public AudioFrameInput {
public:
LocalAudioFrameInput(scoped_refptr<CastEnvironment> cast_environment,
base::WeakPtr<AudioSender> audio_sender)
: cast_environment_(cast_environment), audio_sender_(audio_sender) {}
virtual void InsertAudio(const AudioBus* audio_bus, virtual void InsertAudio(const AudioBus* audio_bus,
const base::TimeTicks& recorded_time, const base::TimeTicks& recorded_time,
const base::Closure& done_callback) OVERRIDE { const base::Closure& done_callback) OVERRIDE {
...@@ -48,96 +64,71 @@ class LocalFrameInput : public FrameInput { ...@@ -48,96 +64,71 @@ class LocalFrameInput : public FrameInput {
} }
protected: protected:
virtual ~LocalFrameInput() {} virtual ~LocalAudioFrameInput() {}
private: private:
friend class base::RefCountedThreadSafe<LocalFrameInput>; friend class base::RefCountedThreadSafe<LocalAudioFrameInput>;
scoped_refptr<CastEnvironment> cast_environment_; scoped_refptr<CastEnvironment> cast_environment_;
base::WeakPtr<AudioSender> audio_sender_; base::WeakPtr<AudioSender> audio_sender_;
base::WeakPtr<VideoSender> video_sender_;
DISALLOW_COPY_AND_ASSIGN(LocalFrameInput); DISALLOW_COPY_AND_ASSIGN(LocalAudioFrameInput);
}; };
CastSender* CastSender::CreateCastSender( scoped_ptr<CastSender> CastSender::Create(
scoped_refptr<CastEnvironment> cast_environment, scoped_refptr<CastEnvironment> cast_environment,
const AudioSenderConfig* audio_config,
const VideoSenderConfig* video_config,
const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories,
const CastInitializationCallback& initialization_status,
transport::CastTransportSender* const transport_sender) { transport::CastTransportSender* const transport_sender) {
CHECK(cast_environment); CHECK(cast_environment);
return new CastSenderImpl(cast_environment, return scoped_ptr<CastSender>(
audio_config, new CastSenderImpl(cast_environment, transport_sender));
video_config,
gpu_factories,
initialization_status,
transport_sender);
} }
CastSenderImpl::CastSenderImpl( CastSenderImpl::CastSenderImpl(
scoped_refptr<CastEnvironment> cast_environment, scoped_refptr<CastEnvironment> cast_environment,
const AudioSenderConfig* audio_config,
const VideoSenderConfig* video_config,
const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories,
const CastInitializationCallback& initialization_status,
transport::CastTransportSender* const transport_sender) transport::CastTransportSender* const transport_sender)
: initialization_callback_(initialization_status), : cast_environment_(cast_environment),
packet_receiver_( transport_sender_(transport_sender),
base::Bind(&CastSenderImpl::ReceivedPacket, base::Unretained(this))),
cast_environment_(cast_environment),
weak_factory_(this) { weak_factory_(this) {
CHECK(cast_environment); CHECK(cast_environment);
CHECK(audio_config || video_config); }
base::WeakPtr<AudioSender> audio_sender_ptr; void CastSenderImpl::InitializeAudio(
base::WeakPtr<VideoSender> video_sender_ptr; const AudioSenderConfig& audio_config,
const CastInitializationCallback& cast_initialization_cb) {
if (audio_config) { DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(audio_config->use_external_encoder || CHECK(audio_config.use_external_encoder ||
cast_environment->HasAudioEncoderThread()); cast_environment_->HasAudioEncoderThread());
audio_sender_.reset( audio_sender_.reset(
new AudioSender(cast_environment, *audio_config, transport_sender)); new AudioSender(cast_environment_, audio_config, transport_sender_));
ssrc_of_audio_sender_ = audio_config->incoming_feedback_ssrc;
audio_sender_ptr = audio_sender_->AsWeakPtr(); CastInitializationStatus status = audio_sender_->InitializationResult();
CastInitializationStatus status = audio_sender_->InitializationResult(); if (status == STATUS_AUDIO_INITIALIZED) {
if (status != STATUS_INITIALIZED || !video_config) { ssrc_of_audio_sender_ = audio_config.incoming_feedback_ssrc;
if (status == STATUS_INITIALIZED && !video_config) { audio_frame_input_ =
// Audio only. new LocalAudioFrameInput(cast_environment_, audio_sender_->AsWeakPtr());
frame_input_ = new LocalFrameInput(
cast_environment, audio_sender_ptr, video_sender_ptr);
}
cast_environment->PostTask(
CastEnvironment::MAIN,
FROM_HERE,
base::Bind(&CastSenderImpl::InitializationResult,
weak_factory_.GetWeakPtr(),
status));
return;
}
}
if (video_config) {
CHECK(video_config->use_external_encoder ||
cast_environment->HasVideoEncoderThread());
video_sender_.reset(
new VideoSender(cast_environment,
*video_config,
gpu_factories,
base::Bind(&CastSenderImpl::InitializationResult,
weak_factory_.GetWeakPtr()),
transport_sender));
video_sender_ptr = video_sender_->AsWeakPtr();
ssrc_of_video_sender_ = video_config->incoming_feedback_ssrc;
} }
frame_input_ = cast_initialization_cb.Run(status);
new LocalFrameInput(cast_environment, audio_sender_ptr, video_sender_ptr); }
// Handing over responsibility to call NotifyInitialization to the void CastSenderImpl::InitializeVideo(
// video sender. const VideoSenderConfig& video_config,
const CastInitializationCallback& cast_initialization_cb,
const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(video_config.use_external_encoder ||
cast_environment_->HasVideoEncoderThread());
video_sender_.reset(new VideoSender(cast_environment_,
video_config,
gpu_factories,
cast_initialization_cb,
transport_sender_));
ssrc_of_video_sender_ = video_config.incoming_feedback_ssrc;
video_frame_input_ =
new LocalVideoFrameInput(cast_environment_, video_sender_->AsWeakPtr());
} }
CastSenderImpl::~CastSenderImpl() {} CastSenderImpl::~CastSenderImpl() {}
...@@ -203,16 +194,17 @@ void CastSenderImpl::ReceivedPacket(scoped_ptr<Packet> packet) { ...@@ -203,16 +194,17 @@ void CastSenderImpl::ReceivedPacket(scoped_ptr<Packet> packet) {
} }
} }
scoped_refptr<FrameInput> CastSenderImpl::frame_input() { return frame_input_; } scoped_refptr<AudioFrameInput> CastSenderImpl::audio_frame_input() {
return audio_frame_input_;
}
transport::PacketReceiverCallback CastSenderImpl::packet_receiver() { scoped_refptr<VideoFrameInput> CastSenderImpl::video_frame_input() {
return packet_receiver_; return video_frame_input_;
return base::Bind(&CastSenderImpl::ReceivedPacket, base::Unretained(this));
} }
void CastSenderImpl::InitializationResult(CastInitializationStatus status) transport::PacketReceiverCallback CastSenderImpl::packet_receiver() {
const { return base::Bind(&CastSenderImpl::ReceivedPacket,
initialization_callback_.Run(status); weak_factory_.GetWeakPtr());
} }
} // namespace cast } // namespace cast
......
...@@ -8,49 +8,53 @@ ...@@ -8,49 +8,53 @@
#include "base/memory/scoped_ptr.h" #include "base/memory/scoped_ptr.h"
#include "media/cast/audio_sender/audio_sender.h" #include "media/cast/audio_sender/audio_sender.h"
#include "media/cast/cast_config.h" #include "media/cast/cast_config.h"
#include "media/cast/cast_defines.h"
#include "media/cast/cast_environment.h" #include "media/cast/cast_environment.h"
#include "media/cast/cast_sender.h" #include "media/cast/cast_sender.h"
#include "media/cast/video_sender/video_sender.h" #include "media/cast/video_sender/video_sender.h"
namespace media { namespace media {
class VideoFrame; class VideoFrame;
}
namespace media {
namespace cast { namespace cast {
class AudioSender; class AudioSender;
class VideoSender; class VideoSender;
// This calls is a pure owner class that group all required sending objects // This class combines all required sending objects such as the audio and video
// together such as pacer, packet receiver, frame input, audio and video sender. // senders, pacer, packet receiver and frame input.
class CastSenderImpl : public CastSender { class CastSenderImpl : public CastSender {
public: public:
CastSenderImpl( CastSenderImpl(scoped_refptr<CastEnvironment> cast_environment,
scoped_refptr<CastEnvironment> cast_environment, transport::CastTransportSender* const transport_sender);
const AudioSenderConfig* audio_config,
const VideoSenderConfig* video_config, virtual void InitializeAudio(
const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories, const AudioSenderConfig& audio_config,
const CastInitializationCallback& initialization_status, const CastInitializationCallback& cast_initialization_cb) OVERRIDE;
transport::CastTransportSender* const transport_sender); virtual void InitializeVideo(
const VideoSenderConfig& video_config,
const CastInitializationCallback& cast_initialization_cb,
const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories)
OVERRIDE;
virtual ~CastSenderImpl(); virtual ~CastSenderImpl();
virtual scoped_refptr<FrameInput> frame_input() OVERRIDE; virtual scoped_refptr<AudioFrameInput> audio_frame_input() OVERRIDE;
virtual scoped_refptr<VideoFrameInput> video_frame_input() OVERRIDE;
virtual transport::PacketReceiverCallback packet_receiver() OVERRIDE; virtual transport::PacketReceiverCallback packet_receiver() OVERRIDE;
private: private:
void ReceivedPacket(scoped_ptr<Packet> packet); void ReceivedPacket(scoped_ptr<Packet> packet);
// Used to trampoline the result back on the correct thread. And guaranteed
// not to be called until the creation is complete.
void InitializationResult(CastInitializationStatus status) const;
CastInitializationCallback initialization_callback_; CastInitializationCallback initialization_callback_;
scoped_ptr<AudioSender> audio_sender_; scoped_ptr<AudioSender> audio_sender_;
scoped_ptr<VideoSender> video_sender_; scoped_ptr<VideoSender> video_sender_;
scoped_refptr<FrameInput> frame_input_; scoped_refptr<AudioFrameInput> audio_frame_input_;
transport::PacketReceiverCallback packet_receiver_; scoped_refptr<VideoFrameInput> video_frame_input_;
scoped_refptr<CastEnvironment> cast_environment_; scoped_refptr<CastEnvironment> cast_environment_;
// The transport sender is owned by the owner of the CastSender, and should be
// valid throughout the lifetime of the CastSender.
transport::CastTransportSender* const transport_sender_;
uint32 ssrc_of_audio_sender_; uint32 ssrc_of_audio_sender_;
uint32 ssrc_of_video_sender_; uint32 ssrc_of_video_sender_;
base::WeakPtrFactory<CastSenderImpl> weak_factory_; base::WeakPtrFactory<CastSenderImpl> weak_factory_;
......
...@@ -95,6 +95,14 @@ void UpdateCastTransportStatus(transport::CastTransportStatus status) { ...@@ -95,6 +95,14 @@ void UpdateCastTransportStatus(transport::CastTransportStatus status) {
EXPECT_TRUE(result); EXPECT_TRUE(result);
} }
void AudioInitializationStatus(CastInitializationStatus status) {
EXPECT_EQ(STATUS_AUDIO_INITIALIZED, status);
}
void VideoInitializationStatus(CastInitializationStatus status) {
EXPECT_EQ(STATUS_VIDEO_INITIALIZED, status);
}
// This is wrapped in a struct because it needs to be put into a std::map. // This is wrapped in a struct because it needs to be put into a std::map.
typedef struct { typedef struct {
int counter[kNumOfLoggingEvents]; int counter[kNumOfLoggingEvents];
...@@ -396,8 +404,8 @@ class End2EndTest : public ::testing::Test { ...@@ -396,8 +404,8 @@ class End2EndTest : public ::testing::Test {
: start_time_(), : start_time_(),
testing_clock_sender_(new base::SimpleTestTickClock()), testing_clock_sender_(new base::SimpleTestTickClock()),
testing_clock_receiver_(new base::SimpleTestTickClock()), testing_clock_receiver_(new base::SimpleTestTickClock()),
task_runner_(new test::FakeSingleThreadTaskRunner( task_runner_(
testing_clock_sender_)), new test::FakeSingleThreadTaskRunner(testing_clock_sender_)),
logging_config_(GetLoggingConfigWithRawEventsAndStatsEnabled()), logging_config_(GetLoggingConfigWithRawEventsAndStatsEnabled()),
cast_environment_sender_(new CastEnvironment( cast_environment_sender_(new CastEnvironment(
scoped_ptr<base::TickClock>(testing_clock_sender_).Pass(), scoped_ptr<base::TickClock>(testing_clock_sender_).Pass(),
...@@ -492,11 +500,10 @@ class End2EndTest : public ::testing::Test { ...@@ -492,11 +500,10 @@ class End2EndTest : public ::testing::Test {
} }
void Create() { void Create() {
cast_receiver_.reset( cast_receiver_ = CastReceiver::Create(cast_environment_receiver_,
CastReceiver::CreateCastReceiver(cast_environment_receiver_, audio_receiver_config_,
audio_receiver_config_, video_receiver_config_,
video_receiver_config_, &receiver_to_sender_);
&receiver_to_sender_));
net::IPEndPoint dummy_endpoint; net::IPEndPoint dummy_endpoint;
transport_sender_.reset(new transport::CastTransportSenderImpl( transport_sender_.reset(new transport::CastTransportSenderImpl(
NULL, NULL,
...@@ -512,18 +519,21 @@ class End2EndTest : public ::testing::Test { ...@@ -512,18 +519,21 @@ class End2EndTest : public ::testing::Test {
transport_sender_->InitializeAudio(transport_audio_config_); transport_sender_->InitializeAudio(transport_audio_config_);
transport_sender_->InitializeVideo(transport_video_config_); transport_sender_->InitializeVideo(transport_video_config_);
cast_sender_.reset(CastSender::CreateCastSender( cast_sender_ =
cast_environment_sender_, CastSender::Create(cast_environment_sender_, transport_sender_.get());
&audio_sender_config_,
&video_sender_config_, // Initializing audio and video senders.
NULL, cast_sender_->InitializeAudio(audio_sender_config_,
base::Bind(&End2EndTest::InitializationResult, base::Unretained(this)), base::Bind(&AudioInitializationStatus));
transport_sender_.get())); cast_sender_->InitializeVideo(
video_sender_config_, base::Bind(&VideoInitializationStatus), NULL);
receiver_to_sender_.SetPacketReceiver(cast_sender_->packet_receiver()); receiver_to_sender_.SetPacketReceiver(cast_sender_->packet_receiver());
sender_to_receiver_.SetPacketReceiver(cast_receiver_->packet_receiver()); sender_to_receiver_.SetPacketReceiver(cast_receiver_->packet_receiver());
frame_input_ = cast_sender_->frame_input(); audio_frame_input_ = cast_sender_->audio_frame_input();
video_frame_input_ = cast_sender_->video_frame_input();
frame_receiver_ = cast_receiver_->frame_receiver(); frame_receiver_ = cast_receiver_->frame_receiver();
audio_bus_factory_.reset( audio_bus_factory_.reset(
...@@ -555,7 +565,7 @@ class End2EndTest : public ::testing::Test { ...@@ -555,7 +565,7 @@ class End2EndTest : public ::testing::Test {
media::VideoFrame::CreateFrame( media::VideoFrame::CreateFrame(
VideoFrame::I420, size, gfx::Rect(size), size, time_diff); VideoFrame::I420, size, gfx::Rect(size), size, time_diff);
PopulateVideoFrame(video_frame, start_value); PopulateVideoFrame(video_frame, start_value);
frame_input_->InsertRawVideoFrame(video_frame, capture_time); video_frame_input_->InsertRawVideoFrame(video_frame, capture_time);
} }
void RunTasks(int during_ms) { void RunTasks(int during_ms) {
...@@ -567,10 +577,6 @@ class End2EndTest : public ::testing::Test { ...@@ -567,10 +577,6 @@ class End2EndTest : public ::testing::Test {
} }
} }
void InitializationResult(CastInitializationStatus result) {
EXPECT_EQ(result, STATUS_INITIALIZED);
}
void LogRawEvents(const std::vector<PacketEvent>& packet_events) { void LogRawEvents(const std::vector<PacketEvent>& packet_events) {
EXPECT_FALSE(packet_events.empty()); EXPECT_FALSE(packet_events.empty());
for (std::vector<media::cast::PacketEvent>::const_iterator it = for (std::vector<media::cast::PacketEvent>::const_iterator it =
...@@ -608,7 +614,8 @@ class End2EndTest : public ::testing::Test { ...@@ -608,7 +614,8 @@ class End2EndTest : public ::testing::Test {
scoped_ptr<CastReceiver> cast_receiver_; scoped_ptr<CastReceiver> cast_receiver_;
scoped_ptr<CastSender> cast_sender_; scoped_ptr<CastSender> cast_sender_;
scoped_refptr<FrameInput> frame_input_; scoped_refptr<AudioFrameInput> audio_frame_input_;
scoped_refptr<VideoFrameInput> video_frame_input_;
scoped_refptr<FrameReceiver> frame_receiver_; scoped_refptr<FrameReceiver> frame_receiver_;
scoped_refptr<TestReceiverAudioCallback> test_receiver_audio_callback_; scoped_refptr<TestReceiverAudioCallback> test_receiver_audio_callback_;
...@@ -620,7 +627,6 @@ class End2EndTest : public ::testing::Test { ...@@ -620,7 +627,6 @@ class End2EndTest : public ::testing::Test {
std::vector<FrameEvent> frame_events_; std::vector<FrameEvent> frame_events_;
std::vector<PacketEvent> packet_events_; std::vector<PacketEvent> packet_events_;
std::vector<GenericEvent> generic_events_; std::vector<GenericEvent> generic_events_;
// |transport_sender_| has a RepeatingTimer which needs a MessageLoop. // |transport_sender_| has a RepeatingTimer which needs a MessageLoop.
base::MessageLoop message_loop_; base::MessageLoop message_loop_;
}; };
...@@ -657,7 +663,7 @@ TEST_F(End2EndTest, LoopNoLossPcm16) { ...@@ -657,7 +663,7 @@ TEST_F(End2EndTest, LoopNoLossPcm16) {
} }
AudioBus* const audio_bus_ptr = audio_bus.get(); AudioBus* const audio_bus_ptr = audio_bus.get();
frame_input_->InsertAudio( audio_frame_input_->InsertAudio(
audio_bus_ptr, audio_bus_ptr,
send_time, send_time,
base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus)));
...@@ -714,7 +720,7 @@ TEST_F(End2EndTest, LoopNoLossPcm16ExternalDecoder) { ...@@ -714,7 +720,7 @@ TEST_F(End2EndTest, LoopNoLossPcm16ExternalDecoder) {
send_time); send_time);
AudioBus* const audio_bus_ptr = audio_bus.get(); AudioBus* const audio_bus_ptr = audio_bus.get();
frame_input_->InsertAudio( audio_frame_input_->InsertAudio(
audio_bus_ptr, audio_bus_ptr,
send_time, send_time,
base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus)));
...@@ -749,7 +755,7 @@ TEST_F(End2EndTest, LoopNoLossOpus) { ...@@ -749,7 +755,7 @@ TEST_F(End2EndTest, LoopNoLossOpus) {
} }
AudioBus* const audio_bus_ptr = audio_bus.get(); AudioBus* const audio_bus_ptr = audio_bus.get();
frame_input_->InsertAudio( audio_frame_input_->InsertAudio(
audio_bus_ptr, audio_bus_ptr,
send_time, send_time,
base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus)));
...@@ -799,7 +805,7 @@ TEST_F(End2EndTest, StartSenderBeforeReceiver) { ...@@ -799,7 +805,7 @@ TEST_F(End2EndTest, StartSenderBeforeReceiver) {
base::TimeDelta::FromMilliseconds(10) * num_10ms_blocks)); base::TimeDelta::FromMilliseconds(10) * num_10ms_blocks));
AudioBus* const audio_bus_ptr = audio_bus.get(); AudioBus* const audio_bus_ptr = audio_bus.get();
frame_input_->InsertAudio( audio_frame_input_->InsertAudio(
audio_bus_ptr, audio_bus_ptr,
send_time, send_time,
base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus)));
...@@ -841,7 +847,7 @@ TEST_F(End2EndTest, StartSenderBeforeReceiver) { ...@@ -841,7 +847,7 @@ TEST_F(End2EndTest, StartSenderBeforeReceiver) {
} }
AudioBus* const audio_bus_ptr = audio_bus.get(); AudioBus* const audio_bus_ptr = audio_bus.get();
frame_input_->InsertAudio( audio_frame_input_->InsertAudio(
audio_bus_ptr, audio_bus_ptr,
send_time, send_time,
base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus)));
...@@ -1072,7 +1078,7 @@ TEST_F(End2EndTest, CryptoAudio) { ...@@ -1072,7 +1078,7 @@ TEST_F(End2EndTest, CryptoAudio) {
send_time); send_time);
} }
AudioBus* const audio_bus_ptr = audio_bus.get(); AudioBus* const audio_bus_ptr = audio_bus.get();
frame_input_->InsertAudio( audio_frame_input_->InsertAudio(
audio_bus_ptr, audio_bus_ptr,
send_time, send_time,
base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus)));
...@@ -1250,7 +1256,7 @@ TEST_F(End2EndTest, AudioLogging) { ...@@ -1250,7 +1256,7 @@ TEST_F(End2EndTest, AudioLogging) {
} }
AudioBus* const audio_bus_ptr = audio_bus.get(); AudioBus* const audio_bus_ptr = audio_bus.get();
frame_input_->InsertAudio( audio_frame_input_->InsertAudio(
audio_bus_ptr, audio_bus_ptr,
send_time, send_time,
base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus)));
......
...@@ -205,11 +205,13 @@ class SendProcess { ...@@ -205,11 +205,13 @@ class SendProcess {
SendProcess(scoped_refptr<base::SingleThreadTaskRunner> thread_proxy, SendProcess(scoped_refptr<base::SingleThreadTaskRunner> thread_proxy,
base::TickClock* clock, base::TickClock* clock,
const VideoSenderConfig& video_config, const VideoSenderConfig& video_config,
FrameInput* frame_input) scoped_refptr<AudioFrameInput> audio_frame_input,
scoped_refptr<VideoFrameInput> video_frame_input)
: test_app_thread_proxy_(thread_proxy), : test_app_thread_proxy_(thread_proxy),
video_config_(video_config), video_config_(video_config),
audio_diff_(kFrameTimerMs), audio_diff_(kFrameTimerMs),
frame_input_(frame_input), audio_frame_input_(audio_frame_input),
video_frame_input_(video_frame_input),
synthetic_count_(0), synthetic_count_(0),
clock_(clock), clock_(clock),
start_time_(), start_time_(),
...@@ -245,7 +247,7 @@ class SendProcess { ...@@ -245,7 +247,7 @@ class SendProcess {
scoped_ptr<AudioBus> audio_bus(audio_bus_factory_->NextAudioBus( scoped_ptr<AudioBus> audio_bus(audio_bus_factory_->NextAudioBus(
base::TimeDelta::FromMilliseconds(10) * num_10ms_blocks)); base::TimeDelta::FromMilliseconds(10) * num_10ms_blocks));
AudioBus* const audio_bus_ptr = audio_bus.get(); AudioBus* const audio_bus_ptr = audio_bus.get();
frame_input_->InsertAudio( audio_frame_input_->InsertAudio(
audio_bus_ptr, audio_bus_ptr,
clock_->NowTicks(), clock_->NowTicks(),
base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus))); base::Bind(&OwnThatAudioBus, base::Passed(&audio_bus)));
...@@ -277,21 +279,21 @@ class SendProcess { ...@@ -277,21 +279,21 @@ class SendProcess {
test_app_thread_proxy_->PostDelayedTask( test_app_thread_proxy_->PostDelayedTask(
FROM_HERE, FROM_HERE,
base::Bind(&SendProcess::SendVideoFrameOnTime, base::Bind(&SendProcess::SendVideoFrameOnTime,
base::Unretained(this), weak_factory_.GetWeakPtr(),
video_frame), video_frame),
video_frame_time - elapsed_time); video_frame_time - elapsed_time);
} else { } else {
test_app_thread_proxy_->PostTask( test_app_thread_proxy_->PostTask(
FROM_HERE, FROM_HERE,
base::Bind(&SendProcess::SendVideoFrameOnTime, base::Bind(&SendProcess::SendVideoFrameOnTime,
base::Unretained(this), weak_factory_.GetWeakPtr(),
video_frame)); video_frame));
} }
} }
void SendVideoFrameOnTime(scoped_refptr<media::VideoFrame> video_frame) { void SendVideoFrameOnTime(scoped_refptr<media::VideoFrame> video_frame) {
send_time_ = clock_->NowTicks(); send_time_ = clock_->NowTicks();
frame_input_->InsertRawVideoFrame(video_frame, send_time_); video_frame_input_->InsertRawVideoFrame(video_frame, send_time_);
test_app_thread_proxy_->PostTask( test_app_thread_proxy_->PostTask(
FROM_HERE, base::Bind(&SendProcess::SendFrame, base::Unretained(this))); FROM_HERE, base::Bind(&SendProcess::SendFrame, base::Unretained(this)));
} }
...@@ -300,7 +302,8 @@ class SendProcess { ...@@ -300,7 +302,8 @@ class SendProcess {
scoped_refptr<base::SingleThreadTaskRunner> test_app_thread_proxy_; scoped_refptr<base::SingleThreadTaskRunner> test_app_thread_proxy_;
const VideoSenderConfig video_config_; const VideoSenderConfig video_config_;
int audio_diff_; int audio_diff_;
const scoped_refptr<FrameInput> frame_input_; const scoped_refptr<AudioFrameInput> audio_frame_input_;
const scoped_refptr<VideoFrameInput> video_frame_input_;
FILE* video_file_; FILE* video_file_;
uint8 synthetic_count_; uint8 synthetic_count_;
base::TickClock* const clock_; // Not owned by this class. base::TickClock* const clock_; // Not owned by this class.
...@@ -336,8 +339,9 @@ void LogRawEvents( ...@@ -336,8 +339,9 @@ void LogRawEvents(
} }
void InitializationResult(media::cast::CastInitializationStatus result) { void InitializationResult(media::cast::CastInitializationStatus result) {
CHECK_EQ(result, media::cast::STATUS_INITIALIZED); bool end_result = result == media::cast::STATUS_AUDIO_INITIALIZED ||
VLOG(1) << "Cast Sender initialized"; result == media::cast::STATUS_VIDEO_INITIALIZED;
CHECK(end_result) << "Cast sender uninitialized";
} }
net::IPEndPoint CreateUDPAddress(std::string ip_str, int port) { net::IPEndPoint CreateUDPAddress(std::string ip_str, int port) {
...@@ -359,14 +363,14 @@ void WriteLogsToFileAndStopSubscribing( ...@@ -359,14 +363,14 @@ void WriteLogsToFileAndStopSubscribing(
media::cast::FrameEventMap frame_events; media::cast::FrameEventMap frame_events;
media::cast::PacketEventMap packet_events; media::cast::PacketEventMap packet_events;
media::cast::RtpTimestamp first_rtp_timestamp; media::cast::RtpTimestamp first_rtp_timestamp;
video_event_subscriber->GetEventsAndReset(&frame_events, &packet_events, video_event_subscriber->GetEventsAndReset(
&first_rtp_timestamp); &frame_events, &packet_events, &first_rtp_timestamp);
VLOG(0) << "Video frame map size: " << frame_events.size(); VLOG(0) << "Video frame map size: " << frame_events.size();
VLOG(0) << "Video packet map size: " << packet_events.size(); VLOG(0) << "Video packet map size: " << packet_events.size();
if (!serializer.SerializeEventsForStream(false, frame_events, packet_events, if (!serializer.SerializeEventsForStream(
first_rtp_timestamp)) { false, frame_events, packet_events, first_rtp_timestamp)) {
VLOG(1) << "Failed to serialize video events."; VLOG(1) << "Failed to serialize video events.";
return; return;
} }
...@@ -377,14 +381,14 @@ void WriteLogsToFileAndStopSubscribing( ...@@ -377,14 +381,14 @@ void WriteLogsToFileAndStopSubscribing(
// Serialize audio events. // Serialize audio events.
cast_environment->Logging()->RemoveRawEventSubscriber( cast_environment->Logging()->RemoveRawEventSubscriber(
audio_event_subscriber.get()); audio_event_subscriber.get());
audio_event_subscriber->GetEventsAndReset(&frame_events, &packet_events, audio_event_subscriber->GetEventsAndReset(
&first_rtp_timestamp); &frame_events, &packet_events, &first_rtp_timestamp);
VLOG(0) << "Audio frame map size: " << frame_events.size(); VLOG(0) << "Audio frame map size: " << frame_events.size();
VLOG(0) << "Audio packet map size: " << packet_events.size(); VLOG(0) << "Audio packet map size: " << packet_events.size();
if (!serializer.SerializeEventsForStream(true, frame_events, packet_events, if (!serializer.SerializeEventsForStream(
first_rtp_timestamp)) { true, frame_events, packet_events, first_rtp_timestamp)) {
VLOG(1) << "Failed to serialize audio events."; VLOG(1) << "Failed to serialize audio events.";
return; return;
} }
...@@ -406,7 +410,6 @@ void WriteLogsToFileAndStopSubscribing( ...@@ -406,7 +410,6 @@ void WriteLogsToFileAndStopSubscribing(
int main(int argc, char** argv) { int main(int argc, char** argv) {
base::AtExitManager at_exit; base::AtExitManager at_exit;
VLOG(1) << "Cast Sender";
base::Thread test_thread("Cast sender test app thread"); base::Thread test_thread("Cast sender test app thread");
base::Thread audio_thread("Cast audio encoder thread"); base::Thread audio_thread("Cast audio encoder thread");
base::Thread video_thread("Cast video encoder thread"); base::Thread video_thread("Cast video encoder thread");
...@@ -472,23 +475,25 @@ int main(int argc, char** argv) { ...@@ -472,23 +475,25 @@ int main(int argc, char** argv) {
transport_sender->InitializeAudio(transport_audio_config); transport_sender->InitializeAudio(transport_audio_config);
transport_sender->InitializeVideo(transport_video_config); transport_sender->InitializeVideo(transport_video_config);
scoped_ptr<media::cast::CastSender> cast_sender( scoped_ptr<media::cast::CastSender> cast_sender =
media::cast::CastSender::CreateCastSender( media::cast::CastSender::Create(cast_environment, transport_sender.get());
cast_environment,
&audio_config, cast_sender->InitializeVideo(
&video_config, video_config, base::Bind(&InitializationResult), NULL);
NULL, // gpu_factories. cast_sender->InitializeAudio(audio_config, base::Bind(&InitializationResult));
base::Bind(&InitializationResult),
transport_sender.get()));
transport_sender->SetPacketReceiver(cast_sender->packet_receiver()); transport_sender->SetPacketReceiver(cast_sender->packet_receiver());
media::cast::FrameInput* frame_input = cast_sender->frame_input(); scoped_refptr<media::cast::AudioFrameInput> audio_frame_input =
cast_sender->audio_frame_input();
scoped_refptr<media::cast::VideoFrameInput> video_frame_input =
cast_sender->video_frame_input();
scoped_ptr<media::cast::SendProcess> send_process( scoped_ptr<media::cast::SendProcess> send_process(
new media::cast::SendProcess(test_thread.message_loop_proxy(), new media::cast::SendProcess(test_thread.message_loop_proxy(),
cast_environment->Clock(), cast_environment->Clock(),
video_config, video_config,
frame_input)); audio_frame_input,
video_frame_input));
// Set up event subscribers. // Set up event subscribers.
int logging_duration = media::cast::GetLoggingDuration(); int logging_duration = media::cast::GetLoggingDuration();
......
...@@ -67,8 +67,8 @@ void InProcessReceiver::StartOnMainThread() { ...@@ -67,8 +67,8 @@ void InProcessReceiver::StartOnMainThread() {
remote_end_point_, remote_end_point_,
base::Bind(&InProcessReceiver::UpdateCastTransportStatus, base::Bind(&InProcessReceiver::UpdateCastTransportStatus,
base::Unretained(this)))); base::Unretained(this))));
cast_receiver_.reset(CastReceiver::CreateCastReceiver( cast_receiver_ = CastReceiver::Create(
cast_environment_, audio_config_, video_config_, transport_.get())); cast_environment_, audio_config_, video_config_, transport_.get());
// TODO(hubbe): Make the cast receiver do this automatically. // TODO(hubbe): Make the cast receiver do this automatically.
transport_->StartReceiving(cast_receiver_->packet_receiver()); transport_->StartReceiving(cast_receiver_->packet_receiver());
......
...@@ -95,7 +95,7 @@ VideoSender::VideoSender( ...@@ -95,7 +95,7 @@ VideoSender::VideoSender(
scoped_refptr<CastEnvironment> cast_environment, scoped_refptr<CastEnvironment> cast_environment,
const VideoSenderConfig& video_config, const VideoSenderConfig& video_config,
const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories, const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories,
const CastInitializationCallback& initialization_status, const CastInitializationCallback& cast_initialization_cb,
transport::CastTransportSender* const transport_sender) transport::CastTransportSender* const transport_sender)
: rtp_max_delay_(base::TimeDelta::FromMilliseconds( : rtp_max_delay_(base::TimeDelta::FromMilliseconds(
video_config.rtp_config.max_delay_ms)), video_config.rtp_config.max_delay_ms)),
...@@ -149,12 +149,12 @@ VideoSender::VideoSender( ...@@ -149,12 +149,12 @@ VideoSender::VideoSender(
video_config.rtcp_c_name)); video_config.rtcp_c_name));
rtcp_->SetCastReceiverEventHistorySize(kReceiverRtcpEventHistorySize); rtcp_->SetCastReceiverEventHistorySize(kReceiverRtcpEventHistorySize);
// TODO(pwestin): pass cast_initialization to |video_encoder_| // TODO(pwestin): pass cast_initialization_cb to |video_encoder_|
// and remove this call. // and remove this call.
cast_environment_->PostTask( cast_environment_->PostTask(
CastEnvironment::MAIN, CastEnvironment::MAIN,
FROM_HERE, FROM_HERE,
base::Bind(initialization_status, STATUS_INITIALIZED)); base::Bind(cast_initialization_cb, STATUS_VIDEO_INITIALIZED));
cast_environment_->Logging()->AddRawEventSubscriber(&event_subscriber_); cast_environment_->Logging()->AddRawEventSubscriber(&event_subscriber_);
memset(frame_id_to_rtp_timestamp_, 0, sizeof(frame_id_to_rtp_timestamp_)); memset(frame_id_to_rtp_timestamp_, 0, sizeof(frame_id_to_rtp_timestamp_));
......
...@@ -47,7 +47,7 @@ class VideoSender : public base::NonThreadSafe, ...@@ -47,7 +47,7 @@ class VideoSender : public base::NonThreadSafe,
VideoSender(scoped_refptr<CastEnvironment> cast_environment, VideoSender(scoped_refptr<CastEnvironment> cast_environment,
const VideoSenderConfig& video_config, const VideoSenderConfig& video_config,
const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories, const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories,
const CastInitializationCallback& initialization_status, const CastInitializationCallback& cast_initialization_cb,
transport::CastTransportSender* const transport_sender); transport::CastTransportSender* const transport_sender);
virtual ~VideoSender(); virtual ~VideoSender();
......
...@@ -63,12 +63,12 @@ class PeerVideoSender : public VideoSender { ...@@ -63,12 +63,12 @@ class PeerVideoSender : public VideoSender {
scoped_refptr<CastEnvironment> cast_environment, scoped_refptr<CastEnvironment> cast_environment,
const VideoSenderConfig& video_config, const VideoSenderConfig& video_config,
const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories, const scoped_refptr<GpuVideoAcceleratorFactories>& gpu_factories,
const CastInitializationCallback& initialization_status, const CastInitializationCallback& cast_initialization_cb,
transport::CastTransportSender* const transport_sender) transport::CastTransportSender* const transport_sender)
: VideoSender(cast_environment, : VideoSender(cast_environment,
video_config, video_config,
gpu_factories, gpu_factories,
initialization_status, cast_initialization_cb,
transport_sender) {} transport_sender) {}
using VideoSender::OnReceivedCastFeedback; using VideoSender::OnReceivedCastFeedback;
}; };
...@@ -174,7 +174,7 @@ class VideoSenderTest : public ::testing::Test { ...@@ -174,7 +174,7 @@ class VideoSenderTest : public ::testing::Test {
} }
void InitializationResult(CastInitializationStatus result) { void InitializationResult(CastInitializationStatus result) {
EXPECT_EQ(result, STATUS_INITIALIZED); EXPECT_EQ(result, STATUS_VIDEO_INITIALIZED);
} }
base::SimpleTestTickClock* testing_clock_; // Owned by CastEnvironment. base::SimpleTestTickClock* testing_clock_; // Owned by CastEnvironment.
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment