Commit e81fe1a1 authored by miu@chromium.org

Cast Streaming API end-to-end browser_test.

Similar to the TabCaptureApi.EndToEnd test: An extension is run that generates a rotating cycle of colors and audio tones, and the result is captured and streamed to an in-process Cast receiver.  In addition:

1. Refactored existing code out of the cast_receiver_app into a shared InProcessReceiver class.
2. Added a convenient StandaloneCastEnvironment for test apps.
3. Got rid of the cast_receiver_app prompts since the defaults are used >99% of the time.  Using the --enable-prompts command line arg will show them again.
4. Minor clean-ups in files touched.

NOTE: The new EndToEnd test is being submitted as disabled while the team works through outstanding bugs.  It will be re-enabled once the implementation is sufficiently stable for the bots.

R=hubbe@chromium.org, kalman@chromium.org

Review URL: https://codereview.chromium.org/184813009

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@255805 0039d316-1c4b-4281-b951-d872f2087c98
parent 963a91b1
include_rules = [
"+media/cast",
]
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/command_line.h"
#include "chrome/browser/extensions/extension_apitest.h"
#include "chrome/common/chrome_switches.h"
#include "content/public/common/content_switches.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace extensions {
class CastStreamingApiTest : public ExtensionApiTest {
virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
ExtensionApiTest::SetUpCommandLine(command_line);
command_line->AppendSwitchASCII(
switches::kWhitelistedExtensionID,
"ddchlicdkolnonkihahngkmmmjnjlkkf");
}
};
// Test running the test extension for Cast Mirroring API.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Basics) {
ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "basics.html"));
}
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Stats) {
ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "stats.html"));
}
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, BadLogging) {
ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "bad_logging.html"));
}
} // namespace extensions
......@@ -895,6 +895,7 @@
'../components/component_strings.gyp:component_strings',
'../device/bluetooth/bluetooth.gyp:device_bluetooth_mocks',
'../google_apis/google_apis.gyp:google_apis_test_support',
'../media/cast/test/utility/utility.gyp:cast_test_utility',
'../net/net.gyp:net',
'../net/net.gyp:net_test_support',
'../skia/skia.gyp:skia',
......@@ -1101,6 +1102,7 @@
'browser/extensions/api/bookmarks/bookmark_apitest.cc',
'browser/extensions/api/browsing_data/browsing_data_test.cc',
'browser/extensions/api/cast_channel/cast_channel_apitest.cc',
'browser/extensions/api/cast_streaming/cast_streaming_apitest.cc',
'browser/extensions/api/cloud_print_private/cloud_print_private_apitest.cc',
'browser/extensions/api/command_line_private/command_line_private_apitest.cc',
'browser/extensions/api/commands/command_service_browsertest.cc',
......@@ -1197,7 +1199,6 @@
'browser/extensions/browsertest_util.cc',
'browser/extensions/browsertest_util.h',
'browser/extensions/browsertest_util_browsertest.cc',
'browser/extensions/cast_streaming_apitest.cc',
'browser/extensions/chrome_app_api_browsertest.cc',
'browser/extensions/content_script_apitest.cc',
'browser/extensions/content_security_policy_apitest.cc',
......
<html>
<head>
<script src="end_to_end_sender.js"></script>
</head>
<body>
<h1 id="message">Test not started yet.</h1>
</body>
</html>
// Copyright (c) 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This code uses the tab capture and Cast streaming APIs to capture the tab's
// content and send it to a Cast receiver end-point controlled by
// CastStreamingApiTest code. It generates audio/video test patterns that
// rotate cyclically, and an in-process Cast receiver checks for these test
// patterns to confirm correct end-to-end functionality of the Cast streaming
// API.
//
// Once everything is set up and fully operational, chrome.test.succeed() is
// invoked as a signal for the end-to-end testing to proceed. If any step in
// the setup process fails, chrome.test.fail() is invoked.
// The test pattern cycles as a color fill of red, then green, then blue,
// paired with successively higher-frequency tones.
var colors = [ [ 255, 0, 0 ], [ 0, 255, 0 ], [ 0, 0, 255 ] ];
var freqs = [ 200, 500, 1800 ];
var curTestIdx = 0;
function updateTestPattern() {
if (document.body && document.body.style) // Check that page is loaded.
document.body.style.backgroundColor = "rgb(" + colors[curTestIdx] + ")";
// Important: Blink the testing message so that the capture pipeline will
// observe drawing updates and continue to produce video frames.
var message = document.getElementById("message");
if (message && !message.blinkInterval) {
message.innerHTML = "Testing...";
message.blinkInterval = setInterval(
function toggleVisibility() {
message.style.visibility =
message.style.visibility == "hidden" ? "visible" : "hidden";
},
125);
}
if (!this.audioContext) {
this.audioContext = new webkitAudioContext();
this.gainNode = this.audioContext.createGainNode();
this.gainNode.gain.value = 0.5;
this.gainNode.connect(this.audioContext.destination);
} else {
this.oscillator.disconnect();
}
// Note: We recreate the oscillator each time because this switches the audio
// frequency immediately. Re-using the same oscillator tends to take several
// hundred milliseconds to ramp-up/down the frequency.
this.oscillator = audioContext.createOscillator();
this.oscillator.type = "sine";
this.oscillator.frequency.value = freqs[curTestIdx];
this.oscillator.connect(gainNode);
this.oscillator.noteOn(0);
}
// Calls updateTestPattern(), then waits and calls itself again to advance to
// the next one.
function runTestPatternLoop() {
updateTestPattern();
if (!this.curAdvanceWaitTimeMillis) {
this.curAdvanceWaitTimeMillis = 750;
}
setTimeout(
function advanceTestPattern() {
++curTestIdx;
if (curTestIdx >= colors.length) { // Completed a cycle.
curTestIdx = 0;
// Increase the wait time between switching test patterns for
// overloaded bots that aren't capturing all the frames of video.
this.curAdvanceWaitTimeMillis *= 1.25;
}
runTestPatternLoop();
},
this.curAdvanceWaitTimeMillis);
}
chrome.test.runTests([
function sendTestPatterns() {
// The receive port changes between browser_test invocations, and is passed
// as a query parameter in the URL.
var recvPort;
try {
recvPort = parseInt(window.location.search.substring("?port=".length));
chrome.test.assertTrue(recvPort > 0);
} catch (err) {
chrome.test.fail("Error parsing ?port=### -- " + err.message);
return;
}
// Set to true if you want to confirm the sender color/tone changes are
// working, without starting tab capture and Cast sending.
if (false) {
setTimeout(runTestPatternLoop, 0);
return;
}
var width = 128;
var height = 128;
var frameRate = 15;
chrome.tabCapture.capture(
{ video: true,
audio: true,
videoConstraints: {
mandatory: {
minWidth: width,
minHeight: height,
maxWidth: width,
maxHeight: height,
maxFrameRate: frameRate,
}
}
},
function startStreamingTestPatterns(captureStream) {
chrome.test.assertTrue(!!captureStream);
chrome.cast.streaming.session.create(
captureStream.getAudioTracks()[0],
captureStream.getVideoTracks()[0],
function (audioId, videoId, udpId) {
chrome.cast.streaming.udpTransport.setDestination(
udpId, { address: "127.0.0.1", port: recvPort } );
var rtpStream = chrome.cast.streaming.rtpStream;
rtpStream.start(audioId,
rtpStream.getSupportedParams(audioId)[0]);
var videoParams = rtpStream.getSupportedParams(videoId)[0];
videoParams.payload.width = width;
videoParams.payload.height = height;
videoParams.payload.clockRate = frameRate;
rtpStream.start(videoId, videoParams);
setTimeout(runTestPatternLoop, 0);
if (window.innerWidth > 2 * width ||
window.innerHeight > 2 * height) {
console.warn("***TIMEOUT HAZARD*** Tab size is " +
window.innerWidth + "x" + window.innerHeight +
", which is much larger than the expected " +
width + "x" + height);
}
chrome.test.succeed();
});
});
}
]);
{
"key": "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8xv6iO+j4kzj1HiBL93+XVJH/CRyAQMUHS/Z0l8nCAzaAFkW/JsNwxJqQhrZspnxLqbQxNncXs6g6bsXAwKHiEs+LSs+bIv0Gc/2ycZdhXJ8GhEsSMakog5dpQd1681c2gLK/8CrAoewE/0GIKhaFcp7a2iZlGh4Am6fgMKy0iQIDAQAB",
"name": "chrome.webrtc.cast",
"name": "chrome.cast.streaming",
"version": "0.1",
"manifest_version": 2,
"description": "Tests Cast Mirroring Extensions API.",
......
......@@ -29,23 +29,19 @@ CastEnvironment::CastEnvironment(
scoped_refptr<SingleThreadTaskRunner> video_encode_thread_proxy,
scoped_refptr<SingleThreadTaskRunner> video_decode_thread_proxy,
scoped_refptr<SingleThreadTaskRunner> transport_thread_proxy,
const CastLoggingConfig& config)
: clock_(clock.Pass()),
main_thread_proxy_(main_thread_proxy),
const CastLoggingConfig& logging_config)
: main_thread_proxy_(main_thread_proxy),
audio_encode_thread_proxy_(audio_encode_thread_proxy),
audio_decode_thread_proxy_(audio_decode_thread_proxy),
video_encode_thread_proxy_(video_encode_thread_proxy),
video_decode_thread_proxy_(video_decode_thread_proxy),
transport_thread_proxy_(transport_thread_proxy),
logging_(new LoggingImpl(main_thread_proxy, config)) {
DCHECK(main_thread_proxy);
}
clock_(clock.Pass()),
logging_(new LoggingImpl(logging_config)) {}
CastEnvironment::~CastEnvironment() {
// Logging must be deleted on the main thread.
if (main_thread_proxy_->RunsTasksOnCurrentThread()) {
logging_.reset();
} else {
if (main_thread_proxy_ && !main_thread_proxy_->RunsTasksOnCurrentThread()) {
main_thread_proxy_->PostTask(
FROM_HERE,
base::Bind(&DeleteLoggingOnMainThread, base::Passed(&logging_)));
......@@ -55,10 +51,7 @@ CastEnvironment::~CastEnvironment() {
bool CastEnvironment::PostTask(ThreadId identifier,
const tracked_objects::Location& from_here,
const base::Closure& task) {
scoped_refptr<SingleThreadTaskRunner> task_runner =
GetMessageSingleThreadTaskRunnerForThread(identifier);
return task_runner->PostTask(from_here, task);
return GetTaskRunner(identifier)->PostTask(from_here, task);
}
bool CastEnvironment::PostDelayedTask(
......@@ -66,15 +59,11 @@ bool CastEnvironment::PostDelayedTask(
const tracked_objects::Location& from_here,
const base::Closure& task,
base::TimeDelta delay) {
scoped_refptr<SingleThreadTaskRunner> task_runner =
GetMessageSingleThreadTaskRunnerForThread(identifier);
return task_runner->PostDelayedTask(from_here, task, delay);
return GetTaskRunner(identifier)->PostDelayedTask(from_here, task, delay);
}
scoped_refptr<SingleThreadTaskRunner>
CastEnvironment::GetMessageSingleThreadTaskRunnerForThread(
ThreadId identifier) {
scoped_refptr<SingleThreadTaskRunner> CastEnvironment::GetTaskRunner(
ThreadId identifier) const {
switch (identifier) {
case CastEnvironment::MAIN:
return main_thread_proxy_;
......@@ -97,30 +86,28 @@ CastEnvironment::GetMessageSingleThreadTaskRunnerForThread(
bool CastEnvironment::CurrentlyOn(ThreadId identifier) {
switch (identifier) {
case CastEnvironment::MAIN:
return main_thread_proxy_->RunsTasksOnCurrentThread();
return main_thread_proxy_ &&
main_thread_proxy_->RunsTasksOnCurrentThread();
case CastEnvironment::AUDIO_ENCODER:
return audio_encode_thread_proxy_->RunsTasksOnCurrentThread();
return audio_encode_thread_proxy_ &&
audio_encode_thread_proxy_->RunsTasksOnCurrentThread();
case CastEnvironment::AUDIO_DECODER:
return audio_decode_thread_proxy_->RunsTasksOnCurrentThread();
return audio_decode_thread_proxy_ &&
audio_decode_thread_proxy_->RunsTasksOnCurrentThread();
case CastEnvironment::VIDEO_ENCODER:
return video_encode_thread_proxy_->RunsTasksOnCurrentThread();
return video_encode_thread_proxy_ &&
video_encode_thread_proxy_->RunsTasksOnCurrentThread();
case CastEnvironment::VIDEO_DECODER:
return video_decode_thread_proxy_->RunsTasksOnCurrentThread();
return video_decode_thread_proxy_ &&
video_decode_thread_proxy_->RunsTasksOnCurrentThread();
case CastEnvironment::TRANSPORT:
return transport_thread_proxy_->RunsTasksOnCurrentThread();
return transport_thread_proxy_ &&
transport_thread_proxy_->RunsTasksOnCurrentThread();
default:
NOTREACHED() << "Invalid thread identifier";
return false;
}
}
base::TickClock* CastEnvironment::Clock() const { return clock_.get(); }
LoggingImpl* CastEnvironment::Logging() {
DCHECK(CurrentlyOn(CastEnvironment::MAIN))
<< "Must be called from main thread";
return logging_.get();
}
} // namespace cast
} // namespace media
......@@ -46,7 +46,7 @@ class CastEnvironment : public base::RefCountedThreadSafe<CastEnvironment> {
scoped_refptr<base::SingleThreadTaskRunner> video_encode_thread_proxy,
scoped_refptr<base::SingleThreadTaskRunner> video_decode_thread_proxy,
scoped_refptr<base::SingleThreadTaskRunner> transport_thread_proxy,
const CastLoggingConfig& config);
const CastLoggingConfig& logging_config);
// These are the same methods in message_loop.h, but are guaranteed to either
// get posted to the MessageLoop if it's still alive, or be deleted otherwise.
......@@ -64,13 +64,15 @@ class CastEnvironment : public base::RefCountedThreadSafe<CastEnvironment> {
bool CurrentlyOn(ThreadId identifier);
base::TickClock* Clock() const;
// All of the media::cast implementation must use this TickClock.
base::TickClock* Clock() const { return clock_.get(); }
// Logging is not thread safe. Should always be called from the main thread.
LoggingImpl* Logging();
// Logging is not thread safe. Its methods should always be called from the
// main thread.
LoggingImpl* Logging() const { return logging_.get(); }
scoped_refptr<base::SingleThreadTaskRunner>
GetMessageSingleThreadTaskRunnerForThread(ThreadId identifier);
scoped_refptr<base::SingleThreadTaskRunner> GetTaskRunner(
ThreadId identifier) const;
bool HasAudioEncoderThread() {
return audio_encode_thread_proxy_ ? true : false;
......@@ -83,10 +85,7 @@ class CastEnvironment : public base::RefCountedThreadSafe<CastEnvironment> {
protected:
virtual ~CastEnvironment();
private:
friend class base::RefCountedThreadSafe<CastEnvironment>;
scoped_ptr<base::TickClock> clock_;
// Subclasses may override these.
scoped_refptr<base::SingleThreadTaskRunner> main_thread_proxy_;
scoped_refptr<base::SingleThreadTaskRunner> audio_encode_thread_proxy_;
scoped_refptr<base::SingleThreadTaskRunner> audio_decode_thread_proxy_;
......@@ -94,6 +93,10 @@ class CastEnvironment : public base::RefCountedThreadSafe<CastEnvironment> {
scoped_refptr<base::SingleThreadTaskRunner> video_decode_thread_proxy_;
scoped_refptr<base::SingleThreadTaskRunner> transport_thread_proxy_;
private:
friend class base::RefCountedThreadSafe<CastEnvironment>;
scoped_ptr<base::TickClock> clock_;
scoped_ptr<LoggingImpl> logging_;
DISALLOW_COPY_AND_ASSIGN(CastEnvironment);
......
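For orientation (not part of this change), the sketch below shows how a media/cast component might use the renamed CastEnvironment helpers above. The functions and the anonymous namespace are invented for illustration and assume the usual base/ includes; env->GetTaskRunner(VIDEO_DECODER)->PostTask(...) would be an equivalent form using the renamed accessor directly.

namespace {

// Hypothetical work item; stands in for any media::cast task.
void DecodeNextFrame(scoped_refptr<media::cast::CastEnvironment> env) {
  DCHECK(env->CurrentlyOn(media::cast::CastEnvironment::VIDEO_DECODER));
  // ... actual decode work would happen here ...
}

// Posts from the MAIN thread to the VIDEO_DECODER thread via the PostTask()
// convenience wrapper.
void PostDecode(const scoped_refptr<media::cast::CastEnvironment>& env) {
  DCHECK(env->CurrentlyOn(media::cast::CastEnvironment::MAIN));
  env->PostTask(media::cast::CastEnvironment::VIDEO_DECODER, FROM_HERE,
                base::Bind(&DecodeNextFrame, env));
}

}  // namespace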
......@@ -98,8 +98,7 @@ CastReceiverImpl::CastReceiverImpl(
transport::PacketSender* const packet_sender)
: pacer_(cast_environment->Clock(),
packet_sender,
cast_environment->GetMessageSingleThreadTaskRunnerForThread(
CastEnvironment::TRANSPORT)),
cast_environment->GetTaskRunner(CastEnvironment::TRANSPORT)),
audio_receiver_(cast_environment, audio_config, &pacer_),
video_receiver_(cast_environment, video_config, &pacer_),
frame_receiver_(new LocalFrameReceiver(cast_environment,
......
......@@ -9,20 +9,19 @@
namespace media {
namespace cast {
LoggingImpl::LoggingImpl(
scoped_refptr<base::SingleThreadTaskRunner> main_thread_proxy,
const CastLoggingConfig& config)
: main_thread_proxy_(main_thread_proxy),
config_(config),
raw_(),
stats_() {}
LoggingImpl::LoggingImpl(const CastLoggingConfig& config)
: config_(config), raw_(), stats_() {
// LoggingImpl can be constructed on any thread, but its methods should all be
// called on the same thread.
thread_checker_.DetachFromThread();
}
LoggingImpl::~LoggingImpl() {}
void LoggingImpl::InsertFrameEvent(const base::TimeTicks& time_of_event,
CastLoggingEvent event, uint32 rtp_timestamp,
uint32 frame_id) {
DCHECK(main_thread_proxy_->RunsTasksOnCurrentThread());
DCHECK(thread_checker_.CalledOnValidThread());
if (config_.enable_raw_data_collection) {
raw_.InsertFrameEvent(time_of_event, event, rtp_timestamp, frame_id);
}
......@@ -40,7 +39,7 @@ void LoggingImpl::InsertFrameEventWithSize(const base::TimeTicks& time_of_event,
CastLoggingEvent event,
uint32 rtp_timestamp,
uint32 frame_id, int frame_size) {
DCHECK(main_thread_proxy_->RunsTasksOnCurrentThread());
DCHECK(thread_checker_.CalledOnValidThread());
if (config_.enable_raw_data_collection) {
raw_.InsertFrameEventWithSize(time_of_event, event, rtp_timestamp, frame_id,
frame_size);
......@@ -61,7 +60,7 @@ void LoggingImpl::InsertFrameEventWithSize(const base::TimeTicks& time_of_event,
void LoggingImpl::InsertFrameEventWithDelay(
const base::TimeTicks& time_of_event, CastLoggingEvent event,
uint32 rtp_timestamp, uint32 frame_id, base::TimeDelta delay) {
DCHECK(main_thread_proxy_->RunsTasksOnCurrentThread());
DCHECK(thread_checker_.CalledOnValidThread());
if (config_.enable_raw_data_collection) {
raw_.InsertFrameEventWithDelay(time_of_event, event, rtp_timestamp,
frame_id, delay);
......@@ -82,7 +81,7 @@ void LoggingImpl::InsertFrameEventWithDelay(
void LoggingImpl::InsertPacketListEvent(const base::TimeTicks& time_of_event,
CastLoggingEvent event,
const PacketList& packets) {
DCHECK(main_thread_proxy_->RunsTasksOnCurrentThread());
DCHECK(thread_checker_.CalledOnValidThread());
for (unsigned int i = 0; i < packets.size(); ++i) {
const Packet& packet = packets[i];
// Parse basic properties.
......@@ -107,7 +106,7 @@ void LoggingImpl::InsertPacketEvent(const base::TimeTicks& time_of_event,
uint32 rtp_timestamp, uint32 frame_id,
uint16 packet_id, uint16 max_packet_id,
size_t size) {
DCHECK(main_thread_proxy_->RunsTasksOnCurrentThread());
DCHECK(thread_checker_.CalledOnValidThread());
if (config_.enable_raw_data_collection) {
raw_.InsertPacketEvent(time_of_event, event, rtp_timestamp, frame_id,
packet_id, max_packet_id, size);
......@@ -126,7 +125,7 @@ void LoggingImpl::InsertPacketEvent(const base::TimeTicks& time_of_event,
void LoggingImpl::InsertGenericEvent(const base::TimeTicks& time_of_event,
CastLoggingEvent event, int value) {
DCHECK(main_thread_proxy_->RunsTasksOnCurrentThread());
DCHECK(thread_checker_.CalledOnValidThread());
if (config_.enable_raw_data_collection) {
raw_.InsertGenericEvent(time_of_event, event, value);
}
......@@ -142,31 +141,33 @@ void LoggingImpl::InsertGenericEvent(const base::TimeTicks& time_of_event,
}
void LoggingImpl::AddRawEventSubscriber(RawEventSubscriber* subscriber) {
DCHECK(thread_checker_.CalledOnValidThread());
raw_.AddSubscriber(subscriber);
}
void LoggingImpl::RemoveRawEventSubscriber(RawEventSubscriber* subscriber) {
DCHECK(thread_checker_.CalledOnValidThread());
raw_.RemoveSubscriber(subscriber);
}
FrameStatsMap LoggingImpl::GetFrameStatsData(EventMediaType media_type) const {
DCHECK(main_thread_proxy_->RunsTasksOnCurrentThread());
DCHECK(thread_checker_.CalledOnValidThread());
return stats_.GetFrameStatsData(media_type);
}
PacketStatsMap LoggingImpl::GetPacketStatsData(
EventMediaType media_type) const {
DCHECK(main_thread_proxy_->RunsTasksOnCurrentThread());
DCHECK(thread_checker_.CalledOnValidThread());
return stats_.GetPacketStatsData(media_type);
}
GenericStatsMap LoggingImpl::GetGenericStatsData() const {
DCHECK(main_thread_proxy_->RunsTasksOnCurrentThread());
DCHECK(thread_checker_.CalledOnValidThread());
return stats_.GetGenericStatsData();
}
void LoggingImpl::ResetStats() {
DCHECK(main_thread_proxy_->RunsTasksOnCurrentThread());
DCHECK(thread_checker_.CalledOnValidThread());
stats_.Reset();
}
......
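Aside (not part of the diff): the base::ThreadChecker idiom that LoggingImpl now follows, reduced to its general form with an invented class name. Construction may happen on any thread; DetachFromThread() lets the checker bind to whichever thread makes the first checked call.

#include "base/logging.h"
#include "base/threading/thread_checker.h"

class SingleThreadUser {
 public:
  SingleThreadUser() {
    // Allow construction on any thread; the checker re-binds to the first
    // thread that calls DoWork().
    thread_checker_.DetachFromThread();
  }

  void DoWork() {
    DCHECK(thread_checker_.CalledOnValidThread());
    // ... thread-affine work ...
  }

 private:
  base::ThreadChecker thread_checker_;
};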
......@@ -10,7 +10,7 @@
// 2. Tracing of raw events.
#include "base/memory/ref_counted.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_checker.h"
#include "media/cast/cast_config.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/logging/logging_raw.h"
......@@ -19,14 +19,14 @@
namespace media {
namespace cast {
// Should only be called from the main thread.
class LoggingImpl : public base::NonThreadSafe {
class LoggingImpl {
public:
LoggingImpl(scoped_refptr<base::SingleThreadTaskRunner> main_thread_proxy,
const CastLoggingConfig& config);
explicit LoggingImpl(const CastLoggingConfig& config);
~LoggingImpl();
// Note: All methods below should be called from the same thread.
void InsertFrameEvent(const base::TimeTicks& time_of_event,
CastLoggingEvent event, uint32 rtp_timestamp,
uint32 frame_id);
......@@ -66,7 +66,7 @@ class LoggingImpl : public base::NonThreadSafe {
void ResetStats();
private:
scoped_refptr<base::SingleThreadTaskRunner> main_thread_proxy_;
base::ThreadChecker thread_checker_;
const CastLoggingConfig config_;
LoggingRaw raw_;
LoggingStats stats_;
......
......@@ -11,7 +11,6 @@
#include "media/cast/logging/logging_defines.h"
#include "media/cast/logging/logging_impl.h"
#include "media/cast/logging/simple_event_subscriber.h"
#include "media/cast/test/fake_single_thread_task_runner.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
......@@ -34,8 +33,7 @@ class LoggingImplTest : public ::testing::Test {
testing_clock_.Advance(
base::TimeDelta::FromMilliseconds(kStartMillisecond));
task_runner_ = new test::FakeSingleThreadTaskRunner(&testing_clock_);
logging_.reset(new LoggingImpl(task_runner_, config_));
logging_.reset(new LoggingImpl(config_));
logging_->AddRawEventSubscriber(&event_subscriber_);
}
......@@ -44,7 +42,6 @@ class LoggingImplTest : public ::testing::Test {
}
CastLoggingConfig config_;
scoped_refptr<test::FakeSingleThreadTaskRunner> task_runner_;
scoped_ptr<LoggingImpl> logging_;
base::SimpleTestTickClock testing_clock_;
SimpleEventSubscriber event_subscriber_;
......
......@@ -106,7 +106,6 @@ void LinuxOutputWindow::CreateWindow(int x_pos,
VLOG(1) << "XShmCreateImage failed";
NOTREACHED();
}
render_buffer_ = reinterpret_cast<uint8_t*>(image_->data);
shminfo_.readOnly = false;
// Attach image to display.
......@@ -119,14 +118,16 @@ void LinuxOutputWindow::CreateWindow(int x_pos,
void LinuxOutputWindow::RenderFrame(
const scoped_refptr<media::VideoFrame>& video_frame) {
CHECK_LE(video_frame->coded_size().width(), image_->width);
CHECK_LE(video_frame->coded_size().height(), image_->height);
libyuv::I420ToARGB(video_frame->data(VideoFrame::kYPlane),
video_frame->stride(VideoFrame::kYPlane),
video_frame->data(VideoFrame::kUPlane),
video_frame->stride(VideoFrame::kUPlane),
video_frame->data(VideoFrame::kVPlane),
video_frame->stride(VideoFrame::kVPlane),
render_buffer_,
video_frame->stride(VideoFrame::kYPlane) * 4,
reinterpret_cast<uint8_t*>(image_->data),
image_->bytes_per_line,
video_frame->coded_size().width(),
video_frame->coded_size().height());
......@@ -149,4 +150,4 @@ void LinuxOutputWindow::RenderFrame(
} // namespace test
} // namespace cast
} // namespace media
\ No newline at end of file
} // namespace media
......@@ -41,7 +41,6 @@ class LinuxOutputWindow {
int width,
int height,
const std::string& name);
uint8* render_buffer_;
Display* display_;
Window window_;
XShmSegmentInfo shminfo_;
......
......@@ -21,7 +21,10 @@
#include "media/cast/cast_environment.h"
#include "media/cast/cast_receiver.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/in_process_receiver.h"
#include "media/cast/test/utility/input_builder.h"
#include "media/cast/test/utility/standalone_cast_environment.h"
#include "media/cast/transport/transport/udp_transport.h"
#include "net/base/net_util.h"
......@@ -31,6 +34,7 @@
namespace media {
namespace cast {
// Settings chosen to match default sender settings.
#define DEFAULT_SEND_PORT "0"
#define DEFAULT_RECEIVE_PORT "2344"
......@@ -46,12 +50,10 @@ namespace cast {
#define DEFAULT_VIDEO_CODEC_HEIGHT "480"
#define DEFAULT_VIDEO_CODEC_BITRATE "2000"
static const int kAudioSamplingFrequency = 48000;
#if defined(OS_LINUX)
const int kVideoWindowWidth = 1280;
const int kVideoWindowHeight = 720;
#endif // OS_LINUX
static const int kFrameTimerMs = 33;
void GetPorts(int* tx_port, int* rx_port) {
test::InputBuilder tx_input(
......@@ -103,18 +105,9 @@ void GetPayloadtype(AudioReceiverConfig* audio_config) {
}
AudioReceiverConfig GetAudioReceiverConfig() {
AudioReceiverConfig audio_config;
AudioReceiverConfig audio_config = GetDefaultAudioReceiverConfig();
GetSsrcs(&audio_config);
GetPayloadtype(&audio_config);
audio_config.rtcp_c_name = "audio_receiver@a.b.c.d";
VLOG(1) << "Using OPUS 48Khz stereo";
audio_config.use_external_decoder = false;
audio_config.frequency = 48000;
audio_config.channels = 2;
audio_config.codec = transport::kOpus;
return audio_config;
}
......@@ -127,28 +120,26 @@ void GetPayloadtype(VideoReceiverConfig* video_config) {
}
VideoReceiverConfig GetVideoReceiverConfig() {
VideoReceiverConfig video_config;
VideoReceiverConfig video_config = GetDefaultVideoReceiverConfig();
GetSsrcs(&video_config);
GetPayloadtype(&video_config);
video_config.rtcp_c_name = "video_receiver@a.b.c.d";
video_config.use_external_decoder = false;
VLOG(1) << "Using VP8";
video_config.codec = transport::kVp8;
return video_config;
}
static void UpdateCastTransportStatus(transport::CastTransportStatus status) {
VLOG(1) << "CastTransportStatus = " << status;
}
class ReceiveProcess : public base::RefCountedThreadSafe<ReceiveProcess> {
// An InProcessReceiver that renders video frames to a LinuxOutputWindow. While
// it does receive audio frames, it does not play them.
class ReceiverDisplay : public InProcessReceiver {
public:
explicit ReceiveProcess(scoped_refptr<FrameReceiver> frame_receiver)
: frame_receiver_(frame_receiver),
ReceiverDisplay(const scoped_refptr<CastEnvironment>& cast_environment,
const net::IPEndPoint& local_end_point,
const net::IPEndPoint& remote_end_point,
const AudioReceiverConfig& audio_config,
const VideoReceiverConfig& video_config)
: InProcessReceiver(cast_environment,
local_end_point,
remote_end_point,
audio_config,
video_config),
#if defined(OS_LINUX)
render_(0, 0, kVideoWindowWidth, kVideoWindowHeight, "Cast_receiver"),
#endif // OS_LINUX
......@@ -156,62 +147,34 @@ class ReceiveProcess : public base::RefCountedThreadSafe<ReceiveProcess> {
last_render_time_() {
}
void Start() {
GetAudioFrame(base::TimeDelta::FromMilliseconds(kFrameTimerMs));
GetVideoFrame();
}
virtual ~ReceiverDisplay() {}
protected:
virtual ~ReceiveProcess() {}
private:
friend class base::RefCountedThreadSafe<ReceiveProcess>;
void DisplayFrame(const scoped_refptr<media::VideoFrame>& video_frame,
const base::TimeTicks& render_time) {
virtual void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
const base::TimeTicks& render_time) OVERRIDE {
#ifdef OS_LINUX
render_.RenderFrame(video_frame);
#endif // OS_LINUX
// Print out the delta between frames.
if (!last_render_time_.is_null()) {
base::TimeDelta time_diff = render_time - last_render_time_;
VLOG(1) << " RenderDelay[mS] = " << time_diff.InMilliseconds();
VLOG(1) << "Size = " << video_frame->coded_size().ToString()
<< "; RenderDelay[mS] = " << time_diff.InMilliseconds();
}
last_render_time_ = render_time;
GetVideoFrame();
}
void ReceiveAudioFrame(scoped_ptr<PcmAudioFrame> audio_frame,
const base::TimeTicks& playout_time) {
virtual void OnAudioFrame(scoped_ptr<PcmAudioFrame> audio_frame,
const base::TimeTicks& playout_time) OVERRIDE {
// For audio just print the playout delta between audio frames.
// Default diff time is kFrameTimerMs.
base::TimeDelta time_diff =
base::TimeDelta::FromMilliseconds(kFrameTimerMs);
if (!last_playout_time_.is_null()) {
time_diff = playout_time - last_playout_time_;
VLOG(1) << " ***PlayoutDelay[mS] = " << time_diff.InMilliseconds();
base::TimeDelta time_diff = playout_time - last_playout_time_;
VLOG(1) << "SampleRate = " << audio_frame->frequency
<< "; PlayoutDelay[mS] = " << time_diff.InMilliseconds();
}
last_playout_time_ = playout_time;
}
void GetAudioFrame(base::TimeDelta playout_diff) {
int num_10ms_blocks = playout_diff.InMilliseconds() / 10;
frame_receiver_->GetRawAudioFrame(
num_10ms_blocks,
kAudioSamplingFrequency,
base::Bind(&ReceiveProcess::ReceiveAudioFrame, this));
base::MessageLoop::current()->PostDelayedTask(
FROM_HERE,
base::Bind(&ReceiveProcess::GetAudioFrame, this, playout_diff),
playout_diff);
}
void GetVideoFrame() {
frame_receiver_->GetRawVideoFrame(
base::Bind(&ReceiveProcess::DisplayFrame, this));
}
scoped_refptr<FrameReceiver> frame_receiver_;
#ifdef OS_LINUX
test::LinuxOutputWindow render_;
#endif // OS_LINUX
......@@ -224,32 +187,15 @@ class ReceiveProcess : public base::RefCountedThreadSafe<ReceiveProcess> {
int main(int argc, char** argv) {
base::AtExitManager at_exit;
base::MessageLoopForIO main_message_loop;
CommandLine::Init(argc, argv);
InitLogging(logging::LoggingSettings());
VLOG(1) << "Cast Receiver";
base::Thread audio_thread("Cast audio decoder thread");
base::Thread video_thread("Cast video decoder thread");
audio_thread.Start();
video_thread.Start();
scoped_ptr<base::TickClock> clock(new base::DefaultTickClock());
// Enable main and receiver side threads only. Enable raw event logging.
// Running transport on the main thread.
// Enable raw event logging only.
media::cast::CastLoggingConfig logging_config;
logging_config.enable_raw_data_collection = true;
scoped_refptr<media::cast::CastEnvironment> cast_environment(
new media::cast::CastEnvironment(clock.Pass(),
main_message_loop.message_loop_proxy(),
NULL,
audio_thread.message_loop_proxy(),
NULL,
video_thread.message_loop_proxy(),
main_message_loop.message_loop_proxy(),
logging_config));
new media::cast::StandaloneCastEnvironment(logging_config));
media::cast::AudioReceiverConfig audio_config =
media::cast::GetAudioReceiverConfig();
......@@ -281,23 +227,15 @@ int main(int argc, char** argv) {
net::IPEndPoint remote_end_point(remote_ip_number, remote_port);
net::IPEndPoint local_end_point(local_ip_number, local_port);
scoped_ptr<media::cast::transport::UdpTransport> transport(
new media::cast::transport::UdpTransport(
NULL,
main_message_loop.message_loop_proxy(),
local_end_point,
remote_end_point,
base::Bind(&media::cast::UpdateCastTransportStatus)));
scoped_ptr<media::cast::CastReceiver> cast_receiver(
media::cast::CastReceiver::CreateCastReceiver(
cast_environment, audio_config, video_config, transport.get()));
// TODO(hubbe): Make the cast receiver do this automatically.
transport->StartReceiving(cast_receiver->packet_receiver());
scoped_refptr<media::cast::ReceiveProcess> receive_process(
new media::cast::ReceiveProcess(cast_receiver->frame_receiver()));
receive_process->Start();
main_message_loop.Run();
media::cast::ReceiverDisplay* const receiver_display =
new media::cast::ReceiverDisplay(cast_environment,
local_end_point,
remote_end_point,
audio_config,
video_config);
receiver_display->Start();
base::MessageLoop().Run(); // Run forever (i.e., until SIGTERM).
NOTREACHED();
return 0;
}
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/test/utility/default_config.h"
#include "media/cast/transport/cast_transport_config.h"
namespace media {
namespace cast {
AudioReceiverConfig GetDefaultAudioReceiverConfig() {
AudioReceiverConfig config;
config.feedback_ssrc = 1;
config.incoming_ssrc = 2;
config.rtp_payload_type = 127;
config.rtcp_c_name = "audio_receiver@a.b.c.d";
config.use_external_decoder = false;
config.frequency = 48000;
config.channels = 2;
config.codec = media::cast::transport::kOpus;
return config;
}
VideoReceiverConfig GetDefaultVideoReceiverConfig() {
VideoReceiverConfig config;
config.feedback_ssrc = 12;
config.incoming_ssrc = 11;
config.rtp_payload_type = 96;
config.rtcp_c_name = "video_receiver@a.b.c.d";
config.use_external_decoder = false;
config.codec = media::cast::transport::kVp8;
return config;
}
} // namespace cast
} // namespace media
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAST_TEST_UTILITY_DEFAULT_CONFIG_H_
#define MEDIA_CAST_TEST_UTILITY_DEFAULT_CONFIG_H_
#include "media/cast/cast_config.h"
namespace media {
namespace cast {
// Returns an AudioReceiverConfig initialized to "good-to-go" values. This
// specifies 48 kHz, 2-channel Opus-coded audio, with standard ssrc's, payload
// type, and a dummy name.
AudioReceiverConfig GetDefaultAudioReceiverConfig();
// Returns a VideoReceiverConfig initialized to "good-to-go" values. This
// specifies VP8-coded video, with standard ssrc's, payload type, and a dummy
// name.
VideoReceiverConfig GetDefaultVideoReceiverConfig();
} // namespace cast
} // namespace media
#endif // MEDIA_CAST_TEST_UTILITY_DEFAULT_CONFIG_H_
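A hypothetical caller of the helpers declared above (not part of this commit): tests can start from the defaults and override only the fields they care about. The function name and override values below are invented.

media::cast::VideoReceiverConfig MakeTestVideoConfig() {
  media::cast::VideoReceiverConfig config =
      media::cast::GetDefaultVideoReceiverConfig();
  // Override just what this (imaginary) test needs; everything else keeps the
  // "good-to-go" defaults.
  config.rtp_payload_type = 97;
  config.incoming_ssrc = 42;
  return config;
}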
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/test/utility/in_process_receiver.h"
#include "base/bind_helpers.h"
#include "base/time/time.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/cast_receiver.h"
#include "media/cast/transport/cast_transport_config.h"
#include "media/cast/transport/transport/udp_transport.h"
using media::cast::transport::CastTransportStatus;
using media::cast::transport::UdpTransport;
namespace media {
namespace cast {
InProcessReceiver::InProcessReceiver(
const scoped_refptr<CastEnvironment>& cast_environment,
const net::IPEndPoint& local_end_point,
const net::IPEndPoint& remote_end_point,
const AudioReceiverConfig& audio_config,
const VideoReceiverConfig& video_config)
: cast_environment_(cast_environment),
local_end_point_(local_end_point),
remote_end_point_(remote_end_point),
audio_config_(audio_config),
video_config_(video_config),
weak_factory_(this) {}
InProcessReceiver::~InProcessReceiver() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
}
void InProcessReceiver::Start() {
cast_environment_->PostTask(CastEnvironment::MAIN,
FROM_HERE,
base::Bind(&InProcessReceiver::StartOnMainThread,
base::Unretained(this)));
}
void InProcessReceiver::DestroySoon() {
cast_environment_->PostTask(
CastEnvironment::MAIN,
FROM_HERE,
base::Bind(&InProcessReceiver::WillDestroyReceiver, base::Owned(this)));
}
void InProcessReceiver::UpdateCastTransportStatus(CastTransportStatus status) {
LOG_IF(ERROR, status == media::cast::transport::TRANSPORT_SOCKET_ERROR)
<< "Transport socket error occurred. InProcessReceiver is likely dead.";
VLOG(1) << "CastTransportStatus is now " << status;
}
void InProcessReceiver::StartOnMainThread() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(!transport_ && !cast_receiver_);
transport_.reset(
new UdpTransport(NULL,
cast_environment_->GetTaskRunner(CastEnvironment::MAIN),
local_end_point_,
remote_end_point_,
base::Bind(&InProcessReceiver::UpdateCastTransportStatus,
base::Unretained(this))));
cast_receiver_.reset(CastReceiver::CreateCastReceiver(
cast_environment_, audio_config_, video_config_, transport_.get()));
// TODO(hubbe): Make the cast receiver do this automatically.
transport_->StartReceiving(cast_receiver_->packet_receiver());
PullNextAudioFrame();
PullNextVideoFrame();
}
void InProcessReceiver::GotAudioFrame(scoped_ptr<PcmAudioFrame> audio_frame,
const base::TimeTicks& playout_time) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
OnAudioFrame(audio_frame.Pass(), playout_time);
// TODO(miu): Put this back here: PullNextAudioFrame();
}
void InProcessReceiver::GotVideoFrame(
const scoped_refptr<VideoFrame>& video_frame,
const base::TimeTicks& render_time) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
OnVideoFrame(video_frame, render_time);
PullNextVideoFrame();
}
void InProcessReceiver::PullNextAudioFrame() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
cast_receiver_->frame_receiver()->GetRawAudioFrame(
1 /* 10 ms of samples */,
audio_config_.frequency,
base::Bind(&InProcessReceiver::GotAudioFrame,
weak_factory_.GetWeakPtr()));
// TODO(miu): Fix audio decoder so that it never drops a request for the next
// frame of audio. Once fixed, remove this, and add PullNextAudioFrame() to
// the end of GotAudioFrame(), so that it behaves just like GotVideoFrame().
// http://crbug.com/347361
cast_environment_->PostDelayedTask(
CastEnvironment::MAIN,
FROM_HERE,
base::Bind(&InProcessReceiver::PullNextAudioFrame,
weak_factory_.GetWeakPtr()),
base::TimeDelta::FromMilliseconds(10));
}
void InProcessReceiver::PullNextVideoFrame() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
cast_receiver_->frame_receiver()->GetRawVideoFrame(base::Bind(
&InProcessReceiver::GotVideoFrame, weak_factory_.GetWeakPtr()));
}
// static
void InProcessReceiver::WillDestroyReceiver(InProcessReceiver* receiver) {
DCHECK(receiver->cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
}
} // namespace cast
} // namespace media
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAST_TEST_IN_PROCESS_RECEIVER_H_
#define MEDIA_CAST_TEST_IN_PROCESS_RECEIVER_H_
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/weak_ptr.h"
#include "media/cast/cast_config.h"
#include "media/cast/transport/cast_transport_config.h"
namespace base {
class TimeTicks;
} // namespace base
namespace net {
class IPEndPoint;
} // namespace net
namespace media {
class VideoFrame;
namespace cast {
class CastEnvironment;
class CastReceiver;
namespace transport {
class UdpTransport;
} // namespace transport
// Common base functionality for an in-process Cast receiver. This is meant to
// be subclassed with the OnAudioFrame() and OnVideoFrame() methods implemented,
// so that the implementor can focus on what is to be done with the frames,
// rather than on the boilerplate "glue" code.
class InProcessReceiver {
public:
// Construct a receiver with the given configuration. |remote_end_point| can
// be left empty, if the transport should automatically mate with the first
// remote sender it encounters.
InProcessReceiver(const scoped_refptr<CastEnvironment>& cast_environment,
const net::IPEndPoint& local_end_point,
const net::IPEndPoint& remote_end_point,
const AudioReceiverConfig& audio_config,
const VideoReceiverConfig& video_config);
// Must be destroyed on the cast MAIN thread. See DestroySoon().
virtual ~InProcessReceiver();
// Convenience accessor to CastEnvironment.
scoped_refptr<CastEnvironment> cast_env() const { return cast_environment_; }
// Begin delivering any received audio/video frames to the OnXXXFrame()
// methods.
void Start();
// Schedules destruction on the cast MAIN thread. Any external references to
// the InProcessReceiver instance become invalid.
void DestroySoon();
protected:
// To be implemented by subclasses. These are called on the Cast MAIN thread
// as each frame is received.
virtual void OnAudioFrame(scoped_ptr<PcmAudioFrame> audio_frame,
const base::TimeTicks& playout_time) = 0;
virtual void OnVideoFrame(const scoped_refptr<VideoFrame>& video_frame,
const base::TimeTicks& render_time) = 0;
// Helper method that creates |transport_| and |cast_receiver_|, starts
// |transport_| receiving, and requests the first audio/video frame.
// Subclasses may override to provide additional start-up functionality.
virtual void StartOnMainThread();
// Callback for the transport to notify of status changes. A default
// implementation is provided here that simply logs socket errors.
virtual void UpdateCastTransportStatus(transport::CastTransportStatus status);
private:
friend class base::RefCountedThreadSafe<InProcessReceiver>;
// CastReceiver callbacks that receive a frame and then request another.
void GotAudioFrame(scoped_ptr<PcmAudioFrame> audio_frame,
const base::TimeTicks& playout_time);
void GotVideoFrame(const scoped_refptr<VideoFrame>& video_frame,
const base::TimeTicks& render_time);
void PullNextAudioFrame();
void PullNextVideoFrame();
// Invoked just before the destruction of |receiver| on the cast MAIN thread.
static void WillDestroyReceiver(InProcessReceiver* receiver);
const scoped_refptr<CastEnvironment> cast_environment_;
const net::IPEndPoint local_end_point_;
const net::IPEndPoint remote_end_point_;
const AudioReceiverConfig audio_config_;
const VideoReceiverConfig video_config_;
scoped_ptr<transport::UdpTransport> transport_;
scoped_ptr<CastReceiver> cast_receiver_;
// For shutdown safety, this member must be last:
base::WeakPtrFactory<InProcessReceiver> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(InProcessReceiver);
};
} // namespace cast
} // namespace media
#endif // MEDIA_CAST_TEST_IN_PROCESS_RECEIVER_H_
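To make the intended usage concrete, here is an informal subclass sketch; it is not part of the commit, and the class name and log messages are invented. Only the two frame hooks need to be implemented.

class LoggingReceiver : public media::cast::InProcessReceiver {
 public:
  LoggingReceiver(const scoped_refptr<media::cast::CastEnvironment>& env,
                  const net::IPEndPoint& local_end_point,
                  const net::IPEndPoint& remote_end_point,
                  const media::cast::AudioReceiverConfig& audio_config,
                  const media::cast::VideoReceiverConfig& video_config)
      : InProcessReceiver(env, local_end_point, remote_end_point,
                          audio_config, video_config) {}

  virtual ~LoggingReceiver() {}

 protected:
  // Both hooks run on the cast MAIN thread, once per received frame.
  virtual void OnAudioFrame(scoped_ptr<media::cast::PcmAudioFrame> audio_frame,
                            const base::TimeTicks& playout_time) OVERRIDE {
    VLOG(1) << "Audio frame: " << audio_frame->frequency << " Hz";
  }

  virtual void OnVideoFrame(
      const scoped_refptr<media::VideoFrame>& video_frame,
      const base::TimeTicks& render_time) OVERRIDE {
    VLOG(1) << "Video frame: " << video_frame->coded_size().ToString();
  }
};

After construction, Start() begins frame delivery, and DestroySoon() tears the receiver down on the cast MAIN thread.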
......@@ -7,6 +7,7 @@
#include <stdlib.h>
#include <cstdio>
#include "base/command_line.h"
#include "base/logging.h"
#include "base/strings/string_number_conversions.h"
......@@ -14,6 +15,8 @@ namespace media {
namespace cast {
namespace test {
static const char kEnablePromptsSwitch[] = "enable-prompts";
InputBuilder::InputBuilder(const std::string& title,
const std::string& default_value,
int low_range,
......@@ -26,6 +29,9 @@ InputBuilder::InputBuilder(const std::string& title,
InputBuilder::~InputBuilder() {}
std::string InputBuilder::GetStringInput() const {
if (!CommandLine::ForCurrentProcess()->HasSwitch(kEnablePromptsSwitch))
return default_value_;
printf("\n%s\n", title_.c_str());
if (!default_value_.empty())
printf("Hit enter for default (%s):\n", default_value_.c_str());
......
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/test/utility/standalone_cast_environment.h"
#include "base/time/default_tick_clock.h"
namespace media {
namespace cast {
StandaloneCastEnvironment::StandaloneCastEnvironment(
const CastLoggingConfig& logging_config)
: CastEnvironment(
make_scoped_ptr<base::TickClock>(new base::DefaultTickClock()),
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
logging_config),
main_thread_("StandaloneCastEnvironment Main"),
audio_encode_thread_("StandaloneCastEnvironment Audio Encode"),
audio_decode_thread_("StandaloneCastEnvironment Audio Decode"),
video_encode_thread_("StandaloneCastEnvironment Video Encode"),
video_decode_thread_("StandaloneCastEnvironment Video Decode"),
transport_thread_("StandaloneCastEnvironment Transport") {
#define CREATE_TASK_RUNNER(name, options) \
name##_thread_.StartWithOptions(options); \
CastEnvironment::name##_thread_proxy_ = name##_thread_.message_loop_proxy()
CREATE_TASK_RUNNER(main,
base::Thread::Options(base::MessageLoop::TYPE_IO, 0));
CREATE_TASK_RUNNER(audio_encode, base::Thread::Options());
CREATE_TASK_RUNNER(audio_decode, base::Thread::Options());
CREATE_TASK_RUNNER(video_encode, base::Thread::Options());
CREATE_TASK_RUNNER(video_decode, base::Thread::Options());
CREATE_TASK_RUNNER(transport, base::Thread::Options());
#undef CREATE_TASK_RUNNER
}
StandaloneCastEnvironment::~StandaloneCastEnvironment() {
DCHECK(CalledOnValidThread());
}
void StandaloneCastEnvironment::Shutdown() {
DCHECK(CalledOnValidThread());
main_thread_.Stop();
audio_encode_thread_.Stop();
audio_decode_thread_.Stop();
video_encode_thread_.Stop();
video_decode_thread_.Stop();
transport_thread_.Stop();
}
} // namespace cast
} // namespace media
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAST_TEST_UTILITY_STANDALONE_CAST_ENVIRONMENT_H_
#define MEDIA_CAST_TEST_UTILITY_STANDALONE_CAST_ENVIRONMENT_H_
#include "base/threading/thread.h"
#include "base/threading/thread_checker.h"
#include "media/cast/cast_environment.h"
namespace media {
namespace cast {
// A complete CastEnvironment where all task runners are spawned from
// internally-owned threads. Uses base::DefaultTickClock as a clock.
class StandaloneCastEnvironment : public CastEnvironment,
public base::ThreadChecker {
public:
explicit StandaloneCastEnvironment(const CastLoggingConfig& logging_config);
// Stops all threads backing the task runners, blocking the caller until
// complete.
void Shutdown();
private:
virtual ~StandaloneCastEnvironment();
base::Thread main_thread_;
base::Thread audio_encode_thread_;
base::Thread audio_decode_thread_;
base::Thread video_encode_thread_;
base::Thread video_decode_thread_;
base::Thread transport_thread_;
DISALLOW_COPY_AND_ASSIGN(StandaloneCastEnvironment);
};
} // namespace cast
} // namespace media
#endif // MEDIA_CAST_TEST_UTILITY_STANDALONE_CAST_ENVIRONMENT_H_
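For context (not part of this change), a standalone tool might wire the two new test utilities together roughly as follows; the function name and the elided receiver construction are placeholders.

void RunStandaloneReceiver() {
  // Enable raw event logging, as cast_receiver_app does.
  media::cast::CastLoggingConfig logging_config;
  logging_config.enable_raw_data_collection = true;

  scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment(
      new media::cast::StandaloneCastEnvironment(logging_config));

  // ... construct an InProcessReceiver subclass with |cast_environment|,
  // call Start(), and pump a message loop while frames arrive ...

  // Blocks until all six internally-owned threads have stopped.
  cast_environment->Shutdown();
}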
......@@ -11,6 +11,8 @@
'<(DEPTH)/',
],
'dependencies': [
'../../cast_receiver.gyp:cast_receiver',
'../../transport/cast_transport.gyp:cast_transport',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx_geometry',
'<(DEPTH)/testing/gtest.gyp:gtest',
......@@ -20,13 +22,19 @@
'sources': [
'<(DEPTH)/media/cast/test/fake_single_thread_task_runner.cc',
'<(DEPTH)/media/cast/test/fake_single_thread_task_runner.h',
'input_builder.cc',
'input_builder.h',
'audio_utility.cc',
'audio_utility.h',
'default_config.cc',
'default_config.h',
'in_process_receiver.cc',
'in_process_receiver.h',
'input_builder.cc',
'input_builder.h',
'standalone_cast_environment.cc',
'standalone_cast_environment.h',
'video_utility.cc',
'video_utility.h',
], # source
},
],
}
\ No newline at end of file
}
......@@ -59,7 +59,7 @@ CastTransportSenderImpl::CastTransportSenderImpl(
external_transport ? external_transport : transport_.get(),
transport_task_runner),
rtcp_builder_(&pacer_),
logging_(transport_task_runner, logging_config),
logging_(logging_config),
raw_events_callback_(raw_events_callback) {
if (!raw_events_callback_.is_null()) {
DCHECK(logging_config.enable_raw_data_collection);
......