Commit f70d95e5 authored by dcheng's avatar dcheng Committed by Commit bot

Remove implicit conversions from scoped_refptr to T* in content/renderer/media/

This patch was generated by running the rewrite_scoped_refptr clang tool
on a Linux build.

BUG=110610

Review URL: https://codereview.chromium.org/480233007

Cr-Commit-Position: refs/heads/master@{#292079}
parent 1e4ebba1
......@@ -39,7 +39,7 @@ scoped_ptr<PepperCdmWrapper> PepperCdmWrapperImpl::Create(
PepperWebPluginImpl* ppapi_plugin = static_cast<PepperWebPluginImpl*>(plugin);
scoped_refptr<PepperPluginInstanceImpl> plugin_instance =
ppapi_plugin->instance();
if (!plugin_instance)
if (!plugin_instance.get())
return scoped_ptr<PepperCdmWrapper>();
GURL url(plugin_instance->container()->element().document().url());
......@@ -59,7 +59,7 @@ PepperCdmWrapperImpl::PepperCdmWrapperImpl(
: helper_plugin_(helper_plugin.Pass()),
plugin_instance_(plugin_instance) {
DCHECK(helper_plugin_);
DCHECK(plugin_instance_);
DCHECK(plugin_instance_.get());
// Plugin must be a CDM.
DCHECK(plugin_instance_->GetContentDecryptorDelegate());
}
......
......@@ -40,7 +40,7 @@ MediaStream::~MediaStream() {
// Returns the raw webrtc stream backing this MediaStream.
// |stream| is unused here; the adapter is held in |webrtc_media_stream_|.
// Must be called on the thread checked by |thread_checker_|.
webrtc::MediaStreamInterface* MediaStream::GetWebRtcAdapter(
    const blink::WebMediaStream& stream) {
  // Use .get() explicitly: implicit scoped_refptr-to-T* conversion is removed.
  DCHECK(webrtc_media_stream_.get());
  DCHECK(thread_checker_.CalledOnValidThread());
  return webrtc_media_stream_.get();
}
......
......@@ -199,7 +199,7 @@ MediaStreamAudioProcessor::MediaStreamAudioProcessor(
// In unit tests not creating a message filter, |aec_dump_message_filter_|
// will be NULL. We can just ignore that. Other unit tests and browser tests
// ensure that we do get the filter when we should.
if (aec_dump_message_filter_)
if (aec_dump_message_filter_.get())
aec_dump_message_filter_->AddDelegate(this);
}
}
......@@ -271,7 +271,7 @@ void MediaStreamAudioProcessor::Stop() {
stopped_ = true;
if (aec_dump_message_filter_) {
if (aec_dump_message_filter_.get()) {
aec_dump_message_filter_->RemoveDelegate(this);
aec_dump_message_filter_ = NULL;
}
......
......@@ -168,7 +168,7 @@ TEST_F(MediaStreamAudioProcessorTest, WithoutAudioProcessing) {
EXPECT_FALSE(audio_processor->has_audio_processing());
audio_processor->OnCaptureFormatChanged(params_);
ProcessDataAndVerifyFormat(audio_processor,
ProcessDataAndVerifyFormat(audio_processor.get(),
params_.sample_rate(),
params_.channels(),
params_.sample_rate() / 100);
......@@ -187,9 +187,9 @@ TEST_F(MediaStreamAudioProcessorTest, WithAudioProcessing) {
webrtc_audio_device.get()));
EXPECT_TRUE(audio_processor->has_audio_processing());
audio_processor->OnCaptureFormatChanged(params_);
VerifyDefaultComponents(audio_processor);
VerifyDefaultComponents(audio_processor.get());
ProcessDataAndVerifyFormat(audio_processor,
ProcessDataAndVerifyFormat(audio_processor.get(),
kAudioProcessingSampleRate,
kAudioProcessingNumberOfChannel,
kAudioProcessingSampleRate / 100);
......@@ -213,7 +213,7 @@ TEST_F(MediaStreamAudioProcessorTest, VerifyTabCaptureWithoutAudioProcessing) {
EXPECT_FALSE(audio_processor->has_audio_processing());
audio_processor->OnCaptureFormatChanged(params_);
ProcessDataAndVerifyFormat(audio_processor,
ProcessDataAndVerifyFormat(audio_processor.get(),
params_.sample_rate(),
params_.channels(),
params_.sample_rate() / 100);
......@@ -247,7 +247,7 @@ TEST_F(MediaStreamAudioProcessorTest, TurnOffDefaultConstraints) {
EXPECT_FALSE(audio_processor->has_audio_processing());
audio_processor->OnCaptureFormatChanged(params_);
ProcessDataAndVerifyFormat(audio_processor,
ProcessDataAndVerifyFormat(audio_processor.get(),
params_.sample_rate(),
params_.channels(),
params_.sample_rate() / 100);
......@@ -372,9 +372,9 @@ TEST_F(MediaStreamAudioProcessorTest, TestAllSampleRates) {
media::CHANNEL_LAYOUT_STEREO, kSupportedSampleRates[i], 16,
buffer_size);
audio_processor->OnCaptureFormatChanged(params);
VerifyDefaultComponents(audio_processor);
VerifyDefaultComponents(audio_processor.get());
ProcessDataAndVerifyFormat(audio_processor,
ProcessDataAndVerifyFormat(audio_processor.get(),
kAudioProcessingSampleRate,
kAudioProcessingNumberOfChannel,
kAudioProcessingSampleRate / 100);
......@@ -402,7 +402,7 @@ TEST_F(MediaStreamAudioProcessorTest, GetAecDumpMessageFilter) {
constraint_factory.CreateWebMediaConstraints(), 0,
webrtc_audio_device.get()));
EXPECT_TRUE(audio_processor->aec_dump_message_filter_);
EXPECT_TRUE(audio_processor->aec_dump_message_filter_.get());
audio_processor = NULL;
}
......
......@@ -48,7 +48,7 @@ void MediaStreamAudioSource::AddTrack(
const blink::WebMediaConstraints& constraints,
const ConstraintsCallback& callback) {
// TODO(xians): Properly implement for audio sources.
if (!local_audio_source_) {
if (!local_audio_source_.get()) {
if (!factory_->InitializeMediaStreamAudioSource(render_view_id_,
constraints,
this)) {
......
......@@ -33,7 +33,7 @@ class CONTENT_EXPORT MediaStreamAudioSource
}
// Attaches the capturer feeding this audio source. May be set only once:
// the DCHECK enforces that no capturer has been assigned yet.
// NOTE(review): ownership semantics of |capturer| are not visible here —
// presumably |audio_capturer_| is a scoped_refptr taking a ref; confirm.
void SetAudioCapturer(WebRtcAudioCapturer* capturer) {
  // Use .get() explicitly: implicit scoped_refptr-to-T* conversion is removed.
  DCHECK(!audio_capturer_.get());
  audio_capturer_ = capturer;
}
......
......@@ -29,7 +29,7 @@ MediaStreamTrack::~MediaStreamTrack() {
// Enables or disables the underlying webrtc track, if one exists.
// A null |track_| is tolerated (no-op), so remote/unbound tracks are safe.
// Must be called on the thread checked by |thread_checker_|.
void MediaStreamTrack::SetEnabled(bool enabled) {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Use .get() explicitly: implicit scoped_refptr-to-bool conversion is removed.
  if (track_.get())
    track_->set_enabled(enabled);
}
......@@ -49,7 +49,7 @@ void MediaStreamTrack::Stop() {
// since there is no proper way of doing that on a remote track, we can
// at least disable the track. Blink will not call down to the content layer
// after a track has been stopped.
if (track_)
if (track_.get())
track_->set_enabled(false);
}
......
......@@ -78,7 +78,7 @@ MediaStreamVideoTrack::FrameDeliverer::FrameDeliverer(
const scoped_refptr<base::MessageLoopProxy>& io_message_loop, bool enabled)
: io_message_loop_(io_message_loop),
enabled_(enabled) {
DCHECK(io_message_loop_);
DCHECK(io_message_loop_.get());
}
MediaStreamVideoTrack::FrameDeliverer::~FrameDeliverer() {
......@@ -160,7 +160,7 @@ const scoped_refptr<media::VideoFrame>&
MediaStreamVideoTrack::FrameDeliverer::GetBlackFrame(
const scoped_refptr<media::VideoFrame>& reference_frame) {
DCHECK(io_message_loop_->BelongsToCurrentThread());
if (!black_frame_ ||
if (!black_frame_.get() ||
black_frame_->natural_size() != reference_frame->natural_size())
black_frame_ =
media::VideoFrame::CreateBlackFrame(reference_frame->natural_size());
......
......@@ -59,7 +59,7 @@ void RendererGpuVideoAcceleratorFactories::BindContext() {
WebGraphicsContext3DCommandBufferImpl*
RendererGpuVideoAcceleratorFactories::GetContext3d() {
DCHECK(task_runner_->BelongsToCurrentThread());
if (!context_provider_)
if (!context_provider_.get())
return NULL;
if (context_provider_->IsContextLost()) {
context_provider_->VerifyContexts();
......
......@@ -714,15 +714,15 @@ void RTCPeerConnectionHandler::removeStream(
break;
}
}
DCHECK(webrtc_stream);
native_peer_connection_->RemoveStream(webrtc_stream);
DCHECK(webrtc_stream.get());
native_peer_connection_->RemoveStream(webrtc_stream.get());
if (peer_connection_tracker_)
peer_connection_tracker_->TrackRemoveStream(
this, stream, PeerConnectionTracker::SOURCE_LOCAL);
PerSessionWebRTCAPIMetrics::GetInstance()->DecrementStreamCounter();
track_metrics_.RemoveStream(MediaStreamTrackMetrics::SENT_STREAM,
webrtc_stream);
webrtc_stream.get());
}
void RTCPeerConnectionHandler::getStats(
......
......@@ -35,11 +35,11 @@ class RTCVideoDecoderTest : public ::testing::Test,
ASSERT_TRUE(vda_thread_.Start());
vda_task_runner_ = vda_thread_.message_loop_proxy();
mock_vda_ = new media::MockVideoDecodeAccelerator;
EXPECT_CALL(*mock_gpu_factories_, GetTaskRunner())
EXPECT_CALL(*mock_gpu_factories_.get(), GetTaskRunner())
.WillRepeatedly(Return(vda_task_runner_));
EXPECT_CALL(*mock_gpu_factories_, DoCreateVideoDecodeAccelerator())
EXPECT_CALL(*mock_gpu_factories_.get(), DoCreateVideoDecodeAccelerator())
.WillRepeatedly(Return(mock_vda_));
EXPECT_CALL(*mock_gpu_factories_, CreateSharedMemory(_))
EXPECT_CALL(*mock_gpu_factories_.get(), CreateSharedMemory(_))
.WillRepeatedly(Return(static_cast<base::SharedMemory*>(NULL)));
EXPECT_CALL(*mock_vda_, Initialize(_, _))
.Times(1)
......
......@@ -482,7 +482,7 @@ void RTCVideoEncoder::Impl::EncodeOneFrame() {
input_buffer->handle(),
base::TimeDelta(),
base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
if (!frame) {
if (!frame.get()) {
DLOG(ERROR) << "Impl::EncodeOneFrame(): failed to create frame";
NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
return;
......@@ -561,7 +561,7 @@ RTCVideoEncoder::~RTCVideoEncoder() {
DVLOG(3) << "~RTCVideoEncoder";
DCHECK(thread_checker_.CalledOnValidThread());
Release();
DCHECK(!impl_);
DCHECK(!impl_.get());
}
int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
......@@ -572,7 +572,7 @@ int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
<< ", height=" << codec_settings->height
<< ", startBitrate=" << codec_settings->startBitrate;
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!impl_);
DCHECK(!impl_.get());
weak_factory_.InvalidateWeakPtrs();
impl_ = new Impl(weak_factory_.GetWeakPtr(), gpu_factories_);
......@@ -599,7 +599,7 @@ int32_t RTCVideoEncoder::Encode(
const webrtc::CodecSpecificInfo* codec_specific_info,
const std::vector<webrtc::VideoFrameType>* frame_types) {
DVLOG(3) << "Encode()";
if (!impl_) {
if (!impl_.get()) {
DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
return impl_status_;
}
......@@ -627,7 +627,7 @@ int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
webrtc::EncodedImageCallback* callback) {
DVLOG(3) << "RegisterEncodeCompleteCallback()";
DCHECK(thread_checker_.CalledOnValidThread());
if (!impl_) {
if (!impl_.get()) {
DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
return impl_status_;
}
......@@ -640,7 +640,7 @@ int32_t RTCVideoEncoder::Release() {
DVLOG(3) << "Release()";
DCHECK(thread_checker_.CalledOnValidThread());
if (impl_) {
if (impl_.get()) {
gpu_factories_->GetTaskRunner()->PostTask(
FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
impl_ = NULL;
......@@ -660,7 +660,7 @@ int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
<< ", frame_rate=" << frame_rate;
if (!impl_) {
if (!impl_.get()) {
DVLOG(3) << "SetRates(): returning " << impl_status_;
return impl_status_;
}
......
......@@ -199,7 +199,7 @@ void VideoCaptureImpl::OnBufferDestroyed(int buffer_id) {
if (iter == client_buffers_.end())
return;
DCHECK(!iter->second || iter->second->HasOneRef())
DCHECK(!iter->second.get() || iter->second->HasOneRef())
<< "Instructed to delete buffer we are still using.";
client_buffers_.erase(iter);
}
......
......@@ -59,12 +59,12 @@ void VideoFrameCompositor::PutCurrentFrame(
void VideoFrameCompositor::UpdateCurrentFrame(
const scoped_refptr<media::VideoFrame>& frame) {
if (current_frame_ &&
if (current_frame_.get() &&
current_frame_->natural_size() != frame->natural_size()) {
natural_size_changed_cb_.Run(frame->natural_size());
}
if (!current_frame_ || IsOpaque(current_frame_) != IsOpaque(frame)) {
if (!current_frame_.get() || IsOpaque(current_frame_) != IsOpaque(frame)) {
opacity_changed_cb_.Run(IsOpaque(frame));
}
......
......@@ -124,7 +124,7 @@ VideoFrameResolutionAdapter::VideoFrameResolutionAdapter(
frame_rate_(MediaStreamVideoSource::kDefaultFrameRate),
max_frame_rate_(max_frame_rate),
keep_frame_counter_(0.0f) {
DCHECK(renderer_task_runner_);
DCHECK(renderer_task_runner_.get());
DCHECK(io_thread_checker_.CalledOnValidThread());
DCHECK_GE(max_aspect_ratio_, min_aspect_ratio_);
CHECK_NE(0, max_aspect_ratio_);
......@@ -311,7 +311,7 @@ VideoTrackAdapter::VideoTrackAdapter(
renderer_task_runner_(base::MessageLoopProxy::current()),
frame_counter_(0),
source_frame_rate_(0.0f) {
DCHECK(io_message_loop_);
DCHECK(io_message_loop_.get());
}
VideoTrackAdapter::~VideoTrackAdapter() {
......@@ -361,7 +361,7 @@ void VideoTrackAdapter::AddTrackOnIO(
break;
}
}
if (!adapter) {
if (!adapter.get()) {
adapter = new VideoFrameResolutionAdapter(renderer_task_runner_,
max_frame_size,
min_aspect_ratio,
......
......@@ -197,7 +197,8 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
// |gpu_factories_| requires that its entry points be called on its
// |GetTaskRunner()|. Since |pipeline_| will own decoders created from the
// factories, require that their message loops are identical.
DCHECK(!gpu_factories_ || (gpu_factories_->GetTaskRunner() == media_loop_));
DCHECK(!gpu_factories_.get() ||
(gpu_factories_->GetTaskRunner() == media_loop_.get()));
// Let V8 know we started new thread if we did not do it yet.
// Made separate task to avoid deletion of player currently being created.
......@@ -613,7 +614,7 @@ bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture(
scoped_refptr<media::VideoFrame> video_frame =
GetCurrentFrameFromCompositor();
if (!video_frame)
if (!video_frame.get())
return false;
if (video_frame->format() != media::VideoFrame::NATIVE_TEXTURE)
return false;
......
......@@ -210,7 +210,7 @@ void WebMediaPlayerMS::pause() {
media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE));
if (!current_frame_)
if (!current_frame_.get())
return;
// Copy the frame so that rendering can show the last received frame.
......@@ -428,7 +428,7 @@ void WebMediaPlayerMS::OnFrameAvailable(
SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);
GetClient()->sizeChanged();
if (video_frame_provider_) {
if (video_frame_provider_.get()) {
video_weblayer_.reset(new cc_blink::WebLayerImpl(
cc::VideoLayer::Create(this, media::VIDEO_ROTATION_0)));
video_weblayer_->setOpaque(true);
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment