Commit 751ef0ee authored by henrika, committed by Commit Bot

Refining native logs in UserMediaProcessor and related classes

Extends existing logs related to media streams and media stream tracks
and aligns the style with recent work in this area.

Bug: 1017219
Change-Id: I88ed50cd4c0d9dd5e2f6104e8176a8ef99d6d46d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1960278
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Commit-Queue: Henrik Andreasson <henrika@chromium.org>
Cr-Commit-Position: refs/heads/master@{#726392}
parent 0890817f
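Every file touched by this CL follows the same logging convention: an anonymous-namespace SendLogMessage() helper prepends a short per-file tag ("PLAS::", "WRMSTA::", "WRADI::", "MSAT::", "MSAS::", "MSS::", "WRAS::", "PCRAS::") before forwarding to blink::WebRtcLogMessage(), and call sites format arguments as {name=value}, identifying state as [this=%p] or [id=%s], and results or errors as => (...). A minimal sketch of the convention, with a hypothetical "XYZ::" tag and DoWork() call site:

namespace {

// Per-file helper; the "XYZ::" tag is illustrative only.
void SendLogMessage(const std::string& message) {
  blink::WebRtcLogMessage("XYZ::" + message);
}

}  // namespace

void SomeClass::DoWork(int value) {
  // Emits e.g. "XYZ::DoWork({value=42})" to the native WebRTC log.
  SendLogMessage(base::StringPrintf("DoWork({value=%d})", value));
}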
......@@ -111,6 +111,10 @@ class BLINK_PLATFORM_EXPORT MediaStreamAudioTrack
// Buffer used to deliver silent audio data while this track is disabled.
std::unique_ptr<media::AudioBus> silent_bus_;
// Set to true once at first audio callback after calling Start().
// Only used for logging purposes.
bool received_audio_callback_ = false;
// Provides weak pointers that are valid until Stop() is called.
base::WeakPtrFactory<MediaStreamAudioTrack> weak_factory_{this};
......
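A note on the new flag: it backs a log-once idiom rather than real track state. The matching OnData() hunk in media_stream_audio_track.cc later in this CL flips it at the first audio callback and emits a single "track is alive" entry, so the per-buffer data callback (typically firing every 10 ms) never floods the native log.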
......@@ -36,11 +36,26 @@ using EchoCancellationType =
blink::AudioProcessingProperties::EchoCancellationType;
namespace {
void SendLogMessage(const std::string& message) {
blink::WebRtcLogMessage("PLAS::" + message);
}
// Used as an identifier for ProcessedLocalAudioSource::From().
void* const kProcessedLocalAudioSourceIdentifier =
const_cast<void**>(&kProcessedLocalAudioSourceIdentifier);
void LogAudioProcesingProperties(
std::string GetEnsureSourceIsStartedLogString(
const blink::MediaStreamDevice& device) {
return base::StringPrintf(
"EnsureSourceIsStarted({session_id=%s}, {channel_layout=%d}, "
"{sample_rate=%d}, {buffer_size=%d}, {effects=%d})",
device.session_id().ToString().c_str(), device.input.channel_layout(),
device.input.sample_rate(), device.input.frames_per_buffer(),
device.input.effects());
}
std::string GetAudioProcesingPropertiesLogString(
const blink::AudioProcessingProperties& properties) {
auto aec_to_string =
[](blink::AudioProcessingProperties::EchoCancellationType type) {
......@@ -56,18 +71,17 @@ void LogAudioProcesingProperties(
};
auto bool_to_string = [](bool value) { return value ? "true" : "false"; };
auto str = base::StringPrintf(
"AudioProcessingProperties: "
"aec=%s, "
"disable_hw_ns=%s, "
"goog_audio_mirroring=%s, "
"goog_auto_gain_control=%s, "
"goog_experimental_echo_cancellation=%s, "
"goog_typing_noise_detection=%s, "
"goog_noise_suppression=%s, "
"goog_experimental_noise_suppression=%s, "
"goog_highpass_filter=%s, "
"goog_experimental_agc=%s, "
"hybrid_agc=%s",
"aec: %s, "
"disable_hw_ns: %s, "
"goog_audio_mirroring: %s, "
"goog_auto_gain_control: %s, "
"goog_experimental_echo_cancellation: %s, "
"goog_typing_noise_detection: %s, "
"goog_noise_suppression: %s, "
"goog_experimental_noise_suppression: %s, "
"goog_highpass_filter: %s, "
"goog_experimental_agc: %s, "
"hybrid_agc: %s",
aec_to_string(properties.echo_cancellation_type),
bool_to_string(properties.disable_hw_noise_suppression),
bool_to_string(properties.goog_audio_mirroring),
......@@ -79,8 +93,7 @@ void LogAudioProcesingProperties(
bool_to_string(properties.goog_highpass_filter),
bool_to_string(properties.goog_experimental_auto_gain_control),
bool_to_string(base::FeatureList::IsEnabled(features::kWebRtcHybridAgc)));
blink::WebRtcLogMessage(str);
return str;
}
} // namespace
......@@ -100,12 +113,14 @@ ProcessedLocalAudioSource::ProcessedLocalAudioSource(
started_callback_(std::move(started_callback)),
volume_(0),
allow_invalid_render_frame_id_for_testing_(false) {
DVLOG(1) << "ProcessedLocalAudioSource::ProcessedLocalAudioSource()";
SetDevice(device);
SendLogMessage(
base::StringPrintf("ProcessedLocalAudioSource({session_id=%s})",
device.session_id().ToString().c_str()));
}
ProcessedLocalAudioSource::~ProcessedLocalAudioSource() {
DVLOG(1) << "ProcessedLocalAudioSource::~ProcessedLocalAudioSource()";
DVLOG(1) << "PLAS::~ProcessedLocalAudioSource()";
EnsureSourceIsStopped();
}
......@@ -118,6 +133,12 @@ ProcessedLocalAudioSource* ProcessedLocalAudioSource::From(
return nullptr;
}
void ProcessedLocalAudioSource::SendLogMessageWithSessionId(
const std::string& message) const {
SendLogMessage(message + " [session_id=" + device().session_id().ToString() +
"]");
}
base::Optional<blink::AudioProcessingProperties>
ProcessedLocalAudioSource::GetAudioProcessingProperties() const {
return audio_processing_properties_;
......@@ -137,23 +158,17 @@ bool ProcessedLocalAudioSource::EnsureSourceIsStarted() {
// to initialize the audio source.
if (!allow_invalid_render_frame_id_for_testing_ &&
!internal_consumer_frame_->frame()) {
blink::WebRtcLogMessage(
"ProcessedLocalAudioSource::EnsureSourceIsStarted() fails "
" because the render frame does not exist.");
SendLogMessageWithSessionId(
"EnsureSourceIsStarted() => (ERROR: "
"render frame does not exist)");
return false;
}
std::string str = base::StringPrintf(
"ProcessedLocalAudioSource::EnsureSourceIsStarted."
"channel_layout=%d, sample_rate=%d, buffer_size=%d, session_id=%s"
", effects=%d. ",
device().input.channel_layout(), device().input.sample_rate(),
device().input.frames_per_buffer(),
device().session_id().ToString().c_str(), device().input.effects());
blink::WebRtcLogMessage(str);
DVLOG(1) << str;
LogAudioProcesingProperties(audio_processing_properties_);
SendLogMessage(GetEnsureSourceIsStartedLogString(device()));
SendLogMessageWithSessionId(base::StringPrintf(
"EnsureSourceIsStarted() => (audio_processing_properties=[%s])",
GetAudioProcesingPropertiesLogString(audio_processing_properties_)
.c_str()));
blink::MediaStreamDevice modified_device(device());
bool device_is_modified = false;
......@@ -197,9 +212,8 @@ bool ProcessedLocalAudioSource::EnsureSourceIsStarted() {
WebRtcAudioDeviceImpl* const rtc_audio_device =
PeerConnectionDependencyFactory::GetInstance()->GetWebRtcAudioDevice();
if (!rtc_audio_device) {
blink::WebRtcLogMessage(
"ProcessedLocalAudioSource::EnsureSourceIsStarted() fails"
" because there is no WebRtcAudioDeviceImpl instance.");
SendLogMessageWithSessionId(
"EnsureSourceIsStarted() => (ERROR: no WebRTC ADM instance)");
return false;
}
......@@ -227,10 +241,10 @@ bool ProcessedLocalAudioSource::EnsureSourceIsStarted() {
channel_layout != media::CHANNEL_LAYOUT_STEREO &&
channel_layout != media::CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC &&
channel_layout != media::CHANNEL_LAYOUT_DISCRETE) {
blink::WebRtcLogMessage(base::StringPrintf(
"ProcessedLocalAudioSource::EnsureSourceIsStarted() fails "
" because the input channel layout (%d) is not supported.",
static_cast<int>(channel_layout)));
SendLogMessage(
base::StringPrintf("EnsureSourceIsStarted() => (ERROR: "
"input channel layout (%d) is not supported)",
static_cast<int>(channel_layout)));
return false;
}
......@@ -258,6 +272,7 @@ bool ProcessedLocalAudioSource::EnsureSourceIsStarted() {
}
DVLOG(1) << params.AsHumanReadableString();
DCHECK(params.IsValid());
media::AudioSourceParameters source_params(device().session_id());
const bool use_remote_apm =
media::IsWebRtcApmInAudioServiceEnabled() &&
......@@ -277,8 +292,9 @@ bool ProcessedLocalAudioSource::EnsureSourceIsStarted() {
source_params.processing->settings.automatic_gain_control =
media::AutomaticGainControlType::kHybridExperimental;
}
blink::WebRtcLogMessage(base::StringPrintf(
"Using APM in audio process; settings: %s",
SendLogMessageWithSessionId(base::StringPrintf(
"EnsureSourceIsStarted() => (using APM in audio process: "
"settings=[%s])",
source_params.processing->settings.ToString().c_str()));
} else {
......@@ -291,10 +307,11 @@ bool ProcessedLocalAudioSource::EnsureSourceIsStarted() {
}
// Start the source.
DVLOG(1) << "Starting WebRTC audio source for consumption "
<< "with input parameters={" << params.AsHumanReadableString()
<< "} and output parameters={"
<< GetAudioParameters().AsHumanReadableString() << '}';
SendLogMessageWithSessionId(base::StringPrintf(
"EnsureSourceIsStarted() => (WebRTC audio source starts: "
"input_parameters=[%s], output_parameters=[%s])",
params.AsHumanReadableString().c_str(),
GetAudioParameters().AsHumanReadableString().c_str()));
scoped_refptr<media::AudioCapturerSource> new_source =
Platform::Current()->NewAudioCapturerSource(
internal_consumer_frame_->web_frame(), source_params);
......@@ -372,6 +389,7 @@ int ProcessedLocalAudioSource::MaxVolume() const {
}
void ProcessedLocalAudioSource::OnCaptureStarted() {
SendLogMessageWithSessionId(base::StringPrintf("OnCaptureStarted()"));
std::move(started_callback_)
.Run(this, blink::mojom::MediaStreamRequestResult::OK, "");
}
......@@ -392,24 +410,29 @@ void ProcessedLocalAudioSource::Capture(const media::AudioBus* audio_bus,
}
void ProcessedLocalAudioSource::OnCaptureError(const std::string& message) {
blink::WebRtcLogMessage("ProcessedLocalAudioSource::OnCaptureError: " +
message);
SendLogMessageWithSessionId(
base::StringPrintf("OnCaptureError({message=%s})", message.c_str()));
StopSourceOnError(message);
}
void ProcessedLocalAudioSource::OnCaptureMuted(bool is_muted) {
SendLogMessageWithSessionId(base::StringPrintf(
"OnCaptureMuted({is_muted=%s})", is_muted ? "true" : "false"));
SetMutedState(is_muted);
}
void ProcessedLocalAudioSource::OnCaptureProcessorCreated(
media::AudioProcessorControls* controls) {
SendLogMessageWithSessionId("OnCaptureProcessorCreated()");
DCHECK(audio_processor_proxy_);
audio_processor_proxy_->SetControls(controls);
}
void ProcessedLocalAudioSource::SetOutputDeviceForAec(
const std::string& output_device_id) {
DVLOG(1) << "ProcessedLocalAudioSource::SetOutputDeviceForAec()";
SendLogMessageWithSessionId(base::StringPrintf(
"SetOutputDeviceForAec({device_id=%s})", output_device_id.c_str()));
if (source_)
source_->SetOutputDeviceForAec(output_device_id);
}
......
......@@ -119,6 +119,11 @@ class MODULES_EXPORT ProcessedLocalAudioSource final
// processing will take place.
int GetBufferSize(int sample_rate) const;
// Helper method which sends the log |message| to a native WebRTC log and
// adds the current session ID (from the associated media stream device) to
// make each log entry unique.
void SendLogMessageWithSessionId(const std::string& message) const;
// The LocalFrame that will consume the audio data. Used when creating
// AudioCapturerSources.
std::unique_ptr<MediaStreamInternalFrameWrapper> internal_consumer_frame_;
......
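Combining the "PLAS::" prefix from the top of the .cc file with the session-id suffix appended by this helper, a call such as SendLogMessageWithSessionId("OnCaptureStarted()") lands in the native WebRTC log roughly as (session id illustrative):

PLAS::OnCaptureStarted() [session_id=4A52E176-3E49-4B4F-8E2B-0F2A6C1D9B11]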
......@@ -161,9 +161,11 @@ class MODULES_EXPORT UserMediaProcessor
bool IsCurrentRequestInfo(
const blink::WebUserMediaRequest& web_request) const;
void DelayedGetUserMediaRequestSucceeded(
int request_id,
const blink::WebMediaStream& stream,
blink::WebUserMediaRequest web_request);
void DelayedGetUserMediaRequestFailed(
int request_id,
blink::WebUserMediaRequest web_request,
blink::mojom::blink::MediaStreamRequestResult result,
const String& constraint_name);
......
......@@ -5,11 +5,21 @@
#include "third_party/blink/renderer/modules/peerconnection/webrtc_media_stream_track_adapter.h"
#include "base/bind.h"
#include "base/strings/stringprintf.h"
#include "third_party/blink/public/platform/modules/mediastream/media_stream_audio_track.h"
#include "third_party/blink/public/platform/modules/webrtc/webrtc_logging.h"
#include "third_party/blink/renderer/modules/mediastream/processed_local_audio_source.h"
#include "third_party/blink/renderer/modules/peerconnection/media_stream_video_webrtc_sink.h"
#include "third_party/blink/renderer/modules/peerconnection/peer_connection_dependency_factory.h"
namespace {
void SendLogMessage(const std::string& message) {
blink::WebRtcLogMessage("WRMSTA::" + message);
}
} // namespace
namespace blink {
// static
......@@ -155,6 +165,8 @@ void WebRtcMediaStreamTrackAdapter::InitializeLocalAudioTrack(
DCHECK(!web_track.IsNull());
DCHECK_EQ(web_track.Source().GetType(),
blink::WebMediaStreamSource::kTypeAudio);
SendLogMessage(base::StringPrintf("InitializeLocalAudioTrack({id=%s})",
web_track.Id().Utf8().c_str()));
web_track_ = web_track;
blink::MediaStreamAudioTrack* native_track =
blink::MediaStreamAudioTrack::From(web_track_);
......@@ -208,6 +220,8 @@ void WebRtcMediaStreamTrackAdapter::InitializeRemoteAudioTrack(
DCHECK(webrtc_audio_track);
DCHECK_EQ(webrtc_audio_track->kind(),
webrtc::MediaStreamTrackInterface::kAudioKind);
SendLogMessage(
base::StringPrintf("InitializeRemoteAudioTrack([this=%p])", this));
remote_audio_track_adapter_ =
base::MakeRefCounted<blink::RemoteAudioTrackAdapter>(
main_thread_, webrtc_audio_track.get());
......
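Two tagging styles are visible above: InitializeLocalAudioTrack() logs the track id, while InitializeRemoteAudioTrack() logs the adapter's this pointer, since a remote track is most easily correlated by object address. With the "WRMSTA::" prefix, the resulting entries look roughly like (values illustrative):

WRMSTA::InitializeLocalAudioTrack({id=48f3e2d1-9a07-4c5b-a2e6})
WRMSTA::InitializeRemoteAudioTrack([this=0x3c2f5e80d400])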
......@@ -7,10 +7,12 @@
#include "base/logging.h"
#include "base/metrics/histogram_macros.h"
#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
#include "base/trace_event/trace_event.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_parameters.h"
#include "media/base/sample_rates.h"
#include "third_party/blink/public/platform/modules/webrtc/webrtc_logging.h"
#include "third_party/blink/renderer/modules/mediastream/processed_local_audio_source.h"
#include "third_party/blink/renderer/modules/webrtc/webrtc_audio_renderer.h"
......@@ -19,6 +21,14 @@ using media::ChannelLayout;
namespace blink {
namespace {
void SendLogMessage(const std::string& message) {
blink::WebRtcLogMessage("WRADI::" + message);
}
} // namespace
WebRtcAudioDeviceImpl::WebRtcAudioDeviceImpl()
: audio_processing_id_(base::UnguessableToken::Create()),
audio_transport_callback_(nullptr),
......@@ -26,7 +36,8 @@ WebRtcAudioDeviceImpl::WebRtcAudioDeviceImpl()
initialized_(false),
playing_(false),
recording_(false) {
DVLOG(1) << "WebRtcAudioDeviceImpl::WebRtcAudioDeviceImpl()";
SendLogMessage(base::StringPrintf("WebRtcAudioDeviceImpl({id=%s})",
GetAudioProcessingId().ToString().c_str()));
// This object can be constructed on either the signaling thread or the main
// thread, so we need to detach these thread checkers here and have them
// initialize automatically when the first methods are called.
......@@ -38,7 +49,8 @@ WebRtcAudioDeviceImpl::WebRtcAudioDeviceImpl()
}
WebRtcAudioDeviceImpl::~WebRtcAudioDeviceImpl() {
DVLOG(1) << "WebRtcAudioDeviceImpl::~WebRtcAudioDeviceImpl()";
SendLogMessage(base::StringPrintf("~WebRtcAudioDeviceImpl([id=%s])",
GetAudioProcessingId().ToString().c_str()));
DCHECK_CALLED_ON_VALID_THREAD(main_thread_checker_);
DCHECK(!initialized_) << "Terminate must have been called.";
}
......@@ -143,8 +155,9 @@ base::UnguessableToken WebRtcAudioDeviceImpl::GetAudioProcessingId() const {
int32_t WebRtcAudioDeviceImpl::RegisterAudioCallback(
webrtc::AudioTransport* audio_callback) {
DVLOG(1) << "WebRtcAudioDeviceImpl::RegisterAudioCallback()";
DCHECK_CALLED_ON_VALID_THREAD(signaling_thread_checker_);
SendLogMessage(base::StringPrintf("RegisterAudioCallback([id=%s])",
GetAudioProcessingId().ToString().c_str()));
base::AutoLock lock(lock_);
DCHECK_EQ(!audio_transport_callback_, !!audio_callback);
audio_transport_callback_ = audio_callback;
......@@ -254,9 +267,10 @@ bool WebRtcAudioDeviceImpl::Playing() const {
}
int32_t WebRtcAudioDeviceImpl::StartRecording() {
DVLOG(1) << "WebRtcAudioDeviceImpl::StartRecording()";
DCHECK_CALLED_ON_VALID_THREAD(worker_thread_checker_);
DCHECK(initialized_);
SendLogMessage(base::StringPrintf("StartRecording([id=%s])",
GetAudioProcessingId().ToString().c_str()));
base::AutoLock auto_lock(lock_);
if (!audio_transport_callback_) {
LOG(ERROR) << "Audio transport is missing";
......@@ -269,7 +283,6 @@ int32_t WebRtcAudioDeviceImpl::StartRecording() {
}
int32_t WebRtcAudioDeviceImpl::StopRecording() {
DVLOG(1) << "WebRtcAudioDeviceImpl::StopRecording()";
DCHECK(initialized_);
// Can be called from either the worker thread (e.g. when called from
// webrtc) or the signaling thread (e.g. when we call it ourselves
// internally).
......@@ -279,6 +292,8 @@ int32_t WebRtcAudioDeviceImpl::StopRecording() {
DCHECK(signaling_thread_checker_.CalledOnValidThread() ||
worker_thread_checker_.CalledOnValidThread());
#endif
SendLogMessage(base::StringPrintf("StopRecording([id=%s])",
GetAudioProcessingId().ToString().c_str()));
base::AutoLock auto_lock(lock_);
recording_ = false;
return 0;
......@@ -379,7 +394,8 @@ bool WebRtcAudioDeviceImpl::SetAudioRenderer(
void WebRtcAudioDeviceImpl::AddAudioCapturer(
ProcessedLocalAudioSource* capturer) {
DCHECK_CALLED_ON_VALID_THREAD(main_thread_checker_);
DVLOG(1) << "WebRtcAudioDeviceImpl::AddAudioCapturer()";
SendLogMessage(base::StringPrintf("AddAudioCapturer([id=%s])",
GetAudioProcessingId().ToString().c_str()));
DCHECK(capturer);
DCHECK(!capturer->device().id.empty());
......@@ -392,7 +408,8 @@ void WebRtcAudioDeviceImpl::AddAudioCapturer(
void WebRtcAudioDeviceImpl::RemoveAudioCapturer(
ProcessedLocalAudioSource* capturer) {
DCHECK_CALLED_ON_VALID_THREAD(main_thread_checker_);
DVLOG(1) << "WebRtcAudioDeviceImpl::RemoveAudioCapturer()";
SendLogMessage(base::StringPrintf("RemoveAudioCapturer([id=%s])",
GetAudioProcessingId().ToString().c_str()));
DCHECK(capturer);
base::AutoLock auto_lock(lock_);
capturers_.remove(capturer);
......
......@@ -8,21 +8,32 @@
#include <vector>
#include "base/logging.h"
#include "base/strings/stringprintf.h"
#include "media/base/audio_bus.h"
#include "third_party/blink/public/platform/modules/mediastream/web_media_stream_audio_sink.h"
#include "third_party/blink/public/platform/modules/webrtc/webrtc_logging.h"
#include "third_party/blink/public/platform/web_media_stream_source.h"
namespace blink {
namespace {
void SendLogMessage(const std::string& message) {
blink::WebRtcLogMessage("MSAT::" + message);
}
} // namespace
MediaStreamAudioTrack::MediaStreamAudioTrack(bool is_local_track)
: WebPlatformMediaStreamTrack(is_local_track), is_enabled_(1) {
DVLOG(1) << "MediaStreamAudioTrack@" << this << "::MediaStreamAudioTrack("
<< (is_local_track ? "local" : "remote") << " track)";
SendLogMessage(
base::StringPrintf("MediaStreamAudioTrack([this=%p] {is_local_track=%s})",
this, (is_local_track ? "local" : "remote")));
}
MediaStreamAudioTrack::~MediaStreamAudioTrack() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DVLOG(1) << "MediaStreamAudioTrack@" << this << " is being destroyed.";
SendLogMessage(base::StringPrintf("~MediaStreamAudioTrack([this=%p])", this));
Stop();
}
......@@ -38,9 +49,7 @@ MediaStreamAudioTrack* MediaStreamAudioTrack::From(
void MediaStreamAudioTrack::AddSink(WebMediaStreamAudioSink* sink) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DVLOG(1) << "Adding WebMediaStreamAudioSink@" << sink
<< " to MediaStreamAudioTrack@" << this << '.';
SendLogMessage(base::StringPrintf("AddSink([this=%p])", this));
// If the track has already stopped, just notify the sink of this fact without
// adding it.
......@@ -55,9 +64,8 @@ void MediaStreamAudioTrack::AddSink(WebMediaStreamAudioSink* sink) {
void MediaStreamAudioTrack::RemoveSink(WebMediaStreamAudioSink* sink) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
SendLogMessage(base::StringPrintf("RemoveSink([this=%p])", this));
deliverer_.RemoveConsumer(sink);
DVLOG(1) << "Removed WebMediaStreamAudioSink@" << sink
<< " from MediaStreamAudioTrack@" << this << '.';
}
media::AudioParameters MediaStreamAudioTrack::GetOutputFormat() const {
......@@ -66,8 +74,8 @@ media::AudioParameters MediaStreamAudioTrack::GetOutputFormat() const {
void MediaStreamAudioTrack::SetEnabled(bool enabled) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DVLOG(1) << "MediaStreamAudioTrack@" << this << "::SetEnabled("
<< (enabled ? 'Y' : 'N') << ')';
SendLogMessage(base::StringPrintf("SetEnabled([this=%p] {enabled=%s})", this,
(enabled ? "true" : "false")));
const bool previously_enabled =
!!base::subtle::NoBarrier_AtomicExchange(&is_enabled_, enabled ? 1 : 0);
......@@ -98,13 +106,13 @@ void MediaStreamAudioTrack::Start(base::OnceClosure stop_callback) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(!stop_callback.is_null());
DCHECK(stop_callback_.is_null());
DVLOG(1) << "Starting MediaStreamAudioTrack@" << this << '.';
SendLogMessage(base::StringPrintf("Start([this=%p])", this));
stop_callback_ = std::move(stop_callback);
}
void MediaStreamAudioTrack::StopAndNotify(base::OnceClosure callback) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DVLOG(1) << "Stopping MediaStreamAudioTrack@" << this << '.';
SendLogMessage(base::StringPrintf("StopAndNotify([this=%p])", this));
if (!stop_callback_.is_null())
std::move(stop_callback_).Run();
......@@ -122,11 +130,22 @@ void MediaStreamAudioTrack::StopAndNotify(base::OnceClosure callback) {
}
void MediaStreamAudioTrack::OnSetFormat(const media::AudioParameters& params) {
SendLogMessage(base::StringPrintf("OnSetFormat([this=%p] {params: [%s]})",
this,
params.AsHumanReadableString().c_str()));
deliverer_.OnSetFormat(params);
}
void MediaStreamAudioTrack::OnData(const media::AudioBus& audio_bus,
base::TimeTicks reference_time) {
if (!received_audio_callback_) {
// Log once, using the unique |this| pointer as an id, to mark the audio
// track as alive at the first data callback.
SendLogMessage(base::StringPrintf(
"OnData([this=%p] => (audio track is alive))", this));
received_audio_callback_ = true;
}
// Note: Using NoBarrier_Load because the timing of when the audio thread sees
// a changed |is_enabled_| value can be relaxed.
const bool deliver_data = !!base::subtle::NoBarrier_Load(&is_enabled_);
......
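The enabled check at the end of OnData() keeps the existing base::subtle::NoBarrier_* access on an int; as the comment notes, the audio thread may observe a toggle late without harm. For reference, a sketch of the same idea in portable C++ using std::atomic with relaxed ordering (a modern equivalent, not what this CL uses):

#include <atomic>

// Control thread toggles; audio thread polls. Relaxed ordering suffices
// because it only affects *when* the audio thread observes the change.
std::atomic<int> is_enabled{1};

void SetEnabled(bool enabled) {
  is_enabled.store(enabled ? 1 : 0, std::memory_order_relaxed);
}

bool ShouldDeliverData() {
  return is_enabled.load(std::memory_order_relaxed) != 0;
}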
......@@ -6,9 +6,11 @@
#include "base/bind.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/stringprintf.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "third_party/blink/public/platform/modules/mediastream/media_stream_audio_track.h"
#include "third_party/blink/public/platform/modules/webrtc/webrtc_logging.h"
#include "third_party/blink/public/platform/web_media_stream_source.h"
#include "third_party/blink/public/platform/web_string.h"
#include "third_party/blink/renderer/platform/scheduler/public/post_cross_thread_task.h"
......@@ -17,6 +19,14 @@
namespace blink {
namespace {
void SendLogMessage(const std::string& message) {
blink::WebRtcLogMessage("MSAS::" + message);
}
} // namespace
const int kMaxAudioLatencyMs = 5000;
static_assert(std::numeric_limits<int>::max() / media::limits::kMaxSampleRate >
kMaxAudioLatencyMs,
......@@ -45,8 +55,9 @@ MediaStreamAudioSource::MediaStreamAudioSource(
disable_local_echo_(disable_local_echo),
is_stopped_(false),
task_runner_(std::move(task_runner)) {
DVLOG(1) << "MediaStreamAudioSource@" << this << "::MediaStreamAudioSource("
<< (is_local_source_ ? "local" : "remote") << " source)";
SendLogMessage(base::StringPrintf(
"MediaStreamAudioSource([this=%p] {is_local_source=%s})", this,
(is_local_source ? "local" : "remote")));
}
MediaStreamAudioSource::MediaStreamAudioSource(
......@@ -58,7 +69,8 @@ MediaStreamAudioSource::MediaStreamAudioSource(
MediaStreamAudioSource::~MediaStreamAudioSource() {
DCHECK(task_runner_->BelongsToCurrentThread());
DVLOG(1) << "MediaStreamAudioSource@" << this << " is being destroyed.";
SendLogMessage(
base::StringPrintf("~MediaStreamAudioSource([this=%p])", this));
}
// static
......@@ -74,6 +86,8 @@ bool MediaStreamAudioSource::ConnectToTrack(
const WebMediaStreamTrack& blink_track) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(!blink_track.IsNull());
SendLogMessage(base::StringPrintf("ConnectToTrack({track_id=%s})",
blink_track.Id().Utf8().c_str()));
// Sanity-check that there is not already a MediaStreamAudioTrack instance
// associated with |blink_track|.
......@@ -167,6 +181,8 @@ void MediaStreamAudioSource::DoChangeSource(
std::unique_ptr<MediaStreamAudioTrack>
MediaStreamAudioSource::CreateMediaStreamAudioTrack(const std::string& id) {
DCHECK(task_runner_->BelongsToCurrentThread());
SendLogMessage(
base::StringPrintf("CreateMediaStreamAudioTrack({id=%s})", id.c_str()));
return std::unique_ptr<MediaStreamAudioTrack>(
new MediaStreamAudioTrack(is_local_source()));
}
......@@ -190,9 +206,10 @@ void MediaStreamAudioSource::ChangeSourceImpl(
}
void MediaStreamAudioSource::SetFormat(const media::AudioParameters& params) {
DVLOG(1) << "MediaStreamAudioSource@" << this << "::SetFormat("
<< params.AsHumanReadableString() << "), was previously set to {"
<< deliverer_.GetAudioParameters().AsHumanReadableString() << "}.";
SendLogMessage(base::StringPrintf(
"SetFormat([this=%p] {params=[%s]}, {old_params=[%s]})", this,
params.AsHumanReadableString().c_str(),
deliverer_.GetAudioParameters().AsHumanReadableString().c_str()));
deliverer_.OnSetFormat(params);
}
......@@ -210,6 +227,7 @@ void MediaStreamAudioSource::DoStopSource() {
void MediaStreamAudioSource::StopAudioDeliveryTo(MediaStreamAudioTrack* track) {
DCHECK(task_runner_->BelongsToCurrentThread());
SendLogMessage(base::StringPrintf("StopAudioDeliveryTo([this=%p])", this));
const bool did_remove_last_track = deliverer_.RemoveConsumer(track);
DVLOG(1) << "Removed MediaStreamAudioTrack@" << track
......@@ -222,8 +240,8 @@ void MediaStreamAudioSource::StopAudioDeliveryTo(MediaStreamAudioTrack* track) {
}
void MediaStreamAudioSource::StopSourceOnError(const std::string& why) {
VLOG(1) << why;
SendLogMessage(base::StringPrintf("StopSourceOnError([this=%p] {why=%s})",
this, why.c_str()));
// Stop source when error occurs.
PostCrossThreadTask(
*task_runner_, FROM_HERE,
......@@ -232,7 +250,8 @@ void MediaStreamAudioSource::StopSourceOnError(const std::string& why) {
}
void MediaStreamAudioSource::SetMutedState(bool muted_state) {
DVLOG(3) << "MediaStreamAudioSource::SetMutedState state=" << muted_state;
SendLogMessage(base::StringPrintf("SetMutedState([this=%p] {muted_state=%s})",
this, (muted_state ? "true" : "false")));
PostCrossThreadTask(
*task_runner_, FROM_HERE,
WTF::CrossThreadBindOnce(&WebPlatformMediaStreamSource::SetSourceMuted,
......
......@@ -30,6 +30,7 @@
#include "third_party/blink/renderer/platform/mediastream/media_stream_source.h"
#include "third_party/blink/public/platform/modules/webrtc/webrtc_logging.h"
#include "third_party/blink/renderer/platform/mediastream/media_stream_audio_source.h"
#include "third_party/blink/renderer/platform/wtf/assertions.h"
......@@ -37,6 +38,36 @@ namespace blink {
namespace {
void SendLogMessage(const std::string& message) {
blink::WebRtcLogMessage("MSS::" + message);
}
const char* StreamTypeToString(MediaStreamSource::StreamType type) {
switch (type) {
case MediaStreamSource::kTypeAudio:
return "Audio";
case MediaStreamSource::kTypeVideo:
return "Video";
default:
NOTREACHED();
}
return "Invalid";
}
const char* ReadyStateToString(MediaStreamSource::ReadyState state) {
switch (state) {
case MediaStreamSource::kReadyStateLive:
return "Live";
case MediaStreamSource::kReadyStateMuted:
return "Muted";
case MediaStreamSource::kReadyStateEnded:
return "Ended";
default:
NOTREACHED();
}
return "Invalid";
}
void GetSourceSettings(const blink::WebMediaStreamSource& web_source,
blink::WebMediaStreamTrack::Settings& settings) {
blink::MediaStreamAudioSource* const source =
......@@ -68,13 +99,28 @@ MediaStreamSource::MediaStreamSource(const String& id,
name_(name),
remote_(remote),
ready_state_(ready_state),
requires_consumer_(requires_consumer) {}
requires_consumer_(requires_consumer) {
SendLogMessage(
String::Format(
"MediaStreamSource({id=%s}, {type=%s}, {name=%s}, {remote=%d}, "
"{ready_state=%s}",
id.Utf8().c_str(), StreamTypeToString(type), name.Utf8().c_str(),
remote, ReadyStateToString(ready_state))
.Utf8());
}
void MediaStreamSource::SetGroupId(const String& group_id) {
SendLogMessage(
String::Format("SetGroupId({group_id=%s})", group_id.Utf8().c_str())
.Utf8());
group_id_ = group_id;
}
void MediaStreamSource::SetReadyState(ReadyState ready_state) {
SendLogMessage(String::Format("SetReadyState({id=%s}, {ready_state=%s})",
Id().Utf8().c_str(),
ReadyStateToString(ready_state))
.Utf8());
if (ready_state_ != kReadyStateEnded && ready_state_ != ready_state) {
ready_state_ = ready_state;
......@@ -171,6 +217,12 @@ void MediaStreamSource::GetSettings(WebMediaStreamTrack::Settings& settings) {
void MediaStreamSource::SetAudioFormat(size_t number_of_channels,
float sample_rate) {
SendLogMessage(
String::Format(
"SetAudioFormat({id=%s}, {number_of_channels=%d}, {sample_rate=%f})",
Id().Utf8().c_str(), static_cast<int>(number_of_channels),
sample_rate)
.Utf8());
DCHECK(requires_consumer_);
MutexLocker locker(audio_consumers_lock_);
for (AudioDestinationConsumer* consumer : audio_consumers_)
......
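Unlike the std::string-based files above, media_stream_source.cc sits in Blink's platform layer, where ids and names are WTF Strings; hence the String::Format(...).Utf8() pattern here rather than base::StringPrintf.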
......@@ -11,10 +11,20 @@
#include "base/location.h"
#include "base/logging.h"
#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
#include "third_party/blink/public/platform/modules/webrtc/webrtc_logging.h"
#include "third_party/blink/renderer/platform/scheduler/public/post_cross_thread_task.h"
#include "third_party/blink/renderer/platform/wtf/cross_thread_functional.h"
#include "third_party/webrtc/rtc_base/ref_counted_object.h"
namespace {
void SendLogMessage(const std::string& message) {
blink::WebRtcLogMessage("WRAS::" + message);
}
} // namespace
namespace WTF {
template <>
......@@ -48,12 +58,14 @@ WebRtcAudioSink::WebRtcAudioSink(
fifo_(ConvertToBaseRepeatingCallback(
CrossThreadBindRepeating(&WebRtcAudioSink::DeliverRebufferedAudio,
CrossThreadUnretained(this)))) {
DVLOG(1) << "WebRtcAudioSink::WebRtcAudioSink()";
SendLogMessage(base::StringPrintf("WebRtcAudioSink({label=%s})",
adapter_->label().c_str()));
}
WebRtcAudioSink::~WebRtcAudioSink() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DVLOG(1) << "WebRtcAudioSink::~WebRtcAudioSink()";
SendLogMessage(base::StringPrintf("~WebRtcAudioSink([label=%s])",
adapter_->label().c_str()));
}
void WebRtcAudioSink::SetAudioProcessor(
......@@ -72,6 +84,9 @@ void WebRtcAudioSink::SetLevel(
void WebRtcAudioSink::OnEnabledChanged(bool enabled) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
SendLogMessage(base::StringPrintf("OnEnabledChanged([label=%s] {enabled=%s})",
adapter_->label().c_str(),
(enabled ? "true" : "false")));
PostCrossThreadTask(
*adapter_->signaling_task_runner(), FROM_HERE,
CrossThreadBindOnce(
......@@ -90,6 +105,9 @@ void WebRtcAudioSink::OnData(const media::AudioBus& audio_bus,
void WebRtcAudioSink::OnSetFormat(const media::AudioParameters& params) {
DCHECK(params.IsValid());
SendLogMessage(base::StringPrintf("OnSetFormat([label=%s] {params=[%s]})",
adapter_->label().c_str(),
params.AsHumanReadableString().c_str()));
params_ = params;
// Make sure that our params always reflect a buffer size of 10ms.
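// E.g. at 48000 Hz this yields 48000 / 100 = 480 frames per buffer.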
params_.set_frames_per_buffer(params_.sample_rate() / 100);
......@@ -129,14 +147,19 @@ WebRtcAudioSink::Adapter::Adapter(
scoped_refptr<base::SingleThreadTaskRunner> signaling_task_runner,
scoped_refptr<base::SingleThreadTaskRunner> main_task_runner)
: webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>(label),
label_(label),
source_(std::move(source)),
signaling_task_runner_(std::move(signaling_task_runner)),
main_task_runner_(std::move(main_task_runner)) {
DCHECK(signaling_task_runner_);
DCHECK(main_task_runner_);
SendLogMessage(
base::StringPrintf("Adapter::Adapter({label=%s})", label_.c_str()));
}
WebRtcAudioSink::Adapter::~Adapter() {
SendLogMessage(
base::StringPrintf("Adapter::~Adapter([label=%s])", label_.c_str()));
if (audio_processor_) {
PostCrossThreadTask(*main_task_runner_.get(), FROM_HERE,
CrossThreadBindOnce(&DereferenceOnMainThread,
......@@ -163,6 +186,9 @@ std::string WebRtcAudioSink::Adapter::kind() const {
bool WebRtcAudioSink::Adapter::set_enabled(bool enable) {
DCHECK(!signaling_task_runner_ ||
signaling_task_runner_->RunsTasksInCurrentSequence());
SendLogMessage(
base::StringPrintf("Adapter::set_enabled([label=%s] {enable=%s})",
label_.c_str(), (enable ? "true" : "false")));
return webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>::set_enabled(
enable);
}
......@@ -171,6 +197,8 @@ void WebRtcAudioSink::Adapter::AddSink(webrtc::AudioTrackSinkInterface* sink) {
DCHECK(!signaling_task_runner_ ||
signaling_task_runner_->RunsTasksInCurrentSequence());
DCHECK(sink);
SendLogMessage(
base::StringPrintf("Adapter::AddSink({label=%s})", label_.c_str()));
base::AutoLock auto_lock(lock_);
DCHECK(!base::Contains(sinks_, sink));
sinks_.push_back(sink);
......@@ -180,6 +208,8 @@ void WebRtcAudioSink::Adapter::RemoveSink(
webrtc::AudioTrackSinkInterface* sink) {
DCHECK(!signaling_task_runner_ ||
signaling_task_runner_->RunsTasksInCurrentSequence());
SendLogMessage(
base::StringPrintf("Adapter::RemoveSink([label=%s])", label_.c_str()));
base::AutoLock auto_lock(lock_);
const auto it = std::find(sinks_.begin(), sinks_.end(), sink);
if (it != sinks_.end())
......@@ -200,6 +230,9 @@ bool WebRtcAudioSink::Adapter::GetSignalLevel(int* level) {
// Convert from float in range [0.0,1.0] to an int in range [0,32767].
*level = static_cast<int>(signal_level * std::numeric_limits<int16_t>::max() +
0.5f /* rounding to nearest int */);
SendLogMessage(
base::StringPrintf("Adapter::GetSignalLevel([label=%s]) => (level=%d)",
label_.c_str(), *level));
return true;
}
......
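For reference, the GetSignalLevel() conversion above maps the float range [0.0, 1.0] onto the int16 range [0, 32767] with round-to-nearest. A self-contained sketch of the same arithmetic:

#include <cstdint>
#include <limits>

// Maps |signal_level| in [0.0, 1.0] to [0, 32767], rounding to nearest.
int FloatLevelToInt16Range(float signal_level) {
  return static_cast<int>(
      signal_level * std::numeric_limits<int16_t>::max() + 0.5f);
}
// FloatLevelToInt16Range(0.5f) == 16384; FloatLevelToInt16Range(1.0f) == 32767.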
......@@ -102,6 +102,8 @@ class PLATFORM_EXPORT WebRtcAudioSink : public WebMediaStreamAudioSink {
size_t number_of_channels,
size_t number_of_frames);
std::string label() const { return label_; }
// webrtc::MediaStreamTrack implementation.
std::string kind() const override;
bool set_enabled(bool enable) override;
......@@ -118,6 +120,8 @@ class PLATFORM_EXPORT WebRtcAudioSink : public WebMediaStreamAudioSink {
~Adapter() override;
private:
const std::string label_;
const scoped_refptr<webrtc::AudioSourceInterface> source_;
// Task runner for operations that must be done on libjingle's signaling
......
......@@ -5,8 +5,10 @@
#include "third_party/blink/renderer/platform/webrtc/peer_connection_remote_audio_source.h"
#include "base/logging.h"
#include "base/strings/stringprintf.h"
#include "base/time/time.h"
#include "media/base/audio_bus.h"
#include "third_party/blink/public/platform/modules/webrtc/webrtc_logging.h"
namespace blink {
......@@ -14,19 +16,26 @@ namespace {
// Used as an identifier for the down-casters.
void* const kPeerConnectionRemoteTrackIdentifier =
const_cast<void**>(&kPeerConnectionRemoteTrackIdentifier);
void SendLogMessage(const std::string& message) {
blink::WebRtcLogMessage("PCRAS::" + message);
}
} // namespace
PeerConnectionRemoteAudioTrack::PeerConnectionRemoteAudioTrack(
scoped_refptr<webrtc::AudioTrackInterface> track_interface)
: MediaStreamAudioTrack(false /* is_local_track */),
track_interface_(std::move(track_interface)) {
DVLOG(1)
<< "PeerConnectionRemoteAudioTrack::PeerConnectionRemoteAudioTrack()";
blink::WebRtcLogMessage(
base::StringPrintf("PCRAT::PeerConnectionRemoteAudioTrack({id=%s})",
track_interface_->id().c_str()));
}
PeerConnectionRemoteAudioTrack::~PeerConnectionRemoteAudioTrack() {
DVLOG(1)
<< "PeerConnectionRemoteAudioTrack::~PeerConnectionRemoteAudioTrack()";
blink::WebRtcLogMessage(
base::StringPrintf("PCRAT::~PeerConnectionRemoteAudioTrack([id=%s])",
track_interface_->id().c_str()));
// Ensure the track is stopped.
MediaStreamAudioTrack::Stop();
}
......@@ -42,6 +51,9 @@ PeerConnectionRemoteAudioTrack* PeerConnectionRemoteAudioTrack::From(
void PeerConnectionRemoteAudioTrack::SetEnabled(bool enabled) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
blink::WebRtcLogMessage(base::StringPrintf(
"PCRAT::SetEnabled([id=%s] {enabled=%s})", track_interface_->id().c_str(),
(enabled ? "true" : "false")));
// This affects the shared state of the source for whether or not it's a part
// of the mixed audio that's rendered for remote tracks from WebRTC.
......@@ -67,13 +79,13 @@ PeerConnectionRemoteAudioSource::PeerConnectionRemoteAudioSource(
track_interface_(std::move(track_interface)),
is_sink_of_peer_connection_(false) {
DCHECK(track_interface_);
DVLOG(1)
<< "PeerConnectionRemoteAudioSource::PeerConnectionRemoteAudioSource()";
SendLogMessage(base::StringPrintf("PeerConnectionRemoteAudioSource([id=%s])",
track_interface_->id().c_str()));
}
PeerConnectionRemoteAudioSource::~PeerConnectionRemoteAudioSource() {
DVLOG(1)
<< "PeerConnectionRemoteAudioSource::~PeerConnectionRemoteAudioSource()";
SendLogMessage(base::StringPrintf("~PeerConnectionRemoteAudioSource([id=%s])",
track_interface_->id().c_str()));
EnsureSourceIsStopped();
}
......@@ -88,8 +100,8 @@ bool PeerConnectionRemoteAudioSource::EnsureSourceIsStarted() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
if (is_sink_of_peer_connection_)
return true;
VLOG(1) << "Starting PeerConnection remote audio source with id="
<< track_interface_->id();
SendLogMessage(base::StringPrintf("EnsureSourceIsStarted([id=%s])",
track_interface_->id().c_str()));
track_interface_->AddSink(this);
is_sink_of_peer_connection_ = true;
return true;
......@@ -98,10 +110,10 @@ bool PeerConnectionRemoteAudioSource::EnsureSourceIsStarted() {
void PeerConnectionRemoteAudioSource::EnsureSourceIsStopped() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
if (is_sink_of_peer_connection_) {
SendLogMessage(base::StringPrintf("EnsureSourceIsStopped([id=%s])",
track_interface_->id().c_str()));
track_interface_->RemoveSink(this);
is_sink_of_peer_connection_ = false;
VLOG(1) << "Stopped PeerConnection remote audio source with id="
<< track_interface_->id();
}
}
......
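Both kPeerConnectionRemoteTrackIdentifier above and kProcessedLocalAudioSourceIdentifier earlier in this CL rely on the same down-cast trick: a file-local constant whose own address serves as a unique class identifier that From() can compare against before casting. A minimal sketch of the pattern with hypothetical names:

// The unique address of this variable identifies the subclass.
void* const kMyTrackIdentifier = const_cast<void**>(&kMyTrackIdentifier);

class BaseTrack {
 public:
  virtual ~BaseTrack() = default;
  virtual void* GetClassIdentifier() const { return nullptr; }
};

class MyTrack final : public BaseTrack {
 public:
  void* GetClassIdentifier() const override { return kMyTrackIdentifier; }

  static MyTrack* From(BaseTrack* track) {
    if (track && track->GetClassIdentifier() == kMyTrackIdentifier)
      return static_cast<MyTrack*>(track);
    return nullptr;
  }
};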