Commit f289287b authored by Kenneth Russell, committed by Commit Bot

Revert "Add WebRTC logging to Windows audio input implementation."

This reverts commit df64b29e.

Reason for revert: Caused http://crbug.com/790557 .

Original change's description:
> Add WebRTC logging to Windows audio input implementation.
> 
> Log failures in Open() and Start().
> 
> Bug: 757737
> Cq-Include-Trybots: master.tryserver.chromium.android:android_optional_gpu_tests_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel
> Change-Id: Idaad68af9aabc753cb4fca17b2dfff3cdf69d83b
> Reviewed-on: https://chromium-review.googlesource.com/795727
> Commit-Queue: Henrik Grunell <grunell@chromium.org>
> Reviewed-by: Max Morin <maxmorin@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#520562}

TBR=grunell@chromium.org,maxmorin@chromium.org,marinaciocea@chromium.org

Change-Id: I54d97e5063d5a63325977ce19753d1e5439dd396
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: 757737, 790557
Cq-Include-Trybots: master.tryserver.chromium.android:android_optional_gpu_tests_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel
Reviewed-on: https://chromium-review.googlesource.com/801263
Reviewed-by: Kenneth Russell <kbr@chromium.org>
Commit-Queue: Kenneth Russell <kbr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#520648}
parent a7e8185c
...@@ -91,7 +91,7 @@ class MEDIA_EXPORT AudioManager { ...@@ -91,7 +91,7 @@ class MEDIA_EXPORT AudioManager {
// Log callback used for sending log messages from a stream to the object // Log callback used for sending log messages from a stream to the object
// that manages the stream. // that manages the stream.
using LogCallback = base::RepeatingCallback<void(const std::string&)>; using LogCallback = base::Callback<void(const std::string&)>;
// Factory for all the supported stream formats. |params| defines parameters // Factory for all the supported stream formats. |params| defines parameters
// of the audio stream to be created. // of the audio stream to be created.
......
...@@ -12,7 +12,6 @@ ...@@ -12,7 +12,6 @@
#include "base/logging.h" #include "base/logging.h"
#include "base/metrics/histogram_macros.h" #include "base/metrics/histogram_macros.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h" #include "base/strings/utf_string_conversions.h"
#include "base/trace_event/trace_event.h" #include "base/trace_event/trace_event.h"
#include "media/audio/audio_device_description.h" #include "media/audio/audio_device_description.h"
...@@ -28,9 +27,7 @@ ...@@ -28,9 +27,7 @@
using base::win::ScopedCOMInitializer; using base::win::ScopedCOMInitializer;
namespace media { namespace media {
namespace { namespace {
bool IsSupportedFormatForConversion(const WAVEFORMATEX& format) { bool IsSupportedFormatForConversion(const WAVEFORMATEX& format) {
if (format.nSamplesPerSec < limits::kMinSampleRate || if (format.nSamplesPerSec < limits::kMinSampleRate ||
format.nSamplesPerSec > limits::kMaxSampleRate) { format.nSamplesPerSec > limits::kMaxSampleRate) {
...@@ -53,18 +50,14 @@ bool IsSupportedFormatForConversion(const WAVEFORMATEX& format) { ...@@ -53,18 +50,14 @@ bool IsSupportedFormatForConversion(const WAVEFORMATEX& format) {
return true; return true;
} }
} // namespace } // namespace
WASAPIAudioInputStream::WASAPIAudioInputStream( WASAPIAudioInputStream::WASAPIAudioInputStream(AudioManagerWin* manager,
AudioManagerWin* manager, const AudioParameters& params,
const AudioParameters& params, const std::string& device_id)
const std::string& device_id, : manager_(manager), device_id_(device_id) {
const AudioManager::LogCallback& log_callback)
: manager_(manager), device_id_(device_id), log_callback_(log_callback) {
DCHECK(manager_); DCHECK(manager_);
DCHECK(!device_id_.empty()); DCHECK(!device_id_.empty());
DCHECK(!log_callback_.is_null());
// Load the Avrt DLL if not already loaded. Required to support MMCSS. // Load the Avrt DLL if not already loaded. Required to support MMCSS.
bool avrt_init = avrt::Initialize(); bool avrt_init = avrt::Initialize();
...@@ -109,17 +102,15 @@ bool WASAPIAudioInputStream::Open() { ...@@ -109,17 +102,15 @@ bool WASAPIAudioInputStream::Open() {
DCHECK_EQ(OPEN_RESULT_OK, open_result_); DCHECK_EQ(OPEN_RESULT_OK, open_result_);
// Verify that we are not already opened. // Verify that we are not already opened.
if (opened_) { if (opened_)
log_callback_.Run("WASAPIAIS::Open: already open");
return false; return false;
}
// Obtain a reference to the IMMDevice interface of the capturing // Obtain a reference to the IMMDevice interface of the capturing
// device with the specified unique identifier or role which was // device with the specified unique identifier or role which was
// set at construction. // set at construction.
HRESULT hr = SetCaptureDevice(); HRESULT hr = SetCaptureDevice();
if (FAILED(hr)) { if (FAILED(hr)) {
ReportOpenResult(hr); ReportOpenResult();
return false; return false;
} }
...@@ -129,7 +120,7 @@ bool WASAPIAudioInputStream::Open() { ...@@ -129,7 +120,7 @@ bool WASAPIAudioInputStream::Open() {
NULL, &audio_client_); NULL, &audio_client_);
if (FAILED(hr)) { if (FAILED(hr)) {
open_result_ = OPEN_RESULT_ACTIVATION_FAILED; open_result_ = OPEN_RESULT_ACTIVATION_FAILED;
ReportOpenResult(hr); ReportOpenResult();
return false; return false;
} }
...@@ -142,10 +133,9 @@ bool WASAPIAudioInputStream::Open() { ...@@ -142,10 +133,9 @@ bool WASAPIAudioInputStream::Open() {
// Verify that the selected audio endpoint supports the specified format // Verify that the selected audio endpoint supports the specified format
// set during construction. // set during construction.
hr = S_OK; if (!DesiredFormatIsSupported()) {
if (!DesiredFormatIsSupported(&hr)) {
open_result_ = OPEN_RESULT_FORMAT_NOT_SUPPORTED; open_result_ = OPEN_RESULT_FORMAT_NOT_SUPPORTED;
ReportOpenResult(hr); ReportOpenResult();
return false; return false;
} }
...@@ -154,7 +144,7 @@ bool WASAPIAudioInputStream::Open() { ...@@ -154,7 +144,7 @@ bool WASAPIAudioInputStream::Open() {
hr = InitializeAudioEngine(); hr = InitializeAudioEngine();
if (SUCCEEDED(hr) && converter_) if (SUCCEEDED(hr) && converter_)
open_result_ = OPEN_RESULT_OK_WITH_RESAMPLING; open_result_ = OPEN_RESULT_OK_WITH_RESAMPLING;
ReportOpenResult(hr); // Report before we assign a value to |opened_|. ReportOpenResult(); // Report before we assign a value to |opened_|.
opened_ = SUCCEEDED(hr); opened_ = SUCCEEDED(hr);
return opened_; return opened_;
...@@ -201,20 +191,10 @@ void WASAPIAudioInputStream::Start(AudioInputCallback* callback) { ...@@ -201,20 +191,10 @@ void WASAPIAudioInputStream::Start(AudioInputCallback* callback) {
// Start streaming data between the endpoint buffer and the audio engine. // Start streaming data between the endpoint buffer and the audio engine.
HRESULT hr = audio_client_->Start(); HRESULT hr = audio_client_->Start();
if (FAILED(hr)) { DLOG_IF(ERROR, FAILED(hr)) << "Failed to start input streaming.";
DLOG(ERROR) << "Failed to start input streaming.";
log_callback_.Run(base::StringPrintf(
"WASAPIAIS::Start: Failed to start audio client, hresult = %#lx", hr));
}
if (SUCCEEDED(hr) && audio_render_client_for_loopback_.Get()) { if (SUCCEEDED(hr) && audio_render_client_for_loopback_.Get())
hr = audio_render_client_for_loopback_->Start(); hr = audio_render_client_for_loopback_->Start();
if (FAILED(hr))
log_callback_.Run(base::StringPrintf(
"WASAPIAIS::Start: Failed to start render client for loopback, "
"hresult = %#lx",
hr));
}
started_ = SUCCEEDED(hr); started_ = SUCCEEDED(hr);
} }
...@@ -630,24 +610,23 @@ HRESULT WASAPIAudioInputStream::GetAudioEngineStreamFormat() { ...@@ -630,24 +610,23 @@ HRESULT WASAPIAudioInputStream::GetAudioEngineStreamFormat() {
return hr; return hr;
} }
bool WASAPIAudioInputStream::DesiredFormatIsSupported(HRESULT* hr) { bool WASAPIAudioInputStream::DesiredFormatIsSupported() {
// An application that uses WASAPI to manage shared-mode streams can rely // An application that uses WASAPI to manage shared-mode streams can rely
// on the audio engine to perform only limited format conversions. The audio // on the audio engine to perform only limited format conversions. The audio
// engine can convert between a standard PCM sample size used by the // engine can convert between a standard PCM sample size used by the
// application and the floating-point samples that the engine uses for its // application and the floating-point samples that the engine uses for its
// internal processing. However, the format for an application stream // internal processing. However, the format for an application stream
// typically must have the same number of channels and the same sample // typically must have the same number of channels and the same sample
// rate as the stream format used by the device. // rate as the stream format used byfCHANNEL_LAYOUT_UNSUPPORTED the device.
// Many audio devices support both PCM and non-PCM stream formats. However, // Many audio devices support both PCM and non-PCM stream formats. However,
// the audio engine can mix only PCM streams. // the audio engine can mix only PCM streams.
base::win::ScopedCoMem<WAVEFORMATEX> closest_match; base::win::ScopedCoMem<WAVEFORMATEX> closest_match;
HRESULT hresult = audio_client_->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, HRESULT hr = audio_client_->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED,
&format_, &closest_match); &format_, &closest_match);
DLOG_IF(ERROR, hresult == S_FALSE) DLOG_IF(ERROR, hr == S_FALSE)
<< "Format is not supported but a closest match exists."; << "Format is not supported but a closest match exists.";
if (hresult == S_FALSE && if (hr == S_FALSE && IsSupportedFormatForConversion(*closest_match.get())) {
IsSupportedFormatForConversion(*closest_match.get())) {
DVLOG(1) << "Audio capture data conversion needed."; DVLOG(1) << "Audio capture data conversion needed.";
// Ideally, we want a 1:1 ratio between the buffers we get and the buffers // Ideally, we want a 1:1 ratio between the buffers we get and the buffers
// we give to OnData so that each buffer we receive from the OS can be // we give to OnData so that each buffer we receive from the OS can be
...@@ -702,15 +681,10 @@ bool WASAPIAudioInputStream::DesiredFormatIsSupported(HRESULT* hr) { ...@@ -702,15 +681,10 @@ bool WASAPIAudioInputStream::DesiredFormatIsSupported(HRESULT* hr) {
<< "Audio capture data conversion: Need to inject fifo"; << "Audio capture data conversion: Need to inject fifo";
// Indicate that we're good to go with a close match. // Indicate that we're good to go with a close match.
hresult = S_OK; hr = S_OK;
} }
// At this point, |hresult| == S_OK if the desired format is supported. If return (hr == S_OK);
// |hresult| == S_FALSE, the OS supports a closest match but we don't support
// conversion to it. Thus, SUCCEEDED() or FAILED() can't be used to determine
// if the desired format is supported.
*hr = hresult;
return (hresult == S_OK);
} }
HRESULT WASAPIAudioInputStream::InitializeAudioEngine() { HRESULT WASAPIAudioInputStream::InitializeAudioEngine() {
...@@ -846,16 +820,10 @@ HRESULT WASAPIAudioInputStream::InitializeAudioEngine() { ...@@ -846,16 +820,10 @@ HRESULT WASAPIAudioInputStream::InitializeAudioEngine() {
return hr; return hr;
} }
void WASAPIAudioInputStream::ReportOpenResult(HRESULT hr) const { void WASAPIAudioInputStream::ReportOpenResult() const {
DCHECK(!opened_); // This method must be called before we set this flag. DCHECK(!opened_); // This method must be called before we set this flag.
UMA_HISTOGRAM_ENUMERATION("Media.Audio.Capture.Win.Open", open_result_, UMA_HISTOGRAM_ENUMERATION("Media.Audio.Capture.Win.Open", open_result_,
OPEN_RESULT_MAX + 1); OPEN_RESULT_MAX + 1);
if (open_result_ != OPEN_RESULT_OK &&
open_result_ != OPEN_RESULT_OK_WITH_RESAMPLING) {
log_callback_.Run(base::StringPrintf(
"WASAPIAIS::Open: failed, result = %d, hresult = %#lx", open_result_,
hr));
}
} }
double WASAPIAudioInputStream::ProvideInput(AudioBus* audio_bus, double WASAPIAudioInputStream::ProvideInput(AudioBus* audio_bus,
......
...@@ -75,7 +75,6 @@ ...@@ -75,7 +75,6 @@
#include "base/win/scoped_com_initializer.h" #include "base/win/scoped_com_initializer.h"
#include "base/win/scoped_handle.h" #include "base/win/scoped_handle.h"
#include "media/audio/agc_audio_stream.h" #include "media/audio/agc_audio_stream.h"
#include "media/audio/audio_manager.h"
#include "media/base/audio_converter.h" #include "media/base/audio_converter.h"
#include "media/base/audio_parameters.h" #include "media/base/audio_parameters.h"
#include "media/base/media_export.h" #include "media/base/media_export.h"
...@@ -96,8 +95,7 @@ class MEDIA_EXPORT WASAPIAudioInputStream ...@@ -96,8 +95,7 @@ class MEDIA_EXPORT WASAPIAudioInputStream
// the audio manager who is creating this object. // the audio manager who is creating this object.
WASAPIAudioInputStream(AudioManagerWin* manager, WASAPIAudioInputStream(AudioManagerWin* manager,
const AudioParameters& params, const AudioParameters& params,
const std::string& device_id, const std::string& device_id);
const AudioManager::LogCallback& log_callback);
// The dtor is typically called by the AudioManager only and it is usually // The dtor is typically called by the AudioManager only and it is usually
// triggered by calling AudioInputStream::Close(). // triggered by calling AudioInputStream::Close().
...@@ -125,13 +123,9 @@ class MEDIA_EXPORT WASAPIAudioInputStream ...@@ -125,13 +123,9 @@ class MEDIA_EXPORT WASAPIAudioInputStream
// The Open() method is divided into these sub methods. // The Open() method is divided into these sub methods.
HRESULT SetCaptureDevice(); HRESULT SetCaptureDevice();
HRESULT GetAudioEngineStreamFormat(); HRESULT GetAudioEngineStreamFormat();
// Returns whether the desired format is supported or not and writes the bool DesiredFormatIsSupported();
// result of a failing system call to |*hr|, or S_OK if successful. If this
// function returns false with |*hr| == S_FALSE, the OS supports a closest
// match but we don't support conversion to it.
bool DesiredFormatIsSupported(HRESULT* hr);
HRESULT InitializeAudioEngine(); HRESULT InitializeAudioEngine();
void ReportOpenResult(HRESULT hr) const; void ReportOpenResult() const;
// AudioConverter::InputCallback implementation. // AudioConverter::InputCallback implementation.
double ProvideInput(AudioBus* audio_bus, uint32_t frames_delayed) override; double ProvideInput(AudioBus* audio_bus, uint32_t frames_delayed) override;
...@@ -252,9 +246,6 @@ class MEDIA_EXPORT WASAPIAudioInputStream ...@@ -252,9 +246,6 @@ class MEDIA_EXPORT WASAPIAudioInputStream
std::unique_ptr<AudioBus> convert_bus_; std::unique_ptr<AudioBus> convert_bus_;
bool imperfect_buffer_size_conversion_ = false; bool imperfect_buffer_size_conversion_ = false;
// Callback to send log messages.
AudioManager::LogCallback log_callback_;
SEQUENCE_CHECKER(sequence_checker_); SEQUENCE_CHECKER(sequence_checker_);
DISALLOW_COPY_AND_ASSIGN(WASAPIAudioInputStream); DISALLOW_COPY_AND_ASSIGN(WASAPIAudioInputStream);
......
...@@ -249,7 +249,7 @@ AudioInputStream* AudioManagerWin::MakeLowLatencyInputStream( ...@@ -249,7 +249,7 @@ AudioInputStream* AudioManagerWin::MakeLowLatencyInputStream(
const LogCallback& log_callback) { const LogCallback& log_callback) {
// Used for both AUDIO_PCM_LOW_LATENCY and AUDIO_PCM_LINEAR. // Used for both AUDIO_PCM_LOW_LATENCY and AUDIO_PCM_LINEAR.
DVLOG(1) << "MakeLowLatencyInputStream: " << device_id; DVLOG(1) << "MakeLowLatencyInputStream: " << device_id;
return new WASAPIAudioInputStream(this, params, device_id, log_callback); return new WASAPIAudioInputStream(this, params, device_id);
} }
std::string AudioManagerWin::GetDefaultOutputDeviceID() { std::string AudioManagerWin::GetDefaultOutputDeviceID() {
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment