Commit 36cf37d8 authored by sergeyu@chromium.org

Add loopback audio capture on Windows.

Loopback audio capture will be used by the desktop capture API to
capture system audio so that it can be included in screencast streams.

BUG=223639

Review URL: https://chromiumcodereview.appspot.com/23848005

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@223569 0039d316-1c4b-4281-b951-d872f2087c98
parent c6f695df
......@@ -34,12 +34,9 @@
#include "content/public/common/media_stream_request.h"
#include "extensions/common/constants.h"
#include "grit/generated_resources.h"
#include "media/audio/audio_manager_base.h"
#include "ui/base/l10n/l10n_util.h"
#if defined(USE_CRAS)
#include "media/audio/cras/audio_manager_cras.h"
#endif
using content::BrowserThread;
using content::MediaStreamDevices;
......@@ -288,7 +285,7 @@ void MediaCaptureDevicesDispatcher::ProcessScreenCaptureAccessRequest(
const bool system_audio_capture_requested =
request.audio_type == content::MEDIA_SYSTEM_AUDIO_CAPTURE;
#if defined(USE_CRAS)
#if defined(USE_CRAS) || defined(OS_WIN)
const bool system_audio_capture_supported = true;
#else
const bool system_audio_capture_supported = false;
......@@ -343,11 +340,11 @@ void MediaCaptureDevicesDispatcher::ProcessScreenCaptureAccessRequest(
devices.push_back(content::MediaStreamDevice(
content::MEDIA_DESKTOP_VIDEO_CAPTURE, media_id.ToString(), "Screen"));
if (system_audio_capture_requested) {
#if defined(USE_CRAS)
#if defined(USE_CRAS) || defined(OS_WIN)
// Use the special loopback device ID for system audio capture.
devices.push_back(content::MediaStreamDevice(
content::MEDIA_SYSTEM_AUDIO_CAPTURE,
media::AudioManagerCras::kLoopbackDeviceId,
media::AudioManagerBase::kLoopbackInputDeviceId,
"System Audio"));
#endif
}
......
......@@ -34,6 +34,7 @@ static const int kMaxInputChannels = 2;
const char AudioManagerBase::kDefaultDeviceName[] = "Default";
const char AudioManagerBase::kDefaultDeviceId[] = "default";
const char AudioManagerBase::kLoopbackInputDeviceId[] = "loopback";
struct AudioManagerBase::DispatcherParams {
DispatcherParams(const AudioParameters& input,
......
......@@ -32,11 +32,24 @@ class AudioOutputDispatcher;
// AudioManagerBase provides AudioManager functions common for all platforms.
class MEDIA_EXPORT AudioManagerBase : public AudioManager {
public:
// TODO(sergeyu): The constants below belong to AudioManager interface, not
// to the base implementation.
// Name of the generic "default" device.
static const char kDefaultDeviceName[];
// Unique Id of the generic "default" device.
static const char kDefaultDeviceId[];
// Input device ID used to capture the default system playback stream. When
// this device ID is passed to MakeAudioInputStream() the returned
// AudioInputStream will be capturing audio currently being played on the
// default playback device. At the moment this feature is supported only on
// some platforms. AudioInputStream::Initialize() will return an error on
// platforms that don't support it. GetInputStreamParameters() must be used
// to get the parameters of the loopback device before creating a loopback
// stream, otherwise stream initialization may fail.
static const char kLoopbackInputDeviceId[];
virtual ~AudioManagerBase();
virtual scoped_refptr<base::MessageLoopProxy> GetMessageLoop() OVERRIDE;
......
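For reference, a minimal usage sketch of the new device ID (not part of this change; it assumes the AudioManager::Get(), GetInputStreamParameters() and MakeAudioInputStream() entry points of media::AudioManager, and the helper name, error handling and threading details are illustrative only):

#include "media/audio/audio_io.h"
#include "media/audio/audio_manager.h"
#include "media/audio/audio_manager_base.h"

// Illustrative sketch only: open and start a system-audio loopback stream.
// |callback| is a caller-provided AudioInputStream::AudioInputCallback.
media::AudioInputStream* OpenLoopbackStream(
    media::AudioInputStream::AudioInputCallback* callback) {
  media::AudioManager* manager = media::AudioManager::Get();
  // Per the comment above, query the loopback device parameters first.
  media::AudioParameters params = manager->GetInputStreamParameters(
      media::AudioManagerBase::kLoopbackInputDeviceId);
  media::AudioInputStream* stream = manager->MakeAudioInputStream(
      params, media::AudioManagerBase::kLoopbackInputDeviceId);
  if (!stream)
    return NULL;
  if (!stream->Open()) {  // Fails on platforms without loopback support.
    stream->Close();
    return NULL;
  }
  stream->Start(callback);
  return stream;
}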
......@@ -31,8 +31,6 @@ static const int kMaxOutputStreams = 50;
// Default sample rate for input and output streams.
static const int kDefaultSampleRate = 48000;
const char AudioManagerCras::kLoopbackDeviceId[] = "loopback";
bool AudioManagerCras::HasAudioOutputDevices() {
return true;
}
......
......@@ -15,10 +15,6 @@ namespace media {
class MEDIA_EXPORT AudioManagerCras : public AudioManagerBase {
public:
// Unique ID of the "loopback" input device. This device captures post-mix,
// pre-DSP system audio.
static const char kLoopbackDeviceId[];
AudioManagerCras();
// AudioManager implementation.
......
......@@ -26,9 +26,8 @@ CrasInputStream::CrasInputStream(const AudioParameters& params,
params_(params),
started_(false),
stream_id_(0),
stream_direction_(device_id == AudioManagerCras::kLoopbackDeviceId
? CRAS_STREAM_POST_MIX_PRE_DSP
: CRAS_STREAM_INPUT) {
stream_direction_(device_id == AudioManagerBase::kLoopbackInputDeviceId ?
CRAS_STREAM_POST_MIX_PRE_DSP : CRAS_STREAM_INPUT) {
DCHECK(audio_manager_);
}
......
......@@ -103,9 +103,8 @@ bool WASAPIAudioInputStream::Open() {
// Verify that the selected audio endpoint supports the specified format
// set during construction.
if (!DesiredFormatIsSupported()) {
if (!DesiredFormatIsSupported())
return false;
}
// Initialize the audio stream between the client and the device using
// shared mode and the lowest possible glitch-free latency.
......@@ -141,6 +140,9 @@ void WASAPIAudioInputStream::Start(AudioInputCallback* callback) {
HRESULT hr = audio_client_->Start();
DLOG_IF(ERROR, FAILED(hr)) << "Failed to start input streaming.";
if (SUCCEEDED(hr) && audio_render_client_for_loopback_)
hr = audio_render_client_for_loopback_->Start();
started_ = SUCCEEDED(hr);
}
......@@ -276,6 +278,10 @@ HRESULT WASAPIAudioInputStream::GetMixFormat(const std::string& device_id,
// Retrieve the default capture audio endpoint.
hr = enumerator->GetDefaultAudioEndpoint(eCapture, eConsole,
endpoint_device.Receive());
} else if (device_id == AudioManagerBase::kLoopbackInputDeviceId) {
// Capture the default playback stream.
hr = enumerator->GetDefaultAudioEndpoint(eRender, eConsole,
endpoint_device.Receive());
} else {
// Retrieve a capture endpoint device that is specified by an endpoint
// device-identification string.
......@@ -454,42 +460,44 @@ void WASAPIAudioInputStream::HandleError(HRESULT err) {
HRESULT WASAPIAudioInputStream::SetCaptureDevice() {
ScopedComPtr<IMMDeviceEnumerator> enumerator;
HRESULT hr = CoCreateInstance(__uuidof(MMDeviceEnumerator),
NULL,
CLSCTX_INPROC_SERVER,
__uuidof(IMMDeviceEnumerator),
enumerator.ReceiveVoid());
if (SUCCEEDED(hr)) {
// Retrieve the IMMDevice by using the specified role or the specified
// unique endpoint device-identification string.
// TODO(henrika): possibly add support for the eCommunications as well.
if (device_id_ == AudioManagerBase::kDefaultDeviceId) {
// Retrieve the default capture audio endpoint for the specified role.
// Note that, in Windows Vista, the MMDevice API supports device roles
// but the system-supplied user interface programs do not.
hr = enumerator->GetDefaultAudioEndpoint(eCapture,
eConsole,
endpoint_device_.Receive());
} else {
// Retrieve a capture endpoint device that is specified by an endpoint
// device-identification string.
hr = enumerator->GetDevice(UTF8ToUTF16(device_id_).c_str(),
endpoint_device_.Receive());
}
HRESULT hr = enumerator.CreateInstance(__uuidof(MMDeviceEnumerator),
NULL, CLSCTX_INPROC_SERVER);
if (FAILED(hr))
return hr;
if (FAILED(hr))
return hr;
// Retrieve the IMMDevice by using the specified role or the specified
// unique endpoint device-identification string.
// TODO(henrika): possibly add support for the eCommunications as well.
if (device_id_ == AudioManagerBase::kDefaultDeviceId) {
// Retrieve the default capture audio endpoint for the specified role.
// Note that, in Windows Vista, the MMDevice API supports device roles
// but the system-supplied user interface programs do not.
hr = enumerator->GetDefaultAudioEndpoint(eCapture, eConsole,
endpoint_device_.Receive());
} else if (device_id_ == AudioManagerBase::kLoopbackInputDeviceId) {
// Capture the default playback stream.
hr = enumerator->GetDefaultAudioEndpoint(eRender, eConsole,
endpoint_device_.Receive());
} else {
// Retrieve a capture endpoint device that is specified by an endpoint
// device-identification string.
hr = enumerator->GetDevice(UTF8ToUTF16(device_id_).c_str(),
endpoint_device_.Receive());
}
// Verify that the audio endpoint device is active, i.e., the audio
// adapter that connects to the endpoint device is present and enabled.
DWORD state = DEVICE_STATE_DISABLED;
hr = endpoint_device_->GetState(&state);
if (SUCCEEDED(hr)) {
if (!(state & DEVICE_STATE_ACTIVE)) {
DLOG(ERROR) << "Selected capture device is not active.";
hr = E_ACCESSDENIED;
}
}
if (FAILED(hr))
return hr;
// Verify that the audio endpoint device is active, i.e., the audio
// adapter that connects to the endpoint device is present and enabled.
DWORD state = DEVICE_STATE_DISABLED;
hr = endpoint_device_->GetState(&state);
if (FAILED(hr))
return hr;
if (!(state & DEVICE_STATE_ACTIVE)) {
DLOG(ERROR) << "Selected capture device is not active.";
hr = E_ACCESSDENIED;
}
return hr;
......@@ -565,16 +573,25 @@ bool WASAPIAudioInputStream::DesiredFormatIsSupported() {
}
HRESULT WASAPIAudioInputStream::InitializeAudioEngine() {
DWORD flags;
// Use event-driven mode only for regular input devices. For loopback the
// EVENTCALLBACK flag is specified when initializing
// |audio_render_client_for_loopback_|.
if (device_id_ == AudioManagerBase::kLoopbackInputDeviceId) {
flags = AUDCLNT_STREAMFLAGS_LOOPBACK | AUDCLNT_STREAMFLAGS_NOPERSIST;
} else {
flags =
AUDCLNT_STREAMFLAGS_EVENTCALLBACK | AUDCLNT_STREAMFLAGS_NOPERSIST;
}
// Initialize the audio stream between the client and the device.
// We connect indirectly through the audio engine by using shared mode
// and WASAPI is initialized in an event driven mode.
// We connect indirectly through the audio engine by using shared mode.
// Note that |hnsBufferDuration| is set to 0, which ensures that the
// buffer is never smaller than the minimum buffer size needed to ensure
// that glitches do not occur between the periodic processing passes.
// This setting should lead to the lowest possible latency.
HRESULT hr = audio_client_->Initialize(AUDCLNT_SHAREMODE_SHARED,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK |
AUDCLNT_STREAMFLAGS_NOPERSIST,
flags,
0, // hnsBufferDuration
0,
&format_,
......@@ -590,6 +607,7 @@ HRESULT WASAPIAudioInputStream::InitializeAudioEngine() {
hr = audio_client_->GetBufferSize(&endpoint_buffer_size_frames_);
if (FAILED(hr))
return hr;
DVLOG(1) << "endpoint buffer size: " << endpoint_buffer_size_frames_
<< " [frames]";
......@@ -618,9 +636,41 @@ HRESULT WASAPIAudioInputStream::InitializeAudioEngine() {
}
#endif
// Set the event handle that the audio engine will signal each time
// a buffer becomes ready to be processed by the client.
hr = audio_client_->SetEventHandle(audio_samples_ready_event_.Get());
// Set the event handle that the audio engine will signal each time a buffer
// becomes ready to be processed by the client.
//
// In the loopback case the capture device doesn't receive any events, so we
// need to create a separate playback client to get notifications. According
// to MSDN:
//
// A pull-mode capture client does not receive any events when a stream is
// initialized with event-driven buffering and is loopback-enabled. To
// work around this, initialize a render stream in event-driven mode. Each
// time the client receives an event for the render stream, it must signal
// the capture client to run the capture thread that reads the next set of
// samples from the capture endpoint buffer.
//
// http://msdn.microsoft.com/en-us/library/windows/desktop/dd316551(v=vs.85).aspx
if (device_id_ == AudioManagerBase::kLoopbackInputDeviceId) {
hr = endpoint_device_->Activate(
__uuidof(IAudioClient), CLSCTX_INPROC_SERVER, NULL,
audio_render_client_for_loopback_.ReceiveVoid());
if (FAILED(hr))
return hr;
hr = audio_render_client_for_loopback_->Initialize(
AUDCLNT_SHAREMODE_SHARED,
AUDCLNT_STREAMFLAGS_EVENTCALLBACK | AUDCLNT_STREAMFLAGS_NOPERSIST,
0, 0, &format_, NULL);
if (FAILED(hr))
return hr;
hr = audio_render_client_for_loopback_->SetEventHandle(
audio_samples_ready_event_.Get());
} else {
hr = audio_client_->SetEventHandle(audio_samples_ready_event_.Get());
}
if (FAILED(hr))
return hr;
......
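To illustrate the workaround, a simplified sketch of the pull pattern the MSDN note describes (illustrative only; the actual loop is the stream's existing capture thread, which waits on |audio_samples_ready_event_| and is not changed by this patch):

// The render-side client created above signals |audio_samples_ready_event_|
// in event-driven mode; the capture-side client is then drained of all
// pending packets.
while (capturing) {  // |capturing| is an illustrative loop condition.
  WaitForSingleObject(audio_samples_ready_event_.Get(), INFINITE);

  UINT32 packet_frames = 0;
  HRESULT hr = audio_capture_client_->GetNextPacketSize(&packet_frames);
  while (SUCCEEDED(hr) && packet_frames > 0) {
    BYTE* data = NULL;
    UINT32 frames_read = 0;
    DWORD flags = 0;
    hr = audio_capture_client_->GetBuffer(&data, &frames_read, &flags,
                                          NULL, NULL);
    if (FAILED(hr))
      break;
    // ... forward |frames_read| frames from |data| to the sink ...
    audio_capture_client_->ReleaseBuffer(frames_read);
    hr = audio_capture_client_->GetNextPacketSize(&packet_frames);
  }
}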
......@@ -184,6 +184,14 @@ class MEDIA_EXPORT WASAPIAudioInputStream
// an audio stream between an audio application and the audio engine.
base::win::ScopedComPtr<IAudioClient> audio_client_;
// Loopback IAudioClient doesn't support event-driven mode, so a separate
// IAudioClient is needed to receive notifications when data is available in
// the buffer. For loopback input |audio_client_| is used to receive data,
// while |audio_render_client_for_loopback_| is used to get notifications
// when a new buffer is ready. See comment in InitializeAudioEngine() for
// details.
base::win::ScopedComPtr<IAudioClient> audio_render_client_for_loopback_;
// The IAudioCaptureClient interface enables a client to read input data
// from a capture endpoint buffer.
base::win::ScopedComPtr<IAudioCaptureClient> audio_capture_client_;
......