Commit 099278c4 authored by Chandan Padhi, committed by Commit Bot

Add support for video properties in MediaStreamTrack.getCapabilities()

Bug: 293292
Change-Id: Id190bc93a1fc42b6ad5c0d0a313dabc6fe348dce
Reviewed-on: https://chromium-review.googlesource.com/925203
Commit-Queue: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: Philip Jägenstedt <foolip@chromium.org>
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Cr-Commit-Position: refs/heads/master@{#539021}
parent f2b173e5
@@ -263,8 +263,10 @@ VideoCaptureSettings ApplyConstraintsProcessor::SelectVideoSettings(
       blink::mojom::VideoInputDeviceCapabilities::New();
   device_capabilities->device_id =
       current_request_.Track().Source().Id().Ascii();
-  device_capabilities->facing_mode =
-      GetMojoFacingMode(GetCurrentVideoTrack()->FacingMode());
+  device_capabilities->facing_mode = GetMojoFacingMode(
+      GetCurrentVideoSource()
+          ? ToWebFacingMode(GetCurrentVideoSource()->device().video_facing)
+          : blink::WebMediaStreamTrack::FacingMode::kNone);
   device_capabilities->formats = std::move(formats);
   DCHECK(video_source_->GetCurrentCaptureParams());
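For context (not part of the change itself): this renderer path runs when a page calls applyConstraints() on a live video track. A minimal JavaScript sketch of the entry point, with an illustrative constraint value:

    navigator.mediaDevices.getUserMedia({ video: true }).then(stream => {
      const [track] = stream.getVideoTracks();
      // Re-constraining a running track is what ends up in
      // ApplyConstraintsProcessor::SelectVideoSettings() above.
      return track.applyConstraints({ facingMode: 'user' });
    }).catch(e => console.log('constraint rejected: ' + e.name));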
@@ -749,6 +749,20 @@ blink::WebString GetVideoKindForFormat(
              : blink::WebString::FromASCII(kVideoKindColor);
 }
 
+blink::WebMediaStreamTrack::FacingMode ToWebFacingMode(
+    media::VideoFacingMode video_facing) {
+  switch (video_facing) {
+    case media::MEDIA_VIDEO_FACING_NONE:
+      return blink::WebMediaStreamTrack::FacingMode::kNone;
+    case media::MEDIA_VIDEO_FACING_USER:
+      return blink::WebMediaStreamTrack::FacingMode::kUser;
+    case media::MEDIA_VIDEO_FACING_ENVIRONMENT:
+      return blink::WebMediaStreamTrack::FacingMode::kEnvironment;
+    default:
+      return blink::WebMediaStreamTrack::FacingMode::kNone;
+  }
+}
+
 VideoDeviceCaptureCapabilities::VideoDeviceCaptureCapabilities() = default;
 VideoDeviceCaptureCapabilities::VideoDeviceCaptureCapabilities(
     VideoDeviceCaptureCapabilities&& other) = default;
@@ -26,6 +26,9 @@ namespace content {
 blink::WebString CONTENT_EXPORT
 GetVideoKindForFormat(const media::VideoCaptureFormat& format);
 
+blink::WebMediaStreamTrack::FacingMode CONTENT_EXPORT
+ToWebFacingMode(media::VideoFacingMode video_facing);
+
 struct CONTENT_EXPORT VideoDeviceCaptureCapabilities {
   VideoDeviceCaptureCapabilities();
   VideoDeviceCaptureCapabilities(VideoDeviceCaptureCapabilities&& other);
@@ -382,7 +382,7 @@ void MediaStreamVideoTrack::GetSettings(
     settings.frame_rate = format->frame_rate;
     settings.video_kind = GetVideoKindForFormat(*format);
   }
-  settings.facing_mode = FacingMode();
+  settings.facing_mode = ToWebFacingMode(source_->device().video_facing);
   const base::Optional<CameraCalibration> calibration =
       source_->device().camera_calibration;
   if (calibration) {
@@ -393,36 +393,6 @@ void MediaStreamVideoTrack::GetSettings(
   }
 }
 
-blink::WebMediaStreamTrack::FacingMode MediaStreamVideoTrack::FacingMode()
-    const {
-  if (!source_)
-    return blink::WebMediaStreamTrack::FacingMode::kNone;
-  const MediaStreamDevice& device = source_->device();
-#if defined(OS_ANDROID)
-  // On Android, the facing mode is not available in the |video_facing| field,
-  // but is available as part of the label.
-  // TODO(guidou): Remove this code once the |video_facing| field is supported
-  // on Android. See http://crbug.com/672856.
-  if (device.name.find("front") != std::string::npos) {
-    return blink::WebMediaStreamTrack::FacingMode::kUser;
-  } else if (device.name.find("back") != std::string::npos) {
-    return blink::WebMediaStreamTrack::FacingMode::kEnvironment;
-  }
-#endif
-  switch (device.video_facing) {
-    case media::MEDIA_VIDEO_FACING_NONE:
-      return blink::WebMediaStreamTrack::FacingMode::kNone;
-    case media::MEDIA_VIDEO_FACING_USER:
-      return blink::WebMediaStreamTrack::FacingMode::kUser;
-    case media::MEDIA_VIDEO_FACING_ENVIRONMENT:
-      return blink::WebMediaStreamTrack::FacingMode::kEnvironment;
-    default:
-      return blink::WebMediaStreamTrack::FacingMode::kNone;
-  }
-}
-
 void MediaStreamVideoTrack::OnReadyStateChanged(
     blink::WebMediaStreamSource::ReadyState state) {
   DCHECK(main_render_thread_checker_.CalledOnValidThread());
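With the per-track FacingMode() helper gone, getSettings() reports facing mode straight from the device's |video_facing| field via ToWebFacingMode(). A short sketch of what a page observes; whether a value is reported at all depends on the camera:

    navigator.mediaDevices.getUserMedia({ video: true }).then(stream => {
      const settings = stream.getVideoTracks()[0].getSettings();
      // Typically "user" or "environment" for phone cameras; often
      // undefined for external webcams that report no facing.
      console.log(settings.facingMode);
    });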
@@ -97,7 +97,6 @@ class CONTENT_EXPORT MediaStreamVideoTrack : public MediaStreamTrack {
   const VideoTrackAdapterSettings& adapter_settings() const {
     return *adapter_settings_;
   }
-  blink::WebMediaStreamTrack::FacingMode FacingMode() const;
 
   // Setting information about the track size.
   // Called from MediaStreamVideoSource at track initialization.
@@ -47,6 +47,10 @@
 namespace content {
 namespace {
 
+// TODO(c.padhi): Allow frame rates lower than 1Hz,
+// see https://crbug.com/814131.
+const float kMinDeviceCaptureFrameRate = 1.0f;
+
 void CopyFirstString(const blink::StringConstraint& constraint,
                      std::string* destination) {
   if (!constraint.Exact().IsEmpty())
@@ -106,6 +110,31 @@ void SurfaceHardwareEchoCancellationSetting(
   source->SetEchoCancellation(true);
 }
 
+blink::WebMediaStreamSource::Capabilities ComputeCapabilities(
+    const MediaStreamDevice& device,
+    const media::VideoCaptureFormats& formats,
+    bool is_device_capture) {
+  int max_width = 1;
+  int max_height = 1;
+  float min_frame_rate = is_device_capture ? kMinDeviceCaptureFrameRate : 0.0f;
+  float max_frame_rate = min_frame_rate;
+  for (const auto& format : formats) {
+    max_width = std::max(max_width, format.frame_size.width());
+    max_height = std::max(max_height, format.frame_size.height());
+    max_frame_rate = std::max(max_frame_rate, format.frame_rate);
+  }
+  blink::WebMediaStreamSource::Capabilities capabilities;
+  capabilities.device_id = blink::WebString::FromUTF8(device.id);
+  capabilities.width = {1, max_width};
+  capabilities.height = {1, max_height};
+  capabilities.aspect_ratio = {1.0 / max_height,
+                               static_cast<double>(max_width)};
+  capabilities.frame_rate = {min_frame_rate, max_frame_rate};
+  if (is_device_capture)
+    capabilities.facing_mode = ToWebFacingMode(device.video_facing);
+  return capabilities;
+}
+
 }  // namespace
 UserMediaRequest::UserMediaRequest(
@@ -157,12 +186,6 @@ class UserMediaProcessor::RequestInfo
   const AudioCaptureSettings& audio_capture_settings() const {
     return audio_capture_settings_;
   }
-  bool is_audio_content_capture() const {
-    return audio_capture_settings_.HasValue() && is_audio_content_capture_;
-  }
-  bool is_audio_device_capture() const {
-    return audio_capture_settings_.HasValue() && !is_audio_content_capture_;
-  }
   void SetAudioCaptureSettings(const AudioCaptureSettings& settings,
                                bool is_content_capture) {
     DCHECK(settings.HasValue());
@@ -172,6 +195,12 @@ class UserMediaProcessor::RequestInfo
   const VideoCaptureSettings& video_capture_settings() const {
     return video_capture_settings_;
   }
+  bool is_video_content_capture() const {
+    return video_capture_settings_.HasValue() && is_video_content_capture_;
+  }
+  bool is_video_device_capture() const {
+    return video_capture_settings_.HasValue() && !is_video_content_capture_;
+  }
   void SetVideoCaptureSettings(const VideoCaptureSettings& settings,
                                bool is_content_capture) {
     DCHECK(settings.HasValue());
@@ -179,6 +208,31 @@ class UserMediaProcessor::RequestInfo
     video_capture_settings_ = settings;
   }
 
+  void SetDevices(MediaStreamDevices audio_devices,
+                  MediaStreamDevices video_devices) {
+    audio_devices_ = std::move(audio_devices);
+    video_devices_ = std::move(video_devices);
+  }
+
+  void AddVideoFormats(const std::string& device_id,
+                       media::VideoCaptureFormats formats) {
+    video_formats_map_[device_id] = std::move(formats);
+  }
+
+  // Do not store or delete the returned pointer.
+  media::VideoCaptureFormats* GetVideoFormats(const std::string& device_id) {
+    auto it = video_formats_map_.find(device_id);
+    CHECK(it != video_formats_map_.end());
+    return &it->second;
+  }
+
+  const MediaStreamDevices& audio_devices() const { return audio_devices_; }
+  const MediaStreamDevices& video_devices() const { return video_devices_; }
+
+  bool CanStartTracks() const {
+    return video_formats_map_.size() == video_devices_.size();
+  }
+
   blink::WebMediaStream* web_stream() { return &web_stream_; }
 
   const blink::WebUserMediaRequest& web_request() const {
@@ -218,6 +272,9 @@ class UserMediaProcessor::RequestInfo
   // Sources used in this request.
   std::vector<blink::WebMediaStreamSource> sources_;
   std::vector<MediaStreamSource*> sources_waiting_for_callback_;
+  std::map<std::string, media::VideoCaptureFormats> video_formats_map_;
+  MediaStreamDevices audio_devices_;
+  MediaStreamDevices video_devices_;
 };
 // TODO(guidou): Initialize request_result_name_ as a null blink::WebString.
@@ -608,24 +665,40 @@ void UserMediaProcessor::OnStreamGenerated(
     }
   }
 
-  DCHECK(!current_request_info_->web_request().IsNull());
-  blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector(
-      audio_devices.size());
-  CreateAudioTracks(audio_devices,
-                    &audio_track_vector);
-
-  blink::WebVector<blink::WebMediaStreamTrack> video_track_vector(
-      video_devices.size());
-  CreateVideoTracks(video_devices, &video_track_vector);
-
-  blink::WebString blink_id = blink::WebString::FromUTF8(label);
-  current_request_info_->web_stream()->Initialize(blink_id, audio_track_vector,
-                                                  video_track_vector);
-
-  // Wait for the tracks to be started successfully or to fail.
-  current_request_info_->CallbackOnTracksStarted(
-      base::Bind(&UserMediaProcessor::OnCreateNativeTracksCompleted,
-                 weak_factory_.GetWeakPtr(), label));
+  current_request_info_->SetDevices(audio_devices, video_devices);
+
+  if (video_devices.empty()) {
+    StartTracks(label);
+    return;
+  }
+
+  if (current_request_info_->is_video_content_capture()) {
+    for (const auto& video_device : video_devices) {
+      current_request_info_->AddVideoFormats(
+          video_device.id,
+          {current_request_info_->video_capture_settings().Format()});
+    }
+    StartTracks(label);
+    return;
+  }
+
+  for (const auto& video_device : video_devices) {
+    GetMediaDevicesDispatcher()->GetAllVideoInputDeviceFormats(
+        video_device.id,
+        base::BindOnce(&UserMediaProcessor::GotAllVideoInputFormatsForDevice,
+                       weak_factory_.GetWeakPtr(), label, video_device.id));
+  }
+}
+
+void UserMediaProcessor::GotAllVideoInputFormatsForDevice(
+    const std::string& label,
+    const std::string& device_id,
+    const media::VideoCaptureFormats& formats) {
+  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+  DCHECK(current_request_info_);
+  current_request_info_->AddVideoFormats(device_id, formats);
+  if (current_request_info_->CanStartTracks())
+    StartTracks(label);
 }
 void UserMediaProcessor::OnStreamGeneratedForCancelledRequest(
@@ -731,6 +804,9 @@ blink::WebMediaStreamSource UserMediaProcessor::InitializeVideoSourceObject(
     source.SetExtraData(CreateVideoSource(
         device, base::Bind(&UserMediaProcessor::OnLocalSourceStopped,
                            weak_factory_.GetWeakPtr())));
+    source.SetCapabilities(ComputeCapabilities(
+        device, *current_request_info_->GetVideoFormats(device.id),
+        current_request_info_->is_video_device_capture()));
     local_sources_.push_back(source);
   }
   return source;
@@ -834,6 +910,26 @@ MediaStreamVideoSource* UserMediaProcessor::CreateVideoSource(
       current_request_info_->video_capture_settings().capture_params());
 }
 
+void UserMediaProcessor::StartTracks(const std::string& label) {
+  DCHECK(!current_request_info_->web_request().IsNull());
+  blink::WebVector<blink::WebMediaStreamTrack> audio_tracks(
+      current_request_info_->audio_devices().size());
+  CreateAudioTracks(current_request_info_->audio_devices(), &audio_tracks);
+
+  blink::WebVector<blink::WebMediaStreamTrack> video_tracks(
+      current_request_info_->video_devices().size());
+  CreateVideoTracks(current_request_info_->video_devices(), &video_tracks);
+
+  blink::WebString blink_id = blink::WebString::FromUTF8(label);
+  current_request_info_->web_stream()->Initialize(blink_id, audio_tracks,
+                                                  video_tracks);
+
+  // Wait for the tracks to be started successfully or to fail.
+  current_request_info_->CallbackOnTracksStarted(
+      base::BindRepeating(&UserMediaProcessor::OnCreateNativeTracksCompleted,
+                          weak_factory_.GetWeakPtr(), label));
+}
+
 void UserMediaProcessor::CreateVideoTracks(
     const MediaStreamDevices& devices,
     blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks) {
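Taken together: once formats for every video device have arrived, ComputeCapabilities() folds them into min/max ranges and StartTracks() finishes stream initialization. A hedged sketch of the capabilities a page might then see for a hypothetical camera topping out at 1920x1080@30 (all values illustrative):

    navigator.mediaDevices.getUserMedia({ video: true }).then(stream => {
      const caps = stream.getVideoTracks()[0].getCapabilities();
      // Roughly: { deviceId: "...",
      //            width:  { min: 1, max: 1920 },
      //            height: { min: 1, max: 1080 },
      //            aspectRatio: { min: 1 / 1080, max: 1920 },
      //            frameRate: { min: 1, max: 30 },  // 1 Hz device floor
      //            facingMode: ["user"] }
      console.log(caps);
    });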
@@ -146,6 +146,12 @@ class CONTENT_EXPORT UserMediaProcessor
       const std::string& label,
       const MediaStreamDevices& audio_devices,
       const MediaStreamDevices& video_devices);
+
+  void GotAllVideoInputFormatsForDevice(
+      const std::string& label,
+      const std::string& device_id,
+      const media::VideoCaptureFormats& formats);
+
   void OnStreamGenerationFailed(int request_id,
                                 MediaStreamRequestResult result);
@@ -163,8 +169,8 @@ class CONTENT_EXPORT UserMediaProcessor
   // Called when |source| has been stopped from JavaScript.
   void OnLocalSourceStopped(const blink::WebMediaStreamSource& source);
 
-  // Creates a WebKit representation of stream sources based on
-  // |devices| from the MediaStreamDispatcherHost.
+  // Creates a WebKit representation of a stream source based on
+  // |device| from the MediaStreamDispatcherHost.
   blink::WebMediaStreamSource InitializeVideoSourceObject(
       const MediaStreamDevice& device);
@@ -172,6 +178,8 @@ class CONTENT_EXPORT UserMediaProcessor
       const MediaStreamDevice& device,
      bool* is_pending);
 
+  void StartTracks(const std::string& label);
+
   void CreateVideoTracks(
       const MediaStreamDevices& devices,
       blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks);
 <!doctype html>
 <title>MediaStreamTrack GetCapabilities</title>
-<p class="instructions">This test checks for the presence of
-<code>echoCancellation</code> and <code>deviceId</code> fields
-in <code>MediaStreamTrack.getCapabilities()</code> method.</p>
+<p class="instructions">This test checks for the presence of audio and video properties in
+<code>MediaStreamTrack.getCapabilities()</code> method.</p>
 <script src=/resources/testharness.js></script>
 <script src=/resources/testharnessreport.js></script>
 <script>
@@ -13,5 +12,18 @@ in <code>MediaStreamTrack.getCapabilities()</code> method.</p>
     assert_true(undefined !== capabilities.deviceId, "MediaTrackCapabilities's deviceId should exist.");
     assert_true(undefined !== capabilities.echoCancellation, "MediaTrackCapabilities's echoCancellation should exist.");
   });
 });
+
+promise_test(() => {
+  return navigator.mediaDevices.getUserMedia({video: true})
+      .then(stream => {
+        var capabilities = stream.getVideoTracks()[0].getCapabilities();
+        assert_true(undefined !== capabilities.deviceId, "MediaTrackCapabilities's deviceId should exist.");
+        assert_true(undefined !== capabilities.width, "MediaTrackCapabilities's width should exist.");
+        assert_true(undefined !== capabilities.height, "MediaTrackCapabilities's height should exist.");
+        assert_true(undefined !== capabilities.aspectRatio, "MediaTrackCapabilities's aspectRatio should exist.");
+        assert_true(undefined !== capabilities.frameRate, "MediaTrackCapabilities's frameRate should exist.");
+        assert_true(undefined !== capabilities.facingMode, "MediaTrackCapabilities's facingMode should exist.");
+      });
+});
 </script>
@@ -311,10 +311,58 @@ void MediaStreamTrack::getCapabilities(MediaTrackCapabilities& capabilities) {
   auto platform_capabilities = component_->Source()->GetCapabilities();
   capabilities.setDeviceId(platform_capabilities.device_id);
 
-  Vector<bool> echo_cancellation;
-  for (bool value : platform_capabilities.echo_cancellation)
-    echo_cancellation.push_back(value);
-  capabilities.setEchoCancellation(echo_cancellation);
+  if (component_->Source()->GetType() == MediaStreamSource::kTypeAudio) {
+    Vector<bool> echo_cancellation;
+    for (bool value : platform_capabilities.echo_cancellation)
+      echo_cancellation.push_back(value);
+    capabilities.setEchoCancellation(echo_cancellation);
+  }
+
+  if (component_->Source()->GetType() == MediaStreamSource::kTypeVideo) {
+    LongRange width, height;
+    if (platform_capabilities.width.size() == 2) {
+      width.setMin(platform_capabilities.width[0]);
+      width.setMax(platform_capabilities.width[1]);
+    }
+    if (platform_capabilities.height.size() == 2) {
+      height.setMin(platform_capabilities.height[0]);
+      height.setMax(platform_capabilities.height[1]);
+    }
+    capabilities.setWidth(width);
+    capabilities.setHeight(height);
+
+    DoubleRange aspect_ratio, frame_rate;
+    if (platform_capabilities.aspect_ratio.size() == 2) {
+      aspect_ratio.setMin(platform_capabilities.aspect_ratio[0]);
+      aspect_ratio.setMax(platform_capabilities.aspect_ratio[1]);
+    }
+    if (platform_capabilities.frame_rate.size() == 2) {
+      frame_rate.setMin(platform_capabilities.frame_rate[0]);
+      frame_rate.setMax(platform_capabilities.frame_rate[1]);
+    }
+    capabilities.setAspectRatio(aspect_ratio);
+    capabilities.setFrameRate(frame_rate);
+
+    Vector<String> facing_mode;
+    switch (platform_capabilities.facing_mode) {
+      case WebMediaStreamTrack::FacingMode::kUser:
+        facing_mode.push_back("user");
+        break;
+      case WebMediaStreamTrack::FacingMode::kEnvironment:
+        facing_mode.push_back("environment");
+        break;
+      case WebMediaStreamTrack::FacingMode::kLeft:
+        facing_mode.push_back("left");
+        break;
+      case WebMediaStreamTrack::FacingMode::kRight:
+        facing_mode.push_back("right");
+        break;
+      default:
+        break;
+    }
+    capabilities.setFacingMode(facing_mode);
+  }
 }
 
 void MediaStreamTrack::getConstraints(MediaTrackConstraints& constraints) {
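Because getCapabilities() now branches on the source type, audio and video tracks expose disjoint property sets. A quick sketch, assuming a machine with both a microphone and a camera:

    navigator.mediaDevices.getUserMedia({ audio: true, video: true })
        .then(stream => {
          const audioCaps = stream.getAudioTracks()[0].getCapabilities();
          const videoCaps = stream.getVideoTracks()[0].getCapabilities();
          console.log('echoCancellation' in audioCaps);  // audio-only field
          console.log('width' in videoCaps);             // video-only field
        });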
@@ -3,6 +3,11 @@
 // found in the LICENSE file.
 
 dictionary MediaTrackCapabilities {
+    LongRange width;
+    LongRange height;
+    DoubleRange aspectRatio;
+    DoubleRange frameRate;
+    sequence<DOMString> facingMode;
     sequence<boolean> echoCancellation;
     DOMString deviceId;
     // W3C Image Capture API
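One way a page might consume the new dictionary members is to feed the reported maxima back in as constraints; a minimal sketch under the same assumptions as above:

    navigator.mediaDevices.getUserMedia({ video: true }).then(stream => {
      const track = stream.getVideoTracks()[0];
      const caps = track.getCapabilities();
      // Ask the device for the largest frame it claims to support.
      return track.applyConstraints({
        width: { ideal: caps.width.max },
        height: { ideal: caps.height.max },
      });
    });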
@@ -75,7 +75,16 @@ class WebMediaStreamSource {
   };
 
   struct Capabilities {
+    // WebVector is used to store an optional range for the below numeric
+    // fields. All of them should have 0 or 2 values representing min/max.
+    WebVector<long> width;
+    WebVector<long> height;
+    WebVector<double> aspect_ratio;
+    WebVector<double> frame_rate;
     WebVector<bool> echo_cancellation;
+    WebMediaStreamTrack::FacingMode facing_mode =
+        WebMediaStreamTrack::FacingMode::kNone;
     WebString device_id;
   };