Commit 099278c4 authored by Chandan Padhi, committed by Commit Bot

Add support for video properties in MediaStreamTrack.getCapabilities()

Bug: 293292
Change-Id: Id190bc93a1fc42b6ad5c0d0a313dabc6fe348dce
Reviewed-on: https://chromium-review.googlesource.com/925203
Commit-Queue: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: Philip Jägenstedt <foolip@chromium.org>
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Cr-Commit-Position: refs/heads/master@{#539021}
parent f2b173e5
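
Before the per-file hunks, a quick orientation: this CL makes MediaStreamTrack.getCapabilities() report ranged width, height, aspectRatio, and frameRate plus facingMode for video tracks, alongside the existing deviceId. A minimal JS sketch of how a page would observe the new surface (values are illustrative; the real min/max depend on the formats the capture device reports):

// Request a camera track and inspect its capabilities.
navigator.mediaDevices.getUserMedia({video: true}).then(stream => {
  const track = stream.getVideoTracks()[0];
  const caps = track.getCapabilities();
  // Present before this CL:
  console.log(caps.deviceId);
  // Added by this CL (see the MediaTrackCapabilities.idl hunk below):
  console.log(caps.width.min, caps.width.max);    // e.g. 1, <largest supported width>
  console.log(caps.height.min, caps.height.max);
  console.log(caps.aspectRatio.min, caps.aspectRatio.max);
  console.log(caps.frameRate.min, caps.frameRate.max);
  console.log(caps.facingMode);                   // e.g. ["user"] or ["environment"]
  track.stop();
});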
@@ -263,8 +263,10 @@ VideoCaptureSettings ApplyConstraintsProcessor::SelectVideoSettings(
       blink::mojom::VideoInputDeviceCapabilities::New();
   device_capabilities->device_id =
       current_request_.Track().Source().Id().Ascii();
-  device_capabilities->facing_mode =
-      GetMojoFacingMode(GetCurrentVideoTrack()->FacingMode());
+  device_capabilities->facing_mode = GetMojoFacingMode(
+      GetCurrentVideoSource()
+          ? ToWebFacingMode(GetCurrentVideoSource()->device().video_facing)
+          : blink::WebMediaStreamTrack::FacingMode::kNone);
   device_capabilities->formats = std::move(formats);
   DCHECK(video_source_->GetCurrentCaptureParams());
......
@@ -749,6 +749,20 @@ blink::WebString GetVideoKindForFormat(
              : blink::WebString::FromASCII(kVideoKindColor);
 }
 
+blink::WebMediaStreamTrack::FacingMode ToWebFacingMode(
+    media::VideoFacingMode video_facing) {
+  switch (video_facing) {
+    case media::MEDIA_VIDEO_FACING_NONE:
+      return blink::WebMediaStreamTrack::FacingMode::kNone;
+    case media::MEDIA_VIDEO_FACING_USER:
+      return blink::WebMediaStreamTrack::FacingMode::kUser;
+    case media::MEDIA_VIDEO_FACING_ENVIRONMENT:
+      return blink::WebMediaStreamTrack::FacingMode::kEnvironment;
+    default:
+      return blink::WebMediaStreamTrack::FacingMode::kNone;
+  }
+}
+
 VideoDeviceCaptureCapabilities::VideoDeviceCaptureCapabilities() = default;
 VideoDeviceCaptureCapabilities::VideoDeviceCaptureCapabilities(
     VideoDeviceCaptureCapabilities&& other) = default;
......
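
The new ToWebFacingMode() helper above is the single mapping point from media::VideoFacingMode to the Blink-side enum; the strings a page eventually sees ("user", "environment", plus "left"/"right" handled in the MediaStreamTrack.cpp hunk further down) are derived from that enum. A hypothetical JS mirror of the end-to-end mapping, for illustration only:

function facingModeCapability(videoFacing) {
  // media::MEDIA_VIDEO_FACING_USER        -> FacingMode::kUser        -> ["user"]
  // media::MEDIA_VIDEO_FACING_ENVIRONMENT -> FacingMode::kEnvironment -> ["environment"]
  // media::MEDIA_VIDEO_FACING_NONE (and anything unrecognized) -> kNone -> []
  switch (videoFacing) {
    case 'user': return ['user'];
    case 'environment': return ['environment'];
    default: return [];
  }
}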
@@ -26,6 +26,9 @@ namespace content {
 blink::WebString CONTENT_EXPORT
 GetVideoKindForFormat(const media::VideoCaptureFormat& format);
 
+blink::WebMediaStreamTrack::FacingMode CONTENT_EXPORT
+ToWebFacingMode(media::VideoFacingMode video_facing);
+
 struct CONTENT_EXPORT VideoDeviceCaptureCapabilities {
   VideoDeviceCaptureCapabilities();
   VideoDeviceCaptureCapabilities(VideoDeviceCaptureCapabilities&& other);
......
@@ -382,7 +382,7 @@ void MediaStreamVideoTrack::GetSettings(
     settings.frame_rate = format->frame_rate;
     settings.video_kind = GetVideoKindForFormat(*format);
   }
-  settings.facing_mode = FacingMode();
+  settings.facing_mode = ToWebFacingMode(source_->device().video_facing);
   const base::Optional<CameraCalibration> calibration =
       source_->device().camera_calibration;
   if (calibration) {
@@ -393,36 +393,6 @@ void MediaStreamVideoTrack::GetSettings(
   }
 }
 
-blink::WebMediaStreamTrack::FacingMode MediaStreamVideoTrack::FacingMode()
-    const {
-  if (!source_)
-    return blink::WebMediaStreamTrack::FacingMode::kNone;
-
-  const MediaStreamDevice& device = source_->device();
-#if defined(OS_ANDROID)
-  // On Android, the facing mode is not available in the |video_facing| field,
-  // but is available as part of the label.
-  // TODO(guidou): Remove this code once the |video_facing| field is supported
-  // on Android. See http://crbug.com/672856.
-  if (device.name.find("front") != std::string::npos) {
-    return blink::WebMediaStreamTrack::FacingMode::kUser;
-  } else if (device.name.find("back") != std::string::npos) {
-    return blink::WebMediaStreamTrack::FacingMode::kEnvironment;
-  }
-#endif
-  switch (device.video_facing) {
-    case media::MEDIA_VIDEO_FACING_NONE:
-      return blink::WebMediaStreamTrack::FacingMode::kNone;
-    case media::MEDIA_VIDEO_FACING_USER:
-      return blink::WebMediaStreamTrack::FacingMode::kUser;
-    case media::MEDIA_VIDEO_FACING_ENVIRONMENT:
-      return blink::WebMediaStreamTrack::FacingMode::kEnvironment;
-    default:
-      return blink::WebMediaStreamTrack::FacingMode::kNone;
-  }
-}
-
 void MediaStreamVideoTrack::OnReadyStateChanged(
     blink::WebMediaStreamSource::ReadyState state) {
   DCHECK(main_render_thread_checker_.CalledOnValidThread());
......
@@ -97,7 +97,6 @@ class CONTENT_EXPORT MediaStreamVideoTrack : public MediaStreamTrack {
   const VideoTrackAdapterSettings& adapter_settings() const {
     return *adapter_settings_;
   }
-  blink::WebMediaStreamTrack::FacingMode FacingMode() const;
 
   // Setting information about the track size.
   // Called from MediaStreamVideoSource at track initialization.
......
@@ -47,6 +47,10 @@
 namespace content {
 namespace {
 
+// TODO(c.padhi): Allow frame rates lower than 1Hz,
+// see https://crbug.com/814131.
+const float kMinDeviceCaptureFrameRate = 1.0f;
+
 void CopyFirstString(const blink::StringConstraint& constraint,
                      std::string* destination) {
   if (!constraint.Exact().IsEmpty())
@@ -106,6 +110,31 @@ void SurfaceHardwareEchoCancellationSetting(
     source->SetEchoCancellation(true);
 }
 
+blink::WebMediaStreamSource::Capabilities ComputeCapabilities(
+    const MediaStreamDevice& device,
+    const media::VideoCaptureFormats& formats,
+    bool is_device_capture) {
+  int max_width = 1;
+  int max_height = 1;
+  float min_frame_rate = is_device_capture ? kMinDeviceCaptureFrameRate : 0.0f;
+  float max_frame_rate = min_frame_rate;
+  for (const auto& format : formats) {
+    max_width = std::max(max_width, format.frame_size.width());
+    max_height = std::max(max_height, format.frame_size.height());
+    max_frame_rate = std::max(max_frame_rate, format.frame_rate);
+  }
+  blink::WebMediaStreamSource::Capabilities capabilities;
+  capabilities.device_id = blink::WebString::FromUTF8(device.id);
+  capabilities.width = {1, max_width};
+  capabilities.height = {1, max_height};
+  capabilities.aspect_ratio = {1.0 / max_height,
+                               static_cast<double>(max_width)};
+  capabilities.frame_rate = {min_frame_rate, max_frame_rate};
+  if (is_device_capture)
+    capabilities.facing_mode = ToWebFacingMode(device.video_facing);
+  return capabilities;
+}
+
 }  // namespace
 
 UserMediaRequest::UserMediaRequest(
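
To make ComputeCapabilities() above concrete, here is the same computation sketched in JS with two hypothetical formats (the format list and values are assumptions, not from the CL):

// Hypothetical device reporting 640x480@30 and 1920x1080@60, device capture.
const formats = [
  {width: 640, height: 480, frameRate: 30},
  {width: 1920, height: 1080, frameRate: 60},
];
let maxWidth = 1, maxHeight = 1, maxFrameRate = 1;  // kMinDeviceCaptureFrameRate = 1
for (const f of formats) {
  maxWidth = Math.max(maxWidth, f.width);
  maxHeight = Math.max(maxHeight, f.height);
  maxFrameRate = Math.max(maxFrameRate, f.frameRate);
}
const capabilities = {
  width: {min: 1, max: maxWidth},                    // {1, 1920}
  height: {min: 1, max: maxHeight},                  // {1, 1080}
  // Extremes: tallest-narrowest frame (1 x maxHeight) up to
  // widest-shortest frame (maxWidth x 1).
  aspectRatio: {min: 1 / maxHeight, max: maxWidth},  // {1/1080, 1920}
  frameRate: {min: 1, max: maxFrameRate},            // {1, 60}
};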
@@ -157,12 +186,6 @@ class UserMediaProcessor::RequestInfo
   const AudioCaptureSettings& audio_capture_settings() const {
     return audio_capture_settings_;
   }
-  bool is_audio_content_capture() const {
-    return audio_capture_settings_.HasValue() && is_audio_content_capture_;
-  }
-  bool is_audio_device_capture() const {
-    return audio_capture_settings_.HasValue() && !is_audio_content_capture_;
-  }
   void SetAudioCaptureSettings(const AudioCaptureSettings& settings,
                                bool is_content_capture) {
     DCHECK(settings.HasValue());
@@ -172,6 +195,12 @@ class UserMediaProcessor::RequestInfo
   const VideoCaptureSettings& video_capture_settings() const {
     return video_capture_settings_;
   }
+  bool is_video_content_capture() const {
+    return video_capture_settings_.HasValue() && is_video_content_capture_;
+  }
+  bool is_video_device_capture() const {
+    return video_capture_settings_.HasValue() && !is_video_content_capture_;
+  }
   void SetVideoCaptureSettings(const VideoCaptureSettings& settings,
                                bool is_content_capture) {
     DCHECK(settings.HasValue());
@@ -179,6 +208,31 @@ class UserMediaProcessor::RequestInfo
     video_capture_settings_ = settings;
   }
 
+  void SetDevices(MediaStreamDevices audio_devices,
+                  MediaStreamDevices video_devices) {
+    audio_devices_ = std::move(audio_devices);
+    video_devices_ = std::move(video_devices);
+  }
+
+  void AddVideoFormats(const std::string& device_id,
+                       media::VideoCaptureFormats formats) {
+    video_formats_map_[device_id] = std::move(formats);
+  }
+
+  // Do not store or delete the returned pointer.
+  media::VideoCaptureFormats* GetVideoFormats(const std::string& device_id) {
+    auto it = video_formats_map_.find(device_id);
+    CHECK(it != video_formats_map_.end());
+    return &it->second;
+  }
+
+  const MediaStreamDevices& audio_devices() const { return audio_devices_; }
+  const MediaStreamDevices& video_devices() const { return video_devices_; }
+
+  bool CanStartTracks() const {
+    return video_formats_map_.size() == video_devices_.size();
+  }
+
   blink::WebMediaStream* web_stream() { return &web_stream_; }
   const blink::WebUserMediaRequest& web_request() const {
@@ -218,6 +272,9 @@ class UserMediaProcessor::RequestInfo
   // Sources used in this request.
   std::vector<blink::WebMediaStreamSource> sources_;
   std::vector<MediaStreamSource*> sources_waiting_for_callback_;
+  std::map<std::string, media::VideoCaptureFormats> video_formats_map_;
+  MediaStreamDevices audio_devices_;
+  MediaStreamDevices video_devices_;
 };
 
 // TODO(guidou): Initialize request_result_name_ as a null blink::WebString.
@@ -608,24 +665,40 @@ void UserMediaProcessor::OnStreamGenerated(
     }
   }
 
   DCHECK(!current_request_info_->web_request().IsNull());
-  blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector(
-      audio_devices.size());
-  CreateAudioTracks(audio_devices,
-                    &audio_track_vector);
+  current_request_info_->SetDevices(audio_devices, video_devices);
+
+  if (video_devices.empty()) {
+    StartTracks(label);
+    return;
+  }
-  blink::WebVector<blink::WebMediaStreamTrack> video_track_vector(
-      video_devices.size());
-  CreateVideoTracks(video_devices, &video_track_vector);
+  if (current_request_info_->is_video_content_capture()) {
+    for (const auto& video_device : video_devices) {
+      current_request_info_->AddVideoFormats(
+          video_device.id,
+          {current_request_info_->video_capture_settings().Format()});
+    }
+    StartTracks(label);
+    return;
+  }
-  blink::WebString blink_id = blink::WebString::FromUTF8(label);
-  current_request_info_->web_stream()->Initialize(blink_id, audio_track_vector,
-                                                  video_track_vector);
+  for (const auto& video_device : video_devices) {
+    GetMediaDevicesDispatcher()->GetAllVideoInputDeviceFormats(
+        video_device.id,
+        base::BindOnce(&UserMediaProcessor::GotAllVideoInputFormatsForDevice,
+                       weak_factory_.GetWeakPtr(), label, video_device.id));
+  }
+}
-  // Wait for the tracks to be started successfully or to fail.
-  current_request_info_->CallbackOnTracksStarted(
-      base::Bind(&UserMediaProcessor::OnCreateNativeTracksCompleted,
-                 weak_factory_.GetWeakPtr(), label));
+
+void UserMediaProcessor::GotAllVideoInputFormatsForDevice(
+    const std::string& label,
+    const std::string& device_id,
+    const media::VideoCaptureFormats& formats) {
+  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+  DCHECK(current_request_info_);
+  current_request_info_->AddVideoFormats(device_id, formats);
+  if (current_request_info_->CanStartTracks())
+    StartTracks(label);
 }
 
 void UserMediaProcessor::OnStreamGeneratedForCancelledRequest(
@@ -731,6 +804,9 @@ blink::WebMediaStreamSource UserMediaProcessor::InitializeVideoSourceObject(
     source.SetExtraData(CreateVideoSource(
         device, base::Bind(&UserMediaProcessor::OnLocalSourceStopped,
                            weak_factory_.GetWeakPtr())));
+    source.SetCapabilities(ComputeCapabilities(
+        device, *current_request_info_->GetVideoFormats(device.id),
+        current_request_info_->is_video_device_capture()));
     local_sources_.push_back(source);
   }
   return source;
@@ -834,6 +910,26 @@ MediaStreamVideoSource* UserMediaProcessor::CreateVideoSource(
       current_request_info_->video_capture_settings().capture_params());
 }
 
+void UserMediaProcessor::StartTracks(const std::string& label) {
+  DCHECK(!current_request_info_->web_request().IsNull());
+  blink::WebVector<blink::WebMediaStreamTrack> audio_tracks(
+      current_request_info_->audio_devices().size());
+  CreateAudioTracks(current_request_info_->audio_devices(), &audio_tracks);
+
+  blink::WebVector<blink::WebMediaStreamTrack> video_tracks(
+      current_request_info_->video_devices().size());
+  CreateVideoTracks(current_request_info_->video_devices(), &video_tracks);
+
+  blink::WebString blink_id = blink::WebString::FromUTF8(label);
+  current_request_info_->web_stream()->Initialize(blink_id, audio_tracks,
+                                                  video_tracks);
+
+  // Wait for the tracks to be started successfully or to fail.
+  current_request_info_->CallbackOnTracksStarted(
+      base::BindRepeating(&UserMediaProcessor::OnCreateNativeTracksCompleted,
+                          weak_factory_.GetWeakPtr(), label));
+}
+
 void UserMediaProcessor::CreateVideoTracks(
     const MediaStreamDevices& devices,
     blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks) {
......
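
The device-capture path above is a simple barrier: one async format query is issued per video device, and StartTracks() runs once the last reply arrives (CanStartTracks() compares the format-map size against the device count). A hypothetical JS analogue of the same pattern, illustration only:

const videoFormatsMap = new Map();
function gotAllVideoInputFormatsForDevice(videoDevices, deviceId, formats, startTracks) {
  videoFormatsMap.set(deviceId, formats);
  // Barrier: start only when every device has reported its formats.
  if (videoFormatsMap.size === videoDevices.length)
    startTracks();
}

// Usage sketch: one async reply per device, in any order.
// gotAllVideoInputFormatsForDevice(devices, 'cam1', formats1, () => console.log('start'));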
@@ -146,6 +146,12 @@ class CONTENT_EXPORT UserMediaProcessor
       const std::string& label,
       const MediaStreamDevices& audio_devices,
       const MediaStreamDevices& video_devices);
+  void GotAllVideoInputFormatsForDevice(
+      const std::string& label,
+      const std::string& device_id,
+      const media::VideoCaptureFormats& formats);
+
   void OnStreamGenerationFailed(int request_id,
                                 MediaStreamRequestResult result);
......@@ -163,8 +169,8 @@ class CONTENT_EXPORT UserMediaProcessor
// Called when |source| has been stopped from JavaScript.
void OnLocalSourceStopped(const blink::WebMediaStreamSource& source);
// Creates a WebKit representation of stream sources based on
// |devices| from the MediaStreamDispatcherHost.
// Creates a WebKit representation of a stream source based on
// |device| from the MediaStreamDispatcherHost.
blink::WebMediaStreamSource InitializeVideoSourceObject(
const MediaStreamDevice& device);
@@ -172,6 +178,8 @@ class CONTENT_EXPORT UserMediaProcessor
       const MediaStreamDevice& device,
       bool* is_pending);
 
+  void StartTracks(const std::string& label);
+
   void CreateVideoTracks(
       const MediaStreamDevices& devices,
       blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks);
......
 <!doctype html>
 <title>MediaStreamTrack GetCapabilities</title>
-<p class="instructions">This test checks for the presence of
-<code>echoCancellation</code> and <code>deviceId</code> fields
-in <code>MediaStreamTrack.getCapabilities()</code> method.</p>
+<p class="instructions">This test checks for the presence of audio and video properties in
+<code>MediaStreamTrack.getCapabilities()</code> method.</p>
 <script src=/resources/testharness.js></script>
 <script src=/resources/testharnessreport.js></script>
 <script>
@@ -13,5 +12,18 @@ in <code>MediaStreamTrack.getCapabilities()</code> method.</p>
       assert_true(undefined !== capabilities.deviceId, "MediaTrackCapabilities's deviceId should exist.");
       assert_true(undefined !== capabilities.echoCancellation, "MediaTrackCapabilities's echoCancellation should exist.");
     });
 });
+
+promise_test(() => {
+  return navigator.mediaDevices.getUserMedia({video: true})
+    .then(stream => {
+      var capabilities = stream.getVideoTracks()[0].getCapabilities();
+      assert_true(undefined !== capabilities.deviceId, "MediaTrackCapabilities's deviceId should exist.");
+      assert_true(undefined !== capabilities.width, "MediaTrackCapabilities's width should exist.");
+      assert_true(undefined !== capabilities.height, "MediaTrackCapabilities's height should exist.");
+      assert_true(undefined !== capabilities.aspectRatio, "MediaTrackCapabilities's aspectRatio should exist.");
+      assert_true(undefined !== capabilities.frameRate, "MediaTrackCapabilities's frameRate should exist.");
+      assert_true(undefined !== capabilities.facingMode, "MediaTrackCapabilities's facingMode should exist.");
+    });
+});
 </script>
@@ -311,10 +311,58 @@ void MediaStreamTrack::getCapabilities(MediaTrackCapabilities& capabilities) {
   auto platform_capabilities = component_->Source()->GetCapabilities();
   capabilities.setDeviceId(platform_capabilities.device_id);
-  Vector<bool> echo_cancellation;
-  for (bool value : platform_capabilities.echo_cancellation)
-    echo_cancellation.push_back(value);
-  capabilities.setEchoCancellation(echo_cancellation);
+
+  if (component_->Source()->GetType() == MediaStreamSource::kTypeAudio) {
+    Vector<bool> echo_cancellation;
+    for (bool value : platform_capabilities.echo_cancellation)
+      echo_cancellation.push_back(value);
+    capabilities.setEchoCancellation(echo_cancellation);
+  }
+
+  if (component_->Source()->GetType() == MediaStreamSource::kTypeVideo) {
+    LongRange width, height;
+    if (platform_capabilities.width.size() == 2) {
+      width.setMin(platform_capabilities.width[0]);
+      width.setMax(platform_capabilities.width[1]);
+    }
+    if (platform_capabilities.height.size() == 2) {
+      height.setMin(platform_capabilities.height[0]);
+      height.setMax(platform_capabilities.height[1]);
+    }
+    capabilities.setWidth(width);
+    capabilities.setHeight(height);
+
+    DoubleRange aspect_ratio, frame_rate;
+    if (platform_capabilities.aspect_ratio.size() == 2) {
+      aspect_ratio.setMin(platform_capabilities.aspect_ratio[0]);
+      aspect_ratio.setMax(platform_capabilities.aspect_ratio[1]);
+    }
+    if (platform_capabilities.frame_rate.size() == 2) {
+      frame_rate.setMin(platform_capabilities.frame_rate[0]);
+      frame_rate.setMax(platform_capabilities.frame_rate[1]);
+    }
+    capabilities.setAspectRatio(aspect_ratio);
+    capabilities.setFrameRate(frame_rate);
+
+    Vector<String> facing_mode;
+    switch (platform_capabilities.facing_mode) {
+      case WebMediaStreamTrack::FacingMode::kUser:
+        facing_mode.push_back("user");
+        break;
+      case WebMediaStreamTrack::FacingMode::kEnvironment:
+        facing_mode.push_back("environment");
+        break;
+      case WebMediaStreamTrack::FacingMode::kLeft:
+        facing_mode.push_back("left");
+        break;
+      case WebMediaStreamTrack::FacingMode::kRight:
+        facing_mode.push_back("right");
+        break;
+      default:
+        break;
+    }
+    capabilities.setFacingMode(facing_mode);
+  }
 }
 
 void MediaStreamTrack::getConstraints(MediaTrackConstraints& constraints) {
......
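
Note the type gating introduced above: capability members are only populated for the track's source type. A sketch of the observable result (assumes a hypothetical `stream` with one audio and one video track):

const audioCaps = stream.getAudioTracks()[0].getCapabilities();
// -> deviceId and echoCancellation only; no width/height/aspectRatio/frameRate.
const videoCaps = stream.getVideoTracks()[0].getCapabilities();
// -> deviceId, width, height, aspectRatio, frameRate, facingMode; no echoCancellation.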
@@ -3,6 +3,11 @@
 // found in the LICENSE file.
 
 dictionary MediaTrackCapabilities {
+    LongRange width;
+    LongRange height;
+    DoubleRange aspectRatio;
+    DoubleRange frameRate;
+    sequence<DOMString> facingMode;
     sequence<boolean> echoCancellation;
     DOMString deviceId;
 
     // W3C Image Capture API
......
@@ -75,7 +75,16 @@ class WebMediaStreamSource {
   };
 
   struct Capabilities {
+    // WebVector is used to store an optional range for the below numeric
+    // fields. All of them should have 0 or 2 values representing min/max.
+    WebVector<long> width;
+    WebVector<long> height;
+    WebVector<double> aspect_ratio;
+    WebVector<double> frame_rate;
     WebVector<bool> echo_cancellation;
+    WebMediaStreamTrack::FacingMode facing_mode =
+        WebMediaStreamTrack::FacingMode::kNone;
     WebString device_id;
   };
......
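
The "0 or 2 values" comment above fixes the contract between the content layer and Blink: each numeric WebVector either carries exactly {min, max} or is empty, and the getCapabilities() hunk earlier checks size() == 2 before populating the corresponding dictionary range. A sketch of how that surfaces to script (assumes a hypothetical `videoTrack`):

const caps = videoTrack.getCapabilities();
if (caps.width.min !== undefined && caps.width.max !== undefined) {
  console.log(`width range: ${caps.width.min}-${caps.width.max}`);  // vector had 2 values
} else {
  console.log('width range not reported');  // vector was empty; range members left unset
}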