Commit f4b4b2d0 authored by Markus Handell, committed by Commit Bot

VideoCaptureDeviceAVFoundation: move static functionality to a new utils file.

This change prepares for developing new capturer features behind a flag. Moving the static functionality into a separate file allows the capturer implementation to be duplicated, and the creator to instantiate either implementation based on the flag.

Future CLs will add the flag and perform the duplication.
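For illustration only, a minimal sketch of how the creator-side selection could look once the flag and the duplicated implementation exist. The feature name kAVFoundationCaptureV2 and the helper CreateVideoCaptureDeviceAVFoundation below are placeholder assumptions, not part of this CL or of any existing Chromium API; the duplicated class is deliberately left as a comment because it is added in later CLs.

// Hypothetical sketch, not part of this CL. Flag name and helper are
// placeholders; the duplicated capturer class lands in follow-up CLs.
#include "base/feature_list.h"
#import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"

namespace media {

// Placeholder feature guarding the duplicated capturer implementation.
const base::Feature kAVFoundationCaptureV2{"AVFoundationCaptureV2",
                                           base::FEATURE_DISABLED_BY_DEFAULT};

}  // namespace media

// Creator-side selection between the two capturer implementations.
id CreateVideoCaptureDeviceAVFoundation(
    media::VideoCaptureDeviceMac* frame_receiver) {
  if (base::FeatureList::IsEnabled(media::kAVFoundationCaptureV2)) {
    // Once the duplicated implementation exists (added in a later CL),
    // instantiate and return it here instead.
  }
  return [[VideoCaptureDeviceAVFoundation alloc]
      initWithFrameReceiver:frame_receiver];
}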

Bug: chromium:1126690
Change-Id: I8592a2ba81765e999c497eda9a6f09cc644e1423
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2401520
Reviewed-by: Avi Drissman <avi@chromium.org>
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: ccameron <ccameron@chromium.org>
Commit-Queue: Markus Handell <handellm@google.com>
Cr-Commit-Position: refs/heads/master@{#805965}
parent 2adf0823
@@ -164,6 +164,8 @@ component("capture_lib") {
sources += [
"video/mac/video_capture_device_avfoundation_mac.h",
"video/mac/video_capture_device_avfoundation_mac.mm",
"video/mac/video_capture_device_avfoundation_utils_mac.h",
"video/mac/video_capture_device_avfoundation_utils_mac.mm",
"video/mac/video_capture_device_decklink_mac.h",
"video/mac/video_capture_device_decklink_mac.mm",
"video/mac/video_capture_device_factory_mac.h",
@@ -16,23 +16,12 @@
namespace media {
class VideoCaptureDeviceMac;
// Find the best capture format from |formats| for the specified dimensions and
// frame rate. Returns an element of |formats|, or nil.
AVCaptureDeviceFormat* CAPTURE_EXPORT
FindBestCaptureFormat(NSArray<AVCaptureDeviceFormat*>* formats,
int width,
int height,
float frame_rate);
} // namespace media
// Class used by VideoCaptureDeviceMac (VCDM) for video and image capture using
// AVFoundation API. This class lives inside the thread created by its owner
// VCDM.
//
// * Clients (VCDM) should call +deviceNames to fetch the list of devices
// available in the system; this method returns the list of device names that
// have to be used with -setCaptureDevice:.
// * Previous to any use, clients (VCDM) must call -initWithFrameReceiver: to
// initialise an object of this class and register a |frameReceiver_|.
// * Frame receiver registration or removal can also happen via explicit call
@@ -86,13 +75,6 @@ FindBestCaptureFormat(NSArray<AVCaptureDeviceFormat*>* formats,
base::ThreadChecker _main_thread_checker;
}
// Returns a dictionary of capture devices with friendly name and unique id.
+ (NSDictionary*)deviceNames;
// Retrieve the capture supported formats for a given device |descriptor|.
+ (void)getDevice:(const media::VideoCaptureDeviceDescriptor&)descriptor
supportedFormats:(media::VideoCaptureFormats*)formats;
// Initializes the instance and the underlying capture session and registers the
// frame receiver.
- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver;
@@ -19,308 +19,15 @@
#include "base/strings/sys_string_conversions.h"
#include "media/base/timestamp_constants.h"
#include "media/base/video_types.h"
#import "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
#include "media/capture/video/mac/video_capture_device_factory_mac.h"
#include "media/capture/video/mac/video_capture_device_mac.h"
#include "media/capture/video_capture_types.h"
#include "services/video_capture/public/uma/video_capture_service_event.h"
#include "ui/gfx/geometry/size.h"
namespace {
enum MacBookVersions {
OTHER = 0,
MACBOOK_5, // MacBook5.X
MACBOOK_6,
MACBOOK_7,
MACBOOK_8,
MACBOOK_PRO_11, // MacBookPro11.X
MACBOOK_PRO_12,
MACBOOK_PRO_13,
MACBOOK_AIR_5, // MacBookAir5.X
MACBOOK_AIR_6,
MACBOOK_AIR_7,
MACBOOK_AIR_8,
MACBOOK_AIR_3,
MACBOOK_AIR_4,
MACBOOK_4,
MACBOOK_9,
MACBOOK_10,
MACBOOK_PRO_10,
MACBOOK_PRO_9,
MACBOOK_PRO_8,
MACBOOK_PRO_7,
MACBOOK_PRO_6,
MACBOOK_PRO_5,
MAX_MACBOOK_VERSION = MACBOOK_PRO_5
};
MacBookVersions GetMacBookModel(const std::string& model) {
struct {
const char* name;
MacBookVersions version;
} static const kModelToVersion[] = {
{"MacBook4,", MACBOOK_4}, {"MacBook5,", MACBOOK_5},
{"MacBook6,", MACBOOK_6}, {"MacBook7,", MACBOOK_7},
{"MacBook8,", MACBOOK_8}, {"MacBook9,", MACBOOK_9},
{"MacBook10,", MACBOOK_10}, {"MacBookPro5,", MACBOOK_PRO_5},
{"MacBookPro6,", MACBOOK_PRO_6}, {"MacBookPro7,", MACBOOK_PRO_7},
{"MacBookPro8,", MACBOOK_PRO_8}, {"MacBookPro9,", MACBOOK_PRO_9},
{"MacBookPro10,", MACBOOK_PRO_10}, {"MacBookPro11,", MACBOOK_PRO_11},
{"MacBookPro12,", MACBOOK_PRO_12}, {"MacBookPro13,", MACBOOK_PRO_13},
{"MacBookAir3,", MACBOOK_AIR_3}, {"MacBookAir4,", MACBOOK_AIR_4},
{"MacBookAir5,", MACBOOK_AIR_5}, {"MacBookAir6,", MACBOOK_AIR_6},
{"MacBookAir7,", MACBOOK_AIR_7}, {"MacBookAir8,", MACBOOK_AIR_8},
};
for (const auto& entry : kModelToVersion) {
if (base::StartsWith(model, entry.name,
base::CompareCase::INSENSITIVE_ASCII)) {
return entry.version;
}
}
return OTHER;
}
// Add Uma stats for number of detected devices on MacBooks. These are used for
// investigating crbug/582931.
void MaybeWriteUma(int number_of_devices, int number_of_suspended_devices) {
std::string model = base::mac::GetModelIdentifier();
if (!base::StartsWith(model, "MacBook",
base::CompareCase::INSENSITIVE_ASCII)) {
return;
}
static int attempt_since_process_start_counter = 0;
static int device_count_at_last_attempt = 0;
static bool has_seen_zero_device_count = false;
const int attempt_count_since_process_start =
++attempt_since_process_start_counter;
const int retry_count =
media::VideoCaptureDeviceFactoryMac::GetGetDevicesInfoRetryCount();
const int device_count = number_of_devices + number_of_suspended_devices;
UMA_HISTOGRAM_COUNTS_1M("Media.VideoCapture.MacBook.NumberOfDevices",
device_count);
if (device_count == 0) {
UMA_HISTOGRAM_ENUMERATION(
"Media.VideoCapture.MacBook.HardwareVersionWhenNoCamera",
GetMacBookModel(model), MAX_MACBOOK_VERSION + 1);
if (!has_seen_zero_device_count) {
UMA_HISTOGRAM_COUNTS_1M(
"Media.VideoCapture.MacBook.AttemptCountWhenNoCamera",
attempt_count_since_process_start);
has_seen_zero_device_count = true;
}
}
if (attempt_count_since_process_start == 1) {
if (retry_count == 0) {
video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
device_count == 0
? video_capture::uma::
AVF_RECEIVED_ZERO_INFOS_FIRST_TRY_FIRST_ATTEMPT
: video_capture::uma::
AVF_RECEIVED_NONZERO_INFOS_FIRST_TRY_FIRST_ATTEMPT);
} else {
video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
device_count == 0
? video_capture::uma::AVF_RECEIVED_ZERO_INFOS_RETRY
: video_capture::uma::AVF_RECEIVED_NONZERO_INFOS_RETRY);
}
// attempt count > 1
} else if (retry_count == 0) {
video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
device_count == 0
? video_capture::uma::
AVF_RECEIVED_ZERO_INFOS_FIRST_TRY_NONFIRST_ATTEMPT
: video_capture::uma::
AVF_RECEIVED_NONZERO_INFOS_FIRST_TRY_NONFIRST_ATTEMPT);
}
if (attempt_count_since_process_start > 1 &&
device_count != device_count_at_last_attempt) {
video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
device_count == 0
? video_capture::uma::AVF_DEVICE_COUNT_CHANGED_FROM_POSITIVE_TO_ZERO
: video_capture::uma::
AVF_DEVICE_COUNT_CHANGED_FROM_ZERO_TO_POSITIVE);
}
device_count_at_last_attempt = device_count;
}
// This function translates Mac Core Video pixel formats to Chromium pixel
// formats.
media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
switch (code) {
case kCVPixelFormatType_422YpCbCr8:
return media::PIXEL_FORMAT_UYVY;
case kCMPixelFormat_422YpCbCr8_yuvs:
return media::PIXEL_FORMAT_YUY2;
case kCMVideoCodecType_JPEG_OpenDML:
return media::PIXEL_FORMAT_MJPEG;
default:
return media::PIXEL_FORMAT_UNKNOWN;
}
}
// Extracts |base_address| and |length| out of a SampleBuffer.
void ExtractBaseAddressAndLength(char** base_address,
size_t* length,
CMSampleBufferRef sample_buffer) {
CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(sample_buffer);
DCHECK(block_buffer);
size_t length_at_offset;
const OSStatus status = CMBlockBufferGetDataPointer(
block_buffer, 0, &length_at_offset, length, base_address);
DCHECK_EQ(noErr, status);
// Expect the (M)JPEG data to be available as a contiguous reference, i.e.
// not covered by multiple memory blocks.
DCHECK_EQ(length_at_offset, *length);
}
} // anonymous namespace
namespace media {
// Find the best capture format from |formats| for the specified dimensions and
// frame rate. Returns an element of |formats|, or nil.
AVCaptureDeviceFormat* FindBestCaptureFormat(
NSArray<AVCaptureDeviceFormat*>* formats,
int width,
int height,
float frame_rate) {
AVCaptureDeviceFormat* bestCaptureFormat = nil;
VideoPixelFormat bestPixelFormat = VideoPixelFormat::PIXEL_FORMAT_UNKNOWN;
bool bestMatchesFrameRate = false;
Float64 bestMaxFrameRate = 0;
for (AVCaptureDeviceFormat* captureFormat in formats) {
const FourCharCode fourcc =
CMFormatDescriptionGetMediaSubType([captureFormat formatDescription]);
VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat(fourcc);
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(
[captureFormat formatDescription]);
Float64 maxFrameRate = 0;
bool matchesFrameRate = false;
for (AVFrameRateRange* frameRateRange in
[captureFormat videoSupportedFrameRateRanges]) {
maxFrameRate = std::max(maxFrameRate, [frameRateRange maxFrameRate]);
matchesFrameRate |= [frameRateRange minFrameRate] <= frame_rate &&
frame_rate <= [frameRateRange maxFrameRate];
}
// If the pixel format is unsupported by our code, then it is not useful.
if (pixelFormat == VideoPixelFormat::PIXEL_FORMAT_UNKNOWN)
continue;
// If our CMSampleBuffers will have a different size than the native
// capture, then we will not be the fast path.
if (dimensions.width != width || dimensions.height != height)
continue;
// Prefer a capture format that handles the requested framerate to one
// that doesn't.
if (bestCaptureFormat) {
if (bestMatchesFrameRate && !matchesFrameRate)
continue;
if (matchesFrameRate && !bestMatchesFrameRate)
bestCaptureFormat = nil;
}
// Prefer a capture format with a lower maximum framerate, under the
// assumption that that may have lower power consumption.
if (bestCaptureFormat) {
if (bestMaxFrameRate < maxFrameRate)
continue;
if (maxFrameRate < bestMaxFrameRate)
bestCaptureFormat = nil;
}
// Finally, compare according to Chromium preference.
if (bestCaptureFormat) {
if (VideoCaptureFormat::ComparePixelFormatPreference(bestPixelFormat,
pixelFormat)) {
continue;
}
}
bestCaptureFormat = captureFormat;
bestPixelFormat = pixelFormat;
bestMaxFrameRate = maxFrameRate;
bestMatchesFrameRate = matchesFrameRate;
}
VLOG(1) << "Selecting AVCaptureDevice format "
<< VideoPixelFormatToString(bestPixelFormat);
return bestCaptureFormat;
}
} // namespace media
@implementation VideoCaptureDeviceAVFoundation
#pragma mark Class methods
+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
// At this stage we already know that AVFoundation is supported and the whole
// library is loaded and initialised, by the device monitoring.
NSArray* devices = [AVCaptureDevice devices];
int number_of_suspended_devices = 0;
for (AVCaptureDevice* device in devices) {
if ([device hasMediaType:AVMediaTypeVideo] ||
[device hasMediaType:AVMediaTypeMuxed]) {
if ([device isSuspended]) {
++number_of_suspended_devices;
continue;
}
DeviceNameAndTransportType* nameAndTransportType =
[[[DeviceNameAndTransportType alloc]
initWithName:[device localizedName]
transportType:[device transportType]] autorelease];
[deviceNames setObject:nameAndTransportType forKey:[device uniqueID]];
}
}
MaybeWriteUma([deviceNames count], number_of_suspended_devices);
}
+ (NSDictionary*)deviceNames {
NSMutableDictionary* deviceNames =
[[[NSMutableDictionary alloc] init] autorelease];
// The device name retrieval is not going to happen in the main thread, and
// this might cause instabilities (it did in QTKit), so keep an eye here.
[self getDeviceNames:deviceNames];
return deviceNames;
}
+ (void)getDevice:(const media::VideoCaptureDeviceDescriptor&)descriptor
supportedFormats:(media::VideoCaptureFormats*)formats {
NSArray* devices = [AVCaptureDevice devices];
AVCaptureDevice* device = nil;
for (device in devices) {
if (base::SysNSStringToUTF8([device uniqueID]) == descriptor.device_id)
break;
}
if (device == nil)
return;
for (AVCaptureDeviceFormat* format in device.formats) {
// MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType
// as well according to CMFormatDescription.h
const media::VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat(
CMFormatDescriptionGetMediaSubType([format formatDescription]));
CMVideoDimensions dimensions =
CMVideoFormatDescriptionGetDimensions([format formatDescription]);
for (AVFrameRateRange* frameRate in
[format videoSupportedFrameRateRanges]) {
media::VideoCaptureFormat format(
gfx::Size(dimensions.width, dimensions.height),
frameRate.maxFrameRate, pixelFormat);
formats->push_back(format);
DVLOG(2) << descriptor.display_name() << " "
<< media::VideoCaptureFormat::ToString(format);
}
}
}
#pragma mark Public methods
- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
@@ -546,7 +253,7 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
char* baseAddress = 0;
size_t length = 0;
ExtractBaseAddressAndLength(&baseAddress, &length, sampleBuffer);
media::ExtractBaseAddressAndLength(&baseAddress, &length, sampleBuffer);
_frameReceiver->OnPhotoTaken(reinterpret_cast<uint8_t*>(baseAddress),
length, "image/jpeg");
};
@@ -571,7 +278,7 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
CMVideoFormatDescriptionGetDimensions(formatDescription);
const media::VideoCaptureFormat captureFormat(
gfx::Size(dimensions.width, dimensions.height), _frameRate,
FourCCToChromiumPixelFormat(fourcc));
media::FourCCToChromiumPixelFormat(fourcc));
gfx::ColorSpace colorSpace;
// We have certain format expectation for capture output:
@@ -603,7 +310,7 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
}
}
if (!videoFrame) {
ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer);
media::ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer);
}
{
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_DEVICE_AVFOUNDATION_UTILS_MAC_H_
#define MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_DEVICE_AVFOUNDATION_UTILS_MAC_H_
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#include "base/mac/scoped_nsobject.h"
#include "media/capture/video/video_capture_device_descriptor.h"
#include "media/capture/video_capture_types.h"
namespace media {
// Find the best capture format from |formats| for the specified dimensions and
// frame rate. Returns an element of |formats|, or nil.
AVCaptureDeviceFormat* CAPTURE_EXPORT
FindBestCaptureFormat(NSArray<AVCaptureDeviceFormat*>* formats,
int width,
int height,
float frame_rate);
// Returns a dictionary of capture devices with friendly name and unique id.
// VideoCaptureDeviceMac should call this function to fetch the list of devices
// available in the system; this method returns the list of device names that
// have to be used with -[VideoCaptureDeviceAVFoundation setCaptureDevice:].
base::scoped_nsobject<NSDictionary> GetVideoCaptureDeviceNames();
// Retrieve the capture supported formats for a given device |descriptor|.
media::VideoCaptureFormats GetDeviceSupportedFormats(
const media::VideoCaptureDeviceDescriptor& descriptor);
// This function translates Mac Core Video pixel formats to Chromium pixel
// formats.
media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code);
// Extracts |base_address| and |length| out of a SampleBuffer.
void ExtractBaseAddressAndLength(char** base_address,
size_t* length,
CMSampleBufferRef sample_buffer);
} // namespace media
#endif // MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_DEVICE_AVFOUNDATION_UTILS_MAC_H_
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
#include "base/mac/mac_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/strings/string_util.h"
#include "base/strings/sys_string_conversions.h"
#include "media/capture/video/mac/video_capture_device_factory_mac.h"
#include "media/capture/video/mac/video_capture_device_mac.h"
#include "media/capture/video_capture_types.h"
#include "services/video_capture/public/uma/video_capture_service_event.h"
namespace media {
namespace {
enum MacBookVersions {
OTHER = 0,
MACBOOK_5, // MacBook5.X
MACBOOK_6,
MACBOOK_7,
MACBOOK_8,
MACBOOK_PRO_11, // MacBookPro11.X
MACBOOK_PRO_12,
MACBOOK_PRO_13,
MACBOOK_AIR_5, // MacBookAir5.X
MACBOOK_AIR_6,
MACBOOK_AIR_7,
MACBOOK_AIR_8,
MACBOOK_AIR_3,
MACBOOK_AIR_4,
MACBOOK_4,
MACBOOK_9,
MACBOOK_10,
MACBOOK_PRO_10,
MACBOOK_PRO_9,
MACBOOK_PRO_8,
MACBOOK_PRO_7,
MACBOOK_PRO_6,
MACBOOK_PRO_5,
MAX_MACBOOK_VERSION = MACBOOK_PRO_5
};
MacBookVersions GetMacBookModel(const std::string& model) {
struct {
const char* name;
MacBookVersions version;
} static const kModelToVersion[] = {
{"MacBook4,", MACBOOK_4}, {"MacBook5,", MACBOOK_5},
{"MacBook6,", MACBOOK_6}, {"MacBook7,", MACBOOK_7},
{"MacBook8,", MACBOOK_8}, {"MacBook9,", MACBOOK_9},
{"MacBook10,", MACBOOK_10}, {"MacBookPro5,", MACBOOK_PRO_5},
{"MacBookPro6,", MACBOOK_PRO_6}, {"MacBookPro7,", MACBOOK_PRO_7},
{"MacBookPro8,", MACBOOK_PRO_8}, {"MacBookPro9,", MACBOOK_PRO_9},
{"MacBookPro10,", MACBOOK_PRO_10}, {"MacBookPro11,", MACBOOK_PRO_11},
{"MacBookPro12,", MACBOOK_PRO_12}, {"MacBookPro13,", MACBOOK_PRO_13},
{"MacBookAir3,", MACBOOK_AIR_3}, {"MacBookAir4,", MACBOOK_AIR_4},
{"MacBookAir5,", MACBOOK_AIR_5}, {"MacBookAir6,", MACBOOK_AIR_6},
{"MacBookAir7,", MACBOOK_AIR_7}, {"MacBookAir8,", MACBOOK_AIR_8},
};
for (const auto& entry : kModelToVersion) {
if (base::StartsWith(model, entry.name,
base::CompareCase::INSENSITIVE_ASCII)) {
return entry.version;
}
}
return OTHER;
}
// Add Uma stats for number of detected devices on MacBooks. These are used for
// investigating crbug/582931.
void MaybeWriteUma(int number_of_devices, int number_of_suspended_devices) {
std::string model = base::mac::GetModelIdentifier();
if (!base::StartsWith(model, "MacBook",
base::CompareCase::INSENSITIVE_ASCII)) {
return;
}
static int attempt_since_process_start_counter = 0;
static int device_count_at_last_attempt = 0;
static bool has_seen_zero_device_count = false;
const int attempt_count_since_process_start =
++attempt_since_process_start_counter;
const int retry_count =
media::VideoCaptureDeviceFactoryMac::GetGetDevicesInfoRetryCount();
const int device_count = number_of_devices + number_of_suspended_devices;
UMA_HISTOGRAM_COUNTS_1M("Media.VideoCapture.MacBook.NumberOfDevices",
device_count);
if (device_count == 0) {
UMA_HISTOGRAM_ENUMERATION(
"Media.VideoCapture.MacBook.HardwareVersionWhenNoCamera",
GetMacBookModel(model), MAX_MACBOOK_VERSION + 1);
if (!has_seen_zero_device_count) {
UMA_HISTOGRAM_COUNTS_1M(
"Media.VideoCapture.MacBook.AttemptCountWhenNoCamera",
attempt_count_since_process_start);
has_seen_zero_device_count = true;
}
}
if (attempt_count_since_process_start == 1) {
if (retry_count == 0) {
video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
device_count == 0
? video_capture::uma::
AVF_RECEIVED_ZERO_INFOS_FIRST_TRY_FIRST_ATTEMPT
: video_capture::uma::
AVF_RECEIVED_NONZERO_INFOS_FIRST_TRY_FIRST_ATTEMPT);
} else {
video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
device_count == 0
? video_capture::uma::AVF_RECEIVED_ZERO_INFOS_RETRY
: video_capture::uma::AVF_RECEIVED_NONZERO_INFOS_RETRY);
}
// attempt count > 1
} else if (retry_count == 0) {
video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
device_count == 0
? video_capture::uma::
AVF_RECEIVED_ZERO_INFOS_FIRST_TRY_NONFIRST_ATTEMPT
: video_capture::uma::
AVF_RECEIVED_NONZERO_INFOS_FIRST_TRY_NONFIRST_ATTEMPT);
}
if (attempt_count_since_process_start > 1 &&
device_count != device_count_at_last_attempt) {
video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
device_count == 0
? video_capture::uma::AVF_DEVICE_COUNT_CHANGED_FROM_POSITIVE_TO_ZERO
: video_capture::uma::
AVF_DEVICE_COUNT_CHANGED_FROM_ZERO_TO_POSITIVE);
}
device_count_at_last_attempt = device_count;
}
base::scoped_nsobject<NSDictionary> GetDeviceNames() {
// At this stage we already know that AVFoundation is supported and the whole
// library is loaded and initialised, by the device monitoring.
NSMutableDictionary* deviceNames = [[NSMutableDictionary alloc] init];
NSArray* devices = [AVCaptureDevice devices];
int number_of_suspended_devices = 0;
for (AVCaptureDevice* device in devices) {
if ([device hasMediaType:AVMediaTypeVideo] ||
[device hasMediaType:AVMediaTypeMuxed]) {
if ([device isSuspended]) {
++number_of_suspended_devices;
continue;
}
DeviceNameAndTransportType* nameAndTransportType =
[[[DeviceNameAndTransportType alloc]
initWithName:[device localizedName]
transportType:[device transportType]] autorelease];
[deviceNames setObject:nameAndTransportType forKey:[device uniqueID]];
}
}
MaybeWriteUma([deviceNames count], number_of_suspended_devices);
return base::scoped_nsobject<NSDictionary>(deviceNames,
base::scoped_policy::ASSUME);
}
} // namespace
AVCaptureDeviceFormat* FindBestCaptureFormat(
NSArray<AVCaptureDeviceFormat*>* formats,
int width,
int height,
float frame_rate) {
AVCaptureDeviceFormat* bestCaptureFormat = nil;
VideoPixelFormat bestPixelFormat = VideoPixelFormat::PIXEL_FORMAT_UNKNOWN;
bool bestMatchesFrameRate = false;
Float64 bestMaxFrameRate = 0;
for (AVCaptureDeviceFormat* captureFormat in formats) {
const FourCharCode fourcc =
CMFormatDescriptionGetMediaSubType([captureFormat formatDescription]);
VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat(fourcc);
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(
[captureFormat formatDescription]);
Float64 maxFrameRate = 0;
bool matchesFrameRate = false;
for (AVFrameRateRange* frameRateRange in
[captureFormat videoSupportedFrameRateRanges]) {
maxFrameRate = std::max(maxFrameRate, [frameRateRange maxFrameRate]);
matchesFrameRate |= [frameRateRange minFrameRate] <= frame_rate &&
frame_rate <= [frameRateRange maxFrameRate];
}
// If the pixel format is unsupported by our code, then it is not useful.
if (pixelFormat == VideoPixelFormat::PIXEL_FORMAT_UNKNOWN)
continue;
// If our CMSampleBuffers will have a different size than the native
// capture, then we will not be the fast path.
if (dimensions.width != width || dimensions.height != height)
continue;
// Prefer a capture format that handles the requested framerate to one
// that doesn't.
if (bestCaptureFormat) {
if (bestMatchesFrameRate && !matchesFrameRate)
continue;
if (matchesFrameRate && !bestMatchesFrameRate)
bestCaptureFormat = nil;
}
// Prefer a capture format with a lower maximum framerate, under the
// assumption that that may have lower power consumption.
if (bestCaptureFormat) {
if (bestMaxFrameRate < maxFrameRate)
continue;
if (maxFrameRate < bestMaxFrameRate)
bestCaptureFormat = nil;
}
// Finally, compare according to Chromium preference.
if (bestCaptureFormat) {
if (VideoCaptureFormat::ComparePixelFormatPreference(bestPixelFormat,
pixelFormat)) {
continue;
}
}
bestCaptureFormat = captureFormat;
bestPixelFormat = pixelFormat;
bestMaxFrameRate = maxFrameRate;
bestMatchesFrameRate = matchesFrameRate;
}
VLOG(1) << "Selecting AVCaptureDevice format "
<< VideoPixelFormatToString(bestPixelFormat);
return bestCaptureFormat;
}
media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
switch (code) {
case kCVPixelFormatType_422YpCbCr8:
return media::PIXEL_FORMAT_UYVY;
case kCMPixelFormat_422YpCbCr8_yuvs:
return media::PIXEL_FORMAT_YUY2;
case kCMVideoCodecType_JPEG_OpenDML:
return media::PIXEL_FORMAT_MJPEG;
default:
return media::PIXEL_FORMAT_UNKNOWN;
}
}
void ExtractBaseAddressAndLength(char** base_address,
size_t* length,
CMSampleBufferRef sample_buffer) {
CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(sample_buffer);
DCHECK(block_buffer);
size_t length_at_offset;
const OSStatus status = CMBlockBufferGetDataPointer(
block_buffer, 0, &length_at_offset, length, base_address);
DCHECK_EQ(noErr, status);
// Expect the (M)JPEG data to be available as a contiguous reference, i.e.
// not covered by multiple memory blocks.
DCHECK_EQ(length_at_offset, *length);
}
base::scoped_nsobject<NSDictionary> GetVideoCaptureDeviceNames() {
// The device name retrieval is not going to happen in the main thread, and
// this might cause instabilities (it did in QTKit), so keep an eye here.
return base::scoped_nsobject<NSDictionary>(GetDeviceNames(),
base::scoped_policy::RETAIN);
}
media::VideoCaptureFormats GetDeviceSupportedFormats(
const media::VideoCaptureDeviceDescriptor& descriptor) {
media::VideoCaptureFormats formats;
NSArray* devices = [AVCaptureDevice devices];
AVCaptureDevice* device = nil;
for (device in devices) {
if (base::SysNSStringToUTF8([device uniqueID]) == descriptor.device_id)
break;
}
if (device == nil)
return media::VideoCaptureFormats();
for (AVCaptureDeviceFormat* format in device.formats) {
// MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType
// as well according to CMFormatDescription.h
const media::VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat(
CMFormatDescriptionGetMediaSubType([format formatDescription]));
CMVideoDimensions dimensions =
CMVideoFormatDescriptionGetDimensions([format formatDescription]);
for (AVFrameRateRange* frameRate in
[format videoSupportedFrameRateRanges]) {
media::VideoCaptureFormat format(
gfx::Size(dimensions.width, dimensions.height),
frameRate.maxFrameRate, pixelFormat);
DVLOG(2) << descriptor.display_name() << " "
<< media::VideoCaptureFormat::ToString(format);
formats.push_back(std::move(format));
}
}
return formats;
}
} // namespace media
@@ -16,6 +16,7 @@
#include "base/strings/string_util.h"
#include "base/task_runner_util.h"
#import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"
#import "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
#import "media/capture/video/mac/video_capture_device_decklink_mac.h"
#include "media/capture/video/mac/video_capture_device_mac.h"
#include "services/video_capture/public/uma/video_capture_service_event.h"
@@ -102,12 +103,12 @@ void VideoCaptureDeviceFactoryMac::GetDevicesInfo(
// Loop through all available devices and add to |devices_info|.
std::vector<VideoCaptureDeviceInfo> devices_info;
NSDictionary* capture_devices;
DVLOG(1) << "Enumerating video capture devices using AVFoundation";
capture_devices = [VideoCaptureDeviceAVFoundation deviceNames];
base::scoped_nsobject<NSDictionary> capture_devices =
GetVideoCaptureDeviceNames();
// Enumerate all devices found by AVFoundation, translate the info for each
// to class Name and add it to |device_names|.
for (NSString* key in capture_devices) {
for (NSString* key in capture_devices.get()) {
const std::string device_id = [key UTF8String];
const VideoCaptureApi capture_api = VideoCaptureApi::MACOSX_AVFOUNDATION;
int transport_type = [[capture_devices valueForKey:key] transportType];
@@ -128,9 +129,8 @@ void VideoCaptureDeviceFactoryMac::GetDevicesInfo(
devices_info.emplace_back(descriptor);
// Get supported formats
[VideoCaptureDeviceAVFoundation
getDevice:descriptor
supportedFormats:&devices_info.back().supported_formats];
devices_info.back().supported_formats =
GetDeviceSupportedFormats(descriptor);
}
// Also retrieve Blackmagic devices, if present, via DeckLink SDK API.
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include "media/capture/video/mac/video_capture_device_avfoundation_mac.h"
#include "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
#include "base/mac/scoped_cftyperef.h"
#include "base/mac/scoped_nsobject.h"