Commit 3df39558 authored by Henrik Boström, committed by Commit Bot

Reland "[macOS Capture] Recognize NV12 and prefer it by default."

This is a reland of ca1323fe

The original CL was supposed to modify FourCCToChromiumPixelFormat, but
this part of the change got lost in a rebase. This reland has been
rebased on top of other changes, including support for both a modern and
a legacy implementation of the VideoCaptureDeviceAVFoundation. As a
consequence, this CL has to have both a modern and a legacy
implementation of FourCCToChromiumPixelFormat.

This CL moves FourCCToChromiumPixelFormat from the utils file to the
static part of the capture classes. This allows the
VideoCaptureDeviceFactory to perform GetDeviceSupportedFormats() using
the current implementation.

Original change's description:
> [macOS Capture] Recognize NV12 and prefer it by default.
>
> The current default is YUVY, which despite not being supported by any
> cameras we've tested, is preferred[1] over the currently recognized
> YUY2, which IS supported by most cameras we've tested. See [2] and
> https://crbug.com/1124647.
>
> This CL changes the default to NV12 and makes it a recognized format.
> This pixel format is more preferred (according to [1]) than both YUVY
> and YUY2. An alternative would be to default to YUY2 instead, and YUVY
> wouldn't be picked because it isn't in the list of supported formats,
> but we have reason to believe that NV12 is a better choice for future
> WebRTC patches and that this format is supported by the Chrome
> rendering pipeline.
>
> NV12 is a planar pixel format, it has 2 planes and is contiguous in
> memory. To support this, this CL adds support for planar (along with
> the old nonplanar) contiguous buffers. We don't care how many planes
> there are, as long as the buffer remains contiguous.
>
> [1] https://source.chromium.org/chromium/chromium/src/+/master:media/capture/video_capture_types.cc;l=18;drc=9e9c1706881c9a04f124ade622c10460d14fd9f5?q=media::VideoCaptureFormat::ComparePixelFormatPreference&originalUrl=https:%2F%2Fcs.chromium.org%2F
>
> [2] https://docs.google.com/document/d/1kcJ0Km5kAvrNFB6o7RG89bpvjOhgEo-0qorg5qm7KyA/edit?usp=sharing
>
> Bug: chromium:1124647
> Change-Id: I5c92e1de73922fd9ee87d8a5892438f61f2f56bd
> Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2391064
> Reviewed-by: Guido Urdaneta <guidou@chromium.org>
> Reviewed-by: ccameron <ccameron@chromium.org>
> Commit-Queue: Henrik Boström <hbos@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#806084}

Bug: chromium:1124647
Change-Id: If7497373e7cd31966e9da4e3b1f4869d59904e1d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2410380
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: Markus Handell <handellm@google.com>
Commit-Queue: Henrik Boström <hbos@chromium.org>
Cr-Commit-Position: refs/heads/master@{#807429}
parent b72af7a3
...@@ -77,6 +77,10 @@ class VideoCaptureDeviceMac; ...@@ -77,6 +77,10 @@ class VideoCaptureDeviceMac;
base::ThreadChecker _main_thread_checker; base::ThreadChecker _main_thread_checker;
} }
// This function translates Mac Core Video pixel formats to Chromium pixel
// formats. This legacy implementation does not recognize NV12.
+ (media::VideoPixelFormat)FourCCToChromiumPixelFormat:(FourCharCode)code;
// Returns a dictionary of capture devices with friendly name and unique id. // Returns a dictionary of capture devices with friendly name and unique id.
+ (NSDictionary*)deviceNames; + (NSDictionary*)deviceNames;
......
...@@ -144,21 +144,6 @@ void MaybeWriteUma(int number_of_devices, int number_of_suspended_devices) { ...@@ -144,21 +144,6 @@ void MaybeWriteUma(int number_of_devices, int number_of_suspended_devices) {
device_count_at_last_attempt = device_count; device_count_at_last_attempt = device_count;
} }
// This function translates Mac Core Video pixel formats to Chromium pixel
// formats.
// NOTE(review): this legacy mapping does not recognize NV12 ("420v");
// any unlisted fourcc maps to PIXEL_FORMAT_UNKNOWN.
media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
switch (code) {
case kCVPixelFormatType_422YpCbCr8:
return media::PIXEL_FORMAT_UYVY;  // Mac fourcc: "2vuy".
case kCMPixelFormat_422YpCbCr8_yuvs:
return media::PIXEL_FORMAT_YUY2;  // Mac fourcc: "yuvs".
case kCMVideoCodecType_JPEG_OpenDML:
return media::PIXEL_FORMAT_MJPEG;  // Mac fourcc: "dmb1".
default:
return media::PIXEL_FORMAT_UNKNOWN;
}
}
// Extracts |base_address| and |length| out of a SampleBuffer. // Extracts |base_address| and |length| out of a SampleBuffer.
void ExtractBaseAddressAndLength(char** base_address, void ExtractBaseAddressAndLength(char** base_address,
size_t* length, size_t* length,
...@@ -181,6 +166,19 @@ void ExtractBaseAddressAndLength(char** base_address, ...@@ -181,6 +166,19 @@ void ExtractBaseAddressAndLength(char** base_address,
#pragma mark Class methods #pragma mark Class methods
// Translates a Mac Core Video/Core Media fourcc into the corresponding
// Chromium pixel format. This is the legacy implementation: per the header
// comment, it deliberately does NOT recognize NV12 ("420v"); unlisted
// fourccs map to PIXEL_FORMAT_UNKNOWN.
+ (media::VideoPixelFormat)FourCCToChromiumPixelFormat:(FourCharCode)code {
switch (code) {
case kCVPixelFormatType_422YpCbCr8:
return media::PIXEL_FORMAT_UYVY;  // Mac fourcc: "2vuy".
case kCMPixelFormat_422YpCbCr8_yuvs:
return media::PIXEL_FORMAT_YUY2;  // Mac fourcc: "yuvs".
case kCMVideoCodecType_JPEG_OpenDML:
return media::PIXEL_FORMAT_MJPEG;  // Mac fourcc: "dmb1".
default:
return media::PIXEL_FORMAT_UNKNOWN;
}
}
+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames { + (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
// At this stage we already know that AVFoundation is supported and the whole // At this stage we already know that AVFoundation is supported and the whole
// library is loaded and initialised, by the device monitoring. // library is loaded and initialised, by the device monitoring.
...@@ -225,8 +223,10 @@ void ExtractBaseAddressAndLength(char** base_address, ...@@ -225,8 +223,10 @@ void ExtractBaseAddressAndLength(char** base_address,
for (AVCaptureDeviceFormat* format in device.formats) { for (AVCaptureDeviceFormat* format in device.formats) {
// MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType // MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType
// as well according to CMFormatDescription.h // as well according to CMFormatDescription.h
const media::VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat( const media::VideoPixelFormat pixelFormat =
CMFormatDescriptionGetMediaSubType([format formatDescription])); [VideoCaptureDeviceAVFoundationLegacy
FourCCToChromiumPixelFormat:CMFormatDescriptionGetMediaSubType(
[format formatDescription])];
CMVideoDimensions dimensions = CMVideoDimensions dimensions =
CMVideoFormatDescriptionGetDimensions([format formatDescription]); CMVideoFormatDescriptionGetDimensions([format formatDescription]);
...@@ -354,8 +354,10 @@ void ExtractBaseAddressAndLength(char** base_address, ...@@ -354,8 +354,10 @@ void ExtractBaseAddressAndLength(char** base_address,
CMFormatDescriptionGetMediaSubType([format formatDescription]); CMFormatDescriptionGetMediaSubType([format formatDescription]);
// Compare according to Chromium preference. // Compare according to Chromium preference.
if (media::VideoCaptureFormat::ComparePixelFormatPreference( if (media::VideoCaptureFormat::ComparePixelFormatPreference(
FourCCToChromiumPixelFormat(fourcc), [VideoCaptureDeviceAVFoundationLegacy
FourCCToChromiumPixelFormat(best_fourcc))) { FourCCToChromiumPixelFormat:fourcc],
[VideoCaptureDeviceAVFoundationLegacy
FourCCToChromiumPixelFormat:best_fourcc])) {
best_fourcc = fourcc; best_fourcc = fourcc;
} }
} }
...@@ -484,7 +486,8 @@ void ExtractBaseAddressAndLength(char** base_address, ...@@ -484,7 +486,8 @@ void ExtractBaseAddressAndLength(char** base_address,
CMVideoFormatDescriptionGetDimensions(formatDescription); CMVideoFormatDescriptionGetDimensions(formatDescription);
const media::VideoCaptureFormat captureFormat( const media::VideoCaptureFormat captureFormat(
gfx::Size(dimensions.width, dimensions.height), _frameRate, gfx::Size(dimensions.width, dimensions.height), _frameRate,
FourCCToChromiumPixelFormat(fourcc)); [VideoCaptureDeviceAVFoundationLegacy
FourCCToChromiumPixelFormat:fourcc]);
gfx::ColorSpace colorSpace; gfx::ColorSpace colorSpace;
// We have certain format expectation for capture output: // We have certain format expectation for capture output:
......
...@@ -56,6 +56,10 @@ CAPTURE_EXPORT ...@@ -56,6 +56,10 @@ CAPTURE_EXPORT
scoped_refptr<base::SingleThreadTaskRunner> _mainThreadTaskRunner; scoped_refptr<base::SingleThreadTaskRunner> _mainThreadTaskRunner;
} }
// This function translates Mac Core Video pixel formats to Chromium pixel
// formats. This implementation recognizes NV12.
+ (media::VideoPixelFormat)FourCCToChromiumPixelFormat:(FourCharCode)code;
- (void)setOnStillImageOutputStoppedForTesting: - (void)setOnStillImageOutputStoppedForTesting:
(base::RepeatingCallback<void()>)onStillImageOutputStopped; (base::RepeatingCallback<void()>)onStillImageOutputStopped;
......
...@@ -29,6 +29,8 @@ ...@@ -29,6 +29,8 @@
namespace { namespace {
constexpr int kTimeToWaitBeforeStoppingStillImageCaptureInSeconds = 60; constexpr int kTimeToWaitBeforeStoppingStillImageCaptureInSeconds = 60;
constexpr FourCharCode kDefaultFourCCPixelFormat =
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; // NV12 (a.k.a. 420v)
base::TimeDelta GetCMSampleBufferTimestamp(CMSampleBufferRef sampleBuffer) { base::TimeDelta GetCMSampleBufferTimestamp(CMSampleBufferRef sampleBuffer) {
const CMTime cm_timestamp = const CMTime cm_timestamp =
...@@ -50,6 +52,23 @@ std::string MacFourCCToString(OSType fourcc) { ...@@ -50,6 +52,23 @@ std::string MacFourCCToString(OSType fourcc) {
@implementation VideoCaptureDeviceAVFoundation @implementation VideoCaptureDeviceAVFoundation
#pragma mark Class methods
// Translates a Mac Core Video/Core Media fourcc into the corresponding
// Chromium pixel format. This modern implementation additionally recognizes
// NV12 ("420v"); unlisted fourccs map to PIXEL_FORMAT_UNKNOWN.
+ (media::VideoPixelFormat)FourCCToChromiumPixelFormat:(FourCharCode)code {
switch (code) {
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
return media::PIXEL_FORMAT_NV12;  // Mac fourcc: "420v".
case kCVPixelFormatType_422YpCbCr8:
return media::PIXEL_FORMAT_UYVY;  // Mac fourcc: "2vuy".
case kCMPixelFormat_422YpCbCr8_yuvs:
return media::PIXEL_FORMAT_YUY2;  // Mac fourcc: "yuvs".
case kCMVideoCodecType_JPEG_OpenDML:
return media::PIXEL_FORMAT_MJPEG;  // Mac fourcc: "dmb1".
default:
return media::PIXEL_FORMAT_UNKNOWN;
}
}
#pragma mark Public methods #pragma mark Public methods
- (id)initWithFrameReceiver: - (id)initWithFrameReceiver:
...@@ -155,11 +174,12 @@ std::string MacFourCCToString(OSType fourcc) { ...@@ -155,11 +174,12 @@ std::string MacFourCCToString(OSType fourcc) {
_frameHeight = height; _frameHeight = height;
_frameRate = frameRate; _frameRate = frameRate;
_bestCaptureFormat.reset( _bestCaptureFormat.reset(
media::FindBestCaptureFormat([_captureDevice formats], width, height, media::FindBestCaptureFormat([VideoCaptureDeviceAVFoundation class],
[_captureDevice formats], width, height,
frameRate), frameRate),
base::scoped_policy::RETAIN); base::scoped_policy::RETAIN);
// Default to NV12, a pixel format commonly supported by web cameras.
FourCharCode best_fourcc = kCMPixelFormat_422YpCbCr8; FourCharCode best_fourcc = kDefaultFourCCPixelFormat;
if (_bestCaptureFormat) { if (_bestCaptureFormat) {
best_fourcc = CMFormatDescriptionGetMediaSubType( best_fourcc = CMFormatDescriptionGetMediaSubType(
[_bestCaptureFormat formatDescription]); [_bestCaptureFormat formatDescription]);
...@@ -439,7 +459,7 @@ std::string MacFourCCToString(OSType fourcc) { ...@@ -439,7 +459,7 @@ std::string MacFourCCToString(OSType fourcc) {
CMVideoFormatDescriptionGetDimensions(formatDescription); CMVideoFormatDescriptionGetDimensions(formatDescription);
const media::VideoCaptureFormat captureFormat( const media::VideoCaptureFormat captureFormat(
gfx::Size(dimensions.width, dimensions.height), _frameRate, gfx::Size(dimensions.width, dimensions.height), _frameRate,
media::FourCCToChromiumPixelFormat(pixelFormat)); [VideoCaptureDeviceAVFoundation FourCCToChromiumPixelFormat:pixelFormat]);
base::TimeDelta timestamp = GetCMSampleBufferTimestamp(sampleBuffer); base::TimeDelta timestamp = GetCMSampleBufferTimestamp(sampleBuffer);
base::AutoLock lock(_lock); base::AutoLock lock(_lock);
if (_frameReceiver && baseAddress) { if (_frameReceiver && baseAddress) {
...@@ -473,10 +493,27 @@ std::string MacFourCCToString(OSType fourcc) { ...@@ -473,10 +493,27 @@ std::string MacFourCCToString(OSType fourcc) {
kCVReturnSuccess) { kCVReturnSuccess) {
return [self processRawSample:sampleBuffer]; return [self processRawSample:sampleBuffer];
} }
void* baseAddress = char* baseAddress = 0;
static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame)); size_t frameSize = 0;
size_t frameSize = CVPixelBufferGetHeight(videoFrame) * if (!CVPixelBufferIsPlanar(videoFrame)) {
CVPixelBufferGetBytesPerRow(videoFrame); // For nonplanar buffers, CVPixelBufferGetBaseAddress returns a pointer
// to (0,0). (For planar buffers, it returns something else.)
// https://developer.apple.com/documentation/corevideo/1457115-cvpixelbuffergetbaseaddress?language=objc
baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame));
} else {
// For planar buffers, CVPixelBufferGetBaseAddressOfPlane() is used. If
// the buffer is contiguous (CHECK'd below) then we only need to know
// the address of the first plane, regardless of
// CVPixelBufferGetPlaneCount().
baseAddress =
static_cast<char*>(CVPixelBufferGetBaseAddressOfPlane(videoFrame, 0));
}
// CVPixelBufferGetDataSize() works for both nonplanar and planar buffers
// as long as they are contiguous in memory. If it is not contiguous, 0 is
// returned.
frameSize = CVPixelBufferGetDataSize(videoFrame);
// Only contiguous buffers are supported.
CHECK(frameSize);
[self processRamSample:sampleBuffer [self processRamSample:sampleBuffer
baseAddress:baseAddress baseAddress:baseAddress
frameSize:frameSize frameSize:frameSize
......
...@@ -17,8 +17,11 @@ namespace media { ...@@ -17,8 +17,11 @@ namespace media {
// Find the best capture format from |formats| for the specified dimensions and // Find the best capture format from |formats| for the specified dimensions and
// frame rate. Returns an element of |formats|, or nil. // frame rate. Returns an element of |formats|, or nil.
// |implementation| is a class implementing FourCCToChromiumPixelFormat, which
// our VideoCaptureDeviceAVFoundationProtocol implementations do.
AVCaptureDeviceFormat* CAPTURE_EXPORT AVCaptureDeviceFormat* CAPTURE_EXPORT
FindBestCaptureFormat(NSArray<AVCaptureDeviceFormat*>* formats, FindBestCaptureFormat(Class implementation,
NSArray<AVCaptureDeviceFormat*>* formats,
int width, int width,
int height, int height,
float frame_rate); float frame_rate);
...@@ -30,13 +33,12 @@ FindBestCaptureFormat(NSArray<AVCaptureDeviceFormat*>* formats, ...@@ -30,13 +33,12 @@ FindBestCaptureFormat(NSArray<AVCaptureDeviceFormat*>* formats,
base::scoped_nsobject<NSDictionary> GetVideoCaptureDeviceNames(); base::scoped_nsobject<NSDictionary> GetVideoCaptureDeviceNames();
// Retrieve the capture supported formats for a given device |descriptor|. // Retrieve the capture supported formats for a given device |descriptor|.
// |implementation| is a class implementing FourCCToChromiumPixelFormat, which
// our VideoCaptureDeviceAVFoundationProtocol implementations do.
media::VideoCaptureFormats GetDeviceSupportedFormats( media::VideoCaptureFormats GetDeviceSupportedFormats(
Class implementation,
const media::VideoCaptureDeviceDescriptor& descriptor); const media::VideoCaptureDeviceDescriptor& descriptor);
// This function translates Mac Core Video pixel formats to Chromium pixel
// formats.
media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code);
// Extracts |base_address| and |length| out of a SampleBuffer. // Extracts |base_address| and |length| out of a SampleBuffer.
void ExtractBaseAddressAndLength(char** base_address, void ExtractBaseAddressAndLength(char** base_address,
size_t* length, size_t* length,
......
...@@ -164,6 +164,7 @@ base::scoped_nsobject<NSDictionary> GetDeviceNames() { ...@@ -164,6 +164,7 @@ base::scoped_nsobject<NSDictionary> GetDeviceNames() {
} // namespace } // namespace
AVCaptureDeviceFormat* FindBestCaptureFormat( AVCaptureDeviceFormat* FindBestCaptureFormat(
Class implementation,
NSArray<AVCaptureDeviceFormat*>* formats, NSArray<AVCaptureDeviceFormat*>* formats,
int width, int width,
int height, int height,
...@@ -176,7 +177,8 @@ AVCaptureDeviceFormat* FindBestCaptureFormat( ...@@ -176,7 +177,8 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
for (AVCaptureDeviceFormat* captureFormat in formats) { for (AVCaptureDeviceFormat* captureFormat in formats) {
const FourCharCode fourcc = const FourCharCode fourcc =
CMFormatDescriptionGetMediaSubType([captureFormat formatDescription]); CMFormatDescriptionGetMediaSubType([captureFormat formatDescription]);
VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat(fourcc); VideoPixelFormat pixelFormat =
[implementation FourCCToChromiumPixelFormat:fourcc];
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions( CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(
[captureFormat formatDescription]); [captureFormat formatDescription]);
Float64 maxFrameRate = 0; Float64 maxFrameRate = 0;
...@@ -234,19 +236,6 @@ AVCaptureDeviceFormat* FindBestCaptureFormat( ...@@ -234,19 +236,6 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
return bestCaptureFormat; return bestCaptureFormat;
} }
// Translates a Mac Core Video/Core Media fourcc into the corresponding
// Chromium pixel format. NOTE(review): does not recognize NV12 ("420v");
// unlisted fourccs map to PIXEL_FORMAT_UNKNOWN.
media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
switch (code) {
case kCVPixelFormatType_422YpCbCr8:
return media::PIXEL_FORMAT_UYVY;  // Mac fourcc: "2vuy".
case kCMPixelFormat_422YpCbCr8_yuvs:
return media::PIXEL_FORMAT_YUY2;  // Mac fourcc: "yuvs".
case kCMVideoCodecType_JPEG_OpenDML:
return media::PIXEL_FORMAT_MJPEG;  // Mac fourcc: "dmb1".
default:
return media::PIXEL_FORMAT_UNKNOWN;
}
}
void ExtractBaseAddressAndLength(char** base_address, void ExtractBaseAddressAndLength(char** base_address,
size_t* length, size_t* length,
CMSampleBufferRef sample_buffer) { CMSampleBufferRef sample_buffer) {
...@@ -270,6 +259,7 @@ base::scoped_nsobject<NSDictionary> GetVideoCaptureDeviceNames() { ...@@ -270,6 +259,7 @@ base::scoped_nsobject<NSDictionary> GetVideoCaptureDeviceNames() {
} }
media::VideoCaptureFormats GetDeviceSupportedFormats( media::VideoCaptureFormats GetDeviceSupportedFormats(
Class implementation,
const media::VideoCaptureDeviceDescriptor& descriptor) { const media::VideoCaptureDeviceDescriptor& descriptor) {
media::VideoCaptureFormats formats; media::VideoCaptureFormats formats;
NSArray* devices = [AVCaptureDevice devices]; NSArray* devices = [AVCaptureDevice devices];
...@@ -283,8 +273,9 @@ media::VideoCaptureFormats GetDeviceSupportedFormats( ...@@ -283,8 +273,9 @@ media::VideoCaptureFormats GetDeviceSupportedFormats(
for (AVCaptureDeviceFormat* format in device.formats) { for (AVCaptureDeviceFormat* format in device.formats) {
// MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType // MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType
// as well according to CMFormatDescription.h // as well according to CMFormatDescription.h
const media::VideoPixelFormat pixelFormat = FourCCToChromiumPixelFormat( const media::VideoPixelFormat pixelFormat = [implementation
CMFormatDescriptionGetMediaSubType([format formatDescription])); FourCCToChromiumPixelFormat:CMFormatDescriptionGetMediaSubType(
[format formatDescription])];
CMVideoDimensions dimensions = CMVideoDimensions dimensions =
CMVideoFormatDescriptionGetDimensions([format formatDescription]); CMVideoFormatDescriptionGetDimensions([format formatDescription]);
......
...@@ -130,8 +130,8 @@ void VideoCaptureDeviceFactoryMac::GetDevicesInfo( ...@@ -130,8 +130,8 @@ void VideoCaptureDeviceFactoryMac::GetDevicesInfo(
devices_info.emplace_back(descriptor); devices_info.emplace_back(descriptor);
// Get supported formats // Get supported formats
devices_info.back().supported_formats = devices_info.back().supported_formats = GetDeviceSupportedFormats(
GetDeviceSupportedFormats(descriptor); GetVideoCaptureDeviceAVFoundationImplementationClass(), descriptor);
} }
// Also retrieve Blackmagic devices, if present, via DeckLink SDK API. // Also retrieve Blackmagic devices, if present, via DeckLink SDK API.
......
...@@ -90,6 +90,9 @@ namespace media { ...@@ -90,6 +90,9 @@ namespace media {
// Test the behavior of the function FindBestCaptureFormat which is used to // Test the behavior of the function FindBestCaptureFormat which is used to
// determine the capture format. // determine the capture format.
TEST(VideoCaptureDeviceMacTest, FindBestCaptureFormat) { TEST(VideoCaptureDeviceMacTest, FindBestCaptureFormat) {
// We are only interested in the modern implementation here.
Class impl = [VideoCaptureDeviceAVFoundation class];
base::scoped_nsobject<FakeAVCaptureDeviceFormat> fmt_320_240_xyzw_30( base::scoped_nsobject<FakeAVCaptureDeviceFormat> fmt_320_240_xyzw_30(
[[FakeAVCaptureDeviceFormat alloc] initWithWidth:320 [[FakeAVCaptureDeviceFormat alloc] initWithWidth:320
height:240 height:240
...@@ -136,68 +139,68 @@ TEST(VideoCaptureDeviceMacTest, FindBestCaptureFormat) { ...@@ -136,68 +139,68 @@ TEST(VideoCaptureDeviceMacTest, FindBestCaptureFormat) {
AVCaptureDeviceFormat* result = nil; AVCaptureDeviceFormat* result = nil;
// If we can't find a valid format, we should return nil; // If we can't find a valid format, we should return nil;
result = FindBestCaptureFormat(@[ fmt_320_240_xyzw_30 ], 320, 240, 30); result = FindBestCaptureFormat(impl, @[ fmt_320_240_xyzw_30 ], 320, 240, 30);
EXPECT_EQ(result, nil); EXPECT_EQ(result, nil);
// Can't find a matching resolution // Can't find a matching resolution
result = FindBestCaptureFormat(@[ fmt_320_240_yuvs_30, fmt_320_240_2vuy_30 ], result = FindBestCaptureFormat(
640, 480, 30); impl, @[ fmt_320_240_yuvs_30, fmt_320_240_2vuy_30 ], 640, 480, 30);
EXPECT_EQ(result, nil); EXPECT_EQ(result, nil);
result = FindBestCaptureFormat(@[ fmt_320_240_2vuy_30, fmt_320_240_yuvs_30 ], result = FindBestCaptureFormat(
640, 480, 30); impl, @[ fmt_320_240_2vuy_30, fmt_320_240_yuvs_30 ], 640, 480, 30);
EXPECT_EQ(result, nil); EXPECT_EQ(result, nil);
// Simple exact match. // Simple exact match.
result = FindBestCaptureFormat(@[ fmt_640_480_yuvs_30, fmt_320_240_yuvs_30 ], result = FindBestCaptureFormat(
320, 240, 30); impl, @[ fmt_640_480_yuvs_30, fmt_320_240_yuvs_30 ], 320, 240, 30);
EXPECT_EQ(result, fmt_320_240_yuvs_30.get()); EXPECT_EQ(result, fmt_320_240_yuvs_30.get());
result = FindBestCaptureFormat(@[ fmt_320_240_yuvs_30, fmt_640_480_yuvs_30 ], result = FindBestCaptureFormat(
320, 240, 30); impl, @[ fmt_320_240_yuvs_30, fmt_640_480_yuvs_30 ], 320, 240, 30);
EXPECT_EQ(result, fmt_320_240_yuvs_30.get()); EXPECT_EQ(result, fmt_320_240_yuvs_30.get());
// Different frame rate. // Different frame rate.
result = FindBestCaptureFormat(@[ fmt_640_480_2vuy_30 ], 640, 480, 60); result = FindBestCaptureFormat(impl, @[ fmt_640_480_2vuy_30 ], 640, 480, 60);
EXPECT_EQ(result, fmt_640_480_2vuy_30.get()); EXPECT_EQ(result, fmt_640_480_2vuy_30.get());
// Prefer the same frame rate. // Prefer the same frame rate.
result = FindBestCaptureFormat(@[ fmt_640_480_yuvs_30, fmt_640_480_2vuy_60 ], result = FindBestCaptureFormat(
640, 480, 60); impl, @[ fmt_640_480_yuvs_30, fmt_640_480_2vuy_60 ], 640, 480, 60);
EXPECT_EQ(result, fmt_640_480_2vuy_60.get()); EXPECT_EQ(result, fmt_640_480_2vuy_60.get());
result = FindBestCaptureFormat(@[ fmt_640_480_2vuy_60, fmt_640_480_yuvs_30 ], result = FindBestCaptureFormat(
640, 480, 60); impl, @[ fmt_640_480_2vuy_60, fmt_640_480_yuvs_30 ], 640, 480, 60);
EXPECT_EQ(result, fmt_640_480_2vuy_60.get()); EXPECT_EQ(result, fmt_640_480_2vuy_60.get());
// Prefer version with matching frame rate. // Prefer version with matching frame rate.
result = FindBestCaptureFormat(@[ fmt_640_480_yuvs_30, fmt_640_480_2vuy_60 ], result = FindBestCaptureFormat(
640, 480, 60); impl, @[ fmt_640_480_yuvs_30, fmt_640_480_2vuy_60 ], 640, 480, 60);
EXPECT_EQ(result, fmt_640_480_2vuy_60.get()); EXPECT_EQ(result, fmt_640_480_2vuy_60.get());
result = FindBestCaptureFormat(@[ fmt_640_480_2vuy_60, fmt_640_480_yuvs_30 ], result = FindBestCaptureFormat(
640, 480, 60); impl, @[ fmt_640_480_2vuy_60, fmt_640_480_yuvs_30 ], 640, 480, 60);
EXPECT_EQ(result, fmt_640_480_2vuy_60.get()); EXPECT_EQ(result, fmt_640_480_2vuy_60.get());
// Prefer version with matching frame rate when there are multiple framerates. // Prefer version with matching frame rate when there are multiple framerates.
result = FindBestCaptureFormat( result = FindBestCaptureFormat(
@[ fmt_640_480_yuvs_30, fmt_640_480_2vuy_30_60 ], 640, 480, 60); impl, @[ fmt_640_480_yuvs_30, fmt_640_480_2vuy_30_60 ], 640, 480, 60);
EXPECT_EQ(result, fmt_640_480_2vuy_30_60.get()); EXPECT_EQ(result, fmt_640_480_2vuy_30_60.get());
result = FindBestCaptureFormat( result = FindBestCaptureFormat(
@[ fmt_640_480_2vuy_30_60, fmt_640_480_yuvs_30 ], 640, 480, 60); impl, @[ fmt_640_480_2vuy_30_60, fmt_640_480_yuvs_30 ], 640, 480, 60);
EXPECT_EQ(result, fmt_640_480_2vuy_30_60.get()); EXPECT_EQ(result, fmt_640_480_2vuy_30_60.get());
// Prefer version with the lower maximum framerate when there are multiple // Prefer version with the lower maximum framerate when there are multiple
// framerates. // framerates.
result = FindBestCaptureFormat( result = FindBestCaptureFormat(
@[ fmt_640_480_2vuy_30, fmt_640_480_2vuy_30_60 ], 640, 480, 30); impl, @[ fmt_640_480_2vuy_30, fmt_640_480_2vuy_30_60 ], 640, 480, 30);
EXPECT_EQ(result, fmt_640_480_2vuy_30.get()); EXPECT_EQ(result, fmt_640_480_2vuy_30.get());
result = FindBestCaptureFormat( result = FindBestCaptureFormat(
@[ fmt_640_480_2vuy_30_60, fmt_640_480_2vuy_30 ], 640, 480, 30); impl, @[ fmt_640_480_2vuy_30_60, fmt_640_480_2vuy_30 ], 640, 480, 30);
EXPECT_EQ(result, fmt_640_480_2vuy_30.get()); EXPECT_EQ(result, fmt_640_480_2vuy_30.get());
// Prefer the Chromium format order. // Prefer the Chromium format order.
result = FindBestCaptureFormat(@[ fmt_640_480_yuvs_30, fmt_640_480_2vuy_30 ], result = FindBestCaptureFormat(
640, 480, 30); impl, @[ fmt_640_480_yuvs_30, fmt_640_480_2vuy_30 ], 640, 480, 30);
EXPECT_EQ(result, fmt_640_480_2vuy_30.get()); EXPECT_EQ(result, fmt_640_480_2vuy_30.get());
result = FindBestCaptureFormat(@[ fmt_640_480_2vuy_30, fmt_640_480_yuvs_30 ], result = FindBestCaptureFormat(
640, 480, 30); impl, @[ fmt_640_480_2vuy_30, fmt_640_480_yuvs_30 ], 640, 480, 30);
EXPECT_EQ(result, fmt_640_480_2vuy_30.get()); EXPECT_EQ(result, fmt_640_480_2vuy_30.get());
} }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment