Commit 36d0d6d6 authored by Henrik Boström, committed by Commit Bot

[macOS] In-capturer convert/rescale class: SampleBufferTransformer.

This class supports pixel conversions and rescaling, backed up by both
libyuv functions for SW conversions and PixelBufferTransferer for HW
conversions. It supports conversions from all supported capture formats
(NV12, YUY2, UYVY, MJPEG) to NV12 and I420. (The pixel transfer path
supports even more destination pixel formats, but libyuv's destination
must be NV12 or I420, and these are the only two of interest to us.)

Whether HW or SW conversion is used, the resulting buffer is put in an
IOSurface-backed pixel buffer.

Code paths:
- When we have a pixel buffer, i.e. this is NOT MJPEG:
  - PixelBufferTransferer: X -> I420/NV12 in a single step.
  - libyuv: X -> I420 in a single step. X -> NV12 may involve an
    intermediate step X -> I420 -> NV12. Conversion and rescaling are
    done in separate steps.
- MJPEG (when we only have a sample buffer):
  - PixelBufferTransferer: Not supported.
  - libyuv: MJPEG -> I420 in a single step. MJPEG -> NV12 in two steps
    MJPEG -> I420 -> NV12. Conversion and rescaling are done in separate
    steps.

In a follow-up, we will do MJPEG -> NV12 in a single step based on the
libyuv CL that eshr@ recently landed.

Measurements show that which conversion method is most efficient is
dependent on input and output pixel formats and may even be affected by
cache hits/misses. In future CLs, we should wire up and measure the
optimal SampleBufferTransformer configurations (in which formats to use
libyuv and in which formats to use pixel transfer).

Bug: chromium:1132299
Change-Id: I4f6053c65c0c3b02483d130e25bebd1f656abd89
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2498563
Commit-Queue: Henrik Boström <hbos@chromium.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@chromium.org>
Cr-Commit-Position: refs/heads/master@{#823502}
parent a110cb9f
......@@ -173,6 +173,8 @@ component("capture_lib") {
"video/mac/pixel_buffer_pool_mac.h",
"video/mac/pixel_buffer_transferer_mac.cc",
"video/mac/pixel_buffer_transferer_mac.h",
"video/mac/sample_buffer_transformer_mac.cc",
"video/mac/sample_buffer_transformer_mac.h",
"video/mac/video_capture_device_avfoundation_legacy_mac.h",
"video/mac/video_capture_device_avfoundation_legacy_mac.mm",
"video/mac/video_capture_device_avfoundation_mac.h",
......@@ -473,6 +475,7 @@ test("capture_unittests") {
sources += [
"video/mac/pixel_buffer_pool_mac_unittest.mm",
"video/mac/pixel_buffer_transferer_mac_unittest.mm",
"video/mac/sample_buffer_transformer_mac_unittest.mm",
"video/mac/test/mock_video_capture_device_avfoundation_frame_receiver_mac.h",
"video/mac/test/mock_video_capture_device_avfoundation_frame_receiver_mac.mm",
"video/mac/test/pixel_buffer_test_utils_mac.cc",
......
......@@ -32,11 +32,9 @@ class CAPTURE_EXPORT PixelBufferPool {
// Creates a new buffer from the pool, or returns null if |max_buffers_| would
// be exceeded. The underlying buffers may be recycled.
//
// The caller owns the returned buffer and is responsible for calling
// CFRelease() after they are done using it. This returns the underlying
// buffer to the pool. In order to free memory, you must both release all
// buffers and call Flush() or delete the pool. It is safe for a buffer to
// outlive its pool.
// Freeing all buffer references returns the underlying buffer to the pool. In
// order to free memory, you must both release all buffers and call Flush() or
// delete the pool. It is safe for a buffer to outlive its pool.
base::ScopedCFTypeRef<CVPixelBufferRef> CreateBuffer();
// Frees the memory of any released buffers returned to the pool.
......
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/capture/video/mac/sample_buffer_transformer_mac.h"
#include <utility>
#include "base/logging.h"
#include "base/notreached.h"
#include "media/base/video_frame.h"
#include "media/base/video_types.h"
#include "third_party/libyuv/include/libyuv.h"
#include "third_party/libyuv/include/libyuv/scale.h"
namespace media {
namespace {
// NV12 a.k.a. 420v
constexpr OSType kPixelFormatNv12 =
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
// I420 a.k.a. y420
constexpr OSType kPixelFormatI420 = kCVPixelFormatType_420YpCbCr8Planar;
// Pixel formats mappings to allow these pixel formats to be converted to I420
// with libyuv.
// Maps a CoreVideo/CoreMedia FourCC to the corresponding libyuv FourCC so that
// the format can be handed to libyuv::ConvertToI420(). Only the capture
// formats this file deals with are mapped; any other input trips NOTREACHED()
// and yields FOURCC_ANY.
libyuv::FourCC MacFourCCToLibyuvFourCC(OSType mac_fourcc) {
  switch (mac_fourcc) {
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:  // NV12, "420v"
      return libyuv::FOURCC_NV12;
    case kCVPixelFormatType_422YpCbCr8:  // UYVY, "2vuy"
      return libyuv::FOURCC_UYVY;
    case kCMPixelFormat_422YpCbCr8_yuvs:  // YUY2, "yuvs"
      return libyuv::FOURCC_YUY2;
    case kCMVideoCodecType_JPEG_OpenDML:  // MJPEG, "dmb1"
      return libyuv::FOURCC_MJPG;
    case kCVPixelFormatType_420YpCbCr8Planar:  // I420, "y420"
      return libyuv::FOURCC_I420;
    default:
      NOTREACHED();
      return libyuv::FOURCC_ANY;
  }
}
// Returns the first data byte of |pixel_buffer| and its total data size. For
// planar buffers the address of plane 0 is used. The caller must have locked
// the buffer's base address already.
std::pair<uint8_t*, size_t> GetPixelBufferBaseAddressAndSize(
    CVPixelBufferRef pixel_buffer) {
  void* base_address =
      CVPixelBufferIsPlanar(pixel_buffer)
          ? CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0)
          : CVPixelBufferGetBaseAddress(pixel_buffer);
  uint8_t* data_base_address = static_cast<uint8_t*>(base_address);
  size_t data_size = CVPixelBufferGetDataSize(pixel_buffer);
  DCHECK(data_base_address);
  DCHECK(data_size);
  return std::make_pair(data_base_address, data_size);
}
// Returns the raw byte pointer and size of |sample_buffer|'s data. This path
// is used for MJPEG frames, which arrive as a CMBlockBuffer rather than a
// CVPixelBuffer. The block buffer must be contiguous.
std::pair<uint8_t*, size_t> GetSampleBufferBaseAddressAndSize(
    CMSampleBufferRef sample_buffer) {
  // Access source sample buffer bytes.
  CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(sample_buffer);
  DCHECK(block_buffer);
  char* data_base_address;
  size_t data_size;
  size_t length_at_offset;
  OSStatus status = CMBlockBufferGetDataPointer(
      block_buffer, 0, &length_at_offset, &data_size, &data_base_address);
  DCHECK_EQ(status, noErr);
  DCHECK(data_base_address);
  DCHECK(data_size);
  // If the length at offset 0 equals the full data size, the buffer is a
  // single contiguous memory block, so one (pointer, size) pair describes it.
  DCHECK_EQ(length_at_offset, data_size);  // Buffer must be contiguous.
  return std::make_pair(reinterpret_cast<uint8_t*>(data_base_address),
                        data_size);
}
// Non-owning description of an I420 (planar 4:2:0 YUV) image: per-plane data
// pointers and strides. May point into a CVPixelBuffer or a contiguous
// in-memory buffer.
struct I420Planes {
  size_t width;
  size_t height;
  uint8_t* y_plane_data;
  uint8_t* u_plane_data;
  uint8_t* v_plane_data;
  size_t y_plane_stride;
  size_t u_plane_stride;
  size_t v_plane_stride;
};
size_t GetContiguousI420BufferSize(size_t width, size_t height) {
gfx::Size dimensions(width, height);
return VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kYPlane,
dimensions)
.GetArea() +
VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kUPlane,
dimensions)
.GetArea() +
VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kVPlane,
dimensions)
.GetArea();
}
// Carves a contiguous I420 buffer of |width| x |height| into per-plane
// pointers and strides. |data_base_address| must point to at least
// GetContiguousI420BufferSize(width, height) bytes.
I420Planes GetI420PlanesFromContiguousBuffer(uint8_t* data_base_address,
                                             size_t width,
                                             size_t height) {
  gfx::Size dimensions(width, height);
  gfx::Size y_plane_size =
      VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kYPlane, dimensions);
  gfx::Size u_plane_size =
      VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kUPlane, dimensions);
  // Use kVPlane here. (A previous version queried kUPlane for the V plane, a
  // copy-paste slip that was benign only because I420's U and V planes have
  // identical dimensions.)
  gfx::Size v_plane_size =
      VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kVPlane, dimensions);
  I420Planes i420_planes;
  i420_planes.width = width;
  i420_planes.height = height;
  // Planes are laid out back-to-back: Y, then U, then V.
  i420_planes.y_plane_data = data_base_address;
  i420_planes.u_plane_data = i420_planes.y_plane_data + y_plane_size.GetArea();
  i420_planes.v_plane_data = i420_planes.u_plane_data + u_plane_size.GetArea();
  // The buffer is tightly packed, so stride == plane width.
  i420_planes.y_plane_stride = y_plane_size.width();
  i420_planes.u_plane_stride = u_plane_size.width();
  i420_planes.v_plane_stride = v_plane_size.width();
  return i420_planes;
}
// Grows |i420_buffer| if necessary to hold a contiguous |width| x |height|
// I420 image (never shrinks it, so capacity is reused across frames) and
// returns plane pointers into it.
I420Planes EnsureI420BufferSizeAndGetPlanes(size_t width,
                                            size_t height,
                                            std::vector<uint8_t>* i420_buffer) {
  size_t required_size = GetContiguousI420BufferSize(width, height);
  if (i420_buffer->size() < required_size)
    i420_buffer->resize(required_size);
  // data() is well-defined even for an empty vector, unlike the previous
  // &(*i420_buffer)[0], which is undefined behavior if the buffer is empty
  // (e.g. a 0x0 size).
  return GetI420PlanesFromContiguousBuffer(i420_buffer->data(), width, height);
}
// Describes the three planes of an I420 CVPixelBuffer. The buffer must be
// planar with exactly three planes and must already be locked.
I420Planes GetI420PlanesFromPixelBuffer(CVPixelBufferRef pixel_buffer) {
  DCHECK_EQ(CVPixelBufferGetPlaneCount(pixel_buffer), 3u);
  auto plane_data = [&](size_t plane) {
    return static_cast<uint8_t*>(
        CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, plane));
  };
  I420Planes i420_planes;
  i420_planes.width = CVPixelBufferGetWidth(pixel_buffer);
  i420_planes.height = CVPixelBufferGetHeight(pixel_buffer);
  i420_planes.y_plane_data = plane_data(0);
  i420_planes.u_plane_data = plane_data(1);
  i420_planes.v_plane_data = plane_data(2);
  i420_planes.y_plane_stride =
      CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0);
  i420_planes.u_plane_stride =
      CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
  i420_planes.v_plane_stride =
      CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 2);
  return i420_planes;
}
// Non-owning description of an NV12 (biplanar 4:2:0 YUV) image: a Y plane and
// an interleaved UV plane. May point into a CVPixelBuffer or a contiguous
// in-memory buffer.
struct NV12Planes {
  size_t width;
  size_t height;
  uint8_t* y_plane_data;
  uint8_t* uv_plane_data;
  size_t y_plane_stride;
  size_t uv_plane_stride;
};
size_t GetContiguousNV12BufferSize(size_t width, size_t height) {
gfx::Size dimensions(width, height);
return VideoFrame::PlaneSize(PIXEL_FORMAT_NV12, VideoFrame::kYPlane,
dimensions)
.GetArea() +
VideoFrame::PlaneSize(PIXEL_FORMAT_NV12, VideoFrame::kUVPlane,
dimensions)
.GetArea();
}
// Carves a contiguous NV12 buffer of |width| x |height| into Y and
// interleaved-UV plane pointers and strides. |data_base_address| must point to
// at least GetContiguousNV12BufferSize(width, height) bytes.
NV12Planes GetNV12PlanesFromContiguousBuffer(uint8_t* data_base_address,
                                             size_t width,
                                             size_t height) {
  gfx::Size dimensions(width, height);
  const gfx::Size y_plane_size =
      VideoFrame::PlaneSize(PIXEL_FORMAT_NV12, VideoFrame::kYPlane, dimensions);
  const gfx::Size uv_plane_size = VideoFrame::PlaneSize(
      PIXEL_FORMAT_NV12, VideoFrame::kUVPlane, dimensions);
  NV12Planes nv12_planes;
  nv12_planes.width = width;
  nv12_planes.height = height;
  // The UV plane immediately follows the Y plane.
  nv12_planes.y_plane_data = data_base_address;
  nv12_planes.uv_plane_data = data_base_address + y_plane_size.GetArea();
  // The buffer is tightly packed, so stride == plane width.
  nv12_planes.y_plane_stride = y_plane_size.width();
  nv12_planes.uv_plane_stride = uv_plane_size.width();
  return nv12_planes;
}
// Grows |nv12_buffer| if necessary to hold a contiguous |width| x |height|
// NV12 image (never shrinks it, so capacity is reused across frames) and
// returns plane pointers into it.
NV12Planes EnsureNV12BufferSizeAndGetPlanes(size_t width,
                                            size_t height,
                                            std::vector<uint8_t>* nv12_buffer) {
  size_t required_size = GetContiguousNV12BufferSize(width, height);
  if (nv12_buffer->size() < required_size)
    nv12_buffer->resize(required_size);
  // data() is well-defined even for an empty vector, unlike the previous
  // &(*nv12_buffer)[0], which is undefined behavior if the buffer is empty.
  return GetNV12PlanesFromContiguousBuffer(nv12_buffer->data(), width, height);
}
// Describes the two planes of an NV12 CVPixelBuffer. The buffer must be
// biplanar with exactly two planes and must already be locked.
NV12Planes GetNV12PlanesFromPixelBuffer(CVPixelBufferRef pixel_buffer) {
  DCHECK_EQ(CVPixelBufferGetPlaneCount(pixel_buffer), 2u);
  auto plane_data = [&](size_t plane) {
    return static_cast<uint8_t*>(
        CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, plane));
  };
  NV12Planes nv12_planes;
  nv12_planes.width = CVPixelBufferGetWidth(pixel_buffer);
  nv12_planes.height = CVPixelBufferGetHeight(pixel_buffer);
  nv12_planes.y_plane_data = plane_data(0);
  nv12_planes.uv_plane_data = plane_data(1);
  nv12_planes.y_plane_stride =
      CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0);
  nv12_planes.uv_plane_stride =
      CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
  return nv12_planes;
}
// Converts a raw source buffer in any format mapped by
// MacFourCCToLibyuvFourCC() into the I420 planes described by |destination|,
// with no cropping, rotation or rescaling (source and destination dimensions
// must match).
void ConvertFromAnyToI420(OSType source_pixel_format,
                          uint8_t* source_buffer_base_address,
                          size_t source_buffer_size,
                          const I420Planes& destination) {
  int result = libyuv::ConvertToI420(
      source_buffer_base_address, source_buffer_size, destination.y_plane_data,
      destination.y_plane_stride, destination.u_plane_data,
      destination.u_plane_stride, destination.v_plane_data,
      destination.v_plane_stride,
      /*crop_x*/ 0,
      /*crop_y*/ 0, destination.width, destination.height,
      /*crop_width*/ destination.width,
      /*crop_height*/ destination.height, libyuv::kRotate0,
      MacFourCCToLibyuvFourCC(source_pixel_format));
  DCHECK_EQ(result, 0);
}
// Converts I420 planes into NV12 planes of the same dimensions (interleaves
// the U and V planes into a single UV plane). No rescaling.
void ConvertFromI420ToNV12(const I420Planes& source,
                           const NV12Planes& destination) {
  DCHECK_EQ(source.width, destination.width);
  DCHECK_EQ(source.height, destination.height);
  int result = libyuv::I420ToNV12(
      source.y_plane_data, source.y_plane_stride, source.u_plane_data,
      source.u_plane_stride, source.v_plane_data, source.v_plane_stride,
      destination.y_plane_data, destination.y_plane_stride,
      destination.uv_plane_data, destination.uv_plane_stride, source.width,
      source.height);
  DCHECK_EQ(result, 0);
}
// Rescales I420 |source| to the dimensions of |destination| using bilinear
// filtering. Source and destination dimensions may differ.
void ScaleI420(const I420Planes& source, const I420Planes& destination) {
  int result = libyuv::I420Scale(
      source.y_plane_data, source.y_plane_stride, source.u_plane_data,
      source.u_plane_stride, source.v_plane_data, source.v_plane_stride,
      source.width, source.height, destination.y_plane_data,
      destination.y_plane_stride, destination.u_plane_data,
      destination.u_plane_stride, destination.v_plane_data,
      destination.v_plane_stride, destination.width, destination.height,
      libyuv::kFilterBilinear);
  DCHECK_EQ(result, 0);
}
// Rescales NV12 |source| to the dimensions of |destination| using bilinear
// filtering. Source and destination dimensions may differ.
void ScaleNV12(const NV12Planes& source, const NV12Planes& destination) {
  int result = libyuv::NV12Scale(
      source.y_plane_data, source.y_plane_stride, source.uv_plane_data,
      source.uv_plane_stride, source.width, source.height,
      destination.y_plane_data, destination.y_plane_stride,
      destination.uv_plane_data, destination.uv_plane_stride, destination.width,
      destination.height, libyuv::kFilterBilinear);
  DCHECK_EQ(result, 0);
}
} // namespace
// Starts out unconfigured; Reconfigure() must be called before Transform().
SampleBufferTransformer::SampleBufferTransformer()
    : transformer_(Transformer::kNotConfigured),
      destination_pixel_format_(0x0),
      destination_width_(0),
      destination_height_(0) {}

SampleBufferTransformer::~SampleBufferTransformer() {}

// The currently configured conversion backend.
SampleBufferTransformer::Transformer SampleBufferTransformer::transformer()
    const {
  return transformer_;
}

// The currently configured output pixel format (e.g. NV12 or I420).
OSType SampleBufferTransformer::destination_pixel_format() const {
  return destination_pixel_format_;
}
// Applies a new backend/output configuration. A fresh pixel buffer pool is
// allocated for the destination format and size, and any intermediate
// conversion buffers are released (to be lazily re-grown by Transform()).
void SampleBufferTransformer::Reconfigure(
    Transformer transformer,
    OSType destination_pixel_format,
    size_t destination_width,
    size_t destination_height,
    base::Optional<size_t> buffer_pool_size) {
  DCHECK(transformer != Transformer::kLibyuv ||
         destination_pixel_format == kPixelFormatI420 ||
         destination_pixel_format == kPixelFormatNv12)
      << "Destination format is unsupported when running libyuv";
  transformer_ = transformer;
  destination_pixel_format_ = destination_pixel_format;
  destination_width_ = destination_width;
  destination_height_ = destination_height;
  destination_pixel_buffer_pool_ =
      PixelBufferPool::Create(destination_pixel_format_, destination_width_,
                              destination_height_, buffer_pool_size);
  // A transferer is only needed for the pixel transfer backend.
  pixel_buffer_transferer_ =
      transformer == Transformer::kPixelBufferTransfer
          ? std::make_unique<PixelBufferTransferer>()
          : nullptr;
  // Drop stale intermediate buffers; capacity is retained for reuse.
  intermediate_i420_buffer_.clear();
  intermediate_nv12_buffer_.clear();
}
// Converts |sample_buffer| into a pixel buffer of the configured format and
// size, allocated from the destination pool. Returns null if the pool is
// exhausted. If the source already matches the configured format and size, the
// source's own pixel buffer is returned without any copy.
base::ScopedCFTypeRef<CVPixelBufferRef> SampleBufferTransformer::Transform(
    CMSampleBufferRef sample_buffer) {
  DCHECK(destination_pixel_buffer_pool_);
  base::ScopedCFTypeRef<CVPixelBufferRef> destination_pixel_buffer =
      destination_pixel_buffer_pool_->CreateBuffer();
  if (!destination_pixel_buffer) {
    // Maximum destination buffers exceeded. Old buffers are not being released
    // (and thus not returned to the pool) in time.
    LOG(ERROR) << "Maximum destination buffers exceeded";
    return base::ScopedCFTypeRef<CVPixelBufferRef>();
  }
  if (CVPixelBufferRef source_pixel_buffer =
          CMSampleBufferGetImageBuffer(sample_buffer)) {
    size_t source_width = CVPixelBufferGetWidth(source_pixel_buffer);
    size_t source_height = CVPixelBufferGetHeight(source_pixel_buffer);
    OSType source_pixel_format =
        CVPixelBufferGetPixelFormatType(source_pixel_buffer);
    // Fast path: If source and destination formats are identical, return the
    // source pixel buffer.
    // NOTE(review): this hands back the capture buffer itself, which is not
    // drawn from our IOSurface-backed pool -- confirm callers tolerate a
    // non-pool buffer here. The pool buffer created above goes unused for this
    // frame and is returned to the pool when released.
    if (destination_width_ == source_width &&
        destination_height_ == source_height &&
        destination_pixel_format_ == source_pixel_format) {
      return base::ScopedCFTypeRef<CVPixelBufferRef>(
          source_pixel_buffer, base::scoped_policy::RETAIN);
    }
    // Pixel buffer path. Do pixel transfer or libyuv conversion + rescale.
    TransformPixelBuffer(source_pixel_buffer, destination_pixel_buffer);
    return destination_pixel_buffer;
  }
  // Sample buffer path - it's MJPEG. Do libyuv conversion + rescale.
  TransformSampleBuffer(sample_buffer, destination_pixel_buffer);
  return destination_pixel_buffer;
}
// Dispatches an uncompressed-frame conversion to the configured backend.
// Calling this while unconfigured is a programming error.
void SampleBufferTransformer::TransformPixelBuffer(
    CVPixelBufferRef source_pixel_buffer,
    CVPixelBufferRef destination_pixel_buffer) {
  if (transformer_ == Transformer::kPixelBufferTransfer) {
    TransformPixelBufferWithPixelTransfer(source_pixel_buffer,
                                          destination_pixel_buffer);
  } else if (transformer_ == Transformer::kLibyuv) {
    TransformPixelBufferWithLibyuv(source_pixel_buffer,
                                   destination_pixel_buffer);
  } else {
    // Transformer::kNotConfigured.
    NOTREACHED();
  }
}
// Converts (and rescales, if needed) using VTPixelTransferSession via
// PixelBufferTransferer. The transferer exists whenever this backend is
// configured.
void SampleBufferTransformer::TransformPixelBufferWithPixelTransfer(
    CVPixelBufferRef source_pixel_buffer,
    CVPixelBufferRef destination_pixel_buffer) {
  DCHECK(transformer_ == Transformer::kPixelBufferTransfer);
  DCHECK(pixel_buffer_transferer_);
  const bool transfer_succeeded = pixel_buffer_transferer_->TransferImage(
      source_pixel_buffer, destination_pixel_buffer);
  DCHECK(transfer_succeeded);
}
// Converts (and possibly rescales) |source_pixel_buffer| into
// |destination_pixel_buffer| with libyuv. Both buffers are locked for the
// duration of the operation; the source is locked read-only.
void SampleBufferTransformer::TransformPixelBufferWithLibyuv(
    CVPixelBufferRef source_pixel_buffer,
    CVPixelBufferRef destination_pixel_buffer) {
  DCHECK(transformer_ == Transformer::kLibyuv);
  // Lock source and destination pixel buffers.
  CVReturn lock_status = CVPixelBufferLockBaseAddress(
      source_pixel_buffer, kCVPixelBufferLock_ReadOnly);
  DCHECK_EQ(lock_status, kCVReturnSuccess);
  lock_status = CVPixelBufferLockBaseAddress(destination_pixel_buffer, 0);
  DCHECK_EQ(lock_status, kCVReturnSuccess);
  // Perform transform with libyuv.
  switch (destination_pixel_format_) {
    case kPixelFormatI420:
      TransformPixelBufferWithLibyuvFromAnyToI420(source_pixel_buffer,
                                                  destination_pixel_buffer);
      break;
    case kPixelFormatNv12:
      TransformPixelBufferWithLibyuvFromAnyToNV12(source_pixel_buffer,
                                                  destination_pixel_buffer);
      break;
    default:
      // Reconfigure() DCHECKs that the libyuv destination is I420 or NV12.
      NOTREACHED();
  }
  // Unlock source and destination pixel buffers. Unlock flags must match the
  // flags used when locking.
  lock_status = CVPixelBufferUnlockBaseAddress(destination_pixel_buffer, 0);
  DCHECK_EQ(lock_status, kCVReturnSuccess);
  lock_status = CVPixelBufferUnlockBaseAddress(source_pixel_buffer,
                                               kCVPixelBufferLock_ReadOnly);
  DCHECK_EQ(lock_status, kCVReturnSuccess);
}
// libyuv path with an I420 destination. Step 1 converts the source to I420,
// writing directly into the destination when no rescale is needed and into
// |intermediate_i420_buffer_| otherwise; step 2 rescales into the destination.
// Both pixel buffers must already be locked.
void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToI420(
    CVPixelBufferRef source_pixel_buffer,
    CVPixelBufferRef destination_pixel_buffer) {
  // Get source pixel format and bytes.
  size_t source_width = CVPixelBufferGetWidth(source_pixel_buffer);
  size_t source_height = CVPixelBufferGetHeight(source_pixel_buffer);
  OSType source_pixel_format =
      CVPixelBufferGetPixelFormatType(source_pixel_buffer);
  uint8_t* source_buffer_data_base_address;
  size_t source_buffer_data_size;
  std::tie(source_buffer_data_base_address, source_buffer_data_size) =
      GetPixelBufferBaseAddressAndSize(source_pixel_buffer);
  // Rescaling has to be done in a separate step.
  const bool rescale_needed = destination_width_ != source_width ||
                              destination_height_ != source_height;
  // Step 1: Convert to I420.
  I420Planes i420_fullscale_buffer;
  if (source_pixel_format == kPixelFormatI420) {
    // We are already at I420.
    i420_fullscale_buffer = GetI420PlanesFromPixelBuffer(source_pixel_buffer);
    // If size also matched, Transform()'s fast path would have returned the
    // source buffer before reaching this method, so a rescale must be pending.
    DCHECK(rescale_needed);
  } else {
    // Convert X -> I420.
    if (!rescale_needed) {
      // Same size: convert straight into the destination, no intermediate.
      i420_fullscale_buffer =
          GetI420PlanesFromPixelBuffer(destination_pixel_buffer);
    } else {
      i420_fullscale_buffer = EnsureI420BufferSizeAndGetPlanes(
          source_width, source_height, &intermediate_i420_buffer_);
    }
    ConvertFromAnyToI420(source_pixel_format, source_buffer_data_base_address,
                         source_buffer_data_size, i420_fullscale_buffer);
  }
  // Step 2: Rescale I420.
  if (rescale_needed) {
    I420Planes i420_destination_buffer =
        GetI420PlanesFromPixelBuffer(destination_pixel_buffer);
    ScaleI420(i420_fullscale_buffer, i420_destination_buffer);
  }
}
// libyuv path with an NV12 destination. Because libyuv cannot convert X -> NV12
// directly here, non-NV12 sources go X -> I420 -> NV12 using the intermediate
// buffers; the final NV12 image lands in the destination either directly (no
// rescale) or via a rescale step. Both pixel buffers must already be locked.
void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToNV12(
    CVPixelBufferRef source_pixel_buffer,
    CVPixelBufferRef destination_pixel_buffer) {
  // Get source pixel format and bytes.
  size_t source_width = CVPixelBufferGetWidth(source_pixel_buffer);
  size_t source_height = CVPixelBufferGetHeight(source_pixel_buffer);
  OSType source_pixel_format =
      CVPixelBufferGetPixelFormatType(source_pixel_buffer);
  uint8_t* source_buffer_data_base_address;
  size_t source_buffer_data_size;
  std::tie(source_buffer_data_base_address, source_buffer_data_size) =
      GetPixelBufferBaseAddressAndSize(source_pixel_buffer);
  // Rescaling has to be done in a separate step.
  const bool rescale_needed = destination_width_ != source_width ||
                              destination_height_ != source_height;
  // Step 1: Convert to NV12.
  NV12Planes nv12_fullscale_buffer;
  if (source_pixel_format == kPixelFormatNv12) {
    // We are already at NV12.
    nv12_fullscale_buffer = GetNV12PlanesFromPixelBuffer(source_pixel_buffer);
    // If size also matched, Transform()'s fast path would have returned the
    // source buffer before reaching this method, so a rescale must be pending.
    DCHECK(rescale_needed);
  } else {
    // Convert X -> I420 -> NV12. (We don't know how to do X -> NV12.)
    I420Planes i420_fullscale_buffer;
    if (source_pixel_format == kPixelFormatI420) {
      // We are already at I420.
      i420_fullscale_buffer = GetI420PlanesFromPixelBuffer(source_pixel_buffer);
    } else {
      // Convert X -> I420.
      i420_fullscale_buffer = EnsureI420BufferSizeAndGetPlanes(
          source_width, source_height, &intermediate_i420_buffer_);
      ConvertFromAnyToI420(source_pixel_format, source_buffer_data_base_address,
                           source_buffer_data_size, i420_fullscale_buffer);
    }
    // Convert I420 -> NV12.
    if (!rescale_needed) {
      // Same size: write NV12 straight into the destination.
      nv12_fullscale_buffer =
          GetNV12PlanesFromPixelBuffer(destination_pixel_buffer);
    } else {
      nv12_fullscale_buffer = EnsureNV12BufferSizeAndGetPlanes(
          source_width, source_height, &intermediate_nv12_buffer_);
    }
    ConvertFromI420ToNV12(i420_fullscale_buffer, nv12_fullscale_buffer);
  }
  // Step 2: Rescale NV12.
  if (rescale_needed) {
    NV12Planes nv12_destination_buffer =
        GetNV12PlanesFromPixelBuffer(destination_pixel_buffer);
    ScaleNV12(nv12_fullscale_buffer, nv12_destination_buffer);
  }
}
// MJPEG path: decodes the sample buffer's JPEG bytes with libyuv and converts/
// rescales into |destination_pixel_buffer|. The pipeline is always
// MJPEG -> I420 first, then (optionally) I420 -> NV12 and/or a rescale,
// using the intermediate buffers when the destination cannot be written
// directly. Only the destination pixel buffer needs locking; the source is a
// plain byte buffer.
void SampleBufferTransformer::TransformSampleBuffer(
    CMSampleBufferRef source_sample_buffer,
    CVPixelBufferRef destination_pixel_buffer) {
  DCHECK(transformer_ == Transformer::kLibyuv);
  // Lock destination pixel buffer.
  CVReturn lock_status =
      CVPixelBufferLockBaseAddress(destination_pixel_buffer, 0);
  DCHECK_EQ(lock_status, kCVReturnSuccess);
  // Ensure source pixel format is MJPEG.
  CMFormatDescriptionRef source_format_description =
      CMSampleBufferGetFormatDescription(source_sample_buffer);
  FourCharCode source_pixel_format =
      CMFormatDescriptionGetMediaSubType(source_format_description);
  DCHECK(source_pixel_format == kCMVideoCodecType_JPEG_OpenDML);
  // Access source pixel buffer bytes.
  uint8_t* source_buffer_data_base_address;
  size_t source_buffer_data_size;
  std::tie(source_buffer_data_base_address, source_buffer_data_size) =
      GetSampleBufferBaseAddressAndSize(source_sample_buffer);
  // The JPEG header, not the format description, is the authority on the
  // frame's dimensions.
  int mjpg_width;
  int mjpg_height;
  int result =
      libyuv::MJPGSize(source_buffer_data_base_address, source_buffer_data_size,
                       &mjpg_width, &mjpg_height);
  DCHECK(result == 0);
  size_t source_width = mjpg_width;
  size_t source_height = mjpg_height;
  // Rescaling has to be done in a separate step.
  const bool rescale_needed = destination_width_ != source_width ||
                              destination_height_ != source_height;
  // We don't know how to convert MJPEG -> NV12 directly, so we always start
  // with MJPEG -> I420.
  // TODO(hbos): This is no longer true in libyuv. In a follow-up CL, make use
  // of eshr@'s MJPEG -> NV12 converter.
  // Step 1: Convert MJPEG -> I420.
  I420Planes i420_fullscale_buffer;
  if (!rescale_needed && destination_pixel_format_ == kPixelFormatI420) {
    // Decode straight into the destination; nothing further is needed.
    i420_fullscale_buffer =
        GetI420PlanesFromPixelBuffer(destination_pixel_buffer);
  } else {
    // Convert into an intermediate buffer, to be converted and/or scaled into
    // the destination pixel buffer in subsequent steps.
    i420_fullscale_buffer = EnsureI420BufferSizeAndGetPlanes(
        source_width, source_height, &intermediate_i420_buffer_);
  }
  ConvertFromAnyToI420(source_pixel_format, source_buffer_data_base_address,
                       source_buffer_data_size, i420_fullscale_buffer);
  // Step 2: Convert to destination format and then rescale.
  if (destination_pixel_format_ == kPixelFormatI420) {
    // We are already at I420.
    if (rescale_needed) {
      I420Planes i420_destination_buffer =
          GetI420PlanesFromPixelBuffer(destination_pixel_buffer);
      ScaleI420(i420_fullscale_buffer, i420_destination_buffer);
    }
  } else if (destination_pixel_format_ == kPixelFormatNv12) {
    // Convert I420 -> NV12.
    NV12Planes nv12_fullscale_buffer;
    if (!rescale_needed) {
      nv12_fullscale_buffer =
          GetNV12PlanesFromPixelBuffer(destination_pixel_buffer);
    } else {
      nv12_fullscale_buffer = EnsureNV12BufferSizeAndGetPlanes(
          source_width, source_height, &intermediate_nv12_buffer_);
    }
    ConvertFromI420ToNV12(i420_fullscale_buffer, nv12_fullscale_buffer);
    // Rescale NV12.
    if (rescale_needed) {
      NV12Planes nv12_destination_buffer =
          GetNV12PlanesFromPixelBuffer(destination_pixel_buffer);
      ScaleNV12(nv12_fullscale_buffer, nv12_destination_buffer);
    }
  } else {
    // Reconfigure() DCHECKs that the libyuv destination is I420 or NV12.
    NOTREACHED();
  }
  // Unlock destination pixel buffer.
  lock_status = CVPixelBufferUnlockBaseAddress(destination_pixel_buffer, 0);
  DCHECK_EQ(lock_status, kCVReturnSuccess);
}
} // namespace media
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAPTURE_VIDEO_MAC_SAMPLE_BUFFER_TRANSFORMER_MAC_H_
#define MEDIA_CAPTURE_VIDEO_MAC_SAMPLE_BUFFER_TRANSFORMER_MAC_H_
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#include <vector>
#include "base/mac/scoped_cftyperef.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/mac/pixel_buffer_pool_mac.h"
#include "media/capture/video/mac/pixel_buffer_transferer_mac.h"
namespace media {
// Capable of converting from any supported capture format (NV12, YUY2, UYVY and
// MJPEG) to NV12 or I420 and doing rescaling. This class can be configured to
// use VTPixelTransferSession (sometimes HW-accelerated) or third_party/libyuv
// (SW-only). The output is always an IOSurface-backed pixel buffer that comes
// from an internal pixel buffer pool.
class CAPTURE_EXPORT SampleBufferTransformer {
 public:
  // Which conversion backend Transform() should use.
  enum class Transformer {
    kNotConfigured,
    // Supports (Any except MJPEG) -> (NV12, I420, ...)
    kPixelBufferTransfer,
    // Supports (Any) -> (NV12 or I420)
    kLibyuv,
  };

  // Constructs an unconfigured transformer; Reconfigure() must be called
  // before Transform().
  SampleBufferTransformer();
  ~SampleBufferTransformer();

  // The currently configured backend (kNotConfigured until Reconfigure()).
  Transformer transformer() const;
  // The currently configured output pixel format.
  OSType destination_pixel_format() const;

  // Future calls to Transform() will output pixel buffers according to this
  // configuration. |buffer_pool_size| bounds how many destination buffers may
  // be outstanding at once; when exceeded, Transform() returns null.
  void Reconfigure(Transformer transformer,
                   OSType destination_pixel_format,
                   size_t destination_width,
                   size_t destination_height,
                   base::Optional<size_t> buffer_pool_size);

  // Converts the sample buffer to an IOSurface-backed pixel buffer according to
  // current configurations. If no transformation is needed (input format is the
  // same as the configured output format), the sample buffer's pixel buffer is
  // returned.
  base::ScopedCFTypeRef<CVPixelBufferRef> Transform(
      CMSampleBufferRef sample_buffer);

 private:
  // Sample buffers from the camera contain pixel buffers when an uncompressed
  // pixel format is used (i.e. it's not MJPEG).
  void TransformPixelBuffer(CVPixelBufferRef source_pixel_buffer,
                            CVPixelBufferRef destination_pixel_buffer);
  // (Any uncompressed -> Any uncompressed)
  void TransformPixelBufferWithPixelTransfer(
      CVPixelBufferRef source_pixel_buffer,
      CVPixelBufferRef destination_pixel_buffer);
  // (Any uncompressed -> NV12 or I420)
  void TransformPixelBufferWithLibyuv(
      CVPixelBufferRef source_pixel_buffer,
      CVPixelBufferRef destination_pixel_buffer);
  void TransformPixelBufferWithLibyuvFromAnyToI420(
      CVPixelBufferRef source_pixel_buffer,
      CVPixelBufferRef destination_pixel_buffer);
  void TransformPixelBufferWithLibyuvFromAnyToNV12(
      CVPixelBufferRef source_pixel_buffer,
      CVPixelBufferRef destination_pixel_buffer);
  // Sample buffers from the camera contain byte buffers when MJPEG is used.
  void TransformSampleBuffer(CMSampleBufferRef source_sample_buffer,
                             CVPixelBufferRef destination_pixel_buffer);

  Transformer transformer_;
  OSType destination_pixel_format_;
  size_t destination_width_;
  size_t destination_height_;
  // Destination buffers are drawn from this pool; rebuilt on Reconfigure().
  std::unique_ptr<PixelBufferPool> destination_pixel_buffer_pool_;
  // For kPixelBufferTransfer.
  std::unique_ptr<PixelBufferTransferer> pixel_buffer_transferer_;
  // For kLibyuv in cases where an intermediate buffer is needed. Grown lazily
  // and reused across frames.
  std::vector<uint8_t> intermediate_i420_buffer_;
  std::vector<uint8_t> intermediate_nv12_buffer_;
};
} // namespace media
#endif // MEDIA_CAPTURE_VIDEO_MAC_SAMPLE_BUFFER_TRANSFORMER_MAC_H_
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/capture/video/mac/sample_buffer_transformer_mac.h"
#include <tuple>
#include "base/logging.h"
#include "media/capture/video/mac/test/pixel_buffer_test_utils_mac.h"
#include "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/libyuv/include/libyuv/convert.h"
namespace media {
namespace {
// Example single colored .jpg file (created with MSPaint). It is of RGB color
// (255, 127, 63).
const uint8_t kExampleJpegData[] = {
0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
0x01, 0x01, 0x00, 0x60, 0x00, 0x60, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
0x00, 0x02, 0x01, 0x01, 0x02, 0x01, 0x01, 0x02, 0x02, 0x02, 0x02, 0x02,
0x02, 0x02, 0x02, 0x03, 0x05, 0x03, 0x03, 0x03, 0x03, 0x03, 0x06, 0x04,
0x04, 0x03, 0x05, 0x07, 0x06, 0x07, 0x07, 0x07, 0x06, 0x07, 0x07, 0x08,
0x09, 0x0b, 0x09, 0x08, 0x08, 0x0a, 0x08, 0x07, 0x07, 0x0a, 0x0d, 0x0a,
0x0a, 0x0b, 0x0c, 0x0c, 0x0c, 0x0c, 0x07, 0x09, 0x0e, 0x0f, 0x0d, 0x0c,
0x0e, 0x0b, 0x0c, 0x0c, 0x0c, 0xff, 0xdb, 0x00, 0x43, 0x01, 0x02, 0x02,
0x02, 0x03, 0x03, 0x03, 0x06, 0x03, 0x03, 0x06, 0x0c, 0x08, 0x07, 0x08,
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
0x0c, 0x0c, 0xff, 0xc0, 0x00, 0x11, 0x08, 0x00, 0x10, 0x00, 0x20, 0x03,
0x01, 0x22, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xc4, 0x00,
0x1f, 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05,
0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0xff, 0xc4, 0x00, 0xb5, 0x10, 0x00,
0x02, 0x01, 0x03, 0x03, 0x02, 0x04, 0x03, 0x05, 0x05, 0x04, 0x04, 0x00,
0x00, 0x01, 0x7d, 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12, 0x21,
0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07, 0x22, 0x71, 0x14, 0x32, 0x81,
0x91, 0xa1, 0x08, 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0, 0x24,
0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x25,
0x26, 0x27, 0x28, 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a,
0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56,
0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6a,
0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x83, 0x84, 0x85, 0x86,
0x87, 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99,
0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3,
0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6,
0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9,
0xda, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xf1,
0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xff, 0xc4, 0x00,
0x1f, 0x01, 0x00, 0x03, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05,
0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0xff, 0xc4, 0x00, 0xb5, 0x11, 0x00,
0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x07, 0x05, 0x04, 0x04, 0x00,
0x01, 0x02, 0x77, 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21, 0x31,
0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71, 0x13, 0x22, 0x32, 0x81, 0x08,
0x14, 0x42, 0x91, 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0, 0x15,
0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34, 0xe1, 0x25, 0xf1, 0x17, 0x18,
0x19, 0x1a, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38, 0x39,
0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x53, 0x54, 0x55,
0x56, 0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x82, 0x83, 0x84,
0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa,
0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4,
0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
0xd8, 0xd9, 0xda, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xff, 0xda, 0x00,
0x0c, 0x03, 0x01, 0x00, 0x02, 0x11, 0x03, 0x11, 0x00, 0x3f, 0x00, 0xf7,
0x8a, 0x28, 0xa2, 0xbf, 0x89, 0xcf, 0xf4, 0x50, 0x28, 0xa2, 0x8a, 0x00,
0xff, 0xd9};
// Metadata for kExampleJpegData above.
constexpr size_t kExampleJpegDataSize = 638;
constexpr uint32_t kExampleJpegWidth = 32;
constexpr uint32_t kExampleJpegHeight = 16;
// Target size for tests that downscale the example JPEG by a factor of two.
constexpr uint32_t kExampleJpegScaledDownWidth = 16;
constexpr uint32_t kExampleJpegScaledDownHeight = 8;
// RGB color used to fill single-color test frames, (255, 127, 63).
constexpr uint8_t kColorR = 255u;
constexpr uint8_t kColorG = 127u;
constexpr uint8_t kColorB = 63u;
// Resolutions used by the non-JPEG pixel buffer tests.
constexpr unsigned int kFullResolutionWidth = 128;
constexpr unsigned int kFullResolutionHeight = 96;
constexpr unsigned int kScaledDownResolutionWidth = 64;
constexpr unsigned int kScaledDownResolutionHeight = 48;
// NV12 a.k.a. 420v
constexpr OSType kPixelFormatNv12 =
    kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
// UYVY a.k.a. 2vuy
constexpr OSType kPixelFormatUyvy = kCVPixelFormatType_422YpCbCr8;
// YUY2 a.k.a. yuvs
constexpr OSType kPixelFormatYuy2 = kCVPixelFormatType_422YpCbCr8_yuvs;
// I420 a.k.a. y420
constexpr OSType kPixelFormatI420 = kCVPixelFormatType_420YpCbCr8Planar;
// The pixel-buffer-backed capture formats exercised as transformer input.
// (MJPEG input has no pixel buffer and is covered by the dedicated MJPEG
// test suite instead.)
auto SupportedCaptureFormats() {
  const auto format_generator = ::testing::Values(
      kPixelFormatNv12, kPixelFormatUyvy, kPixelFormatYuy2);
  return format_generator;
}
// The destination formats the transformer is expected to produce: NV12 and
// I420.
auto SupportedOutputFormats() {
  const auto format_generator =
      ::testing::Values(kPixelFormatNv12, kPixelFormatI420);
  return format_generator;
}
// Gives parameterized tests a readable suffix.
// E.g. ".../yuvsTo420v" instead of ".../4"
std::string TestParametersOSTypeTupleToString(
    testing::TestParamInfo<std::tuple<OSType, OSType>> info) {
  const OSType from_format = std::get<0>(info.param);
  const OSType to_format = std::get<1>(info.param);
  return MacFourCCToString(from_format) + std::string("To") +
         MacFourCCToString(to_format);
}
// Gives single-format parameterized tests a readable suffix, e.g. ".../420v".
std::string TestParametersOSTypeToString(testing::TestParamInfo<OSType> info) {
  const OSType pixel_format = info.param;
  return MacFourCCToString(pixel_format);
}
// Creates an IOSurface-backed pixel buffer of |pixel_format| where every
// pixel is the RGB color (r, g, b).
base::ScopedCFTypeRef<CVPixelBufferRef> CreatePixelBuffer(OSType pixel_format,
                                                          int width,
                                                          int height,
                                                          uint8_t r,
                                                          uint8_t g,
                                                          uint8_t b) {
  // Create a YUVS buffer in main memory.
  std::unique_ptr<ByteArrayPixelBuffer> yuvs_buffer =
      CreateYuvsPixelBufferFromSingleRgbColor(width, height, r, g, b);
  // Convert and/or transfer to a pixel buffer that has an IOSurface.
  base::ScopedCFTypeRef<CVPixelBufferRef> pixel_buffer =
      PixelBufferPool::Create(pixel_format, width, height, 1)->CreateBuffer();
  // Fail loudly here rather than crashing on a null buffer in the transfer
  // below if the pool cannot vend a buffer of this format/size.
  DCHECK(pixel_buffer);
  PixelBufferTransferer transferer;
  bool success =
      transferer.TransferImage(yuvs_buffer->pixel_buffer, pixel_buffer);
  DCHECK(success);
  return pixel_buffer;
}
// Wraps a single-color, IOSurface-backed pixel buffer of |pixel_format| in a
// CMSampleBuffer, mimicking a frame arriving from a capture device.
base::ScopedCFTypeRef<CMSampleBufferRef> CreateSampleBuffer(OSType pixel_format,
                                                            int width,
                                                            int height,
                                                            uint8_t r,
                                                            uint8_t g,
                                                            uint8_t b) {
  base::ScopedCFTypeRef<CVPixelBufferRef> pixel_buffer =
      CreatePixelBuffer(pixel_format, width, height, r, g, b);
  // Wrap the pixel buffer in a sample buffer. The format description is
  // scoped so that the +1 reference returned by the Create call (CF "Create
  // Rule") is released; the sample buffer retains its own reference below.
  base::ScopedCFTypeRef<CMFormatDescriptionRef> format_description;
  OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(
      nil, pixel_buffer, format_description.InitializeInto());
  DCHECK(status == noErr);
  // Dummy information to make CMSampleBufferCreateForImageBuffer() happy.
  CMSampleTimingInfo timing_info;
  timing_info.decodeTimeStamp = kCMTimeInvalid;
  timing_info.presentationTimeStamp = CMTimeMake(0, CMTimeScale(NSEC_PER_SEC));
  timing_info.duration =
      CMTimeMake(33 * NSEC_PER_MSEC, CMTimeScale(NSEC_PER_SEC));  // 30 fps
  base::ScopedCFTypeRef<CMSampleBufferRef> sample_buffer;
  status = CMSampleBufferCreateForImageBuffer(
      nil, pixel_buffer, YES, nil, nullptr, format_description, &timing_info,
      sample_buffer.InitializeInto());
  DCHECK(status == noErr);
  return sample_buffer;
}
// Creates a sample buffer whose data is |kExampleJpegData| (an MJPEG frame)
// with no backing pixel buffer.
// NOTE: "Examlpe" is a typo of "Example"; the name is kept so existing call
// sites keep compiling.
base::ScopedCFTypeRef<CMSampleBufferRef> CreateExamlpeMjpegSampleBuffer() {
  // Sanity-check the example data.
  int width;
  int height;
  int result =
      libyuv::MJPGSize(kExampleJpegData, kExampleJpegDataSize, &width, &height);
  DCHECK(result == 0);
  DCHECK_EQ(width, static_cast<int>(kExampleJpegWidth));
  DCHECK_EQ(height, static_cast<int>(kExampleJpegHeight));
  // The block buffer wraps |kExampleJpegData| without copying it. The block
  // allocator must be kCFAllocatorNull (not nil, which means the default
  // allocator) so that releasing the buffer never attempts to free the static
  // array. Scoping the reference releases the Create call's +1 reference; the
  // sample buffer retains its own reference below.
  base::ScopedCFTypeRef<CMBlockBufferRef> data_buffer;
  OSStatus status = CMBlockBufferCreateWithMemoryBlock(
      nil, const_cast<void*>(static_cast<const void*>(kExampleJpegData)),
      kExampleJpegDataSize, kCFAllocatorNull, nil, 0, kExampleJpegDataSize, 0,
      data_buffer.InitializeInto());
  DCHECK(status == noErr);
  // Scoped for the same Create-Rule reason as |data_buffer|.
  base::ScopedCFTypeRef<CMFormatDescriptionRef> format_description;
  status = CMVideoFormatDescriptionCreate(
      nil, kCMVideoCodecType_JPEG_OpenDML, kExampleJpegWidth,
      kExampleJpegHeight, nil, format_description.InitializeInto());
  DCHECK(status == noErr);
  // Dummy information to make CMSampleBufferCreateReady() happy.
  CMSampleTimingInfo timing_info;
  timing_info.decodeTimeStamp = kCMTimeInvalid;
  timing_info.presentationTimeStamp = CMTimeMake(0, CMTimeScale(NSEC_PER_SEC));
  timing_info.duration =
      CMTimeMake(33 * NSEC_PER_MSEC, CMTimeScale(NSEC_PER_SEC));  // 30 fps
  base::ScopedCFTypeRef<CMSampleBufferRef> sample_buffer;
  status = CMSampleBufferCreateReady(nil, data_buffer, format_description, 1, 1,
                                     &timing_info, 1, &kExampleJpegDataSize,
                                     sample_buffer.InitializeInto());
  DCHECK(status == noErr);
  return sample_buffer;
}
} // namespace
// Parameterized on (input pixel format, output pixel format). Exercises the
// pixel-transfer (kPixelBufferTransfer) conversion path.
class SampleBufferTransformerPixelTransferTest
    : public ::testing::Test,
      public ::testing::WithParamInterface<std::tuple<OSType, OSType>> {};
// Converts a full-resolution single-color frame via the pixel-transfer path
// and verifies dimensions and color are preserved.
TEST_P(SampleBufferTransformerPixelTransferTest, CanConvertFullScale) {
  OSType input_pixel_format;
  OSType output_pixel_format;
  std::tie(input_pixel_format, output_pixel_format) = GetParam();
  base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
      CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
                         kFullResolutionHeight, kColorR, kColorG, kColorB);
  SampleBufferTransformer transformer;
  transformer.Reconfigure(
      SampleBufferTransformer::Transformer::kPixelBufferTransfer,
      output_pixel_format, kFullResolutionWidth, kFullResolutionHeight, 1);
  base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
      transformer.Transform(input_sample_buffer);
  // Fail cleanly instead of crashing inside CVPixelBufferGetWidth() if the
  // transform produced no buffer.
  ASSERT_TRUE(output_pixel_buffer);
  EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
  EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
  EXPECT_TRUE(
      PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
// Converts and downscales a single-color frame via the pixel-transfer path
// and verifies the scaled dimensions and the color.
TEST_P(SampleBufferTransformerPixelTransferTest, CanConvertAndScaleDown) {
  OSType input_pixel_format;
  OSType output_pixel_format;
  std::tie(input_pixel_format, output_pixel_format) = GetParam();
  base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
      CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
                         kFullResolutionHeight, kColorR, kColorG, kColorB);
  SampleBufferTransformer transformer;
  transformer.Reconfigure(
      SampleBufferTransformer::Transformer::kPixelBufferTransfer,
      output_pixel_format, kScaledDownResolutionWidth,
      kScaledDownResolutionHeight, 1);
  base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
      transformer.Transform(input_sample_buffer);
  // Fail cleanly instead of crashing inside CVPixelBufferGetWidth() if the
  // transform produced no buffer.
  ASSERT_TRUE(output_pixel_buffer);
  EXPECT_EQ(kScaledDownResolutionWidth,
            CVPixelBufferGetWidth(output_pixel_buffer));
  EXPECT_EQ(kScaledDownResolutionHeight,
            CVPixelBufferGetHeight(output_pixel_buffer));
  EXPECT_TRUE(
      PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
// Instantiates the pixel-transfer suite for every (capture format, output
// format) pair, with readable names like ".../yuvsTo420v".
INSTANTIATE_TEST_SUITE_P(SampleBufferTransformerTest,
                         SampleBufferTransformerPixelTransferTest,
                         ::testing::Combine(SupportedCaptureFormats(),
                                            SupportedOutputFormats()),
                         TestParametersOSTypeTupleToString);
// Parameterized on (input pixel format, output pixel format). Exercises the
// libyuv (kLibyuv) software conversion path.
class SampleBufferTransformerLibyuvTest
    : public ::testing::Test,
      public ::testing::WithParamInterface<std::tuple<OSType, OSType>> {};
// Converts a full-resolution single-color frame via the libyuv path and
// verifies dimensions and color are preserved.
TEST_P(SampleBufferTransformerLibyuvTest, CanConvertFullScale) {
  OSType input_pixel_format;
  OSType output_pixel_format;
  std::tie(input_pixel_format, output_pixel_format) = GetParam();
  base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
      CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
                         kFullResolutionHeight, kColorR, kColorG, kColorB);
  SampleBufferTransformer transformer;
  transformer.Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
                          output_pixel_format, kFullResolutionWidth,
                          kFullResolutionHeight, 1);
  base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
      transformer.Transform(input_sample_buffer);
  // Fail cleanly instead of crashing inside CVPixelBufferGetWidth() if the
  // transform produced no buffer.
  ASSERT_TRUE(output_pixel_buffer);
  EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
  EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
  EXPECT_TRUE(
      PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
// Converts and downscales a single-color frame via the libyuv path and
// verifies the scaled dimensions and the color.
TEST_P(SampleBufferTransformerLibyuvTest, CanConvertAndScaleDown) {
  OSType input_pixel_format;
  OSType output_pixel_format;
  std::tie(input_pixel_format, output_pixel_format) = GetParam();
  base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
      CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
                         kFullResolutionHeight, kColorR, kColorG, kColorB);
  SampleBufferTransformer transformer;
  transformer.Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
                          output_pixel_format, kScaledDownResolutionWidth,
                          kScaledDownResolutionHeight, 1);
  base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
      transformer.Transform(input_sample_buffer);
  // Fail cleanly instead of crashing inside CVPixelBufferGetWidth() if the
  // transform produced no buffer.
  ASSERT_TRUE(output_pixel_buffer);
  EXPECT_EQ(kScaledDownResolutionWidth,
            CVPixelBufferGetWidth(output_pixel_buffer));
  EXPECT_EQ(kScaledDownResolutionHeight,
            CVPixelBufferGetHeight(output_pixel_buffer));
  EXPECT_TRUE(
      PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
// Instantiates the libyuv suite for every (capture format, output format)
// pair, with readable names like ".../yuvsTo420v".
INSTANTIATE_TEST_SUITE_P(SampleBufferTransformerTest,
                         SampleBufferTransformerLibyuvTest,
                         ::testing::Combine(SupportedCaptureFormats(),
                                            SupportedOutputFormats()),
                         TestParametersOSTypeTupleToString);
// Parameterized on the output pixel format only; the input is always the
// example MJPEG sample buffer (which has no backing pixel buffer).
class SampleBufferTransformerMjpegTest
    : public ::testing::Test,
      public ::testing::WithParamInterface<OSType> {};
// Decodes the example MJPEG frame at full resolution via the libyuv path and
// verifies dimensions and color.
TEST_P(SampleBufferTransformerMjpegTest, CanConvertFullScale) {
  OSType output_pixel_format = GetParam();
  base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
      CreateExamlpeMjpegSampleBuffer();
  SampleBufferTransformer transformer;
  transformer.Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
                          output_pixel_format, kExampleJpegWidth,
                          kExampleJpegHeight, 1);
  base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
      transformer.Transform(input_sample_buffer);
  // Fail cleanly instead of crashing inside CVPixelBufferGetWidth() if the
  // transform produced no buffer.
  ASSERT_TRUE(output_pixel_buffer);
  EXPECT_EQ(kExampleJpegWidth, CVPixelBufferGetWidth(output_pixel_buffer));
  EXPECT_EQ(kExampleJpegHeight, CVPixelBufferGetHeight(output_pixel_buffer));
  EXPECT_TRUE(
      PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
// Decodes and downscales the example MJPEG frame via the libyuv path and
// verifies the scaled dimensions and the color.
TEST_P(SampleBufferTransformerMjpegTest, CanConvertAndScaleDown) {
  OSType output_pixel_format = GetParam();
  base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
      CreateExamlpeMjpegSampleBuffer();
  SampleBufferTransformer transformer;
  transformer.Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
                          output_pixel_format, kExampleJpegScaledDownWidth,
                          kExampleJpegScaledDownHeight, 1);
  base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
      transformer.Transform(input_sample_buffer);
  // Fail cleanly instead of crashing inside CVPixelBufferGetWidth() if the
  // transform produced no buffer.
  ASSERT_TRUE(output_pixel_buffer);
  EXPECT_EQ(kExampleJpegScaledDownWidth,
            CVPixelBufferGetWidth(output_pixel_buffer));
  EXPECT_EQ(kExampleJpegScaledDownHeight,
            CVPixelBufferGetHeight(output_pixel_buffer));
  EXPECT_TRUE(
      PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
// Instantiates the MJPEG suite for each supported output format, with
// readable names like ".../420v".
INSTANTIATE_TEST_SUITE_P(SampleBufferTransformerTest,
                         SampleBufferTransformerMjpegTest,
                         SupportedOutputFormats(),
                         TestParametersOSTypeToString);
} // namespace media
......@@ -4,6 +4,8 @@
#include "media/capture/video/mac/test/pixel_buffer_test_utils_mac.h"
#include "media/capture/video/mac/pixel_buffer_pool_mac.h"
#include "media/capture/video/mac/pixel_buffer_transferer_mac.h"
#include "third_party/libyuv/include/libyuv/convert_argb.h"
#include "third_party/libyuv/include/libyuv/convert_from_argb.h"
......@@ -130,6 +132,11 @@ std::unique_ptr<ByteArrayPixelBuffer> CreateYuvsPixelBufferFromArgbBuffer(
int width,
int height,
const std::vector<uint8_t>& argb_buffer) {
// These utility methods don't work well with widths that aren't multiples of
// 16. There could be assumptions about memory alignment, or there could
// simply be a loss of information in the YUVS <-> ARGB conversions since YUVS
// is packed. Either way, the pixels may change, so we avoid these widths.
DCHECK(width % 16 == 0);
std::unique_ptr<ByteArrayPixelBuffer> result =
std::make_unique<ByteArrayPixelBuffer>();
size_t yuvs_stride = GetYuvsStride(width);
......@@ -162,6 +169,11 @@ std::vector<uint8_t> CreateArgbBufferFromYuvsIOSurface(
DCHECK(io_surface);
size_t width = IOSurfaceGetWidth(io_surface);
size_t height = IOSurfaceGetHeight(io_surface);
// These utility methods don't work well with widths that aren't multiples of
// 16. There could be assumptions about memory alignment, or there could
// simply be a loss of information in the YUVS <-> ARGB conversions since YUVS
// is packed. Either way, the pixels may change, so we avoid these widths.
DCHECK(width % 16 == 0);
size_t argb_stride = GetArgbStride(width);
size_t yuvs_stride = GetYuvsStride(width);
uint8_t* pixels = static_cast<uint8_t*>(IOSurfaceGetBaseAddress(io_surface));
......@@ -181,4 +193,30 @@ bool YuvsIOSurfaceIsSingleColor(IOSurfaceRef io_surface,
r, g, b);
}
// Returns true if every pixel of |pixel_buffer| matches the RGB color
// (r, g, b) within the comparison's margin of error. Color checking is only
// implemented for YUVS, so buffers in any other format are first transferred
// into a YUVS pixel buffer.
bool PixelBufferIsSingleColor(CVPixelBufferRef pixel_buffer,
                              uint8_t r,
                              uint8_t g,
                              uint8_t b) {
  const OSType source_format = CVPixelBufferGetPixelFormatType(pixel_buffer);
  base::ScopedCFTypeRef<CVPixelBufferRef> yuvs_pixel_buffer;
  if (source_format != kPixelFormatYuvs) {
    // Convert to YUVS. We only know how to check the color of YUVS.
    yuvs_pixel_buffer =
        PixelBufferPool::Create(kPixelFormatYuvs,
                                CVPixelBufferGetWidth(pixel_buffer),
                                CVPixelBufferGetHeight(pixel_buffer), 1)
            ->CreateBuffer();
    PixelBufferTransferer transferer;
    const bool transferred =
        transferer.TransferImage(pixel_buffer, yuvs_pixel_buffer);
    DCHECK(transferred);
  } else {
    // Already YUVS; retain a reference and inspect the buffer directly.
    yuvs_pixel_buffer.reset(pixel_buffer, base::scoped_policy::RETAIN);
  }
  IOSurfaceRef yuvs_io_surface = CVPixelBufferGetIOSurface(yuvs_pixel_buffer);
  DCHECK(yuvs_io_surface);
  return YuvsIOSurfaceIsSingleColor(yuvs_io_surface, r, g, b);
}
} // namespace media
......@@ -76,6 +76,13 @@ bool YuvsIOSurfaceIsSingleColor(IOSurfaceRef io_surface,
uint8_t g,
uint8_t b);
// True if all pixels of the pixel buffer are the specified RGB color, within
// some margin of error.
bool PixelBufferIsSingleColor(CVPixelBufferRef pixel_buffer,
uint8_t r,
uint8_t g,
uint8_t b);
} // namespace media
#endif // MEDIA_CAPTURE_VIDEO_MAC_TEST_PIXEL_BUFFER_TEST_UTILS_MAC_H_
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment