Commit a18a2825 authored by Christopher Cameron, committed by Commit Bot

Mac Zero Copy Capture: Capture IOSurfaces as GpuMemoryBuffers

Retrieve the IOSurface backing a CVPixelBuffer and, if it exists,
construct a GpuMemoryBufferHandle to send via
OnIncomingCapturedExternalBuffer.

Bug: 1125879
Change-Id: I541285d6ccf08d71c93bc29cce2ed39f00c34cdf
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2412994
Reviewed-by: Markus Handell <handellm@google.com>
Reviewed-by: Dan Sanders <sandersd@chromium.org>
Commit-Queue: ccameron <ccameron@chromium.org>
Cr-Commit-Position: refs/heads/master@{#808512}
parent 263746bb
...@@ -187,6 +187,7 @@ component("capture_lib") { ...@@ -187,6 +187,7 @@ component("capture_lib") {
"CoreMedia.framework", "CoreMedia.framework",
"CoreVideo.framework", "CoreVideo.framework",
"Foundation.framework", "Foundation.framework",
"IOSurface.framework",
] ]
} }
......
...@@ -27,6 +27,17 @@ class MockVideoCaptureDeviceAVFoundationFrameReceiver ...@@ -27,6 +27,17 @@ class MockVideoCaptureDeviceAVFoundationFrameReceiver
base::TimeDelta timestamp), base::TimeDelta timestamp),
(override)); (override));
  // Mock for the zero-copy delivery path: a GpuMemoryBuffer-backed
  // (IOSurface) frame handed over together with a read-access permission
  // that keeps the underlying buffer alive while the consumer reads it.
  MOCK_METHOD(void,
              ReceiveExternalGpuMemoryBufferFrame,
              (gfx::GpuMemoryBufferHandle handle,
               std::unique_ptr<
                   VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
                   read_access_permission,
               const VideoCaptureFormat& frame_format,
               const gfx::ColorSpace color_space,
               base::TimeDelta timestamp),
              (override));
MOCK_METHOD(void, MOCK_METHOD(void,
OnPhotoTaken, OnPhotoTaken,
(const uint8_t* image_data, (const uint8_t* image_data,
...@@ -46,4 +57,4 @@ class MockVideoCaptureDeviceAVFoundationFrameReceiver ...@@ -46,4 +57,4 @@ class MockVideoCaptureDeviceAVFoundationFrameReceiver
} // namespace media } // namespace media
#endif // MEDIA_CAPTURE_VIDEO_MAC_TEST_MOCK_VIDEO_CAPTURE_DEVICE_AVFOUNDATION_FRAME_RECEIVER_MAC_H_ #endif // MEDIA_CAPTURE_VIDEO_MAC_TEST_MOCK_VIDEO_CAPTURE_DEVICE_AVFOUNDATION_FRAME_RECEIVER_MAC_H_
\ No newline at end of file
...@@ -52,6 +52,19 @@ std::string MacFourCCToString(OSType fourcc) { ...@@ -52,6 +52,19 @@ std::string MacFourCCToString(OSType fourcc) {
return arr; return arr;
} }
// Keeps the captured CMSampleBuffer (and therefore the IOSurface backing its
// pixel buffer) alive for as long as a consumer holds read access to the
// frame. Without this, AVFoundation could recycle the IOSurface while the
// consumer is still reading it.
class CMSampleBufferScopedAccessPermission
    : public media::VideoCaptureDevice::Client::Buffer::ScopedAccessPermission {
 public:
  explicit CMSampleBufferScopedAccessPermission(CMSampleBufferRef buffer)
      : buffer_(buffer, base::scoped_policy::RETAIN) {
    DCHECK(buffer);
  }
  // Release the retained sample buffer only when read access ends. The
  // original called buffer_.reset() in the constructor, which dropped the
  // reference immediately after retaining it and defeated the purpose of
  // this permission object.
  ~CMSampleBufferScopedAccessPermission() override { buffer_.reset(); }

 private:
  base::ScopedCFTypeRef<CMSampleBufferRef> buffer_;
};
} // anonymous namespace } // anonymous namespace
namespace media { namespace media {
...@@ -532,77 +545,75 @@ AVCaptureDeviceFormat* FindBestCaptureFormat( ...@@ -532,77 +545,75 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
} }
} }
- (void)processRamSample:(CMSampleBufferRef)sampleBuffer - (void)processSample:(CMSampleBufferRef)sampleBuffer
baseAddress:(const void*)baseAddress captureFormat:(const media::VideoCaptureFormat&)captureFormat
frameSize:(size_t)frameSize colorSpace:(const gfx::ColorSpace&)colorSpace
pixelFormatType:(OSType)pixelFormat { timestamp:(const base::TimeDelta)timestamp {
VLOG(3) << __func__ << ": format: " << MacFourCCToString(pixelFormat);
const CMFormatDescriptionRef formatDescription =
CMSampleBufferGetFormatDescription(sampleBuffer);
const CMVideoDimensions dimensions =
CMVideoFormatDescriptionGetDimensions(formatDescription);
const media::VideoCaptureFormat captureFormat(
gfx::Size(dimensions.width, dimensions.height), _frameRate,
[VideoCaptureDeviceAVFoundation FourCCToChromiumPixelFormat:pixelFormat]);
base::TimeDelta timestamp = GetCMSampleBufferTimestamp(sampleBuffer);
base::AutoLock lock(_lock);
if (_frameReceiver && baseAddress) {
gfx::ColorSpace colorSpace;
// TODO(julien.isorce): move GetImageBufferColorSpace(CVImageBufferRef)
// from media::VTVideoDecodeAccelerator to media/base/mac and call it
// here to get the color space. See https://crbug.com/959962.
// colorSpace = media::GetImageBufferColorSpace(videoFrame);
_frameReceiver->ReceiveFrame(reinterpret_cast<const uint8_t*>(baseAddress),
frameSize, captureFormat, colorSpace, 0, 0,
timestamp);
}
}
- (void)processRawSample:(CMSampleBufferRef)sampleBuffer {
VLOG(3) << __func__; VLOG(3) << __func__;
// Trust |_frameReceiver| to do decompression. // Trust |_frameReceiver| to do decompression.
char* baseAddress = 0; char* baseAddress = 0;
size_t frameSize = 0; size_t frameSize = 0;
media::ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer); media::ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer);
[self processRamSample:sampleBuffer _frameReceiver->ReceiveFrame(reinterpret_cast<const uint8_t*>(baseAddress),
baseAddress:baseAddress frameSize, captureFormat, colorSpace, 0, 0,
frameSize:frameSize timestamp);
pixelFormatType:CMFormatDescriptionGetMediaSubType(
CMSampleBufferGetFormatDescription(sampleBuffer))];
} }
- (void)processSample:(CMSampleBufferRef)sampleBuffer - (BOOL)processPixelBuffer:(CVImageBufferRef)pixelBuffer
withImageBuffer:(CVImageBufferRef)videoFrame { captureFormat:(const media::VideoCaptureFormat&)captureFormat
if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) != colorSpace:(const gfx::ColorSpace&)colorSpace
timestamp:(const base::TimeDelta)timestamp {
VLOG(3) << __func__;
if (CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly) !=
kCVReturnSuccess) { kCVReturnSuccess) {
return [self processRawSample:sampleBuffer]; return NO;
} }
char* baseAddress = 0; char* baseAddress = 0;
size_t frameSize = 0; size_t frameSize = 0;
if (!CVPixelBufferIsPlanar(videoFrame)) { if (!CVPixelBufferIsPlanar(pixelBuffer)) {
// For nonplanar buffers, CVPixelBufferGetBaseAddress returns a pointer // For nonplanar buffers, CVPixelBufferGetBaseAddress returns a pointer
// to (0,0). (For planar buffers, it returns something else.) // to (0,0). (For planar buffers, it returns something else.)
// https://developer.apple.com/documentation/corevideo/1457115-cvpixelbuffergetbaseaddress?language=objc // https://developer.apple.com/documentation/corevideo/1457115-cvpixelbuffergetbaseaddress?language=objc
baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame)); baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(pixelBuffer));
} else { } else {
// For planar buffers, CVPixelBufferGetBaseAddressOfPlane() is used. If // For planar buffers, CVPixelBufferGetBaseAddressOfPlane() is used. If
// the buffer is contiguous (CHECK'd below) then we only need to know // the buffer is contiguous (CHECK'd below) then we only need to know
// the address of the first plane, regardless of // the address of the first plane, regardless of
// CVPixelBufferGetPlaneCount(). // CVPixelBufferGetPlaneCount().
baseAddress = baseAddress =
static_cast<char*>(CVPixelBufferGetBaseAddressOfPlane(videoFrame, 0)); static_cast<char*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
} }
// CVPixelBufferGetDataSize() works for both nonplanar and planar buffers // CVPixelBufferGetDataSize() works for both nonplanar and planar buffers
// as long as they are contiguous in memory. If it is not contiguous, 0 is // as long as they are contiguous in memory. If it is not contiguous, 0 is
// returned. // returned.
frameSize = CVPixelBufferGetDataSize(videoFrame); frameSize = CVPixelBufferGetDataSize(pixelBuffer);
// Only contiguous buffers are supported. // Only contiguous buffers are supported.
CHECK(frameSize); CHECK(frameSize);
[self processRamSample:sampleBuffer _frameReceiver->ReceiveFrame(reinterpret_cast<const uint8_t*>(baseAddress),
baseAddress:baseAddress frameSize, captureFormat, colorSpace, 0, 0,
frameSize:frameSize timestamp);
pixelFormatType:CVPixelBufferGetPixelFormatType(videoFrame)]; CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferUnlockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly); return YES;
}
// Wraps the NV12 IOSurface backing |sampleBuffer| in a GpuMemoryBufferHandle
// and delivers it zero-copy to |_frameReceiver|. Returns NO if a Mach port
// for the surface could not be created, so the caller can fall back to a
// copying path.
// NOTE(review): assumes the caller holds |_lock| and has verified that
// |_frameReceiver| is non-null — confirm at the call site.
- (BOOL)processNV12IOSurface:(IOSurfaceRef)ioSurface
                sampleBuffer:(CMSampleBufferRef)sampleBuffer
               captureFormat:(const media::VideoCaptureFormat&)captureFormat
                  colorSpace:(const gfx::ColorSpace&)colorSpace
                   timestamp:(const base::TimeDelta)timestamp {
  VLOG(3) << __func__;
  DCHECK_EQ(captureFormat.pixel_format, media::PIXEL_FORMAT_NV12);
  gfx::GpuMemoryBufferHandle handle;
  // NOTE(review): -1 presumably marks the handle id as unassigned — confirm
  // against gfx::GpuMemoryBufferHandle usage on the receiving side.
  handle.id.id = -1;
  handle.type = gfx::GpuMemoryBufferType::IO_SURFACE_BUFFER;
  // IOSurfaceCreateMachPort can fail; signal the caller to fall back.
  handle.mach_port.reset(IOSurfaceCreateMachPort(ioSurface));
  if (!handle.mach_port)
    return NO;
  // The ScopedAccessPermission retains |sampleBuffer| so the IOSurface stays
  // valid until the consumer finishes reading the frame.
  _frameReceiver->ReceiveExternalGpuMemoryBufferFrame(
      std::move(handle),
      std::make_unique<CMSampleBufferScopedAccessPermission>(sampleBuffer),
      captureFormat, colorSpace, timestamp);
  return YES;
} }
// |captureOutput| is called by the capture device to deliver a new frame. // |captureOutput| is called by the capture device to deliver a new frame.
...@@ -612,7 +623,6 @@ AVCaptureDeviceFormat* FindBestCaptureFormat( ...@@ -612,7 +623,6 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection*)connection { fromConnection:(AVCaptureConnection*)connection {
VLOG(3) << __func__; VLOG(3) << __func__;
// We have certain format expectation for capture output: // We have certain format expectation for capture output:
// For MJPEG, |sampleBuffer| is expected to always be a CVBlockBuffer. // For MJPEG, |sampleBuffer| is expected to always be a CVBlockBuffer.
// For other formats, |sampleBuffer| may be either CVBlockBuffer or // For other formats, |sampleBuffer| may be either CVBlockBuffer or
...@@ -620,12 +630,61 @@ AVCaptureDeviceFormat* FindBestCaptureFormat( ...@@ -620,12 +630,61 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
// plugins/virtual cameras. In order to find out whether it is CVBlockBuffer // plugins/virtual cameras. In order to find out whether it is CVBlockBuffer
// or CVImageBuffer we call CMSampleBufferGetImageBuffer() and check if the // or CVImageBuffer we call CMSampleBufferGetImageBuffer() and check if the
// return value is nil. // return value is nil.
CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer); const CMFormatDescriptionRef formatDescription =
if (videoFrame) { CMSampleBufferGetFormatDescription(sampleBuffer);
[self processSample:sampleBuffer withImageBuffer:videoFrame]; const CMVideoDimensions dimensions =
} else { CMVideoFormatDescriptionGetDimensions(formatDescription);
[self processRawSample:sampleBuffer]; OSType sampleBufferPixelFormat =
CMFormatDescriptionGetMediaSubType(formatDescription);
media::VideoPixelFormat videoPixelFormat = [VideoCaptureDeviceAVFoundation
FourCCToChromiumPixelFormat:sampleBufferPixelFormat];
// TODO(julien.isorce): move GetImageBufferColorSpace(CVImageBufferRef)
// from media::VTVideoDecodeAccelerator to media/base/mac and call it
// here to get the color space. See https://crbug.com/959962.
// colorSpace = media::GetImageBufferColorSpace(videoFrame);
gfx::ColorSpace colorSpace;
const media::VideoCaptureFormat captureFormat(
gfx::Size(dimensions.width, dimensions.height), _frameRate,
videoPixelFormat);
const base::TimeDelta timestamp = GetCMSampleBufferTimestamp(sampleBuffer);
if (CVPixelBufferRef pixelBuffer =
CMSampleBufferGetImageBuffer(sampleBuffer)) {
OSType pixelBufferPixelFormat =
CVPixelBufferGetPixelFormatType(pixelBuffer);
DCHECK_EQ(pixelBufferPixelFormat, sampleBufferPixelFormat);
// First preference is to use an NV12 IOSurface as a GpuMemoryBuffer.
// TODO(https://crbug.com/1125879): This path cannot be used in software
// mode yet, and so it cannot be enabled yet.
constexpr bool kEnableGpuMemoryBuffers = false;
if (kEnableGpuMemoryBuffers) {
IOSurfaceRef ioSurface = CVPixelBufferGetIOSurface(pixelBuffer);
if (ioSurface && videoPixelFormat == media::PIXEL_FORMAT_NV12) {
if ([self processNV12IOSurface:ioSurface
sampleBuffer:sampleBuffer
captureFormat:captureFormat
colorSpace:colorSpace
timestamp:timestamp]) {
return;
}
}
}
// Second preference is to read the CVPixelBuffer.
if ([self processPixelBuffer:pixelBuffer
captureFormat:captureFormat
colorSpace:colorSpace
timestamp:timestamp]) {
return;
}
} }
// Last preference is to read the CMSampleBuffer.
[self processSample:sampleBuffer
captureFormat:captureFormat
colorSpace:colorSpace
timestamp:timestamp];
} }
- (void)onVideoError:(NSNotification*)errorNotification { - (void)onVideoError:(NSNotification*)errorNotification {
......
...@@ -21,6 +21,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceAVFoundationFrameReceiver { ...@@ -21,6 +21,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceAVFoundationFrameReceiver {
public: public:
virtual ~VideoCaptureDeviceAVFoundationFrameReceiver() = default; virtual ~VideoCaptureDeviceAVFoundationFrameReceiver() = default;
// Called to deliver captured video frames. It's safe to call this method
// from any thread, including those controlled by AVFoundation.
virtual void ReceiveFrame(const uint8_t* video_frame, virtual void ReceiveFrame(const uint8_t* video_frame,
int video_frame_length, int video_frame_length,
const VideoCaptureFormat& frame_format, const VideoCaptureFormat& frame_format,
...@@ -28,10 +30,29 @@ class CAPTURE_EXPORT VideoCaptureDeviceAVFoundationFrameReceiver { ...@@ -28,10 +30,29 @@ class CAPTURE_EXPORT VideoCaptureDeviceAVFoundationFrameReceiver {
int aspect_numerator, int aspect_numerator,
int aspect_denominator, int aspect_denominator,
base::TimeDelta timestamp) = 0; base::TimeDelta timestamp) = 0;
// Called to deliver GpuMemoryBuffer-wrapped captured video frames. This
// function may be called from any thread, including those controlled by
// AVFoundation.
virtual void ReceiveExternalGpuMemoryBufferFrame(
gfx::GpuMemoryBufferHandle handle,
std::unique_ptr<
VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
read_access_permission,
const VideoCaptureFormat& frame_format,
const gfx::ColorSpace color_space,
base::TimeDelta timestamp) = 0;
// Callbacks with the result of a still image capture, or in case of error,
// respectively. It's safe to call these methods from any thread.
virtual void OnPhotoTaken(const uint8_t* image_data, virtual void OnPhotoTaken(const uint8_t* image_data,
size_t image_length, size_t image_length,
const std::string& mime_type) = 0; const std::string& mime_type) = 0;
// Callback when a call to takePhoto fails.
virtual void OnPhotoError() = 0; virtual void OnPhotoError() = 0;
// Forwarder to VideoCaptureDevice::Client::OnError().
virtual void ReceiveError(VideoCaptureError error, virtual void ReceiveError(VideoCaptureError error,
const base::Location& from_here, const base::Location& from_here,
const std::string& reason) = 0; const std::string& reason) = 0;
......
...@@ -72,8 +72,7 @@ class VideoCaptureDeviceMac ...@@ -72,8 +72,7 @@ class VideoCaptureDeviceMac
bool Init(VideoCaptureApi capture_api_type); bool Init(VideoCaptureApi capture_api_type);
// Called to deliver captured video frames. It's safe to call this method // VideoCaptureDeviceAVFoundationFrameReceiver:
// from any thread, including those controlled by AVFoundation.
void ReceiveFrame(const uint8_t* video_frame, void ReceiveFrame(const uint8_t* video_frame,
int video_frame_length, int video_frame_length,
const VideoCaptureFormat& frame_format, const VideoCaptureFormat& frame_format,
...@@ -81,15 +80,18 @@ class VideoCaptureDeviceMac ...@@ -81,15 +80,18 @@ class VideoCaptureDeviceMac
int aspect_numerator, int aspect_numerator,
int aspect_denominator, int aspect_denominator,
base::TimeDelta timestamp) override; base::TimeDelta timestamp) override;
void ReceiveExternalGpuMemoryBufferFrame(
// Callbacks with the result of a still image capture, or in case of error, gfx::GpuMemoryBufferHandle handle,
// respectively. It's safe to call these methods from any thread. std::unique_ptr<
VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
read_access_permission,
const VideoCaptureFormat& frame_format,
const gfx::ColorSpace color_space,
base::TimeDelta timestamp) override;
void OnPhotoTaken(const uint8_t* image_data, void OnPhotoTaken(const uint8_t* image_data,
size_t image_length, size_t image_length,
const std::string& mime_type) override; const std::string& mime_type) override;
void OnPhotoError() override; void OnPhotoError() override;
// Forwarder to VideoCaptureDevice::Client::OnError().
void ReceiveError(VideoCaptureError error, void ReceiveError(VideoCaptureError error,
const base::Location& from_here, const base::Location& from_here,
const std::string& reason) override; const std::string& reason) override;
......
...@@ -461,6 +461,25 @@ void VideoCaptureDeviceMac::ReceiveFrame(const uint8_t* video_frame, ...@@ -461,6 +461,25 @@ void VideoCaptureDeviceMac::ReceiveFrame(const uint8_t* video_frame,
timestamp); timestamp);
} }
// VideoCaptureDeviceAVFoundationFrameReceiver implementation: forwards a
// GpuMemoryBuffer-backed (IOSurface) frame to the VideoCaptureDevice client.
// NOTE(review): per the receiver interface, this may be called from any
// thread, including AVFoundation-controlled ones.
void VideoCaptureDeviceMac::ReceiveExternalGpuMemoryBufferFrame(
    gfx::GpuMemoryBufferHandle handle,
    std::unique_ptr<VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
        read_access_permission,
    const VideoCaptureFormat& format,
    const gfx::ColorSpace color_space,
    base::TimeDelta timestamp) {
  // Reject frames whose resolution does not match the configured capture
  // format; report it as an error instead of delivering a mismatched frame.
  if (capture_format_.frame_size != format.frame_size) {
    ReceiveError(VideoCaptureError::kMacReceivedFrameWithUnexpectedResolution,
                 FROM_HERE,
                 "Captured resolution " + format.frame_size.ToString() +
                     ", and expected " + capture_format_.frame_size.ToString());
    return;
  }
  // |read_access_permission| keeps the underlying buffer alive until the
  // client is done reading. Reference time is "now"; |timestamp| is the
  // per-frame capture timestamp.
  client_->OnIncomingCapturedExternalBuffer(
      std::move(handle), std::move(read_access_permission), format, color_space,
      base::TimeTicks::Now(), timestamp);
}
void VideoCaptureDeviceMac::OnPhotoTaken(const uint8_t* image_data, void VideoCaptureDeviceMac::OnPhotoTaken(const uint8_t* image_data,
size_t image_length, size_t image_length,
const std::string& mime_type) { const std::string& mime_type) {
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment