Commit 422d099e authored by andresantoso, committed by Commit bot

Add support for converting I420 software frames into NV12 hardware frames

Enhance MaybeCreateHardwareFrame() to be able to create an NV12 hardware
frame backed by a YUV_420_BIPLANAR GpuMemoryBuffer.

This code path is not enabled yet.
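For reference, NV12 (gfx::BufferFormat::YUV_420_BIPLANAR) is a two-plane layout: a full-resolution Y plane followed by a half-height plane of interleaved UV samples. The sketch below is an editor's illustration of that per-plane arithmetic, which mirrors what the mock GpuMemoryBuffer in this change computes via gfx::RowSizeForBufferFormat() and gfx::SubsamplingFactorForBufferFormat(). The helper name is made up and the code is not part of this CL; it assumes even frame dimensions.

// Illustration (not from this CL): byte layout of one NV12 / YUV_420_BIPLANAR
// frame. Plane 0 is full-resolution Y, one byte per pixel. Plane 1 carries
// interleaved UV pairs subsampled 2x2, so it has half the rows of Y and,
// for even widths, the same byte stride.
#include <cstddef>

struct PlaneLayout {
  size_t stride;  // Bytes per row.
  size_t rows;    // Number of rows in the plane.
  size_t size;    // Total bytes: stride * rows.
};

// Hypothetical helper; assumes |width| and |height| are even (4:2:0 content).
inline PlaneLayout NV12PlaneLayout(size_t width, size_t height, size_t plane) {
  PlaneLayout layout;
  if (plane == 0) {
    layout.stride = width;     // Y: width bytes per row.
    layout.rows = height;
  } else {
    layout.stride = width;     // UV: width/2 pairs * 2 bytes = width bytes.
    layout.rows = height / 2;  // Subsampled vertically by 2.
  }
  layout.size = layout.stride * layout.rows;
  return layout;
}

For a 1920x1080 frame this gives a 1920x1080-byte Y plane and a 1920x540-byte UV plane, which is the shape of buffer the pool needs when the output format is PIXEL_FORMAT_NV12.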

BUG=524582
CQ_INCLUDE_TRYBOTS=tryserver.blink:linux_blink_rel

Review URL: https://codereview.chromium.org/1307853003

Cr-Commit-Position: refs/heads/master@{#347864}
parent 995e9645
@@ -25,6 +25,49 @@ namespace {
 
 const ResourceFormat kRGBResourceFormat = RGBA_8888;
 
+VideoFrameExternalResources::ResourceType ResourceTypeForVideoFrame(
+    media::VideoFrame* video_frame) {
+  switch (video_frame->format()) {
+    case media::PIXEL_FORMAT_ARGB:
+    case media::PIXEL_FORMAT_XRGB:
+    case media::PIXEL_FORMAT_UYVY:
+      switch (video_frame->mailbox_holder(0).texture_target) {
+        case GL_TEXTURE_2D:
+          return (video_frame->format() == media::PIXEL_FORMAT_XRGB)
+                     ? VideoFrameExternalResources::RGB_RESOURCE
+                     : VideoFrameExternalResources::RGBA_RESOURCE;
+        case GL_TEXTURE_EXTERNAL_OES:
+          return VideoFrameExternalResources::STREAM_TEXTURE_RESOURCE;
+        case GL_TEXTURE_RECTANGLE_ARB:
+          return VideoFrameExternalResources::IO_SURFACE;
+        default:
+          NOTREACHED();
+          break;
+      }
+      break;
+    case media::PIXEL_FORMAT_I420:
+      return VideoFrameExternalResources::YUV_RESOURCE;
+      break;
+    case media::PIXEL_FORMAT_NV12:
+      DCHECK_EQ(static_cast<uint32_t>(GL_TEXTURE_RECTANGLE_ARB),
+                video_frame->mailbox_holder(0).texture_target);
+      return VideoFrameExternalResources::IO_SURFACE;
+      break;
+    case media::PIXEL_FORMAT_YV12:
+    case media::PIXEL_FORMAT_YV16:
+    case media::PIXEL_FORMAT_YV24:
+    case media::PIXEL_FORMAT_YV12A:
+    case media::PIXEL_FORMAT_NV21:
+    case media::PIXEL_FORMAT_YUY2:
+    case media::PIXEL_FORMAT_RGB24:
+    case media::PIXEL_FORMAT_RGB32:
+    case media::PIXEL_FORMAT_MJPEG:
+    case media::PIXEL_FORMAT_UNKNOWN:
+      break;
+  }
+  return VideoFrameExternalResources::NONE;
+}
+
 class SyncPointClientImpl : public media::VideoFrame::SyncPointClient {
  public:
  explicit SyncPointClientImpl(gpu::gles2::GLES2Interface* gl,
@@ -375,56 +418,21 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes(
   if (!context_provider_)
     return VideoFrameExternalResources();
 
-  const size_t textures = media::VideoFrame::NumPlanes(video_frame->format());
-  DCHECK_GE(textures, 1u);
   VideoFrameExternalResources external_resources;
   external_resources.read_lock_fences_enabled = true;
 
-  switch (video_frame->format()) {
-    case media::PIXEL_FORMAT_ARGB:
-    case media::PIXEL_FORMAT_XRGB:
-    case media::PIXEL_FORMAT_UYVY:
-      DCHECK_EQ(1u, textures);
-      switch (video_frame->mailbox_holder(0).texture_target) {
-        case GL_TEXTURE_2D:
-          external_resources.type =
-              (video_frame->format() == media::PIXEL_FORMAT_XRGB)
-                  ? VideoFrameExternalResources::RGB_RESOURCE
-                  : VideoFrameExternalResources::RGBA_RESOURCE;
-          break;
-        case GL_TEXTURE_EXTERNAL_OES:
-          external_resources.type =
-              VideoFrameExternalResources::STREAM_TEXTURE_RESOURCE;
-          break;
-        case GL_TEXTURE_RECTANGLE_ARB:
-          external_resources.type = VideoFrameExternalResources::IO_SURFACE;
-          break;
-        default:
-          NOTREACHED();
-          return VideoFrameExternalResources();
-      }
-      break;
-    case media::PIXEL_FORMAT_I420:
-      external_resources.type = VideoFrameExternalResources::YUV_RESOURCE;
-      break;
-    case media::PIXEL_FORMAT_YV12:
-    case media::PIXEL_FORMAT_YV16:
-    case media::PIXEL_FORMAT_YV24:
-    case media::PIXEL_FORMAT_YV12A:
-    case media::PIXEL_FORMAT_NV12:
-    case media::PIXEL_FORMAT_NV21:
-    case media::PIXEL_FORMAT_YUY2:
-    case media::PIXEL_FORMAT_RGB24:
-    case media::PIXEL_FORMAT_RGB32:
-    case media::PIXEL_FORMAT_MJPEG:
-    case media::PIXEL_FORMAT_UNKNOWN:
-      DLOG(ERROR) << "Unsupported Texture format"
-                  << media::VideoPixelFormatToString(video_frame->format());
-      return external_resources;
+  external_resources.type = ResourceTypeForVideoFrame(video_frame.get());
+  if (external_resources.type == VideoFrameExternalResources::NONE) {
+    DLOG(ERROR) << "Unsupported Texture format"
+                << media::VideoPixelFormatToString(video_frame->format());
+    return external_resources;
   }
-  DCHECK_NE(VideoFrameExternalResources::NONE, external_resources.type);
 
-  for (size_t i = 0; i < textures; ++i) {
+  const size_t num_planes = media::VideoFrame::NumPlanes(video_frame->format());
+  for (size_t i = 0; i < num_planes; ++i) {
     const gpu::MailboxHolder& mailbox_holder = video_frame->mailbox_holder(i);
-    if (mailbox_holder.mailbox.IsZero())
-      break;
     external_resources.mailboxes.push_back(
         TextureMailbox(mailbox_holder.mailbox, mailbox_holder.texture_target,
                        mailbox_holder.sync_point, video_frame->coded_size(),
...
@@ -220,7 +220,9 @@ scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
     const gfx::Rect& visible_rect,
     const gfx::Size& natural_size,
     base::TimeDelta timestamp) {
-  if (format != PIXEL_FORMAT_ARGB && format != PIXEL_FORMAT_UYVY) {
+  if (format != PIXEL_FORMAT_ARGB &&
+      format != PIXEL_FORMAT_UYVY &&
+      format != PIXEL_FORMAT_NV12) {
     DLOG(ERROR) << "Unsupported pixel format supported, got "
                 << VideoPixelFormatToString(format);
     return nullptr;
...
@@ -137,6 +137,7 @@ skia::RefPtr<SkImage> NewSkImageFromVideoFrameNative(
     VideoFrame* video_frame,
     const Context3D& context_3d) {
   DCHECK(PIXEL_FORMAT_ARGB == video_frame->format() ||
+         PIXEL_FORMAT_NV12 == video_frame->format() ||
          PIXEL_FORMAT_UYVY == video_frame->format());
   const gpu::MailboxHolder& mailbox_holder = video_frame->mailbox_holder(0);
...
@@ -4,6 +4,7 @@
 
 #include "media/renderers/mock_gpu_video_accelerator_factories.h"
 
+#include "ui/gfx/buffer_format_util.h"
 #include "ui/gfx/gpu_memory_buffer.h"
 
 namespace media {
@@ -13,16 +14,23 @@ namespace {
 
 class GpuMemoryBufferImpl : public gfx::GpuMemoryBuffer {
  public:
   GpuMemoryBufferImpl(const gfx::Size& size, gfx::BufferFormat format)
-      : format_(format), size_(size) {
+      : format_(format), size_(size),
+        num_planes_(gfx::NumberOfPlanesForBufferFormat(format)) {
     DCHECK(gfx::BufferFormat::R_8 == format_ ||
+           gfx::BufferFormat::YUV_420_BIPLANAR == format_ ||
            gfx::BufferFormat::UYVY_422 == format_);
-    bytes_.resize(size_.GetArea() *
-                  (format_ == gfx::BufferFormat::UYVY_422 ? 2 : 1));
+    DCHECK(num_planes_ <= kMaxPlanes);
+    for (int i = 0; i < static_cast<int>(num_planes_); ++i) {
+      bytes_[i].resize(
+          gfx::RowSizeForBufferFormat(size_.width(), format_, i) *
+          size_.height() / gfx::SubsamplingFactorForBufferFormat(format_, i));
+    }
   }
 
   // Overridden from gfx::GpuMemoryBuffer:
   bool Map(void** data) override {
-    data[0] = &bytes_[0];
+    for (size_t plane = 0; plane < num_planes_; ++plane)
+      data[plane] = &bytes_[plane][0];
     return true;
   }
   void Unmap() override{};
@@ -31,12 +39,13 @@ class GpuMemoryBufferImpl : public gfx::GpuMemoryBuffer {
     return false;
   }
   gfx::BufferFormat GetFormat() const override {
-    NOTREACHED();
-    return gfx::BufferFormat::R_8;
+    return format_;
   }
-  void GetStride(int* stride) const override {
-    stride[0] =
-        size_.width() * (format_ == gfx::BufferFormat::UYVY_422 ? 2 : 1);
+  void GetStride(int* strides) const override {
+    for (int plane = 0; plane < static_cast<int>(num_planes_); ++plane) {
+      strides[plane] = static_cast<int>(
+          gfx::RowSizeForBufferFormat(size_.width(), format_, plane));
+    }
   }
   gfx::GpuMemoryBufferId GetId() const override {
     NOTREACHED();
@@ -51,9 +60,12 @@ class GpuMemoryBufferImpl : public gfx::GpuMemoryBuffer {
   }
 
  private:
+  static const size_t kMaxPlanes = 3;
+
   gfx::BufferFormat format_;
-  std::vector<unsigned char> bytes_;
   const gfx::Size size_;
+  size_t num_planes_;
+  std::vector<uint8> bytes_[kMaxPlanes];
 };
 
 }  // unnamed namespace
...
@@ -231,4 +231,26 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareUYUVFrame) {
   EXPECT_EQ(1u, gles2_->gen_textures);
 }
 
+TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareNV12Frame) {
+  scoped_refptr<VideoFrame> software_frame = CreateTestYUVVideoFrame(10);
+  scoped_refptr<MockGpuVideoAcceleratorFactories> mock_gpu_factories(
+      new MockGpuVideoAcceleratorFactories);
+  mock_gpu_factories->SetVideoFrameOutputFormat(PIXEL_FORMAT_NV12);
+  scoped_ptr<GpuMemoryBufferVideoFramePool> gpu_memory_buffer_pool_ =
+      make_scoped_ptr(new GpuMemoryBufferVideoFramePool(
+          media_task_runner_, copy_task_runner_.get(), mock_gpu_factories));
+  EXPECT_CALL(*mock_gpu_factories.get(), GetGLES2Interface())
+      .WillRepeatedly(testing::Return(gles2_.get()));
+
+  scoped_refptr<VideoFrame> frame;
+  gpu_memory_buffer_pool_->MaybeCreateHardwareFrame(
+      software_frame, base::Bind(MaybeCreateHardwareFrameCallback, &frame));
+
+  RunUntilIdle();
+
+  EXPECT_NE(software_frame.get(), frame.get());
+  EXPECT_EQ(1u, gles2_->gen_textures);
+}
+
 }  // namespace media