Commit 3684eb98 authored by wez@chromium.org

Implement VP9/I444 encode support in the Chromoting host.

This will be selectable by clients that want to avoid I420 chroma-subsampling artefacts.
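(For context: I420 carries one U and one V sample per 2x2 block of pixels, so sharp colored edges in screen content smear; I444 keeps chroma at full resolution. A rough cost sketch, assuming 8-bit planar frames; the helpers below are illustrative only, not part of this CL:

    // Raw plane bytes per W x H input frame:
    //   I420: Y = W*H, U = V = ceil(W/2) * ceil(H/2)  -> ~1.5 bytes/pixel
    //   I444: Y = U = V = W*H                         -> 3 bytes/pixel
    size_t I420FrameBytes(size_t w, size_t h) {
      return w * h + 2 * ((w + 1) / 2) * ((h + 1) / 2);
    }
    size_t I444FrameBytes(size_t w, size_t h) { return 3 * w * h; }

I444 thus roughly doubles the raw frame data fed to the encoder, in exchange for artefact-free chroma.)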

BUG=260879,134202

Review URL: https://codereview.chromium.org/261753013

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@269774 0039d316-1c4b-4281-b951-d872f2087c98
parent dd1ae25d
@@ -45,28 +45,6 @@ int CalculateUVOffset(int x, int y, int stride) {
   return stride * y / 2 + x / 2;
 }
 
-void ConvertRGB32ToYUVWithRect(const uint8* rgb_plane,
-                               uint8* y_plane,
-                               uint8* u_plane,
-                               uint8* v_plane,
-                               int x,
-                               int y,
-                               int width,
-                               int height,
-                               int rgb_stride,
-                               int y_stride,
-                               int uv_stride) {
-  int rgb_offset = CalculateRGBOffset(x, y, rgb_stride);
-  int y_offset = CalculateYOffset(x, y, y_stride);
-  int uv_offset = CalculateUVOffset(x, y, uv_stride);
-
-  libyuv::ARGBToI420(rgb_plane + rgb_offset, rgb_stride,
-                     y_plane + y_offset, y_stride,
-                     u_plane + uv_offset, uv_stride,
-                     v_plane + uv_offset, uv_stride,
-                     width, height);
-}
-
 void ConvertAndScaleYUVToRGB32Rect(
     const uint8* source_yplane,
     const uint8* source_uplane,
...
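The deleted helper was a thin wrapper over libyuv::ARGBToI420 plus the offset helpers above it. Spelled out, the offsets it computed for a sub-rectangle at (x, y) reduce to the following (a sketch; assumes the usual 4-byte RGBx layout and even x, y, which callers ensure by 2x2-aligning dirty rects):

    int rgb_offset = y * rgb_stride + x * 4;      // 4 bytes per RGBx pixel
    int y_offset = y * y_stride + x;              // 1 byte per luma sample
    int uv_offset = (y / 2) * uv_stride + x / 2;  // U/V are 2x2-subsampled

With the helper gone, callers do this arithmetic inline and call libyuv directly.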
@@ -50,19 +50,6 @@ void ConvertAndScaleYUVToRGB32Rect(
     const webrtc::DesktopRect& dest_buffer_rect,
     const webrtc::DesktopRect& dest_rect);
 
-// Convert RGB32 to YUV on a specific rectangle.
-void ConvertRGB32ToYUVWithRect(const uint8* rgb_plane,
-                               uint8* y_plane,
-                               uint8* u_plane,
-                               uint8* v_plane,
-                               int x,
-                               int y,
-                               int width,
-                               int height,
-                               int rgb_stride,
-                               int y_stride,
-                               int uv_stride);
-
 int RoundToTwosMultiple(int x);
 
 // Align the sides of the rectangle to multiples of 2 (expanding outwards).
...
@@ -6,6 +6,7 @@
 
 #include "remoting/base/util.h"
 #include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/libyuv/include/libyuv/convert_from_argb.h"
 #include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"
 
 static const int kWidth = 32;
@@ -91,17 +92,16 @@ class YuvToRgbTester {
 
     FillRgbBuffer(rect);
 
     // RGB -> YUV
-    ConvertRGB32ToYUVWithRect(rgb_buffer_.get(),
-                              yplane_,
-                              uplane_,
-                              vplane_,
-                              0,
-                              0,
-                              kWidth,
-                              kHeight,
-                              kRgbStride,
-                              kYStride,
-                              kUvStride);
+    libyuv::ARGBToI420(rgb_buffer_.get(),
+                       kRgbStride,
+                       yplane_,
+                       kYStride,
+                       uplane_,
+                       kUvStride,
+                       vplane_,
+                       kUvStride,
+                       kWidth,
+                       kHeight);
 
     // Reset RGB buffer and do opposite conversion.
     ResetRgbBuffer();
...
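Note the argument-order change in the test: libyuv interleaves each plane pointer with its stride, where the removed helper took all pointers first and all strides last. For reference, the declaration the test now calls (as published in libyuv's convert_from_argb.h; returns 0 on success):

    int ARGBToI420(const uint8* src_argb, int src_stride_argb,
                   uint8* dst_y, int dst_stride_y,
                   uint8* dst_u, int dst_stride_u,
                   uint8* dst_v, int dst_stride_v,
                   int width, int height);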
@@ -45,7 +45,7 @@ class VideoDecoderVp8Test : public VideoDecoderVpxTest {
 
 class VideoDecoderVp9Test : public VideoDecoderVpxTest {
  protected:
   VideoDecoderVp9Test() {
-    encoder_ = VideoEncoderVpx::CreateForVP9();
+    encoder_ = VideoEncoderVpx::CreateForVP9I420();
     decoder_ = VideoDecoderVpx::CreateForVP9();
   }
 };
...
@@ -7,9 +7,9 @@
 
 #include "base/bind.h"
 #include "base/logging.h"
 #include "base/sys_info.h"
-#include "media/base/yuv_convert.h"
 #include "remoting/base/util.h"
 #include "remoting/proto/video.pb.h"
+#include "third_party/libyuv/include/libyuv/convert_from_argb.h"
 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
 #include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"
 #include "third_party/webrtc/modules/desktop_capture/desktop_region.h"
@@ -24,10 +24,17 @@ namespace remoting {
 
 namespace {
 
+// Number of bytes in an RGBx pixel.
+const int kBytesPerRgbPixel = 4;
+
 // Defines the dimension of a macro block. This is used to compute the active
 // map for the encoder.
 const int kMacroBlockSize = 16;
 
+// Magic encoder profile numbers for I420 and I444 input formats.
+const int kVp9I420ProfileNumber = 0;
+const int kVp9I444ProfileNumber = 1;
+
 void SetCommonCodecParameters(const webrtc::DesktopSize& size,
                               vpx_codec_enc_cfg_t* config) {
   // Use millisecond granularity time base.
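The "magic" numbers correspond to VP9 bitstream profiles: profile 0 is the baseline 8-bit 4:2:0 profile, and profile 1 extends it with 4:2:2 and 4:4:4 chroma sampling (still 8-bit). In short:

    // VP9 profile summary (8-bit depth):
    //   profile 0: 4:2:0 only            -> kVp9I420ProfileNumber
    //   profile 1: adds 4:2:2 and 4:4:4  -> kVp9I444ProfileNumber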
@@ -90,7 +97,7 @@ ScopedVpxCodec CreateVP8Codec(const webrtc::DesktopSize& size) {
   return codec.Pass();
 }
 
-ScopedVpxCodec CreateVP9Codec(const webrtc::DesktopSize& size) {
+ScopedVpxCodec CreateVP9Codec(bool use_i444, const webrtc::DesktopSize& size) {
   ScopedVpxCodec codec(new vpx_codec_ctx_t);
 
   // Configure the encoder.
@@ -103,8 +110,8 @@ ScopedVpxCodec CreateVP9Codec(const webrtc::DesktopSize& size) {
   SetCommonCodecParameters(size, &config);
 
-  // Configure VP9 for I420 source frames.
-  config.g_profile = 0;
+  // Configure VP9 for I420 or I444 source frames.
+  config.g_profile = use_i444 ? kVp9I444ProfileNumber : kVp9I420ProfileNumber;
 
   // Disable quantization entirely, putting the encoder in "lossless" mode.
   config.rc_min_quantizer = 0;
@@ -128,18 +135,90 @@ ScopedVpxCodec CreateVP9Codec(const webrtc::DesktopSize& size) {
   return codec.Pass();
 }
 
+void CreateImage(bool use_i444,
+                 const webrtc::DesktopSize& size,
+                 scoped_ptr<vpx_image_t>* out_image,
+                 scoped_ptr<uint8[]>* out_image_buffer) {
+  DCHECK(!size.is_empty());
+
+  scoped_ptr<vpx_image_t> image(new vpx_image_t());
+  memset(image.get(), 0, sizeof(vpx_image_t));
+
+  // libvpx seems to require both to be assigned.
+  image->d_w = size.width();
+  image->w = size.width();
+  image->d_h = size.height();
+  image->h = size.height();
+
+  // libvpx should derive chroma shifts from |fmt| but currently has a bug:
+  // https://code.google.com/p/webm/issues/detail?id=627
+  if (use_i444) {
+    image->fmt = VPX_IMG_FMT_I444;
+    image->x_chroma_shift = 0;
+    image->y_chroma_shift = 0;
+  } else {  // I420
+    image->fmt = VPX_IMG_FMT_YV12;
+    image->x_chroma_shift = 1;
+    image->y_chroma_shift = 1;
+  }
+
+  // libyuv's fast-path requires 16-byte aligned pointers and strides, so pad
+  // the Y, U and V planes' strides to multiples of 16 bytes.
+  const int y_stride = ((image->w - 1) & ~15) + 16;
+  const int uv_unaligned_stride = y_stride >> image->x_chroma_shift;
+  const int uv_stride = ((uv_unaligned_stride - 1) & ~15) + 16;
+
+  // libvpx accesses the source image in macro blocks, and will over-read
+  // if the image is not padded out to the next macroblock: crbug.com/119633.
+  // Pad the Y, U and V planes' height out to compensate.
+  // Assuming macroblocks are 16x16, aligning the planes' strides above also
+  // macroblock aligned them.
+  DCHECK_EQ(16, kMacroBlockSize);
+  const int y_rows = ((image->h - 1) & ~(kMacroBlockSize-1)) + kMacroBlockSize;
+  const int uv_rows = y_rows >> image->y_chroma_shift;
+
+  // Allocate a YUV buffer large enough for the aligned data & padding.
+  const int buffer_size = y_stride * y_rows + 2 * uv_stride * uv_rows;
+  scoped_ptr<uint8[]> image_buffer(new uint8[buffer_size]);
+
+  // Reset image value to 128 so we just need to fill in the y plane.
+  memset(image_buffer.get(), 128, buffer_size);
+
+  // Fill in the information for |image_|.
+  unsigned char* uchar_buffer =
+      reinterpret_cast<unsigned char*>(image_buffer.get());
+  image->planes[0] = uchar_buffer;
+  image->planes[1] = image->planes[0] + y_stride * y_rows;
+  image->planes[2] = image->planes[1] + uv_stride * uv_rows;
+  image->stride[0] = y_stride;
+  image->stride[1] = uv_stride;
+  image->stride[2] = uv_stride;
+
+  *out_image = image.Pass();
+  *out_image_buffer = image_buffer.Pass();
+}
+
 }  // namespace
 
 // static
 scoped_ptr<VideoEncoderVpx> VideoEncoderVpx::CreateForVP8() {
   return scoped_ptr<VideoEncoderVpx>(
-      new VideoEncoderVpx(base::Bind(&CreateVP8Codec)));
+      new VideoEncoderVpx(base::Bind(&CreateVP8Codec),
+                          base::Bind(&CreateImage, false)));
+}
+
+// static
+scoped_ptr<VideoEncoderVpx> VideoEncoderVpx::CreateForVP9I420() {
+  return scoped_ptr<VideoEncoderVpx>(
+      new VideoEncoderVpx(base::Bind(&CreateVP9Codec, false),
+                          base::Bind(&CreateImage, false)));
 }
 
 // static
-scoped_ptr<VideoEncoderVpx> VideoEncoderVpx::CreateForVP9() {
+scoped_ptr<VideoEncoderVpx> VideoEncoderVpx::CreateForVP9I444() {
   return scoped_ptr<VideoEncoderVpx>(
-      new VideoEncoderVpx(base::Bind(&CreateVP9Codec)));
+      new VideoEncoderVpx(base::Bind(&CreateVP9Codec, true),
+                          base::Bind(&CreateImage, true)));
 }
 
 VideoEncoderVpx::~VideoEncoderVpx() {}
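CreateImage's alignment expression rounds a width up to the next multiple of 16 without branching; a worked example of ((w - 1) & ~15) + 16:

    //   w = 33: ((33 - 1) & ~15) + 16 = 32 + 16 = 48
    //   w = 48: ((48 - 1) & ~15) + 16 = 32 + 16 = 48
    //   w = 16: ((16 - 1) & ~15) + 16 =  0 + 16 = 16

Note that for I444 both chroma shifts are zero, so uv_stride == y_stride and uv_rows == y_rows, making buffer_size exactly 3 * y_stride * y_rows.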
@@ -233,8 +312,10 @@ scoped_ptr<VideoPacket> VideoEncoderVpx::Encode(
   return packet.Pass();
 }
 
-VideoEncoderVpx::VideoEncoderVpx(const InitializeCodecCallback& init_codec)
-    : init_codec_(init_codec),
+VideoEncoderVpx::VideoEncoderVpx(const CreateCodecCallback& create_codec,
+                                 const CreateImageCallback& create_image)
+    : create_codec_(create_codec),
+      create_image_(create_image),
       active_map_width_(0),
       active_map_height_(0) {
 }
@@ -242,60 +323,16 @@ VideoEncoderVpx::VideoEncoderVpx(const InitializeCodecCallback& init_codec)
 bool VideoEncoderVpx::Initialize(const webrtc::DesktopSize& size) {
   codec_.reset();
 
-  image_.reset(new vpx_image_t());
-  memset(image_.get(), 0, sizeof(vpx_image_t));
-
-  image_->fmt = VPX_IMG_FMT_YV12;
-
-  // libvpx seems to require both to be assigned.
-  image_->d_w = size.width();
-  image_->w = size.width();
-  image_->d_h = size.height();
-  image_->h = size.height();
-
-  // libvpx should derive this from |fmt| but currently has a bug:
-  // https://code.google.com/p/webm/issues/detail?id=627
-  image_->x_chroma_shift = 1;
-  image_->y_chroma_shift = 1;
+  // (Re)Create the VPX image structure and pixel buffer.
+  create_image_.Run(size, &image_, &image_buffer_);
 
   // Initialize active map.
   active_map_width_ = (image_->w + kMacroBlockSize - 1) / kMacroBlockSize;
   active_map_height_ = (image_->h + kMacroBlockSize - 1) / kMacroBlockSize;
   active_map_.reset(new uint8[active_map_width_ * active_map_height_]);
 
-  // libyuv's fast-path requires 16-byte aligned pointers and strides, so pad
-  // the Y, U and V planes' strides to multiples of 16 bytes.
-  const int y_stride = ((image_->w - 1) & ~15) + 16;
-  const int uv_unaligned_stride = y_stride / 2;
-  const int uv_stride = ((uv_unaligned_stride - 1) & ~15) + 16;
-
-  // libvpx accesses the source image in macro blocks, and will over-read
-  // if the image is not padded out to the next macroblock: crbug.com/119633.
-  // Pad the Y, U and V planes' height out to compensate.
-  // Assuming macroblocks are 16x16, aligning the planes' strides above also
-  // macroblock aligned them.
-  DCHECK_EQ(16, kMacroBlockSize);
-  const int y_rows = active_map_height_ * kMacroBlockSize;
-  const int uv_rows = y_rows / 2;
-
-  // Allocate a YUV buffer large enough for the aligned data & padding.
-  const int buffer_size = y_stride * y_rows + 2 * uv_stride * uv_rows;
-  yuv_image_.reset(new uint8[buffer_size]);
-
-  // Reset image value to 128 so we just need to fill in the y plane.
-  memset(yuv_image_.get(), 128, buffer_size);
-
-  // Fill in the information for |image_|.
-  unsigned char* image = reinterpret_cast<unsigned char*>(yuv_image_.get());
-  image_->planes[0] = image;
-  image_->planes[1] = image_->planes[0] + y_stride * y_rows;
-  image_->planes[2] = image_->planes[1] + uv_stride * uv_rows;
-  image_->stride[0] = y_stride;
-  image_->stride[1] = uv_stride;
-  image_->stride[2] = uv_stride;
-
-  // Initialize the codec.
-  codec_ = init_codec_.Run(size);
+  // (Re)Initialize the codec.
+  codec_ = create_codec_.Run(size);
 
   return codec_;
 }
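The active-map dimensions use the ceil-divide idiom, so frames whose sides are not multiples of 16 still get map cells for their partial edge blocks:

    //   (w + kMacroBlockSize - 1) / kMacroBlockSize == ceil(w / 16.0)
    //   e.g. w = 100: (100 + 15) / 16 = 7 cells (6 full blocks + 1 partial)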
@@ -336,13 +373,40 @@ void VideoEncoderVpx::PrepareImage(const webrtc::DesktopFrame& frame,
   uint8* y_data = image_->planes[0];
   uint8* u_data = image_->planes[1];
   uint8* v_data = image_->planes[2];
 
-  for (webrtc::DesktopRegion::Iterator r(*updated_region); !r.IsAtEnd();
-       r.Advance()) {
-    const webrtc::DesktopRect& rect = r.rect();
-    ConvertRGB32ToYUVWithRect(
-        rgb_data, y_data, u_data, v_data,
-        rect.left(), rect.top(), rect.width(), rect.height(),
-        rgb_stride, y_stride, uv_stride);
-  }
+  switch (image_->fmt) {
+    case VPX_IMG_FMT_I444:
+      for (webrtc::DesktopRegion::Iterator r(*updated_region); !r.IsAtEnd();
+           r.Advance()) {
+        const webrtc::DesktopRect& rect = r.rect();
+        int rgb_offset = rgb_stride * rect.top() +
+                         rect.left() * kBytesPerRgbPixel;
+        int yuv_offset = uv_stride * rect.top() + rect.left();
+        libyuv::ARGBToI444(rgb_data + rgb_offset, rgb_stride,
+                           y_data + yuv_offset, y_stride,
+                           u_data + yuv_offset, uv_stride,
+                           v_data + yuv_offset, uv_stride,
+                           rect.width(), rect.height());
+      }
+      break;
+    case VPX_IMG_FMT_YV12:
+      for (webrtc::DesktopRegion::Iterator r(*updated_region); !r.IsAtEnd();
+           r.Advance()) {
+        const webrtc::DesktopRect& rect = r.rect();
+        int rgb_offset = rgb_stride * rect.top() +
+                         rect.left() * kBytesPerRgbPixel;
+        int y_offset = y_stride * rect.top() + rect.left();
+        int uv_offset = uv_stride * rect.top() / 2 + rect.left() / 2;
+        libyuv::ARGBToI420(rgb_data + rgb_offset, rgb_stride,
+                           y_data + y_offset, y_stride,
+                           u_data + uv_offset, uv_stride,
+                           v_data + uv_offset, uv_stride,
+                           rect.width(), rect.height());
+      }
+      break;
+    default:
+      NOTREACHED();
+      break;
+  }
 }
...
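The two switch arms differ only in their offset arithmetic. For I444 every plane is full resolution, and given CreateImage's math y_stride == uv_stride, so a single offset serves Y, U and V. For I420 the chroma planes are half resolution in both axes, so the row and column terms are both halved (this assumes dirty rects are 2x2-aligned, making the divisions exact):

    //   I444: yuv_offset = stride * rect.top() + rect.left()   (all planes)
    //   I420: y_offset  = y_stride * rect.top() + rect.left()
    //         uv_offset = uv_stride * rect.top() / 2 + rect.left() / 2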
@@ -23,7 +23,8 @@ class VideoEncoderVpx : public VideoEncoder {
  public:
   // Create encoder for the specified protocol.
   static scoped_ptr<VideoEncoderVpx> CreateForVP8();
-  static scoped_ptr<VideoEncoderVpx> CreateForVP9();
+  static scoped_ptr<VideoEncoderVpx> CreateForVP9I420();
+  static scoped_ptr<VideoEncoderVpx> CreateForVP9I444();
 
   virtual ~VideoEncoderVpx();
 
@@ -33,9 +34,14 @@ class VideoEncoderVpx : public VideoEncoder {
  private:
   typedef base::Callback<ScopedVpxCodec(const webrtc::DesktopSize&)>
-      InitializeCodecCallback;
+      CreateCodecCallback;
+  typedef base::Callback<void(const webrtc::DesktopSize&,
+                              scoped_ptr<vpx_image_t>* out_image,
+                              scoped_ptr<uint8[]>* out_image_buffer)>
+      CreateImageCallback;
 
-  VideoEncoderVpx(const InitializeCodecCallback& init_codec);
+  VideoEncoderVpx(const CreateCodecCallback& create_codec,
+                  const CreateImageCallback& create_image);
 
   // Initializes the codec for frames of |size|. Returns true if successful.
   bool Initialize(const webrtc::DesktopSize& size);
 
@@ -49,17 +55,20 @@ class VideoEncoderVpx : public VideoEncoder {
   // given to the encoder to speed up encoding.
   void PrepareActiveMap(const webrtc::DesktopRegion& updated_region);
 
-  InitializeCodecCallback init_codec_;
+  CreateCodecCallback create_codec_;
+  CreateImageCallback create_image_;
 
   ScopedVpxCodec codec_;
+  base::TimeTicks timestamp_base_;
+
+  // VPX image and buffer to hold the actual YUV planes.
   scoped_ptr<vpx_image_t> image_;
+  scoped_ptr<uint8[]> image_buffer_;
+
+  // Active map used to optimize out processing of un-changed macroblocks.
   scoped_ptr<uint8[]> active_map_;
   int active_map_width_;
   int active_map_height_;
-  base::TimeTicks timestamp_base_;
-
-  // Buffer for storing the yuv image.
-  scoped_ptr<uint8[]> yuv_image_;
 
   DISALLOW_COPY_AND_ASSIGN(VideoEncoderVpx);
 };
...
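The header swap from one factory callback to two is what lets a single encoder class cover VP8, VP9/I420 and VP9/I444: base::Bind partially applies leading arguments, so binding the bool of CreateVP9Codec(bool use_i444, const webrtc::DesktopSize&) yields a callback matching the DesktopSize-only CreateCodecCallback signature. A sketch (illustrative only; within the class, since the typedefs are private):

    // base::Bind(&CreateVP9Codec, true) curries |use_i444| away:
    CreateCodecCallback cb = base::Bind(&CreateVP9Codec, true);
    ScopedVpxCodec codec = cb.Run(webrtc::DesktopSize(1920, 1080));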
@@ -450,7 +450,7 @@ scoped_ptr<VideoEncoder> ClientSession::CreateVideoEncoder(
   if (video_config.codec == protocol::ChannelConfig::CODEC_VP8) {
     return remoting::VideoEncoderVpx::CreateForVP8().PassAs<VideoEncoder>();
   } else if (video_config.codec == protocol::ChannelConfig::CODEC_VP9) {
-    return remoting::VideoEncoderVpx::CreateForVP9().PassAs<VideoEncoder>();
+    return remoting::VideoEncoderVpx::CreateForVP9I420().PassAs<VideoEncoder>();
   }
 
   NOTREACHED();
...
@@ -17,6 +17,7 @@
       '../ppapi/ppapi.gyp:ppapi_cpp',
       '../testing/gmock.gyp:gmock',
      '../testing/gtest.gyp:gtest',
+      '../third_party/libyuv/libyuv.gyp:libyuv',
       '../third_party/webrtc/modules/modules.gyp:desktop_capture',
       '../ui/base/ui_base.gyp:ui_base',
       '../ui/gfx/gfx.gyp:gfx',
...