Commit 4b0cc731 authored by sandersd@chromium.org

Support for YUV 4:4:4 subsampling.

Plumb a new VideoFrame format (YV24) through the stack and add a conversion using libyuv to the software path.

BUG=104711

Review URL: https://codereview.chromium.org/289373011

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@274434 0039d316-1c4b-4281-b951-d872f2087c98
parent 195c092d
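
For orientation before the per-file diffs: YV24 (4:4:4) keeps the U and V planes at full resolution, while YV12 (4:2:0) halves them in both dimensions and YV16 (4:2:2) halves them horizontally only. The standalone sketch below is illustrative only (the enum, struct, and helper names are made up, not Chromium APIs); it just prints the per-plane sizes each format implies.

// Illustrative sketch: per-plane dimensions implied by each planar format.
// The enum, struct, and helper below are hypothetical, not media::VideoFrame.
#include <cstdio>

enum Format { YV12, YV16, YV24 };

struct PlaneDims { int y_width, y_height, uv_width, uv_height; };

static PlaneDims GetPlaneDims(Format format, int width, int height) {
  PlaneDims dims = {width, height, width, height};
  if (format != YV24)
    dims.uv_width = (width + 1) / 2;    // 4:2:0 and 4:2:2 halve chroma width.
  if (format == YV12)
    dims.uv_height = (height + 1) / 2;  // Only 4:2:0 halves chroma height.
  return dims;
}

int main() {
  const Format kFormats[] = {YV12, YV16, YV24};
  const char* kNames[] = {"YV12", "YV16", "YV24"};
  for (int i = 0; i < 3; ++i) {
    const PlaneDims d = GetPlaneDims(kFormats[i], 640, 480);
    std::printf("%s: Y %dx%d, U/V %dx%d\n", kNames[i],
                d.y_width, d.y_height, d.uv_width, d.uv_height);
  }
  return 0;
}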
@@ -65,6 +65,7 @@ bool VideoResourceUpdater::VerifyFrame(
     case media::VideoFrame::YV12A:
     case media::VideoFrame::YV16:
     case media::VideoFrame::YV12J:
+    case media::VideoFrame::YV24:
     case media::VideoFrame::NATIVE_TEXTURE:
 #if defined(VIDEO_HOLE)
     case media::VideoFrame::HOLE:
@@ -111,12 +112,14 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
          input_frame_format == media::VideoFrame::I420 ||
          input_frame_format == media::VideoFrame::YV12A ||
          input_frame_format == media::VideoFrame::YV12J ||
-         input_frame_format == media::VideoFrame::YV16);
+         input_frame_format == media::VideoFrame::YV16 ||
+         input_frame_format == media::VideoFrame::YV24);
   if (input_frame_format != media::VideoFrame::YV12 &&
       input_frame_format != media::VideoFrame::I420 &&
       input_frame_format != media::VideoFrame::YV12A &&
      input_frame_format != media::VideoFrame::YV12J &&
-      input_frame_format != media::VideoFrame::YV16)
+      input_frame_format != media::VideoFrame::YV16 &&
+      input_frame_format != media::VideoFrame::YV24)
     return VideoFrameExternalResources();

   bool software_compositor = context_provider_ == NULL;
...
@@ -217,8 +217,7 @@ IN_PROC_BROWSER_TEST_F(MediaTest, MAYBE(Yuv422pTheora)) {
 }

 IN_PROC_BROWSER_TEST_F(MediaTest, MAYBE(Yuv444pTheora)) {
-  // TODO(scherkus): Support YUV444 http://crbug.com/104711
-  RunColorFormatTest("yuv424p.ogv", "ERROR");
+  RunColorFormatTest("yuv444p.ogv", "ENDED");
 }

 IN_PROC_BROWSER_TEST_F(MediaTest, MAYBE(Yuv420pVp8)) {
@@ -239,8 +238,7 @@ IN_PROC_BROWSER_TEST_F(MediaTest, MAYBE(Yuv422pH264)) {
 }

 IN_PROC_BROWSER_TEST_F(MediaTest, MAYBE(Yuv444pH264)) {
-  // TODO(scherkus): Support YUV444 http://crbug.com/104711
-  RunColorFormatTest("yuv444p.mp4", "ERROR");
+  RunColorFormatTest("yuv444p.mp4", "ENDED");
 }

 #if defined(OS_CHROMEOS)
...
@@ -499,7 +499,7 @@ class CompositingRenderWidgetHostViewBrowserTestTabCapture
     bitmap.allocPixels(SkImageInfo::Make(video_frame->visible_rect().width(),
                                          video_frame->visible_rect().height(),
                                          kPMColor_SkColorType,
-                                         kOpaque_SkAlphaType));
+                                         kPremul_SkAlphaType));
     bitmap.allocPixels();
     SkCanvas canvas(bitmap);
...
@@ -15,6 +15,7 @@ static bool IsOpaque(const scoped_refptr<media::VideoFrame>& frame) {
     case media::VideoFrame::YV12J:
     case media::VideoFrame::YV16:
     case media::VideoFrame::I420:
+    case media::VideoFrame::YV24:
     case media::VideoFrame::NV12:
       return true;
...
@@ -57,7 +57,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
     NATIVE_TEXTURE = 6,  // Native texture. Pixel-format agnostic.
     YV12J = 7,  // JPEG color range version of YV12
     NV12 = 8,  // 12bpp 1x1 Y plane followed by an interleaved 2x2 UV plane.
-    FORMAT_MAX = NV12,  // Must always be equal to largest entry logged.
+    YV24 = 9,  // 24bpp YUV planar, no subsampling.
+    FORMAT_MAX = YV24,  // Must always be equal to largest entry logged.
   };

   // Returns the name of a Format as a string.
...
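
The "12bpp"/"24bpp" comments above translate directly into buffer sizes: with no chroma subsampling, a YV24 frame needs twice the bytes of YV12 or NV12 at the same coded size. A rough back-of-the-envelope helper (hypothetical, not the real VideoFrame allocation code):

// Rough size arithmetic implied by the bpp comments above; illustrative only.
#include <cstddef>
#include <cstdio>

static size_t ApproxFrameBytes(int width, int height, int bits_per_pixel) {
  return static_cast<size_t>(width) * height * bits_per_pixel / 8;
}

int main() {
  // For a 1280x720 frame: YV12/NV12 are 12bpp, YV24 is 24bpp.
  std::printf("YV12/NV12: %zu bytes\n", ApproxFrameBytes(1280, 720, 12));
  std::printf("YV24:      %zu bytes\n", ApproxFrameBytes(1280, 720, 24));
  return 0;
}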
@@ -400,9 +400,11 @@ void AVStreamToVideoDecoderConfig(
   }

   // Pad out |coded_size| for subsampled YUV formats.
-  coded_size.set_width((coded_size.width() + 1) / 2 * 2);
-  if (format != VideoFrame::YV16)
-    coded_size.set_height((coded_size.height() + 1) / 2 * 2);
+  if (format != VideoFrame::YV24) {
+    coded_size.set_width((coded_size.width() + 1) / 2 * 2);
+    if (format != VideoFrame::YV16)
+      coded_size.set_height((coded_size.height() + 1) / 2 * 2);
+  }

   bool is_encrypted = false;
   AVDictionaryEntry* key = av_dict_get(stream->metadata, "enc_key_id", NULL, 0);
@@ -517,6 +519,8 @@ VideoFrame::Format PixelFormatToVideoFormat(PixelFormat pixel_format) {
   switch (pixel_format) {
     case PIX_FMT_YUV422P:
       return VideoFrame::YV16;
+    case PIX_FMT_YUV444P:
+      return VideoFrame::YV24;
     case PIX_FMT_YUV420P:
       return VideoFrame::YV12;
     case PIX_FMT_YUVJ420P:
@@ -539,6 +543,8 @@ PixelFormat VideoFormatToPixelFormat(VideoFrame::Format video_format) {
       return PIX_FMT_YUVJ420P;
     case VideoFrame::YV12A:
       return PIX_FMT_YUVA420P;
+    case VideoFrame::YV24:
+      return PIX_FMT_YUV444P;
     default:
       DVLOG(1) << "Unsupported VideoFrame::Format: " << video_format;
   }
...
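
To see what the new padding branch does, run the three formats through it with an odd coded size: YV12 rounds both dimensions up to even, YV16 rounds only the width, and YV24 is left untouched. A standalone restatement of that logic (not the ffmpeg_common.cc code itself; the enum and helper are illustrative):

// Standalone restatement of the coded-size padding above; illustrative only.
#include <cstdio>

enum Format { YV12, YV16, YV24 };

static void PadCodedSize(Format format, int* width, int* height) {
  if (format != YV24) {                 // 4:4:4 needs no padding at all.
    *width = (*width + 1) / 2 * 2;      // Round width up to even.
    if (format != YV16)                 // 4:2:2 keeps the original height.
      *height = (*height + 1) / 2 * 2;  // 4:2:0 also rounds height up.
  }
}

int main() {
  const Format kFormats[] = {YV12, YV16, YV24};
  const char* kNames[] = {"YV12", "YV16", "YV24"};
  for (int i = 0; i < 3; ++i) {
    int w = 639, h = 479;
    PadCodedSize(kFormats[i], &w, &h);
    std::printf("%s: 639x479 -> %dx%d\n", kNames[i], w, h);
  }
  return 0;
}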
@@ -82,7 +82,7 @@ int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
   if (format == VideoFrame::UNKNOWN)
     return AVERROR(EINVAL);
   DCHECK(format == VideoFrame::YV12 || format == VideoFrame::YV16 ||
-         format == VideoFrame::YV12J);
+         format == VideoFrame::YV12J || format == VideoFrame::YV24);

   gfx::Size size(codec_context->width, codec_context->height);
   const int ret = av_image_check_size(size.width(), size.height(), 0, NULL);
...
@@ -29,23 +29,24 @@ namespace media {

 static bool IsYUV(media::VideoFrame::Format format) {
   return format == media::VideoFrame::YV12 ||
+         format == media::VideoFrame::YV16 ||
          format == media::VideoFrame::I420 ||
+         format == media::VideoFrame::YV12A ||
+         format == media::VideoFrame::YV12J ||
+         format == media::VideoFrame::YV24;
+}
+
+static bool IsFastPaintYUV(media::VideoFrame::Format format) {
+  return format == media::VideoFrame::YV12 ||
          format == media::VideoFrame::YV16 ||
+         format == media::VideoFrame::I420 ||
          format == media::VideoFrame::YV12J;
 }

-static bool IsEitherYUVOrNative(media::VideoFrame::Format format) {
+static bool IsYUVOrNative(media::VideoFrame::Format format) {
   return IsYUV(format) || format == media::VideoFrame::NATIVE_TEXTURE;
 }

-static bool IsEitherYUVOrYUVA(media::VideoFrame::Format format) {
-  return IsYUV(format) || format == media::VideoFrame::YV12A;
-}
-
-static bool IsEitherYUVOrYUVAOrNative(media::VideoFrame::Format format) {
-  return IsEitherYUVOrNative(format) || format == media::VideoFrame::YV12A;
-}
-
 // CanFastPaint is a helper method to determine the conditions for fast
 // painting. The conditions are:
 // 1. No skew in canvas matrix.
@@ -58,7 +59,7 @@ static bool IsEitherYUVOrYUVAOrNative(media::VideoFrame::Format format) {
 // Disable the flipping and mirroring checks once we have it.
 static bool CanFastPaint(SkCanvas* canvas, uint8 alpha,
                          media::VideoFrame::Format format) {
-  if (alpha != 0xFF || !IsYUV(format))
+  if (alpha != 0xFF || !IsFastPaintYUV(format))
     return false;

   const SkMatrix& total_matrix = canvas->getTotalMatrix();
@@ -88,7 +89,7 @@ static void FastPaint(
     const scoped_refptr<media::VideoFrame>& video_frame,
     SkCanvas* canvas,
     const SkRect& dest_rect) {
-  DCHECK(IsYUV(video_frame->format())) << video_frame->format();
+  DCHECK(IsFastPaintYUV(video_frame->format())) << video_frame->format();
   DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane),
             video_frame->stride(media::VideoFrame::kVPlane));
@@ -96,8 +97,7 @@ static void FastPaint(
   media::YUVType yuv_type = media::YV16;
   int y_shift = 0;
   if (video_frame->format() == media::VideoFrame::YV12 ||
-      video_frame->format() == media::VideoFrame::I420 ||
-      video_frame->format() == media::VideoFrame::YV12A) {
+      video_frame->format() == media::VideoFrame::I420) {
     yuv_type = media::YV12;
     y_shift = 1;
   }
@@ -251,9 +251,9 @@ static void FastPaint(
 static void ConvertVideoFrameToBitmap(
     const scoped_refptr<media::VideoFrame>& video_frame,
     SkBitmap* bitmap) {
-  DCHECK(IsEitherYUVOrYUVAOrNative(video_frame->format()))
+  DCHECK(IsYUVOrNative(video_frame->format()))
       << video_frame->format();
-  if (IsEitherYUVOrYUVA(video_frame->format())) {
+  if (IsYUV(video_frame->format())) {
     DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane),
               video_frame->stride(media::VideoFrame::kVPlane));
   }
@@ -273,7 +273,7 @@ static void ConvertVideoFrameToBitmap(
   size_t y_offset = 0;
   size_t uv_offset = 0;
-  if (IsEitherYUVOrYUVA(video_frame->format())) {
+  if (IsYUV(video_frame->format())) {
     int y_shift = (video_frame->format() == media::VideoFrame::YV16) ? 0 : 1;
     // Use the "left" and "top" of the destination rect to locate the offset
     // in Y, U and V planes.
@@ -350,6 +350,30 @@ static void ConvertVideoFrameToBitmap(
           media::YV12);
       break;
+    case media::VideoFrame::YV24:
+      libyuv::I444ToARGB(
+          video_frame->data(media::VideoFrame::kYPlane) + y_offset,
+          video_frame->stride(media::VideoFrame::kYPlane),
+          video_frame->data(media::VideoFrame::kUPlane) + uv_offset,
+          video_frame->stride(media::VideoFrame::kUPlane),
+          video_frame->data(media::VideoFrame::kVPlane) + uv_offset,
+          video_frame->stride(media::VideoFrame::kVPlane),
+          static_cast<uint8*>(bitmap->getPixels()),
+          bitmap->rowBytes(),
+          video_frame->visible_rect().width(),
+          video_frame->visible_rect().height());
+#if SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \
+    SK_A32_SHIFT == 24
+      libyuv::ARGBToABGR(
+          static_cast<uint8*>(bitmap->getPixels()),
+          bitmap->rowBytes(),
+          static_cast<uint8*>(bitmap->getPixels()),
+          bitmap->rowBytes(),
+          video_frame->visible_rect().width(),
+          video_frame->visible_rect().height());
+#endif
+      break;
     case media::VideoFrame::NATIVE_TEXTURE:
       DCHECK_EQ(video_frame->format(), media::VideoFrame::NATIVE_TEXTURE);
       video_frame->ReadPixelsFromNativeTexture(*bitmap);
@@ -385,7 +409,7 @@ void SkCanvasVideoRenderer::Paint(media::VideoFrame* video_frame,
   // Paint black rectangle if there isn't a frame available or the
   // frame has an unexpected format.
-  if (!video_frame || !IsEitherYUVOrYUVAOrNative(video_frame->format())) {
+  if (!video_frame || !IsYUVOrNative(video_frame->format())) {
     canvas->drawRect(dest, paint);
     return;
   }
@@ -403,7 +427,7 @@ void SkCanvasVideoRenderer::Paint(media::VideoFrame* video_frame,
     last_frame_timestamp_ = video_frame->timestamp();
   }

-  // Do a slower paint using |last_frame_|.
+  // Paint using |last_frame_|.
   paint.setFilterLevel(SkPaint::kLow_FilterLevel);
   canvas->drawBitmapRect(last_frame_, NULL, dest, &paint);
 }
...
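
The software path added above reduces to two libyuv calls: I444ToARGB converts the 4:4:4 planes to 32-bit ARGB at full chroma resolution, and ARGBToABGR swizzles the result in place when Skia's native 32-bit order puts red in the low byte (the SK_R32_SHIFT == 0 case). Below is a minimal standalone sketch of that sequence, assuming the libyuv umbrella header is on the include path; the 2x2 test frame and its sample values are made up for illustration.

// Minimal sketch of the YV24 software conversion: a hand-built 2x2 I444
// frame converted to 32-bit ARGB with libyuv, then swizzled in place the way
// the SK_R32_SHIFT == 0 branch above does. Illustrative, not Chromium code.
#include <cstdint>
#include <cstdio>

#include "libyuv.h"  // Umbrella header; declares I444ToARGB and ARGBToABGR.

int main() {
  const int kWidth = 2;
  const int kHeight = 2;
  // In YV24/I444 every plane is full resolution, so all strides are kWidth.
  uint8_t y_plane[kWidth * kHeight] = {16, 235, 81, 145};
  uint8_t u_plane[kWidth * kHeight] = {128, 128, 90, 54};
  uint8_t v_plane[kWidth * kHeight] = {128, 128, 240, 34};
  uint8_t argb[kWidth * kHeight * 4] = {0};

  libyuv::I444ToARGB(y_plane, kWidth,
                     u_plane, kWidth,
                     v_plane, kWidth,
                     argb, kWidth * 4,
                     kWidth, kHeight);

  // Swap B and R in place if the destination expects RGBA byte order.
  libyuv::ARGBToABGR(argb, kWidth * 4, argb, kWidth * 4, kWidth, kHeight);

  for (int i = 0; i < kWidth * kHeight; ++i) {
    std::printf("pixel %d: %02x %02x %02x %02x\n", i, argb[4 * i],
                argb[4 * i + 1], argb[4 * i + 2], argb[4 * i + 3]);
  }
  return 0;
}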