Commit c0444978 authored by Andres Calderon Jaramillo, committed by Commit Bot

VAAPI-JDA: Re-extract lower level decode code.

This CL effectively reverts the refactoring in
https://chromium-review.googlesource.com/c/chromium/src/+/1185218/. It
extracts the lower-level decode code so that it can later be used to
implement a gpu::ImageDecodeAcceleratorWorker that decodes JPEGs using
the VAAPI.

A VaapiJpegDecoder class is created to contain this lower-level code. In
a follow-up CL, this class will be expanded to take responsibility for
the VaapiWrapper and surface creation.

This opportunity is also used to fix some includes and build dependencies.
Additionally, a few code health fixes are made, the most significant being
the removal of memset() from the Fill*() methods: their output structs are
now value-initialized by the callers, as sketched below.

Bug: 924310
Test: the VaapiJpegDecoderTest unit tests pass on a nocturne.
Change-Id: Idf207e9f81385611e84069e229a0e59eec2479ec
Reviewed-on: https://chromium-review.googlesource.com/c/1424401
Reviewed-by: Dan Sanders <sandersd@chromium.org>
Reviewed-by: Miguel Casas <mcasas@chromium.org>
Commit-Queue: Andres Calderon Jaramillo <andrescj@chromium.org>
Cr-Commit-Position: refs/heads/master@{#626162}
parent 9aa2302f
@@ -564,7 +564,7 @@ if (is_chromeos || is_linux) {
"//media/test/data/peach_pi-41x23.jpg",
]
if (use_vaapi) {
deps += [ "//media/gpu/vaapi:jpeg_decode_accelerator_unit_test" ]
deps += [ "//media/gpu/vaapi:jpeg_decoder_unit_test" ]
data += [ "//media/test/data/pixel-1280x720.jpg" ]
}
if (use_x11) {
@@ -41,6 +41,8 @@ source_set("vaapi") {
"vaapi_h264_accelerator.h",
"vaapi_jpeg_decode_accelerator.cc",
"vaapi_jpeg_decode_accelerator.h",
"vaapi_jpeg_decoder.cc",
"vaapi_jpeg_decoder.h",
"vaapi_jpeg_encode_accelerator.cc",
"vaapi_jpeg_encode_accelerator.h",
"vaapi_jpeg_encoder.cc",
@@ -69,10 +71,12 @@ source_set("vaapi") {
deps = [
":libva_stubs",
"//base",
"//gpu/ipc/service",
"//media",
"//media/gpu:common",
"//third_party/libyuv",
"//ui/gfx/geometry",
]
if (is_linux) {
@@ -127,19 +131,18 @@ source_set("unit_test") {
]
}
source_set("jpeg_decode_accelerator_unit_test") {
source_set("jpeg_decoder_unit_test") {
testonly = true
sources = [
"vaapi_jpeg_decode_accelerator_unittest.cc",
"vaapi_jpeg_decoder_unittest.cc",
]
deps = [
":vaapi",
"//base",
"//base/test:test_support",
"//gpu:test_support",
"//media",
"//media:test_support",
"//media/gpu:common",
"//testing/gmock",
"//testing/gtest",
"//ui/gfx:test_support",
"//ui/gfx/geometry",
]
}
@@ -5,16 +5,18 @@
#include "media/gpu/vaapi/vaapi_jpeg_decode_accelerator.h"
#include <stddef.h>
#include <string.h>
#include <memory>
#include <iostream>
#include <utility>
#include <vector>
#include <va/va.h>
#include "base/bind.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/metrics/histogram_macros.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "media/base/bitstream_buffer.h"
@@ -22,7 +24,7 @@
#include "media/base/video_frame.h"
#include "media/filters/jpeg_parser.h"
#include "media/gpu/macros.h"
#include "media/gpu/vaapi/vaapi_picture.h"
#include "media/gpu/vaapi/vaapi_jpeg_decoder.h"
#include "media/gpu/vaapi/vaapi_utils.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "third_party/libyuv/include/libyuv.h"
@@ -30,6 +32,15 @@
namespace media {
namespace {
// TODO(andrescj): remove this once VaapiJpegDecoder is responsible for
// obtaining the VAImage.
constexpr VAImageFormat kImageFormatI420 = {
.fourcc = VA_FOURCC_I420,
.byte_order = VA_LSB_FIRST,
.bits_per_pixel = 12,
};
// UMA errors that the VaapiJpegDecodeAccelerator class reports.
enum VAJDADecoderFailure {
VAAPI_ERROR = 0,
@@ -41,235 +52,6 @@ static void ReportToUMA(VAJDADecoderFailure failure) {
VAJDA_DECODER_FAILURES_MAX + 1);
}
// Check the value of VA_FOURCC_YUYV, as we don't have access to the VA_FOURCC
// macro in the header file without pulling in the entire <va/va.h>.
static_assert(VA_FOURCC_YUYV == VA_FOURCC('Y', 'U', 'Y', 'V'),
"VA_FOURCC_YUYV must be equal to VA_FOURCC('Y', 'U', 'Y', 'V')");
constexpr VAImageFormat kImageFormatI420 = {.fourcc = VA_FOURCC_I420,
.byte_order = VA_LSB_FIRST,
.bits_per_pixel = 12};
constexpr VAImageFormat kImageFormatYUYV = {.fourcc = VA_FOURCC_YUYV,
.byte_order = VA_LSB_FIRST,
.bits_per_pixel = 16};
// Convert the specified surface format to the associated output image format.
bool VaSurfaceFormatToImageFormat(uint32_t va_rt_format,
VAImageFormat* va_image_format) {
switch (va_rt_format) {
case VA_RT_FORMAT_YUV420:
*va_image_format = kImageFormatI420;
return true;
case VA_RT_FORMAT_YUV422:
*va_image_format = kImageFormatYUYV;
return true;
default:
return false;
}
}
static unsigned int VaSurfaceFormatForJpeg(
const JpegFrameHeader& frame_header) {
// The range of sampling factor is [1, 4]. Pack them into integer to make the
// matching code simpler. For example, 0x211 means the sampling factor are 2,
// 1, 1 for 3 components.
unsigned int h = 0, v = 0;
for (int i = 0; i < frame_header.num_components; i++) {
DCHECK_LE(frame_header.components[i].horizontal_sampling_factor, 4);
DCHECK_LE(frame_header.components[i].vertical_sampling_factor, 4);
h = h << 4 | frame_header.components[i].horizontal_sampling_factor;
v = v << 4 | frame_header.components[i].vertical_sampling_factor;
}
switch (frame_header.num_components) {
case 1: // Grey image
return VA_RT_FORMAT_YUV400;
case 3: // Y Cb Cr color image
// See https://en.wikipedia.org/wiki/Chroma_subsampling for the
// definition of these numbers.
if (h == 0x211 && v == 0x211)
return VA_RT_FORMAT_YUV420;
if (h == 0x211 && v == 0x111)
return VA_RT_FORMAT_YUV422;
if (h == 0x111 && v == 0x111)
return VA_RT_FORMAT_YUV444;
if (h == 0x411 && v == 0x111)
return VA_RT_FORMAT_YUV411;
}
VLOGF(1) << "Unsupported sampling factor: num_components="
<< frame_header.num_components << ", h=" << std::hex << h
<< ", v=" << v;
return 0;
}
// VAAPI only supports a subset of JPEG profiles. This function determines
// whether a given parsed JPEG result is supported or not.
static bool IsVaapiSupportedJpeg(const JpegParseResult& jpeg) {
if (jpeg.frame_header.visible_width < 1 ||
jpeg.frame_header.visible_height < 1) {
DLOG(ERROR) << "width(" << jpeg.frame_header.visible_width
<< ") and height(" << jpeg.frame_header.visible_height
<< ") should be at least 1";
return false;
}
// Size 64k*64k is the maximum in the JPEG standard. VAAPI doesn't support
// resolutions larger than 16k*16k.
const int kMaxDimension = 16384;
if (jpeg.frame_header.coded_width > kMaxDimension ||
jpeg.frame_header.coded_height > kMaxDimension) {
DLOG(ERROR) << "VAAPI doesn't support size("
<< jpeg.frame_header.coded_width << "*"
<< jpeg.frame_header.coded_height << ") larger than "
<< kMaxDimension << "*" << kMaxDimension;
return false;
}
if (jpeg.frame_header.num_components != 3) {
DLOG(ERROR) << "VAAPI doesn't support num_components("
<< static_cast<int>(jpeg.frame_header.num_components)
<< ") != 3";
return false;
}
if (jpeg.frame_header.components[0].horizontal_sampling_factor <
jpeg.frame_header.components[1].horizontal_sampling_factor ||
jpeg.frame_header.components[0].horizontal_sampling_factor <
jpeg.frame_header.components[2].horizontal_sampling_factor) {
DLOG(ERROR) << "VAAPI doesn't supports horizontal sampling factor of Y"
<< " smaller than Cb and Cr";
return false;
}
if (jpeg.frame_header.components[0].vertical_sampling_factor <
jpeg.frame_header.components[1].vertical_sampling_factor ||
jpeg.frame_header.components[0].vertical_sampling_factor <
jpeg.frame_header.components[2].vertical_sampling_factor) {
DLOG(ERROR) << "VAAPI doesn't supports vertical sampling factor of Y"
<< " smaller than Cb and Cr";
return false;
}
return true;
}
static void FillPictureParameters(
const JpegFrameHeader& frame_header,
VAPictureParameterBufferJPEGBaseline* pic_param) {
memset(pic_param, 0, sizeof(*pic_param));
pic_param->picture_width = frame_header.coded_width;
pic_param->picture_height = frame_header.coded_height;
pic_param->num_components = frame_header.num_components;
for (int i = 0; i < pic_param->num_components; i++) {
pic_param->components[i].component_id = frame_header.components[i].id;
pic_param->components[i].h_sampling_factor =
frame_header.components[i].horizontal_sampling_factor;
pic_param->components[i].v_sampling_factor =
frame_header.components[i].vertical_sampling_factor;
pic_param->components[i].quantiser_table_selector =
frame_header.components[i].quantization_table_selector;
}
}
static void FillIQMatrix(const JpegQuantizationTable* q_table,
VAIQMatrixBufferJPEGBaseline* iq_matrix) {
memset(iq_matrix, 0, sizeof(*iq_matrix));
static_assert(kJpegMaxQuantizationTableNum ==
std::extent<decltype(iq_matrix->load_quantiser_table)>(),
"max number of quantization table mismatched");
static_assert(
sizeof(iq_matrix->quantiser_table[0]) == sizeof(q_table[0].value),
"number of quantization entries mismatched");
for (size_t i = 0; i < kJpegMaxQuantizationTableNum; i++) {
if (!q_table[i].valid)
continue;
iq_matrix->load_quantiser_table[i] = 1;
for (size_t j = 0; j < base::size(q_table[i].value); j++)
iq_matrix->quantiser_table[i][j] = q_table[i].value[j];
}
}
static void FillHuffmanTable(const JpegHuffmanTable* dc_table,
const JpegHuffmanTable* ac_table,
VAHuffmanTableBufferJPEGBaseline* huffman_table) {
memset(huffman_table, 0, sizeof(*huffman_table));
// Use default huffman tables if not specified in header.
bool has_huffman_table = false;
for (size_t i = 0; i < kJpegMaxHuffmanTableNumBaseline; i++) {
if (dc_table[i].valid || ac_table[i].valid) {
has_huffman_table = true;
break;
}
}
if (!has_huffman_table) {
dc_table = kDefaultDcTable;
ac_table = kDefaultAcTable;
}
static_assert(kJpegMaxHuffmanTableNumBaseline ==
std::extent<decltype(huffman_table->load_huffman_table)>(),
"max number of huffman table mismatched");
static_assert(sizeof(huffman_table->huffman_table[0].num_dc_codes) ==
sizeof(dc_table[0].code_length),
"size of huffman table code length mismatch");
static_assert(sizeof(huffman_table->huffman_table[0].dc_values[0]) ==
sizeof(dc_table[0].code_value[0]),
"size of huffman table code value mismatch");
for (size_t i = 0; i < kJpegMaxHuffmanTableNumBaseline; i++) {
if (!dc_table[i].valid || !ac_table[i].valid)
continue;
huffman_table->load_huffman_table[i] = 1;
memcpy(huffman_table->huffman_table[i].num_dc_codes,
dc_table[i].code_length,
sizeof(huffman_table->huffman_table[i].num_dc_codes));
memcpy(huffman_table->huffman_table[i].dc_values, dc_table[i].code_value,
sizeof(huffman_table->huffman_table[i].dc_values));
memcpy(huffman_table->huffman_table[i].num_ac_codes,
ac_table[i].code_length,
sizeof(huffman_table->huffman_table[i].num_ac_codes));
memcpy(huffman_table->huffman_table[i].ac_values, ac_table[i].code_value,
sizeof(huffman_table->huffman_table[i].ac_values));
}
}
static void FillSliceParameters(
const JpegParseResult& parse_result,
VASliceParameterBufferJPEGBaseline* slice_param) {
memset(slice_param, 0, sizeof(*slice_param));
slice_param->slice_data_size = parse_result.data_size;
slice_param->slice_data_offset = 0;
slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
slice_param->slice_horizontal_position = 0;
slice_param->slice_vertical_position = 0;
slice_param->num_components = parse_result.scan.num_components;
for (int i = 0; i < slice_param->num_components; i++) {
slice_param->components[i].component_selector =
parse_result.scan.components[i].component_selector;
slice_param->components[i].dc_table_selector =
parse_result.scan.components[i].dc_selector;
slice_param->components[i].ac_table_selector =
parse_result.scan.components[i].ac_selector;
}
slice_param->restart_interval = parse_result.restart_interval;
// Cast to int to prevent overflow.
int max_h_factor =
parse_result.frame_header.components[0].horizontal_sampling_factor;
int max_v_factor =
parse_result.frame_header.components[0].vertical_sampling_factor;
int mcu_cols = parse_result.frame_header.coded_width / (max_h_factor * 8);
DCHECK_GT(mcu_cols, 0);
int mcu_rows = parse_result.frame_header.coded_height / (max_v_factor * 8);
DCHECK_GT(mcu_rows, 0);
slice_param->num_mcus = mcu_rows * mcu_cols;
}
} // namespace
void VaapiJpegDecodeAccelerator::NotifyError(int32_t bitstream_buffer_id,
@@ -399,7 +181,7 @@ bool VaapiJpegDecodeAccelerator::OutputPicture(
break;
}
case VA_FOURCC_YUY2:
case VA_FOURCC_YUYV: {
case VA_FOURCC('Y', 'U', 'Y', 'V'): {
DCHECK_EQ(image->num_planes, 1u);
const uint8_t* src_yuy2 = mem + image->offsets[0];
const size_t src_yuy2_stride = image->pitches[0];
@@ -469,7 +251,8 @@ void VaapiJpegDecodeAccelerator::DecodeTask(
coded_size_ = new_coded_size;
}
if (!DoDecode(vaapi_wrapper_.get(), parse_result, va_surface_id_)) {
if (!VaapiJpegDecoder::DoDecode(vaapi_wrapper_.get(), parse_result,
va_surface_id_)) {
VLOGF(1) << "Decode JPEG failed";
NotifyError(bitstream_buffer_id, PLATFORM_FAILURE);
return;
@@ -520,51 +303,4 @@ bool VaapiJpegDecodeAccelerator::IsSupported() {
return VaapiWrapper::IsJpegDecodeSupported();
}
// static
bool VaapiJpegDecodeAccelerator::DoDecode(VaapiWrapper* vaapi_wrapper,
const JpegParseResult& parse_result,
VASurfaceID va_surface) {
DCHECK_NE(va_surface, VA_INVALID_SURFACE);
if (!IsVaapiSupportedJpeg(parse_result))
return false;
// Set picture parameters.
VAPictureParameterBufferJPEGBaseline pic_param;
FillPictureParameters(parse_result.frame_header, &pic_param);
if (!vaapi_wrapper->SubmitBuffer(VAPictureParameterBufferType, &pic_param)) {
return false;
}
// Set quantization table.
VAIQMatrixBufferJPEGBaseline iq_matrix;
FillIQMatrix(parse_result.q_table, &iq_matrix);
if (!vaapi_wrapper->SubmitBuffer(VAIQMatrixBufferType, &iq_matrix)) {
return false;
}
// Set huffman table.
VAHuffmanTableBufferJPEGBaseline huffman_table;
FillHuffmanTable(parse_result.dc_table, parse_result.ac_table,
&huffman_table);
if (!vaapi_wrapper->SubmitBuffer(VAHuffmanTableBufferType, &huffman_table)) {
return false;
}
// Set slice parameters.
VASliceParameterBufferJPEGBaseline slice_param;
FillSliceParameters(parse_result, &slice_param);
if (!vaapi_wrapper->SubmitBuffer(VASliceParameterBufferType, &slice_param)) {
return false;
}
// Set scan data.
if (!vaapi_wrapper->SubmitBuffer(VASliceDataBufferType,
parse_result.data_size,
const_cast<char*>(parse_result.data))) {
return false;
}
return vaapi_wrapper->ExecuteAndDestroyPendingBuffers(va_surface);
}
} // namespace media
@@ -10,27 +10,28 @@
#include <memory>
#include "base/macros.h"
#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread.h"
#include "media/gpu/media_gpu_export.h"
#include "media/video/jpeg_decode_accelerator.h"
#include "ui/gfx/geometry/size.h"
// These data types are defined in va/va.h using typedef, reproduced here.
typedef struct _VAImageFormat VAImageFormat;
typedef unsigned int VASurfaceID;
// This data type is defined in va/va.h using typedef, reproduced here.
// TODO(andrescj): remove this once VaapiJpegDecoder is responsible for
// obtaining the VAImage.
using VASurfaceID = unsigned int;
namespace base {
class SingleThreadTaskRunner;
}
namespace media {
class BitstreamBuffer;
struct JpegParseResult;
class UnalignedSharedMemory;
class VaapiWrapper;
class VaapiJpegDecodeAcceleratorTest;
// Alternative notation for the VA_FOURCC_YUY2 format, <va/va.h> doesn't provide
// this specific packing/ordering.
constexpr uint32_t VA_FOURCC_YUYV = 0x56595559;
class VideoFrame;
// Class to provide JPEG decode acceleration for Intel systems with hardware
// support for it, and on which libva is available.
@@ -54,8 +55,6 @@ class MEDIA_GPU_EXPORT VaapiJpegDecodeAccelerator
bool IsSupported() override;
private:
friend class VaapiJpegDecodeAcceleratorTest;
// Notifies the client that an error has occurred and decoding cannot
// continue. The client is notified on the |task_runner_|, i.e., the thread in
// which |*this| was created.
@@ -78,16 +77,6 @@ class MEDIA_GPU_EXPORT VaapiJpegDecodeAccelerator
int32_t input_buffer_id,
const scoped_refptr<VideoFrame>& video_frame);
// Decodes a JPEG picture. It will fill VA-API parameters and call
// corresponding VA-API methods according to the JPEG |parse_result|. Decoded
// data will be outputted to the given |va_surface|. Returns false on failure.
// |vaapi_wrapper| should be initialized in kDecode mode with
// VAProfileJPEGBaseline profile. |va_surface| should be created with size at
// least as large as the picture size.
static bool DoDecode(VaapiWrapper* vaapi_wrapper,
const JpegParseResult& parse_result,
VASurfaceID va_surface);
// ChildThread's task runner.
const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/gpu/vaapi/vaapi_jpeg_decoder.h"
#include <string.h>
#include <iostream>
#include <type_traits>
#include <va/va.h>
#include "base/logging.h"
#include "base/stl_util.h"
#include "media/filters/jpeg_parser.h"
#include "media/gpu/macros.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
namespace media {
namespace {
constexpr VAImageFormat kImageFormatI420 = {
.fourcc = VA_FOURCC_I420,
.byte_order = VA_LSB_FIRST,
.bits_per_pixel = 12,
};
constexpr VAImageFormat kImageFormatYUYV = {
.fourcc = VA_FOURCC('Y', 'U', 'Y', 'V'),
.byte_order = VA_LSB_FIRST,
.bits_per_pixel = 16,
};
static void FillPictureParameters(
const JpegFrameHeader& frame_header,
VAPictureParameterBufferJPEGBaseline* pic_param) {
pic_param->picture_width = frame_header.coded_width;
pic_param->picture_height = frame_header.coded_height;
pic_param->num_components = frame_header.num_components;
for (int i = 0; i < pic_param->num_components; i++) {
pic_param->components[i].component_id = frame_header.components[i].id;
pic_param->components[i].h_sampling_factor =
frame_header.components[i].horizontal_sampling_factor;
pic_param->components[i].v_sampling_factor =
frame_header.components[i].vertical_sampling_factor;
pic_param->components[i].quantiser_table_selector =
frame_header.components[i].quantization_table_selector;
}
}
static void FillIQMatrix(const JpegQuantizationTable* q_table,
VAIQMatrixBufferJPEGBaseline* iq_matrix) {
static_assert(kJpegMaxQuantizationTableNum ==
std::extent<decltype(iq_matrix->load_quantiser_table)>(),
"max number of quantization table mismatched");
static_assert(
sizeof(iq_matrix->quantiser_table[0]) == sizeof(q_table[0].value),
"number of quantization entries mismatched");
for (size_t i = 0; i < kJpegMaxQuantizationTableNum; i++) {
if (!q_table[i].valid)
continue;
iq_matrix->load_quantiser_table[i] = 1;
for (size_t j = 0; j < base::size(q_table[i].value); j++)
iq_matrix->quantiser_table[i][j] = q_table[i].value[j];
}
}
static void FillHuffmanTable(const JpegHuffmanTable* dc_table,
const JpegHuffmanTable* ac_table,
VAHuffmanTableBufferJPEGBaseline* huffman_table) {
// Use default huffman tables if not specified in header.
bool has_huffman_table = false;
for (size_t i = 0; i < kJpegMaxHuffmanTableNumBaseline; i++) {
if (dc_table[i].valid || ac_table[i].valid) {
has_huffman_table = true;
break;
}
}
if (!has_huffman_table) {
dc_table = kDefaultDcTable;
ac_table = kDefaultAcTable;
}
static_assert(kJpegMaxHuffmanTableNumBaseline ==
std::extent<decltype(huffman_table->load_huffman_table)>(),
"max number of huffman table mismatched");
static_assert(sizeof(huffman_table->huffman_table[0].num_dc_codes) ==
sizeof(dc_table[0].code_length),
"size of huffman table code length mismatch");
static_assert(sizeof(huffman_table->huffman_table[0].dc_values[0]) ==
sizeof(dc_table[0].code_value[0]),
"size of huffman table code value mismatch");
for (size_t i = 0; i < kJpegMaxHuffmanTableNumBaseline; i++) {
if (!dc_table[i].valid || !ac_table[i].valid)
continue;
huffman_table->load_huffman_table[i] = 1;
memcpy(huffman_table->huffman_table[i].num_dc_codes,
dc_table[i].code_length,
sizeof(huffman_table->huffman_table[i].num_dc_codes));
memcpy(huffman_table->huffman_table[i].dc_values, dc_table[i].code_value,
sizeof(huffman_table->huffman_table[i].dc_values));
memcpy(huffman_table->huffman_table[i].num_ac_codes,
ac_table[i].code_length,
sizeof(huffman_table->huffman_table[i].num_ac_codes));
memcpy(huffman_table->huffman_table[i].ac_values, ac_table[i].code_value,
sizeof(huffman_table->huffman_table[i].ac_values));
}
}
static void FillSliceParameters(
const JpegParseResult& parse_result,
VASliceParameterBufferJPEGBaseline* slice_param) {
slice_param->slice_data_size = parse_result.data_size;
slice_param->slice_data_offset = 0;
slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
slice_param->slice_horizontal_position = 0;
slice_param->slice_vertical_position = 0;
slice_param->num_components = parse_result.scan.num_components;
for (int i = 0; i < slice_param->num_components; i++) {
slice_param->components[i].component_selector =
parse_result.scan.components[i].component_selector;
slice_param->components[i].dc_table_selector =
parse_result.scan.components[i].dc_selector;
slice_param->components[i].ac_table_selector =
parse_result.scan.components[i].ac_selector;
}
slice_param->restart_interval = parse_result.restart_interval;
// Cast to int to prevent overflow.
int max_h_factor =
parse_result.frame_header.components[0].horizontal_sampling_factor;
int max_v_factor =
parse_result.frame_header.components[0].vertical_sampling_factor;
int mcu_cols = parse_result.frame_header.coded_width / (max_h_factor * 8);
DCHECK_GT(mcu_cols, 0);
int mcu_rows = parse_result.frame_header.coded_height / (max_v_factor * 8);
DCHECK_GT(mcu_rows, 0);
slice_param->num_mcus = mcu_rows * mcu_cols;
}
// VAAPI only supports a subset of JPEG profiles. This function determines
// whether a given parsed JPEG result is supported or not.
static bool IsVaapiSupportedJpeg(const JpegParseResult& jpeg) {
if (jpeg.frame_header.visible_width < 1 ||
jpeg.frame_header.visible_height < 1) {
DLOG(ERROR) << "width(" << jpeg.frame_header.visible_width
<< ") and height(" << jpeg.frame_header.visible_height
<< ") should be at least 1";
return false;
}
// Size 64k*64k is the maximum in the JPEG standard. VAAPI doesn't support
// resolutions larger than 16k*16k.
constexpr int kMaxDimension = 16384;
if (jpeg.frame_header.coded_width > kMaxDimension ||
jpeg.frame_header.coded_height > kMaxDimension) {
DLOG(ERROR) << "VAAPI doesn't support size("
<< jpeg.frame_header.coded_width << "*"
<< jpeg.frame_header.coded_height << ") larger than "
<< kMaxDimension << "*" << kMaxDimension;
return false;
}
if (jpeg.frame_header.num_components != 3) {
DLOG(ERROR) << "VAAPI doesn't support num_components("
<< static_cast<int>(jpeg.frame_header.num_components)
<< ") != 3";
return false;
}
if (jpeg.frame_header.components[0].horizontal_sampling_factor <
jpeg.frame_header.components[1].horizontal_sampling_factor ||
jpeg.frame_header.components[0].horizontal_sampling_factor <
jpeg.frame_header.components[2].horizontal_sampling_factor) {
DLOG(ERROR) << "VAAPI doesn't supports horizontal sampling factor of Y"
<< " smaller than Cb and Cr";
return false;
}
if (jpeg.frame_header.components[0].vertical_sampling_factor <
jpeg.frame_header.components[1].vertical_sampling_factor ||
jpeg.frame_header.components[0].vertical_sampling_factor <
jpeg.frame_header.components[2].vertical_sampling_factor) {
DLOG(ERROR) << "VAAPI doesn't supports vertical sampling factor of Y"
<< " smaller than Cb and Cr";
return false;
}
return true;
}
} // namespace
bool VaSurfaceFormatToImageFormat(uint32_t va_rt_format,
VAImageFormat* va_image_format) {
switch (va_rt_format) {
case VA_RT_FORMAT_YUV420:
*va_image_format = kImageFormatI420;
return true;
case VA_RT_FORMAT_YUV422:
*va_image_format = kImageFormatYUYV;
return true;
default:
return false;
}
}
unsigned int VaSurfaceFormatForJpeg(const JpegFrameHeader& frame_header) {
// The range of sampling factor is [1, 4]. Pack them into integer to make the
// matching code simpler. For example, 0x211 means the sampling factors are 2,
// 1, 1 for 3 components.
unsigned int h = 0, v = 0;
for (int i = 0; i < frame_header.num_components; i++) {
DCHECK_LE(frame_header.components[i].horizontal_sampling_factor, 4);
DCHECK_LE(frame_header.components[i].vertical_sampling_factor, 4);
h = h << 4 | frame_header.components[i].horizontal_sampling_factor;
v = v << 4 | frame_header.components[i].vertical_sampling_factor;
}
switch (frame_header.num_components) {
case 1: // Grey image
return VA_RT_FORMAT_YUV400;
case 3: // Y Cb Cr color image
// See https://en.wikipedia.org/wiki/Chroma_subsampling for the
// definition of these numbers.
if (h == 0x211 && v == 0x211)
return VA_RT_FORMAT_YUV420;
if (h == 0x211 && v == 0x111)
return VA_RT_FORMAT_YUV422;
if (h == 0x111 && v == 0x111)
return VA_RT_FORMAT_YUV444;
if (h == 0x411 && v == 0x111)
return VA_RT_FORMAT_YUV411;
}
VLOGF(1) << "Unsupported sampling factor: num_components="
<< frame_header.num_components << ", h=" << std::hex << h
<< ", v=" << v;
return 0;
}
bool VaapiJpegDecoder::DoDecode(VaapiWrapper* vaapi_wrapper,
const JpegParseResult& parse_result,
VASurfaceID va_surface) {
DCHECK_NE(va_surface, VA_INVALID_SURFACE);
if (!IsVaapiSupportedJpeg(parse_result))
return false;
// Set picture parameters.
VAPictureParameterBufferJPEGBaseline pic_param{};
FillPictureParameters(parse_result.frame_header, &pic_param);
if (!vaapi_wrapper->SubmitBuffer(VAPictureParameterBufferType, &pic_param)) {
return false;
}
// Set quantization table.
VAIQMatrixBufferJPEGBaseline iq_matrix{};
FillIQMatrix(parse_result.q_table, &iq_matrix);
if (!vaapi_wrapper->SubmitBuffer(VAIQMatrixBufferType, &iq_matrix)) {
return false;
}
// Set huffman table.
VAHuffmanTableBufferJPEGBaseline huffman_table{};
FillHuffmanTable(parse_result.dc_table, parse_result.ac_table,
&huffman_table);
if (!vaapi_wrapper->SubmitBuffer(VAHuffmanTableBufferType, &huffman_table)) {
return false;
}
// Set slice parameters.
VASliceParameterBufferJPEGBaseline slice_param{};
FillSliceParameters(parse_result, &slice_param);
if (!vaapi_wrapper->SubmitBuffer(VASliceParameterBufferType, &slice_param)) {
return false;
}
// Set scan data.
if (!vaapi_wrapper->SubmitBuffer(VASliceDataBufferType,
parse_result.data_size,
const_cast<char*>(parse_result.data))) {
return false;
}
return vaapi_wrapper->ExecuteAndDestroyPendingBuffers(va_surface);
}
} // namespace media
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_GPU_VAAPI_VAAPI_JPEG_DECODER_H_
#define MEDIA_GPU_VAAPI_VAAPI_JPEG_DECODER_H_
#include <stdint.h>
// These data types are defined in va/va.h using typedef, reproduced here.
// TODO(andrescj): revisit this once VaSurfaceFormatToImageFormat() and
// VaSurfaceFormatForJpeg() are moved to the anonymous namespace in the .cc
// file.
using VAImageFormat = struct _VAImageFormat;
using VASurfaceID = unsigned int;
namespace media {
struct JpegFrameHeader;
struct JpegParseResult;
class VaapiWrapper;
// Convert the specified surface format to the associated output image format.
bool VaSurfaceFormatToImageFormat(uint32_t va_rt_format,
VAImageFormat* va_image_format);
unsigned int VaSurfaceFormatForJpeg(const JpegFrameHeader& frame_header);
class VaapiJpegDecoder {
public:
VaapiJpegDecoder() = default;
virtual ~VaapiJpegDecoder() = default;
// Decodes a JPEG picture. It will fill VA-API parameters and call
// corresponding VA-API methods according to the JPEG |parse_result|. Decoded
// data will be outputted to the given |va_surface|. Returns false on failure.
// |vaapi_wrapper| should be initialized in kDecode mode with
// VAProfileJPEGBaseline profile. |va_surface| should be created with size at
// least as large as the picture size.
static bool DoDecode(VaapiWrapper* vaapi_wrapper,
const JpegParseResult& parse_result,
VASurfaceID va_surface);
};
} // namespace media
#endif // MEDIA_GPU_VAAPI_VAAPI_JPEG_DECODER_H_
@@ -3,7 +3,10 @@
// found in the LICENSE file.
#include <stdint.h>
#include <string.h>
#include <memory>
#include <string>
#include <vector>
#include <va/va.h>
@@ -11,21 +14,26 @@
// See http://code.google.com/p/googletest/issues/detail?id=371
#include "testing/gtest/include/gtest/gtest.h"
#include "base/at_exit.h"
#include "base/bind.h"
#include "base/callback.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/logging.h"
#include "base/md5.h"
#include "base/path_service.h"
#include "base/memory/scoped_refptr.h"
#include "base/strings/string_piece.h"
#include "base/synchronization/lock.h"
#include "base/test/gtest_util.h"
#include "base/thread_annotations.h"
#include "media/base/test_data_util.h"
#include "media/base/video_frame.h"
#include "media/base/video_types.h"
#include "media/filters/jpeg_parser.h"
#include "media/gpu/vaapi/vaapi_jpeg_decode_accelerator.h"
#include "media/gpu/vaapi/vaapi_jpeg_decoder.h"
#include "media/gpu/vaapi/vaapi_utils.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "ui/gfx/geometry/size.h"
namespace media {
namespace {
@@ -34,12 +42,17 @@ constexpr const char* kTestFilename = "pixel-1280x720.jpg";
constexpr const char* kExpectedMd5SumI420 = "6e9e1716073c9a9a1282e3f0e0dab743";
constexpr const char* kExpectedMd5SumYUYV = "ff313a6aedbc4e157561e5c2d5c2e079";
constexpr VAImageFormat kImageFormatI420 = {.fourcc = VA_FOURCC_I420,
constexpr VAImageFormat kImageFormatI420 = {
.fourcc = VA_FOURCC_I420,
.byte_order = VA_LSB_FIRST,
.bits_per_pixel = 12};
constexpr VAImageFormat kImageFormatYUYV = {.fourcc = VA_FOURCC_YUYV,
.bits_per_pixel = 12,
};
constexpr VAImageFormat kImageFormatYUYV = {
.fourcc = VA_FOURCC('Y', 'U', 'Y', 'V'),
.byte_order = VA_LSB_FIRST,
.bits_per_pixel = 16};
.bits_per_pixel = 16,
};
void LogOnError() {
LOG(FATAL) << "Oh noes! Decoder failed";
@@ -67,14 +80,13 @@ VAImageFormat GetVAImageFormat() {
} // namespace
class VaapiJpegDecodeAcceleratorTest : public ::testing::Test {
class VaapiJpegDecoderTest : public ::testing::Test {
protected:
VaapiJpegDecodeAcceleratorTest() {
VaapiJpegDecoderTest() {
const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
if (cmd_line && cmd_line->HasSwitch("test_data_path")) {
if (cmd_line && cmd_line->HasSwitch("test_data_path"))
test_data_path_ = cmd_line->GetSwitchValueASCII("test_data_path");
}
}
void SetUp() override {
base::RepeatingClosure report_error_cb = base::BindRepeating(&LogOnError);
@@ -118,7 +130,7 @@ class VaapiJpegDecodeAcceleratorTest : public ::testing::Test {
// is not found, treat the file as being relative to the test file directory.
// This is either a custom test data path provided by --test_data_path, or the
// default test data path (//media/test/data).
base::FilePath VaapiJpegDecodeAcceleratorTest::FindTestDataFilePath(
base::FilePath VaapiJpegDecoderTest::FindTestDataFilePath(
const std::string& file_name) {
const base::FilePath file_path = base::FilePath(file_name);
if (base::PathExists(file_path))
@@ -128,7 +140,7 @@ base::FilePath VaapiJpegDecodeAcceleratorTest::FindTestDataFilePath(
return GetTestDataFilePath(file_name);
}
bool VaapiJpegDecodeAcceleratorTest::VerifyDecode(
bool VaapiJpegDecoderTest::VerifyDecode(
const JpegParseResult& parse_result) const {
gfx::Size size(parse_result.frame_header.coded_width,
parse_result.frame_header.coded_height);
@@ -179,14 +191,13 @@ bool VaapiJpegDecodeAcceleratorTest::VerifyDecode(
return true;
}
bool VaapiJpegDecodeAcceleratorTest::Decode(VaapiWrapper* vaapi_wrapper,
bool VaapiJpegDecoderTest::Decode(VaapiWrapper* vaapi_wrapper,
const JpegParseResult& parse_result,
VASurfaceID va_surface) const {
return VaapiJpegDecodeAccelerator::DoDecode(vaapi_wrapper, parse_result,
va_surface);
return VaapiJpegDecoder::DoDecode(vaapi_wrapper, parse_result, va_surface);
}
TEST_F(VaapiJpegDecodeAcceleratorTest, DecodeSuccess) {
TEST_F(VaapiJpegDecoderTest, DecodeSuccess) {
JpegParseResult parse_result;
ASSERT_TRUE(
ParseJpegPicture(reinterpret_cast<const uint8_t*>(jpeg_data_.data()),
@@ -195,7 +206,7 @@ TEST_F(VaapiJpegDecodeAcceleratorTest, DecodeSuccess) {
EXPECT_TRUE(VerifyDecode(parse_result));
}
TEST_F(VaapiJpegDecodeAcceleratorTest, DecodeFail) {
TEST_F(VaapiJpegDecoderTest, DecodeFail) {
JpegParseResult parse_result;
ASSERT_TRUE(
ParseJpegPicture(reinterpret_cast<const uint8_t*>(jpeg_data_.data()),
@@ -216,7 +227,7 @@ TEST_F(VaapiJpegDecodeAcceleratorTest, DecodeFail) {
}
// This test exercises the usual ScopedVAImage lifetime.
TEST_F(VaapiJpegDecodeAcceleratorTest, ScopedVAImage) {
TEST_F(VaapiJpegDecoderTest, ScopedVAImage) {
std::vector<VASurfaceID> va_surfaces;
const gfx::Size coded_size(64, 64);
ASSERT_TRUE(wrapper_->CreateContextAndSurfaces(VA_RT_FORMAT_YUV420,
@@ -242,7 +253,7 @@ TEST_F(VaapiJpegDecodeAcceleratorTest, ScopedVAImage) {
}
// This test exercises creation of a ScopedVAImage with a bad VASurfaceID.
TEST_F(VaapiJpegDecodeAcceleratorTest, BadScopedVAImage) {
TEST_F(VaapiJpegDecoderTest, BadScopedVAImage) {
const std::vector<VASurfaceID> va_surfaces = {VA_INVALID_ID};
const gfx::Size coded_size(64, 64);
@@ -265,7 +276,7 @@ TEST_F(VaapiJpegDecodeAcceleratorTest, BadScopedVAImage) {
}
// This test exercises creation of a ScopedVABufferMapping with bad VABufferIDs.
TEST_F(VaapiJpegDecodeAcceleratorTest, BadScopedVABufferMapping) {
TEST_F(VaapiJpegDecoderTest, BadScopedVABufferMapping) {
base::AutoLock auto_lock(*GetVaapiWrapperLock());
// A ScopedVABufferMapping with a VA_INVALID_ID VABufferID is DCHECK()ed.
@@ -223,7 +223,7 @@ class MEDIA_GPU_EXPORT VaapiWrapper
private:
friend class base::RefCountedThreadSafe<VaapiWrapper>;
friend class VaapiJpegDecodeAcceleratorTest;
friend class VaapiJpegDecoderTest;
bool Initialize(CodecMode mode, VAProfile va_profile);
void Deinitialize();