Commit 2c2156f5 authored by Hirokazu Honda, committed by Commit Bot

video_encode_accelerator.mojom: Attach format information on Encode()

ArcVideoEncoder has two encode patterns:
1.) I420 buffers (OMX_COLOR_FormatYUV420Planar)
2.) several formats, including YV12, NV12 and ARGB (OMX_COLOR_FormatAndroidOpaque)
In the latter case, no detail about the pixel format of the video frames to be
encoded is provided at initialization. We always configure I420 on Initialize()
and perform the pixel format conversion in ArcVideoEncoder using libyuv.
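For reference, the opaque-format conversion comes down to one libyuv call per
source format. Below is a minimal sketch, not the actual ArcVideoEncoder code:
the wrapper ConvertNV12ToI420() and its plane-pointer/stride parameters are
assumptions for illustration, while libyuv::NV12ToI420() is the real libyuv
entry point.

#include <cstdint>
#include "libyuv.h"  // libyuv public header (path differs inside Chromium).

// Hypothetical helper: converts one NV12 frame to I420 with libyuv.
// The caller is assumed to own both buffers and pass valid strides.
bool ConvertNV12ToI420(const uint8_t* src_y, int src_stride_y,
                       const uint8_t* src_uv, int src_stride_uv,
                       uint8_t* dst_y, int dst_stride_y,
                       uint8_t* dst_u, int dst_stride_u,
                       uint8_t* dst_v, int dst_stride_v,
                       int width, int height) {
  // libyuv returns 0 on success.
  return libyuv::NV12ToI420(src_y, src_stride_y, src_uv, src_stride_uv,
                            dst_y, dst_stride_y, dst_u, dst_stride_u,
                            dst_v, dst_stride_v, width, height) == 0;
}

YV12 input only needs its U and V planes swapped relative to I420, and ARGB
input would go through libyuv::ARGBToI420() in the same way.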

One of the most common encoder use cases is video capture, which falls into the
latter pattern. The pixel format of the video frames depends on the platform:
YV12 on MediaTek devices and NV12 on others. Furthermore, the video frames are
provided as DmaBufs. If no pixel format conversion is done anywhere, we can
pass the video frames to the VEA as DmaBufs without any mapping.

Our design is to configure a flexible format on Initialize() if
OMX_COLOR_FormatAndroidOpaque is configured in ArcVideoEncoder. Thereafter, if a
different pixel format is provided on Encode(), we convert the pixel format in
GpuArcVEA. This gives us a chance to use a hardware-accelerated image processor
such as the V4L2 image processor.
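A compact sketch of that per-frame decision follows; it is illustrative only,
with plain stand-in types rather than the real Chromium/ARC enums:

#include <iostream>

// Stand-in for media::VideoPixelFormat / the mojom VideoPixelFormat enum.
enum class PixelFormat { kI420, kNV12, kYV12 };

// With OMX_COLOR_FormatAndroidOpaque the real format is only known per frame,
// so GpuArcVEA compares it against the format the hardware encoder was
// configured with and converts (ideally via a HW image processor such as the
// V4L2 one) only when they differ.
bool NeedsConversion(PixelFormat configured_format, PixelFormat frame_format) {
  return frame_format != configured_format;
}

int main() {
  // Capture on a non-MediaTek device feeding NV12 into an encoder configured
  // for NV12: no conversion, so the DmaBuf could be passed through unmapped.
  std::cout << NeedsConversion(PixelFormat::kNV12, PixelFormat::kNV12) << "\n";
  // The same stream feeding an encoder that only accepts I420: convert first.
  std::cout << NeedsConversion(PixelFormat::kI420, PixelFormat::kNV12) << "\n";
  return 0;
}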

This is the first step for this task: attach format information on Encode() and
split GpuArcVideoEncodeAccelerator::Encode() into EncodeSharedMemory() (I420
case) and EncodeDmabuf() (flexible format case). EncodeDmabuf() is not
implemented yet.

BUG=chromium:895230, b:118544836
TEST=CtsMediaTestCases

Change-Id: Ia12447b93f71fb2af579a9e27a1055b43e81cc2a
Reviewed-on: https://chromium-review.googlesource.com/c/1343593
Commit-Queue: Hirokazu Honda <hiroh@chromium.org>
Reviewed-by: Pawel Osciak <posciak@chromium.org>
Reviewed-by: Daniel Cheng <dcheng@chromium.org>
Cr-Commit-Position: refs/heads/master@{#611064}
parent 69512362
@@ -9,7 +9,7 @@ module arc.mojom;
import "components/arc/common/video_common.mojom";
// Next MinVersion: 3
// Next MinVersion: 4
[Extensible]
enum VideoPixelFormat {
@@ -65,7 +65,7 @@ struct VideoEncodeAcceleratorConfig {
};
// Video encoder IPC interface.
// Next Method ID: 8
// Next Method ID: 9
interface VideoEncodeAccelerator {
// Enumeration of potential errors generated by the API.
[Extensible]
@@ -110,7 +110,27 @@ interface VideoEncodeAccelerator {
// Callback:
// Called when the frame has been processed and no longer used by this
// accelerator.
Encode@2(handle frame_fd,
EncodeDeprecated@2(handle frame_fd,
array<VideoFramePlane> planes,
int64 timestamp,
bool force_keyframe) => ();
// Encodes the given frame.
// Parameters:
// |format| is the pixel format of the video frame. This could be different
// from the pixel format configured on Initialize().
// |frame_fd| is the handle of the video frame buffer. This could be the
// file descriptor of the shared memory or the dmabuf, depending on the
// storage type assigned in Initialize().
// |planes| is an array of the offsets and strides of the planes in the video frame.
// |timestamp| is the timestamp of the video frame (in microseconds).
// |force_keyframe| forces the encoding of a keyframe for this frame.
// Callback:
// Called when the frame has been processed and no longer used by this
// accelerator.
[MinVersion=3]
Encode@8(VideoPixelFormat format,
handle frame_fd,
array<VideoFramePlane> planes,
int64 timestamp,
bool force_keyframe) => ();
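For illustration, a caller targeting the new method passes the per-frame format
alongside the handle. The sketch below is an assumption-heavy example, not code
from this CL: the variable vea (a bound arc::mojom::VideoEncodeAcceleratorPtr),
the include paths, and the exact parameter types (which depend on the mojo
typemaps) are all illustrative. The parameter list mirrors Encode@8 above, and
the remote must support interface version 3 or later; otherwise
EncodeDeprecated@2 is still used.

#include <utility>
#include <vector>
#include "base/bind.h"                                              // assumed path
#include "components/arc/common/video_encode_accelerator.mojom.h"   // assumed path

void EncodeOneFrame(arc::mojom::VideoEncodeAcceleratorPtr& vea,
                    media::VideoPixelFormat format,
                    mojo::ScopedHandle frame_fd,
                    std::vector<arc::VideoFramePlane> planes,
                    int64_t timestamp_us,
                    bool force_keyframe) {
  vea->Encode(format, std::move(frame_fd), std::move(planes), timestamp_us,
              force_keyframe, base::BindOnce([]() {
                // Invoked once the accelerator no longer uses the frame
                // buffer; it can now be returned to the producer.
              }));
}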
@@ -78,8 +78,13 @@ void GpuArcVideoEncodeAccelerator::Initialize(
VideoEncodeClientPtr client,
InitializeCallback callback) {
DVLOGF(2) << config.AsHumanReadableString();
if (!config.storage_type.has_value()) {
DLOG(ERROR) << "storage type must be specified";
std::move(callback).Run(false);
return;
}
input_pixel_format_ = config.input_format;
input_storage_type_ = *config.storage_type;
visible_size_ = config.input_visible_size;
accelerator_ = media::GpuVideoEncodeAcceleratorFactory::CreateVEA(
config, this, gpu_preferences_);
@@ -92,7 +97,18 @@ void GpuArcVideoEncodeAccelerator::Initialize(
std::move(callback).Run(true);
}
void GpuArcVideoEncodeAccelerator::EncodeDeprecated(
mojo::ScopedHandle handle,
std::vector<::arc::VideoFramePlane> planes,
int64_t timestamp,
bool force_keyframe,
EncodeCallback callback) {
Encode(input_pixel_format_, std::move(handle), planes, timestamp,
force_keyframe, std::move(callback));
}
void GpuArcVideoEncodeAccelerator::Encode(
media::VideoPixelFormat format,
mojo::ScopedHandle handle,
std::vector<::arc::VideoFramePlane> planes,
int64_t timestamp,
@@ -115,9 +131,42 @@ void GpuArcVideoEncodeAccelerator::Encode(
return;
}
size_t allocation_size =
media::VideoFrame::AllocationSize(input_pixel_format_, coded_size_);
if (input_storage_type_ ==
media::VideoEncodeAccelerator::Config::StorageType::kShmem) {
EncodeSharedMemory(std::move(fd), format, planes, timestamp, force_keyframe,
std::move(callback));
} else {
EncodeDmabuf(std::move(fd), format, planes, timestamp, force_keyframe,
std::move(callback));
}
}
void GpuArcVideoEncodeAccelerator::EncodeDmabuf(
base::ScopedFD fd,
media::VideoPixelFormat format,
const std::vector<::arc::VideoFramePlane>& planes,
int64_t timestamp,
bool force_keyframe,
EncodeCallback callback) {
client_->NotifyError(Error::kInvalidArgumentError);
NOTIMPLEMENTED();
}
void GpuArcVideoEncodeAccelerator::EncodeSharedMemory(
base::ScopedFD fd,
media::VideoPixelFormat format,
const std::vector<::arc::VideoFramePlane>& planes,
int64_t timestamp,
bool force_keyframe,
EncodeCallback callback) {
if (format != media::PIXEL_FORMAT_I420) {
DLOG(ERROR) << "Formats other than I420 are unsupported. format=" << format;
client_->NotifyError(Error::kInvalidArgumentError);
return;
}
size_t allocation_size =
media::VideoFrame::AllocationSize(format, coded_size_);
// TODO(rockot): Pass GUIDs through Mojo. https://crbug.com/713763.
// TODO(rockot): This fd comes from a mojo::ScopedHandle in
// GpuArcVideoService::BindSharedMemory. That should be passed through,
@@ -148,7 +197,7 @@ void GpuArcVideoEncodeAccelerator::Encode(
uint8_t* shm_memory = reinterpret_cast<uint8_t*>(shm->memory());
auto frame = media::VideoFrame::WrapExternalSharedMemory(
input_pixel_format_, coded_size_, gfx::Rect(visible_size_), visible_size_,
format, coded_size_, gfx::Rect(visible_size_), visible_size_,
shm_memory + aligned_offset, allocation_size, shm_handle,
planes[0].offset, base::TimeDelta::FromMicroseconds(timestamp));
@@ -48,7 +48,13 @@ class GpuArcVideoEncodeAccelerator
void Initialize(const media::VideoEncodeAccelerator::Config& config,
VideoEncodeClientPtr client,
InitializeCallback callback) override;
void Encode(mojo::ScopedHandle fd,
void EncodeDeprecated(mojo::ScopedHandle fd,
std::vector<::arc::VideoFramePlane> planes,
int64_t timestamp,
bool force_keyframe,
EncodeCallback callback) override;
void Encode(media::VideoPixelFormat format,
mojo::ScopedHandle fd,
std::vector<::arc::VideoFramePlane> planes,
int64_t timestamp,
bool force_keyframe,
@@ -61,12 +67,26 @@ class GpuArcVideoEncodeAccelerator
uint32_t framerate) override;
void Flush(FlushCallback callback) override;
void EncodeDmabuf(base::ScopedFD fd,
media::VideoPixelFormat format,
const std::vector<::arc::VideoFramePlane>& planes,
int64_t timestamp,
bool force_keyframe,
EncodeCallback callback);
void EncodeSharedMemory(base::ScopedFD fd,
media::VideoPixelFormat format,
const std::vector<::arc::VideoFramePlane>& planes,
int64_t timestamp,
bool force_keyframe,
EncodeCallback callback);
gpu::GpuPreferences gpu_preferences_;
std::unique_ptr<media::VideoEncodeAccelerator> accelerator_;
::arc::mojom::VideoEncodeClientPtr client_;
gfx::Size coded_size_;
gfx::Size visible_size_;
VideoPixelFormat input_pixel_format_;
media::VideoEncodeAccelerator::Config::StorageType input_storage_type_;
int32_t bitstream_buffer_serial_;
std::unordered_map<uint32_t, UseBitstreamBufferCallback> use_bitstream_cbs_;