Commit 1b3709ce authored by Ricky Liang, committed by Commit Bot

RELAND: media: implement TakePhoto in Chrome OS camera HAL VideoCaptureDevice

Fix an out-of-bound memory access bug in unit tests.

Original change's description --------------------------------------------------

media: implement TakePhoto in Chrome OS camera HAL VideoCaptureDevice

This CL implements the TakePhoto() API for the Chrome OS camera HAL
VideoCaptureDevice, including the following features:

  - Full size still capture
  - Auto-focus, auto-exposure, and auto-white-balancing stabilization

BUG=chromium:832772
TEST=Run capture_unittests on DUT
TEST=Make sure TakePhoto() API works on
https://googlechrome.github.io/samples/image-capture/grab-frame-take-photo.html
TEST=Run capture_unittests on host with asan and lsan build
TBR=wuchengli@chromium.org,emircan@chromium.org

Change-Id: Ieafc98e07bbc39ac64347eea76baa3a496c0c775
Reviewed-on: https://chromium-review.googlesource.com/1059915
Reviewed-by: Ricky Liang <jcliang@chromium.org>
Commit-Queue: Ricky Liang <jcliang@chromium.org>
Cr-Commit-Position: refs/heads/master@{#558770}
parent a988115a
......@@ -223,6 +223,8 @@ component("capture_lib") {
if (is_chromeos) {
sources += [
"video/chromeos/camera_3a_controller.cc",
"video/chromeos/camera_3a_controller.h",
"video/chromeos/camera_buffer_factory.cc",
"video/chromeos/camera_buffer_factory.h",
"video/chromeos/camera_device_context.cc",
......@@ -342,6 +344,7 @@ test("capture_unittests") {
if (is_chromeos) {
sources += [
"video/chromeos/camera_3a_controller_unittest.cc",
"video/chromeos/camera_device_delegate_unittest.cc",
"video/chromeos/camera_hal_delegate_unittest.cc",
"video/chromeos/camera_hal_dispatcher_impl_unittest.cc",
......
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/capture/video/chromeos/camera_3a_controller.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
namespace media {
namespace {
// Reads the first datum of the metadata entry identified by |control| into
// |*result|, cast to EntryType.  Returns true on success; returns false when
// the entry is missing or carries no data.
//
// The |data|-empty guard matters: an entry reported with count == 0 would
// otherwise cause an out-of-bound read of data[0].
template <typename EntryType>
bool Get3AEntry(const cros::mojom::CameraMetadataPtr& metadata,
                cros::mojom::CameraMetadataTag control,
                EntryType* result) {
  const auto* entry = GetMetadataEntry(metadata, control);
  if (!entry || (*entry)->data.empty()) {
    return false;
  }
  *result = static_cast<EntryType>((*entry)->data[0]);
  return true;
}
} // namespace
// Constructs the controller and synchronizes the initial 3A modes with the
// camera HAL.
//
// |static_metadata| is the camera's static characteristics, used to discover
// which AF/AE/AWB modes the module supports.  |capture_metadata_dispatcher|
// must outlive this object: the controller registers itself here as a result
// metadata observer and sends capture metadata through it.  This constructor
// (like every other method) must run on |task_runner|.
Camera3AController::Camera3AController(
    const cros::mojom::CameraMetadataPtr& static_metadata,
    CaptureMetadataDispatcher* capture_metadata_dispatcher,
    scoped_refptr<base::SingleThreadTaskRunner> task_runner)
    : capture_metadata_dispatcher_(capture_metadata_dispatcher),
      task_runner_(std::move(task_runner)),
      af_mode_(cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF),
      af_state_(cros::mojom::AndroidControlAfState::
                    ANDROID_CONTROL_AF_STATE_INACTIVE),
      af_mode_set_(false),
      ae_mode_(cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_ON),
      ae_state_(cros::mojom::AndroidControlAeState::
                    ANDROID_CONTROL_AE_STATE_INACTIVE),
      ae_mode_set_(false),
      awb_mode_(
          cros::mojom::AndroidControlAwbMode::ANDROID_CONTROL_AWB_MODE_AUTO),
      awb_state_(cros::mojom::AndroidControlAwbState::
                     ANDROID_CONTROL_AWB_STATE_INACTIVE),
      awb_mode_set_(false),
      weak_ptr_factory_(this) {
  DCHECK(task_runner_->BelongsToCurrentThread());
  capture_metadata_dispatcher_->AddResultMetadataObserver(this);

  // Collect the AF/AE/AWB modes this camera module advertises so that the
  // mode selection below (and the SetAutoFocusMode* methods) only ever picks
  // supported values.
  auto* af_modes = GetMetadataEntry(
      static_metadata,
      cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES);
  if (af_modes) {
    for (const auto& m : (*af_modes)->data) {
      available_af_modes_.insert(
          static_cast<cros::mojom::AndroidControlAfMode>(m));
    }
  }
  auto* ae_modes = GetMetadataEntry(
      static_metadata,
      cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_AVAILABLE_MODES);
  if (ae_modes) {
    for (const auto& m : (*ae_modes)->data) {
      available_ae_modes_.insert(
          static_cast<cros::mojom::AndroidControlAeMode>(m));
    }
  }
  auto* awb_modes = GetMetadataEntry(
      static_metadata,
      cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_AVAILABLE_MODES);
  if (awb_modes) {
    for (const auto& m : (*awb_modes)->data) {
      available_awb_modes_.insert(
          static_cast<cros::mojom::AndroidControlAwbMode>(m));
    }
  }

  // Enable AF if supported. MODE_AUTO is always supported on auto-focus
  // camera modules; fixed-focus camera modules always have MODE_OFF.
  if (available_af_modes_.count(
          cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO)) {
    af_mode_ = cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO;
  }
  // AE should always be MODE_ON unless we enable manual sensor control. Since
  // we don't have flash on any of our devices we don't care about the
  // flash-related AE modes.
  //
  // AWB should always be MODE_AUTO unless we enable manual sensor control.
  //
  // Each Set3AMode() call below also clears the corresponding |*_mode_set_|
  // flag until the HAL confirms the mode via result metadata.
  Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
            base::checked_cast<uint8_t>(af_mode_));
  Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE,
            base::checked_cast<uint8_t>(ae_mode_));
  Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE,
            base::checked_cast<uint8_t>(awb_mode_));
}
// Unregisters from the dispatcher so no further result metadata is delivered
// to a destroyed object.  Must run on |task_runner_|.
Camera3AController::~Camera3AController() {
  DCHECK(task_runner_->BelongsToCurrentThread());
  capture_metadata_dispatcher_->RemoveResultMetadataObserver(this);
}
// Starts an AF scan and an AE precapture sequence, and arranges for
// |on_3a_stabilized_callback| to be invoked (from OnResultMetadataAvailable())
// once focus, exposure, and white balance have all settled.
//
// The request is dropped if a stabilization is already in flight, and is
// completed synchronously when 3A is already stabilized.
void Camera3AController::Stabilize3AForStillCapture(
    base::OnceClosure on_3a_stabilized_callback) {
  DCHECK(task_runner_->BelongsToCurrentThread());
  if (on_3a_stabilized_callback_ || on_3a_mode_set_callback_) {
    // Already stabilizing 3A.
    return;
  }
  if (Is3AStabilized()) {
    std::move(on_3a_stabilized_callback).Run();
    return;
  }
  // Wait until all the 3A modes are set in the HAL; otherwise the AF trigger
  // and AE precapture trigger may be invalidated during mode transition.
  // OnResultMetadataAvailable() posts this deferred call once the HAL reports
  // matching modes; it then re-enters this method from the top.
  if (!af_mode_set_ || !ae_mode_set_ || !awb_mode_set_) {
    on_3a_mode_set_callback_ =
        base::BindOnce(&Camera3AController::Stabilize3AForStillCapture,
                       GetWeakPtr(), base::Passed(&on_3a_stabilized_callback));
    return;
  }
  on_3a_stabilized_callback_ = std::move(on_3a_stabilized_callback);

  // Triggers are only meaningful while the corresponding control is enabled.
  if (af_mode_ !=
      cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF) {
    DVLOG(1) << "Start AF trigger to lock focus";
    std::vector<uint8_t> af_trigger = {
        base::checked_cast<uint8_t>(cros::mojom::AndroidControlAfTrigger::
                                        ANDROID_CONTROL_AF_TRIGGER_START)};
    capture_metadata_dispatcher_->SetCaptureMetadata(
        cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
        cros::mojom::EntryType::TYPE_BYTE, 1, std::move(af_trigger));
  }
  if (ae_mode_ !=
      cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_OFF) {
    DVLOG(1) << "Start AE precapture trigger to converge exposure";
    std::vector<uint8_t> ae_precapture_trigger = {base::checked_cast<uint8_t>(
        cros::mojom::AndroidControlAePrecaptureTrigger::
            ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START)};
    capture_metadata_dispatcher_->SetCaptureMetadata(
        cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
        cros::mojom::EntryType::TYPE_BYTE, 1, std::move(ae_precapture_trigger));
  }
}
// Observer hook invoked for every result metadata frame from the HAL.
// Tracks whether the HAL has applied the requested 3A modes and records the
// latest AF/AE/AWB states, then fires the pending mode-set / stabilized
// callbacks when their conditions are met.
void Camera3AController::OnResultMetadataAvailable(
    const cros::mojom::CameraMetadataPtr& result_metadata) {
  DCHECK(task_runner_->BelongsToCurrentThread());
  if (af_mode_set_ && ae_mode_set_ && awb_mode_set_ &&
      !on_3a_stabilized_callback_) {
    // Process the result metadata only when we need to check if 3A modes are
    // synchronized, or when there's a pending 3A stabilization request.
    return;
  }

  // A 3A mode counts as synchronized when the mode the HAL reports matches
  // the one last requested through Set3AMode().  Missing entries leave the
  // previous flag/state untouched.
  cros::mojom::AndroidControlAfMode af_mode;
  if (Get3AEntry(result_metadata,
                 cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
                 &af_mode)) {
    af_mode_set_ = (af_mode == af_mode_);
  } else {
    DVLOG(2) << "AF mode is not available in the metadata";
  }
  if (!Get3AEntry(result_metadata,
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_STATE,
                  &af_state_)) {
    DVLOG(2) << "AF state is not available in the metadata";
  }
  cros::mojom::AndroidControlAeMode ae_mode;
  if (Get3AEntry(result_metadata,
                 cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE,
                 &ae_mode)) {
    ae_mode_set_ = (ae_mode == ae_mode_);
  } else {
    DVLOG(2) << "AE mode is not available in the metadata";
  }
  if (!Get3AEntry(result_metadata,
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_STATE,
                  &ae_state_)) {
    DVLOG(2) << "AE state is not available in the metadata";
  }
  cros::mojom::AndroidControlAwbMode awb_mode;
  if (Get3AEntry(result_metadata,
                 cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE,
                 &awb_mode)) {
    awb_mode_set_ = (awb_mode == awb_mode_);
  } else {
    DVLOG(2) << "AWB mode is not available in the metadata";
  }
  if (!Get3AEntry(result_metadata,
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_STATE,
                  &awb_state_)) {
    DVLOG(2) << "AWB state is not available in the metadata";
  }

  DVLOG(2) << "AF mode: " << af_mode_;
  DVLOG(2) << "AF state: " << af_state_;
  DVLOG(2) << "AE mode: " << ae_mode_;
  DVLOG(2) << "AE state: " << ae_state_;
  DVLOG(2) << "AWB mode: " << awb_mode_;
  DVLOG(2) << "AWB state: " << awb_state_;

  // Resume a deferred Stabilize3AForStillCapture() once all modes match.
  if (on_3a_mode_set_callback_ && af_mode_set_ && ae_mode_set_ &&
      awb_mode_set_) {
    task_runner_->PostTask(FROM_HERE, std::move(on_3a_mode_set_callback_));
  }
  // Notify the client synchronously once 3A has settled.
  if (on_3a_stabilized_callback_ && Is3AStabilized()) {
    std::move(on_3a_stabilized_callback_).Run();
  }
}
// Switches auto-focus to the mode best suited for still capture.
//
// Any in-flight AF trigger is first cancelled so a previous scan cannot
// interfere with the new mode.  CONTINUOUS_PICTURE is preferred when the
// camera module supports it; otherwise the mode selected at construction time
// (AUTO on auto-focus modules, OFF on fixed-focus ones) is kept.
//
// Note: the unused local |af_mode| vector from the original implementation
// has been removed; Set3AMode() builds the metadata payload itself.
void Camera3AController::SetAutoFocusModeForStillCapture() {
  DCHECK(task_runner_->BelongsToCurrentThread());

  std::vector<uint8_t> af_trigger = {base::checked_cast<uint8_t>(
      cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_CANCEL)};
  capture_metadata_dispatcher_->SetCaptureMetadata(
      cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
      cros::mojom::EntryType::TYPE_BYTE, 1, std::move(af_trigger));

  if (available_af_modes_.count(
          cros::mojom::AndroidControlAfMode::
              ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
    af_mode_ = cros::mojom::AndroidControlAfMode::
        ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
  }
  Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
            base::checked_cast<uint8_t>(af_mode_));
  DVLOG(1) << "Setting AF mode to: " << af_mode_;
}
// Switches auto-focus to the mode best suited for video recording.
//
// Cancels any AF trigger that may still be active, then prefers continuous
// AF tuned for video when the module offers it; otherwise the mode chosen at
// construction time is kept.
void Camera3AController::SetAutoFocusModeForVideoRecording() {
  DCHECK(task_runner_->BelongsToCurrentThread());

  // Abort any ongoing AF scan before changing modes.
  std::vector<uint8_t> cancel_trigger = {base::checked_cast<uint8_t>(
      cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_CANCEL)};
  capture_metadata_dispatcher_->SetCaptureMetadata(
      cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
      cros::mojom::EntryType::TYPE_BYTE, 1, std::move(cancel_trigger));

  constexpr auto kContinuousVideoMode = cros::mojom::AndroidControlAfMode::
      ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
  if (available_af_modes_.count(kContinuousVideoMode)) {
    af_mode_ = kContinuousVideoMode;
  }
  Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
            base::checked_cast<uint8_t>(af_mode_));
  DVLOG(1) << "Setting AF mode to: " << af_mode_;
}
// Returns a weak pointer for binding callbacks that may outlive |this|;
// the factory invalidates them on destruction.  Must run on |task_runner_|.
base::WeakPtr<Camera3AController> Camera3AController::GetWeakPtr() {
  DCHECK(task_runner_->BelongsToCurrentThread());
  return weak_ptr_factory_.GetWeakPtr();
}
// Requests the HAL to switch the 3A control identified by |tag| to
// |target_mode|, and marks the corresponding mode as out-of-sync until
// OnResultMetadataAvailable() observes the HAL reporting it back.
// |tag| must be one of the AF/AE/AWB mode tags.
void Camera3AController::Set3AMode(cros::mojom::CameraMetadataTag tag,
                                   uint8_t target_mode) {
  DCHECK(task_runner_->BelongsToCurrentThread());
  DCHECK(tag == cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE ||
         tag == cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE ||
         tag == cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE);

  // |target_mode| is already a uint8_t, so no checked narrowing cast is
  // needed to build the one-byte payload (the original cast was a no-op).
  std::vector<uint8_t> mode = {target_mode};
  capture_metadata_dispatcher_->SetCaptureMetadata(
      tag, cros::mojom::EntryType::TYPE_BYTE, 1, std::move(mode));

  switch (tag) {
    case cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE:
      af_mode_set_ = false;
      break;
    case cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE:
      ae_mode_set_ = false;
      break;
    case cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE:
      awb_mode_set_ = false;
      break;
    default:
      NOTREACHED() << "Invalid 3A mode: " << tag;
  }
}
bool Camera3AController::Is3AStabilized() {
DCHECK(task_runner_->BelongsToCurrentThread());
if (af_mode_ !=
cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF) {
if (af_state_ != cros::mojom::AndroidControlAfState::
ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
af_state_ != cros::mojom::AndroidControlAfState::
ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
return false;
}
}
if (ae_mode_ !=
cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_OFF) {
if (ae_state_ != cros::mojom::AndroidControlAeState::
ANDROID_CONTROL_AE_STATE_CONVERGED &&
ae_state_ != cros::mojom::AndroidControlAeState::
ANDROID_CONTROL_AE_STATE_FLASH_REQUIRED) {
return false;
}
}
if (awb_mode_ ==
cros::mojom::AndroidControlAwbMode::ANDROID_CONTROL_AWB_MODE_AUTO) {
if (awb_state_ != cros::mojom::AndroidControlAwbState::
ANDROID_CONTROL_AWB_STATE_CONVERGED) {
return false;
}
}
DVLOG(1) << "3A stabilized";
return true;
}
} // namespace media
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_3A_CONTROLLER_H_
#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_3A_CONTROLLER_H_
#include <unordered_set>
#include "media/base/media_export.h"
#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
#include "media/capture/video/chromeos/stream_buffer_manager.h"
namespace media {
// A class to control the auto-exposure, auto-focus, and auto-white-balancing
// operations and modes of the camera. For the detailed state transitions for
// auto-exposure, auto-focus, and auto-white-balancing, see
// https://source.android.com/devices/camera/camera3_3Amodes
class CAPTURE_EXPORT Camera3AController
    : public CaptureMetadataDispatcher::ResultMetadataObserver {
 public:
  // |static_metadata| describes the camera's capabilities (available AF/AE/
  // AWB modes).  |capture_metadata_dispatcher| must outlive this controller;
  // the controller registers itself on it as a result metadata observer.
  // All methods must be called on |task_runner|.
  Camera3AController(const cros::mojom::CameraMetadataPtr& static_metadata,
                     CaptureMetadataDispatcher* capture_metadata_dispatcher,
                     scoped_refptr<base::SingleThreadTaskRunner> task_runner);
  ~Camera3AController() final;

  // Trigger the camera to start exposure, focus, and white-balance metering and
  // lock them for still capture.  |on_3a_stabilized_callback| is run once all
  // three controls have settled; the request is ignored if a stabilization is
  // already in progress.
  void Stabilize3AForStillCapture(base::OnceClosure on_3a_stabilized_callback);

  // CaptureMetadataDispatcher::ResultMetadataObserver implementation.
  void OnResultMetadataAvailable(
      const cros::mojom::CameraMetadataPtr& result_metadata) final;

  // Enable the auto-focus mode suitable for still capture.
  void SetAutoFocusModeForStillCapture();

  // Enable the auto-focus mode suitable for video recording.
  void SetAutoFocusModeForVideoRecording();

  base::WeakPtr<Camera3AController> GetWeakPtr();

 private:
  // Sends the 3A mode request to the HAL and clears the corresponding
  // |*_mode_set_| flag until the HAL confirms the mode.
  void Set3AMode(cros::mojom::CameraMetadataTag tag, uint8_t target_mode);

  // Returns true when AF, AE, and AWB have all settled (or are off).
  bool Is3AStabilized();

  CaptureMetadataDispatcher* capture_metadata_dispatcher_;  // Not owned.
  const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;

  // Modes advertised by the camera's static metadata.
  std::unordered_set<cros::mojom::AndroidControlAfMode> available_af_modes_;
  cros::mojom::AndroidControlAfMode af_mode_;
  cros::mojom::AndroidControlAfState af_state_;
  // |af_mode_set_| is set to true when the AF mode is synchronized between the
  // HAL and the Camera3AController.
  bool af_mode_set_;

  std::unordered_set<cros::mojom::AndroidControlAeMode> available_ae_modes_;
  cros::mojom::AndroidControlAeMode ae_mode_;
  cros::mojom::AndroidControlAeState ae_state_;
  // |ae_mode_set_| is set to true when the AE mode is synchronized between the
  // HAL and the Camera3AController.
  bool ae_mode_set_;

  std::unordered_set<cros::mojom::AndroidControlAwbMode> available_awb_modes_;
  cros::mojom::AndroidControlAwbMode awb_mode_;
  cros::mojom::AndroidControlAwbState awb_state_;
  // |awb_mode_set_| is set to true when the AWB mode is synchronized between
  // the HAL and the Camera3AController.
  bool awb_mode_set_;

  // Deferred Stabilize3AForStillCapture() waiting for modes to synchronize.
  base::OnceClosure on_3a_mode_set_callback_;
  // Client callback fired once 3A has stabilized.
  base::OnceClosure on_3a_stabilized_callback_;

  base::WeakPtrFactory<Camera3AController> weak_ptr_factory_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(Camera3AController);
};
} // namespace media
#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_3A_CONTROLLER_H_
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/capture/video/chromeos/camera_3a_controller.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
#include "media/capture/video/chromeos/stream_buffer_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using testing::_;
namespace media {
namespace {
// A gmock-based CaptureMetadataDispatcher that lets tests set expectations on
// observer (un)registration and on the capture metadata the 3A controller
// sends toward the HAL.
class MockCaptureMetadataDispatcher : public CaptureMetadataDispatcher {
 public:
  MockCaptureMetadataDispatcher() {}
  ~MockCaptureMetadataDispatcher() override {}
  MOCK_METHOD1(
      AddResultMetadataObserver,
      void(CaptureMetadataDispatcher::ResultMetadataObserver* observer));
  MOCK_METHOD1(
      RemoveResultMetadataObserver,
      void(CaptureMetadataDispatcher::ResultMetadataObserver* observer));
  MOCK_METHOD4(SetCaptureMetadata,
               void(cros::mojom::CameraMetadataTag tag,
                    cros::mojom::EntryType type,
                    size_t count,
                    std::vector<uint8_t> value));
};
} // namespace
// Test fixture that runs a Camera3AController on a dedicated thread (matching
// the controller's single-thread affinity) against a mock metadata
// dispatcher.
class Camera3AControllerTest : public ::testing::Test {
 public:
  Camera3AControllerTest() : thread_("Camera3AControllerThread") {}

  void SetUp() override {
    thread_.Start();
    mock_capture_metadata_dispatcher_ =
        std::make_unique<MockCaptureMetadataDispatcher>();
  }

  void TearDown() override {
    // Destroy the controller on its own thread before stopping that thread;
    // Stop() joins, so the posted task runs before the dispatcher is freed.
    thread_.task_runner()->PostTask(
        FROM_HERE,
        base::BindOnce(&Camera3AControllerTest::Clear3AControllerOnThread,
                       base::Unretained(this)));
    thread_.Stop();
    mock_capture_metadata_dispatcher_.reset();
  }

  // Posts |closure| to the controller thread and blocks until it has run.
  void RunOnThreadSync(const base::Location& location,
                       base::OnceClosure closure) {
    base::WaitableEvent done(base::WaitableEvent::ResetPolicy::MANUAL,
                             base::WaitableEvent::InitialState::NOT_SIGNALED);
    thread_.task_runner()->PostTask(
        location,
        base::BindOnce(&Camera3AControllerTest::RunOnThread,
                       base::Unretained(this), base::ConstRef(location),
                       base::Passed(&closure), base::Unretained(&done)));
    done.Wait();
  }

  // Destroys any existing controller and creates a fresh one from
  // |static_metadata| on the controller thread, with the expected mock
  // interactions registered first.
  void Reset3AController(
      const cros::mojom::CameraMetadataPtr& static_metadata) {
    RunOnThreadSync(
        FROM_HERE,
        base::BindOnce(&Camera3AControllerTest::Reset3AControllerOnThread,
                       base::Unretained(this),
                       base::ConstRef(static_metadata)));
  }

  // Sets (or, with |append| == true, appends to) the metadata entry |control|
  // in |*metadata|, creating the entry if needed, and re-sorts the entries so
  // GetMetadataEntry() lookups keep working.
  template <typename Value>
  void Set3AMode(cros::mojom::CameraMetadataPtr* metadata,
                 cros::mojom::CameraMetadataTag control,
                 Value value,
                 bool append = false) {
    auto* e = GetMetadataEntry(*metadata, control);
    if (e) {
      if (append) {
        (*e)->count++;
        (*e)->data.push_back(base::checked_cast<uint8_t>(value));
      } else {
        (*e)->count = 1;
        (*e)->data = {base::checked_cast<uint8_t>(value)};
      }
    } else {
      // Entry not present yet: create it and keep the bookkeeping counters in
      // sync with the entry list.
      cros::mojom::CameraMetadataEntryPtr entry =
          cros::mojom::CameraMetadataEntry::New();
      entry->index = (*metadata)->entries.value().size();
      entry->tag = control;
      entry->type = cros::mojom::EntryType::TYPE_BYTE;
      entry->count = 1;
      entry->data = {base::checked_cast<uint8_t>(value)};
      (*metadata)->entries.value().push_back(std::move(entry));
      (*metadata)->entry_count++;
      (*metadata)->entry_capacity++;
    }
    SortCameraMetadata(metadata);
  }

  // Builds static metadata for a camera supporting OFF/AUTO/CONTINUOUS_PICTURE/
  // CONTINUOUS_VIDEO auto-focus, OFF/ON auto-exposure, and OFF/AUTO
  // auto-white-balance.
  cros::mojom::CameraMetadataPtr CreateDefaultFakeStaticMetadata() {
    auto metadata = cros::mojom::CameraMetadata::New();
    metadata->entries = std::vector<cros::mojom::CameraMetadataEntryPtr>();
    metadata->entry_count = 0;
    metadata->entry_capacity = 0;
    // Set the available AF modes.
    Set3AMode(
        &metadata,
        cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
        cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF);
    Set3AMode(
        &metadata,
        cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
        cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO,
        /* append */ true);
    Set3AMode(
        &metadata,
        cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
        cros::mojom::AndroidControlAfMode::
            ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
        /* append */ true);
    Set3AMode(
        &metadata,
        cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
        cros::mojom::AndroidControlAfMode::
            ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
        /* append */ true);
    // Set the available AE modes.
    Set3AMode(
        &metadata,
        cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_AVAILABLE_MODES,
        cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_OFF);
    Set3AMode(
        &metadata,
        cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_AVAILABLE_MODES,
        cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_ON,
        /* append */ true);
    // Set the available AWB modes.
    Set3AMode(
        &metadata,
        cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_AVAILABLE_MODES,
        cros::mojom::AndroidControlAwbMode::ANDROID_CONTROL_AWB_MODE_OFF);
    Set3AMode(
        &metadata,
        cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_AVAILABLE_MODES,
        cros::mojom::AndroidControlAwbMode::ANDROID_CONTROL_AWB_MODE_AUTO,
        /* append */ true);
    return metadata;
  }

  // Used as the stabilized callback target in tests; simply signals |done|.
  void On3AStabilizedCallback(base::WaitableEvent* done) { done->Signal(); }

 protected:
  base::Thread thread_;
  std::unique_ptr<MockCaptureMetadataDispatcher>
      mock_capture_metadata_dispatcher_;
  std::unique_ptr<Camera3AController> camera_3a_controller_;

 private:
  // Runs |closure| on the controller thread and signals |done| afterwards.
  void RunOnThread(const base::Location& location,
                   base::OnceClosure closure,
                   base::WaitableEvent* done) {
    DCHECK(thread_.task_runner()->BelongsToCurrentThread());
    std::move(closure).Run();
    done->Signal();
  }

  // Destroys the controller (if any) on the controller thread, expecting it
  // to unregister from the mock dispatcher.
  void Clear3AControllerOnThread() {
    DCHECK(thread_.task_runner()->BelongsToCurrentThread());
    if (camera_3a_controller_) {
      EXPECT_CALL(*mock_capture_metadata_dispatcher_,
                  RemoveResultMetadataObserver(camera_3a_controller_.get()))
          .Times(1);
    }
    camera_3a_controller_.reset();
  }

  // Recreates the controller, expecting the observer registration and the
  // three initial 3A mode requests made by its constructor.
  void Reset3AControllerOnThread(
      const cros::mojom::CameraMetadataPtr& static_metadata) {
    DCHECK(thread_.task_runner()->BelongsToCurrentThread());
    Clear3AControllerOnThread();
    EXPECT_CALL(*mock_capture_metadata_dispatcher_,
                AddResultMetadataObserver(_))
        .Times(1);
    EXPECT_CALL(*mock_capture_metadata_dispatcher_,
                SetCaptureMetadata(
                    cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
                    cros::mojom::EntryType::TYPE_BYTE, 1, _))
        .Times(1);
    EXPECT_CALL(*mock_capture_metadata_dispatcher_,
                SetCaptureMetadata(
                    cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE,
                    cros::mojom::EntryType::TYPE_BYTE, 1, _))
        .Times(1);
    EXPECT_CALL(*mock_capture_metadata_dispatcher_,
                SetCaptureMetadata(
                    cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE,
                    cros::mojom::EntryType::TYPE_BYTE, 1, _))
        .Times(1);
    camera_3a_controller_ = std::make_unique<Camera3AController>(
        static_metadata, mock_capture_metadata_dispatcher_.get(),
        thread_.task_runner());
  }
};
// Verifies the full still-capture stabilization flow: the controller must
// defer the AF/AE triggers until the HAL confirms the 3A modes, fire them on
// confirmation, and run the client callback once 3A states converge.
TEST_F(Camera3AControllerTest, Stabilize3AForStillCaptureTest) {
  Reset3AController(CreateDefaultFakeStaticMetadata());

  // Set AF mode.
  std::vector<uint8_t> af_trigger_start, af_trigger_cancel, af_mode, ae_trigger;
  af_trigger_start = {base::checked_cast<uint8_t>(
      cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_START)};
  af_trigger_cancel = {base::checked_cast<uint8_t>(
      cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_CANCEL)};
  af_mode = {base::checked_cast<uint8_t>(
      cros::mojom::AndroidControlAfMode::
          ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE)};
  ae_trigger = {base::checked_cast<uint8_t>(
      cros::mojom::AndroidControlAePrecaptureTrigger::
          ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START)};
  EXPECT_CALL(*mock_capture_metadata_dispatcher_,
              SetCaptureMetadata(
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
                  cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger_cancel))
      .Times(1);
  EXPECT_CALL(*mock_capture_metadata_dispatcher_,
              SetCaptureMetadata(
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
                  cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
      .Times(1);
  RunOnThreadSync(
      FROM_HERE,
      base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
                     base::Unretained(camera_3a_controller_.get())));

  // |camera_3a_controller_| should wait until the AF mode is set
  // before setting the AF and AE precapture triggers.
  EXPECT_CALL(*mock_capture_metadata_dispatcher_,
              SetCaptureMetadata(
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
                  cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger_start))
      .Times(0);
  EXPECT_CALL(
      *mock_capture_metadata_dispatcher_,
      SetCaptureMetadata(
          cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
          cros::mojom::EntryType::TYPE_BYTE, 1, ae_trigger))
      .Times(0);
  base::WaitableEvent done(base::WaitableEvent::ResetPolicy::MANUAL,
                           base::WaitableEvent::InitialState::NOT_SIGNALED);
  RunOnThreadSync(
      FROM_HERE,
      base::BindOnce(
          &Camera3AController::Stabilize3AForStillCapture,
          base::Unretained(camera_3a_controller_.get()),
          base::BindOnce(&Camera3AControllerTest::On3AStabilizedCallback,
                         base::Unretained(this), &done)));
  // Verify and clear the expectations on the mock dispatcher.  The original
  // code passed |camera_3a_controller_| here, which is not a mock, so the
  // call was a no-op and the .Times(0) expectations above were never
  // enforced at this point.
  testing::Mock::VerifyAndClearExpectations(
      mock_capture_metadata_dispatcher_.get());

  // |camera_3a_controller_| should set the AF and AE precapture triggers once
  // the 3A modes are set.
  auto result_metadata = CreateDefaultFakeStaticMetadata();
  Set3AMode(&result_metadata,
            cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
            cros::mojom::AndroidControlAfMode::
                ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE);
  Set3AMode(
      &result_metadata,
      cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_STATE,
      cros::mojom::AndroidControlAfState::ANDROID_CONTROL_AF_STATE_INACTIVE);
  Set3AMode(&result_metadata,
            cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE,
            cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_ON);
  Set3AMode(
      &result_metadata,
      cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_STATE,
      cros::mojom::AndroidControlAeState::ANDROID_CONTROL_AE_STATE_INACTIVE);
  Set3AMode(&result_metadata,
            cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE,
            cros::mojom::AndroidControlAwbMode::ANDROID_CONTROL_AWB_MODE_AUTO);
  Set3AMode(
      &result_metadata,
      cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_STATE,
      cros::mojom::AndroidControlAwbState::ANDROID_CONTROL_AWB_STATE_INACTIVE);
  EXPECT_CALL(*mock_capture_metadata_dispatcher_,
              SetCaptureMetadata(
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
                  cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger_start))
      .Times(1);
  EXPECT_CALL(
      *mock_capture_metadata_dispatcher_,
      SetCaptureMetadata(
          cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
          cros::mojom::EntryType::TYPE_BYTE, 1, ae_trigger))
      .Times(1);
  RunOnThreadSync(FROM_HERE,
                  base::BindOnce(&Camera3AController::OnResultMetadataAvailable,
                                 base::Unretained(camera_3a_controller_.get()),
                                 base::ConstRef(result_metadata)));

  // |camera_3a_controller_| should call the registered callback once 3A are
  // stabilized.
  Set3AMode(&result_metadata,
            cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_STATE,
            cros::mojom::AndroidControlAfState::
                ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
  Set3AMode(
      &result_metadata,
      cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_STATE,
      cros::mojom::AndroidControlAeState::ANDROID_CONTROL_AE_STATE_CONVERGED);
  Set3AMode(
      &result_metadata,
      cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_STATE,
      cros::mojom::AndroidControlAwbState::ANDROID_CONTROL_AWB_STATE_CONVERGED);
  RunOnThreadSync(FROM_HERE,
                  base::BindOnce(&Camera3AController::OnResultMetadataAvailable,
                                 base::Unretained(camera_3a_controller_.get()),
                                 base::ConstRef(result_metadata)));
  done.Wait();
}
// Test that SetAutoFocusModeForStillCapture sets the right auto-focus mode on
// cameras with different capabilities.
// Test that SetAutoFocusModeForStillCapture sets the right auto-focus mode on
// cameras with different capabilities.
//
// Fixes over the original: VerifyAndClearExpectations() is now called on the
// mock dispatcher (the object the EXPECT_CALLs are set on) instead of the
// non-mock controller, where it was a no-op; redundant af_mode.clear() calls
// immediately before reassignment are removed.
TEST_F(Camera3AControllerTest, SetAutoFocusModeForStillCaptureTest) {
  auto static_metadata = CreateDefaultFakeStaticMetadata();
  std::vector<uint8_t> af_mode;
  std::vector<uint8_t> af_trigger = {base::checked_cast<uint8_t>(
      cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_CANCEL)};

  // For camera that supports continuous auto-focus for picture mode.
  Reset3AController(static_metadata);
  af_mode = {base::checked_cast<uint8_t>(
      cros::mojom::AndroidControlAfMode::
          ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE)};
  EXPECT_CALL(*mock_capture_metadata_dispatcher_,
              SetCaptureMetadata(
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
                  cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
      .Times(1);
  EXPECT_CALL(*mock_capture_metadata_dispatcher_,
              SetCaptureMetadata(
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
                  cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
      .Times(1);
  RunOnThreadSync(
      FROM_HERE,
      base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
                     base::Unretained(camera_3a_controller_.get())));
  testing::Mock::VerifyAndClearExpectations(
      mock_capture_metadata_dispatcher_.get());

  // For camera that only supports basic auto focus.
  Set3AMode(&static_metadata,
            cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
            cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF);
  Set3AMode(&static_metadata,
            cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
            cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO,
            /* append */ true);
  Reset3AController(static_metadata);
  af_mode = {base::checked_cast<uint8_t>(
      cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO)};
  EXPECT_CALL(*mock_capture_metadata_dispatcher_,
              SetCaptureMetadata(
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
                  cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
      .Times(1);
  EXPECT_CALL(*mock_capture_metadata_dispatcher_,
              SetCaptureMetadata(
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
                  cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
      .Times(1);
  RunOnThreadSync(
      FROM_HERE,
      base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
                     base::Unretained(camera_3a_controller_.get())));
  testing::Mock::VerifyAndClearExpectations(
      mock_capture_metadata_dispatcher_.get());

  // For camera that is fixed-focus.
  Set3AMode(&static_metadata,
            cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
            cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF);
  Reset3AController(static_metadata);
  af_mode = {base::checked_cast<uint8_t>(
      cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF)};
  EXPECT_CALL(*mock_capture_metadata_dispatcher_,
              SetCaptureMetadata(
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
                  cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
      .Times(1);
  EXPECT_CALL(*mock_capture_metadata_dispatcher_,
              SetCaptureMetadata(
                  cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
                  cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
      .Times(1);
  RunOnThreadSync(
      FROM_HERE,
      base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
                     base::Unretained(camera_3a_controller_.get())));
  testing::Mock::VerifyAndClearExpectations(
      mock_capture_metadata_dispatcher_.get());
}
// Test that SetAutoFocusModeForVideoRecording sets the right auto-focus mode on
// cameras with different capabilities.
TEST_F(Camera3AControllerTest, SetAutoFocusModeForVideoRecordingTest) {
auto static_metadata = CreateDefaultFakeStaticMetadata();
std::vector<uint8_t> af_mode;
std::vector<uint8_t> af_trigger = {base::checked_cast<uint8_t>(
cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_CANCEL)};
// For camera that supports continuous auto-focus for picture mode.
Reset3AController(static_metadata);
af_mode = {base::checked_cast<uint8_t>(
cros::mojom::AndroidControlAfMode::
ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO)};
EXPECT_CALL(*mock_capture_metadata_dispatcher_,
SetCaptureMetadata(
cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
.Times(1);
EXPECT_CALL(*mock_capture_metadata_dispatcher_,
SetCaptureMetadata(
cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
.Times(1);
RunOnThreadSync(
FROM_HERE,
base::BindOnce(&Camera3AController::SetAutoFocusModeForVideoRecording,
base::Unretained(camera_3a_controller_.get())));
testing::Mock::VerifyAndClearExpectations(camera_3a_controller_.get());
// For camera that only supports basic auto focus.
Set3AMode(&static_metadata,
cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF);
Set3AMode(&static_metadata,
cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO,
/* append */ true);
Reset3AController(static_metadata);
af_mode.clear();
af_mode = {base::checked_cast<uint8_t>(
cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO)};
EXPECT_CALL(*mock_capture_metadata_dispatcher_,
SetCaptureMetadata(
cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
.Times(1);
EXPECT_CALL(*mock_capture_metadata_dispatcher_,
SetCaptureMetadata(
cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
.Times(1);
RunOnThreadSync(
FROM_HERE,
base::BindOnce(&Camera3AController::SetAutoFocusModeForVideoRecording,
base::Unretained(camera_3a_controller_.get())));
testing::Mock::VerifyAndClearExpectations(camera_3a_controller_.get());
// For camera that is fixed-focus.
Set3AMode(&static_metadata,
cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF);
Reset3AController(static_metadata);
af_mode.clear();
af_mode = {base::checked_cast<uint8_t>(
cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF)};
EXPECT_CALL(*mock_capture_metadata_dispatcher_,
SetCaptureMetadata(
cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
.Times(1);
EXPECT_CALL(*mock_capture_metadata_dispatcher_,
SetCaptureMetadata(
cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
.Times(1);
RunOnThreadSync(
FROM_HERE,
base::BindOnce(&Camera3AController::SetAutoFocusModeForVideoRecording,
base::Unretained(camera_3a_controller_.get())));
testing::Mock::VerifyAndClearExpectations(camera_3a_controller_.get());
}
} // namespace media
......@@ -21,8 +21,11 @@ CameraBufferFactory::CreateGpuMemoryBuffer(const gfx::Size& size,
LOG(ERROR) << "GpuMemoryBufferManager not set";
return std::unique_ptr<gfx::GpuMemoryBuffer>();
}
return buf_manager->CreateGpuMemoryBuffer(
size, format, gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
gfx::BufferUsage buffer_usage = gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE;
if (format == gfx::BufferFormat::R_8) {
buffer_usage = gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE;
}
return buf_manager->CreateGpuMemoryBuffer(size, format, buffer_usage,
gpu::kNullSurfaceHandle);
}
......
......@@ -56,7 +56,7 @@ class CAPTURE_EXPORT CameraDeviceContext {
//
// ConstructDefaultRequestSettings() ->
// OnConstructedDefaultRequestSettings() ->
//   |stream_buffer_manager_|->StartPreview()
//
// In the kCapturing state the |stream_buffer_manager_| runs the capture
// loop to send capture requests and process capture results.
......
......@@ -10,6 +10,9 @@
#include <vector>
#include "media/base/bind_to_current_loop.h"
#include "media/capture/mojom/image_capture_types.h"
#include "media/capture/video/blob_utils.h"
#include "media/capture/video/chromeos/camera_3a_controller.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_hal_delegate.h"
......@@ -20,6 +23,81 @@
namespace media {
namespace {
// Scans the static metadata's available stream configurations and returns,
// through |max_blob_width| / |max_blob_height|, the BLOB (JPEG) output
// resolution with the largest pixel count.  DCHECKs that at least one BLOB
// output configuration exists.
void GetMaxBlobStreamResolution(
    const cros::mojom::CameraMetadataPtr& static_metadata,
    int32_t* max_blob_width,
    int32_t* max_blob_height) {
  const cros::mojom::CameraMetadataEntryPtr* stream_configurations =
      GetMetadataEntry(static_metadata,
                       cros::mojom::CameraMetadataTag::
                           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
  DCHECK(stream_configurations);
  // The available stream configurations are stored as tuples of four int32s:
  //     (hal_pixel_format, width, height, type) x n
  const size_t kStreamFormatOffset = 0;
  const size_t kStreamWidthOffset = 1;
  const size_t kStreamHeightOffset = 2;
  const size_t kStreamTypeOffset = 3;
  const size_t kStreamConfigurationSize = 4;
  int32_t* iter =
      reinterpret_cast<int32_t*>((*stream_configurations)->data.data());
  *max_blob_width = 0;
  *max_blob_height = 0;
  // Track the maximum by pixel count.  Comparing width and height
  // independently would skip a configuration that is wider but not taller
  // than the current best (e.g. 2592x1458 after 2048x1536).
  int64_t max_blob_area = 0;
  for (size_t i = 0; i < (*stream_configurations)->count;
       i += kStreamConfigurationSize) {
    auto format =
        static_cast<cros::mojom::HalPixelFormat>(iter[kStreamFormatOffset]);
    int32_t width = iter[kStreamWidthOffset];
    int32_t height = iter[kStreamHeightOffset];
    auto type =
        static_cast<cros::mojom::Camera3StreamType>(iter[kStreamTypeOffset]);
    iter += kStreamConfigurationSize;
    if (type != cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT ||
        format != cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB) {
      continue;
    }
    // Widen before multiplying to avoid int32 overflow on large sensors.
    const int64_t area = static_cast<int64_t>(width) * height;
    if (area > max_blob_area) {
      max_blob_area = area;
      *max_blob_width = width;
      *max_blob_height = height;
    }
  }
  DCHECK_GT(*max_blob_width, 0);
  DCHECK_GT(*max_blob_height, 0);
}
// VideoCaptureDevice::TakePhotoCallback is given by the application and is used
// to return the captured JPEG blob buffer.  The second base::OnceClosure is
// created locally by the caller of TakePhoto(), and can be used to, for
// example, restore some settings to the values before TakePhoto() is called to
// facilitate the switch between photo and non-photo modes.  The blob is
// delivered first so the restore closure runs only after the photo has been
// handed back to the client.
void TakePhotoCallbackBundle(VideoCaptureDevice::TakePhotoCallback callback,
                             base::OnceClosure on_photo_taken_callback,
                             mojom::BlobPtr blob) {
  std::move(callback).Run(std::move(blob));
  std::move(on_photo_taken_callback).Run();
}
} // namespace
// Returns a human-readable name for |stream_type|, used for logging (see the
// operator<< overload for StreamType).  The previous closing-brace comment
// "// namespace media" was incorrect — this brace closes the function.
std::string StreamTypeToString(StreamType stream_type) {
  switch (stream_type) {
    case StreamType::kPreview:
      return std::string("StreamType::kPreview");
    case StreamType::kStillCapture:
      return std::string("StreamType::kStillCapture");
    default:
      return std::string("Unknown StreamType value: ") +
             std::to_string(static_cast<int32_t>(stream_type));
  }
}
// Streams the human-readable name of |stream_type| into |os|.
std::ostream& operator<<(std::ostream& os, StreamType stream_type) {
  os << StreamTypeToString(stream_type);
  return os;
}
// Out-of-line defaulted special members for StreamCaptureInterface::Plane.
StreamCaptureInterface::Plane::Plane() = default;
StreamCaptureInterface::Plane::~Plane() = default;
......@@ -110,7 +188,7 @@ void CameraDeviceDelegate::StopAndDeAllocate(
// The device delegate is in the process of opening the camera device.
return;
}
stream_buffer_manager_->StopCapture();
stream_buffer_manager_->StopPreview();
device_ops_->Close(
base::BindOnce(&CameraDeviceDelegate::OnClosed, GetWeakPtr()));
}
......@@ -118,23 +196,57 @@ void CameraDeviceDelegate::StopAndDeAllocate(
// Queues |callback| and, if the device is ready, kicks off 3A stabilization
// followed by a still-capture request.  The stale
// "TODO / NOTIMPLEMENTED" lines left from the pre-implementation version are
// removed: TakePhoto is implemented below.
void CameraDeviceDelegate::TakePhoto(
    VideoCaptureDevice::TakePhotoCallback callback) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  // Queue the callback unconditionally; if the device is not yet capturing
  // the pending callbacks are serviced once the preview starts.
  take_photo_callbacks_.push(std::move(callback));

  if (!device_context_ ||
      (device_context_->GetState() !=
           CameraDeviceContext::State::kStreamConfigured &&
       device_context_->GetState() != CameraDeviceContext::State::kCapturing)) {
    return;
  }

  // Wait until auto-focus/exposure/white-balance converge before requesting
  // the still-capture settings.
  camera_3a_controller_->Stabilize3AForStillCapture(
      base::BindOnce(&CameraDeviceDelegate::ConstructDefaultRequestSettings,
                     GetWeakPtr(), StreamType::kStillCapture));
}
// Reports the photo capabilities.  Only the still-capture stream's fixed
// width/height are filled in; everything else stays at the empty defaults.
// The stale "TODO / NOTIMPLEMENTED" lines from the pre-implementation version
// are removed.
void CameraDeviceDelegate::GetPhotoState(
    VideoCaptureDevice::GetPhotoStateCallback callback) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  auto photo_state = mojo::CreateEmptyPhotoState();

  // If the streams are not configured yet there is no resolution to report;
  // return the empty state.
  if (!device_context_ ||
      (device_context_->GetState() !=
           CameraDeviceContext::State::kStreamConfigured &&
       device_context_->GetState() != CameraDeviceContext::State::kCapturing)) {
    std::move(callback).Run(std::move(photo_state));
    return;
  }

  auto stream_config =
      stream_buffer_manager_->GetStreamConfiguration(StreamType::kStillCapture);
  if (stream_config) {
    // The still-capture stream is configured at a single fixed resolution, so
    // min == max == current and step is 0.
    photo_state->width->current = stream_config->width;
    photo_state->width->min = stream_config->width;
    photo_state->width->max = stream_config->width;
    photo_state->width->step = 0.0;
    photo_state->height->current = stream_config->height;
    photo_state->height->min = stream_config->height;
    photo_state->height->max = stream_config->height;
    photo_state->height->step = 0.0;
  }
  std::move(callback).Run(std::move(photo_state));
}
// Accepts photo options.  Options are not supported at the moment; |settings|
// is ignored and success is reported so callers are not blocked.  The stale
// "TODO / NOTIMPLEMENTED" lines from the pre-implementation version are
// removed in favor of the explicit "not supported" comment.
void CameraDeviceDelegate::SetPhotoOptions(
    mojom::PhotoSettingsPtr settings,
    VideoCaptureDevice::SetPhotoOptionsCallback callback) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  // Not supported at the moment.
  std::move(callback).Run(true);
}
void CameraDeviceDelegate::SetRotation(int rotation) {
......@@ -158,7 +270,7 @@ void CameraDeviceDelegate::OnMojoConnectionError() {
} else {
// The Mojo channel terminated unexpectedly.
if (stream_buffer_manager_) {
stream_buffer_manager_->StopCapture();
stream_buffer_manager_->StopPreview();
}
device_context_->SetState(CameraDeviceContext::State::kStopped);
device_context_->SetErrorState(FROM_HERE, "Mojo connection error");
......@@ -187,6 +299,7 @@ void CameraDeviceDelegate::ResetMojoInterface() {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
device_ops_.reset();
camera_3a_controller_.reset();
stream_buffer_manager_.reset();
}
......@@ -206,6 +319,7 @@ void CameraDeviceDelegate::OnGotCameraInfo(
device_context_->SetErrorState(FROM_HERE, "Failed to get camera info");
return;
}
SortCameraMetadata(&camera_info->static_camera_characteristics);
static_metadata_ = std::move(camera_info->static_camera_characteristics);
const cros::mojom::CameraMetadataEntryPtr* sensor_orientation =
......@@ -265,7 +379,9 @@ void CameraDeviceDelegate::Initialize() {
std::move(callback_ops_request),
std::make_unique<StreamCaptureInterfaceImpl>(GetWeakPtr()),
device_context_, std::make_unique<CameraBufferFactory>(),
ipc_task_runner_);
base::BindRepeating(&Blobify), ipc_task_runner_);
camera_3a_controller_ = std::make_unique<Camera3AController>(
static_metadata_, stream_buffer_manager_.get(), ipc_task_runner_);
device_ops_->Initialize(
std::move(callback_ops_ptr),
base::BindOnce(&CameraDeviceDelegate::OnInitialized, GetWeakPtr()));
......@@ -297,8 +413,7 @@ void CameraDeviceDelegate::ConfigureStreams() {
// Set up context for preview stream.
cros::mojom::Camera3StreamPtr preview_stream =
cros::mojom::Camera3Stream::New();
preview_stream->id = static_cast<uint64_t>(
cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
preview_stream->id = static_cast<uint64_t>(StreamType::kPreview);
preview_stream->stream_type =
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
preview_stream->width =
......@@ -311,9 +426,31 @@ void CameraDeviceDelegate::ConfigureStreams() {
preview_stream->rotation =
cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
// Set up context for still capture stream. We set still capture stream to the
// JPEG stream configuration with maximum supported resolution.
// TODO(jcliang): Once we support SetPhotoOptions() the still capture stream
// should be configured dynamically per the photo options.
int32_t max_blob_width = 0, max_blob_height = 0;
GetMaxBlobStreamResolution(static_metadata_, &max_blob_width,
&max_blob_height);
cros::mojom::Camera3StreamPtr still_capture_stream =
cros::mojom::Camera3Stream::New();
still_capture_stream->id = static_cast<uint64_t>(StreamType::kStillCapture);
still_capture_stream->stream_type =
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
still_capture_stream->width = max_blob_width;
still_capture_stream->height = max_blob_height;
still_capture_stream->format =
cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB;
still_capture_stream->data_space = 0;
still_capture_stream->rotation =
cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
cros::mojom::Camera3StreamConfigurationPtr stream_config =
cros::mojom::Camera3StreamConfiguration::New();
stream_config->streams.push_back(std::move(preview_stream));
stream_config->streams.push_back(std::move(still_capture_stream));
stream_config->operation_mode = cros::mojom::Camera3StreamConfigurationMode::
CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
device_ops_->ConfigureStreams(
......@@ -337,43 +474,46 @@ void CameraDeviceDelegate::OnConfiguredStreams(
std::string(strerror(result)));
return;
}
if (!updated_config || updated_config->streams.size() != 1) {
if (!updated_config ||
updated_config->streams.size() != kMaxConfiguredStreams) {
device_context_->SetErrorState(
FROM_HERE, std::string("Wrong number of streams configured: ") +
std::to_string(updated_config->streams.size()));
return;
}
// The partial result count metadata is optional; defaults to 1 in case it
// is not set in the static metadata.
uint32_t partial_result_count = 1;
const cros::mojom::CameraMetadataEntryPtr* partial_count = GetMetadataEntry(
static_metadata_,
cros::mojom::CameraMetadataTag::ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
if (partial_count) {
partial_result_count =
*reinterpret_cast<int32_t*>((*partial_count)->data.data());
}
stream_buffer_manager_->SetUpStreamAndBuffers(
chrome_capture_params_.requested_format, partial_result_count,
std::move(updated_config->streams[0]));
stream_buffer_manager_->SetUpStreamsAndBuffers(
chrome_capture_params_.requested_format, static_metadata_,
std::move(updated_config->streams));
device_context_->SetState(CameraDeviceContext::State::kStreamConfigured);
ConstructDefaultRequestSettings();
// Kick off the preview stream.
ConstructDefaultRequestSettings(StreamType::kPreview);
}
void CameraDeviceDelegate::ConstructDefaultRequestSettings() {
void CameraDeviceDelegate::ConstructDefaultRequestSettings(
StreamType stream_type) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK_EQ(device_context_->GetState(),
CameraDeviceContext::State::kStreamConfigured);
DCHECK(device_context_->GetState() ==
CameraDeviceContext::State::kStreamConfigured ||
device_context_->GetState() == CameraDeviceContext::State::kCapturing);
if (stream_type == StreamType::kPreview) {
device_ops_->ConstructDefaultRequestSettings(
cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW,
base::BindOnce(&CameraDeviceDelegate::OnConstructedDefaultRequestSettings,
base::BindOnce(
&CameraDeviceDelegate::OnConstructedDefaultPreviewRequestSettings,
GetWeakPtr()));
} else { // stream_type == StreamType::kStillCapture
device_ops_->ConstructDefaultRequestSettings(
cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_STILL_CAPTURE,
base::BindOnce(&CameraDeviceDelegate::
OnConstructedDefaultStillCaptureRequestSettings,
GetWeakPtr()));
}
}
void CameraDeviceDelegate::OnConstructedDefaultRequestSettings(
void CameraDeviceDelegate::OnConstructedDefaultPreviewRequestSettings(
cros::mojom::CameraMetadataPtr settings) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
......@@ -389,7 +529,29 @@ void CameraDeviceDelegate::OnConstructedDefaultRequestSettings(
return;
}
device_context_->SetState(CameraDeviceContext::State::kCapturing);
stream_buffer_manager_->StartCapture(std::move(settings));
camera_3a_controller_->SetAutoFocusModeForStillCapture();
stream_buffer_manager_->StartPreview(std::move(settings));
if (!take_photo_callbacks_.empty()) {
camera_3a_controller_->Stabilize3AForStillCapture(
base::BindOnce(&CameraDeviceDelegate::ConstructDefaultRequestSettings,
GetWeakPtr(), StreamType::kStillCapture));
}
}
// Issues one still-capture request per pending TakePhoto callback.
//
// Bug fix: the original moved |settings| inside the loop, so when more than
// one callback was queued every request after the first received a null
// (moved-from) metadata ptr.  Clone the settings for all but the last
// request; the last one consumes |settings| itself.
void CameraDeviceDelegate::OnConstructedDefaultStillCaptureRequestSettings(
    cros::mojom::CameraMetadataPtr settings) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());

  while (!take_photo_callbacks_.empty()) {
    auto request_settings = take_photo_callbacks_.size() == 1
                                ? std::move(settings)
                                : settings.Clone();
    stream_buffer_manager_->TakePhoto(
        std::move(request_settings),
        base::BindOnce(
            &TakePhotoCallbackBundle, std::move(take_photo_callbacks_.front()),
            // After the photo is delivered, restore continuous AF for preview.
            base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
                           camera_3a_controller_->GetWeakPtr())));
    take_photo_callbacks_.pop();
  }
}
void CameraDeviceDelegate::RegisterBuffer(
......
......@@ -17,10 +17,21 @@
namespace media {
class CameraHalDelegate;
class Camera3AController;
class CameraDeviceContext;
class CameraHalDelegate;
class StreamBufferManager;
// The camera streams configured on the device.  The numeric values double as
// the Camera3 stream ids (ConfigureStreams casts the enum value directly to
// the stream id), so kPreview and kStillCapture must keep their values.
enum class StreamType : int32_t {
  kPreview = 0,
  kStillCapture = 1,
  kUnknown,
};
std::string StreamTypeToString(StreamType stream_type);
std::ostream& operator<<(std::ostream& os, StreamType stream_type);
// The interface to register buffer with and send capture request to the
// camera HAL.
class CAPTURE_EXPORT StreamCaptureInterface {
......@@ -122,10 +133,14 @@ class CAPTURE_EXPORT CameraDeviceDelegate final {
// settings of the stream in |stream_context_|.
// OnConstructedDefaultRequestSettings sets the request settings in
// |streams_context_|. If there's no error
// OnConstructedDefaultRequestSettings calls StartCapture to start the video
// capture loop.
void ConstructDefaultRequestSettings();
void OnConstructedDefaultRequestSettings(
// OnConstructedDefaultPreviewRequestSettings calls StartPreview to start the
// video capture loop.
// OnConstructDefaultStillCaptureRequestSettings triggers
// |stream_buffer_manager_| to request a still capture.
void ConstructDefaultRequestSettings(StreamType stream_type);
void OnConstructedDefaultPreviewRequestSettings(
cros::mojom::CameraMetadataPtr settings);
void OnConstructedDefaultStillCaptureRequestSettings(
cros::mojom::CameraMetadataPtr settings);
// StreamCaptureInterface implementations. These methods are called by
......@@ -151,8 +166,12 @@ class CAPTURE_EXPORT CameraDeviceDelegate final {
CameraDeviceContext* device_context_;
std::queue<VideoCaptureDevice::TakePhotoCallback> take_photo_callbacks_;
std::unique_ptr<StreamBufferManager> stream_buffer_manager_;
std::unique_ptr<Camera3AController> camera_3a_controller_;
// Stores the static camera characteristics of the camera device. E.g. the
// supported formats and resolution, various available exposure and apeture
// settings, etc.
......
......@@ -110,8 +110,11 @@ class MockCameraDevice : public cros::mojom::Camera3DeviceOps {
DISALLOW_COPY_AND_ASSIGN(MockCameraDevice);
};
constexpr int32_t kJpegMaxBufferSize = 1024;
constexpr size_t kDefaultWidth = 1280, kDefaultHeight = 720;
const VideoCaptureDeviceDescriptor kDefaultDescriptor("Fake device", "0");
const VideoCaptureFormat kDefaultCaptureFormat(gfx::Size(1280, 720),
const VideoCaptureFormat kDefaultCaptureFormat(gfx::Size(kDefaultWidth,
kDefaultHeight),
30.0,
PIXEL_FORMAT_I420);
......@@ -153,16 +156,60 @@ class CameraDeviceDelegateTest : public ::testing::Test {
cros::mojom::CameraInfoPtr camera_info = cros::mojom::CameraInfo::New();
cros::mojom::CameraMetadataPtr static_metadata =
cros::mojom::CameraMetadata::New();
static_metadata->entry_count = 3;
static_metadata->entry_capacity = 3;
static_metadata->entries =
std::vector<cros::mojom::CameraMetadataEntryPtr>();
cros::mojom::CameraMetadataEntryPtr entry =
cros::mojom::CameraMetadataEntry::New();
entry->index = 0;
entry->tag = cros::mojom::CameraMetadataTag::
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
entry->type = cros::mojom::EntryType::TYPE_INT32;
entry->count = 12;
std::vector<int32_t> stream_configurations(entry->count);
stream_configurations[0] = static_cast<int32_t>(
cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
stream_configurations[1] = kDefaultWidth;
stream_configurations[2] = kDefaultHeight;
stream_configurations[3] = static_cast<int32_t>(
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT);
stream_configurations[4] = static_cast<int32_t>(
cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888);
stream_configurations[5] = kDefaultWidth;
stream_configurations[6] = kDefaultHeight;
stream_configurations[7] = static_cast<int32_t>(
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT);
stream_configurations[8] = static_cast<int32_t>(
cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB);
stream_configurations[9] = kDefaultWidth;
stream_configurations[10] = kDefaultHeight;
stream_configurations[11] = static_cast<int32_t>(
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT);
uint8_t* as_int8 = reinterpret_cast<uint8_t*>(stream_configurations.data());
entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
static_metadata->entries->push_back(std::move(entry));
entry = cros::mojom::CameraMetadataEntry::New();
entry->index = 1;
entry->tag = cros::mojom::CameraMetadataTag::ANDROID_SENSOR_ORIENTATION;
entry->type = cros::mojom::EntryType::TYPE_INT32;
entry->count = 1;
entry->data = std::vector<uint8_t>(4, 0);
static_metadata->entries =
std::vector<cros::mojom::CameraMetadataEntryPtr>();
static_metadata->entries->push_back(std::move(entry));
entry = cros::mojom::CameraMetadataEntry::New();
entry->index = 2;
entry->tag = cros::mojom::CameraMetadataTag::ANDROID_JPEG_MAX_SIZE;
entry->type = cros::mojom::EntryType::TYPE_INT32;
entry->count = 1;
int32_t jpeg_max_size = kJpegMaxBufferSize;
as_int8 = reinterpret_cast<uint8_t*>(&jpeg_max_size);
entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
static_metadata->entries->push_back(std::move(entry));
switch (camera_id) {
case 0:
camera_info->facing = cros::mojom::CameraFacing::CAMERA_FACING_FRONT;
......@@ -195,23 +242,17 @@ class CameraDeviceDelegateTest : public ::testing::Test {
base::OnceCallback<void(int32_t,
cros::mojom::Camera3StreamConfigurationPtr)>&
callback) {
ASSERT_EQ(1u, config->streams.size());
ASSERT_EQ(static_cast<uint32_t>(kDefaultCaptureFormat.frame_size.width()),
config->streams[0]->width);
ASSERT_EQ(static_cast<uint32_t>(kDefaultCaptureFormat.frame_size.height()),
config->streams[0]->height);
ASSERT_EQ(cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888,
config->streams[0]->format);
config->streams[0]->usage = 0;
config->streams[0]->max_buffers = 1;
ASSERT_EQ(2u, config->streams.size());
for (size_t i = 0; i < config->streams.size(); ++i) {
config->streams[i]->usage = 0;
config->streams[i]->max_buffers = 1;
}
std::move(callback).Run(0, std::move(config));
}
void ConstructFakeRequestSettings(
cros::mojom::Camera3RequestTemplate type,
base::OnceCallback<void(cros::mojom::CameraMetadataPtr)>& callback) {
ASSERT_EQ(cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW,
type);
cros::mojom::CameraMetadataPtr fake_settings =
cros::mojom::CameraMetadata::New();
fake_settings->entry_count = 1;
......@@ -291,10 +332,27 @@ class CameraDeviceDelegateTest : public ::testing::Test {
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
EXPECT_CALL(
mock_gpu_memory_buffer_manager_,
CreateGpuMemoryBuffer(_, gfx::BufferFormat::R_8,
gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE,
gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
EXPECT_CALL(
mock_gpu_memory_buffer_manager_,
CreateGpuMemoryBuffer(gfx::Size(kDefaultWidth, kDefaultHeight),
gfx::BufferFormat::YUV_420_BIPLANAR,
gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
EXPECT_CALL(mock_gpu_memory_buffer_manager_,
CreateGpuMemoryBuffer(
gfx::Size(1280, 720), gfx::BufferFormat::YUV_420_BIPLANAR,
gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
gfx::Size(kJpegMaxBufferSize, 1), gfx::BufferFormat::R_8,
gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE,
gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
......
......@@ -130,21 +130,25 @@ void CameraHalDelegate::GetSupportedFormats(
reinterpret_cast<int64_t*>((*min_frame_durations)->data.data());
for (size_t i = 0; i < (*min_frame_durations)->count;
i += kStreamDurationSize) {
int32_t format = base::checked_cast<int32_t>(iter[kStreamFormatOffset]);
auto hal_format =
static_cast<cros::mojom::HalPixelFormat>(iter[kStreamFormatOffset]);
int32_t width = base::checked_cast<int32_t>(iter[kStreamWidthOffset]);
int32_t height = base::checked_cast<int32_t>(iter[kStreamHeightOffset]);
int64_t duration = iter[kStreamDurationOffset];
iter += kStreamDurationSize;
if (hal_format == cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB) {
// Skip BLOB formats and use it only for TakePicture() since it's
// inefficient to stream JPEG frames for CrOS camera HAL.
continue;
}
if (duration <= 0) {
LOG(ERROR) << "Ignoring invalid frame duration: " << duration;
continue;
}
float max_fps = 1.0 * 1000000000LL / duration;
DVLOG(1) << "[" << std::hex << format << " " << std::dec << width << " "
<< height << " " << duration << "]";
auto hal_format = static_cast<cros::mojom::HalPixelFormat>(format);
const ChromiumPixelFormat cr_format =
camera_buffer_factory_->ResolveStreamBufferFormat(hal_format);
if (cr_format.video_format == PIXEL_FORMAT_UNKNOWN) {
......@@ -327,6 +331,7 @@ void CameraHalDelegate::OnGotCameraInfoOnIpcThread(
LOG(ERROR) << "Failed to get camera info. Camera id: " << camera_id;
}
// In case of error |camera_info| is empty.
SortCameraMetadata(&camera_info->static_camera_characteristics);
camera_info_[std::to_string(camera_id)] = std::move(camera_info);
if (camera_info_.size() == num_builtin_cameras_) {
builtin_camera_info_updated_.Signal();
......
......@@ -4,22 +4,43 @@
#include "media/capture/video/chromeos/camera_metadata_utils.h"
#include <set>
#include <algorithm>
#include <unordered_set>
namespace media {
// Returns a pointer to the metadata entry with |tag| in |camera_metadata|, or
// nullptr if |camera_metadata| is null or the tag is not present.
//
// Fixes in this span: the stale pre-refactor lines (old const-returning
// signature, old null check, and the old range-for loop) interleaved with the
// new version are removed.  Also returns the entry located by find_if
// directly instead of re-indexing through the entry's |index| field, which
// returned the wrong entry whenever the index fields were stale (i.e.
// SortCameraMetadata had not been run on this metadata).
cros::mojom::CameraMetadataEntryPtr* GetMetadataEntry(
    const cros::mojom::CameraMetadataPtr& camera_metadata,
    cros::mojom::CameraMetadataTag tag) {
  if (!camera_metadata || !camera_metadata->entries.has_value()) {
    return nullptr;
  }
  auto& entries = camera_metadata->entries.value();
  auto iter = std::find_if(entries.begin(), entries.end(),
                           [tag](const cros::mojom::CameraMetadataEntryPtr& e) {
                             return e->tag == tag;
                           });
  if (iter == entries.end()) {
    return nullptr;
  }
  return &(*iter);
}
// Sorts the entries of |camera_metadata| by tag and rewrites each entry's
// |index| field to its new position.  No-op on null or entry-less metadata.
//
// Bug fix: a stray "return nullptr;" (a leftover from the old
// GetMetadataEntry body) sat before the closing brace of this void function,
// which is invalid C++; it is removed.
void SortCameraMetadata(cros::mojom::CameraMetadataPtr* camera_metadata) {
  if (!camera_metadata || !(*camera_metadata) ||
      !(*camera_metadata)->entries.has_value()) {
    return;
  }
  auto& entries = (*camera_metadata)->entries.value();
  std::sort(entries.begin(), entries.end(),
            [](const cros::mojom::CameraMetadataEntryPtr& a,
               const cros::mojom::CameraMetadataEntryPtr& b) {
              return a->tag < b->tag;
            });
  // Keep the |index| fields consistent with the sorted order.
  for (size_t i = 0; i < entries.size(); ++i) {
    entries[i]->index = i;
  }
}
void MergeMetadata(cros::mojom::CameraMetadataPtr* to,
......@@ -34,7 +55,7 @@ void MergeMetadata(cros::mojom::CameraMetadataPtr* to,
return;
}
std::set<cros::mojom::CameraMetadataTag> tags;
std::unordered_set<cros::mojom::CameraMetadataTag> tags;
if ((*to)->entries) {
for (const auto& entry : (*to)->entries.value()) {
tags.insert(entry->tag);
......
......@@ -5,15 +5,20 @@
#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_METADATA_UTILS_H_
#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_METADATA_UTILS_H_
#include "media/capture/capture_export.h"
#include "media/capture/video/chromeos/mojo/camera_metadata.mojom.h"
namespace media {
// Returns the entry with |tag| from |camera_metadata|, or nullptr if absent.
//
// The stale pre-refactor declarations are removed: the old
// "const ...* GetMetadataEntry(...)" declaration differed only in return type
// from the new one (a redeclaration error), and MergeMetadata was declared
// twice.
CAPTURE_EXPORT cros::mojom::CameraMetadataEntryPtr* GetMetadataEntry(
    const cros::mojom::CameraMetadataPtr& camera_metadata,
    cros::mojom::CameraMetadataTag tag);

// Sort the camera metadata entries using the metadata tags.
CAPTURE_EXPORT void SortCameraMetadata(
    cros::mojom::CameraMetadataPtr* camera_metadata);

CAPTURE_EXPORT void MergeMetadata(cros::mojom::CameraMetadataPtr* to,
                                  const cros::mojom::CameraMetadataPtr& from);
} // namespace media
......
......@@ -51,6 +51,8 @@ gbm_device* CreateGbmDevice() {
uint32_t GetDrmFormat(gfx::BufferFormat gfx_format) {
switch (gfx_format) {
case gfx::BufferFormat::R_8:
return DRM_FORMAT_R8;
case gfx::BufferFormat::YUV_420_BIPLANAR:
return DRM_FORMAT_NV12;
// Add more formats when needed.
......@@ -186,7 +188,8 @@ LocalGpuMemoryBufferManager::CreateGpuMemoryBuffer(
gfx::BufferFormat format,
gfx::BufferUsage usage,
gpu::SurfaceHandle surface_handle) {
if (usage != gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE) {
if (usage != gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE &&
usage != gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE) {
LOG(ERROR) << "Unsupported gfx::BufferUsage" << static_cast<int>(usage);
return std::unique_ptr<gfx::GpuMemoryBuffer>();
}
......
......@@ -31,6 +31,9 @@ struct SupportedFormat {
// support YUV flexbile format video streams.
{cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888,
{PIXEL_FORMAT_NV12, gfx::BufferFormat::YUV_420_BIPLANAR}},
// FIXME(jcliang): MJPEG is not accurate; we should have BLOB or JPEG
{cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB,
{PIXEL_FORMAT_MJPEG, gfx::BufferFormat::R_8}},
// Add more mappings when we have more devices.
};
......@@ -51,6 +54,8 @@ uint32_t PixFormatVideoToDrm(VideoPixelFormat from) {
switch (from) {
case PIXEL_FORMAT_NV12:
return DRM_FORMAT_NV12;
case PIXEL_FORMAT_MJPEG:
return DRM_FORMAT_R8;
default:
// Unsupported format.
return 0;
......
......@@ -15,16 +15,40 @@
namespace media {
namespace {
// Extracts the buffer index packed into the low 32 bits of |buffer_id|.
size_t GetBufferIndex(uint64_t buffer_id) {
  return static_cast<uint32_t>(buffer_id);
}
// Maps a Camera3 stream id back to its StreamType.  CameraDeviceDelegate
// assigns each stream an id equal to the numeric value of its StreamType, so
// the case labels are spelled in terms of the enumerators instead of the
// magic numbers 0/1 — this keeps the mapping correct if the enum values are
// ever reordered.
StreamType StreamIdToStreamType(uint64_t stream_id) {
  switch (stream_id) {
    case static_cast<uint64_t>(StreamType::kPreview):
      return StreamType::kPreview;
    case static_cast<uint64_t>(StreamType::kStillCapture):
      return StreamType::kStillCapture;
    default:
      return StreamType::kUnknown;
  }
}
} // namespace
StreamBufferManager::StreamBufferManager(
cros::mojom::Camera3CallbackOpsRequest callback_ops_request,
std::unique_ptr<StreamCaptureInterface> capture_interface,
CameraDeviceContext* device_context,
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
base::RepeatingCallback<mojom::BlobPtr(
const uint8_t* buffer,
const uint32_t bytesused,
const VideoCaptureFormat& capture_format)> blobify_callback,
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
: callback_ops_(this, std::move(callback_ops_request)),
capture_interface_(std::move(capture_interface)),
device_context_(device_context),
camera_buffer_factory_(std::move(camera_buffer_factory)),
blobify_callback_(std::move(blobify_callback)),
ipc_task_runner_(std::move(ipc_task_runner)),
capturing_(false),
frame_number_(0),
......@@ -38,53 +62,88 @@ StreamBufferManager::StreamBufferManager(
// Unmaps every buffer still mapped in any per-stream context so the backing
// GpuMemoryBuffers are released cleanly.  The stale single-stream lines
// ("if (stream_context_) {" and the old inner loop) interleaved with the new
// per-stream-map loop are removed — they left the braces unbalanced and do
// not compile against the map-typed |stream_context_| member.
StreamBufferManager::~StreamBufferManager() {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  for (const auto& iter : stream_context_) {
    if (iter.second) {
      for (const auto& buf : iter.second->buffers) {
        if (buf) {
          buf->Unmap();
        }
      }
    }
  }
}
void StreamBufferManager::SetUpStreamAndBuffers(
void StreamBufferManager::SetUpStreamsAndBuffers(
VideoCaptureFormat capture_format,
uint32_t partial_result_count,
cros::mojom::Camera3StreamPtr stream) {
const cros::mojom::CameraMetadataPtr& static_metadata,
std::vector<cros::mojom::Camera3StreamPtr> streams) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(!stream_context_);
DCHECK(!stream_context_[StreamType::kPreview]);
VLOG(2) << "Stream " << stream->id << " configured: usage=" << stream->usage
// The partial result count metadata is optional; defaults to 1 in case it
// is not set in the static metadata.
const cros::mojom::CameraMetadataEntryPtr* partial_count = GetMetadataEntry(
static_metadata,
cros::mojom::CameraMetadataTag::ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
if (partial_count) {
partial_result_count_ =
*reinterpret_cast<int32_t*>((*partial_count)->data.data());
}
for (auto& stream : streams) {
DVLOG(2) << "Stream " << stream->id
<< " configured: usage=" << stream->usage
<< " max_buffers=" << stream->max_buffers;
const size_t kMaximumAllowedBuffers = 15;
if (stream->max_buffers > kMaximumAllowedBuffers) {
device_context_->SetErrorState(
FROM_HERE, std::string("Camera HAL requested ") +
FROM_HERE,
std::string("Camera HAL requested ") +
std::to_string(stream->max_buffers) +
std::string(" buffers which exceeds the allowed maximum "
"number of buffers"));
return;
}
partial_result_count_ = partial_result_count;
stream_context_ = std::make_unique<StreamContext>();
stream_context_->capture_format = capture_format;
stream_context_->stream = std::move(stream);
// A better way to tell the stream type here would be to check on the usage
// flags of the stream.
StreamType stream_type;
if (stream->format ==
cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888) {
stream_type = StreamType::kPreview;
} else { // stream->format ==
// cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB
stream_type = StreamType::kStillCapture;
}
stream_context_[stream_type] = std::make_unique<StreamContext>();
stream_context_[stream_type]->capture_format = capture_format;
stream_context_[stream_type]->stream = std::move(stream);
const ChromiumPixelFormat stream_format =
camera_buffer_factory_->ResolveStreamBufferFormat(
stream_context_->stream->format);
stream_context_->capture_format.pixel_format = stream_format.video_format;
stream_context_[stream_type]->stream->format);
stream_context_[stream_type]->capture_format.pixel_format =
stream_format.video_format;
// Allocate buffers.
size_t num_buffers = stream_context_->stream->max_buffers;
stream_context_->buffers.resize(num_buffers);
size_t num_buffers = stream_context_[stream_type]->stream->max_buffers;
stream_context_[stream_type]->buffers.resize(num_buffers);
int32_t buffer_width, buffer_height;
if (stream_type == StreamType::kPreview) {
buffer_width = stream_context_[stream_type]->stream->width;
buffer_height = stream_context_[stream_type]->stream->height;
} else { // StreamType::kStillCapture
const cros::mojom::CameraMetadataEntryPtr* jpeg_max_size =
GetMetadataEntry(
static_metadata,
cros::mojom::CameraMetadataTag::ANDROID_JPEG_MAX_SIZE);
buffer_width = *reinterpret_cast<int32_t*>((*jpeg_max_size)->data.data());
buffer_height = 1;
}
for (size_t j = 0; j < num_buffers; ++j) {
auto buffer = camera_buffer_factory_->CreateGpuMemoryBuffer(
gfx::Size(stream_context_->stream->width,
stream_context_->stream->height),
stream_format.gfx_format);
gfx::Size(buffer_width, buffer_height), stream_format.gfx_format);
if (!buffer) {
device_context_->SetErrorState(FROM_HERE,
"Failed to create GpuMemoryBuffer");
......@@ -96,49 +155,142 @@ void StreamBufferManager::SetUpStreamAndBuffers(
"Failed to map GpuMemoryBuffer");
return;
}
stream_context_->buffers[j] = std::move(buffer);
stream_context_->free_buffers.push(j);
stream_context_[stream_type]->buffers[j] = std::move(buffer);
stream_context_[stream_type]->free_buffers.push(
GetBufferIpcId(stream_type, j));
}
DVLOG(2) << "Allocated "
<< stream_context_[stream_type]->stream->max_buffers << " buffers";
}
VLOG(2) << "Allocated " << stream_context_->stream->max_buffers << " buffers";
}
void StreamBufferManager::StartCapture(
cros::mojom::CameraMetadataPtr settings) {
void StreamBufferManager::StartPreview(
cros::mojom::CameraMetadataPtr preview_settings) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(stream_context_);
DCHECK(stream_context_->request_settings.is_null());
DCHECK(stream_context_[StreamType::kPreview]);
DCHECK(repeating_request_settings_.is_null());
capturing_ = true;
stream_context_->request_settings = std::move(settings);
repeating_request_settings_ = std::move(preview_settings);
// We cannot use a loop to register all the free buffers in one shot here
// because the camera HAL v3 API specifies that the client cannot call
// ProcessCaptureRequest before the previous one returns.
RegisterBuffer();
RegisterBuffer(StreamType::kPreview);
}
void StreamBufferManager::StopCapture() {
void StreamBufferManager::StopPreview() {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
capturing_ = false;
}
void StreamBufferManager::RegisterBuffer() {
// Returns a clone of the configured stream for |stream_type|, or an empty
// Camera3Stream if no stream of that type has been configured.
cros::mojom::Camera3StreamPtr StreamBufferManager::GetStreamConfiguration(
    StreamType stream_type) {
  // Use find() instead of count() + operator[]: it avoids a double lookup,
  // and operator[] would default-insert a null StreamContext entry for an
  // unconfigured stream type as a side effect.
  auto it = stream_context_.find(stream_type);
  if (it == stream_context_.end()) {
    return cros::mojom::Camera3Stream::New();
  }
  return it->second->stream.Clone();
}
// Queues a one-shot still capture request. |settings| carries the capture
// metadata for this shot and |callback| receives the resulting JPEG blob
// once the BLOB buffer comes back from the camera HAL (see
// SubmitCaptureResult). Must be called on the IPC thread.
void StreamBufferManager::TakePhoto(
cros::mojom::CameraMetadataPtr settings,
VideoCaptureDevice::TakePhotoCallback callback) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
// NOTE(review): operator[] default-inserts a null entry if the still
// capture stream is absent, and DCHECK may not evaluate its argument in
// official builds -- consider a lookup without operator[]; verify intent.
DCHECK(stream_context_[StreamType::kStillCapture]);
pending_still_capture_callbacks_.push(std::move(callback));
oneshot_request_settings_.push(std::move(settings));
// Kick buffer registration so a still capture buffer is included in the
// next capture request.
RegisterBuffer(StreamType::kStillCapture);
}
// Registers |observer| to be notified of the result metadata of every frame.
// Must be called on the IPC thread; an observer may be added at most once.
void StreamBufferManager::AddResultMetadataObserver(
    ResultMetadataObserver* observer) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  DCHECK(result_metadata_observers_.find(observer) ==
         result_metadata_observers_.end());
  result_metadata_observers_.insert(observer);
}
void StreamBufferManager::RemoveResultMetadataObserver(
ResultMetadataObserver* observer) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(stream_context_);
DCHECK(result_metadata_observers_.count(observer));
result_metadata_observers_.erase(observer);
}
// Queues a metadata override entry; ApplyCaptureSettings() merges all queued
// entries into the next capture request sent to the camera HAL.
void StreamBufferManager::SetCaptureMetadata(cros::mojom::CameraMetadataTag tag,
                                             cros::mojom::EntryType type,
                                             size_t count,
                                             std::vector<uint8_t> value) {
  DCHECK(ipc_task_runner_->BelongsToCurrentThread());
  auto entry = cros::mojom::CameraMetadataEntry::New();
  entry->data = std::move(value);
  entry->tag = tag;
  entry->type = type;
  entry->count = count;
  capture_settings_override_.push_back(std::move(entry));
}
// static
// Encodes a (stream type, buffer index) pair into the single buffer id used
// in IPC with the camera HAL: stream type in the upper 32 bits, buffer index
// in the lower 32 bits.
uint64_t StreamBufferManager::GetBufferIpcId(StreamType stream_type,
                                             size_t index) {
  // Cast through uint64_t (not int64_t) so the shifted stream type can never
  // sign-extend into the id, and mask the index to 32 bits so an oversized
  // index cannot corrupt the stream-type bits. In practice the index is
  // bounded by the per-stream max_buffers limit (<= 15).
  return (static_cast<uint64_t>(stream_type) << 32) |
         (static_cast<uint64_t>(index) & 0xFFFFFFFFu);
}
// Merges all queued SetCaptureMetadata() overrides into |capture_settings|
// and clears the override queue. Entries already present in the settings are
// replaced in place; new entries are appended and the metadata bookkeeping
// counters are updated accordingly. Must be called on the IPC thread.
void StreamBufferManager::ApplyCaptureSettings(
cros::mojom::CameraMetadataPtr* capture_settings) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
if (capture_settings_override_.empty()) {
return;
}
for (auto& s : capture_settings_override_) {
auto* entry = GetMetadataEntry(*capture_settings, s->tag);
if (entry) {
// The tag already exists: swap in the override entry. The types must
// match since a tag always carries a fixed entry type.
DCHECK_EQ((*entry)->type, s->type);
(*entry).Swap(&s);
} else {
// The tag is new: grow the metadata counters to account for the extra
// entry and its payload before appending it.
(*capture_settings)->entry_count += 1;
(*capture_settings)->entry_capacity += 1;
(*capture_settings)->data_count += s->data.size();
(*capture_settings)->data_capacity += s->data.size();
if (!(*capture_settings)->entries) {
(*capture_settings)->entries =
std::vector<cros::mojom::CameraMetadataEntryPtr>();
}
(*capture_settings)->entries.value().push_back(std::move(s));
}
}
capture_settings_override_.clear();
// Entries must stay sorted by tag for GetMetadataEntry lookups to work.
SortCameraMetadata(capture_settings);
}
void StreamBufferManager::RegisterBuffer(StreamType stream_type) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(stream_context_[stream_type]);
if (!capturing_) {
return;
}
if (stream_context_->free_buffers.empty()) {
if (stream_context_[stream_type]->free_buffers.empty()) {
return;
}
size_t buffer_id = stream_context_->free_buffers.front();
stream_context_->free_buffers.pop();
size_t buffer_id = stream_context_[stream_type]->free_buffers.front();
stream_context_[stream_type]->free_buffers.pop();
const gfx::GpuMemoryBuffer* buffer =
stream_context_->buffers[buffer_id].get();
stream_context_[stream_type]->buffers[GetBufferIndex(buffer_id)].get();
VideoPixelFormat buffer_format = stream_context_->capture_format.pixel_format;
VideoPixelFormat buffer_format =
stream_context_[stream_type]->capture_format.pixel_format;
uint32_t drm_format = PixFormatVideoToDrm(buffer_format);
if (!drm_format) {
device_context_->SetErrorState(
......@@ -147,7 +299,7 @@ void StreamBufferManager::RegisterBuffer() {
return;
}
cros::mojom::HalPixelFormat hal_pixel_format =
stream_context_->stream->format;
stream_context_[stream_type]->stream->format;
gfx::NativePixmapHandle buffer_handle =
buffer->GetHandle().native_pixmap_handle;
......@@ -178,15 +330,18 @@ void StreamBufferManager::RegisterBuffer() {
// gralloc buffers.
capture_interface_->RegisterBuffer(
buffer_id, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, drm_format,
hal_pixel_format, stream_context_->stream->width,
stream_context_->stream->height, std::move(planes),
hal_pixel_format, buffer->GetSize().width(), buffer->GetSize().height(),
std::move(planes),
base::BindOnce(&StreamBufferManager::OnRegisteredBuffer,
weak_ptr_factory_.GetWeakPtr(), buffer_id));
VLOG(2) << "Registered buffer " << buffer_id;
weak_ptr_factory_.GetWeakPtr(), stream_type, buffer_id));
DVLOG(2) << "Registered buffer " << buffer_id;
}
void StreamBufferManager::OnRegisteredBuffer(size_t buffer_id, int32_t result) {
void StreamBufferManager::OnRegisteredBuffer(StreamType stream_type,
size_t buffer_id,
int32_t result) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(stream_context_[stream_type]);
if (!capturing_) {
return;
......@@ -197,32 +352,64 @@ void StreamBufferManager::OnRegisteredBuffer(size_t buffer_id, int32_t result) {
std::string(strerror(result)));
return;
}
ProcessCaptureRequest(buffer_id);
stream_context_[stream_type]->registered_buffers.push(buffer_id);
ProcessCaptureRequest();
}
void StreamBufferManager::ProcessCaptureRequest(size_t buffer_id) {
void StreamBufferManager::ProcessCaptureRequest() {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(stream_context_);
DCHECK(stream_context_[StreamType::kPreview]);
DCHECK(stream_context_[StreamType::kStillCapture]);
cros::mojom::Camera3CaptureRequestPtr request =
cros::mojom::Camera3CaptureRequest::New();
request->frame_number = frame_number_;
CaptureResult& pending_result = pending_results_[frame_number_];
if (!stream_context_[StreamType::kPreview]->registered_buffers.empty()) {
cros::mojom::Camera3StreamBufferPtr buffer =
cros::mojom::Camera3StreamBuffer::New();
buffer->stream_id = static_cast<uint64_t>(
cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
buffer->buffer_id = buffer_id;
buffer->stream_id = static_cast<uint64_t>(StreamType::kPreview);
buffer->buffer_id =
stream_context_[StreamType::kPreview]->registered_buffers.front();
stream_context_[StreamType::kPreview]->registered_buffers.pop();
buffer->status = cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK;
cros::mojom::Camera3CaptureRequestPtr request =
cros::mojom::Camera3CaptureRequest::New();
request->frame_number = frame_number_;
request->settings = stream_context_->request_settings.Clone();
DVLOG(2) << "Requested capture for stream " << StreamType::kPreview
<< " in frame " << frame_number_;
request->settings = repeating_request_settings_.Clone();
request->output_buffers.push_back(std::move(buffer));
}
if (!stream_context_[StreamType::kStillCapture]->registered_buffers.empty()) {
DCHECK(!pending_still_capture_callbacks_.empty());
cros::mojom::Camera3StreamBufferPtr buffer =
cros::mojom::Camera3StreamBuffer::New();
buffer->stream_id = static_cast<uint64_t>(StreamType::kStillCapture);
buffer->buffer_id =
stream_context_[StreamType::kStillCapture]->registered_buffers.front();
stream_context_[StreamType::kStillCapture]->registered_buffers.pop();
buffer->status = cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK;
DVLOG(2) << "Requested capture for stream " << StreamType::kStillCapture
<< " in frame " << frame_number_;
// Use the still capture settings and override the preview ones.
request->settings = std::move(oneshot_request_settings_.front());
oneshot_request_settings_.pop();
pending_result.still_capture_callback =
std::move(pending_still_capture_callbacks_.front());
pending_still_capture_callbacks_.pop();
request->output_buffers.push_back(std::move(buffer));
}
pending_result.unsubmitted_buffer_count = request->output_buffers.size();
ApplyCaptureSettings(&request->settings);
capture_interface_->ProcessCaptureRequest(
std::move(request),
base::BindOnce(&StreamBufferManager::OnProcessedCaptureRequest,
weak_ptr_factory_.GetWeakPtr()));
VLOG(2) << "Requested capture for frame " << frame_number_ << " with buffer "
<< buffer_id;
frame_number_++;
}
......@@ -238,7 +425,8 @@ void StreamBufferManager::OnProcessedCaptureRequest(int32_t result) {
std::string(strerror(result)));
return;
}
RegisterBuffer();
// Keeps the preview stream going.
RegisterBuffer(StreamType::kPreview);
}
void StreamBufferManager::ProcessCaptureResult(
......@@ -251,70 +439,78 @@ void StreamBufferManager::ProcessCaptureResult(
uint32_t frame_number = result->frame_number;
// A new partial result may be created in either ProcessCaptureResult or
// Notify.
CaptureResult& partial_result = partial_results_[frame_number];
if (partial_results_.size() > stream_context_->stream->max_buffers) {
CaptureResult& pending_result = pending_results_[frame_number];
// |result->partial_result| is set to 0 if the capture result contains only
// the result buffer handles and no result metadata.
if (result->partial_result) {
uint32_t result_id = result->partial_result;
if (result_id > partial_result_count_) {
device_context_->SetErrorState(
FROM_HERE,
"Received more capture results than the maximum number of buffers");
FROM_HERE, std::string("Invalid pending_result id: ") +
std::to_string(result_id));
return;
}
if (pending_result.partial_metadata_received.count(result_id)) {
device_context_->SetErrorState(
FROM_HERE, std::string("Received duplicated partial metadata: ") +
std::to_string(result_id));
return;
}
DVLOG(2) << "Received partial result " << result_id << " for frame "
<< frame_number;
pending_result.partial_metadata_received.insert(result_id);
MergeMetadata(&pending_result.metadata, result->result);
}
if (result->output_buffers) {
if (result->output_buffers->size() != 1) {
if (result->output_buffers->size() > kMaxConfiguredStreams) {
device_context_->SetErrorState(
FROM_HERE,
std::string("Incorrect number of output buffers received: ") +
std::to_string(result->output_buffers->size()));
return;
}
cros::mojom::Camera3StreamBufferPtr& stream_buffer =
result->output_buffers.value()[0];
VLOG(2) << "Received capture result for frame " << frame_number
for (auto& stream_buffer : result->output_buffers.value()) {
DVLOG(2) << "Received capture result for frame " << frame_number
<< " stream_id: " << stream_buffer->stream_id;
StreamType stream_type = StreamIdToStreamType(stream_buffer->stream_id);
if (stream_type == StreamType::kUnknown) {
device_context_->SetErrorState(
FROM_HERE,
std::string("Invalid type of output buffers received: ") +
std::to_string(stream_buffer->stream_id));
return;
}
// The camera HAL v3 API specifies that only one capture result can carry
// the result buffer for any given frame number.
if (!partial_result.buffer.is_null()) {
if (stream_context_[stream_type]->capture_results_with_buffer.count(
frame_number)) {
device_context_->SetErrorState(
FROM_HERE,
std::string("Received multiple result buffers for frame ") +
std::to_string(frame_number));
std::to_string(frame_number) + std::string(" for stream ") +
std::to_string(stream_buffer->stream_id));
return;
} else {
partial_result.buffer = std::move(stream_buffer);
// If the buffer is marked as error it is due to either a request or a
// buffer error. In either case the content of the buffer must be dropped
// and the buffer can be reused. We simply submit the buffer here and
// don't wait for any partial results. SubmitCaptureResult() will drop
// and reuse the buffer.
if (partial_result.buffer->status ==
cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
SubmitCaptureResult(frame_number);
return;
}
}
}
// |result->partial_result| is set to 0 if the capture result contains only
// the result buffer handles and no result metadata.
if (result->partial_result) {
uint32_t result_id = result->partial_result;
if (result_id > partial_result_count_) {
device_context_->SetErrorState(
FROM_HERE, std::string("Invalid partial_result id: ") +
std::to_string(result_id));
return;
pending_result.buffers[stream_type] = std::move(stream_buffer);
stream_context_[stream_type]->capture_results_with_buffer[frame_number] =
&pending_result;
if (pending_result.buffers[stream_type]->status ==
cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
// If the buffer is marked as error, its content is discarded for this
// frame. Send the buffer to the free list directly through
// SubmitCaptureResult.
SubmitCaptureResult(frame_number, stream_type);
}
if (partial_result.partial_metadata_received.find(result_id) !=
partial_result.partial_metadata_received.end()) {
device_context_->SetErrorState(
FROM_HERE, std::string("Received duplicated partial metadata: ") +
std::to_string(result_id));
return;
}
partial_result.partial_metadata_received.insert(result_id);
MergeMetadata(&partial_result.metadata, result->result);
}
SubmitCaptureResultIfComplete(frame_number);
for (const auto& iter : stream_context_) {
SubmitCaptureResultIfComplete(frame_number, iter.first);
}
}
void StreamBufferManager::Notify(cros::mojom::Camera3NotifyMsgPtr message) {
......@@ -326,46 +522,49 @@ void StreamBufferManager::Notify(cros::mojom::Camera3NotifyMsgPtr message) {
if (message->type == cros::mojom::Camera3MsgType::CAMERA3_MSG_ERROR) {
uint32_t frame_number = message->message->get_error()->frame_number;
uint64_t error_stream_id = message->message->get_error()->error_stream_id;
StreamType stream_type = StreamIdToStreamType(error_stream_id);
if (stream_type == StreamType::kUnknown) {
device_context_->SetErrorState(
FROM_HERE, std::string("Unknown stream in Camera3NotifyMsg: ") +
std::to_string(error_stream_id));
return;
}
cros::mojom::Camera3ErrorMsgCode error_code =
message->message->get_error()->error_code;
HandleNotifyError(frame_number, error_stream_id, error_code);
HandleNotifyError(frame_number, stream_type, error_code);
} else { // cros::mojom::Camera3MsgType::CAMERA3_MSG_SHUTTER
uint32_t frame_number = message->message->get_shutter()->frame_number;
uint64_t shutter_time = message->message->get_shutter()->timestamp;
// A new partial result may be created in either ProcessCaptureResult or
// Notify.
VLOG(2) << "Received shutter time for frame " << frame_number;
DVLOG(2) << "Received shutter time for frame " << frame_number;
if (!shutter_time) {
device_context_->SetErrorState(
FROM_HERE, std::string("Received invalid shutter time: ") +
std::to_string(shutter_time));
return;
}
CaptureResult& partial_result = partial_results_[frame_number];
if (partial_results_.size() > stream_context_->stream->max_buffers) {
device_context_->SetErrorState(
FROM_HERE,
"Received more capture results than the maximum number of buffers");
return;
}
CaptureResult& pending_result = pending_results_[frame_number];
// Shutter timestamp is in ns.
base::TimeTicks reference_time =
base::TimeTicks::FromInternalValue(shutter_time / 1000);
partial_result.reference_time = reference_time;
pending_result.reference_time = reference_time;
if (first_frame_shutter_time_.is_null()) {
// Record the shutter time of the first frame for calculating the
// timestamp.
first_frame_shutter_time_ = reference_time;
}
partial_result.timestamp = reference_time - first_frame_shutter_time_;
SubmitCaptureResultIfComplete(frame_number);
pending_result.timestamp = reference_time - first_frame_shutter_time_;
for (const auto& iter : stream_context_) {
SubmitCaptureResultIfComplete(frame_number, iter.first);
}
}
}
void StreamBufferManager::HandleNotifyError(
uint32_t frame_number,
uint64_t error_stream_id,
StreamType stream_type,
cros::mojom::Camera3ErrorMsgCode error_code) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
std::string warning_msg;
switch (error_code) {
......@@ -400,7 +599,7 @@ void StreamBufferManager::HandleNotifyError(
case cros::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_BUFFER:
// An error has occurred in placing the output buffer into a stream for
// a request. |frame_number| specifies the request for which the buffer
// was dropped, and |error_stream_id| specifies the stream that dropped
// was dropped, and |stream_type| specifies the stream that dropped
// the buffer.
//
// The HAL will call ProcessCaptureResult with the buffer's state set to
......@@ -409,7 +608,7 @@ void StreamBufferManager::HandleNotifyError(
warning_msg =
std::string(
"An error occurred while filling output buffer of stream ") +
std::to_string(error_stream_id) + std::string(" in frame ") +
StreamTypeToString(stream_type) + std::string(" in frame ") +
std::to_string(frame_number);
break;
......@@ -418,55 +617,76 @@ void StreamBufferManager::HandleNotifyError(
break;
}
LOG(WARNING) << warning_msg;
LOG(WARNING) << warning_msg << stream_type;
device_context_->LogToClient(warning_msg);
// If the buffer is already returned by the HAL, submit it and we're done.
auto partial_result = partial_results_.find(frame_number);
if (partial_result != partial_results_.end() &&
!partial_result->second.buffer.is_null()) {
SubmitCaptureResult(frame_number);
if (pending_results_.count(frame_number) &&
pending_results_[frame_number].buffers.count(stream_type)) {
SubmitCaptureResult(frame_number, stream_type);
}
}
void StreamBufferManager::SubmitCaptureResultIfComplete(uint32_t frame_number) {
void StreamBufferManager::SubmitCaptureResultIfComplete(
uint32_t frame_number,
StreamType stream_type) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(partial_results_.find(frame_number) != partial_results_.end());
CaptureResult& partial_result = partial_results_[frame_number];
if (partial_result.partial_metadata_received.size() < partial_result_count_ ||
partial_result.buffer.is_null() ||
partial_result.reference_time == base::TimeTicks()) {
// We can only submit the result buffer when:
// 1. All the result metadata are received, and
// 2. The result buffer is received, and
// 3. The the shutter time is received.
if (!pending_results_.count(frame_number)) {
// The capture result may be discarded in case of error.
return;
}
SubmitCaptureResult(frame_number);
CaptureResult& pending_result = pending_results_[frame_number];
if (!stream_context_[stream_type]->capture_results_with_buffer.count(
frame_number) ||
pending_result.partial_metadata_received.size() < partial_result_count_ ||
pending_result.reference_time == base::TimeTicks()) {
// We can only submit the result buffer of |frame_number| for |stream_type|
// when:
// 1. The result buffer for |stream_type| is received, and
// 2. All the result metadata are received, and
// 3. The shutter time is received.
return;
}
SubmitCaptureResult(frame_number, stream_type);
}
void StreamBufferManager::SubmitCaptureResult(uint32_t frame_number) {
void StreamBufferManager::SubmitCaptureResult(uint32_t frame_number,
StreamType stream_type) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(partial_results_.find(frame_number) != partial_results_.end());
CaptureResult& partial_result = partial_results_[frame_number];
if (partial_results_.begin()->first != frame_number) {
DCHECK(pending_results_.count(frame_number));
DCHECK(stream_context_[stream_type]->capture_results_with_buffer.count(
frame_number));
CaptureResult& pending_result =
*stream_context_[stream_type]->capture_results_with_buffer[frame_number];
if (stream_context_[stream_type]
->capture_results_with_buffer.begin()
->first != frame_number) {
device_context_->SetErrorState(
FROM_HERE, std::string("Received frame is out-of-order; expect ") +
std::to_string(partial_results_.begin()->first) +
std::to_string(pending_results_.begin()->first) +
std::string(" but got ") + std::to_string(frame_number));
return;
}
VLOG(2) << "Submit capture result of frame " << frame_number;
uint32_t buffer_id = partial_result.buffer->buffer_id;
DVLOG(2) << "Submit capture result of frame " << frame_number
<< " for stream " << static_cast<int>(stream_type);
for (auto* iter : result_metadata_observers_) {
iter->OnResultMetadataAvailable(pending_result.metadata);
}
DCHECK(pending_result.buffers[stream_type]);
const cros::mojom::Camera3StreamBufferPtr& stream_buffer =
pending_result.buffers[stream_type];
uint64_t buffer_id = stream_buffer->buffer_id;
// Wait on release fence before delivering the result buffer to client.
if (partial_result.buffer->release_fence.is_valid()) {
if (stream_buffer->release_fence.is_valid()) {
const int kSyncWaitTimeoutMs = 1000;
mojo::edk::ScopedPlatformHandle fence;
MojoResult result = mojo::edk::PassWrappedPlatformHandle(
partial_result.buffer->release_fence.release().value(), &fence);
stream_buffer->release_fence.release().value(), &fence);
if (result != MOJO_RESULT_OK) {
device_context_->SetErrorState(FROM_HERE,
"Failed to unwrap release fence fd");
......@@ -479,17 +699,55 @@ void StreamBufferManager::SubmitCaptureResult(uint32_t frame_number) {
}
}
// Deliver the captured data to client and then re-queue the buffer.
if (partial_result.buffer->status !=
// Deliver the captured data to client.
if (stream_buffer->status !=
cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
gfx::GpuMemoryBuffer* buffer = stream_context_->buffers[buffer_id].get();
device_context_->SubmitCapturedData(buffer, stream_context_->capture_format,
partial_result.reference_time,
partial_result.timestamp);
}
stream_context_->free_buffers.push(buffer_id);
partial_results_.erase(frame_number);
RegisterBuffer();
size_t buffer_index = GetBufferIndex(buffer_id);
gfx::GpuMemoryBuffer* buffer =
stream_context_[stream_type]->buffers[buffer_index].get();
if (stream_type == StreamType::kPreview) {
device_context_->SubmitCapturedData(
buffer, stream_context_[StreamType::kPreview]->capture_format,
pending_result.reference_time, pending_result.timestamp);
ipc_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&StreamBufferManager::RegisterBuffer,
weak_ptr_factory_.GetWeakPtr(), StreamType::kPreview));
} else { // StreamType::kStillCapture
DCHECK(pending_result.still_capture_callback);
const Camera3JpegBlob* header = reinterpret_cast<Camera3JpegBlob*>(
reinterpret_cast<uintptr_t>(buffer->memory(0)) +
buffer->GetSize().width() - sizeof(Camera3JpegBlob));
if (header->jpeg_blob_id != kCamera3JpegBlobId) {
device_context_->SetErrorState(FROM_HERE, "Invalid JPEG blob");
return;
}
mojom::BlobPtr blob = blobify_callback_.Run(
reinterpret_cast<uint8_t*>(buffer->memory(0)), header->jpeg_size,
stream_context_[stream_type]->capture_format);
if (blob) {
std::move(pending_result.still_capture_callback).Run(std::move(blob));
} else {
LOG(ERROR) << "Failed to blobify the captured JPEG image";
}
}
}
stream_context_[stream_type]->free_buffers.push(buffer_id);
stream_context_[stream_type]->capture_results_with_buffer.erase(frame_number);
pending_result.unsubmitted_buffer_count--;
if (!pending_result.unsubmitted_buffer_count) {
pending_results_.erase(frame_number);
}
if (stream_type == StreamType::kPreview) {
// Always keep the preview stream running.
RegisterBuffer(StreamType::kPreview);
} else { // stream_type == StreamType::kStillCapture
if (!pending_still_capture_callbacks_.empty()) {
RegisterBuffer(StreamType::kStillCapture);
}
}
}
StreamBufferManager::StreamContext::StreamContext() = default;
......@@ -497,7 +755,8 @@ StreamBufferManager::StreamContext::StreamContext() = default;
StreamBufferManager::StreamContext::~StreamContext() = default;
StreamBufferManager::CaptureResult::CaptureResult()
: metadata(cros::mojom::CameraMetadata::New()) {}
: metadata(cros::mojom::CameraMetadata::New()),
unsubmitted_buffer_count(0) {}
StreamBufferManager::CaptureResult::~CaptureResult() = default;
......
......@@ -5,6 +5,11 @@
#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_STREAM_BUFFER_MANAGER_H_
#define MEDIA_CAPTURE_VIDEO_CHROMEOS_STREAM_BUFFER_MANAGER_H_
#include <memory>
#include <queue>
#include <unordered_map>
#include <vector>
#include "base/containers/queue.h"
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
......@@ -24,29 +29,65 @@ namespace media {
class CameraBufferFactory;
class CameraDeviceContext;
// One stream for preview, one stream for still capture.
constexpr size_t kMaxConfiguredStreams = 2;
// The JPEG transport header as defined by Android camera HAL v3 API. The JPEG
// transport header is at the end of the blob buffer filled by the HAL.
constexpr uint16_t kCamera3JpegBlobId = 0x00FF;
struct Camera3JpegBlob {
// Identifier marking a valid JPEG transport header; a valid blob has this
// set to kCamera3JpegBlobId.
uint16_t jpeg_blob_id;
// Size in bytes of the JPEG image data placed at the start of the buffer.
uint32_t jpeg_size;
// NOTE(review): the header is located via sizeof() arithmetic from the end
// of the blob buffer, so this layout (including padding) must match the
// camera3_jpeg_blob struct in the Android camera HAL v3 headers -- confirm
// whether explicit packing is required.
};
// Interface for dispatching per-frame capture result metadata to registered
// observers, and for collecting metadata overrides to be applied to future
// capture requests.
class CAPTURE_EXPORT CaptureMetadataDispatcher {
 public:
  // Implemented by clients that want to inspect the result metadata of each
  // captured frame.
  class ResultMetadataObserver {
   public:
    virtual ~ResultMetadataObserver() = default;
    virtual void OnResultMetadataAvailable(
        const cros::mojom::CameraMetadataPtr&) = 0;
  };

  virtual ~CaptureMetadataDispatcher() = default;
  virtual void AddResultMetadataObserver(ResultMetadataObserver* observer) = 0;
  virtual void RemoveResultMetadataObserver(
      ResultMetadataObserver* observer) = 0;
  // Queues a metadata entry to be merged into the next capture request.
  virtual void SetCaptureMetadata(cros::mojom::CameraMetadataTag tag,
                                  cros::mojom::EntryType type,
                                  size_t count,
                                  std::vector<uint8_t> value) = 0;
};
// StreamBufferManager is responsible for managing the buffers of the
// stream. StreamBufferManager allocates buffers according to the given
// stream configuration, and circulates the buffers along with capture
// requests and results between Chrome and the camera HAL process.
class CAPTURE_EXPORT StreamBufferManager final
: public cros::mojom::Camera3CallbackOps {
: public cros::mojom::Camera3CallbackOps,
public CaptureMetadataDispatcher {
public:
StreamBufferManager(
cros::mojom::Camera3CallbackOpsRequest callback_ops_request,
std::unique_ptr<StreamCaptureInterface> capture_interface,
CameraDeviceContext* device_context,
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
base::RepeatingCallback<mojom::BlobPtr(
const uint8_t* buffer,
const uint32_t bytesused,
const VideoCaptureFormat& capture_format)> blobify_callback,
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
~StreamBufferManager() final;
~StreamBufferManager() override;
// Sets up the stream contexts and allocates buffers according to the
// configurations specified in |streams|.
void SetUpStreamAndBuffers(VideoCaptureFormat capture_format,
uint32_t partial_result_count,
cros::mojom::Camera3StreamPtr stream);
void SetUpStreamsAndBuffers(
VideoCaptureFormat capture_format,
const cros::mojom::CameraMetadataPtr& static_metadata,
std::vector<cros::mojom::Camera3StreamPtr> streams);
// StartCapture is the entry point to starting the video capture. The way
// StartPreview is the entry point to starting the video capture. The way
// the video capture loop works is:
//
// (1) If there is a free buffer, RegisterBuffer registers the buffer with
......@@ -60,24 +101,48 @@ class CAPTURE_EXPORT StreamBufferManager final
// SubmitCaptureResultIfComplete is called to deliver the filled buffer
// to Chrome. After the buffer is consumed by Chrome it is enqueued back
// to the free buffer queue. Goto (1) to start another capture loop.
void StartCapture(cros::mojom::CameraMetadataPtr settings);
//
// When TakePhoto() is called, an additional BLOB buffer is queued in step (2)
// to let the HAL fill the still capture JPEG image. When the JPEG image is
// returned in (4), it's passed to upper layer through the TakePhotoCallback.
void StartPreview(cros::mojom::CameraMetadataPtr preview_settings);
// Stops the capture loop. After StopCapture is called |callback_ops_| is
// Stops the capture loop. After StopPreview is called |callback_ops_| is
// unbound, so no new capture request or result will be processed.
void StopCapture();
void StopPreview();
cros::mojom::Camera3StreamPtr GetStreamConfiguration(StreamType stream_type);
void TakePhoto(cros::mojom::CameraMetadataPtr settings,
VideoCaptureDevice::TakePhotoCallback callback);
// CaptureMetadataDispatcher implementations.
void AddResultMetadataObserver(ResultMetadataObserver* observer) override;
void RemoveResultMetadataObserver(ResultMetadataObserver* observer) override;
// Queues a capture setting that will be sent along with the earliest next
// capture request.
void SetCaptureMetadata(cros::mojom::CameraMetadataTag tag,
cros::mojom::EntryType type,
size_t count,
std::vector<uint8_t> value) override;
static uint64_t GetBufferIpcId(StreamType stream_type, size_t index);
private:
friend class StreamBufferManagerTest;
// Registers a free buffer, if any, to the camera HAL.
void RegisterBuffer();
// Registers a free buffer, if any, for the give |stream_type| to the camera
// HAL.
void RegisterBuffer(StreamType stream_type);
// Calls ProcessCaptureRequest if the buffer specified by |buffer_id| is
// successfully registered.
void OnRegisteredBuffer(size_t buffer_id, int32_t result);
void OnRegisteredBuffer(StreamType stream_type,
size_t buffer_id,
int32_t result);
// The capture request contains the buffer handle specified by |buffer_id|.
void ProcessCaptureRequest(size_t buffer_id);
// The capture request contains the buffer handles waiting to be filled.
void ProcessCaptureRequest();
// Calls RegisterBuffer to attempt to register any remaining free buffers.
void OnProcessedCaptureRequest(int32_t result);
......@@ -86,23 +151,27 @@ class CAPTURE_EXPORT StreamBufferManager final
// ProcessCaptureResult receives the result metadata as well as the filled
// buffer from camera HAL. The result metadata may be divided and delivered
// in several stages. Before all the result metadata is received the
// partial results are kept in |partial_results_|.
void ProcessCaptureResult(cros::mojom::Camera3CaptureResultPtr result) final;
// partial results are kept in |pending_results_|.
void ProcessCaptureResult(
cros::mojom::Camera3CaptureResultPtr result) override;
// Notify receives the shutter time of capture requests and various errors
// from camera HAL. The shutter time is used as the timestamp in the video
// frame delivered to Chrome.
void Notify(cros::mojom::Camera3NotifyMsgPtr message) final;
void Notify(cros::mojom::Camera3NotifyMsgPtr message) override;
void HandleNotifyError(uint32_t frame_number,
uint64_t error_stream_id,
StreamType stream_type,
cros::mojom::Camera3ErrorMsgCode error_code);
// Submits the captured buffer of frame |frame_number_| to Chrome if all the
// required metadata and the captured buffer are received. After the buffer
// is submitted the function then enqueues the buffer to free buffer queue for
// the next capture request.
void SubmitCaptureResultIfComplete(uint32_t frame_number);
void SubmitCaptureResult(uint32_t frame_number);
// Submits the captured buffer of frame |frame_number_| for the give
// |stream_type| to Chrome if all the required metadata and the captured
// buffer are received. After the buffer is submitted the function then
// enqueues the buffer to free buffer queue for the next capture request.
void SubmitCaptureResultIfComplete(uint32_t frame_number,
StreamType stream_type);
void SubmitCaptureResult(uint32_t frame_number, StreamType stream_type);
void ApplyCaptureSettings(cros::mojom::CameraMetadataPtr* capture_settings);
mojo::Binding<cros::mojom::Camera3CallbackOps> callback_ops_;
......@@ -112,6 +181,12 @@ class CAPTURE_EXPORT StreamBufferManager final
std::unique_ptr<CameraBufferFactory> camera_buffer_factory_;
base::RepeatingCallback<mojom::BlobPtr(
const uint8_t* buffer,
const uint32_t bytesused,
const VideoCaptureFormat& capture_format)>
blobify_callback_;
// Where all the Mojo IPC calls takes place.
const scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner_;
......@@ -122,26 +197,7 @@ class CAPTURE_EXPORT StreamBufferManager final
// to zero in AllocateAndStart.
uint32_t frame_number_;
struct StreamContext {
StreamContext();
~StreamContext();
// The actual pixel format used in the capture request.
VideoCaptureFormat capture_format;
// The camera HAL stream.
cros::mojom::Camera3StreamPtr stream;
// The request settings used in the capture request of this stream.
cros::mojom::CameraMetadataPtr request_settings;
// The allocated buffers of this stream.
std::vector<std::unique_ptr<gfx::GpuMemoryBuffer>> buffers;
// The free buffers of this stream. The queue stores indices into the
// |buffers| vector.
base::queue<size_t> free_buffers;
};
// The stream context of the preview stream.
std::unique_ptr<StreamContext> stream_context_;
// CaptureResult is used to hold the partial capture results for each frame.
// CaptureResult is used to hold the pending capture results for each frame.
struct CaptureResult {
CaptureResult();
~CaptureResult();
......@@ -153,14 +209,59 @@ class CAPTURE_EXPORT StreamBufferManager final
// The result metadata. Contains various information about the captured
// frame.
cros::mojom::CameraMetadataPtr metadata;
// The buffer handle that hold the captured data of this frame.
cros::mojom::Camera3StreamBufferPtr buffer;
// The buffer handles that hold the captured data of this frame.
std::unordered_map<StreamType, cros::mojom::Camera3StreamBufferPtr> buffers;
// The set of the partial metadata received. For each capture result, the
// total number of partial metadata should equal to
// |partial_result_count_|.
std::set<uint32_t> partial_metadata_received;
// Incremented for every stream buffer requested for the given frame.
// StreamBufferManager destructs the CaptureResult when
// |unsubmitted_buffer_count| drops to zero.
size_t unsubmitted_buffer_count;
// The callback used to return the captured still capture JPEG buffer. Set
// if and only if the capture request was sent with a still capture buffer.
VideoCaptureDevice::TakePhotoCallback still_capture_callback;
};
struct StreamContext {
StreamContext();
~StreamContext();
// The actual pixel format used in the capture request.
VideoCaptureFormat capture_format;
// The camera HAL stream.
cros::mojom::Camera3StreamPtr stream;
// The allocated buffers of this stream.
std::vector<std::unique_ptr<gfx::GpuMemoryBuffer>> buffers;
// The free buffers of this stream. The queue stores indices into the
// |buffers| vector.
std::queue<size_t> free_buffers;
// The buffers that are registered to the HAL, which can be used as the
// output buffers for capture requests.
std::queue<size_t> registered_buffers;
// The pointers to the pending capture results that have unsubmitted result
// buffers.
std::map<uint32_t, CaptureResult*> capture_results_with_buffer;
};
// The context for the set of active streams.
std::unordered_map<StreamType, std::unique_ptr<StreamContext>>
stream_context_;
// The repeating request settings. The settings come from the default preview
// request settings reported by the HAL. |repeating_request_settings_| is the
// default settings for each capture request.
cros::mojom::CameraMetadataPtr repeating_request_settings_;
// A queue of oneshot request settings. These are the request settings for
// each still capture requests. |oneshot_request_settings_| overrides
// |repeating_request_settings_| if present.
std::queue<cros::mojom::CameraMetadataPtr> oneshot_request_settings_;
// The pending callbacks for the TakePhoto requests.
std::queue<VideoCaptureDevice::TakePhotoCallback>
pending_still_capture_callbacks_;
// The number of partial stages. |partial_result_count_| is learned by
// querying |static_metadata_|. In case the result count is absent in
// |static_metadata_|, it defaults to one which means all the result
......@@ -173,8 +274,15 @@ class CAPTURE_EXPORT StreamBufferManager final
// |first_frame_shutter_time_|.
base::TimeTicks first_frame_shutter_time_;
// Stores the partial capture results of the current in-flight frames.
std::map<uint32_t, CaptureResult> partial_results_;
// Stores the pending capture results of the current in-flight frames.
std::map<uint32_t, CaptureResult> pending_results_;
// StreamBufferManager does not own the ResultMetadataObservers. The
// observers are responsible for removing itself before self-destruction.
std::unordered_set<ResultMetadataObserver*> result_metadata_observers_;
// The list of settings to set/override in the capture request.
std::vector<cros::mojom::CameraMetadataEntryPtr> capture_settings_override_;
base::WeakPtrFactory<StreamBufferManager> weak_ptr_factory_;
......
......@@ -21,6 +21,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "media/capture/video/blob_utils.h"
using testing::_;
using testing::A;
using testing::AtLeast;
......@@ -109,6 +110,10 @@ class StreamBufferManagerTest : public ::testing::Test {
std::move(callback_ops_request),
std::make_unique<MockStreamCaptureInterface>(), device_context_.get(),
std::make_unique<FakeCameraBufferFactory>(),
base::BindRepeating([](const uint8_t* buffer, const uint32_t bytesused,
const VideoCaptureFormat& capture_format) {
return mojom::Blob::New();
}),
base::ThreadTaskRunnerHandle::Get());
}
......@@ -126,6 +131,39 @@ class StreamBufferManagerTest : public ::testing::Test {
}
}
cros::mojom::CameraMetadataPtr GetFakeStaticMetadata(
int32_t partial_result_count) {
cros::mojom::CameraMetadataPtr static_metadata =
cros::mojom::CameraMetadata::New();
static_metadata->entry_count = 2;
static_metadata->entry_capacity = 2;
static_metadata->entries =
std::vector<cros::mojom::CameraMetadataEntryPtr>();
cros::mojom::CameraMetadataEntryPtr entry =
cros::mojom::CameraMetadataEntry::New();
entry->index = 0;
entry->tag =
cros::mojom::CameraMetadataTag::ANDROID_REQUEST_PARTIAL_RESULT_COUNT;
entry->type = cros::mojom::EntryType::TYPE_INT32;
entry->count = 1;
uint8_t* as_int8 = reinterpret_cast<uint8_t*>(&partial_result_count);
entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
static_metadata->entries->push_back(std::move(entry));
entry = cros::mojom::CameraMetadataEntry::New();
entry->index = 1;
entry->tag = cros::mojom::CameraMetadataTag::ANDROID_JPEG_MAX_SIZE;
entry->type = cros::mojom::EntryType::TYPE_INT32;
entry->count = 1;
int32_t jpeg_max_size = 65535;
as_int8 = reinterpret_cast<uint8_t*>(&jpeg_max_size);
entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
static_metadata->entries->push_back(std::move(entry));
return static_metadata;
}
void RegisterBuffer(uint64_t buffer_id,
cros::mojom::Camera3DeviceOps::BufferType type,
uint32_t drm_format,
......@@ -165,25 +203,46 @@ class StreamBufferManagerTest : public ::testing::Test {
device_context_->client_.get());
}
std::map<uint32_t, StreamBufferManager::CaptureResult>& GetPartialResults() {
std::map<uint32_t, StreamBufferManager::CaptureResult>& GetPendingResults() {
EXPECT_NE(nullptr, stream_buffer_manager_.get());
return stream_buffer_manager_->partial_results_;
return stream_buffer_manager_->pending_results_;
}
cros::mojom::Camera3StreamPtr PrepareCaptureStream(uint32_t max_buffers) {
auto stream = cros::mojom::Camera3Stream::New();
stream->id = 0;
stream->stream_type = cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
stream->width = kDefaultCaptureFormat.frame_size.width();
stream->height = kDefaultCaptureFormat.frame_size.height();
stream->format =
std::vector<cros::mojom::Camera3StreamPtr> PrepareCaptureStream(
uint32_t max_buffers) {
std::vector<cros::mojom::Camera3StreamPtr> streams;
auto preview_stream = cros::mojom::Camera3Stream::New();
preview_stream->id = static_cast<uint64_t>(StreamType::kPreview);
preview_stream->stream_type =
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
preview_stream->width = kDefaultCaptureFormat.frame_size.width();
preview_stream->height = kDefaultCaptureFormat.frame_size.height();
preview_stream->format =
cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888;
stream->usage = 0;
stream->max_buffers = max_buffers;
stream->data_space = 0;
stream->rotation =
preview_stream->usage = 0;
preview_stream->max_buffers = max_buffers;
preview_stream->data_space = 0;
preview_stream->rotation =
cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
return stream;
streams.push_back(std::move(preview_stream));
auto still_capture_stream = cros::mojom::Camera3Stream::New();
still_capture_stream->id = static_cast<uint64_t>(StreamType::kStillCapture);
still_capture_stream->stream_type =
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
still_capture_stream->width = kDefaultCaptureFormat.frame_size.width();
still_capture_stream->height = kDefaultCaptureFormat.frame_size.height();
still_capture_stream->format =
cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB;
still_capture_stream->usage = 0;
still_capture_stream->max_buffers = max_buffers;
still_capture_stream->data_space = 0;
still_capture_stream->rotation =
cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
streams.push_back(std::move(still_capture_stream));
return streams;
}
cros::mojom::Camera3NotifyMsgPtr PrepareErrorNotifyMessage(
......@@ -192,7 +251,7 @@ class StreamBufferManagerTest : public ::testing::Test {
auto error_msg = cros::mojom::Camera3ErrorMsg::New();
error_msg->frame_number = frame_number;
// There is only the preview stream.
error_msg->error_stream_id = 1;
error_msg->error_stream_id = static_cast<uint64_t>(StreamType::kPreview);
error_msg->error_code = error_code;
auto notify_msg = cros::mojom::Camera3NotifyMsg::New();
notify_msg->message = cros::mojom::Camera3NotifyMsgMessage::New();
......@@ -247,18 +306,20 @@ TEST_F(StreamBufferManagerTest, SimpleCaptureTest) {
&StreamBufferManagerTest::QuitCaptureLoop, base::Unretained(this)));
EXPECT_CALL(
*GetMockCaptureInterface(),
DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
_, _, _, _, _))
DoRegisterBuffer(
StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(AtLeast(1))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
EXPECT_CALL(*GetMockCaptureInterface(), DoProcessCaptureRequest(_, _))
.Times(1)
.WillOnce(Invoke(this, &StreamBufferManagerTest::ProcessCaptureRequest));
stream_buffer_manager_->SetUpStreamAndBuffers(
kDefaultCaptureFormat, /* partial_result_count */ 1,
stream_buffer_manager_->SetUpStreamsAndBuffers(
kDefaultCaptureFormat,
GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until a captured frame is received by MockVideoCaptureClient.
DoLoop();
......@@ -269,18 +330,19 @@ TEST_F(StreamBufferManagerTest, SimpleCaptureTest) {
TEST_F(StreamBufferManagerTest, PartialResultTest) {
GetMockVideoCaptureClient()->SetFrameCb(base::BindOnce(
[](StreamBufferManagerTest* test) {
EXPECT_EQ(1u, test->GetPartialResults().size());
EXPECT_EQ(1u, test->GetPendingResults().size());
// Make sure all the three partial metadata are received before the
// captured result is submitted.
EXPECT_EQ(
3u, test->GetPartialResults()[0].partial_metadata_received.size());
3u, test->GetPendingResults()[0].partial_metadata_received.size());
test->QuitCaptureLoop();
},
base::Unretained(this)));
EXPECT_CALL(
*GetMockCaptureInterface(),
DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
_, _, _, _, _))
DoRegisterBuffer(
StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(AtLeast(1))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
EXPECT_CALL(*GetMockCaptureInterface(), DoProcessCaptureRequest(_, _))
......@@ -293,20 +355,19 @@ TEST_F(StreamBufferManagerTest, PartialResultTest) {
mock_callback_ops_->ProcessCaptureResult(PrepareCapturedResult(
request->frame_number, cros::mojom::CameraMetadata::New(), 1,
std::move(request->output_buffers)));
mock_callback_ops_->ProcessCaptureResult(PrepareCapturedResult(
request->frame_number, cros::mojom::CameraMetadata::New(), 2,
std::vector<cros::mojom::Camera3StreamBufferPtr>()));
mock_callback_ops_->ProcessCaptureResult(PrepareCapturedResult(
request->frame_number, cros::mojom::CameraMetadata::New(), 3,
std::vector<cros::mojom::Camera3StreamBufferPtr>()));
}));
stream_buffer_manager_->SetUpStreamAndBuffers(
kDefaultCaptureFormat, /* partial_result_count */ 3,
stream_buffer_manager_->SetUpStreamsAndBuffers(
kDefaultCaptureFormat,
GetFakeStaticMetadata(/* partial_result_count */ 3),
PrepareCaptureStream(/* max_buffers */ 1));
stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until a captured frame is received by MockVideoCaptureClient.
DoLoop();
......@@ -327,8 +388,9 @@ TEST_F(StreamBufferManagerTest, DeviceErrorTest) {
InvokeWithoutArgs(this, &StreamBufferManagerTest::QuitCaptureLoop));
EXPECT_CALL(
*GetMockCaptureInterface(),
DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
_, _, _, _, _))
DoRegisterBuffer(
StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(1)
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
EXPECT_CALL(*GetMockCaptureInterface(), DoProcessCaptureRequest(_, _))
......@@ -341,10 +403,11 @@ TEST_F(StreamBufferManagerTest, DeviceErrorTest) {
cros::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_DEVICE));
}));
stream_buffer_manager_->SetUpStreamAndBuffers(
kDefaultCaptureFormat, /* partial_result_count */ 1,
stream_buffer_manager_->SetUpStreamsAndBuffers(
kDefaultCaptureFormat,
GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until the MockVideoCaptureClient is deleted.
DoLoop();
......@@ -357,17 +420,18 @@ TEST_F(StreamBufferManagerTest, RequestErrorTest) {
[](StreamBufferManagerTest* test) {
// Frame 0 should be dropped, and the frame callback should be called
// with frame 1.
EXPECT_EQ(test->GetPartialResults().end(),
test->GetPartialResults().find(0));
EXPECT_NE(test->GetPartialResults().end(),
test->GetPartialResults().find(1));
EXPECT_EQ(test->GetPendingResults().end(),
test->GetPendingResults().find(0));
EXPECT_NE(test->GetPendingResults().end(),
test->GetPendingResults().find(1));
test->QuitCaptureLoop();
},
base::Unretained(this)));
EXPECT_CALL(
*GetMockCaptureInterface(),
DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
_, _, _, _, _))
DoRegisterBuffer(
StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(AtLeast(2))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
......@@ -387,10 +451,11 @@ TEST_F(StreamBufferManagerTest, RequestErrorTest) {
}))
.WillOnce(Invoke(this, &StreamBufferManagerTest::ProcessCaptureRequest));
stream_buffer_manager_->SetUpStreamAndBuffers(
kDefaultCaptureFormat, /* partial_result_count */ 1,
stream_buffer_manager_->SetUpStreamsAndBuffers(
kDefaultCaptureFormat,
GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until the MockVideoCaptureClient is deleted.
DoLoop();
......@@ -402,19 +467,20 @@ TEST_F(StreamBufferManagerTest, ResultErrorTest) {
GetMockVideoCaptureClient()->SetFrameCb(base::BindOnce(
[](StreamBufferManagerTest* test) {
// Frame 0 should be submitted.
EXPECT_NE(test->GetPartialResults().end(),
test->GetPartialResults().find(0));
EXPECT_NE(test->GetPendingResults().end(),
test->GetPendingResults().find(0));
test->QuitCaptureLoop();
},
base::Unretained(this)));
EXPECT_CALL(
*GetMockCaptureInterface(),
DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
_, _, _, _, _))
DoRegisterBuffer(
StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(AtLeast(1))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
.WillRepeatedly(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
EXPECT_CALL(*GetMockCaptureInterface(), DoProcessCaptureRequest(_, _))
.Times(1)
.Times(AtLeast(1))
.WillOnce(Invoke([this](cros::mojom::Camera3CaptureRequestPtr& request,
base::OnceCallback<void(int32_t)>& callback) {
std::move(callback).Run(0);
......@@ -432,10 +498,11 @@ TEST_F(StreamBufferManagerTest, ResultErrorTest) {
}))
.WillOnce(Invoke(this, &StreamBufferManagerTest::ProcessCaptureRequest));
stream_buffer_manager_->SetUpStreamAndBuffers(
kDefaultCaptureFormat, /* partial_result_count */ 2,
stream_buffer_manager_->SetUpStreamsAndBuffers(
kDefaultCaptureFormat,
GetFakeStaticMetadata(/* partial_result_count */ 2),
PrepareCaptureStream(/* max_buffers */ 1));
stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until the MockVideoCaptureClient is deleted.
DoLoop();
......@@ -448,17 +515,18 @@ TEST_F(StreamBufferManagerTest, BufferErrorTest) {
[](StreamBufferManagerTest* test) {
// Frame 0 should be dropped, and the frame callback should be called
// with frame 1.
EXPECT_EQ(test->GetPartialResults().end(),
test->GetPartialResults().find(0));
EXPECT_NE(test->GetPartialResults().end(),
test->GetPartialResults().find(1));
EXPECT_EQ(test->GetPendingResults().end(),
test->GetPendingResults().find(0));
EXPECT_NE(test->GetPendingResults().end(),
test->GetPendingResults().find(1));
test->QuitCaptureLoop();
},
base::Unretained(this)));
EXPECT_CALL(
*GetMockCaptureInterface(),
DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
_, _, _, _, _))
DoRegisterBuffer(
StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(AtLeast(2))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
......@@ -480,13 +548,50 @@ TEST_F(StreamBufferManagerTest, BufferErrorTest) {
}))
.WillOnce(Invoke(this, &StreamBufferManagerTest::ProcessCaptureRequest));
stream_buffer_manager_->SetUpStreamAndBuffers(
kDefaultCaptureFormat, /* partial_result_count */ 1,
stream_buffer_manager_->SetUpStreamsAndBuffers(
kDefaultCaptureFormat,
GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until the MockVideoCaptureClient is deleted.
DoLoop();
}
// Test that preview and still capture buffers can be correctly submitted.
TEST_F(StreamBufferManagerTest, TakePhotoTest) {
EXPECT_CALL(
*GetMockCaptureInterface(),
DoRegisterBuffer(
StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(AtLeast(1))
.WillRepeatedly(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
EXPECT_CALL(
*GetMockCaptureInterface(),
DoRegisterBuffer(
StreamBufferManager::GetBufferIpcId(StreamType::kStillCapture, 0),
cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(1)
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
EXPECT_CALL(*GetMockCaptureInterface(), DoProcessCaptureRequest(_, _))
.Times(AtLeast(1))
.WillRepeatedly(
Invoke(this, &StreamBufferManagerTest::ProcessCaptureRequest));
stream_buffer_manager_->SetUpStreamsAndBuffers(
kDefaultCaptureFormat,
GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
stream_buffer_manager_->TakePhoto(
GetFakeStaticMetadata(/* partial_result_count */ 1),
base::BindOnce([](StreamBufferManagerTest* test,
mojom::BlobPtr blob) { test->QuitCaptureLoop(); },
base::Unretained(this)));
// Wait until a captured frame is received by MockVideoCaptureClient.
DoLoop();
}
} // namespace media
......@@ -6,6 +6,10 @@
#include <memory>
#if defined(OS_CHROMEOS)
#include "media/capture/video/chromeos/stream_buffer_manager.h"
#endif
using ::testing::Return;
namespace media {
......@@ -17,8 +21,9 @@ class FakeGpuMemoryBuffer : public gfx::GpuMemoryBuffer {
public:
FakeGpuMemoryBuffer(const gfx::Size& size, gfx::BufferFormat format)
: size_(size), format_(format) {
// We use only NV12 in unit tests.
EXPECT_EQ(gfx::BufferFormat::YUV_420_BIPLANAR, format);
// We use only NV12 or R8 in unit tests.
EXPECT_TRUE(format == gfx::BufferFormat::YUV_420_BIPLANAR ||
format == gfx::BufferFormat::R_8);
size_t y_plane_size = size_.width() * size_.height();
size_t uv_plane_size = size_.width() * size_.height() / 2;
......@@ -36,6 +41,15 @@ class FakeGpuMemoryBuffer : public gfx::GpuMemoryBuffer {
handle_.native_pixmap_handle.planes.push_back(gfx::NativePixmapPlane(
size_.width(), handle_.native_pixmap_handle.planes[0].size,
uv_plane_size));
// For faking a valid JPEG blob buffer.
if (base::checked_cast<size_t>(size_.width()) >= sizeof(Camera3JpegBlob)) {
Camera3JpegBlob* header = reinterpret_cast<Camera3JpegBlob*>(
reinterpret_cast<uintptr_t>(data_.data()) + size_.width() -
sizeof(Camera3JpegBlob));
header->jpeg_blob_id = kCamera3JpegBlobId;
header->jpeg_size = size_.width();
}
#endif
}
......
......@@ -664,13 +664,6 @@ WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_TakePhoto) {
if (!descriptor)
return;
#if defined(OS_CHROMEOS)
// TODO(jcliang): Remove this after we implement TakePhoto.
if (VideoCaptureDeviceFactoryChromeOS::ShouldEnable()) {
return;
}
#endif
#if defined(OS_ANDROID)
// TODO(mcasas): fails on Lollipop devices, reconnect https://crbug.com/646840
if (base::android::BuildInfo::GetInstance()->sdk_int() <
......@@ -713,13 +706,6 @@ WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_GetPhotoState) {
if (!descriptor)
return;
#if defined(OS_CHROMEOS)
// TODO(jcliang): Remove this after we implement GetPhotoCapabilities.
if (VideoCaptureDeviceFactoryChromeOS::ShouldEnable()) {
return;
}
#endif
#if defined(OS_ANDROID)
// TODO(mcasas): fails on Lollipop devices, reconnect https://crbug.com/646840
if (base::android::BuildInfo::GetInstance()->sdk_int() <
......@@ -745,6 +731,9 @@ WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_GetPhotoState) {
base::BindOnce(&MockImageCaptureClient::DoOnGetPhotoState,
image_capture_client_);
// On Chrome OS AllocateAndStart() is asynchronous, so wait until we get the
// first frame.
WaitForCapturedFrame();
base::RunLoop run_loop;
base::Closure quit_closure = BindToCurrentLoop(run_loop.QuitClosure());
EXPECT_CALL(*image_capture_client_.get(), OnCorrectGetPhotoState())
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment