Commit 547610b2 authored by mcasas's avatar mcasas Committed by Commit bot

Revert of RELAND: ImageCapture: Implement takePhoto() for Mac AVFoundation...

Revert of RELAND: ImageCapture: Implement takePhoto() for Mac AVFoundation (patchset #2 id:20001 of https://codereview.chromium.org/2146973002/ )

Reason for revert:
It caused a timeout in
WebRtcWebcamBrowserTests/WebRtcWebcamBrowserTest.MANUAL_TestAcquiringAndReacquiringWebcam/0

see
https://build.chromium.org/p/chromium.webrtc/builders/Mac%20Tester/builds/56929

Original issue's description:
> RELAND: ImageCapture: Implement takePhoto() for Mac AVFoundation
>
> Got reverted due to breaking
> WebRtcWebcamBrowserTests/WebRtcWebcamBrowserTest.MANUAL_TestAcquiringAndReacquiringWebcam/0
> on some WebRTC Mac bot, possibly because it has no real webcam
> or has one without AVCaptureStillImageOutputClass support.
>
> This CL adds a guard [captureSession_ canAddOutput:...]
> before [captureSession_ addOutput:...] (see the sketch after this
> quoted description).
>
> Original description ----------------------------------------------
> ImageCapture: Implement takePhoto() for Mac AVFoundation
>
> By pulling the necessary symbols out of AVFoundation
> and using them, most notably (Cr)AVCaptureStillImageOutput.
>
> TakePhoto is already implemented for Android (both APIs).
>
> Also adds the VideoCaptureDeviceTest::MAYBE_TakePhoto
> test case, enabled only on Mac.
>
> BUG=518807
> TEST=Run a build with the flag --enable-blink-features=ImageCapture,
> navigate to [1], and push the buttons:
> - Open Camera ...
> - Create ImageCapturer
> - takePhoto() (N times!) --> profit
>
> [1] https://rawgit.com/Miguelao/demos/master/imagecapture.html
>
> TBR=rsesek@chromium.org since the change is trivial.
>
> Committed: https://crrev.com/d4179235b928634aeadb0c3379d385caf0ad3a07
> Cr-Commit-Position: refs/heads/master@{#405243}
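
For reference, a minimal sketch of the guard described in the quoted
description, mirroring the hunk removed from
video_capture_device_avfoundation_mac.mm further down in this diff (the
enclosing capture-setup method and its error handling are abbreviated here):

  // Create and plug the still image capture output, but only after asking
  // the session whether it can accept it (the guard this reland added).
  stillImageOutput_.reset(
      [[AVFoundationGlue::AVCaptureStillImageOutputClass() alloc] init]);
  if ([captureSession_ canAddOutput:stillImageOutput_])
    [captureSession_ addOutput:stillImageOutput_];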

TBR=
# Skipping CQ checks because the original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=518807

Review-Url: https://codereview.chromium.org/2151443003
Cr-Commit-Position: refs/heads/master@{#405294}
parent 48308675
......@@ -2,9 +2,12 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// TODO(mcasas): Remove this whole glue, https://crbug.com/579648. This glue was
// introduced to support Mac OS X <= 10.6 where AVFoundation was not available,
// and had to happen in runtime.
// AVFoundation API is only introduced in Mac OS X > 10.6, and there is only one
// build of Chromium, so the (potential) linking with AVFoundation has to happen
// in runtime. For this to be clean, an AVFoundationGlue class is defined to try
// and load these AVFoundation system libraries. If it succeeds, subsequent
// clients can use AVFoundation via the rest of the classes declared in this
// file.
#ifndef MEDIA_BASE_MAC_AVFOUNDATION_GLUE_H_
#define MEDIA_BASE_MAC_AVFOUNDATION_GLUE_H_
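
As a rough illustration of the runtime-loading pattern the comment above
describes (a sketch only, not code from this CL; it assumes the glue has
already loaded the AVFoundation bundle and that the listed base/ and media/
include paths are available), a client resolves the Objective-C class through
the glue and checks it before use:

  #include "base/logging.h"
  #include "base/mac/scoped_nsobject.h"
  #include "media/base/mac/avfoundation_glue.h"

  void SketchCreateCaptureSession() {
    // Resolved at runtime; nil if AVFoundation could not be loaded.
    Class sessionClass = AVFoundationGlue::AVCaptureSessionClass();
    if (!sessionClass)
      return;
    // Use the class through the CrAVCaptureSession wrapper declared below.
    base::scoped_nsobject<CrAVCaptureSession> session(
        [[sessionClass alloc] init]);
    DCHECK(![session isRunning]);  // A freshly created session is idle.
  }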
......@@ -48,7 +51,6 @@ class MEDIA_EXPORT AVFoundationGlue {
static Class AVCaptureSessionClass();
static Class AVCaptureVideoDataOutputClass();
static Class AVCaptureStillImageOutputClass();
#endif // defined(__OBJC__)
private:
......@@ -93,9 +95,6 @@ MEDIA_EXPORT
MEDIA_EXPORT
@interface CrAVCaptureOutput : NSObject // Originally from AVCaptureOutput.h.
- (NSArray*)connections;
@end
// Originally AVCaptureSession and coming from AVCaptureSession.h.
......@@ -105,8 +104,6 @@ MEDIA_EXPORT
- (void)release;
- (void)addInput:(CrAVCaptureInput*)input;
- (void)removeInput:(CrAVCaptureInput*)input;
- (NSArray*)outputs;
- (BOOL)canAddOutput:(CrAVCaptureOutput*)output;
- (void)addOutput:(CrAVCaptureOutput*)output;
- (void)removeOutput:(CrAVCaptureOutput*)output;
- (BOOL)isRunning;
......@@ -159,17 +156,6 @@ MEDIA_EXPORT
@end
// Originally AVCaptureStillImageOutput and coming from AVCaptureOutput.h.
MEDIA_EXPORT
@interface CrAVCaptureStillImageOutput : CrAVCaptureOutput
typedef void (^CompletionHandler)(CoreMediaGlue::CMSampleBufferRef, NSError*);
- (void)
captureStillImageAsynchronouslyFromConnection:(CrAVCaptureConnection*)connection
completionHandler:(CompletionHandler)handler;
@end
// Class to provide access to class methods of AVCaptureDevice.
MEDIA_EXPORT
@interface AVCaptureDeviceGlue : NSObject
......
......@@ -167,10 +167,6 @@ Class AVFoundationGlue::AVCaptureVideoDataOutputClass() {
return [AVFoundationBundle() classNamed:@"AVCaptureVideoDataOutput"];
}
Class AVFoundationGlue::AVCaptureStillImageOutputClass() {
return [AVFoundationBundle() classNamed:@"AVCaptureStillImageOutput"];
}
@implementation AVCaptureDeviceGlue
+ (NSArray*)devices {
......
......@@ -52,7 +52,6 @@ class MEDIA_EXPORT CoreMediaGlue {
kCMPixelFormat_422YpCbCr8_yuvs = 'yuvs',
};
enum {
kCMVideoCodecType_JPEG = 'jpeg',
kCMVideoCodecType_JPEG_OpenDML = 'dmb1',
kCMVideoCodecType_H264 = 'avc1',
};
......
......@@ -21,9 +21,8 @@ class VideoCaptureDeviceMac;
@class CrAVCaptureDevice;
@class CrAVCaptureSession;
@class CrAVCaptureVideoDataOutput;
@class CrAVCaptureStillImageOutput;
// Class used by VideoCaptureDeviceMac (VCDM) for video and image capture using
// Class used by VideoCaptureDeviceMac (VCDM) for video capture using
// AVFoundation API. This class lives inside the thread created by its owner
// VCDM.
//
......@@ -63,7 +62,7 @@ class VideoCaptureDeviceMac;
int frameHeight_;
float frameRate_;
base::Lock lock_; // Protects concurrent setting and using |frameReceiver_|.
base::Lock lock_; // Protects concurrent setting and using of frameReceiver_.
media::VideoCaptureDeviceMac* frameReceiver_; // weak.
base::scoped_nsobject<CrAVCaptureSession> captureSession_;
......@@ -75,9 +74,6 @@ class VideoCaptureDeviceMac;
CrAVCaptureDeviceInput* captureDeviceInput_;
base::scoped_nsobject<CrAVCaptureVideoDataOutput> captureVideoDataOutput_;
// An AVDataOutput specialized for taking pictures out of |captureSession_|.
base::scoped_nsobject<CrAVCaptureStillImageOutput> stillImageOutput_;
base::ThreadChecker main_thread_checker_;
}
......@@ -118,10 +114,6 @@ class VideoCaptureDeviceMac;
// Stops video capturing and stops listening to notifications.
- (void)stopCapture;
// Takes a photo. This method should only be called between -startCapture and
// -stopCapture.
- (void)takePhoto;
@end
#endif // MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_DEVICE_AVFOUNDATION_MAC_H_
......@@ -95,6 +95,8 @@ void MaybeWriteUma(int number_of_devices, int number_of_suspended_devices) {
}
}
} // anonymous namespace
// This function translates Mac Core Video pixel formats to Chromium pixel
// formats.
media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
......@@ -110,26 +112,6 @@ media::VideoPixelFormat FourCCToChromiumPixelFormat(FourCharCode code) {
}
}
// Extracts |base_address| and |length| out of a SampleBuffer.
void ExtractBaseAddressAndLength(
char** base_address,
size_t* length,
CoreMediaGlue::CMSampleBufferRef sample_buffer) {
CoreMediaGlue::CMBlockBufferRef block_buffer =
CoreMediaGlue::CMSampleBufferGetDataBuffer(sample_buffer);
DCHECK(block_buffer);
size_t length_at_offset;
const OSStatus status = CoreMediaGlue::CMBlockBufferGetDataPointer(
block_buffer, 0, &length_at_offset, length, base_address);
DCHECK_EQ(noErr, status);
// Expect the (M)JPEG data to be available as a contiguous reference, i.e.
// not covered by multiple memory blocks.
DCHECK_EQ(length_at_offset, *length);
}
} // anonymous namespace
@implementation VideoCaptureDeviceAVFoundation
#pragma mark Class methods
......@@ -235,8 +217,6 @@ void ExtractBaseAddressAndLength(
// No need to release |captureDeviceInput_|, is owned by the session.
captureDeviceInput_ = nil;
}
if (stillImageOutput_)
[captureSession_ removeOutput:stillImageOutput_];
return YES;
}
......@@ -281,14 +261,6 @@ void ExtractBaseAddressAndLength(
queue:dispatch_get_global_queue(
DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
[captureSession_ addOutput:captureVideoDataOutput_];
// Create and plug the still image capture output. This should happen in
// advance of the actual picture to allow for the 3A to stabilize.
stillImageOutput_.reset(
[[AVFoundationGlue::AVCaptureStillImageOutputClass() alloc] init]);
if ([captureSession_ canAddOutput:stillImageOutput_])
[captureSession_ addOutput:stillImageOutput_];
return YES;
}
......@@ -387,48 +359,6 @@ void ExtractBaseAddressAndLength(
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
- (void)takePhoto {
DCHECK(main_thread_checker_.CalledOnValidThread());
DCHECK([captureSession_ isRunning]);
DCHECK_EQ(1u, [[stillImageOutput_ connections] count]);
CrAVCaptureConnection* const connection =
[[stillImageOutput_ connections] firstObject];
if (!connection) {
base::AutoLock lock(lock_);
frameReceiver_->OnPhotoError();
return;
}
const auto handler = ^(CoreMediaGlue::CMSampleBufferRef sampleBuffer,
NSError* error) {
base::AutoLock lock(lock_);
if (!frameReceiver_)
return;
if (error != nil) {
frameReceiver_->OnPhotoError();
return;
}
// Recommended compressed pixel format is JPEG, we don't expect surprises.
// TODO(mcasas): Consider using [1] for merging EXIF output information:
// [1] +(NSData*)jpegStillImageNSDataRepresentation:jpegSampleBuffer;
DCHECK_EQ(
CoreMediaGlue::kCMVideoCodecType_JPEG,
CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
CoreMediaGlue::CMSampleBufferGetFormatDescription(sampleBuffer)));
char* baseAddress = 0;
size_t length = 0;
ExtractBaseAddressAndLength(&baseAddress, &length, sampleBuffer);
frameReceiver_->OnPhotoTaken(reinterpret_cast<uint8_t*>(baseAddress),
length, "image/jpeg");
};
[stillImageOutput_ captureStillImageAsynchronouslyFromConnection:connection
completionHandler:handler];
}
#pragma mark Private methods
// |captureOutput| is called by the capture device to deliver a new frame.
......@@ -451,7 +381,17 @@ didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
size_t frameSize = 0;
CVImageBufferRef videoFrame = nil;
if (fourcc == CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML) {
ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer);
// If MJPEG, use block buffer instead of pixel buffer.
CoreMediaGlue::CMBlockBufferRef blockBuffer =
CoreMediaGlue::CMSampleBufferGetDataBuffer(sampleBuffer);
if (blockBuffer) {
size_t lengthAtOffset;
CoreMediaGlue::CMBlockBufferGetDataPointer(
blockBuffer, 0, &lengthAtOffset, &frameSize, &baseAddress);
// Expect the MJPEG data to be available as a contiguous reference, i.e.
// not covered by multiple memory blocks.
CHECK_EQ(lengthAtOffset, frameSize);
}
} else {
videoFrame = CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the frame and calculate frame size.
......
......@@ -62,12 +62,10 @@ class VideoCaptureDeviceMac : public VideoCaptureDevice {
const VideoCaptureParams& params,
std::unique_ptr<VideoCaptureDevice::Client> client) override;
void StopAndDeAllocate() override;
void TakePhoto(TakePhotoCallback callback) override;
bool Init(VideoCaptureDevice::Name::CaptureApiType capture_api_type);
// Called to deliver captured video frames. It's safe to call this method
// from any thread, including those controlled by AVFoundation.
// Called to deliver captured video frames.
void ReceiveFrame(const uint8_t* video_frame,
int video_frame_length,
const VideoCaptureFormat& frame_format,
......@@ -75,13 +73,6 @@ class VideoCaptureDeviceMac : public VideoCaptureDevice {
int aspect_denominator,
base::TimeDelta timestamp);
// Callbacks with the result of a still image capture, or in case of error,
// respectively. It's safe to call these methods from any thread.
void OnPhotoTaken(const uint8_t* image_data,
size_t image_length,
const std::string& mime_type);
void OnPhotoError();
// Forwarder to VideoCaptureDevice::Client::OnError().
void ReceiveError(const tracked_objects::Location& from_here,
const std::string& reason);
......@@ -108,9 +99,6 @@ class VideoCaptureDeviceMac : public VideoCaptureDevice {
base::scoped_nsobject<VideoCaptureDeviceAVFoundation> capture_device_;
// To hold on to the TakePhotoCallback while the picture is being taken.
std::unique_ptr<TakePhotoCallback> photo_callback_;
// Used with Bind and PostTask to ensure that methods aren't called after the
// VideoCaptureDeviceMac is destroyed.
// NOTE: Weak pointers must be invalidated before all other member variables.
......
......@@ -389,17 +389,6 @@ void VideoCaptureDeviceMac::StopAndDeAllocate() {
state_ = kIdle;
}
void VideoCaptureDeviceMac::TakePhoto(TakePhotoCallback callback) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(state_ == kCapturing) << state_;
if (photo_callback_) // Only one picture can be in flight at a time.
return;
photo_callback_.reset(new TakePhotoCallback(std::move(callback)));
[capture_device_ takePhoto];
}
bool VideoCaptureDeviceMac::Init(
VideoCaptureDevice::Name::CaptureApiType capture_api_type) {
DCHECK(task_runner_->BelongsToCurrentThread());
......@@ -424,6 +413,8 @@ void VideoCaptureDeviceMac::ReceiveFrame(const uint8_t* video_frame,
int aspect_numerator,
int aspect_denominator,
base::TimeDelta timestamp) {
// This method is safe to call from a device capture thread, i.e. any thread
// controlled by AVFoundation.
if (capture_format_.frame_size != frame_format.frame_size) {
ReceiveError(FROM_HERE,
"Captured resolution " + frame_format.frame_size.ToString() +
......@@ -435,26 +426,6 @@ void VideoCaptureDeviceMac::ReceiveFrame(const uint8_t* video_frame,
0, base::TimeTicks::Now(), timestamp);
}
void VideoCaptureDeviceMac::OnPhotoTaken(const uint8_t* image_data,
size_t image_length,
const std::string& mime_type) {
DCHECK(photo_callback_);
if (!image_data || !image_length) {
OnPhotoError();
return;
}
photo_callback_->Run(mojo::String::From(mime_type),
mojo::Array<uint8_t>(std::vector<uint8_t>(
image_data, image_data + image_length)));
photo_callback_.reset();
}
void VideoCaptureDeviceMac::OnPhotoError() {
DLOG(ERROR) << __FUNCTION__ << " error taking picture";
photo_callback_.reset();
}
void VideoCaptureDeviceMac::ReceiveError(
const tracked_objects::Location& from_here,
const std::string& reason) {
......
......@@ -19,7 +19,6 @@
#include "base/threading/thread.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_capture_types.h"
#include "media/capture/video/video_capture_device_factory.h"
#include "testing/gmock/include/gmock/gmock.h"
......@@ -46,11 +45,9 @@
#define MAYBE_AllocateBadSize DISABLED_AllocateBadSize
// We will always get YUYV from the Mac AVFoundation implementations.
#define MAYBE_CaptureMjpeg DISABLED_CaptureMjpeg
#define MAYBE_TakePhoto TakePhoto
#elif defined(OS_WIN)
#define MAYBE_AllocateBadSize AllocateBadSize
#define MAYBE_CaptureMjpeg CaptureMjpeg
#define MAYBE_TakePhoto DISABLED_TakePhoto
#elif defined(OS_ANDROID)
// TODO(wjia): enable those tests on Android.
// On Android, native camera (JAVA) delivers frames on UI thread which is the
......@@ -60,7 +57,6 @@
#define DeAllocateCameraWhileRunning DISABLED_DeAllocateCameraWhileRunning
#define DeAllocateCameraWhileRunning DISABLED_DeAllocateCameraWhileRunning
#define MAYBE_CaptureMjpeg DISABLED_CaptureMjpeg
#define MAYBE_TakePhoto DISABLED_TakePhoto
#elif defined(OS_LINUX)
// AllocateBadSize will hang when a real camera is attached and if more than one
// test is trying to use the camera (even across processes). Do NOT renable
......@@ -68,11 +64,9 @@
// http://crbug.com/94134 http://crbug.com/137260 http://crbug.com/417824
#define MAYBE_AllocateBadSize DISABLED_AllocateBadSize
#define MAYBE_CaptureMjpeg CaptureMjpeg
#define MAYBE_TakePhoto DISABLED_TakePhoto
#else
#define MAYBE_AllocateBadSize AllocateBadSize
#define MAYBE_CaptureMjpeg CaptureMjpeg
#define MAYBE_TakePhoto DISABLED_TakePhoto
#endif
using ::testing::_;
......@@ -142,30 +136,6 @@ class MockVideoCaptureClient : public VideoCaptureDevice::Client {
base::Callback<void(const VideoCaptureFormat&)> frame_cb_;
};
class MockImageCaptureClient : public base::RefCounted<MockImageCaptureClient> {
public:
// GMock doesn't support move-only arguments, so we use this forward method.
void DoOnPhotoTaken(mojo::String mime_type, mojo::Array<uint8_t> data) {
EXPECT_STREQ("image/jpeg", mime_type.storage().c_str());
ASSERT_GT(data.size(), 4u);
// Check some bytes that univocally identify |data| as a JPEG File.
// https://en.wikipedia.org/wiki/JPEG_File_Interchange_Format#File_format_structure
EXPECT_EQ(0xFF, data[0]); // First SOI byte
EXPECT_EQ(0xD8, data[1]); // Second SOI byte
EXPECT_EQ(0xFF, data[2]); // First JFIF-APP0 byte
EXPECT_EQ(0xE0, data[3]); // Second JFIF-APP0 byte
OnCorrectPhotoTaken();
}
MOCK_METHOD0(OnCorrectPhotoTaken, void(void));
MOCK_METHOD1(
OnTakePhotoFailure,
void(const base::Callback<void(mojo::String, mojo::Array<uint8_t>)>&));
private:
friend class base::RefCounted<MockImageCaptureClient>;
virtual ~MockImageCaptureClient() {}
};
class DeviceEnumerationListener
: public base::RefCounted<DeviceEnumerationListener> {
public:
......@@ -193,10 +163,10 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
video_capture_client_(new MockVideoCaptureClient(
base::Bind(&VideoCaptureDeviceTest::OnFrameCaptured,
base::Unretained(this)))),
device_enumeration_listener_(new DeviceEnumerationListener()),
image_capture_client_(new MockImageCaptureClient()),
video_capture_device_factory_(VideoCaptureDeviceFactory::CreateFactory(
base::ThreadTaskRunnerHandle::Get())) {}
base::ThreadTaskRunnerHandle::Get())) {
device_enumeration_listener_ = new DeviceEnumerationListener();
}
void SetUp() override {
#if defined(OS_ANDROID)
......@@ -285,14 +255,12 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
base::win::ScopedCOMInitializer initialize_com_;
#endif
std::unique_ptr<VideoCaptureDevice::Names> names_;
const std::unique_ptr<base::MessageLoop> loop_;
std::unique_ptr<base::MessageLoop> loop_;
std::unique_ptr<base::RunLoop> run_loop_;
std::unique_ptr<MockVideoCaptureClient> video_capture_client_;
const scoped_refptr<DeviceEnumerationListener> device_enumeration_listener_;
const scoped_refptr<MockImageCaptureClient> image_capture_client_;
scoped_refptr<DeviceEnumerationListener> device_enumeration_listener_;
VideoCaptureFormat last_format_;
const std::unique_ptr<VideoCaptureDeviceFactory>
video_capture_device_factory_;
std::unique_ptr<VideoCaptureDeviceFactory> video_capture_device_factory_;
};
// Cause hangs on Windows Debug. http://crbug.com/417824
......@@ -357,7 +325,8 @@ TEST_P(VideoCaptureDeviceTest, CaptureWithSize) {
VideoCaptureParams capture_params;
capture_params.requested_format.frame_size.SetSize(width, height);
capture_params.requested_format.frame_rate = 30.0f;
capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
capture_params.requested_format.pixel_format =
PIXEL_FORMAT_I420;
device->AllocateAndStart(capture_params, std::move(video_capture_client_));
// Get captured video frames.
WaitForCapturedFrame();
......@@ -392,7 +361,8 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_AllocateBadSize) {
VideoCaptureParams capture_params;
capture_params.requested_format.frame_size.SetSize(637, 472);
capture_params.requested_format.frame_rate = 35;
capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
capture_params.requested_format.pixel_format =
PIXEL_FORMAT_I420;
device->AllocateAndStart(capture_params, std::move(video_capture_client_));
WaitForCapturedFrame();
device->StopAndDeAllocate();
......@@ -514,42 +484,10 @@ TEST_F(VideoCaptureDeviceTest, GetDeviceSupportedFormats) {
// GetDeviceSupportedFormats().
std::unique_ptr<VideoCaptureDevice::Name> name =
GetFirstDeviceNameSupportingPixelFormat(PIXEL_FORMAT_MAX);
// Verify no camera returned for PIXEL_FORMAT_MAX. Nothing else to test here
// Verify no camera returned for PIXEL_FORMAT_MAX. Nothing else
// to test here
// since we cannot forecast the hardware capabilities.
ASSERT_FALSE(name);
}
// Start the camera and take a photo.
TEST_F(VideoCaptureDeviceTest, MAYBE_TakePhoto) {
names_ = EnumerateDevices();
if (names_->empty()) {
VLOG(1) << "No camera available. Exiting test.";
return;
}
std::unique_ptr<VideoCaptureDevice> device(
video_capture_device_factory_->Create(names_->front()));
ASSERT_TRUE(device);
EXPECT_CALL(*video_capture_client_, OnError(_, _)).Times(0);
VideoCaptureParams capture_params;
capture_params.requested_format.frame_size.SetSize(640, 480);
capture_params.requested_format.frame_rate = 30;
capture_params.requested_format.pixel_format = PIXEL_FORMAT_I420;
device->AllocateAndStart(capture_params, std::move(video_capture_client_));
WaitForCapturedFrame();
VideoCaptureDevice::TakePhotoCallback scoped_callback(
base::Bind(&MockImageCaptureClient::DoOnPhotoTaken,
image_capture_client_),
media::BindToCurrentLoop(base::Bind(
&MockImageCaptureClient::OnTakePhotoFailure, image_capture_client_)));
EXPECT_CALL(*image_capture_client_.get(), OnCorrectPhotoTaken()).Times(1);
device->TakePhoto(std::move(scoped_callback));
WaitForCapturedFrame();
device->StopAndDeAllocate();
}
}; // namespace media