Commit 2c176764 authored by jfroy, committed by Commit bot

[cast] Add optional VideoEncoder method to flush frames.

This patch adds EmitFrames() to VideoEncoder. The method is documented
as instructing the encoder to emit all frames that it may be buffering
for analysis.

This patch modifies VideoSender::InsertRawVideoFrame() to call
EmitFrames() when a frame is dropped because the duration of in-flight
frames exceeds the target playout delay.
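
As a rough sketch only (not the literal Chromium code; the duration variable
and its computation are illustrative, while video_encoder_ and EmitFrames()
come from the patch), the drop path becomes:

  // When the frames already handed to the encoder span more time than the
  // target playout delay, drop the new frame, but first ask the encoder to
  // flush its analysis window so the in-flight bookkeeping can drain.
  if (duration_of_frames_in_encoder > target_playout_delay) {
    video_encoder_->EmitFrames();
    return;
  }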

This patch provides an implementation of EmitFrames() for the
VideoToolbox H.264 encoder, which buffers frames for analysis as
described in the bug. It also uses the existing
max_number_of_video_buffers_used field in VideoSenderConfig to bound
the encoder's frame window.
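
For illustration only (this snippet is not part of the patch and the value 4
is an arbitrary example), an embedder could bound the H.264 encoder's frame
window through the sender configuration:

  media::cast::VideoSenderConfig config;
  // Allow the VideoToolbox encoder to hold at most 4 frames for analysis
  // before it must emit one; the value maps onto
  // kVTCompressionPropertyKey_MaxFrameDelayCount. A value of 0 leaves the
  // encoder's default frame delay in place.
  config.max_number_of_video_buffers_used = 4;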

BUG=450798
R=hclam, miu, DaleCurtis, Robert Sesek

Review URL: https://codereview.chromium.org/863083002

Cr-Commit-Position: refs/heads/master@{#313643}
parent d1cc658f
@@ -48,6 +48,7 @@ class MEDIA_EXPORT VideoToolboxGlue {
CFStringRef kVTCompressionPropertyKey_AverageBitRate() const;
CFStringRef kVTCompressionPropertyKey_ColorPrimaries() const;
CFStringRef kVTCompressionPropertyKey_ExpectedFrameRate() const;
CFStringRef kVTCompressionPropertyKey_MaxFrameDelayCount() const;
CFStringRef kVTCompressionPropertyKey_MaxKeyFrameInterval() const;
CFStringRef kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration() const;
CFStringRef kVTCompressionPropertyKey_ProfileLevel() const;
@@ -89,6 +90,9 @@ class MEDIA_EXPORT VideoToolboxGlue {
CVPixelBufferPoolRef VTCompressionSessionGetPixelBufferPool(
VTCompressionSessionRef session) const;
void VTCompressionSessionInvalidate(VTCompressionSessionRef session) const;
OSStatus VTCompressionSessionCompleteFrames(
VTCompressionSessionRef session,
CoreMediaGlue::CMTime completeUntilPresentationTimeStamp) const;
// Originally from VTSession.h
OSStatus VTSessionSetProperty(VTSessionRef session,
@@ -34,6 +34,9 @@ struct VideoToolboxGlue::Library {
typedef CVPixelBufferPoolRef (*VTCompressionSessionGetPixelBufferPoolMethod)(
VTCompressionSessionRef);
typedef void (*VTCompressionSessionInvalidateMethod)(VTCompressionSessionRef);
typedef OSStatus (*VTCompressionSessionCompleteFramesMethod)(
VTCompressionSessionRef,
CoreMediaGlue::CMTime);
typedef OSStatus (*VTSessionSetPropertyMethod)(VTSessionRef,
CFStringRef,
CFTypeRef);
@@ -43,12 +46,14 @@ struct VideoToolboxGlue::Library {
VTCompressionSessionGetPixelBufferPoolMethod
VTCompressionSessionGetPixelBufferPool;
VTCompressionSessionInvalidateMethod VTCompressionSessionInvalidate;
VTCompressionSessionCompleteFramesMethod VTCompressionSessionCompleteFrames;
VTSessionSetPropertyMethod VTSessionSetProperty;
CFStringRef* kVTCompressionPropertyKey_AllowFrameReordering;
CFStringRef* kVTCompressionPropertyKey_AverageBitRate;
CFStringRef* kVTCompressionPropertyKey_ColorPrimaries;
CFStringRef* kVTCompressionPropertyKey_ExpectedFrameRate;
CFStringRef* kVTCompressionPropertyKey_MaxFrameDelayCount;
CFStringRef* kVTCompressionPropertyKey_MaxKeyFrameInterval;
CFStringRef* kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration;
CFStringRef* kVTCompressionPropertyKey_ProfileLevel;
@@ -86,12 +91,14 @@ class VideoToolboxGlue::Loader {
LOAD_SYMBOL(VTCompressionSessionEncodeFrame)
LOAD_SYMBOL(VTCompressionSessionGetPixelBufferPool)
LOAD_SYMBOL(VTCompressionSessionInvalidate)
LOAD_SYMBOL(VTCompressionSessionCompleteFrames)
LOAD_SYMBOL(VTSessionSetProperty)
LOAD_SYMBOL(kVTCompressionPropertyKey_AllowFrameReordering)
LOAD_SYMBOL(kVTCompressionPropertyKey_AverageBitRate)
LOAD_SYMBOL(kVTCompressionPropertyKey_ColorPrimaries)
LOAD_SYMBOL(kVTCompressionPropertyKey_ExpectedFrameRate)
LOAD_SYMBOL(kVTCompressionPropertyKey_MaxFrameDelayCount)
LOAD_SYMBOL(kVTCompressionPropertyKey_MaxKeyFrameInterval)
LOAD_SYMBOL(kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration)
LOAD_SYMBOL(kVTCompressionPropertyKey_ProfileLevel)
@@ -189,6 +196,13 @@ void VideoToolboxGlue::VTCompressionSessionInvalidate(
library_->VTCompressionSessionInvalidate(session);
}
OSStatus VideoToolboxGlue::VTCompressionSessionCompleteFrames(
VTCompressionSessionRef session,
CoreMediaGlue::CMTime completeUntilPresentationTimeStamp) const {
return library_->VTCompressionSessionCompleteFrames(
session, completeUntilPresentationTimeStamp);
}
OSStatus VideoToolboxGlue::VTSessionSetProperty(VTSessionRef session,
CFStringRef propertyKey,
CFTypeRef propertyValue) const {
@@ -202,6 +216,7 @@ KEY_ACCESSOR(kVTCompressionPropertyKey_AllowFrameReordering)
KEY_ACCESSOR(kVTCompressionPropertyKey_AverageBitRate)
KEY_ACCESSOR(kVTCompressionPropertyKey_ColorPrimaries)
KEY_ACCESSOR(kVTCompressionPropertyKey_ExpectedFrameRate)
KEY_ACCESSOR(kVTCompressionPropertyKey_MaxFrameDelayCount)
KEY_ACCESSOR(kVTCompressionPropertyKey_MaxKeyFrameInterval)
KEY_ACCESSOR(kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration)
KEY_ACCESSOR(kVTCompressionPropertyKey_ProfileLevel)
@@ -97,7 +97,19 @@ struct VideoSenderConfig {
int max_qp;
int min_qp;
int max_frame_rate; // TODO(miu): Should be double, not int.
int max_number_of_video_buffers_used; // Max value depend on codec.
// This field is used differently by various encoders. It defaults to 1.
//
// For VP8, it should be 1 to operate in single-buffer mode, or 3 to operate
// in multi-buffer mode. See
// http://www.webmproject.org/docs/encoder-parameters/ for details.
//
// For H.264 on Mac or iOS, it controls the max number of frames the encoder
// may hold before emitting a frame. A larger window may allow higher encoding
// efficiency at the cost of latency and memory. Set to 0 to let the encoder
// choose a suitable value for the platform and other encoding settings.
int max_number_of_video_buffers_used;
Codec codec;
int number_of_encode_threads;
@@ -335,6 +335,11 @@ void H264VideoToolboxEncoder::ConfigureSession(
SetSessionProperty(
videotoolbox_glue_->kVTCompressionPropertyKey_YCbCrMatrix(),
kCVImageBufferYCbCrMatrix_ITU_R_709_2);
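// Only bound the frame window when the config provides one; a value of 0
// leaves VideoToolbox's default maximum frame delay in place.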
if (video_config.max_number_of_video_buffers_used > 0) {
SetSessionProperty(
videotoolbox_glue_->kVTCompressionPropertyKey_MaxFrameDelayCount(),
video_config.max_number_of_video_buffers_used);
}
}
void H264VideoToolboxEncoder::Teardown() {
@@ -430,6 +435,21 @@ H264VideoToolboxEncoder::CreateVideoFrameFactory() {
new VideoFrameFactoryCVPixelBufferPoolImpl(pool));
}
void H264VideoToolboxEncoder::EmitFrames() {
DCHECK(thread_checker_.CalledOnValidThread());
if (!compression_session_) {
DLOG(ERROR) << " compression session is null";
return;
}
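// A zero-initialized CMTime has no flags set, so it is non-numeric (the same
// as kCMTimeInvalid); VTCompressionSessionCompleteFrames treats that as a
// request to complete all pending frames, not just frames up to a timestamp.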
OSStatus status = videotoolbox_glue_->VTCompressionSessionCompleteFrames(
compression_session_, CoreMediaGlue::CMTime{0, 0, 0, 0});
if (status != noErr) {
DLOG(ERROR) << " VTCompressionSessionCompleteFrames failed: " << status;
}
}
bool H264VideoToolboxEncoder::SetSessionProperty(CFStringRef key,
int32_t value) {
base::ScopedCFTypeRef<CFNumberRef> cfvalue(
@@ -36,6 +36,7 @@ class H264VideoToolboxEncoder : public VideoEncoder {
void GenerateKeyFrame() override;
void LatestFrameIdToReference(uint32 frame_id) override;
scoped_ptr<VideoFrameFactory> CreateVideoFrameFactory() override;
void EmitFrames() override;
private:
// Initialize the compression session.
@@ -12,5 +12,8 @@ scoped_ptr<VideoFrameFactory> VideoEncoder::CreateVideoFrameFactory() {
return nullptr;
}
void VideoEncoder::EmitFrames() {
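// The default implementation is a no-op: encoders that do not buffer frames
// for analysis have nothing to flush.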
}
} // namespace cast
} // namespace media
@@ -48,6 +48,14 @@ class VideoEncoder {
// encoder affinity (defined as offering some sort of performance benefit).
// This is an optional capability and by default returns null.
virtual scoped_ptr<VideoFrameFactory> CreateVideoFrameFactory();
// Instructs the encoder to finish and emit all frames that have been
// submitted for encoding. An encoder may hold a certain number of frames for
// analysis. Under certain network conditions, particularly network
// congestion, all submitted frames must be flushed out of the encoder so
// that new frames can eventually be encoded. Like EncodeVideoFrame(), the
// encoder will process this request asynchronously.
virtual void EmitFrames();
};
} // namespace cast
@@ -186,6 +186,14 @@ void VideoSender::InsertRawVideoFrame(
VLOG(1) << "New target delay: " << new_target_delay.InMilliseconds();
playout_delay_change_cb_.Run(new_target_delay);
}
// Some encoder implementations have a frame window for analysis. Since we
// are dropping this frame, unless we instruct the encoder to flush all the
// frames that have been enqueued for encoding, frames_in_encoder_ and
// last_enqueued_frame_reference_time_ will never be updated and we will
// drop every subsequent frame for the rest of the session.
video_encoder_->EmitFrames();
return;
}