Commit bbb87513 authored by miu, committed by Commit bot

[Cast] In Audio/VideoSender, drop frames when too long a duration is in flight.

BUG=404813,405622

Review URL: https://codereview.chromium.org/502333002

Cr-Commit-Position: refs/heads/master@{#292673}
parent 198637a9
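
The gist of the change: AudioSender and VideoSender previously dropped frames only when the count of unacknowledged frames reached max_unacked_frames_; they now also drop when the media duration spanned by the in-flight frames reaches the configured target playout delay. A standalone sketch of the decision, using std::chrono in place of base::TimeTicks/TimeDelta (illustrative names only, not the Chromium API):

#include <chrono>

struct DropPolicy {
  int max_unacked_frames;                          // existing count limit
  std::chrono::microseconds target_playout_delay;  // new duration limit

  // Returns true if the next frame (captured at |capture_time|) should be
  // dropped because too many frames, or too long a media duration, is
  // already in flight (sent but not yet acked).
  bool ShouldDropNextFrame(
      int frames_in_flight,
      std::chrono::steady_clock::time_point capture_time,
      std::chrono::steady_clock::time_point oldest_unacked_reference_time)
      const {
    std::chrono::steady_clock::duration duration_in_flight{};
    if (frames_in_flight > 0)
      duration_in_flight = capture_time - oldest_unacked_reference_time;
    return frames_in_flight >= max_unacked_frames ||
           duration_in_flight >= target_playout_delay;
  }
};
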
......@@ -122,8 +122,6 @@ source_set("sender") {
"sender/fake_software_video_encoder.cc",
"sender/frame_sender.cc",
"sender/frame_sender.h",
"sender/rtp_timestamp_helper.cc",
"sender/rtp_timestamp_helper.h",
"sender/software_video_encoder.h",
"sender/video_encoder.h",
"sender/video_encoder_impl.h",
......
......@@ -154,8 +154,6 @@
'sender/fake_software_video_encoder.cc',
'sender/frame_sender.cc',
'sender/frame_sender.h',
'sender/rtp_timestamp_helper.cc',
'sender/rtp_timestamp_helper.h',
'sender/software_video_encoder.h',
'sender/video_encoder.h',
'sender/video_encoder_impl.h',
......
......@@ -195,6 +195,11 @@ inline base::TimeDelta RtpDeltaToTimeDelta(int64 rtp_delta, int rtp_timebase) {
return rtp_delta * base::TimeDelta::FromSeconds(1) / rtp_timebase;
}
inline int64 TimeDeltaToRtpDelta(base::TimeDelta delta, int rtp_timebase) {
DCHECK_GT(rtp_timebase, 0);
return delta * rtp_timebase / base::TimeDelta::FromSeconds(1);
}
} // namespace cast
} // namespace media
......
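
For reference, a quick standalone check of the arithmetic in the new TimeDeltaToRtpDelta() helper. The timebases below (48 kHz audio, 90 kHz video) are typical RTP values assumed for illustration; they are configured elsewhere, not in this diff.

#include <cassert>
#include <cstdint>

// Same integer math as TimeDeltaToRtpDelta(), with the delta in microseconds.
int64_t TimeDeltaToRtpTicks(int64_t delta_us, int rtp_timebase) {
  return delta_us * rtp_timebase / 1000000;
}

int main() {
  assert(TimeDeltaToRtpTicks(10000, 48000) == 480);   // 10 ms of 48 kHz audio
  assert(TimeDeltaToRtpTicks(33333, 90000) == 2999);  // ~1 video frame at 90 kHz
  return 0;
}
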
......@@ -84,7 +84,7 @@ void AudioSender::InsertAudio(scoped_ptr<AudioBus> audio_bus,
}
DCHECK(audio_encoder_.get()) << "Invalid internal state";
if (AreTooManyFramesInFlight()) {
if (ShouldDropNextFrame(recorded_time)) {
VLOG(1) << "Dropping frame due to too many frames currently in-flight.";
return;
}
......@@ -114,12 +114,10 @@ void AudioSender::SendEncodedAudioFrame(
frame_id, static_cast<int>(encoded_frame->data.size()),
encoded_frame->dependency == EncodedFrame::KEY,
configured_encoder_bitrate_);
// Only use lowest 8 bits as key.
frame_id_to_rtp_timestamp_[frame_id & 0xff] = encoded_frame->rtp_timestamp;
DCHECK(!encoded_frame->reference_time.is_null());
rtp_timestamp_helper_.StoreLatestTime(encoded_frame->reference_time,
encoded_frame->rtp_timestamp);
RecordLatestFrameTimestamps(frame_id,
encoded_frame->reference_time,
encoded_frame->rtp_timestamp);
// At the start of the session, it's important to send reports before each
// frame so that the receiver can properly compute playout times. The reason
......@@ -180,15 +178,12 @@ void AudioSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
duplicate_ack_counter_ = 0;
}
const base::TimeTicks now = cast_environment_->Clock()->NowTicks();
const RtpTimestamp rtp_timestamp =
frame_id_to_rtp_timestamp_[cast_feedback.ack_frame_id & 0xff];
cast_environment_->Logging()->InsertFrameEvent(now,
FRAME_ACK_RECEIVED,
AUDIO_EVENT,
rtp_timestamp,
cast_feedback.ack_frame_id);
cast_environment_->Logging()->InsertFrameEvent(
cast_environment_->Clock()->NowTicks(),
FRAME_ACK_RECEIVED,
AUDIO_EVENT,
GetRecordedRtpTimestamp(cast_feedback.ack_frame_id),
cast_feedback.ack_frame_id);
const bool is_acked_out_of_order =
static_cast<int32>(cast_feedback.ack_frame_id -
......@@ -207,17 +202,29 @@ void AudioSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
}
}
bool AudioSender::AreTooManyFramesInFlight() const {
bool AudioSender::ShouldDropNextFrame(base::TimeTicks capture_time) const {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
int frames_in_flight = 0;
base::TimeDelta duration_in_flight;
if (!last_send_time_.is_null()) {
frames_in_flight +=
frames_in_flight =
static_cast<int32>(last_sent_frame_id_ - latest_acked_frame_id_);
if (frames_in_flight > 0) {
const uint32 oldest_unacked_frame_id = latest_acked_frame_id_ + 1;
duration_in_flight =
capture_time - GetRecordedReferenceTime(oldest_unacked_frame_id);
}
}
VLOG(2) << frames_in_flight
<< " frames in flight; last sent: " << last_sent_frame_id_
<< " latest acked: " << latest_acked_frame_id_;
return frames_in_flight >= max_unacked_frames_;
<< "; latest acked: " << latest_acked_frame_id_
<< "; duration in flight: "
<< duration_in_flight.InMicroseconds() << " usec ("
<< (target_playout_delay_ > base::TimeDelta() ?
100 * duration_in_flight / target_playout_delay_ :
kint64max) << "%)";
return frames_in_flight >= max_unacked_frames_ ||
duration_in_flight >= target_playout_delay_;
}
} // namespace cast
......
......@@ -55,11 +55,11 @@ class AudioSender : public FrameSender,
void OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback);
private:
// Returns true if there are too many frames in flight, as defined by the
// configured target playout delay plus simple logic. When this is true,
// InsertAudio() will silently drop frames instead of sending them to the audio
// encoder.
bool AreTooManyFramesInFlight() const;
// Returns true if there are too many frames in flight, or if sending the
// next frame would push the media duration of the frames in flight too
// high. The latter is determined from the given |capture_time| of the
// next frame to be encoded and sent.
bool ShouldDropNextFrame(base::TimeTicks capture_time) const;
// Called by the |audio_encoder_| with the next EncodedFrame to send.
void SendEncodedAudioFrame(scoped_ptr<EncodedFrame> audio_frame);
......
......@@ -13,14 +13,13 @@ const int kMinSchedulingDelayMs = 1;
FrameSender::FrameSender(scoped_refptr<CastEnvironment> cast_environment,
CastTransportSender* const transport_sender,
base::TimeDelta rtcp_interval,
int frequency,
int rtp_timebase,
uint32 ssrc,
double max_frame_rate,
base::TimeDelta playout_delay)
: cast_environment_(cast_environment),
transport_sender_(transport_sender),
ssrc_(ssrc),
rtp_timestamp_helper_(frequency),
rtt_available_(false),
rtcp_interval_(rtcp_interval),
max_frame_rate_(max_frame_rate),
......@@ -28,10 +27,12 @@ FrameSender::FrameSender(scoped_refptr<CastEnvironment> cast_environment,
last_sent_frame_id_(0),
latest_acked_frame_id_(0),
duplicate_ack_counter_(0),
rtp_timebase_(rtp_timebase),
weak_factory_(this) {
DCHECK_GT(rtp_timebase_, 0);
SetTargetPlayoutDelay(playout_delay);
send_target_playout_delay_ = false;
memset(frame_id_to_rtp_timestamp_, 0, sizeof(frame_id_to_rtp_timestamp_));
memset(frame_rtp_timestamps_, 0, sizeof(frame_rtp_timestamps_));
}
FrameSender::~FrameSender() {
......@@ -54,15 +55,25 @@ void FrameSender::ScheduleNextRtcpReport() {
void FrameSender::SendRtcpReport(bool schedule_future_reports) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
// Sanity-check: We should have sent at least the first frame by this point.
DCHECK(!last_send_time_.is_null());
// Create lip-sync info for the sender report. The last sent frame's
// reference time and RTP timestamp are used to estimate an RTP timestamp in
// terms of "now." Note that |now| is unlikely to fall exactly on a frame
// boundary, so the computation here will produce a |now_as_rtp_timestamp|
// value that rarely equals one emitted by the encoder.
const base::TimeTicks now = cast_environment_->Clock()->NowTicks();
uint32 now_as_rtp_timestamp = 0;
if (rtp_timestamp_helper_.GetCurrentTimeAsRtpTimestamp(
now, &now_as_rtp_timestamp)) {
transport_sender_->SendSenderReport(ssrc_, now, now_as_rtp_timestamp);
} else {
// |rtp_timestamp_helper_| should have stored a mapping by this point.
NOTREACHED();
}
const base::TimeDelta time_delta =
now - GetRecordedReferenceTime(last_sent_frame_id_);
const int64 rtp_delta = TimeDeltaToRtpDelta(time_delta, rtp_timebase_);
const uint32 now_as_rtp_timestamp =
GetRecordedRtpTimestamp(last_sent_frame_id_) +
static_cast<uint32>(rtp_delta);
transport_sender_->SendSenderReport(ssrc_, now, now_as_rtp_timestamp);
if (schedule_future_reports)
ScheduleNextRtcpReport();
}
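
SendRtcpReport() now derives the lip-sync RTP timestamp inline instead of delegating to the deleted RtpTimestampHelper: it takes the last sent frame's recorded (reference time, RTP timestamp) pair and advances it to |now| at the RTP timebase. A standalone sketch of that extrapolation, with std::chrono standing in for base::TimeTicks (uint32 wrap-around is intentional, matching RTP timestamp semantics):

#include <chrono>
#include <cstdint>

uint32_t ExtrapolateRtpTimestamp(
    uint32_t last_frame_rtp_timestamp,
    std::chrono::steady_clock::time_point last_frame_reference_time,
    std::chrono::steady_clock::time_point now,
    int rtp_timebase) {
  const int64_t elapsed_us =
      std::chrono::duration_cast<std::chrono::microseconds>(
          now - last_frame_reference_time).count();
  const int64_t rtp_delta = elapsed_us * rtp_timebase / 1000000;
  return last_frame_rtp_timestamp + static_cast<uint32_t>(rtp_delta);
}
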
......@@ -129,5 +140,23 @@ void FrameSender::ResendForKickstart() {
transport_sender_->ResendFrameForKickstart(ssrc_, last_sent_frame_id_);
}
void FrameSender::RecordLatestFrameTimestamps(uint32 frame_id,
base::TimeTicks reference_time,
RtpTimestamp rtp_timestamp) {
DCHECK(!reference_time.is_null());
frame_reference_times_[frame_id % arraysize(frame_reference_times_)] =
reference_time;
frame_rtp_timestamps_[frame_id % arraysize(frame_rtp_timestamps_)] =
rtp_timestamp;
}
base::TimeTicks FrameSender::GetRecordedReferenceTime(uint32 frame_id) const {
return frame_reference_times_[frame_id % arraysize(frame_reference_times_)];
}
RtpTimestamp FrameSender::GetRecordedRtpTimestamp(uint32 frame_id) const {
return frame_rtp_timestamps_[frame_id % arraysize(frame_rtp_timestamps_)];
}
} // namespace cast
} // namespace media
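
RecordLatestFrameTimestamps() and the two getters above index fixed 256-entry arrays by frame_id modulo the array size, so a lookup for a frame ID more than 255 behind the newest recorded one silently returns stale data. A standalone sketch of that ring buffer (illustrative names; std::chrono in place of base::TimeTicks):

#include <array>
#include <chrono>
#include <cstdint>

class FrameTimestampRing {
 public:
  void Record(uint32_t frame_id,
              std::chrono::steady_clock::time_point reference_time,
              uint32_t rtp_timestamp) {
    reference_times_[frame_id % reference_times_.size()] = reference_time;
    rtp_timestamps_[frame_id % rtp_timestamps_.size()] = rtp_timestamp;
  }
  std::chrono::steady_clock::time_point GetReferenceTime(
      uint32_t frame_id) const {
    return reference_times_[frame_id % reference_times_.size()];
  }
  uint32_t GetRtpTimestamp(uint32_t frame_id) const {
    return rtp_timestamps_[frame_id % rtp_timestamps_.size()];
  }

 private:
  std::array<std::chrono::steady_clock::time_point, 256> reference_times_{};
  std::array<uint32_t, 256> rtp_timestamps_{};
};
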
......@@ -15,7 +15,6 @@
#include "base/time/time.h"
#include "media/cast/cast_environment.h"
#include "media/cast/net/rtcp/rtcp.h"
#include "media/cast/sender/rtp_timestamp_helper.h"
namespace media {
namespace cast {
......@@ -25,7 +24,7 @@ class FrameSender {
FrameSender(scoped_refptr<CastEnvironment> cast_environment,
CastTransportSender* const transport_sender,
base::TimeDelta rtcp_interval,
int frequency,
int rtp_timebase,
uint32 ssrc,
double max_frame_rate,
base::TimeDelta playout_delay);
......@@ -62,10 +61,6 @@ class FrameSender {
const uint32 ssrc_;
// Records lip-sync (i.e., mapping of RTP <--> NTP timestamps), and
// extrapolates this mapping to any other point in time.
RtpTimestampHelper rtp_timestamp_helper_;
// RTT information from RTCP.
bool rtt_available_;
base::TimeDelta rtt_;
......@@ -83,6 +78,16 @@ class FrameSender {
void ResendCheck();
void ResendForKickstart();
// Record or retrieve a recent history of each frame's timestamps.
// Warning: If a frame ID too far in the past is requested, the getters will
// silently succeed but return incorrect values. Be sure to respect
// media::cast::kMaxUnackedFrames.
void RecordLatestFrameTimestamps(uint32 frame_id,
base::TimeTicks reference_time,
RtpTimestamp rtp_timestamp);
base::TimeTicks GetRecordedReferenceTime(uint32 frame_id) const;
RtpTimestamp GetRecordedRtpTimestamp(uint32 frame_id) const;
const base::TimeDelta rtcp_interval_;
// The total amount of time between a frame's capture/recording on the sender
......@@ -112,7 +117,7 @@ class FrameSender {
// last time any frame was sent or re-sent.
base::TimeTicks last_send_time_;
// The ID of the last frame sent. Logic throughout AudioSender assumes this
// The ID of the last frame sent. Logic throughout FrameSender assumes this
// can safely wrap-around. This member is invalid until
// |!last_send_time_.is_null()|.
uint32 last_sent_frame_id_;
......@@ -132,12 +137,16 @@ class FrameSender {
// STATUS_VIDEO_INITIALIZED.
CastInitializationStatus cast_initialization_status_;
// This is a "good enough" mapping for finding the RTP timestamp associated
// with a video frame. The key is the lowest 8 bits of frame id (which is
// what is sent via RTCP). This map is used for logging purposes.
RtpTimestamp frame_id_to_rtp_timestamp_[256];
private:
// RTP timestamp increment representing one second.
const int rtp_timebase_;
// Ring buffers to keep track of recent frame timestamps (both in terms of
// local reference time and RTP media time). These should only be accessed
// through the Record/GetXXX() methods.
base::TimeTicks frame_reference_times_[256];
RtpTimestamp frame_rtp_timestamps_[256];
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<FrameSender> weak_factory_;
......
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/sender/rtp_timestamp_helper.h"
namespace media {
namespace cast {
RtpTimestampHelper::RtpTimestampHelper(int frequency)
: frequency_(frequency),
last_rtp_timestamp_(0) {
}
RtpTimestampHelper::~RtpTimestampHelper() {
}
bool RtpTimestampHelper::GetCurrentTimeAsRtpTimestamp(
const base::TimeTicks& now, uint32* rtp_timestamp) const {
if (last_capture_time_.is_null())
return false;
const base::TimeDelta elapsed_time = now - last_capture_time_;
const int64 rtp_delta =
elapsed_time * frequency_ / base::TimeDelta::FromSeconds(1);
*rtp_timestamp = last_rtp_timestamp_ + static_cast<uint32>(rtp_delta);
return true;
}
void RtpTimestampHelper::StoreLatestTime(
base::TimeTicks capture_time, uint32 rtp_timestamp) {
last_capture_time_ = capture_time;
last_rtp_timestamp_ = rtp_timestamp;
}
} // namespace cast
} // namespace media
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAST_SENDER_RTP_TIMESTAMP_HELPER_H_
#define MEDIA_CAST_SENDER_RTP_TIMESTAMP_HELPER_H_
#include "base/basictypes.h"
#include "base/time/time.h"
namespace media {
namespace cast {
// A helper class used to convert current time ticks into RTP timestamp.
class RtpTimestampHelper {
public:
explicit RtpTimestampHelper(int frequency);
~RtpTimestampHelper();
// Compute a RTP timestamp using current time, last encoded time and
// last encoded RTP timestamp.
// Return true if |rtp_timestamp| is computed.
bool GetCurrentTimeAsRtpTimestamp(const base::TimeTicks& now,
uint32* rtp_timestamp) const;
// Store the capture time and the corresponding RTP timestamp for the
// last encoded frame.
void StoreLatestTime(base::TimeTicks capture_time, uint32 rtp_timestamp);
private:
int frequency_;
base::TimeTicks last_capture_time_;
uint32 last_rtp_timestamp_;
DISALLOW_COPY_AND_ASSIGN(RtpTimestampHelper);
};
} // namespace cast
} // namespace media
#endif // MEDIA_CAST_SENDER_RTP_TIMESTAMP_HELPER_H_
......@@ -119,7 +119,7 @@ void VideoSender::InsertRawVideoFrame(
"timestamp", capture_time.ToInternalValue(),
"rtp_timestamp", rtp_timestamp);
if (AreTooManyFramesInFlight()) {
if (ShouldDropNextFrame(capture_time)) {
VLOG(1) << "Dropping frame due to too many frames currently in-flight.";
return;
}
......@@ -178,8 +178,10 @@ void VideoSender::SendEncodedVideoFrame(
frame_id, static_cast<int>(encoded_frame->data.size()),
encoded_frame->dependency == EncodedFrame::KEY,
requested_bitrate_before_encode);
// Only use lowest 8 bits as key.
frame_id_to_rtp_timestamp_[frame_id & 0xff] = encoded_frame->rtp_timestamp;
RecordLatestFrameTimestamps(frame_id,
encoded_frame->reference_time,
encoded_frame->rtp_timestamp);
// Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
TRACE_EVENT_INSTANT1(
......@@ -187,10 +189,6 @@ void VideoSender::SendEncodedVideoFrame(
TRACE_EVENT_SCOPE_THREAD,
"rtp_timestamp", encoded_frame->rtp_timestamp);
DCHECK(!encoded_frame->reference_time.is_null());
rtp_timestamp_helper_.StoreLatestTime(encoded_frame->reference_time,
encoded_frame->rtp_timestamp);
// At the start of the session, it's important to send reports before each
// frame so that the receiver can properly compute playout times. The reason
// more than one report is sent is because transmission is not guaranteed,
......@@ -275,13 +273,12 @@ void VideoSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
base::TimeTicks now = cast_environment_->Clock()->NowTicks();
congestion_control_.AckFrame(cast_feedback.ack_frame_id, now);
RtpTimestamp rtp_timestamp =
frame_id_to_rtp_timestamp_[cast_feedback.ack_frame_id & 0xff];
cast_environment_->Logging()->InsertFrameEvent(now,
FRAME_ACK_RECEIVED,
VIDEO_EVENT,
rtp_timestamp,
cast_feedback.ack_frame_id);
cast_environment_->Logging()->InsertFrameEvent(
now,
FRAME_ACK_RECEIVED,
VIDEO_EVENT,
GetRecordedRtpTimestamp(cast_feedback.ack_frame_id),
cast_feedback.ack_frame_id);
const bool is_acked_out_of_order =
static_cast<int32>(cast_feedback.ack_frame_id -
......@@ -300,18 +297,31 @@ void VideoSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
}
}
bool VideoSender::AreTooManyFramesInFlight() const {
bool VideoSender::ShouldDropNextFrame(base::TimeTicks capture_time) const {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
int frames_in_flight = frames_in_encoder_;
int frames_in_flight = 0;
base::TimeDelta duration_in_flight;
if (!last_send_time_.is_null()) {
frames_in_flight +=
frames_in_flight =
static_cast<int32>(last_sent_frame_id_ - latest_acked_frame_id_);
if (frames_in_flight > 0) {
const uint32 oldest_unacked_frame_id = latest_acked_frame_id_ + 1;
duration_in_flight =
capture_time - GetRecordedReferenceTime(oldest_unacked_frame_id);
}
}
frames_in_flight += frames_in_encoder_;
VLOG(2) << frames_in_flight
<< " frames in flight; last sent: " << last_sent_frame_id_
<< " latest acked: " << latest_acked_frame_id_
<< " frames in encoder: " << frames_in_encoder_;
return frames_in_flight >= max_unacked_frames_;
<< "; latest acked: " << latest_acked_frame_id_
<< "; frames in encoder: " << frames_in_encoder_
<< "; duration in flight: "
<< duration_in_flight.InMicroseconds() << " usec ("
<< (target_playout_delay_ > base::TimeDelta() ?
100 * duration_in_flight / target_playout_delay_ :
kint64max) << "%)";
return frames_in_flight >= max_unacked_frames_ ||
duration_in_flight >= target_playout_delay_;
}
} // namespace cast
......
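
VideoSender's version of the drop check differs from AudioSender's in that frames still queued inside the encoder count toward the in-flight total, since they will be on the wire as soon as encoding completes. A minimal, free-standing sketch of the combined condition (illustrative names only):

#include <chrono>

bool ShouldDropNextVideoFrame(int frames_on_the_wire,
                              int frames_in_encoder,
                              std::chrono::microseconds duration_in_flight,
                              int max_unacked_frames,
                              std::chrono::microseconds target_playout_delay) {
  const int frames_in_flight = frames_on_the_wire + frames_in_encoder;
  return frames_in_flight >= max_unacked_frames ||
         duration_in_flight >= target_playout_delay;
}
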
......@@ -61,11 +61,11 @@ class VideoSender : public FrameSender,
void OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback);
private:
// Returns true if there are too many frames in flight, as defined by the
// configured target playout delay plus simple logic. When this is true,
// InsertRawVideoFrame() will silently drop frames instead of sending them to
// the video encoder.
bool AreTooManyFramesInFlight() const;
// Returns true if there are too many frames in flight, or if sending the
// next frame would push the media duration of the frames in flight too
// high. The latter is determined from the given |capture_time| of the
// next frame to be encoded and sent.
bool ShouldDropNextFrame(base::TimeTicks capture_time) const;
// Called by the |video_encoder_| with the next EncodedFrame to send.
void SendEncodedVideoFrame(int requested_bitrate_before_encode,
......
......@@ -17,7 +17,7 @@ namespace cast {
static const uint32 kMinIntra = 300;
static int ComputeMaxNumOfRepeatedBuffes(int max_unacked_frames) {
static int ComputeMaxNumOfRepeatedBuffers(int max_unacked_frames) {
if (max_unacked_frames > kNumberOfVp8VideoBuffers)
return (max_unacked_frames - 1) / kNumberOfVp8VideoBuffers;
......@@ -31,7 +31,7 @@ Vp8Encoder::Vp8Encoder(const VideoSenderConfig& video_config,
cast_config_.max_number_of_video_buffers_used ==
kNumberOfVp8VideoBuffers),
max_number_of_repeated_buffers_in_a_row_(
ComputeMaxNumOfRepeatedBuffes(max_unacked_frames)),
ComputeMaxNumOfRepeatedBuffers(max_unacked_frames)),
key_frame_requested_(true),
first_frame_received_(false),
last_encoded_frame_id_(kStartFrameId),
......
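
The renamed ComputeMaxNumOfRepeatedBuffers() is behaviorally unchanged; a quick numeric check of the visible branch, assuming kNumberOfVp8VideoBuffers == 3 and a fallback return of 0 (the else-branch is outside this hunk, so both are assumptions here):

#include <cassert>

const int kNumberOfVp8VideoBuffers = 3;  // assumed for this sketch

int ComputeMaxNumOfRepeatedBuffers(int max_unacked_frames) {
  if (max_unacked_frames > kNumberOfVp8VideoBuffers)
    return (max_unacked_frames - 1) / kNumberOfVp8VideoBuffers;
  return 0;  // fallback assumed; not visible in this diff hunk
}

int main() {
  assert(ComputeMaxNumOfRepeatedBuffers(10) == 3);    // (10 - 1) / 3
  assert(ComputeMaxNumOfRepeatedBuffers(120) == 39);  // (120 - 1) / 3
  return 0;
}
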