Commit bb517383 authored by miu@chromium.org's avatar miu@chromium.org

[Cast] Halt AudioSender transmission when too many frames are in-flight.

The approach here is to literally copy most of the logic in VideoSender
over into AudioSender, since VideoSender already solves a number of
transmission/re-transmission issues using heuristics that should work
well for audio.  (This also brings us much closer to being able to merge
AudioSender and VideoSender into one implementation.)

Testing: Confirmed correct halting and recovery behavior between
cast_sender_app and cast_receiver_app (SIGSTOP'ed and SIGCONT'ed each
to simulate a temporary outage).  Also tested a Chromium sender with a
Chromecast receiver.

BUG=380023

Review URL: https://codereview.chromium.org/340903003

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@278323 0039d316-1c4b-4281-b951-d872f2087c98
parent 5f5249e6
...@@ -9,14 +9,12 @@ ...@@ -9,14 +9,12 @@
#include "base/bind.h" #include "base/bind.h"
#include "base/bind_helpers.h" #include "base/bind_helpers.h"
#include "base/location.h" #include "base/location.h"
#include "base/logging.h"
#include "base/stl_util.h" #include "base/stl_util.h"
#include "base/sys_byteorder.h" #include "base/sys_byteorder.h"
#include "base/time/time.h" #include "base/time/time.h"
#include "media/base/audio_bus.h" #include "media/base/audio_bus.h"
#include "media/cast/cast_defines.h" #include "media/cast/cast_defines.h"
#include "media/cast/cast_environment.h" #include "media/cast/cast_environment.h"
#include "media/cast/logging/logging_defines.h"
#include "third_party/opus/src/include/opus.h" #include "third_party/opus/src/include/opus.h"
namespace media { namespace media {
...@@ -33,28 +31,6 @@ const int kFrameDurationMillis = 1000 / kFramesPerSecond; // No remainder! ...@@ -33,28 +31,6 @@ const int kFrameDurationMillis = 1000 / kFramesPerSecond; // No remainder!
// coming in too slow with respect to the capture timestamps. // coming in too slow with respect to the capture timestamps.
const int kUnderrunThresholdMillis = 3 * kFrameDurationMillis; const int kUnderrunThresholdMillis = 3 * kFrameDurationMillis;
void LogAudioFrameEncodedEvent(
const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
base::TimeTicks event_time,
media::cast::RtpTimestamp rtp_timestamp,
uint32 frame_id,
size_t frame_size) {
if (!cast_environment->CurrentlyOn(CastEnvironment::MAIN)) {
cast_environment->PostTask(
CastEnvironment::MAIN,
FROM_HERE,
base::Bind(&LogAudioFrameEncodedEvent,
cast_environment, event_time,
rtp_timestamp, frame_id, frame_size));
return;
}
cast_environment->Logging()->InsertEncodedFrameEvent(
event_time, media::cast::FRAME_ENCODED, media::cast::AUDIO_EVENT,
rtp_timestamp, frame_id,
static_cast<int>(frame_size), /* key_frame - unused */ false,
/*target_bitrate - unused*/ 0);
}
} // namespace } // namespace
...@@ -150,11 +126,6 @@ class AudioEncoder::ImplBase ...@@ -150,11 +126,6 @@ class AudioEncoder::ImplBase
audio_frame->reference_time = frame_capture_time_; audio_frame->reference_time = frame_capture_time_;
if (EncodeFromFilledBuffer(&audio_frame->data)) { if (EncodeFromFilledBuffer(&audio_frame->data)) {
LogAudioFrameEncodedEvent(cast_environment_,
cast_environment_->Clock()->NowTicks(),
audio_frame->rtp_timestamp,
audio_frame->frame_id,
audio_frame->data.size());
cast_environment_->PostTask( cast_environment_->PostTask(
CastEnvironment::MAIN, CastEnvironment::MAIN,
FROM_HERE, FROM_HERE,
......
...@@ -8,7 +8,9 @@ ...@@ -8,7 +8,9 @@
#include "base/logging.h" #include "base/logging.h"
#include "base/message_loop/message_loop.h" #include "base/message_loop/message_loop.h"
#include "media/cast/audio_sender/audio_encoder.h" #include "media/cast/audio_sender/audio_encoder.h"
#include "media/cast/transport/cast_transport_defines.h" #include "media/cast/cast_defines.h"
#include "media/cast/rtcp/rtcp_defines.h"
#include "media/cast/transport/cast_transport_config.h"
namespace media { namespace media {
namespace cast { namespace cast {
...@@ -16,13 +18,24 @@ namespace cast { ...@@ -16,13 +18,24 @@ namespace cast {
const int kNumAggressiveReportsSentAtStart = 100; const int kNumAggressiveReportsSentAtStart = 100;
const int kMinSchedulingDelayMs = 1; const int kMinSchedulingDelayMs = 1;
// TODO(mikhal): Reduce heap allocation when not needed. // TODO(miu): This should be specified in AudioSenderConfig, but currently it is
// fixed to 100 FPS (i.e., 10 ms per frame), and AudioEncoder assumes this as
// well.
const int kAudioFrameRate = 100;
AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment, AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
const AudioSenderConfig& audio_config, const AudioSenderConfig& audio_config,
transport::CastTransportSender* const transport_sender) transport::CastTransportSender* const transport_sender)
: cast_environment_(cast_environment), : cast_environment_(cast_environment),
target_playout_delay_(base::TimeDelta::FromMilliseconds(
audio_config.rtp_config.max_delay_ms)),
transport_sender_(transport_sender), transport_sender_(transport_sender),
rtp_timestamp_helper_(audio_config.frequency), max_unacked_frames_(
std::min(kMaxUnackedFrames,
1 + static_cast<int>(target_playout_delay_ *
kAudioFrameRate /
base::TimeDelta::FromSeconds(1)))),
configured_encoder_bitrate_(audio_config.bitrate),
rtcp_(cast_environment, rtcp_(cast_environment,
this, this,
transport_sender_, transport_sender_,
...@@ -34,10 +47,16 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment, ...@@ -34,10 +47,16 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
audio_config.incoming_feedback_ssrc, audio_config.incoming_feedback_ssrc,
audio_config.rtcp_c_name, audio_config.rtcp_c_name,
AUDIO_EVENT), AUDIO_EVENT),
rtp_timestamp_helper_(audio_config.frequency),
num_aggressive_rtcp_reports_sent_(0), num_aggressive_rtcp_reports_sent_(0),
last_sent_frame_id_(0),
latest_acked_frame_id_(0),
duplicate_ack_counter_(0),
cast_initialization_status_(STATUS_AUDIO_UNINITIALIZED), cast_initialization_status_(STATUS_AUDIO_UNINITIALIZED),
weak_factory_(this) { weak_factory_(this) {
rtcp_.SetCastReceiverEventHistorySize(kReceiverRtcpEventHistorySize); VLOG(1) << "max_unacked_frames " << max_unacked_frames_;
DCHECK_GT(max_unacked_frames_, 0);
if (!audio_config.use_external_encoder) { if (!audio_config.use_external_encoder) {
audio_encoder_.reset( audio_encoder_.reset(
new AudioEncoder(cast_environment, new AudioEncoder(cast_environment,
...@@ -47,7 +66,7 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment, ...@@ -47,7 +66,7 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
cast_initialization_status_ = audio_encoder_->InitializationResult(); cast_initialization_status_ = audio_encoder_->InitializationResult();
} else { } else {
NOTREACHED(); // No support for external audio encoding. NOTREACHED(); // No support for external audio encoding.
cast_initialization_status_ = STATUS_AUDIO_INITIALIZED; cast_initialization_status_ = STATUS_AUDIO_UNINITIALIZED;
} }
media::cast::transport::CastTransportAudioConfig transport_config; media::cast::transport::CastTransportAudioConfig transport_config;
...@@ -55,10 +74,11 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment, ...@@ -55,10 +74,11 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
transport_config.rtp.config = audio_config.rtp_config; transport_config.rtp.config = audio_config.rtp_config;
transport_config.frequency = audio_config.frequency; transport_config.frequency = audio_config.frequency;
transport_config.channels = audio_config.channels; transport_config.channels = audio_config.channels;
transport_config.rtp.max_outstanding_frames = transport_config.rtp.max_outstanding_frames = max_unacked_frames_;
audio_config.rtp_config.max_delay_ms / 100 + 1;
transport_sender_->InitializeAudio(transport_config); transport_sender_->InitializeAudio(transport_config);
rtcp_.SetCastReceiverEventHistorySize(kReceiverRtcpEventHistorySize);
memset(frame_id_to_rtp_timestamp_, 0, sizeof(frame_id_to_rtp_timestamp_)); memset(frame_id_to_rtp_timestamp_, 0, sizeof(frame_id_to_rtp_timestamp_));
} }
...@@ -72,16 +92,43 @@ void AudioSender::InsertAudio(scoped_ptr<AudioBus> audio_bus, ...@@ -72,16 +92,43 @@ void AudioSender::InsertAudio(scoped_ptr<AudioBus> audio_bus,
return; return;
} }
DCHECK(audio_encoder_.get()) << "Invalid internal state"; DCHECK(audio_encoder_.get()) << "Invalid internal state";
if (AreTooManyFramesInFlight()) {
VLOG(1) << "Dropping frame due to too many frames currently in-flight.";
return;
}
audio_encoder_->InsertAudio(audio_bus.Pass(), recorded_time); audio_encoder_->InsertAudio(audio_bus.Pass(), recorded_time);
} }
void AudioSender::SendEncodedAudioFrame( void AudioSender::SendEncodedAudioFrame(
scoped_ptr<transport::EncodedFrame> audio_frame) { scoped_ptr<transport::EncodedFrame> encoded_frame) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(!audio_frame->reference_time.is_null()); const uint32 frame_id = encoded_frame->frame_id;
rtp_timestamp_helper_.StoreLatestTime(audio_frame->reference_time,
audio_frame->rtp_timestamp); const bool is_first_frame_to_be_sent = last_send_time_.is_null();
last_send_time_ = cast_environment_->Clock()->NowTicks();
last_sent_frame_id_ = frame_id;
// If this is the first frame about to be sent, fake the value of
// |latest_acked_frame_id_| to indicate the receiver starts out all caught up.
// Also, schedule the periodic frame re-send checks.
if (is_first_frame_to_be_sent) {
latest_acked_frame_id_ = frame_id - 1;
ScheduleNextResendCheck();
}
cast_environment_->Logging()->InsertEncodedFrameEvent(
last_send_time_, FRAME_ENCODED, AUDIO_EVENT, encoded_frame->rtp_timestamp,
frame_id, static_cast<int>(encoded_frame->data.size()),
encoded_frame->dependency == transport::EncodedFrame::KEY,
configured_encoder_bitrate_);
// Only use lowest 8 bits as key.
frame_id_to_rtp_timestamp_[frame_id & 0xff] = encoded_frame->rtp_timestamp;
DCHECK(!encoded_frame->reference_time.is_null());
rtp_timestamp_helper_.StoreLatestTime(encoded_frame->reference_time,
encoded_frame->rtp_timestamp);
// At the start of the session, it's important to send reports before each // At the start of the session, it's important to send reports before each
// frame so that the receiver can properly compute playout times. The reason // frame so that the receiver can properly compute playout times. The reason
...@@ -98,15 +145,7 @@ void AudioSender::SendEncodedAudioFrame( ...@@ -98,15 +145,7 @@ void AudioSender::SendEncodedAudioFrame(
SendRtcpReport(is_last_aggressive_report); SendRtcpReport(is_last_aggressive_report);
} }
frame_id_to_rtp_timestamp_[audio_frame->frame_id & 0xff] = transport_sender_->InsertCodedAudioFrame(*encoded_frame);
audio_frame->rtp_timestamp;
transport_sender_->InsertCodedAudioFrame(*audio_frame);
}
void AudioSender::ResendPackets(
const MissingFramesAndPacketsMap& missing_frames_and_packets) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
transport_sender_->ResendPackets(true, missing_frames_and_packets, false);
} }
void AudioSender::IncomingRtcpPacket(scoped_ptr<Packet> packet) { void AudioSender::IncomingRtcpPacket(scoped_ptr<Packet> packet) {
...@@ -146,6 +185,37 @@ void AudioSender::SendRtcpReport(bool schedule_future_reports) { ...@@ -146,6 +185,37 @@ void AudioSender::SendRtcpReport(bool schedule_future_reports) {
ScheduleNextRtcpReport(); ScheduleNextRtcpReport();
} }
void AudioSender::ScheduleNextResendCheck() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(!last_send_time_.is_null());
base::TimeDelta time_to_next =
last_send_time_ - cast_environment_->Clock()->NowTicks() +
target_playout_delay_;
time_to_next = std::max(
time_to_next, base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
cast_environment_->PostDelayedTask(
CastEnvironment::MAIN,
FROM_HERE,
base::Bind(&AudioSender::ResendCheck, weak_factory_.GetWeakPtr()),
time_to_next);
}
void AudioSender::ResendCheck() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(!last_send_time_.is_null());
const base::TimeDelta time_since_last_send =
cast_environment_->Clock()->NowTicks() - last_send_time_;
if (time_since_last_send > target_playout_delay_) {
if (latest_acked_frame_id_ == last_sent_frame_id_) {
// Last frame acked, no point in doing anything
} else {
VLOG(1) << "ACK timeout; last acked frame: " << latest_acked_frame_id_;
ResendForKickstart();
}
}
ScheduleNextResendCheck();
}
void AudioSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) { void AudioSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
...@@ -161,15 +231,92 @@ void AudioSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) { ...@@ -161,15 +231,92 @@ void AudioSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
} }
} }
if (!cast_feedback.missing_frames_and_packets_.empty()) { if (last_send_time_.is_null())
ResendPackets(cast_feedback.missing_frames_and_packets_); return; // Cannot get an ACK without having first sent a frame.
if (cast_feedback.missing_frames_and_packets_.empty()) {
// We only count duplicate ACKs when we have sent newer frames.
if (latest_acked_frame_id_ == cast_feedback.ack_frame_id_ &&
latest_acked_frame_id_ != last_sent_frame_id_) {
duplicate_ack_counter_++;
} else {
duplicate_ack_counter_ = 0;
} }
uint32 acked_frame_id = static_cast<uint32>(cast_feedback.ack_frame_id_); // TODO(miu): The values "2" and "3" should be derived from configuration.
VLOG(2) << "Received audio ACK: " << acked_frame_id; if (duplicate_ack_counter_ >= 2 && duplicate_ack_counter_ % 3 == 2) {
cast_environment_->Logging()->InsertFrameEvent( VLOG(1) << "Received duplicate ACK for frame " << latest_acked_frame_id_;
cast_environment_->Clock()->NowTicks(), ResendForKickstart();
FRAME_ACK_RECEIVED, AUDIO_EVENT, }
frame_id_to_rtp_timestamp_[acked_frame_id & 0xff], acked_frame_id); } else {
// Only count duplicated ACKs if there is no NACK request in between.
// This is to avoid aggresive resend.
duplicate_ack_counter_ = 0;
// A NACK is also used to cancel pending re-transmissions.
transport_sender_->ResendPackets(
true, cast_feedback.missing_frames_and_packets_, true);
}
const base::TimeTicks now = cast_environment_->Clock()->NowTicks();
const RtpTimestamp rtp_timestamp =
frame_id_to_rtp_timestamp_[cast_feedback.ack_frame_id_ & 0xff];
cast_environment_->Logging()->InsertFrameEvent(now,
FRAME_ACK_RECEIVED,
AUDIO_EVENT,
rtp_timestamp,
cast_feedback.ack_frame_id_);
const bool is_acked_out_of_order =
static_cast<int32>(cast_feedback.ack_frame_id_ -
latest_acked_frame_id_) < 0;
VLOG(2) << "Received ACK" << (is_acked_out_of_order ? " out-of-order" : "")
<< " for frame " << cast_feedback.ack_frame_id_;
if (!is_acked_out_of_order) {
// Cancel resends of acked frames.
MissingFramesAndPacketsMap missing_frames_and_packets;
PacketIdSet missing;
while (latest_acked_frame_id_ != cast_feedback.ack_frame_id_) {
latest_acked_frame_id_++;
missing_frames_and_packets[latest_acked_frame_id_] = missing;
}
transport_sender_->ResendPackets(true, missing_frames_and_packets, true);
latest_acked_frame_id_ = cast_feedback.ack_frame_id_;
}
}
bool AudioSender::AreTooManyFramesInFlight() const {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
int frames_in_flight = 0;
if (!last_send_time_.is_null()) {
frames_in_flight +=
static_cast<int32>(last_sent_frame_id_ - latest_acked_frame_id_);
}
VLOG(2) << frames_in_flight
<< " frames in flight; last sent: " << last_sent_frame_id_
<< " latest acked: " << latest_acked_frame_id_;
return frames_in_flight >= max_unacked_frames_;
}
void AudioSender::ResendForKickstart() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(!last_send_time_.is_null());
VLOG(1) << "Resending last packet of frame " << last_sent_frame_id_
<< " to kick-start.";
// Send the first packet of the last encoded frame to kick start
// retransmission. This gives enough information to the receiver what
// packets and frames are missing.
MissingFramesAndPacketsMap missing_frames_and_packets;
PacketIdSet missing;
missing.insert(kRtcpCastLastPacket);
missing_frames_and_packets.insert(
std::make_pair(last_sent_frame_id_, missing));
last_send_time_ = cast_environment_->Clock()->NowTicks();
// Sending this extra packet is to kick-start the session. There is
// no need to optimize re-transmission for this case.
transport_sender_->ResendPackets(true, missing_frames_and_packets,
false);
} }
} // namespace cast } // namespace cast
......
...@@ -14,6 +14,8 @@ ...@@ -14,6 +14,8 @@
#include "base/time/time.h" #include "base/time/time.h"
#include "media/base/audio_bus.h" #include "media/base/audio_bus.h"
#include "media/cast/cast_config.h" #include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/rtcp/rtcp.h" #include "media/cast/rtcp/rtcp.h"
#include "media/cast/rtp_timestamp_helper.h" #include "media/cast/rtp_timestamp_helper.h"
...@@ -22,8 +24,12 @@ namespace cast { ...@@ -22,8 +24,12 @@ namespace cast {
class AudioEncoder; class AudioEncoder;
// This class is not thread safe. // Not thread safe. Only called from the main cast thread.
// It's only called from the main cast thread. // This class owns all objects related to sending audio, objects that create RTP
// packets, congestion control, audio encoder, parsing and sending of
// RTCP packets.
// Additionally it posts a bunch of delayed tasks to the main thread for various
// timeouts.
class AudioSender : public RtcpSenderFeedback, class AudioSender : public RtcpSenderFeedback,
public base::NonThreadSafe, public base::NonThreadSafe,
public base::SupportsWeakPtr<AudioSender> { public base::SupportsWeakPtr<AudioSender> {
...@@ -38,6 +44,10 @@ class AudioSender : public RtcpSenderFeedback, ...@@ -38,6 +44,10 @@ class AudioSender : public RtcpSenderFeedback,
return cast_initialization_status_; return cast_initialization_status_;
} }
// Note: It is not guaranteed that |audio_frame| will actually be encoded and
// sent, if AudioSender detects too many frames in flight. Therefore, clients
// should be careful about the rate at which this method is called.
//
// Note: It is invalid to call this method if InitializationResult() returns // Note: It is invalid to call this method if InitializationResult() returns
// anything but STATUS_AUDIO_INITIALIZED. // anything but STATUS_AUDIO_INITIALIZED.
void InsertAudio(scoped_ptr<AudioBus> audio_bus, void InsertAudio(scoped_ptr<AudioBus> audio_bus,
...@@ -46,31 +56,98 @@ class AudioSender : public RtcpSenderFeedback, ...@@ -46,31 +56,98 @@ class AudioSender : public RtcpSenderFeedback,
// Only called from the main cast thread. // Only called from the main cast thread.
void IncomingRtcpPacket(scoped_ptr<Packet> packet); void IncomingRtcpPacket(scoped_ptr<Packet> packet);
private: protected:
void ResendPackets( // Protected for testability.
const MissingFramesAndPacketsMap& missing_frames_and_packets); virtual void OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback)
OVERRIDE;
private:
// Schedule and execute periodic sending of RTCP report.
void ScheduleNextRtcpReport(); void ScheduleNextRtcpReport();
void SendRtcpReport(bool schedule_future_reports); void SendRtcpReport(bool schedule_future_reports);
// Schedule and execute periodic checks for re-sending packets. If no
// acknowledgements have been received for "too long," AudioSender will
// speculatively re-send certain packets of an unacked frame to kick-start
// re-transmission. This is a last resort tactic to prevent the session from
// getting stuck after a long outage.
void ScheduleNextResendCheck();
void ResendCheck();
void ResendForKickstart();
// Returns true if there are too many frames in flight, as defined by the
// configured target playout delay plus simple logic. When this is true,
// InsertAudio() will silenty drop frames instead of sending them to the audio
// encoder.
bool AreTooManyFramesInFlight() const;
// Called by the |audio_encoder_| with the next EncodedFrame to send. // Called by the |audio_encoder_| with the next EncodedFrame to send.
void SendEncodedAudioFrame(scoped_ptr<transport::EncodedFrame> audio_frame); void SendEncodedAudioFrame(scoped_ptr<transport::EncodedFrame> audio_frame);
virtual void OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) const scoped_refptr<CastEnvironment> cast_environment_;
OVERRIDE;
// The total amount of time between a frame's capture/recording on the sender
scoped_refptr<CastEnvironment> cast_environment_; // and its playback on the receiver (i.e., shown to a user). This is fixed as
// a value large enough to give the system sufficient time to encode,
// transmit/retransmit, receive, decode, and render; given its run-time
// environment (sender/receiver hardware performance, network conditions,
// etc.).
const base::TimeDelta target_playout_delay_;
// Sends encoded frames over the configured transport (e.g., UDP). In
// Chromium, this could be a proxy that first sends the frames from a renderer
// process to the browser process over IPC, with the browser process being
// responsible for "packetizing" the frames and pushing packets into the
// network layer.
transport::CastTransportSender* const transport_sender_; transport::CastTransportSender* const transport_sender_;
// Maximum number of outstanding frames before the encoding and sending of
// new frames shall halt.
const int max_unacked_frames_;
// Encodes AudioBuses into EncodedFrames.
scoped_ptr<AudioEncoder> audio_encoder_; scoped_ptr<AudioEncoder> audio_encoder_;
RtpTimestampHelper rtp_timestamp_helper_; const int configured_encoder_bitrate_;
// Manages sending/receiving of RTCP packets, including sender/receiver
// reports.
Rtcp rtcp_; Rtcp rtcp_;
// Records lip-sync (i.e., mapping of RTP <--> NTP timestamps), and
// extrapolates this mapping to any other point in time.
RtpTimestampHelper rtp_timestamp_helper_;
// Counts how many RTCP reports are being "aggressively" sent (i.e., one per
// frame) at the start of the session. Once a threshold is reached, RTCP
// reports are instead sent at the configured interval + random drift.
int num_aggressive_rtcp_reports_sent_; int num_aggressive_rtcp_reports_sent_;
// This is "null" until the first frame is sent. Thereafter, this tracks the
// last time any frame was sent or re-sent.
base::TimeTicks last_send_time_;
// The ID of the last frame sent. Logic throughout AudioSender assumes this
// can safely wrap-around. This member is invalid until
// |!last_send_time_.is_null()|.
uint32 last_sent_frame_id_;
// The ID of the latest (not necessarily the last) frame that has been
// acknowledged. Logic throughout AudioSender assumes this can safely
// wrap-around. This member is invalid until |!last_send_time_.is_null()|.
uint32 latest_acked_frame_id_;
// Counts the number of duplicate ACK that are being received. When this
// number reaches a threshold, the sender will take this as a sign that the
// receiver hasn't yet received the first packet of the next frame. In this
// case, AudioSender will trigger a re-send of the next frame.
int duplicate_ack_counter_;
// If this sender is ready for use, this is STATUS_AUDIO_INITIALIZED. // If this sender is ready for use, this is STATUS_AUDIO_INITIALIZED.
CastInitializationStatus cast_initialization_status_; CastInitializationStatus cast_initialization_status_;
// Used to map the lower 8 bits of the frame id to a RTP timestamp. This is // This is a "good enough" mapping for finding the RTP timestamp associated
// good enough as we only use it for logging. // with a video frame. The key is the lowest 8 bits of frame id (which is
// what is sent via RTCP). This map is used for logging purposes.
RtpTimestamp frame_id_to_rtp_timestamp_[256]; RtpTimestamp frame_id_to_rtp_timestamp_[256];
// NOTE: Weak pointers must be invalidated before all other member variables. // NOTE: Weak pointers must be invalidated before all other member variables.
......
...@@ -78,7 +78,7 @@ VideoSender::VideoSender( ...@@ -78,7 +78,7 @@ VideoSender::VideoSender(
media::cast::transport::CastTransportVideoConfig transport_config; media::cast::transport::CastTransportVideoConfig transport_config;
transport_config.codec = video_config.codec; transport_config.codec = video_config.codec;
transport_config.rtp.config = video_config.rtp_config; transport_config.rtp.config = video_config.rtp_config;
transport_config.rtp.max_outstanding_frames = max_unacked_frames_ + 1; transport_config.rtp.max_outstanding_frames = max_unacked_frames_;
transport_sender_->InitializeVideo(transport_config); transport_sender_->InitializeVideo(transport_config);
rtcp_.SetCastReceiverEventHistorySize(kReceiverRtcpEventHistorySize); rtcp_.SetCastReceiverEventHistorySize(kReceiverRtcpEventHistorySize);
......
...@@ -76,15 +76,13 @@ class VideoSender : public RtcpSenderFeedback, ...@@ -76,15 +76,13 @@ class VideoSender : public RtcpSenderFeedback,
void ScheduleNextRtcpReport(); void ScheduleNextRtcpReport();
void SendRtcpReport(bool schedule_future_reports); void SendRtcpReport(bool schedule_future_reports);
// Schedule and execute periodic checks for re-sending frames. If no // Schedule and execute periodic checks for re-sending packets. If no
// acknowledgements have been received for "too long," VideoSender will // acknowledgements have been received for "too long," VideoSender will
// speculatively re-send the frame just after |latest_acked_frame_id_| (the // speculatively re-send certain packets of an unacked frame to kick-start
// whole frame). This is a last resort tactic to prevent the session from // re-transmission. This is a last resort tactic to prevent the session from
// getting stuck after a long outage. // getting stuck after a long outage.
void ScheduleNextResendCheck(); void ScheduleNextResendCheck();
void ResendCheck(); void ResendCheck();
// Resend certain packets of an unacked frame to kick start re-transmission.
void ResendForKickstart(); void ResendForKickstart();
// Returns true if there are too many frames in flight, as defined by the // Returns true if there are too many frames in flight, as defined by the
...@@ -169,10 +167,7 @@ class VideoSender : public RtcpSenderFeedback, ...@@ -169,10 +167,7 @@ class VideoSender : public RtcpSenderFeedback,
// This is a "good enough" mapping for finding the RTP timestamp associated // This is a "good enough" mapping for finding the RTP timestamp associated
// with a video frame. The key is the lowest 8 bits of frame id (which is // with a video frame. The key is the lowest 8 bits of frame id (which is
// what is sent via RTCP). This map is used for logging purposes. The only // what is sent via RTCP). This map is used for logging purposes.
// time when this mapping will be incorrect is when it receives an ACK for a
// old enough frame such that 8-bit wrap around has already occurred, which
// should be pretty rare.
RtpTimestamp frame_id_to_rtp_timestamp_[256]; RtpTimestamp frame_id_to_rtp_timestamp_[256];
// NOTE: Weak pointers must be invalidated before all other member variables. // NOTE: Weak pointers must be invalidated before all other member variables.
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment