/*
 *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "video/video_send_stream_impl.h"
// Max positive size difference to treat allocations as "similar".
// (The original collapsed lines had swallowed these definitions into
// comments; restored as real constants since they are used below.)
static constexpr int kMaxVbaSizeDifferencePercent = 10;
// Max time we will throttle similar video bitrate allocations.
static constexpr int64_t kMaxVbaThrottleTimeMs = 500;

// This value acts as an absolute minimum AV1 bitrate limit.
constexpr int kMinDefaultAv1BitrateBps = 15000;

// When send-side BWE is used a stricter 1.1x pacing factor is used, rather
// than the 2.5x which is used with receive-side BWE. Provides a more careful
// bandwidth rampup with less risk of overshoots causing adverse effects like
// packet loss. Not used for receive side BWE, since there we lack the probing
// feature and so may result in too slow initial rampup.
static constexpr double kStrictPacingMultiplier = 1.1;
// Calculate max padding bitrate for a multi layer codec. int CalculateMaxPadBitrateBps(const std::vector<VideoStream>& streams, bool is_svc,
VideoEncoderConfig::ContentType content_type, int min_transmit_bitrate_bps, bool pad_to_min_bitrate, bool alr_probing) { int pad_up_to_bitrate_bps = 0;
RTC_DCHECK(!is_svc || streams.size() <= 1) << "Only one stream is allowed in " "SVC mode.";
// Filter out only the active streams;
std::vector<VideoStream> active_streams; for (const VideoStream& stream : streams) { if (stream.active)
active_streams.emplace_back(stream);
}
if (active_streams.size() > 1 || (!active_streams.empty() && is_svc)) { // Simulcast or SVC is used. // if SVC is used, stream bitrates should already encode svc bitrates: // min_bitrate = min bitrate of a lowest svc layer. // target_bitrate = sum of target bitrates of lower layers + min bitrate // of the last one (as used in the calculations below). // max_bitrate = sum of all active layers' max_bitrate. if (alr_probing) { // With alr probing, just pad to the min bitrate of the lowest stream, // probing will handle the rest of the rampup.
pad_up_to_bitrate_bps = active_streams[0].min_bitrate_bps;
} else { // Without alr probing, pad up to start bitrate of the // highest active stream. constdouble hysteresis_factor =
content_type == VideoEncoderConfig::ContentType::kScreen
? kScreenshareHysteresis
: kVideoHysteresis; if (is_svc) { // For SVC, since there is only one "stream", the padding bitrate // needed to enable the top spatial layer is stored in the // `target_bitrate_bps` field. // TODO(sprang): This behavior needs to die.
pad_up_to_bitrate_bps = static_cast<int>(
hysteresis_factor * active_streams[0].target_bitrate_bps + 0.5);
} else { const size_t top_active_stream_idx = active_streams.size() - 1;
pad_up_to_bitrate_bps = std::min( static_cast<int>(
hysteresis_factor *
active_streams[top_active_stream_idx].min_bitrate_bps +
0.5),
active_streams[top_active_stream_idx].target_bitrate_bps);
// Add target_bitrate_bps of the lower active streams. for (size_t i = 0; i < top_active_stream_idx; ++i) {
pad_up_to_bitrate_bps += active_streams[i].target_bitrate_bps;
}
}
}
} elseif (!active_streams.empty() && pad_to_min_bitrate) {
pad_up_to_bitrate_bps = active_streams[0].min_bitrate_bps;
}
// Returns true iff `lhs` and `rhs` carry a bitrate for exactly the same set
// of (spatial, temporal) layers. (Fixed fused `returnfalse`/`returntrue`
// tokens which did not compile.)
bool SameStreamsEnabled(const VideoBitrateAllocation& lhs,
                        const VideoBitrateAllocation& rhs) {
  for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
    for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) {
      if (lhs.HasBitrate(si, ti) != rhs.HasBitrate(si, ti)) {
        return false;
      }
    }
  }
  return true;
}
// Returns an optional that has value iff TransportSeqNumExtensionConfigured
// is `true` for the given video send stream config.
std::optional<float> GetConfiguredPacingFactor(
    const VideoSendStream::Config& config,
    VideoEncoderConfig::ContentType content_type,
    const PacingConfig& default_pacing_config,
    const FieldTrialsView& field_trials) {
  // Without the transport sequence number extension there is no send-side
  // BWE, so no pacing factor is configured.
  if (!TransportSeqNumExtensionConfigured(config))
    return std::nullopt;

  // An active ALR experiment overrides the pacing factor.
  std::optional<AlrExperimentSettings> alr_settings =
      GetAlrSettings(field_trials, content_type);
  if (alr_settings)
    return alr_settings->pacing_factor;
  // NOTE(review): the remainder of this function — the fallback path that
  // presumably uses `default_pacing_config` — is missing from this extracted
  // view. Recover it from the original source before building.
int GetEncoderPriorityBitrate(std::string codec_name, const FieldTrialsView& field_trials) { int priority_bitrate = 0; if (PayloadStringToCodecType(codec_name) == VideoCodecType::kVideoCodecAV1) {
webrtc::FieldTrialParameter<int> av1_priority_bitrate("bitrate", 0);
webrtc::ParseFieldTrial(
{&av1_priority_bitrate},
field_trials.Lookup("WebRTC-AV1-OverridePriorityBitrate"));
priority_bitrate = av1_priority_bitrate;
} return priority_bitrate;
}
// Returns `initial_encoder_max_bitrate` as an unsigned value, substituting a
// safe 10 Mbps fallback for non-positive inputs. (Fixed fused `constint`
// token which did not compile.)
uint32_t GetInitialEncoderMaxBitrate(int initial_encoder_max_bitrate) {
  if (initial_encoder_max_bitrate > 0)
    return rtc::dchecked_cast<uint32_t>(initial_encoder_max_bitrate);

  // TODO(srte): Make sure max bitrate is not set to negative values. We don't
  // have any way to handle unset values in downstream code, such as the
  // bitrate allocator. Previously -1 was implicitly casted to UINT32_MAX, a
  // behaviour that is not safe. Converting to 10 Mbps should be safe for
  // reasonable use cases as it allows adding the max of multiple streams
  // without wrappping around.
  const int kFallbackMaxBitrateBps = 10000000;
  RTC_DLOG(LS_ERROR) << "ERROR: Initial encoder max bitrate = "
                     << initial_encoder_max_bitrate << " which is <= 0!";
  RTC_DLOG(LS_INFO) << "Using default encoder max bitrate = 10 Mbps";
  return kFallbackMaxBitrateBps;
}
// Default minimum video bitrate (bps) for the given codec; AV1 has its own
// dedicated floor.
int GetDefaultMinVideoBitrateBps(VideoCodecType codec_type) {
  return codec_type == VideoCodecType::kVideoCodecAV1
             ? kMinDefaultAv1BitrateBps
             : kDefaultMinVideoBitrateBps;
}
size_t CalculateMaxHeaderSize(const RtpConfig& config) {
size_t header_size = kRtpHeaderSize;
size_t extensions_size = 0;
size_t fec_extensions_size = 0; if (!config.extensions.empty()) {
RtpHeaderExtensionMap extensions_map(config.extensions);
extensions_size = RtpHeaderExtensionSize(RTPSender::VideoExtensionSizes(),
extensions_map);
fec_extensions_size =
RtpHeaderExtensionSize(RTPSender::FecExtensionSizes(), extensions_map);
}
header_size += extensions_size; if (config.flexfec.payload_type >= 0) { // All FEC extensions again plus maximum FlexFec overhead.
header_size += fec_extensions_size + 32;
} else { if (config.ulpfec.ulpfec_payload_type >= 0) { // Header with all the FEC extensions will be repeated plus maximum // UlpFec overhead.
header_size += fec_extensions_size + 18;
} if (config.ulpfec.red_payload_type >= 0) {
header_size += 1; // RED header.
}
} // Additional room for Rtx. if (config.rtx.payload_type >= 0)
header_size += kRtxHeaderSize; return header_size;
}
// NOTE(review): the lines below appear to be fragments of two (or more)
// different member functions whose signatures were lost when this text was
// extracted — likely the sender-construction/startup path and
// StopPermanentlyAndGetRtpStates. Confirm against the original source; only
// comments have been added here.

// Honor the config flag for periodic ALR bandwidth probing — presumably
// overriding a field-trial-derived default; TODO confirm.
if (config_.periodic_alr_bandwidth_probing) {
  enable_alr_bw_probing = config_.periodic_alr_bandwidth_probing;
}

if (enable_alr_bw_probing) {
  transport->EnablePeriodicAlrProbing(*enable_alr_bw_probing);
}

if (configured_pacing_factor_)
  transport_->SetPacingFactor(*configured_pacing_factor_);

// Only request rotation at the source when we positively know that the remote
// side doesn't support the rotation extension. This allows us to prepare the
// encoder in the expectation that rotation is supported - which is the common
// case.
bool rotation_applied = absl::c_none_of(
    config_.rtp.extensions, [](const RtpExtension& extension) {
      return extension.uri == RtpExtension::kVideoRotationUri;
    });

running_ = false;
// Always run these cleanup steps regardless of whether running_ was set
// or not. This will unregister callbacks before destruction.
// See `VideoSendStreamImpl::StopVideoSendStream` for more.
Stop();
*rtp_state_map = GetRtpStates();
*payload_state_map = GetRtpPayloadStates();
}
void VideoSendStreamImpl::GenerateKeyFrame( const std::vector<std::string>& rids) {
RTC_DCHECK_RUN_ON(&thread_checker_); // Map rids to layers. If rids is empty, generate a keyframe for all layers.
std::vector<VideoFrameType> next_frames(config_.rtp.ssrcs.size(),
VideoFrameType::kVideoFrameKey); if (!config_.rtp.rids.empty() && !rids.empty()) {
std::fill(next_frames.begin(), next_frames.end(),
VideoFrameType::kVideoFrameDelta); for (constauto& rid : rids) { for (size_t i = 0; i < config_.rtp.rids.size(); i++) { if (config_.rtp.rids[i] == rid) {
next_frames[i] = VideoFrameType::kVideoFrameKey; break;
}
}
}
} if (video_stream_encoder_) {
video_stream_encoder_->SendKeyFrame(next_frames);
}
}
// Allows this sender to emit RTP packets; kicks off monitoring and rate
// allocation only when there are active encodings (has_active_encodings_).
void VideoSendStreamImpl::Start() {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  rtp_video_sender_->SetSending(true);
  // Nothing further to do if already running or nothing is encoding.
  if (IsRunning() || !has_active_encodings_)
    return;
  StartupVideoSendStream();
}
void VideoSendStreamImpl::SignalEncoderTimedOut() {
RTC_DCHECK_RUN_ON(&thread_checker_); // If the encoder has not produced anything the last kEncoderTimeOut and it // is supposed to, deregister as BitrateAllocatorObserver. This can happen // if a camera stops producing frames. if (encoder_target_rate_bps_ > 0) {
RTC_LOG(LS_INFO) << "SignalEncoderTimedOut, Encoder timed out.";
bitrate_allocator_->RemoveObserver(this);
}
}
void VideoSendStreamImpl::OnBitrateAllocationUpdated( const VideoBitrateAllocation& allocation) { // OnBitrateAllocationUpdated is invoked from the encoder task queue or // the worker_queue_. auto task = [this, allocation] {
RTC_DCHECK_RUN_ON(&thread_checker_); if (encoder_target_rate_bps_ == 0) { return;
}
int64_t now_ms = env_.clock().TimeInMilliseconds(); if (video_bitrate_allocation_context_) { // If new allocation is within kMaxVbaSizeDifferencePercent larger // than the previously sent allocation and the same streams are still // enabled, it is considered "similar". We do not want send similar // allocations more once per kMaxVbaThrottleTimeMs. const VideoBitrateAllocation& last =
video_bitrate_allocation_context_->last_sent_allocation; constbool is_similar =
allocation.get_sum_bps() >= last.get_sum_bps() &&
allocation.get_sum_bps() <
(last.get_sum_bps() * (100 + kMaxVbaSizeDifferencePercent)) /
100 &&
SameStreamsEnabled(allocation, last); if (is_similar &&
(now_ms - video_bitrate_allocation_context_->last_send_time_ms) <
kMaxVbaThrottleTimeMs) { // This allocation is too similar, cache it and return.
video_bitrate_allocation_context_->throttled_allocation = allocation; return;
}
} else {
video_bitrate_allocation_context_.emplace();
}
// Send bitrate allocation metadata only if encoder is not paused.
rtp_video_sender_->OnBitrateAllocationUpdated(allocation);
}; if (!worker_queue_->IsCurrent()) {
worker_queue_->PostTask(
SafeTask(worker_queue_safety_.flag(), std::move(task)));
} else {
task();
}
}
// Forwards the per-layer allocation to the RTP sender.
void VideoSendStreamImpl::OnVideoLayersAllocationUpdated(
    VideoLayersAllocation allocation) {
  // OnVideoLayersAllocationUpdated is handled on the encoder task queue in
  // order to not race with OnEncodedImage callbacks.
  rtp_video_sender_->OnVideoLayersAllocationUpdated(allocation);
}
// Called when the encoder produces output again; re-registers this stream
// with the bitrate allocator so it receives a rate, but only while running.
void VideoSendStreamImpl::SignalEncoderActive() {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  if (!IsRunning())
    return;
  RTC_LOG(LS_INFO) << "SignalEncoderActive, Encoder is active.";
  bitrate_allocator_->AddObserver(this, GetAllocationConfig());
}
// NOTE(review): this appears to be the tail of a lambda (note the trailing
// `};`) whose beginning was lost in extraction — presumably the encoder
// reconfiguration callback. Confirm against the original source; only
// comments have been added here.

// Clear stats for disabled layers.
for (size_t i = streams.size(); i < config_.rtp.ssrcs.size(); ++i) {
  stats_proxy_.OnInactiveSsrc(config_.rtp.ssrcs[i]);
}

if (IsRunning()) {
  // The send stream is started already. Update the allocator with new
  // bitrate limits.
  bitrate_allocator_->AddObserver(this, GetAllocationConfig());
}
};
// Callback for each encoded frame; bookkeeping runs on the worker queue, the
// frame itself is forwarded to the RTP sender. (The original non-void
// function had no return — its tail was truncated; restored per upstream.)
EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage(
    const EncodedImage& encoded_image,
    const CodecSpecificInfo* codec_specific_info) {
  // Encoded is called on whatever thread the real encoder implementation run
  // on. In the case of hardware encoders, there might be several encoders
  // running in parallel on different threads.

  // Indicate that there still is activity going on.
  activity_ = true;
  RTC_DCHECK(!worker_queue_->IsCurrent());

  auto task_to_run_on_worker = [this]() {
    RTC_DCHECK_RUN_ON(&thread_checker_);
    if (disable_padding_) {
      disable_padding_ = false;
      // To ensure that padding bitrate is propagated to the bitrate allocator.
      SignalEncoderActive();
    }
    // Check if there's a throttled VideoBitrateAllocation that we should try
    // sending.
    auto& context = video_bitrate_allocation_context_;
    if (context && context->throttled_allocation) {
      OnBitrateAllocationUpdated(*context->throttled_allocation);
    }
  };
  worker_queue_->PostTask(
      SafeTask(worker_queue_safety_.flag(), std::move(task_to_run_on_worker)));

  // NOTE(review): restored truncated tail — upstream forwards the image to
  // the RTP sender and returns its result; confirm against original source.
  return rtp_video_sender_->OnEncodedImage(encoded_image, codec_specific_info);
}
// BitrateAllocatorObserver callback delivering the stream's new target rate.
uint32_t VideoSendStreamImpl::OnBitrateUpdated(BitrateAllocationUpdate update) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  RTC_DCHECK(rtp_video_sender_->IsActive())
      << "VideoSendStream::Start has not been called.";

  // When the BWE algorithm doesn't pass a stable estimate, we'll use the
  // unstable one instead.
  if (update.stable_target_bitrate.IsZero()) {
    update.stable_target_bitrate = update.target_bitrate;
  }
  // NOTE(review): the remainder of this non-void function (which must compute
  // encoder/protection rates and return the consumed bitrate) is missing from
  // this extracted view — recover it from the original source before building.
// Returns the rate actually consumed by this stream, if known; currently
// always reports "unknown".
std::optional<DataRate> VideoSendStreamImpl::GetUsedRate() const {
  // This value is for real-time video. Screenshare may have unused bandwidth
  // that can be shared, and this needs to be changed to support that.
  return std::nullopt;
}
} // namespace internal
} // namespace webrtc
// --- Trailing non-source text below: footer emitted by the (German) web
// --- syntax-highlighter this file was scraped from. Preserved verbatim as
// --- comments so the translation unit stays well-formed; safe to remove.
// Messung V0.5
// Dauer der Verarbeitung: 0.3 Sekunden
// (vorverarbeitet)
// Die Informationen auf dieser Webseite wurden
// nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder
// Vollständigkeit, noch Richtigkeit, noch Qualität der bereitgestellten
// Informationen zugesichert.
// Bemerkung:
// Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.