/* * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree.
*/
// Bounds for the base minimum playout delay (zero to 10 seconds). Presumably
// used to validate/clamp values passed to the base-minimum-delay setter --
// TODO confirm at the use site. NOTE(review): the comment previously here
// ("default delay before re-requesting a key frame") described an unrelated
// constant and did not match these values.
constexpr TimeDelta kMinBaseMinimumDelay = TimeDelta::Zero();
constexpr TimeDelta kMaxBaseMinimumDelay = TimeDelta::Seconds(10);
// Concrete instance of RecordableEncodedFrame wrapping needed content // from EncodedFrame. class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { public: explicit WebRtcRecordableEncodedFrame( const EncodedFrame& frame,
RecordableEncodedFrame::EncodedResolution resolution)
: buffer_(frame.GetEncodedData()),
render_time_ms_(frame.RenderTime()),
codec_(frame.CodecSpecific()->codecType),
is_key_frame_(frame.FrameType() == VideoFrameType::kVideoFrameKey),
resolution_(resolution) { if (frame.ColorSpace()) {
color_space_ = *frame.ColorSpace();
}
}
// Video decoder class to be used for unknown codecs. Doesn't support decoding // but logs messages to LS_ERROR. class NullVideoDecoder : public webrtc::VideoDecoder { public: bool Configure(const Settings& settings) override {
RTC_LOG(LS_ERROR) << "Can't initialize NullVideoDecoder."; returntrue;
}
int32_t Decode(const webrtc::EncodedImage& input_image,
int64_t render_time_ms) override {
RTC_LOG(LS_ERROR) << "The NullVideoDecoder doesn't support decoding."; return WEBRTC_VIDEO_CODEC_OK;
}
// Returns how long to wait for a frame before timing out, derived from the
// remotely signalled NACK history (`rtp_history`). Key frames may use the
// raw history; delta frames scale it by a conversion factor, both capped by
// the file-level kMaxWaitForFrame / kMaxWaitForKeyFrame defaults.
// Defects fixed: fused token `constint` and the constant/`if` statement
// trapped behind a joined `//` comment.
TimeDelta DetermineMaxWaitForFrame(TimeDelta rtp_history, bool is_keyframe) {
  // A (arbitrary) conversion factor between the remotely signalled NACK buffer
  // time (if not present defaults to 1000ms) and the maximum time we wait for a
  // remote frame. Chosen to not change existing defaults when using not
  // rtx-time.
  const int conversion_factor = 3;
  if (rtp_history > TimeDelta::Zero() &&
      conversion_factor * rtp_history < kMaxWaitForFrame) {
    return is_keyframe ? rtp_history : conversion_factor * rtp_history;
  }
  return is_keyframe ? kMaxWaitForKeyFrame : kMaxWaitForFrame;
}
// Updates the local SSRC used by the RTP receiver. No-op when the value is
// unchanged. Must run on the packet sequence (enforced by the DCHECK).
// Defect fixed: the `const_cast` assignment was trapped behind a joined
// `//` TODO comment and therefore dead text in the damaged source.
void VideoReceiveStream2::SetLocalSsrc(uint32_t local_ssrc) {
  RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
  if (config_.rtp.local_ssrc == local_ssrc)
    return;

  // TODO(tommi): Make sure we don't rely on local_ssrc via the config struct.
  const_cast<uint32_t&>(config_.rtp.local_ssrc) = local_ssrc;
  rtp_video_stream_receiver_.OnLocalSsrcChange(local_ssrc);
}
// NOTE(review): Fragment -- appears to be the tail of the stream's Start()
// method (enclosing signature not visible in this view; confirm against the
// full file). Starts RTP packet reception under the packet sequence checker.
{ // TODO(bugs.webrtc.org/11993): Make this call on the network thread.
RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
rtp_video_stream_receiver_.StartReceive();
}
}
// NOTE(review): Fragment -- appears to be the decoder-shutdown portion of the
// stream's Stop() method; the enclosing signature and the end of the
// `if (decoder_running_)` scope are not visible in this view.
// TODO(bugs.webrtc.org/11993): Make this call on the network thread. // Also call `GetUniqueFramesSeen()` at the same time (since it's a counter // that's updated on the network thread).
RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
rtp_video_stream_receiver_.StopReceive();
// Synchronously drain the decode queue: post a teardown task and block on
// `done` so no decode task can touch the decoder database afterwards.
if (decoder_running_) {
rtc::Event done;
decode_queue_->PostTask([this, &done] {
RTC_DCHECK_RUN_ON(&decode_sequence_checker_); // Set `decoder_stopped_` before deregistering all decoders. This means // that any pending encoded frame will return early without trying to // access the decoder database.
decoder_stopped_ = true; for (const Decoder& decoder : config_.decoders) {
video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type);
}
done.Set();
});
// Blocks the calling thread until the teardown task above has executed.
done.Wait(rtc::Event::kForever);
// TODO(bugs.webrtc.org/11993): Make these calls on the network thread.
RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
rtp_video_stream_receiver_.RemoveReceiveCodecs();
video_receiver_.DeregisterReceiveCodecs();
void VideoReceiveStream2::CreateAndRegisterExternalDecoder( const Decoder& decoder) {
TRACE_EVENT0("webrtc", "VideoReceiveStream2::CreateAndRegisterExternalDecoder");
std::unique_ptr<VideoDecoder> video_decoder =
config_.decoder_factory->Create(env_, decoder.video_format); // If we still have no valid decoder, we have to create a "Null" decoder // that ignores all calls. The reason we can get into this state is that the // old decoder factory interface doesn't have a way to query supported // codecs. if (!video_decoder) {
video_decoder = std::make_unique<NullVideoDecoder>();
}
std::string decoded_output_file =
env_.field_trials().Lookup("WebRTC-DecoderDataDumpDirectory"); // Because '/' can't be used inside a field trial parameter, we use ';' // instead. // This is only relevant to WebRTC-DecoderDataDumpDirectory // field trial. ';' is chosen arbitrary. Even though it's a legal character // in some file systems, we can sacrifice ability to use it in the path to // dumped video, since it's developers-only feature for debugging.
absl::c_replace(decoded_output_file, ';', '/'); if (!decoded_output_file.empty()) { char filename_buffer[256];
rtc::SimpleStringBuilder ssb(filename_buffer);
ssb << decoded_output_file << "/webrtc_receive_stream_" << remote_ssrc()
<< "-" << rtc::TimeMicros() << ".ivf";
video_decoder = CreateFrameDumpingDecoderWrapper(
std::move(video_decoder), FileWrapper::OpenWriteOnly(ssb.str()));
}
// NOTE(review): Fragment -- `stats` is presumably the Stats struct being
// populated by the enclosing GetStats()-style accessor; confirm against the
// full file.
// Mozilla modification: VideoReceiveStream2 and friends do not surface RTCP // stats at all, and even on the most recent libwebrtc code there does not // seem to be any support for these stats right now. So, we hack this in.
rtp_video_stream_receiver_.RemoteRTCPSenderInfo(
&stats.rtcp_sender_packets_sent, &stats.rtcp_sender_octets_sent,
&stats.rtcp_sender_ntp_timestamp_ms,
&stats.rtcp_sender_remote_ntp_timestamp_ms);
// NOTE(review): Fragment -- appears to be the rendered-frame path of the
// stream (enclosing signature not visible); captures frame metadata and hops
// to the worker thread to record delay/sync statistics.
// TODO: bugs.webrtc.org/42220804 - we should set local capture clock offset // for `packet_infos`.
RtpPacketInfos packet_infos = video_frame.packet_infos();
// For frame delay metrics, calculated in `OnRenderedFrame`, to better reflect // user experience measurements must be done as close as possible to frame // rendering moment. Capture current time, which is used for calculation of // delay metrics in `OnRenderedFrame`, right after frame is passed to // renderer. Frame may or may be not rendered by this time. This results in // inaccuracy but is still the best we can do in the absence of "frame // rendered" callback from the renderer.
VideoFrameMetaData frame_meta(video_frame, env_.clock().CurrentTime());
// `task_safety_.flag()` cancels the posted task if the stream is destroyed
// before the worker thread runs it.
call_->worker_thread()->PostTask(
SafeTask(task_safety_.flag(), [frame_meta, packet_infos, this]() {
RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
int64_t video_playout_ntp_ms;
// Only report a sync offset when the stream-sync module has an estimate.
int64_t sync_offset_ms; double estimated_freq_khz; if (rtp_stream_sync_.GetStreamSyncOffsetInMs(
frame_meta.rtp_timestamp, frame_meta.render_time_ms(),
&video_playout_ntp_ms, &sync_offset_ms, &estimated_freq_khz)) {
stats_proxy_.OnSyncOffsetUpdated(video_playout_ntp_ms, sync_offset_ms,
estimated_freq_khz);
}
stats_proxy_.OnRenderedFrame(frame_meta);
source_tracker_.OnFrameDelivered(packet_infos,
frame_meta.decode_timestamp);
}));
// Fragment: updates `pending_resolution_` (used by the recordable encoded
// frame path) with the rendered frame's dimensions, warning if the stream
// resolution changed from what was previously reported.
// Defect fixed: the warning log was missing the "x" separator between the
// current width and height, producing e.g. "640480" instead of "640x480".
// NOTE(review): the enclosing function signature is not visible in this
// view; the trailing brace closing that scope is preserved.
webrtc::MutexLock lock(&pending_resolution_mutex_);
if (pending_resolution_.has_value()) {
  if (!pending_resolution_->empty() &&
      (video_frame.width() != static_cast<int>(pending_resolution_->width) ||
       video_frame.height() !=
           static_cast<int>(pending_resolution_->height))) {
    RTC_LOG(LS_WARNING)
        << "Recordable encoded frame stream resolution was reported as "
        << pending_resolution_->width << "x" << pending_resolution_->height
        << " but the stream is now " << video_frame.width() << "x"
        << video_frame.height();
  }
  pending_resolution_ = RecordableEncodedFrame::EncodedResolution{
      static_cast<unsigned>(video_frame.width()),
      static_cast<unsigned>(video_frame.height())};
}
}
// NOTE(review): Fragment -- inserts a complete frame into the frame buffer
// and, when the buffer reports a last-continuous picture id, forwards it to
// the RTP receiver (enclosing function signature not visible in this view).
auto last_continuous_pid = buffer_->InsertFrame(std::move(frame)); if (last_continuous_pid.has_value()) {
{ // TODO(bugs.webrtc.org/11993): Call on the network thread.
RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
rtp_video_stream_receiver_.FrameContinuous(*last_continuous_pid);
}
}
}
// Current OnPreDecode only cares about QP for VP8. // TODO(brandtr): Move to stats_proxy_.OnDecodableFrame in VSBC, or deprecate. int qp = -1; if (frame->CodecSpecific()->codecType == kVideoCodecVP8) { if (!vp8::GetQp(frame->data(), frame->size(), &qp)) {
RTC_LOG(LS_WARNING) << "Failed to extract QP from VP8 video frame";
}
}
stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp);
// NOTE(review): Fragment -- publishes decode results back on the worker
// thread; the lambda's closing `}));` is not visible in this view of the
// file (confirm against the full file).
// TODO(bugs.webrtc.org/11993): Make this PostTask to the network thread.
call_->worker_thread()->PostTask(
SafeTask(task_safety_.flag(),
[this, now, rtp_timestamp, result = std::move(result),
received_frame_is_keyframe, keyframe_request_is_due]() {
RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
keyframe_required_ = result.keyframe_required;
// Tell the RTP receiver which picture was decoded, if any.
if (result.decoded_frame_picture_id) {
rtp_video_stream_receiver_.FrameDecoded(
*result.decoded_frame_picture_id);
}
last_decoded_rtp_timestamp_ = rtp_timestamp;
// To avoid spamming keyframe requests for a stream that is not active we // check if we have received a packet within the last 5 seconds.
constexpr TimeDelta kInactiveDuration = TimeDelta::Seconds(5); constbool stream_is_active =
last_packet_ms &&
now - Timestamp::Millis(*last_packet_ms) < kInactiveDuration; if (!stream_is_active)
stats_proxy_.OnStreamInactive();
// Fragment: lazily registers the external decoder matching the incoming
// frame's payload type, and unregisters decoders for all other payload
// types. Defect fixed: the `for` loop and inner `if` were trapped behind a
// joined `//` comment and therefore dead text in the damaged source.
if (!video_receiver_.IsExternalDecoderRegistered(frame->PayloadType())) {
  // Look for the decoder with this payload type.
  for (const Decoder& decoder : config_.decoders) {
    if (decoder.payload_type == frame->PayloadType()) {
      CreateAndRegisterExternalDecoder(decoder);
    } else {
      // Unregister any external decoder not from this payload type.
      // If not, any previous video decoder will be released when the next
      // frame is decoded but the decoder wrapper will not.
      // This will cause the decoder to be reused if we switch back to that
      // payload in the future, failing to configure it and causing to
      // fallback to the software decoder.
      video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type);
    }
  }
}
// Decodes `frame` and, when a recordable-encoded-frame callback is set,
// buffers the frame and flushes the buffer to the callback once the stream
// resolution is known. Returns the decoder's result code.
// Defects fixed: fused tokens `constbool`/`constauto`, several statements
// trapped behind joined `//` comments, and the decode invocation defining
// `decode_result` was missing entirely (see NOTE below).
int VideoReceiveStream2::DecodeAndMaybeDispatchEncodedFrame(
    std::unique_ptr<EncodedFrame> frame) {
  RTC_DCHECK_RUN_ON(&decode_sequence_checker_);

  // If `buffered_encoded_frames_` grows out of control (=60 queued frames),
  // maybe due to a stuck decoder, we just halt the process here and log the
  // error.
  const bool encoded_frame_output_enabled =
      encoded_frame_buffer_function_ != nullptr &&
      buffered_encoded_frames_.size() < kBufferedEncodedFramesMaxSize;
  EncodedFrame* frame_ptr = frame.get();
  if (encoded_frame_output_enabled) {
    // If we receive a key frame with unset resolution, hold on dispatching the
    // frame and following ones until we know a resolution of the stream.
    // NOTE: The code below has a race where it can report the wrong
    // resolution for keyframes after an initial keyframe of other resolution.
    // However, the only known consumer of this information is the W3C
    // MediaRecorder and it will only use the resolution in the first encoded
    // keyframe from WebRTC, so misreporting is fine.
    buffered_encoded_frames_.push_back(std::move(frame));
    if (buffered_encoded_frames_.size() == kBufferedEncodedFramesMaxSize)
      RTC_LOG(LS_ERROR) << "About to halt recordable encoded frame output due "
                           "to too many buffered frames.";

    webrtc::MutexLock lock(&pending_resolution_mutex_);
    if (IsKeyFrameAndUnspecifiedResolution(*frame_ptr) &&
        !pending_resolution_.has_value())
      pending_resolution_.emplace();
  }

  // NOTE(review): the decode call that defines `decode_result` was missing
  // from the damaged source (the function returned an undefined variable);
  // restored from upstream video_receive_stream2.cc -- confirm against the
  // canonical file.
  int decode_result = video_receiver_.Decode(frame_ptr);

  if (encoded_frame_output_enabled) {
    std::optional<RecordableEncodedFrame::EncodedResolution> pending_resolution;
    {
      // Fish out `pending_resolution_` to avoid taking the mutex on every lap
      // or dispatching under the mutex in the flush loop.
      webrtc::MutexLock lock(&pending_resolution_mutex_);
      if (pending_resolution_.has_value())
        pending_resolution = *pending_resolution_;
    }
    if (!pending_resolution.has_value() || !pending_resolution->empty()) {
      // Flush the buffered frames.
      for (const auto& frame : buffered_encoded_frames_) {
        RecordableEncodedFrame::EncodedResolution resolution{
            frame->EncodedImage()._encodedWidth,
            frame->EncodedImage()._encodedHeight};
        if (IsKeyFrameAndUnspecifiedResolution(*frame)) {
          RTC_DCHECK(!pending_resolution->empty());
          resolution = *pending_resolution;
        }
        encoded_frame_buffer_function_(
            WebRtcRecordableEncodedFrame(*frame, resolution));
      }
      buffered_encoded_frames_.clear();
    }
  }
  return decode_result;
}
// Fragment: tail of the keyframe-generation handler -- clears the pending
// request once a keyframe arrives, otherwise re-requests one when due and
// no keyframe is currently being received.
// Defects fixed: the `if` chain was trapped behind a joined `//` comment and
// the fused token `elseif` broke the else-if branch.
// Repeat sending keyframe requests if we've requested a keyframe.
if (keyframe_generation_requested_) {
  if (received_frame_is_keyframe) {
    keyframe_generation_requested_ = false;
  } else if (keyframe_request_is_due) {
    if (!IsReceivingKeyFrame(now)) {
      request_key_frame = true;
    }
  } else {
    // It hasn't been long enough since the last keyframe request, do nothing.
  }
}

if (request_key_frame) {
  // HandleKeyFrameGeneration is initiated from the decode thread -
  // RequestKeyFrame() triggers a call back to the decode thread.
  // Perhaps there's a way to avoid that.
  RequestKeyFrame(now);
}
}
// If we recently have been receiving packets belonging to a keyframe then // we assume a keyframe is currently being received. bool receiving_keyframe = last_keyframe_packet_ms &&
now - Timestamp::Millis(*last_keyframe_packet_ms) <
max_wait_for_keyframe_; return receiving_keyframe;
}
// Fragment: tail of the playout-delay update -- picks the largest of the
// candidate minimum delays, warns when multiple sources set one, applies it
// to `timing_`, derives a max composition delay for low-latency rendering,
// and applies any frame-signalled maximum playout delay.
// Defects fixed: `int max_composition_delay_in_frames` was trapped behind a
// joined `//` comment; "minumum" typo in the explanatory comment.
// Since nullopt < anything, this will return the largest of the minimum
// delays, or nullopt if all are nullopt.
std::optional<TimeDelta> minimum_delay = std::max(min_delays);
if (minimum_delay) {
  auto num_playout_delays_set =
      absl::c_count_if(min_delays, [](auto opt) { return opt.has_value(); });
  if (num_playout_delays_set > 1 &&
      timing_->min_playout_delay() != minimum_delay) {
    RTC_LOG(LS_WARNING)
        << "Multiple playout delays set. Actual delay value set to "
        << *minimum_delay << " frame min delay="
        << OptionalDelayToLogString(frame_minimum_playout_delay_)
        << " base min delay="
        << OptionalDelayToLogString(base_minimum_playout_delay_)
        << " sync min delay="
        << OptionalDelayToLogString(syncable_minimum_playout_delay_);
  }
  timing_->set_min_playout_delay(*minimum_delay);
  if (frame_minimum_playout_delay_ == TimeDelta::Zero() &&
      frame_maximum_playout_delay_ > TimeDelta::Zero()) {
    // TODO(kron): Estimate frame rate from video stream.
    constexpr Frequency kFrameRate = Frequency::Hertz(60);
    // Convert playout delay in ms to number of frames.
    int max_composition_delay_in_frames =
        std::lrint(*frame_maximum_playout_delay_ * kFrameRate);
    // Subtract frames in buffer.
    max_composition_delay_in_frames =
        std::max(max_composition_delay_in_frames - buffer_->Size(), 0);
    timing_->SetMaxCompositionDelayInFrames(max_composition_delay_in_frames);
  }
}

if (frame_maximum_playout_delay_) {
  timing_->set_max_playout_delay(*frame_maximum_playout_delay_);
}
}
// NOTE(review): Fragment -- issues a keyframe request and records that
// generation was requested (enclosing function signature not visible in this
// view; confirm against the full file).
if (generate_key_frame) {
rtp_video_stream_receiver_.RequestKeyFrame();
{ // TODO(bugs.webrtc.org/11993): Post this to the network thread.
RTC_DCHECK_RUN_ON(&packet_sequence_checker_);
keyframe_generation_requested_ = true;
}
}
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.