/* * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree.
*/
// NOTE(review): fragment — the enclosing Encode() signature and the
// declarations of `frame_types`, `keyframe`, `num_simulcast_streams`,
// `rates`, `simulcast_streams` and `counter` precede this chunk.
// Ask NextFrame() for the per-layer sizes/temporal ids for this tick;
// the framerate is rounded to the nearest integer fps.
FrameInfo frame_info =
    NextFrame(frame_types, keyframe, num_simulcast_streams, rates.bitrate,
              simulcast_streams, static_cast<int>(rates.framerate_fps + 0.5));
// Produce one fake encoded image per layer reported by NextFrame().
for (uint8_t i = 0; i < frame_info.layers.size(); ++i) {
  // Layers smaller than this cannot hold a useful payload plus the 4-byte
  // counter written below, so they are skipped entirely.
  constexpr int kMinPayLoadLength = 14;
  if (frame_info.layers[i].size < kMinPayLoadLength) {
    // Drop this temporal layer.
    continue;
  }
  auto buffer = EncodedImageBuffer::Create(frame_info.layers[i].size);
  // Fill the buffer with arbitrary data. Write something to make Asan happy.
  memset(buffer->data(), 9, frame_info.layers[i].size);
  // Write a counter to the image to make each frame unique.
  WriteCounter(buffer->data() + frame_info.layers[i].size - 4, counter);
// NOTE(review): fragment — the enclosing NextFrame() signature and the
// initial construction of `frame_info` precede this chunk.
// An explicit keyframe request for any stream forces a keyframe.
if (frame_types) {
  for (VideoFrameType frame_type : *frame_types) {
    if (frame_type == VideoFrameType::kVideoFrameKey) {
      frame_info.keyframe = true;
      break;
    }
  }
}
// Guards last_frame_info_ and debt_bytes_ accessed below.
MutexLock lock(&mutex_);
for (uint8_t i = 0; i < num_simulcast_streams; ++i) {
  if (target_bitrate.GetBitrate(i, 0) > 0) {
    // Cycle the temporal id per active stream; a stream that had no layer
    // in the previous frame starts at temporal layer 0.
    int temporal_id = last_frame_info_.layers.size() > i
                          ? ++last_frame_info_.layers[i].temporal_id %
                                simulcast_streams[i].numberOfTemporalLayers
                          : 0;
    frame_info.layers.emplace_back(0, temporal_id);
  }
}
if (last_frame_info_.layers.size() < frame_info.layers.size()) {
  // A new keyframe is needed since a new layer will be added.
  frame_info.keyframe = true;
}
for (uint8_t i = 0; i < frame_info.layers.size(); ++i) {
  FrameInfo::SpatialLayer& layer_info = frame_info.layers[i];
  if (frame_info.keyframe) {
    // Keyframes always reset to the base temporal layer.
    layer_info.temporal_id = 0;
    // Average frame size in bytes: bitrate in bps (rounded up to whole
    // bytes via +7) scaled by this layer's rate share, spread over fps.
    size_t avg_frame_size =
        (target_bitrate.GetBitrate(i, 0) + 7) *
        kTemporalLayerRateFactor[frame_info.layers.size() - 1][i] /
        (8 * framerate);
    // The first frame is a key frame and should be larger.
    // Store the overshoot bytes and distribute them over the coming frames,
    // so that we on average meet the bitrate target.
    debt_bytes_ += (kKeyframeSizeFactor - 1) * avg_frame_size;
    layer_info.size = kKeyframeSizeFactor * avg_frame_size;
  } else {
    size_t avg_frame_size =
        (target_bitrate.GetBitrate(i, layer_info.temporal_id) + 7) *
        kTemporalLayerRateFactor[frame_info.layers.size() - 1][i] /
        (8 * framerate);
    layer_info.size = avg_frame_size;
    if (debt_bytes_ > 0) {
      // Pay at most half of the frame size for old debts.
      size_t payment_size = std::min(avg_frame_size / 2, debt_bytes_);
      debt_bytes_ -= payment_size;
      layer_info.size -= payment_size;
    }
  }
}
last_frame_info_ = frame_info;
return frame_info;
}
// Fake encoder that adds an artificial delay of `delay_ms` to each encode.
DelayedEncoder::DelayedEncoder(const Environment& env, int delay_ms)
    : test::FakeEncoder(env), delay_ms_(delay_ms) {
  // Construction may happen on a different thread than the one the encoder
  // is later driven from; drop the sequence checker's thread affinity so it
  // re-attaches on the first real call.
  sequence_checker_.Detach();
}
// Fake H264 encoder that alternates its work between two task queues.
MultithreadedFakeH264Encoder::MultithreadedFakeH264Encoder(
    const Environment& env)
    : test::FakeH264Encoder(env),
      current_queue_(0),
      queue1_(nullptr),
      queue2_(nullptr) {
  // The object can be constructed on a different thread than the one it is
  // used on, so detach the checker until the first call re-attaches it.
  sequence_checker_.Detach();
}
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.