/* * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree.
*/
void TestCaptureNtpTime(const BuiltInNetworkBehaviorConfig& net_config, int threshold_ms, int start_time_ms, int run_time_ms); void TestMinAudioVideoBitrate(int test_bitrate_from, int test_bitrate_to, int test_bitrate_step, int min_bwe, int start_bwe, int max_bwe); void TestEncodeFramerate(VideoEncoderFactory* encoder_factory,
absl::string_view payload_name, const std::vector<int>& max_framerates);
};
class VideoRtcpAndSyncObserver : public test::RtpRtcpObserver, public rtc::VideoSinkInterface<VideoFrame> { staticconstint kInSyncThresholdMs = 50; staticconstint kStartupTimeMs = 2000; staticconstint kMinRunTimeMs = 30000;
VideoReceiveStreamInterface::Stats stats = receive_stream_->GetStats(); if (stats.sync_offset_ms == std::numeric_limits<int>::max()) return;
int64_t now_ms = clock_->TimeInMilliseconds();
int64_t time_since_creation = now_ms - creation_time_ms_; // During the first couple of seconds audio and video can falsely be // estimated as being synchronized. We don't want to trigger on those. if (time_since_creation < kStartupTimeMs) return; if (std::abs(stats.sync_offset_ms) < kInSyncThresholdMs) { if (first_time_in_sync_ == -1) {
first_time_in_sync_ = now_ms;
GetGlobalMetricsLogger()->LogSingleValueMetric( "sync_convergence_time" + test_label_, "synchronization",
time_since_creation, Unit::kMilliseconds,
ImprovementDirection::kSmallerIsBetter);
} if (time_since_creation > kMinRunTimeMs)
observation_complete_.Set();
} if (first_time_in_sync_ != -1)
sync_offset_ms_list_.AddSample(stats.sync_offset_ms);
}
// Points this observer at the receive stream whose sync stats it polls.
// Note that receive_stream may be nullptr (detaches the observer).
void set_receive_stream(VideoReceiveStreamInterface* receive_stream) {
// Stats are read on `task_queue_`, so enforce same-task-queue access here.
RTC_DCHECK_EQ(task_queue_, TaskQueueBase::Current());
receive_stream_ = receive_stream;
}
DestroyCalls(); // Call may post periodic rtcp packet to the transport on the process // thread, thus transport should be destroyed after the call objects. // Though transports keep pointers to the call objects, transports handle // packets on the task_queue() and thus wouldn't create a race while current // destruction happens in the same task as destruction of the call objects.
video_send_transport.reset();
audio_send_transport.reset();
receive_transport.reset();
});
observer->PrintResults();
// In quick test synchronization may not be achieved in time. if (!absl::GetFlag(FLAGS_webrtc_quick_perf_test)) { // TODO(bugs.webrtc.org/10417): Reenable this for iOS #if !defined(WEBRTC_IOS)
EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.AVSyncOffsetInMs")); #endif
}
TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) { // Minimal normal usage at the start, then 30s overuse to allow filter to // settle, and then 80s underuse to allow plenty of time for rampup again.
test::ScopedFieldTrials fake_overuse_settings( "WebRTC-ForceSimulatedOveruseIntervalMs/1-30000-80000/");
class LoadObserver : public test::SendTest, public test::FrameGeneratorCapturer::SinkWantsObserver { public:
// Starts the observer in TestPhase::kInit; the phase state machine is
// advanced in OnSinkWantsChanged. Uses the long test timeout since the
// overuse/underuse cycle takes over a minute (see the field trial above).
LoadObserver()
    : SendTest(test::VideoTestConstants::kLongTimeout),
      test_phase_(TestPhase::kInit) {}
// Hooks this observer up to the newly created capturer and forces a large
// initial capture resolution so that adapting down is always possible.
void OnFrameGeneratorCapturerCreated(
    test::FrameGeneratorCapturer* frame_generator_capturer) override {
  auto* capturer = frame_generator_capturer;
  // Receive sink-wants change notifications from the capturer.
  capturer->SetSinkWantsObserver(this);
  // Set a high initial resolution to be sure that we can scale down.
  capturer->ChangeResolution(1920, 1080);
}
// OnSinkWantsChanged is called when FrameGeneratorCapturer::AddOrUpdateSink // is called. // TODO(sprang): Add integration test for maintain-framerate mode? void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* /* sink */, const rtc::VideoSinkWants& wants) override {
RTC_LOG(LS_INFO) << "OnSinkWantsChanged fps:" << wants.max_framerate_fps
<< " max_pixel_count " << wants.max_pixel_count
<< " target_pixel_count"
<< wants.target_pixel_count.value_or(-1); // The sink wants can change either because an adaptation happened // (i.e. the pixels or frame rate changed) or for other reasons, such // as encoded resolutions being communicated (happens whenever we // capture a new frame size). In this test, we only care about // adaptations. bool did_adapt =
last_wants_.max_pixel_count != wants.max_pixel_count ||
last_wants_.target_pixel_count != wants.target_pixel_count ||
last_wants_.max_framerate_fps != wants.max_framerate_fps;
last_wants_ = wants; if (!did_adapt) { if (test_phase_ == TestPhase::kInit) {
test_phase_ = TestPhase::kStart;
} return;
} // At kStart expect CPU overuse. Then expect CPU underuse when the encoder // delay has been decreased. switch (test_phase_) { case TestPhase::kInit:
ADD_FAILURE() << "Got unexpected adaptation request, max res = "
<< wants.max_pixel_count << ", target res = "
<< wants.target_pixel_count.value_or(-1)
<< ", max fps = " << wants.max_framerate_fps; break; case TestPhase::kStart: if (wants.max_pixel_count < std::numeric_limits<int>::max()) { // On adapting down, VideoStreamEncoder::VideoSourceProxy will set // only the max pixel count, leaving the target unset.
test_phase_ = TestPhase::kAdaptedDown;
} else {
ADD_FAILURE() << "Got unexpected adaptation request, max res = "
<< wants.max_pixel_count << ", target res = "
<< wants.target_pixel_count.value_or(-1)
<< ", max fps = " << wants.max_framerate_fps;
} break; case TestPhase::kAdaptedDown: // On adapting up, the adaptation counter will again be at zero, and // so all constraints will be reset. if (wants.max_pixel_count == std::numeric_limits<int>::max() &&
!wants.target_pixel_count) {
test_phase_ = TestPhase::kAdaptedUp;
observation_complete_.Set();
} else {
ADD_FAILURE() << "Got unexpected adaptation request, max res = "
<< wants.max_pixel_count << ", target res = "
<< wants.target_pixel_count.value_or(-1)
<< ", max fps = " << wants.max_framerate_fps;
} break; case TestPhase::kAdaptedUp:
ADD_FAILURE() << "Got unexpected adaptation request, max res = "
<< wants.max_pixel_count << ", target res = "
<< wants.target_pixel_count.value_or(-1)
<< ", max fps = " << wants.max_framerate_fps;
}
}
private: // TODO(holmer): Run this with a timer instead of once per packet.
// Samples the send stream's bitrate on every sent RTP packet and completes
// the test once enough in-range observations have been seen.
// TODO(holmer): Run this with a timer instead of once per packet.
Action OnSendRtp(rtc::ArrayView<const uint8_t> /* packet */) override {
  task_queue_->PostTask(SafeTask(task_safety_flag_, [this]() {
    VideoSendStream::Stats send_stats = send_stream_->GetStats();
    if (send_stats.substreams.empty())
      return;
    // This test runs a single stream, so exactly one substream is expected.
    RTC_DCHECK_EQ(1, send_stats.substreams.size());
    const int kbps =
        send_stats.substreams.begin()->second.total_bitrate_bps / 1000;
    if (kbps > min_acceptable_bitrate_ && kbps < max_acceptable_bitrate_) {
      converged_ = true;
      ++num_bitrate_observations_in_range_;
      if (num_bitrate_observations_in_range_ ==
          kNumBitrateObservationsInRange)
        observation_complete_.Set();
    }
    // Only record samples after the bitrate first entered the target range.
    if (converged_)
      bitrate_kbps_list_.AddSample(kbps);
  }));
  return SEND_PACKET;
}
// Runs the test in three phases: wait for an initial high bitrate, double
// the capture resolution and reconfigure the encoder, then wait for high
// bitrate estimates to come back after the reconfiguration.
void PerformTest() override {
  // Phase 1: the stream must first ramp up to a high bitrate.
  ASSERT_TRUE(
      time_to_reconfigure_.Wait(test::VideoTestConstants::kDefaultTimeout))
      << "Timed out before receiving an initial high bitrate.";
  // Phase 2: double both capture dimensions and push the updated encoder
  // config to the send stream on its task queue.
  frame_generator_->ChangeResolution(
      test::VideoTestConstants::kDefaultWidth * 2,
      test::VideoTestConstants::kDefaultHeight * 2);
  auto apply_new_config = [&]() {
    send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
  };
  SendTask(task_queue_, apply_new_config);
  // Phase 3: expect renewed high-bitrate estimates.
  EXPECT_TRUE(Wait())
      << "Timed out while waiting for a couple of high bitrate estimates "
         "after reconfiguring the send stream.";
}
// Discovers the minimal supported audio+video bitrate. The test bitrate is // considered supported if Rtt does not go above 400ms with the network // contrained to the test bitrate. // // |test_bitrate_from test_bitrate_to| bitrate constraint range // `test_bitrate_step` bitrate constraint update step during the test // |min_bwe max_bwe| BWE range // `start_bwe` initial BWE void CallPerfTest::TestMinAudioVideoBitrate(int test_bitrate_from, int test_bitrate_to, int test_bitrate_step, int min_bwe, int start_bwe, int max_bwe) { staticconst std::string kAudioTrackId = "audio_track_0"; static constexpr int kBitrateStabilizationMs = 10000; static constexpr int kBitrateMeasurements = 10; static constexpr int kBitrateMeasurementMs = 1000; static constexpr int kShortDelayMs = 10; static constexpr int kMinGoodRttMs = 400;
class MinVideoAndAudioBitrateTester : public test::EndToEndTest { public:
// Constructs the tester with the bitrate-constraint sweep range
// [test_bitrate_from, test_bitrate_to] (stepped by test_bitrate_step) and
// the BWE configuration [min_bwe, max_bwe] with initial estimate start_bwe.
// Presumably all rates are in kbps — TODO confirm against callers.
// `task_queue` is the queue the test posts its work to.
MinVideoAndAudioBitrateTester(int test_bitrate_from, int test_bitrate_to, int test_bitrate_step, int min_bwe, int start_bwe, int max_bwe,
TaskQueueBase* task_queue)
: EndToEndTest(),
test_bitrate_from_(test_bitrate_from),
test_bitrate_to_(test_bitrate_to),
test_bitrate_step_(test_bitrate_step),
min_bwe_(min_bwe),
start_bwe_(start_bwe),
max_bwe_(max_bwe),
task_queue_(task_queue) {}
void PerformTest() override { // Quick test mode, just to exercise all the code paths without actually // caring about performance measurements. constbool quick_perf_test = absl::GetFlag(FLAGS_webrtc_quick_perf_test);
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.