/* * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree.
*/
// Flag for video codec.
ABSL_FLAG(std::string, codec, "VP8", "Video codec");
// Flags for rtp start and stop timestamp.
ABSL_FLAG(uint32_t,
          start_timestamp,
          0,
          "RTP start timestamp, packets with smaller timestamp will be ignored "
          "(no wraparound)");
ABSL_FLAG(uint32_t,
          stop_timestamp,
          4294967295,
          "RTP stop timestamp, packets with larger timestamp will be ignored "
          "(no wraparound)");
// Flags for render window width and height
ABSL_FLAG(uint32_t, render_width, 640, "Width of render window");
ABSL_FLAG(uint32_t, render_height, 480, "Height of render window");
ABSL_FLAG(
    std::string,
    force_fieldtrials,
    "",
    "Field trials control experimental feature code which can be forced. "
    "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enabled/"
    " will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
    "trials are separated by \"/\"");
ABSL_FLAG(bool, simulated_time, false, "Run in simulated time");
ABSL_FLAG(bool, disable_preview, false, "Disable decoded video preview.");
ABSL_FLAG(bool, disable_decoding, false, "Disable video decoding.");
ABSL_FLAG(int,
          extend_run_time_duration,
          0,
          "Extends the run time of the receiving client after the last RTP "
          "packet has been delivered. Typically useful to let the last few "
          "frames be decoded and rendered. Duration given in seconds.");
// Holds all the shared memory structures required for a receive stream. This
// structure is used to prevent members being deallocated before the replay
// has been finished.
//
// BUG FIX: the collapsed source line placed `struct StreamState {` inside the
// preceding `//` comment, commenting the declaration out; the line structure
// is restored here.
struct StreamState {
  test::NullTransport transport;
  // Sinks are owned here so renderers outlive the receive streams using them.
  std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>> sinks;
  std::vector<VideoReceiveStreamInterface*> receive_streams;
  std::vector<FlexfecReceiveStream*> flexfec_streams;
  std::unique_ptr<VideoDecoderFactory> decoder_factory;
};
// NOTE(review): this is the tail of ConfigureFromFile; its signature is not
// visible in this chunk. Fixes applied: `constauto` token fusion, and a
// collapsed line that swallowed `auto receive_config =` into a comment.
// Select a decoder factory: a fake (no-op) decoder when decoding is disabled
// by flag, otherwise the built-in software decoders.
if (absl::GetFlag(FLAGS_disable_decoding)) {
  stream_state->decoder_factory =
      std::make_unique<test::FunctionVideoDecoderFactory>(
          []() { return std::make_unique<test::FakeDecoder>(); });
} else {
  stream_state->decoder_factory = std::make_unique<InternalDecoderFactory>();
}
size_t config_count = 0;
for (const auto& json : json_configs) {
  // Create the configuration and parse the JSON into the config.
  auto receive_config =
      ParseVideoReceiveStreamJsonConfig(&(stream_state->transport), json);
  // Instantiate the underlying decoder.
  for (auto& decoder : receive_config.decoders) {
    decoder = test::CreateMatchingDecoder(decoder.payload_type,
                                          decoder.video_format.name);
  }
  // Create a window for this config.
  std::stringstream window_title;
  window_title << "Playback Video (" << config_count++ << ")";
  if (absl::GetFlag(FLAGS_disable_preview)) {
    stream_state->sinks.emplace_back(std::make_unique<NullRenderer>());
  } else {
    stream_state->sinks.emplace_back(test::VideoRenderer::Create(
        window_title.str().c_str(), absl::GetFlag(FLAGS_render_width),
        absl::GetFlag(FLAGS_render_height)));
  }
  // Create a receive stream for this config.
  receive_config.renderer = stream_state->sinks.back().get();
  receive_config.decoder_factory = stream_state->decoder_factory.get();
  stream_state->receive_streams.emplace_back(
      call->CreateVideoReceiveStream(std::move(receive_config)));
}
return stream_state;
}
// Loads the base configuration from flags passed in on the commandline.
// NOTE(review): this function appears truncated in this chunk -- the receive
// config is populated but no return statement is visible here; confirm
// against the full file.
std::unique_ptr<StreamState> ConfigureFromFlags( const std::string& rtp_dump_path,
Call* call) { auto stream_state = std::make_unique<StreamState>(); // Create the video renderers. We must add both to the stream state to keep // them from deallocating.
std::stringstream window_title;
window_title << "Playback Video (" << rtp_dump_path << ")";
// Render to a preview window unless disabled by flag, in which case frames
// are discarded by a NullRenderer.
std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> playback_video; if (absl::GetFlag(FLAGS_disable_preview)) {
playback_video = std::make_unique<NullRenderer>();
} else {
playback_video.reset(test::VideoRenderer::Create(
window_title.str().c_str(), absl::GetFlag(FLAGS_render_width),
absl::GetFlag(FLAGS_render_height)));
} auto file_passthrough = std::make_unique<FileRenderPassthrough>(
absl::GetFlag(FLAGS_out_base), playback_video.get());
// Both sinks are moved into the stream state so they stay alive for the
// whole replay.
stream_state->sinks.push_back(std::move(playback_video));
stream_state->sinks.push_back(std::move(file_passthrough)); // Setup the configuration from the flags.
VideoReceiveStreamInterface::Config receive_config(
&(stream_state->transport));
receive_config.rtp.remote_ssrc = absl::GetFlag(FLAGS_ssrc);
receive_config.rtp.local_ssrc = kReceiverLocalSsrc;
receive_config.rtp.rtx_ssrc = absl::GetFlag(FLAGS_ssrc_rtx);
// Map the RTX payload types back to the media/RED payload types they carry.
receive_config.rtp.rtx_associated_payload_types[absl::GetFlag(
FLAGS_media_payload_type_rtx)] = absl::GetFlag(FLAGS_media_payload_type);
receive_config.rtp
.rtx_associated_payload_types[absl::GetFlag(FLAGS_red_payload_type_rtx)] =
absl::GetFlag(FLAGS_red_payload_type);
receive_config.rtp.ulpfec_payload_type =
absl::GetFlag(FLAGS_ulpfec_payload_type);
receive_config.rtp.red_payload_type = absl::GetFlag(FLAGS_red_payload_type);
// Enable NACK with a one-second history.
receive_config.rtp.nack.rtp_history_ms = 1000;
std::unique_ptr<test::RtpFileReader> CreateRtpReader( const std::string& rtp_dump_path) {
std::unique_ptr<test::RtpFileReader> rtp_reader(test::RtpFileReader::Create(
test::RtpFileReader::kRtpDump, rtp_dump_path)); if (!rtp_reader) {
rtp_reader.reset(
test::RtpFileReader::Create(test::RtpFileReader::kPcap, rtp_dump_path)); if (!rtp_reader) {
fprintf(stderr, "Couldn't open input file as either a rtpdump or .pcap. Note " "that .pcapng is not supported.\nTrying to interpret the file as " "length/packet interleaved.\n");
rtp_reader.reset(test::RtpFileReader::Create(
test::RtpFileReader::kLengthPacketInterleaved, rtp_dump_path)); if (!rtp_reader) {
fprintf(stderr, "Unable to open input file with any supported format\n"); return nullptr;
}
}
} return rtp_reader;
}
// The RtpReplayer is responsible for parsing the configuration provided by // the user, setting up the windows, receive streams and decoders and then // replaying the provided RTP dump. class RtpReplayer final { public:
RtpReplayer(absl::string_view replay_config_path,
absl::string_view rtp_dump_path,
std::unique_ptr<FieldTrialsView> field_trials, bool simulated_time)
: replay_config_path_(replay_config_path),
rtp_dump_path_(rtp_dump_path),
time_sim_(simulated_time
? std::make_unique<GlobalSimulatedTimeController>(
Timestamp::Millis(1 << 30))
: nullptr),
env_(CreateEnvironment(
std::move(field_trials),
time_sim_ ? time_sim_->GetTaskQueueFactory() : nullptr,
time_sim_ ? time_sim_->GetClock() : nullptr)),
rtp_reader_(CreateRtpReader(rtp_dump_path_)) {
worker_thread_ = env_.task_queue_factory().CreateTaskQueue( "worker_thread", TaskQueueFactory::Priority::NORMAL);
rtc::Event event;
worker_thread_->PostTask([&]() {
call_ = Call::Create(CallConfig(env_));
// Creation of the streams must happen inside a task queue because it is // resued as a worker thread. if (replay_config_path_.empty()) {
stream_state_ = ConfigureFromFlags(rtp_dump_path_, call_.get());
} else {
stream_state_ = ConfigureFromFile(replay_config_path_, call_.get());
}
event.Set();
});
event.Wait(/*give_up_after=*/TimeDelta::Seconds(10));
// Destruction of streams and the call must happen on the same thread as
// their creation, so it is posted to the worker queue and waited on.
// BUG FIX: `constauto` token fusion corrected to `const auto&` (twice).
~RtpReplayer() {
  rtc::Event event;
  worker_thread_->PostTask([&]() {
    for (const auto& receive_stream : stream_state_->receive_streams) {
      call_->DestroyVideoReceiveStream(receive_stream);
    }
    for (const auto& flexfec_stream : stream_state_->flexfec_streams) {
      call_->DestroyFlexfecReceiveStream(flexfec_stream);
    }
    call_.reset();
    event.Set();
  });
  event.Wait(/*give_up_after=*/TimeDelta::Seconds(10));
}
void Run() {
rtc::Event event;
worker_thread_->PostTask([&]() { // Start replaying the provided stream now that it has been configured. // VideoReceiveStreams must be started on the same thread as they were // created on. for (constauto& receive_stream : stream_state_->receive_streams) {
receive_stream->Start();
}
event.Set();
});
event.Wait(/*give_up_after=*/TimeDelta::Seconds(10));
while (true) {
int64_t now_ms = CurrentTimeMs(); if (replay_start_ms == -1) {
replay_start_ms = now_ms;
}
test::RtpPacket packet; if (!rtp_reader_->NextPacket(&packet)) { break;
}
rtc::CopyOnWriteBuffer packet_buffer(
packet.original_length > 0 ? packet.original_length : packet.length);
memcpy(packet_buffer.MutableData(), packet.data, packet.length); if (packet.length < packet.original_length) { // Only the RTP header was recorded in the RTP dump, payload is not // known and and padding length is not known, zero the payload and // clear the padding bit.
memset(packet_buffer.MutableData() + packet.length, 0,
packet.original_length - packet.length);
packet_buffer.MutableData()[0] &= ~0x20;
} // Check that the packet is a RTP packet and is valid. if (!IsRtpPacket({packet.data, packet.length})) { continue;
}
RtpPacket header; if (!header.Parse(packet_buffer) ||
header.Timestamp() < start_timestamp ||
header.Timestamp() > stop_timestamp) { continue;
}
Result result = Result::kOk;
worker_thread_->PostTask([&]() { if (IsRtcpPacket(packet_buffer)) {
call_->Receiver()->DeliverRtcpPacket(std::move(packet_buffer));
}
RtpPacketReceived received_packet(&extensions,
Timestamp::Millis(CurrentTimeMs())); if (!received_packet.Parse(std::move(packet_buffer))) {
result = Result::kParsingFailed;
} else {
call_->Receiver()->DeliverRtpPacket(
MediaType::VIDEO, received_packet,
[&result](const RtpPacketReceived& parsed_packet) -> bool {
result = Result::kUnknownSsrc; // No point in trying to demux again. returnfalse;
});
}
event.Set();
});
event.Wait(/*give_up_after=*/TimeDelta::Seconds(10));
switch (result) { case Result::kOk: break; case Result::kUnknownSsrc: { if (unknown_packets[header.Ssrc()] == 0)
fprintf(stderr, "Unknown SSRC: %u!\n", header.Ssrc());
++unknown_packets[header.Ssrc()]; break;
} case Result::kParsingFailed: {
fprintf(stderr, "Packet error, corrupt packets or incorrect setup?\n");
fprintf(stderr, "Packet len=%zu pt=%u seq=%u ts=%u ssrc=0x%8x\n",
packet.length, header.PayloadType(), header.SequenceNumber(),
header.Timestamp(), header.Ssrc()); break;
}
}
} // Note that even when `extend_run_time_duration` is zero // `SleepOrAdvanceTime` should still be called in order to process the last // delivered packet when running in simulated time.
SleepOrAdvanceTime(absl::GetFlag(FLAGS_extend_run_time_duration) * 1000);
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.