/* * Copyright (c) 2016, Alliance for Open Media. All rights reserved. * * This source code is subject to the terms of the BSD 2 Clause License and * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License * was not distributed with this source code in the LICENSE file, you can * obtain it at www.aomedia.org/license/software. If the Alliance for Open * Media Patent License 1.0 was not distributed with this source code in the * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
*/ #ifndef AOM_TEST_VIDEO_SOURCE_H_ #define AOM_TEST_VIDEO_SOURCE_H_
// Helper macros to ensure LIBAOM_TEST_DATA_PATH is a quoted string.
// These are undefined right below GetDataPath.
// NOTE: LIBAOM_TEST_DATA_PATH MUST NOT be a quoted string before
// stringification or GetDataPath will fail at runtime.
// Each #define must be on its own line: a macro definition consumes the rest
// of the line, so fusing the two onto one line leaves STRINGIFY undefined.
#define TO_STRING(S) #S
#define STRINGIFY(S) TO_STRING(S)
// A simple function to encapsulate cross platform retrieval of test data path.
// Returns the value of the LIBAOM_TEST_DATA_PATH environment variable when it
// is set; otherwise falls back to the compile-time LIBAOM_TEST_DATA_PATH
// preprocessor symbol (stringified), or "." when neither is available.
static std::string GetDataPath() {
  const char *const data_path = getenv("LIBAOM_TEST_DATA_PATH");
  if (data_path == nullptr) {
#ifdef LIBAOM_TEST_DATA_PATH
    // In some environments, we cannot set environment variables.
    // Instead, we set the data path by using a preprocessor symbol
    // which can be set from make files.
    return STRINGIFY(LIBAOM_TEST_DATA_PATH);
#else
    return ".";
#endif
  }
  return data_path;
}
// Undefining stringification macros because they are not used elsewhere.
// Each #undef takes exactly one identifier and must be on its own line.
#undef TO_STRING
#undef STRINGIFY
// Abstract base class for test video sources, which provide a stream of // aom_image_t images with associated timestamps and duration. class VideoSource { public: virtual ~VideoSource() = default;
// Prepare the stream for reading, rewind/open as necessary. virtualvoid Begin() = 0;
// Advance the cursor to the next frame. For spatial layers this // advances the cursor to the next temporal unit. virtualvoid Next() = 0;
// Get the current video frame, or nullptr on End-Of-Stream. virtual aom_image_t *img() const = 0;
// Get the presentation timestamp of the current frame. virtual aom_codec_pts_t pts() const = 0;
// Get the current frame's duration virtualunsignedlong duration() const = 0;
// Get the timebase for the stream virtual aom_rational_t timebase() const = 0;
// Get the current frame counter, starting at 0. For spatial layers // this is the current temporal unit counter. virtualunsignedint frame() const = 0;
// Get the current file limit. virtualunsignedint limit() const = 0;
};
// Concrete VideoSource that synthesizes frames in memory (no file I/O).
// NOTE(review): only the constructor is visible in this excerpt; the rest of
// the class body (ReallocImage(), member declarations, the closing brace)
// is defined outside this view.
class DummyVideoSource : public VideoSource { public:
// Construct an 80x64 I420 source capped at 100 frames. The initial frame
// buffer starts null and is allocated by ReallocImage().
DummyVideoSource()
: img_(nullptr), limit_(100), width_(80), height_(64),
format_(AOM_IMG_FMT_I420) {
ReallocImage();
}
class RandomVideoSource : public DummyVideoSource { public:
RandomVideoSource(int seed = ACMRandom::DeterministicSeed())
: rnd_(seed), seed_(seed) {}
// Reset the RNG to get a matching stream for the second pass void Begin() override {
frame_ = 0;
rnd_.Reset(seed_);
FillFrame();
}
protected: // 15 frames of noise, followed by 15 static frames. Reset to 0 rather // than holding previous frames to encourage keyframes to be thrown. void FillFrame() override { if (img_) { if (frame_ % 30 < 15) for (size_t i = 0; i < raw_sz_; ++i) img_->img_data[i] = rnd_.Rand8(); else
memset(img_->img_data, 0, raw_sz_);
}
}
ACMRandom rnd_; int seed_;
};
// Abstract base class for test video sources, which provide a stream of
// decompressed images to the decoder.
class CompressedVideoSource {
 public:
  // Virtual destructor: this class is deleted through base pointers.
  virtual ~CompressedVideoSource() = default;

  // One-time initialization of the source.
  virtual void Init() = 0;

  // Prepare the stream for reading, rewind/open as necessary.
  virtual void Begin() = 0;

  // Advance the cursor to the next frame.
  virtual void Next() = 0;

  // Pointer to the current frame's compressed data.
  virtual const uint8_t *cxdata() const = 0;

  // Size in bytes of the current compressed frame.
  virtual size_t frame_size() const = 0;

  // Index of the current frame.
  virtual unsigned int frame_number() const = 0;
};
} // namespace libaom_test
#endif  // AOM_TEST_VIDEO_SOURCE_H_
/* NOTE(review): The trailing text below is not C++ source — it appears to be
 * German website-processing boilerplate appended to the file by an extraction
 * tool. It is preserved verbatim but commented out so the header remains
 * compilable after the include-guard #endif.
 *
 * Messung V0.5
 * ¤ Dauer der Verarbeitung: 0.11 Sekunden
 * (vorverarbeitet)
 * ¤
 * Die Informationen auf dieser Webseite wurden
 * nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
 * noch Qualität der bereit gestellten Informationen zugesichert.
 * Bemerkung:
 * Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.
 */