/**
 * struct crc_variant - describes a CRC variant
 * @bits: Number of bits in the CRC, 1 <= @bits <= 64.
 * @le: true if it's a "little endian" CRC (reversed mapping between bits and
 *	polynomial coefficients in each byte), false if it's a "big endian" CRC
 *	(natural mapping between bits and polynomial coefficients in each byte)
 * @poly: The generator polynomial with the highest-order term omitted.
 *	Bit-reversed if @le is true.
 * @func: The function to compute a CRC.  The type signature uses u64 so that it
 *	can fit any CRC up to CRC-64.  The CRC is passed in, and is expected to
 *	be returned in, the least significant bits of the u64.  The function is
 *	expected to *not* invert the CRC at the beginning and end.
 */
struct crc_variant {
	int bits;
	bool le;
	u64 poly;
	u64 (*func)(u64 crc, const u8 *p, size_t len);
};
staticint crc_suite_init(struct kunit_suite *suite)
{ /* * Allocate the test buffer using vmalloc() with a page-aligned length * so that it is immediately followed by a guard page. This allows * buffer overreads to be detected, even in assembly code.
*/
test_buflen = round_up(CRC_KUNIT_MAX_LEN, PAGE_SIZE);
test_buffer = vmalloc(test_buflen); if (!test_buffer) return -ENOMEM;
/*
 * Generate a random initial CRC for variant @v.
 *
 * Biased toward the edge cases: returns 0 or the all-ones value each with
 * probability 1/4; otherwise a uniformly random value masked to the CRC's
 * width.  (Fixes the fused 'conststruct' token, which does not compile.)
 */
static u64 generate_random_initial_crc(const struct crc_variant *v)
{
	switch (rand32() % 4) {
	case 0:
		return 0;
	case 1:
		return crc_mask(v); /* All 1 bits */
	default:
		return rand64() & crc_mask(v);
	}
}
/*
 * Pick a random buffer length in [0, max_length], biased toward small
 * lengths: a third of the time below 128, a third below 3072, and
 * otherwise anywhere in the u32 range (before the final modulo).
 */
static size_t generate_random_length(size_t max_length)
{
	size_t candidate;
	u32 bucket = rand32() % 3;

	if (bucket == 0)
		candidate = rand32() % 128;
	else if (bucket == 1)
		candidate = rand32() % 3072;
	else
		candidate = rand32();

	return candidate % (max_length + 1);
}
/* Test that v->func gives the same CRCs as a reference implementation. */ staticvoid crc_test(struct kunit *test, conststruct crc_variant *v)
{
size_t i;
for (i = 0; i < CRC_KUNIT_NUM_TEST_ITERS; i++) {
u64 init_crc, expected_crc, actual_crc;
size_t len, offset; bool nosimd;
init_crc = generate_random_initial_crc(v);
len = generate_random_length(CRC_KUNIT_MAX_LEN);
/* Generate a random offset. */ if (rand32() % 2 == 0) { /* Use a random alignment mod 64 */
offset = rand32() % 64;
offset = min(offset, CRC_KUNIT_MAX_LEN - len);
} else { /* Go up to the guard page, to catch buffer overreads */
offset = test_buflen - len;
}
if (rand32() % 8 == 0) /* Refresh the data occasionally. */
prandom_bytes_state(&rng, &test_buffer[offset], len);
nosimd = rand32() % 8 == 0;
/* * Compute the CRC, and verify that it equals the CRC computed * by a simple bit-at-a-time reference implementation.
*/
expected_crc = crc_ref(v, init_crc, &test_buffer[offset], len); if (nosimd)
local_irq_disable();
actual_crc = v->func(init_crc, &test_buffer[offset], len); if (nosimd)
local_irq_enable();
KUNIT_EXPECT_EQ_MSG(test, expected_crc, actual_crc, "Wrong result with len=%zu offset=%zu nosimd=%d",
len, offset, nosimd);
}
}
/*
 * Benchmark @crc_func over a range of buffer lengths and report each
 * throughput in MB/s via kunit_info().  Skipped unless
 * CONFIG_CRC_BENCHMARK is enabled.  (Fixes the fused 'staticconst' token,
 * which does not compile.)
 */
static __always_inline void
crc_benchmark(struct kunit *test,
	      u64 (*crc_func)(u64 crc, const u8 *p, size_t len))
{
	static const size_t lens_to_test[] = {
		1, 16, 64, 127, 128, 200, 256, 511, 512, 1024, 3173, 4096, 16384,
	};
	size_t len, i, j, num_iters;
	/*
	 * The CRC value that this function computes in a series of calls to
	 * crc_func is never actually used, so use volatile to ensure that the
	 * computations are done as intended and don't all get optimized out.
	 */
	volatile u64 crc = 0;
	u64 t;

	if (!IS_ENABLED(CONFIG_CRC_BENCHMARK))
		kunit_skip(test, "not enabled");

	/* warm-up */
	for (i = 0; i < 10000000; i += CRC_KUNIT_MAX_LEN)
		crc = crc_func(crc, test_buffer, CRC_KUNIT_MAX_LEN);

	for (i = 0; i < ARRAY_SIZE(lens_to_test); i++) {
		len = lens_to_test[i];
		KUNIT_ASSERT_LE(test, len, CRC_KUNIT_MAX_LEN);
		/* Scale the iteration count so each length runs similar time. */
		num_iters = 10000000 / (len + 128);

		/* Keep preemption off so the timed region isn't perturbed. */
		preempt_disable();
		t = ktime_get_ns();
		for (j = 0; j < num_iters; j++)
			crc = crc_func(crc, test_buffer, len);
		t = ktime_get_ns() - t;
		preempt_enable();

		kunit_info(test, "len=%zu: %llu MB/s\n",
			   len, div64_u64((u64)len * num_iters * 1000, t));
	}
}
/* crc7_be */

/*
 * Adapter for crc7_be(): crc7_be() keeps the 7-bit CRC left-aligned in a
 * u8, while this test suite expects it right-aligned in a u64, so shift
 * the value into place on the way in and back out on the way out.
 */
static u64 crc7_be_wrapper(u64 crc, const u8 *p, size_t len)
{
	u64 left_aligned = crc << 1;

	return crc7_be(left_aligned, p, len) >> 1;
}
/*
 * Adapter for crc64_nvme(): that function inverts the CRC on entry and on
 * exit, while this test suite expects no inversions, so undo both here.
 */
static u64 crc64_nvme_wrapper(u64 crc, const u8 *p, size_t len)
{
	u64 pre_inverted = ~crc;

	return ~crc64_nvme(pre_inverted, p, len);
}
/*
 * NOTE(review): the following disclaimer text is stray prose (apparently
 * pasted from a webpage) and is not valid C; it is preserved here inside a
 * comment so the file still compiles — confirm it can be removed entirely.
 *
 * Die Informationen auf dieser Webseite wurden nach bestem Wissen
 * sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch
 * Richtigkeit, noch Qualität der bereitgestellten Informationen zugesichert.
 *
 * Bemerkung: Die farbliche Syntaxdarstellung und die Messung sind noch
 * experimentell.
 */