// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
 *
 * Any tests that only require the interface of the tree.
 */
/*
 * Without CONFIG_DEBUG_MAPLE_TREE the debug helpers do not exist; stub
 * them out so the tests below still build.  (Original text had several
 * preprocessor directives collapsed onto single physical lines, which is
 * invalid C — restored to one directive per line.)
 */
#ifndef CONFIG_DEBUG_MAPLE_TREE
#define mt_dump(mt, fmt)		do {} while (0)
#define mt_validate(mt)			do {} while (0)
#define mt_cache_shrink()		do {} while (0)
#define mas_dump(mas)			do {} while (0)
#define mas_wr_dump(mas)		do {} while (0)
/* Pass/fail counters used by the MT_BUG_ON() redefinition. */
atomic_t maple_tree_tests_run;
atomic_t maple_tree_tests_passed;
#undef MT_BUG_ON
/*
 * NOTE(review): the replacement MT_BUG_ON() definition and the matching
 * #endif for CONFIG_DEBUG_MAPLE_TREE are not visible in this chunk —
 * confirm they follow in the full file.
 */
#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif
mt_zero_nr_tallocated(); for (i = 0; i <= max; i++) {
MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL)); for (j = 0; j <= i; j++)
check_index_load(mt, j);
if (i)
MT_BUG_ON(mt, !mt_height(mt));
check_load(mt, i + 1, NULL);
}
#ifndef __KERNEL__ if (verbose) {
rcu_barrier();
mt_dump(mt, mt_dump_dec);
pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
max, mt_get_alloc_size()/1024, mt_nr_allocated(),
mt_nr_tallocated());
} #endif
}
/*
 * check_rev_find() - exercise mas_find_rev(): reverse search across a
 * gap, across entry boundaries, and down to the first entry of the tree.
 */
static noinline void __init check_rev_find(struct maple_tree *mt)
{
	void *entry;
	int idx;
	const int last = 200;

	MA_STATE(mas, mt, 0, 0);

	/* Entry idx occupies [idx * 10, idx * 10 + 5]; gaps of 4 between. */
	for (idx = 0; idx <= last; idx++)
		mtree_store_range(mt, idx * 10, idx * 10 + 5,
				  xa_mk_value(idx), GFP_KERNEL);

	rcu_read_lock();

	/* Searching [1000, 1000] backwards finds entry 100, then nothing. */
	mas_set(&mas, 1000);
	entry = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, entry != xa_mk_value(100));
	entry = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, entry != NULL);

	/* [997, 999] lies entirely in the gap between entries 99 and 100. */
	mas_set(&mas, 999);
	entry = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, entry != NULL);

	/* Walking down from 1000 with floor 900 yields 100, then 99. */
	mas_set(&mas, 1000);
	entry = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, entry != xa_mk_value(100));
	entry = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, entry != xa_mk_value(99));

	/* From 20 down to 0: entries 2, 1, 0, then exhaustion. */
	mas_set(&mas, 20);
	entry = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, entry != xa_mk_value(2));
	entry = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, entry != xa_mk_value(1));
	entry = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, entry != xa_mk_value(0));
	entry = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, entry != NULL);

	rcu_read_unlock();
}
static noinline void __init check_find(struct maple_tree *mt)
{ unsignedlong val = 0; unsignedlong count; unsignedlong max; unsignedlong top; unsignedlong last = 0, index = 0; void *entry, *entry2;
for (int i = 0; i <= count; i++) { if (val != 64)
MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL)); else
MT_BUG_ON(mt, mtree_insert(mt, val,
XA_ZERO_ENTRY, GFP_KERNEL));
val <<= 2;
}
val = 0;
mas_set(&mas, val);
mas_lock(&mas); while ((entry = mas_find(&mas, 268435456)) != NULL) { if (val != 64)
MT_BUG_ON(mt, xa_mk_value(val) != entry); else
MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
val <<= 2; /* For zero check. */ if (!val)
val = 1;
}
mas_unlock(&mas);
val = 0;
mas_set(&mas, val);
mas_lock(&mas);
mas_for_each(&mas, entry, ULONG_MAX) { if (val != 64)
MT_BUG_ON(mt, xa_mk_value(val) != entry); else
MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
val <<= 2; /* For zero check. */ if (!val)
val = 1;
}
mas_unlock(&mas);
/* Test mas_pause */
val = 0;
mas_set(&mas, val);
mas_lock(&mas);
mas_for_each(&mas, entry, ULONG_MAX) { if (val != 64)
MT_BUG_ON(mt, xa_mk_value(val) != entry); else
MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
val <<= 2; /* For zero check. */ if (!val)
val = 1;
val = 0;
max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
mt_for_each(mt, entry, index, max) {
MT_BUG_ON(mt, xa_mk_value(val) != entry);
val <<= 2; if (val == 64) /* Skip zero entry. */
val <<= 2; /* For zero check. */ if (!val)
val = 1;
}
val = 0;
max = 0;
index = 0;
MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
mt_for_each(mt, entry, index, ULONG_MAX) { if (val == top)
MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX)); else
MT_BUG_ON(mt, xa_mk_value(val) != entry);
/* Workaround for 32bit */ if ((val << 2) < val)
val = ULONG_MAX; else
val <<= 2;
if (val == 64) /* Skip zero entry. */
val <<= 2; /* For zero check. */ if (!val)
val = 1;
max++;
MT_BUG_ON(mt, max > 25);
}
mtree_erase_index(mt, ULONG_MAX);
/* * Find last value. * 1. get the expected value, leveraging the existence of an end entry * 2. delete end entry * 3. find the last value but searching for ULONG_MAX and then using * prev
*/ /* First, get the expected result. */
mas_lock(&mas);
mas_reset(&mas);
mas.index = ULONG_MAX; /* start at max.. */
entry = mas_find(&mas, ULONG_MAX);
entry = mas_prev(&mas, 0);
index = mas.index;
last = mas.last;
/* Erase the last entry. */
mas_reset(&mas);
mas.index = ULONG_MAX;
mas.last = ULONG_MAX;
mas_erase(&mas);
/* Get the previous value from MAS_START */
mas_reset(&mas);
entry2 = mas_prev(&mas, 0);
/* Check results. */
MT_BUG_ON(mt, entry != entry2);
MT_BUG_ON(mt, index != mas.index);
MT_BUG_ON(mt, last != mas.last);
staticconstunsignedlong holes[] = { /* * Note: start of hole is INCLUSIVE * end of hole is EXCLUSIVE * (opposite of the above table.) * Start of hole, end of hole, size of hole (+1)
*/
0x565234afb000, 0x565234afc000, 0x1000,
0x565234afe000, 0x565235def000, 0x12F1000,
0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
};
/* * req_range consists of 4 values. * 1. min index * 2. max index * 3. size * 4. number that should be returned. * 5. return value
*/ staticconstunsignedlong req_range[] = {
0x565234af9000, /* Min */
0x7fff58791000, /* Max */
0x1000, /* Size */
0x7fff5878d << 12, /* First rev hole of size 0x1000 */
0, /* Return value success. */
0x0, /* Min */
0x565234AF0 << 12, /* Max */
0x3000, /* Size */
0x565234AEE << 12, /* max - 3. */
0, /* Return value success. */
0x0, /* Min */
-1, /* Max */
0x1000, /* Size */
562949953421311 << 12,/* First rev hole of size 0x1000 */
0, /* Return value success. */
0x0, /* Min */
0x7F36D5109 << 12, /* Max */
0x4000, /* Size */
0x7F36D5106 << 12, /* First rev hole of size 0x4000 */
0, /* Return value success. */
/* * req_range consists of 4 values. * 1. min index * 2. max index * 3. size * 4. number that should be returned. * 5. return value
*/ staticconstunsignedlong req_range[] = {
0x565234af9000, /* Min */
0x7fff58791000, /* Max */
0x1000, /* Size */
0x565234afb000, /* First hole in our data of size 1000. */
0, /* Return value success. */
0x0, /* Min */
0x7fff58791000, /* Max */
0x1F00, /* Size */
0x0, /* First hole in our data of size 2000. */
0, /* Return value success. */
/* Test ascend. */
34148797436 << 12, /* Min */
0x7fff587AF000, /* Max */
0x3000, /* Size */
34148798629 << 12, /* Expected location */
0, /* Return value success. */
/* Test failing. */
34148798623 << 12, /* Min */
34148798683 << 12, /* Max */
0x15000, /* Size */
0, /* Expected location */
-EBUSY, /* Return value failed. */
/* Test filling entire gap. */
34148798623 << 12, /* Min */
0x7fff587AF000, /* Max */
0x10000, /* Size */
34148798632 << 12, /* Expected location */
0, /* Return value success. */
/* Test walking off the end of root. */
0, /* Min */
-1, /* Max */
-1, /* Size */
0, /* Expected location */
-EBUSY, /* Return value failure. */
/* Test looking for too large a hole across entire range. */
0, /* Min */
-1, /* Max */
4503599618982063UL << 12, /* Size */
34359052178 << 12, /* Expected location */
-EBUSY, /* Return failure. */
/* Test a single entry */
34148798648 << 12, /* Min */
34148798648 << 12, /* Max */
4096, /* Size of 1 */
34148798648 << 12, /* Location is the same as min/max */
0, /* Success */
}; int i, range_count = ARRAY_SIZE(range); int req_range_count = ARRAY_SIZE(req_range); unsignedlong min = 0x565234af2000;
MA_STATE(mas, mt, 0, 0);
/* Overwrite multiple levels at the end of the tree (slot 7) */
mt_set_non_kernel(50);
check_seq(mt, 400, false);
check_store_range(mt, 353, 361, xa_mk_value(353), 0);
check_store_range(mt, 347, 352, xa_mk_value(347), 0);
check_load(mt, 346, xa_mk_value(346)); for (i = 347; i <= 352; i++)
check_load(mt, i, xa_mk_value(347)); for (i = 353; i <= 361; i++)
check_load(mt, i, xa_mk_value(353));
check_load(mt, 362, xa_mk_value(362));
mt_set_non_kernel(0);
MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
mt_set_non_kernel(5);
check_seq(mt, 400, false);
check_store_range(mt, 352, 364, NULL, 0);
check_store_range(mt, 351, 364, xa_mk_value(352), 0);
check_load(mt, 350, xa_mk_value(350));
check_load(mt, 351, xa_mk_value(352)); for (i = 352; i <= 364; i++)
check_load(mt, i, xa_mk_value(352));
check_load(mt, 365, xa_mk_value(365));
mt_set_non_kernel(0);
MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
mt_set_non_kernel(50);
check_seq(mt, 400, false);
check_store_range(mt, 362, 367, xa_mk_value(362), 0);
check_store_range(mt, 353, 361, xa_mk_value(353), 0);
mt_set_non_kernel(0);
mt_validate(mt);
MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt); /* * Interesting cases: * 1. Overwrite the end of a node and end in the first entry of the next * node. * 2. Split a single range * 3. Overwrite the start of a range * 4. Overwrite the end of a range * 5. Overwrite the entire range * 6. Overwrite a range that causes multiple parent nodes to be * combined * 7. Overwrite a range that causes multiple parent nodes and part of * root to be combined * 8. Overwrite the whole tree * 9. Try to overwrite the zero entry of an alloc tree. * 10. Write a range larger than a nodes current pivot
*/
/* Check in-place modifications */
mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); /* Append to the start of last range */
mt_set_non_kernel(50); for (i = 0; i <= 500; i++) {
val = i * 5 + 1;
val2 = val + 4;
check_store_range(mt, val, val2, xa_mk_value(val), 0);
}
/* Append to the last range without touching any boundaries */ for (i = 0; i < 10; i++) {
val = val2 + 5;
val2 = val + 4;
check_store_range(mt, val, val2, xa_mk_value(val), 0);
}
/* Append to the end of last range */
val = val2; for (i = 0; i < 10; i++) {
val += 5;
MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
xa_mk_value(val)) != 0);
}
/* Overwriting the range and over a part of the next range */ for (i = 10; i < 30; i += 2) {
val = i * 5 + 1;
val2 = val + 5;
check_store_range(mt, val, val2, xa_mk_value(val), 0);
}
/* Overwriting a part of the range and over the next range */ for (i = 50; i < 70; i += 2) {
val2 = i * 5;
val = val2 - 5;
check_store_range(mt, val, val2, xa_mk_value(val), 0);
}
/* * Expand the range, only partially overwriting the previous and * next ranges
*/ for (i = 100; i < 130; i += 3) {
val = i * 5 - 5;
val2 = i * 5 + 1;
check_store_range(mt, val, val2, xa_mk_value(val), 0);
}
/* * Expand the range, only partially overwriting the previous and * next ranges, in RCU mode
*/
mt_set_in_rcu(mt); for (i = 150; i < 180; i += 3) {
val = i * 5 - 5;
val2 = i * 5 + 1;
check_store_range(mt, val, val2, xa_mk_value(val), 0);
}
mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); for (i = 0; i <= 1300; i++) {
val = i*10;
val2 = (i+1)*10;
check_store_range(mt, val, val2, xa_mk_value(val), 0);
MT_BUG_ON(mt, mt_height(mt) >= 4);
} /* Cause a 3 child split all the way up the tree. */ for (i = 5; i < 215; i += 10)
check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0); for (i = 5; i < 65; i += 10)
check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);
MT_BUG_ON(mt, mt_height(mt) >= 4); for (i = 5; i < 45; i += 10)
check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0); if (!MAPLE_32BIT)
MT_BUG_ON(mt, mt_height(mt) < 4);
mtree_destroy(mt);
mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); for (i = 0; i <= 1200; i++) {
val = i*10;
val2 = (i+1)*10;
check_store_range(mt, val, val2, xa_mk_value(val), 0);
MT_BUG_ON(mt, mt_height(mt) >= 4);
} /* Fill parents and leaves before split. */ for (i = 5; i < 455; i += 10)
check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);
for (i = 1; i < 16; i++)
check_store_range(mt, 8185 + i, 8185 + i + 1,
xa_mk_value(8185+i), 0);
MT_BUG_ON(mt, mt_height(mt) >= 4); /* triple split across multiple levels. */
check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0); if (!MAPLE_32BIT)
MT_BUG_ON(mt, mt_height(mt) != 4);
}
/* Check the first one and get ma_state in the correct state. */
MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++)); for ( ; i <= limit + 1; i++) {
entry = mas_next(&mas, limit); if (i > limit)
MT_BUG_ON(mt, entry != NULL); else
MT_BUG_ON(mt, xa_mk_value(i) != entry);
}
rcu_read_unlock();
mtree_destroy(mt);
}
/*
 * check_prev_entry() - store NULL over trees in various states and verify
 * the resulting tree is (or is not) empty.
 *
 * NOTE(review): despite the name, the body only exercises NULL stores
 * (the upstream function with this body is called check_store_null);
 * confirm the intended name against the rest of the file.
 *
 * Fixes over the original: "unsignedlong" typo (did not compile), "mas"
 * was used without any MA_STATE() declaration, and the unused locals
 * (index, value, i) are removed.
 */
static noinline void __init check_prev_entry(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, ULONG_MAX);

	/*
	 * Store NULL at range [0, ULONG_MAX] to an empty tree should result
	 * in an empty tree.
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at any range to an empty tree should result in an empty
	 * tree.
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set_range(&mas, 3, 10);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, ULONG_MAX] to a single entry tree should
	 * result in an empty tree.
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 0, ULONG_MAX);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, n] to a single entry tree should
	 * result in an empty tree.
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 0, 5);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [m, n] where m > 0 to a single entry tree
	 * should still be a single entry tree (the entry at 0 survives).
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 2, 5);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, mtree_empty(mt));
	// MT_BUG_ON(mt, xa_is_node(mas_root(&mas)));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, ULONG_MAX] to a tree with node should
	 * result in an empty tree.
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set_range(&mas, 1, 3);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	// MT_BUG_ON(mt, !xa_is_node(mas_root(&mas)));
	mas_set_range(&mas, 0, ULONG_MAX);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);
}
/* * At this point, there is a gap of 2 at index + 1 between seq100[3] and * seq100[4]. Search for the gap.
*/
mt_set_non_kernel(1);
mas_reset(&mas);
MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
seq100[5]));
MT_BUG_ON(mt, mas.index != index + 1);
rcu_read_unlock();
rcu_read_lock();
mas.index = index;
mas.last = index;
mas_reset(&mas);
entry = mas_find(&mas, ULONG_MAX);
MT_BUG_ON(mt, entry != xa_mk_value(index));
mn1 = mas.node;
entry = mas_next(&mas, ULONG_MAX);
MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
mas_next(&mas, ULONG_MAX); /* go to the next entry. */
mn2 = mas.node;
MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */
/* * At this point, there is a gap of 3 at seq100[6]. Find it by * searching 20 - 50 for size 3.
*/
mas_reset(&mas);
MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
seq100[12]));
MT_BUG_ON(mt, mas.index != seq100[6]);
rcu_read_unlock();
/*
 * check_iteration() - verify that storing into the tree from inside a
 * mas_for_each() loop leaves the iterator in a sane state, and that a
 * following mas_find() resumes at the expected next entry.
 */
static noinline void __init check_iteration(struct maple_tree *mt)
{ int i, nr_entries = 125; void *val;
MA_STATE(mas, mt, 0, 0);
/* Entry i occupies [i * 10, i * 10 + 9] — contiguous, no gaps. */
for (i = 0; i <= nr_entries; i++)
mtree_store_range(mt, i * 10, i * 10 + 9,
xa_mk_value(i), GFP_KERNEL);
/* Allow many node allocations without simulated failure. */
mt_set_non_kernel(99999);
i = 0;
mas_lock(&mas);
/* Case 1: extend the END of the current entry (92) mid-iteration. */
mas_for_each(&mas, val, 925) {
MT_BUG_ON(mt, mas.index != i * 10);
MT_BUG_ON(mt, mas.last != i * 10 + 9); /* Overwrite end of entry 92 */ if (i == 92) {
mas.index = 925;
mas.last = 929;
mas_store(&mas, val);
}
i++;
} /* Ensure mas_find() gets the next value */
val = mas_find(&mas, ULONG_MAX);
MT_BUG_ON(mt, val != xa_mk_value(i));
mas_set(&mas, 0);
i = 0;
/* Case 2: overwrite the START of entry 78 mid-iteration. */
mas_for_each(&mas, val, 785) {
MT_BUG_ON(mt, mas.index != i * 10);
MT_BUG_ON(mt, mas.last != i * 10 + 9); /* Overwrite start of entry 78 */ if (i == 78) {
mas.index = 780;
mas.last = 785;
mas_store(&mas, val);
} else {
i++;
}
}
val = mas_find(&mas, ULONG_MAX);
MT_BUG_ON(mt, val != xa_mk_value(i));
mas_set(&mas, 0);
i = 0;
/* Case 3: overwrite end of entry 76, ending at the loop's max (765). */
mas_for_each(&mas, val, 765) {
MT_BUG_ON(mt, mas.index != i * 10);
MT_BUG_ON(mt, mas.last != i * 10 + 9); /* Overwrite end of entry 76 and advance to the end */ if (i == 76) {
mas.index = 760;
mas.last = 765;
mas_store(&mas, val);
}
i++;
} /* Make sure the next find returns the one after 765, 766-769 */
val = mas_find(&mas, ULONG_MAX);
MT_BUG_ON(mt, val != xa_mk_value(76));
mas_unlock(&mas);
mas_destroy(&mas);
mt_set_non_kernel(0);
}
val = mas_next(&mas, 1000);
MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
MT_BUG_ON(mt, mas.index != 690);
MT_BUG_ON(mt, mas.last != 695);
val = mas_prev(&mas, 0);
MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
MT_BUG_ON(mt, mas.index != 680);
MT_BUG_ON(mt, mas.last != 685);
val = mas_next(&mas, 1000);
MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
MT_BUG_ON(mt, mas.index != 690);
MT_BUG_ON(mt, mas.last != 695);
val = mas_next(&mas, 1000);
MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
MT_BUG_ON(mt, mas.index != 700);
MT_BUG_ON(mt, mas.last != 705);
/* Check across node boundaries of the tree */
mas_set(&mas, 70);
val = mas_walk(&mas);
MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
MT_BUG_ON(mt, mas.index != 70);
MT_BUG_ON(mt, mas.last != 75);
val = mas_next(&mas, 1000);
MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
MT_BUG_ON(mt, mas.index != 80);
MT_BUG_ON(mt, mas.last != 85);
val = mas_prev(&mas, 70);
MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
MT_BUG_ON(mt, mas.index != 70);
MT_BUG_ON(mt, mas.last != 75);
/* Check across two levels of the tree */
mas_reset(&mas);
mas_set(&mas, level2[0]);
val = mas_walk(&mas);
MT_BUG_ON(mt, val != NULL);
val = mas_next(&mas, level2[1]);
MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
MT_BUG_ON(mt, mas.index != level2[2]);
MT_BUG_ON(mt, mas.last != level2[3]);
mn = mas.node;
val = mas_prev(&mas, 0);
MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
MT_BUG_ON(mt, mas.index != level2[2]);
MT_BUG_ON(mt, mas.last != level2[3]);
/* Check running off the end and back on */
mas_set(&mas, nr_entries * 10);
val = mas_walk(&mas);
MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
MT_BUG_ON(mt, mas.index != (nr_entries * 10));
MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
val = mas_next(&mas, ULONG_MAX);
MT_BUG_ON(mt, val != NULL);
MT_BUG_ON(mt, mas.index != last_index);
MT_BUG_ON(mt, mas.last != ULONG_MAX);
/* Check running off the start and back on */
mas_reset(&mas);
mas_set(&mas, 10);
val = mas_walk(&mas);
MT_BUG_ON(mt, val != xa_mk_value(1));
MT_BUG_ON(mt, mas.index != 10);
MT_BUG_ON(mt, mas.last != 15);
val = mas_prev(&mas, 0);
MT_BUG_ON(mt, val != xa_mk_value(0));
MT_BUG_ON(mt, mas.index != 0);
MT_BUG_ON(mt, mas.last != 5);
val = mas_prev(&mas, 0);
MT_BUG_ON(mt, val != NULL);
MT_BUG_ON(mt, mas.index != 0);
MT_BUG_ON(mt, mas.last != 5);
MT_BUG_ON(mt, !mas_is_underflow(&mas));
/*
 * check_spanning_relatives() - spanning writes that require balancing the
 * right sibling or right cousin.
 *
 * Fixes over the original: "unsignedlong" typo (did not compile) and the
 * function was left unterminated (its tail was overwritten by pasted
 * prose).
 */
static noinline void __init check_spanning_relatives(struct maple_tree *mt)
{
	unsigned long i, nr_entries = 1000;

	/* Entry i occupies [i * 10, i * 10 + 5]; gaps of 4 between entries. */
	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i * 10, i * 10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	/*
	 * NOTE(review): the original text was truncated here; the spanning
	 * store below is restored from the upstream version of this test —
	 * confirm against version control.
	 */
	mtree_store_range(mt, 9760, 9765, NULL, GFP_KERNEL);
}
/*
 * NOTE(review): the following is a stray German web-page disclaimer that
 * was pasted into this source file; it is not valid C and should be
 * removed.  English translation kept for reference:
 *
 * "The information on this web page was carefully compiled to the best of
 *  our knowledge.  However, neither completeness, nor correctness, nor
 *  quality of the provided information is guaranteed.
 *  Note: the coloured syntax rendering and the measurement are still
 *  experimental."
 */