#ifdef CONFIG_INTERVAL_TREE_SPAN_ITER
/*
 * Roll nodes[1] into nodes[0] by advancing nodes[1] to the end of a contiguous
 * span of nodes. This makes nodes[0]->last the end of that contiguous used span
 * of indexes that started at the original nodes[1]->start.
 *
 * If there is an interior hole, nodes[1] is now the first node starting the
 * next used span. A hole span is between nodes[0]->last and nodes[1]->start.
 *
 * If there is a tailing hole, nodes[1] is now NULL. A hole span is between
 * nodes[0]->last and last_index.
 *
 * If the contiguous used range span to last_index, nodes[1] is set to NULL.
 */
static void
interval_tree_span_iter_next_gap(struct interval_tree_span_iter *state)
{
	struct interval_tree_node *cur = state->nodes[1];

	state->nodes[0] = cur;
	do {
		/* Keep nodes[0] pointing at the node with the greatest ->last */
		if (cur->last > state->nodes[0]->last)
			state->nodes[0] = cur;
		cur = interval_tree_iter_next(cur, state->first_index,
					      state->last_index);
		/*
		 * Continue while the next node overlaps (last >= start) or is
		 * exactly adjacent (last + 1 == start) to the span so far.
		 */
	} while (cur && (state->nodes[0]->last >= cur->start ||
			 state->nodes[0]->last + 1 == cur->start));
	state->nodes[1] = cur;
}
void interval_tree_span_iter_first(struct interval_tree_span_iter *iter, struct rb_root_cached *itree, unsignedlong first_index, unsignedlong last_index)
{
iter->first_index = first_index;
iter->last_index = last_index;
iter->nodes[0] = NULL;
iter->nodes[1] =
interval_tree_iter_first(itree, first_index, last_index); if (!iter->nodes[1]) { /* No nodes intersect the span, whole span is hole */
iter->start_hole = first_index;
iter->last_hole = last_index;
iter->is_hole = 1; return;
} if (iter->nodes[1]->start > first_index) { /* Leading hole on first iteration */
iter->start_hole = first_index;
iter->last_hole = iter->nodes[1]->start - 1;
iter->is_hole = 1;
interval_tree_span_iter_next_gap(iter); return;
}
/* must have both nodes[0] and [1], interior hole */
iter->start_hole = iter->nodes[0]->last + 1;
iter->last_hole = iter->nodes[1]->start - 1;
iter->is_hole = 1;
interval_tree_span_iter_next_gap(iter);
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_next);
/* * Advance the iterator index to a specific position. The returned used/hole is * updated to start at new_index. This is faster than calling * interval_tree_span_iter_first() as it can avoid full searches in several * cases where the iterator is already set.
*/ void interval_tree_span_iter_advance(struct interval_tree_span_iter *iter, struct rb_root_cached *itree, unsignedlong new_index)
{ if (iter->is_hole == -1) return;
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.