Diffstat (limited to 'lib/radix-tree.c')
-rw-r--r--   lib/radix-tree.c   151
1 file changed, 151 insertions(+), 0 deletions(-)
diff --git a/lib/radix-tree.c b/lib/radix-tree.c
index 3e69c2b66c94..fefa76e6ff96 100644
--- a/lib/radix-tree.c
+++ b/lib/radix-tree.c
@@ -3,6 +3,7 @@
* Portions Copyright (C) 2001 Christoph Hellwig
* Copyright (C) 2005 SGI, Christoph Lameter
* Copyright (C) 2006 Nick Piggin
+ * Copyright (C) 2012 Konstantin Khlebnikov
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
@@ -146,6 +147,43 @@ static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
	}
	return 0;
}
+
+/**
+ * radix_tree_find_next_bit - find the next set bit in a memory region
+ *
+ * @addr: The address to base the search on
+ * @size: The bitmap size in bits
+ * @offset: The bit number to start searching at
+ *
+ * Unrollable variant of find_next_bit() for constant size arrays.
+ * The tail bits, from size up to roundup(size, BITS_PER_LONG), must be zero.
+ * Returns next bit offset, or size if nothing found.
+ */
+static __always_inline unsigned long
+radix_tree_find_next_bit(const unsigned long *addr,
+			 unsigned long size, unsigned long offset)
+{
+	if (!__builtin_constant_p(size))
+		return find_next_bit(addr, size, offset);
+
+	if (offset < size) {
+		unsigned long tmp;
+
+		addr += offset / BITS_PER_LONG;
+		tmp = *addr >> (offset % BITS_PER_LONG);
+		if (tmp)
+			return __ffs(tmp) + offset;
+		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
+		while (offset < size) {
+			tmp = *++addr;
+			if (tmp)
+				return __ffs(tmp) + offset;
+			offset += BITS_PER_LONG;
+		}
+	}
+	return size;
+}
+
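For illustration only (not part of this patch): the small userspace program below reproduces the same unrolled scan over a constant-size bitmap and checks a few hand-picked bit positions. The demo_find_next_bit() name, the BITS constant and the main() harness are made up for this sketch; __builtin_ctzl() stands in for the kernel's __ffs(), and the bits past the declared size are left zero, as the helper above requires.

#include <assert.h>
#include <stdio.h>

#define BITS_PER_LONG	(8UL * sizeof(unsigned long))
#define BITS		64	/* compile-time constant size, as required */
#define LONGS		((BITS + BITS_PER_LONG - 1) / BITS_PER_LONG)

/* Same structure as radix_tree_find_next_bit() above, minus the
 * __builtin_constant_p() fallback to the generic find_next_bit(). */
static unsigned long demo_find_next_bit(const unsigned long *addr,
					unsigned long size,
					unsigned long offset)
{
	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __builtin_ctzl(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __builtin_ctzl(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}

int main(void)
{
	unsigned long map[LONGS] = { 0 };

	/* set bits 3 and 42 */
	map[3 / BITS_PER_LONG] |= 1UL << (3 % BITS_PER_LONG);
	map[42 / BITS_PER_LONG] |= 1UL << (42 % BITS_PER_LONG);

	assert(demo_find_next_bit(map, BITS, 0) == 3);
	assert(demo_find_next_bit(map, BITS, 4) == 42);
	assert(demo_find_next_bit(map, BITS, 43) == BITS);	/* nothing left */
	printf("ok\n");
	return 0;
}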
/*
* This assumes that the caller has performed appropriate preallocation, and
* that the caller has pinned this thread of control to the current CPU.
@@ -613,6 +651,119 @@ int radix_tree_tag_get(struct radix_tree_root *root,
EXPORT_SYMBOL(radix_tree_tag_get);

/**
+ * radix_tree_next_chunk - find next chunk of slots for iteration
+ *
+ * @root: radix tree root
+ * @iter: iterator state
+ * @flags: RADIX_TREE_ITER_* flags and tag index
+ * Returns: pointer to the chunk's first slot, or NULL if iteration is over
+ */
+void **radix_tree_next_chunk(struct radix_tree_root *root,
+			     struct radix_tree_iter *iter, unsigned flags)
+{
+	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
+	struct radix_tree_node *rnode, *node;
+	unsigned long index, offset;
+
+	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
+		return NULL;
+
+	/*
+	 * Catch next_index overflow after ~0UL. iter->index never overflows
+	 * while iterating; it can be zero only at the beginning.
+	 * And we cannot overflow iter->next_index in a single step,
+	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
+	 */
+	index = iter->next_index;
+	if (!index && iter->index)
+		return NULL;
+
+	rnode = rcu_dereference_raw(root->rnode);
+	if (radix_tree_is_indirect_ptr(rnode)) {
+		rnode = indirect_to_ptr(rnode);
+	} else if (rnode && !index) {
+		/* Single-slot tree */
+		iter->index = 0;
+		iter->next_index = 1;
+		iter->tags = 1;
+		return (void **)&root->rnode;
+	} else
+		return NULL;
+
+restart:
+	shift = (rnode->height - 1) * RADIX_TREE_MAP_SHIFT;
+	offset = index >> shift;
+
+	/* Index outside of the tree */
+	if (offset >= RADIX_TREE_MAP_SIZE)
+		return NULL;
+
+	node = rnode;
+	while (1) {
+		if ((flags & RADIX_TREE_ITER_TAGGED) ?
+				!test_bit(offset, node->tags[tag]) :
+				!node->slots[offset]) {
+			/* Hole detected */
+			if (flags & RADIX_TREE_ITER_CONTIG)
+				return NULL;
+
+			if (flags & RADIX_TREE_ITER_TAGGED)
+				offset = radix_tree_find_next_bit(
+						node->tags[tag],
+						RADIX_TREE_MAP_SIZE,
+						offset + 1);
+			else
+				while (++offset < RADIX_TREE_MAP_SIZE) {
+					if (node->slots[offset])
+						break;
+				}
+			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
+			index += offset << shift;
+			/* Overflow after ~0UL */
+			if (!index)
+				return NULL;
+			if (offset == RADIX_TREE_MAP_SIZE)
+				goto restart;
+		}
+
+		/* This is a leaf node */
+		if (!shift)
+			break;
+
+		node = rcu_dereference_raw(node->slots[offset]);
+		if (node == NULL)
+			goto restart;
+		shift -= RADIX_TREE_MAP_SHIFT;
+		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
+	}
+
+	/* Update the iterator state */
+	iter->index = index;
+	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;
+
+	/* Construct iter->tags bit-mask from node->tags[tag] array */
+	if (flags & RADIX_TREE_ITER_TAGGED) {
+		unsigned tag_long, tag_bit;
+
+		tag_long = offset / BITS_PER_LONG;
+		tag_bit = offset % BITS_PER_LONG;
+		iter->tags = node->tags[tag][tag_long] >> tag_bit;
+		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
+		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
+			/* Pick tags from next element */
+			if (tag_bit)
+				iter->tags |= node->tags[tag][tag_long + 1] <<
+						(BITS_PER_LONG - tag_bit);
+			/* Clip chunk size, here only BITS_PER_LONG tags */
+			iter->next_index = index + BITS_PER_LONG;
+		}
+	}
+
+	return node->slots + offset;
+}
+EXPORT_SYMBOL(radix_tree_next_chunk);
+
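For context only (not part of this patch): a sketch of how a caller might walk a tree with the chunk-based iterator, roughly what the radix_tree_for_each_slot() helper from the include/linux/radix-tree.h side of this change expands to around radix_tree_next_chunk(). The my_dump_entries() function and my_tree parameter are illustrative names only.

#include <linux/kernel.h>
#include <linux/radix-tree.h>
#include <linux/rcupdate.h>

static void my_dump_entries(struct radix_tree_root *my_tree)
{
	struct radix_tree_iter iter;
	void **slot;

	rcu_read_lock();
	radix_tree_for_each_slot(slot, my_tree, &iter, 0) {
		/* iter.index is the index of the slot we are looking at */
		pr_info("index %lu -> %p\n",
			iter.index, rcu_dereference_raw(*slot));
	}
	rcu_read_unlock();
}

The tagged and contiguous variants from the same header, radix_tree_for_each_tagged() and radix_tree_for_each_contig(), go through the same radix_tree_next_chunk() entry point, passing RADIX_TREE_ITER_TAGGED plus the tag index or RADIX_TREE_ITER_CONTIG in the flags argument.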
+/**
* radix_tree_range_tag_if_tagged - for each item in given range set given
* tag if item has another tag set
* @root: radix tree root