Diffstat (limited to 'include/linux/radix-tree.h')
-rw-r--r--  include/linux/radix-tree.h | 212
1 file changed, 101 insertions(+), 111 deletions(-)
diff --git a/include/linux/radix-tree.h b/include/linux/radix-tree.h
index af3581b8a451..52bda854593b 100644
--- a/include/linux/radix-tree.h
+++ b/include/linux/radix-tree.h
@@ -80,26 +80,25 @@ static inline bool radix_tree_is_internal_node(void *ptr)
#define RADIX_TREE_MAX_PATH (DIV_ROUND_UP(RADIX_TREE_INDEX_BITS, \
RADIX_TREE_MAP_SHIFT))
-/* Internally used bits of node->count */
-#define RADIX_TREE_COUNT_SHIFT (RADIX_TREE_MAP_SHIFT + 1)
-#define RADIX_TREE_COUNT_MASK ((1UL << RADIX_TREE_COUNT_SHIFT) - 1)
-
+/*
+ * @count is the count of every non-NULL element in the ->slots array
+ * whether that is an exceptional entry, a retry entry, a user pointer,
+ * a sibling entry or a pointer to the next level of the tree.
+ * @exceptional is the count of every element in ->slots which is
+ * either radix_tree_exceptional_entry() or is a sibling entry for an
+ * exceptional entry.
+ */
struct radix_tree_node {
- unsigned char shift; /* Bits remaining in each slot */
- unsigned char offset; /* Slot offset in parent */
- unsigned int count;
+ unsigned char shift; /* Bits remaining in each slot */
+ unsigned char offset; /* Slot offset in parent */
+ unsigned char count; /* Total entry count */
+ unsigned char exceptional; /* Exceptional entry count */
+ struct radix_tree_node *parent; /* Used when ascending tree */
+ void *private_data; /* For tree user */
union {
- struct {
- /* Used when ascending tree */
- struct radix_tree_node *parent;
- /* For tree user */
- void *private_data;
- };
- /* Used when freeing node */
- struct rcu_head rcu_head;
+ struct list_head private_list; /* For tree user */
+ struct rcu_head rcu_head; /* Used when freeing node */
};
- /* For tree user */
- struct list_head private_list;
void __rcu *slots[RADIX_TREE_MAP_SIZE];
unsigned long tags[RADIX_TREE_MAX_TAGS][RADIX_TREE_TAG_LONGS];
};
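
Editor's note, not part of the patch: the new ->exceptional field counts entries that radix_tree_exceptional_entry() recognises. As an illustrative sketch only, this is roughly how a tree user (e.g. the page cache's shadow entries) packs a small value so that it is counted in both ->count and ->exceptional; the helper name is hypothetical:

	/* Sketch: pack @value into an exceptional (non-pointer) entry. */
	static inline void *encode_exceptional(unsigned long value)
	{
		return (void *)((value << RADIX_TREE_EXCEPTIONAL_SHIFT) |
				RADIX_TREE_EXCEPTIONAL_ENTRY);
	}
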
@@ -130,6 +129,41 @@ static inline bool radix_tree_empty(struct radix_tree_root *root)
}
/**
+ * struct radix_tree_iter - radix tree iterator state
+ *
+ * @index: index of current slot
+ * @next_index: one beyond the last index for this chunk
+ * @tags: bit-mask for tag-iterating
+ * @node: node that contains current slot
+ * @shift: shift for the node that holds our slots
+ *
+ * This radix tree iterator works in terms of "chunks" of slots. A chunk is a
+ * subinterval of slots contained within one radix tree leaf node. It is
+ * described by a pointer to its first slot and a struct radix_tree_iter
+ * which holds the chunk's position in the tree and its size. For tagged
+ * iteration radix_tree_iter also holds the slots' bit-mask for one chosen
+ * radix tree tag.
+ */
+struct radix_tree_iter {
+ unsigned long index;
+ unsigned long next_index;
+ unsigned long tags;
+ struct radix_tree_node *node;
+#ifdef CONFIG_RADIX_TREE_MULTIORDER
+ unsigned int shift;
+#endif
+};
+
+static inline unsigned int iter_shift(const struct radix_tree_iter *iter)
+{
+#ifdef CONFIG_RADIX_TREE_MULTIORDER
+ return iter->shift;
+#else
+ return 0;
+#endif
+}
+
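
Editor's note, not part of the patch: struct radix_tree_iter is normally driven through the radix_tree_for_each_*() macros. A minimal sketch of a plain walk, assuming the caller holds rcu_read_lock() and that <linux/radix-tree.h> is included; the function name is hypothetical:

	static void dump_tree(struct radix_tree_root *root)
	{
		struct radix_tree_iter iter;
		void **slot;

		radix_tree_for_each_slot(slot, root, &iter, 0)
			pr_info("index %lu -> %p\n", iter.index,
				radix_tree_deref_slot(slot));
	}
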
+/**
* Radix-tree synchronization
*
* The radix-tree API requires that users provide all synchronisation (with
@@ -248,20 +282,6 @@ static inline int radix_tree_exception(void *arg)
return unlikely((unsigned long)arg & RADIX_TREE_ENTRY_MASK);
}
-/**
- * radix_tree_replace_slot - replace item in a slot
- * @pslot: pointer to slot, returned by radix_tree_lookup_slot
- * @item: new item to store in the slot.
- *
- * For use with radix_tree_lookup_slot(). Caller must hold tree write locked
- * across slot lookup and replacement.
- */
-static inline void radix_tree_replace_slot(void **pslot, void *item)
-{
- BUG_ON(radix_tree_is_internal_node(item));
- rcu_assign_pointer(*pslot, item);
-}
-
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
unsigned order, struct radix_tree_node **nodep,
void ***slotp);
@@ -276,8 +296,19 @@ void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
struct radix_tree_node **nodep, void ***slotp);
void *radix_tree_lookup(struct radix_tree_root *, unsigned long);
void **radix_tree_lookup_slot(struct radix_tree_root *, unsigned long);
-bool __radix_tree_delete_node(struct radix_tree_root *root,
- struct radix_tree_node *node);
+typedef void (*radix_tree_update_node_t)(struct radix_tree_node *, void *);
+void __radix_tree_replace(struct radix_tree_root *root,
+ struct radix_tree_node *node,
+ void **slot, void *item,
+ radix_tree_update_node_t update_node, void *private);
+void radix_tree_iter_replace(struct radix_tree_root *,
+ const struct radix_tree_iter *, void **slot, void *item);
+void radix_tree_replace_slot(struct radix_tree_root *root,
+ void **slot, void *item);
+void __radix_tree_delete_node(struct radix_tree_root *root,
+ struct radix_tree_node *node,
+ radix_tree_update_node_t update_node,
+ void *private);
void *radix_tree_delete_item(struct radix_tree_root *, unsigned long, void *);
void *radix_tree_delete(struct radix_tree_root *, unsigned long);
void radix_tree_clear_tags(struct radix_tree_root *root,
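
Editor's note, not part of the patch: radix_tree_replace_slot() is no longer a static inline and now takes the root, so the replacement path can keep the exceptional-entry accounting (and, via __radix_tree_replace()'s update_node callback, any per-node bookkeeping) up to date. A hedged sketch of the new calling convention, assuming the caller holds the tree lock; the helper name is hypothetical:

	/* Sketch: exchange the entry at @index for @item under the tree lock. */
	static void *exchange_entry(struct radix_tree_root *root,
				    unsigned long index, void *item)
	{
		void **slot = radix_tree_lookup_slot(root, index);
		void *old;

		if (!slot)
			return NULL;
		old = *slot;			/* safe: tree lock held */
		radix_tree_replace_slot(root, slot, item);
		return old;
	}
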
@@ -299,6 +330,8 @@ void *radix_tree_tag_clear(struct radix_tree_root *root,
unsigned long index, unsigned int tag);
int radix_tree_tag_get(struct radix_tree_root *root,
unsigned long index, unsigned int tag);
+void radix_tree_iter_tag_set(struct radix_tree_root *root,
+ const struct radix_tree_iter *iter, unsigned int tag);
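
Editor's note, not part of the patch: radix_tree_iter_tag_set() tags the entry the iterator currently points at, avoiding a second lookup. A sketch, assuming the tree lock is held and using tag 1 arbitrarily:

	struct radix_tree_iter iter;
	void **slot;

	radix_tree_for_each_slot(slot, root, &iter, 0)
		radix_tree_iter_tag_set(root, &iter, 1);
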
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
unsigned long first_index, unsigned int max_items,
@@ -307,50 +340,18 @@ unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
unsigned long first_index, unsigned int max_items,
unsigned int tag);
-unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
- unsigned long *first_indexp, unsigned long last_index,
- unsigned long nr_to_tag,
- unsigned int fromtag, unsigned int totag);
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag);
-unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item);
static inline void radix_tree_preload_end(void)
{
preempt_enable();
}
-/**
- * struct radix_tree_iter - radix tree iterator state
- *
- * @index: index of current slot
- * @next_index: one beyond the last index for this chunk
- * @tags: bit-mask for tag-iterating
- * @shift: shift for the node that holds our slots
- *
- * This radix tree iterator works in terms of "chunks" of slots. A chunk is a
- * subinterval of slots contained within one radix tree leaf node. It is
- * described by a pointer to its first slot and a struct radix_tree_iter
- * which holds the chunk's position in the tree and its size. For tagged
- * iteration radix_tree_iter also holds the slots' bit-mask for one chosen
- * radix tree tag.
- */
-struct radix_tree_iter {
- unsigned long index;
- unsigned long next_index;
- unsigned long tags;
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
- unsigned int shift;
-#endif
-};
-
-static inline unsigned int iter_shift(struct radix_tree_iter *iter)
-{
-#ifdef CONFIG_RADIX_TREE_MULTIORDER
- return iter->shift;
-#else
- return 0;
-#endif
-}
+int radix_tree_split_preload(unsigned old_order, unsigned new_order, gfp_t);
+int radix_tree_split(struct radix_tree_root *, unsigned long index,
+ unsigned new_order);
+int radix_tree_join(struct radix_tree_root *, unsigned long index,
+ unsigned new_order, void *);
#define RADIX_TREE_ITER_TAG_MASK 0x00FF /* tag index in lower byte */
#define RADIX_TREE_ITER_TAGGED 0x0100 /* lookup tagged slots */
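
Editor's note, not part of the patch: radix_tree_split()/radix_tree_join() are new multiorder helpers; the exact semantics live in lib/radix-tree.c. A rough sketch of the intended split sequence, assuming CONFIG_RADIX_TREE_MULTIORDER, the tree lock held, a prior radix_tree_split_preload() where allocation may sleep, and that the entry being split is the only occupant of the iterated range; the helper name is hypothetical:

	static int shrink_to_order_0(struct radix_tree_root *root,
				     unsigned long index, void *item)
	{
		struct radix_tree_iter iter;
		void **slot;
		int err;

		err = radix_tree_split(root, index, 0);
		if (err)
			return err;
		/* Fill every slot uncovered by the split with @item. */
		radix_tree_for_each_slot(slot, root, &iter, index)
			radix_tree_iter_replace(root, &iter, slot, item);
		return 0;
	}
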
@@ -419,20 +420,17 @@ __radix_tree_iter_add(struct radix_tree_iter *iter, unsigned long slots)
}
/**
- * radix_tree_iter_next - resume iterating when the chunk may be invalid
- * @iter: iterator state
+ * radix_tree_iter_resume - resume iterating when the chunk may be invalid
+ * @slot: pointer to current slot
+ * @iter: iterator state
+ * Returns: New slot pointer
*
* If the iterator needs to release then reacquire a lock, the chunk may
* have been invalidated by an insertion or deletion. Call this function
- * to continue the iteration from the next index.
+ * before releasing the lock to continue the iteration from the next index.
*/
-static inline __must_check
-void **radix_tree_iter_next(struct radix_tree_iter *iter)
-{
- iter->next_index = __radix_tree_iter_add(iter, 1);
- iter->tags = 0;
- return NULL;
-}
+void **__must_check radix_tree_iter_resume(void **slot,
+ struct radix_tree_iter *iter);
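
Editor's note, not part of the patch: a sketch of the lock-drop pattern the renamed helper serves. The spinlock name is hypothetical; the point is that radix_tree_iter_resume() is called before the lock is released, and its return value must be fed back into the loop:

	struct radix_tree_iter iter;
	void **slot;

	spin_lock_irq(&my_tree_lock);
	radix_tree_for_each_slot(slot, root, &iter, 0) {
		/* ... work on *slot ... */
		if (need_resched()) {
			slot = radix_tree_iter_resume(slot, &iter);
			spin_unlock_irq(&my_tree_lock);
			cond_resched();
			spin_lock_irq(&my_tree_lock);
		}
	}
	spin_unlock_irq(&my_tree_lock);
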
/**
* radix_tree_chunk_size - get current chunk size
@@ -446,10 +444,17 @@ radix_tree_chunk_size(struct radix_tree_iter *iter)
return (iter->next_index - iter->index) >> iter_shift(iter);
}
-static inline struct radix_tree_node *entry_to_node(void *ptr)
+#ifdef CONFIG_RADIX_TREE_MULTIORDER
+void ** __radix_tree_next_slot(void **slot, struct radix_tree_iter *iter,
+ unsigned flags);
+#else
+/* Can't happen without sibling entries, but the compiler can't tell that */
+static inline void ** __radix_tree_next_slot(void **slot,
+ struct radix_tree_iter *iter, unsigned flags)
{
- return (void *)((unsigned long)ptr & ~RADIX_TREE_INTERNAL_NODE);
+ return slot;
}
+#endif
/**
* radix_tree_next_slot - find next slot in chunk
@@ -463,7 +468,7 @@ static inline struct radix_tree_node *entry_to_node(void *ptr)
* For tagged lookup it also eats @iter->tags.
*
* There are several cases where 'slot' can be passed in as NULL to this
- * function. These cases result from the use of radix_tree_iter_next() or
+ * function. These cases result from the use of radix_tree_iter_resume() or
* radix_tree_iter_retry(). In these cases we don't end up dereferencing
* 'slot' because either:
* a) we are doing tagged iteration and iter->tags has been set to 0, or
@@ -474,51 +479,31 @@ static __always_inline void **
radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags)
{
if (flags & RADIX_TREE_ITER_TAGGED) {
- void *canon = slot;
-
iter->tags >>= 1;
if (unlikely(!iter->tags))
return NULL;
- while (IS_ENABLED(CONFIG_RADIX_TREE_MULTIORDER) &&
- radix_tree_is_internal_node(slot[1])) {
- if (entry_to_node(slot[1]) == canon) {
- iter->tags >>= 1;
- iter->index = __radix_tree_iter_add(iter, 1);
- slot++;
- continue;
- }
- iter->next_index = __radix_tree_iter_add(iter, 1);
- return NULL;
- }
if (likely(iter->tags & 1ul)) {
iter->index = __radix_tree_iter_add(iter, 1);
- return slot + 1;
+ slot++;
+ goto found;
}
if (!(flags & RADIX_TREE_ITER_CONTIG)) {
unsigned offset = __ffs(iter->tags);
- iter->tags >>= offset;
- iter->index = __radix_tree_iter_add(iter, offset + 1);
- return slot + offset + 1;
+ iter->tags >>= offset++;
+ iter->index = __radix_tree_iter_add(iter, offset);
+ slot += offset;
+ goto found;
}
} else {
long count = radix_tree_chunk_size(iter);
- void *canon = slot;
while (--count > 0) {
slot++;
iter->index = __radix_tree_iter_add(iter, 1);
- if (IS_ENABLED(CONFIG_RADIX_TREE_MULTIORDER) &&
- radix_tree_is_internal_node(*slot)) {
- if (entry_to_node(*slot) == canon)
- continue;
- iter->next_index = iter->index;
- break;
- }
-
if (likely(*slot))
- return slot;
+ goto found;
if (flags & RADIX_TREE_ITER_CONTIG) {
/* forbid switching to the next chunk */
iter->next_index = 0;
@@ -527,6 +512,11 @@ radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags)
}
}
return NULL;
+
+ found:
+ if (unlikely(radix_tree_is_internal_node(*slot)))
+ return __radix_tree_next_slot(slot, iter, flags);
+ return slot;
}
/**
@@ -577,6 +567,6 @@ radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags)
slot || (slot = radix_tree_next_chunk(root, iter, \
RADIX_TREE_ITER_TAGGED | tag)) ; \
slot = radix_tree_next_slot(slot, iter, \
- RADIX_TREE_ITER_TAGGED))
+ RADIX_TREE_ITER_TAGGED | tag))
#endif /* _LINUX_RADIX_TREE_H */
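
Editor's note, not part of the patch: for reference, a sketch of the tagged-iteration loop the corrected macro drives. Tag 0 stands in for a real tag such as PAGECACHE_TAG_DIRTY, and the caller is assumed to hold rcu_read_lock():

	struct radix_tree_iter iter;
	void **slot;

	radix_tree_for_each_tagged(slot, root, &iter, 0, 0)
		pr_info("tagged index %lu -> %p\n", iter.index,
			radix_tree_deref_slot(slot));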