unsigned long index, unsigned int tag);
 int radix_tree_tag_get(const struct radix_tree_root *,
                        unsigned long index, unsigned int tag);
-void radix_tree_iter_tag_set(struct radix_tree_root *,
-               const struct radix_tree_iter *iter, unsigned int tag);
 void radix_tree_iter_tag_clear(struct radix_tree_root *,
                const struct radix_tree_iter *iter, unsigned int tag);
 unsigned int radix_tree_gang_lookup_tag(const struct radix_tree_root *,
 
 }
 EXPORT_SYMBOL(radix_tree_tag_set);
 
-/**
- * radix_tree_iter_tag_set - set a tag on the current iterator entry
- * @root:      radix tree root
- * @iter:      iterator state
- * @tag:       tag to set
- */
-void radix_tree_iter_tag_set(struct radix_tree_root *root,
-                       const struct radix_tree_iter *iter, unsigned int tag)
-{
-       node_tag_set(root, iter->node, tag, iter_offset(iter));
-}
-
 static void node_tag_clear(struct radix_tree_root *root,
                                struct radix_tree_node *node,
                                unsigned int tag, unsigned int offset)
 
 
 #define NUM_THREADS    5
 #define MAX_IDX                100
-#define TAG            0
-#define NEW_TAG                1
+#define TAG            XA_MARK_0
+#define NEW_TAG                XA_MARK_1
 
-static pthread_mutex_t tree_lock = PTHREAD_MUTEX_INITIALIZER;
 static pthread_t threads[NUM_THREADS];
 static unsigned int seeds[3];
 static RADIX_TREE(tree, GFP_KERNEL);
                int order;
 
                for (pgoff = 0; pgoff < MAX_IDX; pgoff++) {
-                       pthread_mutex_lock(&tree_lock);
+                       xa_lock(&tree);
                        for (order = max_order; order >= 0; order--) {
                                if (item_insert_order(&tree, pgoff, order)
                                                == 0) {
                                        break;
                                }
                        }
-                       pthread_mutex_unlock(&tree_lock);
+                       xa_unlock(&tree);
                }
        }
 
 
                pgoff = rand_r(&seeds[2]) % MAX_IDX;
 
-               pthread_mutex_lock(&tree_lock);
+               xa_lock(&tree);
                item_delete(&tree, pgoff);
-               pthread_mutex_unlock(&tree_lock);
+               xa_unlock(&tree);
        }
 
        rcu_unregister_thread();
        rcu_register_thread();
 
        while (!test_complete) {
-               tag_tagged_items(&tree, &tree_lock, 0, MAX_IDX, 10, TAG,
-                                       NEW_TAG);
+               tag_tagged_items(&tree, 0, MAX_IDX, 10, TAG, NEW_TAG);
        }
        rcu_unregister_thread();
        return NULL;
 
        }
 
 //     printf("\ncopying tags...\n");
-       tagged = tag_tagged_items(&tree, NULL, start, end, ITEMS, 0, 1);
+       tagged = tag_tagged_items(&tree, start, end, ITEMS, XA_MARK_0, XA_MARK_1);
 
 //     printf("checking copied tags\n");
        assert(tagged == count);
        /* Copy tags in several rounds */
 //     printf("\ncopying tags...\n");
        tmp = rand() % (count / 10 + 2);
-       tagged = tag_tagged_items(&tree, NULL, start, end, tmp, 0, 2);
+       tagged = tag_tagged_items(&tree, start, end, tmp, XA_MARK_0, XA_MARK_2);
        assert(tagged == count);
 
 //     printf("%lu %lu %lu\n", tagged, tmp, count);
 
                assert(!radix_tree_tag_get(&tree, i, 1));
        }
 
-       assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 1);
+       assert(tag_tagged_items(&tree, 0, ~0UL, 10, XA_MARK_0, XA_MARK_1) == 1);
        assert(radix_tree_tag_clear(&tree, index, 0));
 
        for_each_index(i, base, order) {
        assert(radix_tree_tag_set(&tree, 0, 0));
        assert(radix_tree_tag_set(&tree, index2, 0));
 
-       assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 2);
+       assert(tag_tagged_items(&tree, 0, ~0UL, 10, XA_MARK_0, XA_MARK_1) == 2);
 
        item_kill_tree(&tree);
 }
                }
        }
 
-       assert(tag_tagged_items(&tree, NULL, 0, ~0UL, TAG_ENTRIES, 1, 2) ==
-                               TAG_ENTRIES);
+       assert(tag_tagged_items(&tree, 0, ~0UL, TAG_ENTRIES, XA_MARK_1,
+                               XA_MARK_2) == TAG_ENTRIES);
 
        for (j = 0; j < 256; j++) {
                int mask, k;
                }
        }
 
-       assert(tag_tagged_items(&tree, NULL, 1, ~0UL, MT_NUM_ENTRIES * 2, 1, 0)
-                       == TAG_ENTRIES);
+       assert(tag_tagged_items(&tree, 1, ~0UL, MT_NUM_ENTRIES * 2, XA_MARK_1,
+                               XA_MARK_0) == TAG_ENTRIES);
        i = 0;
        radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) {
                assert(iter.index == tag_index[i]);
 
 #include "regression.h"
 
 static RADIX_TREE(mt_tree, GFP_KERNEL);
-static pthread_mutex_t mt_lock = PTHREAD_MUTEX_INITIALIZER;
 
 struct page {
        pthread_mutex_t lock;
                        struct page *p;
 
                        p = page_alloc(0);
-                       pthread_mutex_lock(&mt_lock);
+                       xa_lock(&mt_tree);
                        radix_tree_insert(&mt_tree, 0, p);
-                       pthread_mutex_unlock(&mt_lock);
+                       xa_unlock(&mt_tree);
 
                        p = page_alloc(1);
-                       pthread_mutex_lock(&mt_lock);
+                       xa_lock(&mt_tree);
                        radix_tree_insert(&mt_tree, 1, p);
-                       pthread_mutex_unlock(&mt_lock);
+                       xa_unlock(&mt_tree);
 
-                       pthread_mutex_lock(&mt_lock);
+                       xa_lock(&mt_tree);
                        p = radix_tree_delete(&mt_tree, 1);
                        pthread_mutex_lock(&p->lock);
                        p->count--;
                        pthread_mutex_unlock(&p->lock);
-                       pthread_mutex_unlock(&mt_lock);
+                       xa_unlock(&mt_tree);
                        page_free(p);
 
-                       pthread_mutex_lock(&mt_lock);
+                       xa_lock(&mt_tree);
                        p = radix_tree_delete(&mt_tree, 0);
                        pthread_mutex_lock(&p->lock);
                        p->count--;
                        pthread_mutex_unlock(&p->lock);
-                       pthread_mutex_unlock(&mt_lock);
+                       xa_unlock(&mt_tree);
                        page_free(p);
                }
        } else {
 
 #include "regression.h"
 #include "test.h"
 
-#define PAGECACHE_TAG_DIRTY     0
-#define PAGECACHE_TAG_WRITEBACK 1
-#define PAGECACHE_TAG_TOWRITE   2
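+/* The page cache tags used by this test are XArray marks */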
+#define PAGECACHE_TAG_DIRTY     XA_MARK_0
+#define PAGECACHE_TAG_WRITEBACK XA_MARK_1
+#define PAGECACHE_TAG_TOWRITE   XA_MARK_2
 
 static RADIX_TREE(mt_tree, GFP_KERNEL);
 unsigned long page_count = 0;
        /* 1. */
        start = 0;
        end = max_slots - 2;
-       tag_tagged_items(&mt_tree, NULL, start, end, 1,
+       tag_tagged_items(&mt_tree, start, end, 1,
                                PAGECACHE_TAG_DIRTY, PAGECACHE_TAG_TOWRITE);
 
        /* 2. */
 
        item_tag_set(tree, index, tag);
        ret = item_tag_get(tree, index, tag);
        assert(ret != 0);
-       ret = tag_tagged_items(tree, NULL, first, ~0UL, 10, tag, !tag);
+       ret = tag_tagged_items(tree, first, ~0UL, 10, tag, !tag);
        assert(ret == 1);
        ret = item_tag_get(tree, index, !tag);
        assert(ret != 0);
        assert(ret == 0);
        verify_tag_consistency(&tree, 0);
        verify_tag_consistency(&tree, 1);
-       ret = tag_tagged_items(&tree, NULL, first, 10, 10, 0, 1);
+       ret = tag_tagged_items(&tree, first, 10, 10, XA_MARK_0, XA_MARK_1);
        assert(ret == 1);
        ret = radix_tree_gang_lookup_tag(&tree, (void **)items, 0, BATCH, 1);
        assert(ret == 1);
 
 }
 
 /* Use the same pattern as tag_pages_for_writeback() in mm/page-writeback.c */
-int tag_tagged_items(struct radix_tree_root *root, pthread_mutex_t *lock,
-                       unsigned long start, unsigned long end, unsigned batch,
-                       unsigned iftag, unsigned thentag)
+int tag_tagged_items(struct xarray *xa, unsigned long start, unsigned long end,
+               unsigned batch, xa_mark_t iftag, xa_mark_t thentag)
 {
-       unsigned long tagged = 0;
-       struct radix_tree_iter iter;
-       void **slot;
+       XA_STATE(xas, xa, start);
+       unsigned int tagged = 0;
+       struct item *item;
 
        if (batch == 0)
                batch = 1;
 
-       if (lock)
-               pthread_mutex_lock(lock);
-       radix_tree_for_each_tagged(slot, root, &iter, start, iftag) {
-               if (iter.index > end)
-                       break;
-               radix_tree_iter_tag_set(root, &iter, thentag);
-               tagged++;
-               if ((tagged % batch) != 0)
+       xas_lock_irq(&xas);
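+       /* Walk entries in [start, end] that have iftag set and add thentag */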
+       xas_for_each_marked(&xas, item, end, iftag) {
+               xas_set_mark(&xas, thentag);
+               if (++tagged % batch)
                        continue;
-               slot = radix_tree_iter_resume(slot, &iter);
-               if (lock) {
-                       pthread_mutex_unlock(lock);
-                       rcu_barrier();
-                       pthread_mutex_lock(lock);
-               }
+
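+               /*
+                * Every 'batch' tagged entries, pause the walk, drop the
+                * lock and flush pending RCU callbacks so that concurrent
+                * threads can make progress before we resume.
+                */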
+               xas_pause(&xas);
+               xas_unlock_irq(&xas);
+               rcu_barrier();
+               xas_lock_irq(&xas);
        }
-       if (lock)
-               pthread_mutex_unlock(lock);
+       xas_unlock_irq(&xas);
 
        return tagged;
 }
 
                        unsigned long nr, int chunk);
 void item_kill_tree(struct radix_tree_root *root);
 
-int tag_tagged_items(struct radix_tree_root *, pthread_mutex_t *,
-                       unsigned long start, unsigned long end, unsigned batch,
-                       unsigned iftag, unsigned thentag);
+int tag_tagged_items(struct xarray *, unsigned long start, unsigned long end,
+               unsigned batch, xa_mark_t iftag, xa_mark_t thentag);
 
 void xarray_tests(void);
 void tag_check(void);