if ((*p & mask) != mask) {
                        *p += 1ULL << offset;
-                       EBUG_ON(bkey_cmp_packed(b, out, &k) <= 0);
+                       EBUG_ON(bch2_bkey_cmp_packed(b, out, &k) <= 0);
                        return true;
                }
 
 }
 
 __pure __flatten
-int __bch2_bkey_cmp_packed(const struct bkey_packed *l,
-                          const struct bkey_packed *r,
-                          const struct btree *b)
+int bch2_bkey_cmp_packed(const struct btree *b,
+                        const struct bkey_packed *l,
+                        const struct bkey_packed *r)
 {
        struct bkey unpacked;
 
 
 #define bkey_whiteout(_k)                              \
        ((_k)->type == KEY_TYPE_deleted || (_k)->type == KEY_TYPE_discard)
 
-#define bkey_packed_typecheck(_k)                                      \
-({                                                                     \
-       BUILD_BUG_ON(!type_is(_k, struct bkey *) &&                     \
-                    !type_is(_k, struct bkey_packed *));               \
-       type_is(_k, struct bkey_packed *);                              \
-})
-
 enum bkey_lr_packed {
        BKEY_PACKED_BOTH,
        BKEY_PACKED_RIGHT,
        BKEY_PACKED_LEFT,
        BKEY_PACKED_NONE,
 };
 
-#define bkey_lr_packed_typecheck(_l, _r)                               \
-       (!bkey_packed_typecheck(_l) + ((!bkey_packed_typecheck(_r)) << 1))
-
 #define bkey_lr_packed(_l, _r)                                         \
        ((_l)->format + ((_r)->format << 1))
 
                                          const struct bpos *);
 
 __pure
-int __bch2_bkey_cmp_packed(const struct bkey_packed *,
-                          const struct bkey_packed *,
-                          const struct btree *);
+int bch2_bkey_cmp_packed(const struct btree *,
+                        const struct bkey_packed *,
+                        const struct bkey_packed *);
 
 __pure
 int __bch2_bkey_cmp_left_packed(const struct btree *,
        return bkey_cmp_left_packed(b, l, &r);
 }
 
-/*
- * If @_l or @_r are struct bkey * (not bkey_packed *), uses type information to
- * skip dispatching on k->format:
- */
-#define bkey_cmp_packed(_b, _l, _r)                                    \
-({                                                                     \
-       int _cmp;                                                       \
-                                                                       \
-       switch (bkey_lr_packed_typecheck(_l, _r)) {                     \
-       case BKEY_PACKED_NONE:                                          \
-               _cmp = bkey_cmp(((struct bkey *) (_l))->p,              \
-                               ((struct bkey *) (_r))->p);             \
-               break;                                                  \
-       case BKEY_PACKED_LEFT:                                          \
-               _cmp = bkey_cmp_left_packed((_b),                       \
-                                 (struct bkey_packed *) (_l),          \
-                                 &((struct bkey *) (_r))->p);          \
-               break;                                                  \
-       case BKEY_PACKED_RIGHT:                                         \
-               _cmp = -bkey_cmp_left_packed((_b),                      \
-                                 (struct bkey_packed *) (_r),          \
-                                 &((struct bkey *) (_l))->p);          \
-               break;                                                  \
-       case BKEY_PACKED_BOTH:                                          \
-               _cmp = __bch2_bkey_cmp_packed((void *) (_l),            \
-                                        (void *) (_r), (_b));          \
-               break;                                                  \
-       }                                                               \
-       _cmp;                                                           \
-})
-
 #if 1
 static __always_inline int bkey_cmp(struct bpos l, struct bpos r)
 {
 
                                               struct bkey_packed *l,
                                               struct bkey_packed *r)
 {
-       return bkey_cmp_packed(b, l, r) ?:
+       return bch2_bkey_cmp_packed(b, l, r) ?:
                cmp_int((unsigned long) l, (unsigned long) r);
 }
 
         * and should be dropped.
         */
        return iter->used >= 2 &&
-               !bkey_cmp_packed(iter->b,
+               !bch2_bkey_cmp_packed(iter->b,
                                 iter->data[0].k,
                                 iter->data[1].k);
 }
                                struct bkey_packed *l,
                                struct bkey_packed *r)
 {
-       return bkey_cmp_packed(b, l, r) ?:
+       return bch2_bkey_cmp_packed(b, l, r) ?:
                (int) bkey_deleted(r) - (int) bkey_deleted(l) ?:
                (int) l->needs_whiteout - (int) r->needs_whiteout;
 }
                        continue;
 
                while ((next = sort_iter_peek(iter)) &&
-                      !bkey_cmp_packed(iter->b, in, next)) {
+                      !bch2_bkey_cmp_packed(iter->b, in, next)) {
                        BUG_ON(in->needs_whiteout &&
                               next->needs_whiteout);
                        needs_whiteout |= in->needs_whiteout;
                                   struct bkey_packed *l,
                                   struct bkey_packed *r)
 {
-       return bkey_cmp_packed(b, l, r) ?:
+       return bch2_bkey_cmp_packed(b, l, r) ?:
                (int) bkey_deleted(l) - (int) bkey_deleted(r);
 }
 
 
                                const struct bkey_packed *l,
                                const struct bkey_packed *r)
 {
-       return bkey_cmp_packed(b, l, r)
+       return bch2_bkey_cmp_packed(b, l, r)
                ?: (int) bkey_deleted(r) - (int) bkey_deleted(l)
                ?: cmp_int(l, r);
 }
 
                BUG_ON(extents
                       ? bkey_cmp(l.p, bkey_start_pos(&r)) > 0
                       : bkey_cmp(l.p, bkey_start_pos(&r)) >= 0);
-               //BUG_ON(bkey_cmp_packed(&b->format, p, k) >= 0);
+               //BUG_ON(bch2_bkey_cmp_packed(b, p, k) >= 0);
        }
 #endif
 }
                        break;
 
                for (b = a; c = 2 * b + 1, (d = c + 1) < n;)
-                       b = bkey_cmp_packed(bt,
+                       b = bch2_bkey_cmp_packed(bt,
                                            ptrs[c],
                                            ptrs[d]) >= 0 ? c : d;
                if (d == n)
                        b = c;
 
                while (b != a &&
-                      bkey_cmp_packed(bt,
+                      bch2_bkey_cmp_packed(bt,
                                       ptrs[a],
                                       ptrs[b]) >= 0)
                        b = (b - 1) / 2;
 
         * the node the iterator points to:
         */
        while ((k = bch2_btree_node_iter_prev_all(&node_iter, b)) &&
-              (bkey_cmp_packed(b, k, &insert->k) >= 0))
+              (bkey_cmp_left_packed(b, k, &insert->k.p) >= 0))
                ;
 
        for_each_keylist_key(keys, insert)
 
        EBUG_ON(iter->flags & BTREE_ITER_IS_EXTENTS);
 
        k = bch2_btree_node_iter_peek_all(node_iter, b);
-       if (k && bkey_cmp_packed(b, k, &insert->k))
+       if (k && bkey_cmp_left_packed(b, k, &insert->k.p))
                k = NULL;
 
        /* @k is the key being overwritten/deleted, if any: */