e.nr++;
} else {
if (e.nr)
- heap_add_or_replace(&ca->alloc_heap, e, -bucket_alloc_cmp);
+ heap_add_or_replace(&ca->alloc_heap, e,
+ -bucket_alloc_cmp, NULL);
e = (struct alloc_heap_entry) {
.bucket = b,
};
if (e.nr)
- heap_add_or_replace(&ca->alloc_heap, e, -bucket_alloc_cmp);
+ heap_add_or_replace(&ca->alloc_heap, e,
+ -bucket_alloc_cmp, NULL);
for (i = 0; i < ca->alloc_heap.used; i++)
nr += ca->alloc_heap.data[i].nr;
while (nr - ca->alloc_heap.data[0].nr >= ALLOC_SCAN_BATCH(ca)) {
nr -= ca->alloc_heap.data[0].nr;
- heap_pop(&ca->alloc_heap, e, -bucket_alloc_cmp);
+ heap_pop(&ca->alloc_heap, e, -bucket_alloc_cmp, NULL);
}
up_read(&ca->bucket_lock);
if (bch2_can_invalidate_bucket(ca, b, m)) {
struct alloc_heap_entry e = { .bucket = b, .nr = 1, };
- heap_add(&ca->alloc_heap, e, bucket_alloc_cmp);
+ heap_add(&ca->alloc_heap, e, bucket_alloc_cmp, NULL);
if (heap_full(&ca->alloc_heap))
break;
}
if (bch2_can_invalidate_bucket(ca, b, m)) {
struct alloc_heap_entry e = { .bucket = b, .nr = 1, };
- heap_add(&ca->alloc_heap, e, bucket_alloc_cmp);
+ heap_add(&ca->alloc_heap, e, bucket_alloc_cmp, NULL);
if (heap_full(&ca->alloc_heap))
break;
}
break;
}
- heap_resort(&ca->alloc_heap, bucket_alloc_cmp);
+ heap_resort(&ca->alloc_heap, bucket_alloc_cmp, NULL);
for (i = 0; i < ca->alloc_heap.used; i++)
nr += ca->alloc_heap.data[i].nr;
return b;
}
- heap_pop(&ca->alloc_heap, e, bucket_alloc_cmp);
+ heap_pop(&ca->alloc_heap, e, bucket_alloc_cmp, NULL);
}
return -1;
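A note on the negated comparison in these call sites: the cmp argument is expanded textually inside the heap macros, so passing -bucket_alloc_cmp flips the sign of the three-way comparison and hence the heap ordering, with no separate reversed comparison function needed. An illustrative expansion (not part of the patch):

	/*
	 * With cmp == -bucket_alloc_cmp, the test inside heap_sift_down()
	 *
	 *	cmp(h, (h)->data[_c], (h)->data[_j]) >= 0
	 *
	 * expands to
	 *
	 *	-bucket_alloc_cmp(h, (h)->data[_c], (h)->data[_j]) >= 0
	 *
	 * i.e. the same macro body yields the mirror-image ordering.
	 */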
__btree_node_key_to_offset(b, end)
});
- __heap_add(iter, n, btree_node_iter_cmp_heap);
+ __heap_add(iter, n, btree_node_iter_cmp_heap, NULL);
}
}
EBUG_ON(iter->data->k > iter->data->end);
if (iter->data->k == iter->data->end)
- heap_del(iter, 0, btree_node_iter_cmp_heap);
+ heap_del(iter, 0, btree_node_iter_cmp_heap, NULL);
else
- heap_sift_down(iter, 0, btree_node_iter_cmp_heap);
+ heap_sift_down(iter, 0, btree_node_iter_cmp_heap, NULL);
}
static void verify_no_dups(struct btree *b,
if (clock->timers.data[i] == timer)
goto out;
- BUG_ON(!heap_add(&clock->timers, timer, io_timer_cmp));
+ BUG_ON(!heap_add(&clock->timers, timer, io_timer_cmp, NULL));
out:
spin_unlock(&clock->timer_lock);
}
for (i = 0; i < clock->timers.used; i++)
if (clock->timers.data[i] == timer) {
- heap_del(&clock->timers, i, io_timer_cmp);
+ heap_del(&clock->timers, i, io_timer_cmp, NULL);
break;
}
if (clock->timers.used &&
time_after_eq(now, clock->timers.data[0]->expire))
- heap_pop(&clock->timers, ret, io_timer_cmp);
+ heap_pop(&clock->timers, ret, io_timer_cmp, NULL);
spin_unlock(&clock->timer_lock);
memset(&nr, 0, sizeof(nr));
- heap_resort(iter, key_sort_cmp);
+ heap_resort(iter, key_sort_cmp, NULL);
while (!bch2_btree_node_iter_large_end(iter)) {
if (!should_drop_next_key(iter, b)) {
}
sort_key_next(iter, b, iter->data);
- heap_sift_down(iter, 0, key_sort_cmp);
+ heap_sift_down(iter, 0, key_sort_cmp, NULL);
}
dst->u64s = cpu_to_le16((u64 *) out - dst->_data);
static inline void extent_sort_sift(struct btree_node_iter_large *iter,
struct btree *b, size_t i)
{
- heap_sift_down(iter, i, extent_sort_cmp);
+ heap_sift_down(iter, i, extent_sort_cmp, NULL);
}
static inline void extent_sort_next(struct btree_node_iter_large *iter,
struct btree_node_iter_set *i)
{
sort_key_next(iter, b, i);
- heap_sift_down(iter, i - iter->data, extent_sort_cmp);
+ heap_sift_down(iter, i - iter->data, extent_sort_cmp, NULL);
}
static void extent_sort_append(struct bch_fs *c,
memset(&nr, 0, sizeof(nr));
- heap_resort(iter, extent_sort_cmp);
+ heap_resort(iter, extent_sort_cmp, NULL);
while (!bch2_btree_node_iter_large_end(iter)) {
lk = __btree_node_offset_to_key(b, _l->k);
.sectors = bucket_sectors_used(m),
.offset = bucket_to_sector(ca, b),
};
- heap_add_or_replace(h, e, -sectors_used_cmp);
+ heap_add_or_replace(h, e, -sectors_used_cmp, NULL);
}
up_read(&ca->bucket_lock);
up_read(&c->gc_lock);
sectors_to_move += i->sectors;
while (sectors_to_move > COPYGC_SECTORS_PER_ITER(ca)) {
- BUG_ON(!heap_pop(h, e, -sectors_used_cmp));
+ BUG_ON(!heap_pop(h, e, -sectors_used_cmp, NULL));
sectors_to_move -= e.sectors;
}
(heap)->data = NULL; \
} while (0)
-#define heap_swap(h, i, j) swap((h)->data[i], (h)->data[j])
+#define heap_set_backpointer(h, i, _fn) \
+do { \
+ void (*fn)(typeof(h), size_t) = _fn; \
+ if (fn) \
+ fn(h, i); \
+} while (0)
+
+#define heap_swap(h, i, j, set_backpointer) \
+do { \
+ swap((h)->data[i], (h)->data[j]); \
+ heap_set_backpointer(h, i, set_backpointer); \
+ heap_set_backpointer(h, j, set_backpointer); \
+} while (0)
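For illustration, a minimal sketch of the callback shape the new set_backpointer parameter expects, assuming the HEAP() declaration macro from this same header; the example_obj/example_heap/example_* names are hypothetical, not part of the patch. Since heap_swap() fires the callback for both slots, each element's recorded position stays current through every sift:

	/* Hypothetical element that records its own position in the heap. */
	struct example_obj {
		u64	weight;
		size_t	heap_idx;	/* kept up to date by the callback below */
	};

	typedef HEAP(struct example_obj *) example_heap;

	static void example_set_backpointer(example_heap *h, size_t i)
	{
		/* Invoked with the heap and a slot index after every move. */
		h->data[i]->heap_idx = i;
	}

	static inline int example_cmp(example_heap *h,
				      struct example_obj *l,
				      struct example_obj *r)
	{
		/* Three-way result, matching the cmp(h, l, r) >= 0 tests. */
		return (l->weight > r->weight) - (l->weight < r->weight);
	}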
#define heap_peek(h) \
({ \
#define heap_full(h) ((h)->used == (h)->size)
-#define heap_sift_down(h, i, cmp) \
+#define heap_sift_down(h, i, cmp, set_backpointer) \
do { \
size_t _c, _j = i; \
\
\
if (cmp(h, (h)->data[_c], (h)->data[_j]) >= 0) \
break; \
- heap_swap(h, _c, _j); \
+ heap_swap(h, _c, _j, set_backpointer); \
} \
} while (0)
-#define heap_sift_up(h, i, cmp) \
+#define heap_sift_up(h, i, cmp, set_backpointer) \
do { \
while (i) { \
size_t p = (i - 1) / 2; \
if (cmp(h, (h)->data[i], (h)->data[p]) >= 0) \
break; \
- heap_swap(h, i, p); \
+ heap_swap(h, i, p, set_backpointer); \
i = p; \
} \
} while (0)
-#define __heap_add(h, d, cmp) \
-do { \
+#define __heap_add(h, d, cmp, set_backpointer) \
+({ \
size_t _i = (h)->used++; \
(h)->data[_i] = d; \
+ heap_set_backpointer(h, _i, set_backpointer); \
\
- heap_sift_up(h, _i, cmp); \
-} while (0)
+ heap_sift_up(h, _i, cmp, set_backpointer); \
+ _i; \
+})
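Note also that the do/while-to-statement-expression conversion makes __heap_add() evaluate to the element's final slot: heap_sift_up() updates _i in place as the element moves toward the root. Continuing the hypothetical sketch above:

	/* _i tracks the element through the sift, so idx is its resting slot. */
	size_t idx = __heap_add(&heap, obj, example_cmp, example_set_backpointer);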
-#define heap_add(h, d, cmp) \
+#define heap_add(h, d, cmp, set_backpointer) \
({ \
bool _r = !heap_full(h); \
if (_r) \
- __heap_add(h, d, cmp); \
+ __heap_add(h, d, cmp, set_backpointer); \
_r; \
})
-#define heap_add_or_replace(h, new, cmp) \
+#define heap_add_or_replace(h, new, cmp, set_backpointer) \
do { \
- if (!heap_add(h, new, cmp) && \
+ if (!heap_add(h, new, cmp, set_backpointer) && \
cmp(h, new, heap_peek(h)) >= 0) { \
(h)->data[0] = new; \
- heap_sift_down(h, 0, cmp); \
+ heap_set_backpointer(h, 0, set_backpointer); \
+ heap_sift_down(h, 0, cmp, set_backpointer); \
} \
} while (0)
-#define heap_del(h, i, cmp) \
+#define heap_del(h, i, cmp, set_backpointer) \
do { \
size_t _i = (i); \
\
BUG_ON(_i >= (h)->used); \
(h)->used--; \
- heap_swap(h, _i, (h)->used); \
- heap_sift_up(h, _i, cmp); \
- heap_sift_down(h, _i, cmp); \
+ heap_swap(h, _i, (h)->used, set_backpointer); \
+ heap_sift_up(h, _i, cmp, set_backpointer); \
+ heap_sift_down(h, _i, cmp, set_backpointer); \
} while (0)
-#define heap_pop(h, d, cmp) \
+#define heap_pop(h, d, cmp, set_backpointer) \
({ \
bool _r = (h)->used; \
if (_r) { \
(d) = (h)->data[0]; \
- heap_del(h, 0, cmp); \
+ heap_del(h, 0, cmp, set_backpointer); \
} \
_r; \
})
-#define heap_resort(heap, cmp) \
+#define heap_resort(heap, cmp, set_backpointer) \
do { \
ssize_t _i; \
for (_i = (ssize_t) (heap)->used / 2 - 1; _i >= 0; --_i) \
- heap_sift_down(heap, _i, cmp); \
+ heap_sift_down(heap, _i, cmp, set_backpointer); \
} while (0)
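Taken together, the reworked macros let an element be deleted by its recorded index rather than by scanning, which is the facility the new parameter enables; callers with no such need simply pass NULL, as every updated call site above does. A hedged usage sketch with the same hypothetical names:

	static bool example_insert(example_heap *h, struct example_obj *obj)
	{
		/* false if the heap was already full */
		return heap_add(h, obj, example_cmp, example_set_backpointer);
	}

	static void example_remove(example_heap *h, struct example_obj *obj)
	{
		/* obj->heap_idx was kept current by every swap, so no search */
		heap_del(h, obj->heap_idx, example_cmp, example_set_backpointer);
	}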
#define ANYSINT_MAX(t) \