cxl_insert_extent_to_extent_list(extent_list, dpa, len, NULL, 0);
ct3d->dc.total_extent_count += 1;
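+ /* Mark the accepted extent's DPA range as backed so reads/writes can hit it */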
+ ct3_set_region_block_backed(ct3d, dpa, len);
}
/* Remove the first extent group in the pending list */
cxl_extent_group_list_delete_front(&ct3d->dc.extents_pending);
* list and update the extent count;
*/
QTAILQ_FOREACH_SAFE(ent, &ct3d->dc.extents, node, ent_next) {
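+ /* Clear backing state for every extent removed from the accepted list */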
+ ct3_clear_region_block_backed(ct3d, ent->start_dpa, ent->len);
cxl_remove_extent_from_extent_list(&ct3d->dc.extents, ent);
}
copy_extent_list(&ct3d->dc.extents, &updated_list);
QTAILQ_FOREACH_SAFE(ent, &updated_list, node, ent_next) {
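+ /* Re-mark the extents that survive the release as backed */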
+ ct3_set_region_block_backed(ct3d, ent->start_dpa, ent->len);
cxl_remove_extent_from_extent_list(&updated_list, ent);
}
ct3d->dc.total_extent_count = updated_list_size;
.flags = 0,
};
ct3d->dc.total_capacity += region->len;
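+ /* One bit per block tracks which blocks of this region are backed by extents */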
+ region->blk_bitmap = bitmap_new(region->len / region->block_size);
}
QTAILQ_INIT(&ct3d->dc.extents);
QTAILQ_INIT(&ct3d->dc.extents_pending);
{
CXLDCExtent *ent, *ent_next;
CXLDCExtentGroup *group, *group_next;
+ int i;
+ CXLDCRegion *region;
QTAILQ_FOREACH_SAFE(ent, &ct3d->dc.extents, node, ent_next) {
cxl_remove_extent_from_extent_list(&ct3d->dc.extents, ent);
}
g_free(group);
}
+
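+ /* Free the block-backing bitmap allocated for each DC region */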
+ for (i = 0; i < ct3d->dc.num_regions; i++) {
+ region = &ct3d->dc.regions[i];
+ g_free(region->blk_bitmap);
+ }
}
static bool cxl_setup_memory(CXLType3Dev *ct3d, Error **errp)
}
}
+/*
+ * Mark the DPA range [dpa, dpa + len - 1] as backed and accessible. This
+ * happens when a DC extent is added and accepted by the host.
+ */
+void ct3_set_region_block_backed(CXLType3Dev *ct3d, uint64_t dpa,
+ uint64_t len)
+{
+ CXLDCRegion *region;
+
+ region = cxl_find_dc_region(ct3d, dpa, len);
+ if (!region) {
+ return;
+ }
+
+ bitmap_set(region->blk_bitmap, (dpa - region->base) / region->block_size,
+ len / region->block_size);
+}
+
+/*
+ * Check whether the DPA range [dpa, dpa + len - 1] is backed with DC extents.
+ * Used when validating reads/writes to DC regions.
+ */
+bool ct3_test_region_block_backed(CXLType3Dev *ct3d, uint64_t dpa,
+ uint64_t len)
+{
+ CXLDCRegion *region;
+ uint64_t nbits;
+ long nr;
+
+ region = cxl_find_dc_region(ct3d, dpa, len);
+ if (!region) {
+ return false;
+ }
+
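+ /* nr: bit position of the first block; nbits: blocks touched, rounded up for partial blocks */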
+ nr = (dpa - region->base) / region->block_size;
+ nbits = DIV_ROUND_UP(len, region->block_size);
+ /*
+ * If the bits covering [dpa, dpa + len) are all 1s, the whole DPA range is
+ * backed with DC extents; return true, otherwise return false.
+ */
+ return find_next_zero_bit(region->blk_bitmap, nr + nbits, nr) == nr + nbits;
+}
+
+/*
+ * Mark the DPA range [dpa, dpa + len - 1] as unbacked and inaccessible.
+ * This happens when a DC extent is released by the host.
+ */
+void ct3_clear_region_block_backed(CXLType3Dev *ct3d, uint64_t dpa,
+ uint64_t len)
+{
+ CXLDCRegion *region;
+ uint64_t nbits;
+ long nr;
+
+ region = cxl_find_dc_region(ct3d, dpa, len);
+ if (!region) {
+ return;
+ }
+
+ nr = (dpa - region->base) / region->block_size;
+ nbits = len / region->block_size;
+ bitmap_clear(region->blk_bitmap, nr, nbits);
+}
+
static bool cxl_type3_dpa(CXLType3Dev *ct3d, hwaddr host_addr, uint64_t *dpa)
{
int hdm_inc = R_CXL_HDM_DECODER1_BASE_LO - R_CXL_HDM_DECODER0_BASE_LO;
*as = &ct3d->hostpmem_as;
*dpa_offset -= vmr_size;
} else {
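+ /* Reject accesses to DC ranges not backed by any accepted extent */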
+ if (!ct3_test_region_block_backed(ct3d, *dpa_offset, size)) {
+ return -ENODEV;
+ }
+
*as = &ct3d->dc.host_dc_as;
*dpa_offset -= (vmr_size + pmr_size);
}