 static void __dma_direct_free_pages(struct device *dev, struct page *page,
                                     size_t size)
 {
-        if (IS_ENABLED(CONFIG_DMA_RESTRICTED_POOL) &&
-            swiotlb_free(dev, page, size))
+        if (swiotlb_free(dev, page, size))
                 return;
         dma_free_contiguous(dev, page, size);
 }
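Why dropping the IS_ENABLED() guards is safe: when CONFIG_DMA_RESTRICTED_POOL is not set, <linux/swiotlb.h> is expected to provide inline stubs for swiotlb_alloc(), swiotlb_free() and is_swiotlb_for_alloc(), so the calls above still compile and the branches still fold away as dead code. Below is a minimal sketch of such stubs, written here for illustration under that assumption rather than copied verbatim from the header.

/*
 * Illustrative !CONFIG_DMA_RESTRICTED_POOL stubs (assumed shape, not a
 * verbatim copy of include/linux/swiotlb.h): constant NULL/false returns
 * let the compiler eliminate the swiotlb branches in the callers, which
 * is what makes the explicit IS_ENABLED() checks redundant.
 */
static inline struct page *swiotlb_alloc(struct device *dev, size_t size)
{
        return NULL;
}

static inline bool swiotlb_free(struct device *dev, struct page *page,
                                size_t size)
{
        return false;
}

static inline bool is_swiotlb_for_alloc(struct device *dev)
{
        return false;
}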
         gfp |= dma_direct_optimal_gfp_mask(dev, dev->coherent_dma_mask,
                                            &phys_limit);
-        if (IS_ENABLED(CONFIG_DMA_RESTRICTED_POOL) &&
-            is_swiotlb_for_alloc(dev)) {
+        if (is_swiotlb_for_alloc(dev)) {
                 page = swiotlb_alloc(dev, size);
                 if (page && !dma_coherent_ok(dev, page_to_phys(page), size)) {
                         __dma_direct_free_pages(dev, page, size);