Lines Matching refs:hwdev

296 xen_swiotlb_alloc_coherent(struct device *hwdev, size_t size,  in xen_swiotlb_alloc_coherent()  argument
314 if (dma_alloc_from_coherent(hwdev, size, dma_handle, &ret)) in xen_swiotlb_alloc_coherent()
322 ret = xen_alloc_coherent_pages(hwdev, size, dma_handle, flags, attrs); in xen_swiotlb_alloc_coherent()
327 if (hwdev && hwdev->coherent_dma_mask) in xen_swiotlb_alloc_coherent()
328 dma_mask = dma_alloc_coherent_mask(hwdev, flags); in xen_swiotlb_alloc_coherent()
342 xen_free_coherent_pages(hwdev, size, ret, (dma_addr_t)phys, attrs); in xen_swiotlb_alloc_coherent()
352 xen_swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr, in xen_swiotlb_free_coherent() argument
359 if (dma_release_from_coherent(hwdev, order, vaddr)) in xen_swiotlb_free_coherent()
362 if (hwdev && hwdev->coherent_dma_mask) in xen_swiotlb_free_coherent()
363 dma_mask = hwdev->coherent_dma_mask; in xen_swiotlb_free_coherent()
373 xen_free_coherent_pages(hwdev, size, vaddr, (dma_addr_t)phys, attrs); in xen_swiotlb_free_coherent()
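Both coherent paths above pick the DMA mask the same way; the sketch below restates that fallback pattern, assuming (as is common for swiotlb code, but not shown in this listing) a 32-bit default when the device has not set coherent_dma_mask:

	/*
	 * Mask selection as seen at lines 327-328 and 362-363: start from a
	 * conservative default and widen it only if the device advertises a
	 * coherent DMA mask.  DMA_BIT_MASK(32) as the default is an assumption.
	 */
	u64 dma_mask = DMA_BIT_MASK(32);

	if (hwdev && hwdev->coherent_dma_mask)
		dma_mask = hwdev->coherent_dma_mask;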
442 static void xen_unmap_single(struct device *hwdev, dma_addr_t dev_addr, in xen_unmap_single() argument
450 xen_dma_unmap_page(hwdev, dev_addr, size, dir, attrs); in xen_unmap_single()
454 swiotlb_tbl_unmap_single(hwdev, paddr, size, dir); in xen_unmap_single()
470 void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_unmap_page() argument
474 xen_unmap_single(hwdev, dev_addr, size, dir, attrs); in xen_swiotlb_unmap_page()
489 xen_swiotlb_sync_single(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_sync_single() argument
498 xen_dma_sync_single_for_cpu(hwdev, dev_addr, size, dir); in xen_swiotlb_sync_single()
502 swiotlb_tbl_sync_single(hwdev, paddr, size, dir, target); in xen_swiotlb_sync_single()
505 xen_dma_sync_single_for_device(hwdev, dev_addr, size, dir); in xen_swiotlb_sync_single()
514 xen_swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_sync_single_for_cpu() argument
517 xen_swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_CPU); in xen_swiotlb_sync_single_for_cpu()
522 xen_swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_sync_single_for_device() argument
525 xen_swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_DEVICE); in xen_swiotlb_sync_single_for_device()
546 xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_map_sg_attrs() argument
560 xen_arch_need_swiotlb(hwdev, PFN_DOWN(paddr), PFN_DOWN(dev_addr)) || in xen_swiotlb_map_sg_attrs()
561 !dma_capable(hwdev, dev_addr, sg->length) || in xen_swiotlb_map_sg_attrs()
563 phys_addr_t map = swiotlb_tbl_map_single(hwdev, in xen_swiotlb_map_sg_attrs()
569 dev_warn(hwdev, "swiotlb buffer is full\n"); in xen_swiotlb_map_sg_attrs()
572 xen_swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir, in xen_swiotlb_map_sg_attrs()
577 xen_dma_map_page(hwdev, pfn_to_page(map >> PAGE_SHIFT), in xen_swiotlb_map_sg_attrs()
588 xen_dma_map_page(hwdev, pfn_to_page(paddr >> PAGE_SHIFT), in xen_swiotlb_map_sg_attrs()
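The map_sg group above walks each scatterlist entry and decides whether to bounce it through the swiotlb. Below is a condensed sketch of that loop; the helper calls are the ones visible in the listing, while the local variable names and the simplified error path are assumptions:

	/* Condensed per-entry bounce decision (cf. lines 560-588). */
	for_each_sg(sgl, sg, nelems, i) {
		phys_addr_t paddr = sg_phys(sg);
		dma_addr_t dev_addr = xen_phys_to_bus(paddr);

		if (xen_arch_need_swiotlb(hwdev, PFN_DOWN(paddr), PFN_DOWN(dev_addr)) ||
		    !dma_capable(hwdev, dev_addr, sg->length)) {
			/* Device cannot reach the buffer: reserve a swiotlb slot. */
			phys_addr_t map = swiotlb_tbl_map_single(hwdev, start_dma_addr,
								 paddr, sg->length,
								 dir, attrs);
			if (map == SWIOTLB_MAP_ERROR) {
				dev_warn(hwdev, "swiotlb buffer is full\n");
				/* Undo what was mapped so far and report failure. */
				xen_swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir, attrs);
				sg_dma_len(sgl) = 0;
				return 0;
			}
			xen_dma_map_page(hwdev, pfn_to_page(map >> PAGE_SHIFT),
					 dev_addr, map & ~PAGE_MASK,
					 sg->length, dir, attrs);
			sg->dma_address = xen_phys_to_bus(map);
		} else {
			/* Device can reach the buffer directly: map it in place. */
			xen_dma_map_page(hwdev, pfn_to_page(paddr >> PAGE_SHIFT),
					 dev_addr, paddr & ~PAGE_MASK,
					 sg->length, dir, attrs);
			sg->dma_address = dev_addr;
		}
		sg_dma_len(sg) = sg->length;
	}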
607 xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_unmap_sg_attrs() argument
617 xen_unmap_single(hwdev, sg->dma_address, sg_dma_len(sg), dir, attrs); in xen_swiotlb_unmap_sg_attrs()
630 xen_swiotlb_sync_sg(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_sync_sg() argument
638 xen_swiotlb_sync_single(hwdev, sg->dma_address, in xen_swiotlb_sync_sg()
643 xen_swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg, in xen_swiotlb_sync_sg_for_cpu() argument
646 xen_swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_CPU); in xen_swiotlb_sync_sg_for_cpu()
651 xen_swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg, in xen_swiotlb_sync_sg_for_device() argument
654 xen_swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_DEVICE); in xen_swiotlb_sync_sg_for_device()
659 xen_swiotlb_dma_mapping_error(struct device *hwdev, dma_addr_t dma_addr) in xen_swiotlb_dma_mapping_error() argument
672 xen_swiotlb_dma_supported(struct device *hwdev, u64 mask) in xen_swiotlb_dma_supported() argument
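Taken together, the hwdev-taking functions above form the Xen swiotlb DMA callbacks. A minimal sketch of how such callbacks are typically collected into a struct dma_map_ops table follows, assuming the older (pre-4.13) field names; the exact field set and the const qualifier vary by kernel version, so treat this as illustrative rather than a copy of the indexed file:

#include <linux/dma-mapping.h>

/*
 * Illustrative wiring of the listed callbacks into a dma_map_ops table.
 * Only callbacks that appear in the hwdev listing are shown; a real table
 * would also set .map_page and any other version-specific hooks.
 */
static const struct dma_map_ops xen_swiotlb_dma_ops = {
	.alloc			= xen_swiotlb_alloc_coherent,
	.free			= xen_swiotlb_free_coherent,
	.sync_single_for_cpu	= xen_swiotlb_sync_single_for_cpu,
	.sync_single_for_device	= xen_swiotlb_sync_single_for_device,
	.sync_sg_for_cpu	= xen_swiotlb_sync_sg_for_cpu,
	.sync_sg_for_device	= xen_swiotlb_sync_sg_for_device,
	.map_sg			= xen_swiotlb_map_sg_attrs,
	.unmap_sg		= xen_swiotlb_unmap_sg_attrs,
	.unmap_page		= xen_swiotlb_unmap_page,
	.mapping_error		= xen_swiotlb_dma_mapping_error,
	.dma_supported		= xen_swiotlb_dma_supported,
};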