Searched refs:hwdev (Results 1 - 35 of 35) sorted by relevance

/linux-4.1.27/include/asm-generic/
pci-dma-compat.h
10 pci_dma_supported(struct pci_dev *hwdev, u64 mask)
12 return dma_supported(hwdev == NULL ? NULL : &hwdev->dev, mask);
16 pci_alloc_consistent(struct pci_dev *hwdev, size_t size,
19 return dma_alloc_coherent(hwdev == NULL ? NULL : &hwdev->dev, size, dma_handle, GFP_ATOMIC);
23 pci_zalloc_consistent(struct pci_dev *hwdev, size_t size,
26 return dma_zalloc_coherent(hwdev == NULL ? NULL : &hwdev->dev,
31 pci_free_consistent(struct pci_dev *hwdev, size_t size,
34 dma_free_coherent(hwdev == NULL ? NULL : &hwdev->dev, size, vaddr, dma_handle);
38 pci_map_single(struct pci_dev *hwdev, void *ptr, size_t size, int direction)
40 return dma_map_single(hwdev == NULL ? NULL : &hwdev->dev, ptr, size, (enum dma_data_direction)direction);
44 pci_unmap_single(struct pci_dev *hwdev, dma_addr_t dma_addr,
47 dma_unmap_single(hwdev == NULL ? NULL : &hwdev->dev, dma_addr, size, (enum dma_data_direction)direction);
51 pci_map_page(struct pci_dev *hwdev, struct page *page,
54 return dma_map_page(hwdev == NULL ? NULL : &hwdev->dev, page, offset, size, (enum dma_data_direction)direction);
58 pci_unmap_page(struct pci_dev *hwdev, dma_addr_t dma_address,
61 dma_unmap_page(hwdev == NULL ? NULL : &hwdev->dev, dma_address, size, (enum dma_data_direction)direction);
65 pci_map_sg(struct pci_dev *hwdev, struct scatterlist *sg,
68 return dma_map_sg(hwdev == NULL ? NULL : &hwdev->dev, sg, nents, (enum dma_data_direction)direction);
72 pci_unmap_sg(struct pci_dev *hwdev, struct scatterlist *sg,
75 dma_unmap_sg(hwdev == NULL ? NULL : &hwdev->dev, sg, nents, (enum dma_data_direction)direction);
79 pci_dma_sync_single_for_cpu(struct pci_dev *hwdev, dma_addr_t dma_handle,
82 dma_sync_single_for_cpu(hwdev == NULL ? NULL : &hwdev->dev, dma_handle, size, (enum dma_data_direction)direction);
86 pci_dma_sync_single_for_device(struct pci_dev *hwdev, dma_addr_t dma_handle,
89 dma_sync_single_for_device(hwdev == NULL ? NULL : &hwdev->dev, dma_handle, size, (enum dma_data_direction)direction);
93 pci_dma_sync_sg_for_cpu(struct pci_dev *hwdev, struct scatterlist *sg,
96 dma_sync_sg_for_cpu(hwdev == NULL ? NULL : &hwdev->dev, sg, nelems, (enum dma_data_direction)direction);
100 pci_dma_sync_sg_for_device(struct pci_dev *hwdev, struct scatterlist *sg,
103 dma_sync_sg_for_device(hwdev == NULL ? NULL : &hwdev->dev, sg, nelems, (enum dma_data_direction)direction);
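
Each wrapper above simply forwards the legacy pci_* call to the generic dma_* API, substituting &hwdev->dev (or NULL when no device is given). As a minimal illustration, a hypothetical driver using this compat layer could look like the following sketch; the function and buffer names are invented for the example:

    #include <linux/pci.h>

    /* Hypothetical example: map one buffer for a CPU-to-device transfer,
     * then unmap. pci_map_single(pdev, ...) becomes
     * dma_map_single(&pdev->dev, ...) through the wrappers above. */
    static int demo_dma_one_buffer(struct pci_dev *pdev, void *buf, size_t len)
    {
            dma_addr_t handle;

            handle = pci_map_single(pdev, buf, len, PCI_DMA_TODEVICE);
            if (pci_dma_mapping_error(pdev, handle))
                    return -ENOMEM;

            /* ... hand 'handle' to the hardware and run the transfer ... */

            pci_unmap_single(pdev, handle, len, PCI_DMA_TODEVICE);
            return 0;
    }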
/linux-4.1.27/arch/arm/include/asm/xen/
page-coherent.h
8 void __xen_dma_map_page(struct device *hwdev, struct page *page,
11 void __xen_dma_unmap_page(struct device *hwdev, dma_addr_t handle,
14 void __xen_dma_sync_single_for_cpu(struct device *hwdev,
17 void __xen_dma_sync_single_for_device(struct device *hwdev,
20 static inline void *xen_alloc_coherent_pages(struct device *hwdev, size_t size,
24 return __generic_dma_ops(hwdev)->alloc(hwdev, size, dma_handle, flags, attrs);
27 static inline void xen_free_coherent_pages(struct device *hwdev, size_t size,
31 __generic_dma_ops(hwdev)->free(hwdev, size, cpu_addr, dma_handle, attrs);
34 static inline void xen_dma_map_page(struct device *hwdev, struct page *page,
44 __generic_dma_ops(hwdev)->map_page(hwdev, page, offset, size, dir, attrs);
46 __xen_dma_map_page(hwdev, page, dev_addr, offset, size, dir, attrs);
49 static inline void xen_dma_unmap_page(struct device *hwdev, dma_addr_t handle,
59 if (__generic_dma_ops(hwdev)->unmap_page)
60 __generic_dma_ops(hwdev)->unmap_page(hwdev, handle, size, dir, attrs);
62 __xen_dma_unmap_page(hwdev, handle, size, dir, attrs);
65 static inline void xen_dma_sync_single_for_cpu(struct device *hwdev,
70 if (__generic_dma_ops(hwdev)->sync_single_for_cpu)
71 __generic_dma_ops(hwdev)->sync_single_for_cpu(hwdev, handle, size, dir);
73 __xen_dma_sync_single_for_cpu(hwdev, handle, size, dir);
76 static inline void xen_dma_sync_single_for_device(struct device *hwdev,
81 if (__generic_dma_ops(hwdev)->sync_single_for_device)
82 __generic_dma_ops(hwdev)->sync_single_for_device(hwdev, handle, size, dir);
84 __xen_dma_sync_single_for_device(hwdev, handle, size, dir);
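
These inlines dispatch between the domain's native dma_ops and the __xen_dma_* helpers, depending on whether the page is local to the domain or a foreign grant mapping. A rough, illustrative rendering of the map-side dispatch; the actual locality test lives in the elided lines and is only stubbed here:

    /* Illustrative sketch only: 'local' stands in for the elided
     * locality test in xen_dma_map_page() above. */
    static inline void xen_dma_map_page_sketch(struct device *hwdev,
                    struct page *page, dma_addr_t dev_addr,
                    unsigned long offset, size_t size,
                    enum dma_data_direction dir, struct dma_attrs *attrs)
    {
            bool local = true;      /* stand-in: "page belongs to this domain" */

            if (local)
                    __generic_dma_ops(hwdev)->map_page(hwdev, page, offset,
                                                       size, dir, attrs);
            else
                    __xen_dma_map_page(hwdev, page, dev_addr, offset,
                                       size, dir, attrs);
    }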
/linux-4.1.27/include/xen/
swiotlb-xen.h
10 *xen_swiotlb_alloc_coherent(struct device *hwdev, size_t size,
15 xen_swiotlb_free_coherent(struct device *hwdev, size_t size,
24 extern void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr,
28 xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
33 xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
38 xen_swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr,
42 xen_swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
46 xen_swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr,
50 xen_swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
54 xen_swiotlb_dma_mapping_error(struct device *hwdev, dma_addr_t dma_addr);
57 xen_swiotlb_dma_supported(struct device *hwdev, u64 mask);
/linux-4.1.27/arch/x86/include/asm/xen/
page-coherent.h
8 static inline void *xen_alloc_coherent_pages(struct device *hwdev, size_t size,
17 static inline void xen_free_coherent_pages(struct device *hwdev, size_t size,
24 static inline void xen_dma_map_page(struct device *hwdev, struct page *page,
28 static inline void xen_dma_unmap_page(struct device *hwdev, dma_addr_t handle,
32 static inline void xen_dma_sync_single_for_cpu(struct device *hwdev,
35 static inline void xen_dma_sync_single_for_device(struct device *hwdev,
/linux-4.1.27/include/linux/
swiotlb.h
42 extern phys_addr_t swiotlb_tbl_map_single(struct device *hwdev,
47 extern void swiotlb_tbl_unmap_single(struct device *hwdev,
51 extern void swiotlb_tbl_sync_single(struct device *hwdev,
58 *swiotlb_alloc_coherent(struct device *hwdev, size_t size,
62 swiotlb_free_coherent(struct device *hwdev, size_t size,
69 extern void swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr,
74 swiotlb_map_sg(struct device *hwdev, struct scatterlist *sg, int nents,
78 swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents,
82 swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems,
86 swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
91 swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr,
95 swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
99 swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr,
103 swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
107 swiotlb_dma_mapping_error(struct device *hwdev, dma_addr_t dma_addr);
110 swiotlb_dma_supported(struct device *hwdev, u64 mask);
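
Drivers rarely call these swiotlb entry points directly; they reach them through the generic DMA API, which falls back to the bounce pool when a device cannot address a buffer. A hedged sketch of the scatter-gather pattern these prototypes serve, using the generic API and hypothetical names:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Hypothetical example: map a scatterlist, run the transfer, unmap.
     * When 'dev' sits behind swiotlb, unreachable entries are bounced. */
    static int demo_map_sg(struct device *dev, struct scatterlist *sgl, int nents)
    {
            int mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);

            if (mapped == 0)
                    return -ENOMEM; /* e.g. the bounce pool is exhausted */

            /* ... program the device from sg_dma_address()/sg_dma_len() ... */

            /* Unmap with the original nents, not the returned count. */
            dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
            return 0;
    }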
fmc.h
177 struct device *hwdev; /* The underlying hardware device */
/linux-4.1.27/drivers/hwmon/
hwmon.c
In hwmon_device_register_with_groups():
100 struct hwmon_device *hwdev;
111 hwdev = kzalloc(sizeof(*hwdev), GFP_KERNEL);
112 if (hwdev == NULL) {
117 hwdev->name = name;
118 hwdev->dev.class = &hwmon_class;
119 hwdev->dev.parent = dev;
120 hwdev->dev.groups = groups;
121 hwdev->dev.of_node = dev ? dev->of_node : NULL;
122 dev_set_drvdata(&hwdev->dev, drvdata);
123 dev_set_name(&hwdev->dev, HWMON_ID_FORMAT, id);
124 err = device_register(&hwdev->dev);
128 return &hwdev->dev;
131 kfree(hwdev);
In devm_hwmon_release():
173 struct device *hwdev = *(struct device **)res;
175 hwmon_device_unregister(hwdev);
In devm_hwmon_device_register_with_groups():
193 struct device **ptr, *hwdev;
202 hwdev = hwmon_device_register_with_groups(dev, name, drvdata, groups);
203 if (IS_ERR(hwdev))
206 *ptr = hwdev;
208 return hwdev;
212 return hwdev;
In devm_hwmon_match():
218 struct device **hwdev = res;
220 return *hwdev == data;
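
hwmon_device_register_with_groups() wraps the caller's attribute groups in a new class device, and the devm_ variant above ties its lifetime to the owning device via devres. A minimal, hypothetical sensor probe using the managed form; the "demo" names are invented for the example:

    #include <linux/err.h>
    #include <linux/hwmon.h>
    #include <linux/hwmon-sysfs.h>
    #include <linux/platform_device.h>

    static ssize_t demo_temp_show(struct device *dev,
                                  struct device_attribute *attr, char *buf)
    {
            int *temp = dev_get_drvdata(dev); /* drvdata set at registration */

            return sprintf(buf, "%d\n", *temp);
    }
    static SENSOR_DEVICE_ATTR(temp1_input, S_IRUGO, demo_temp_show, NULL, 0);

    static struct attribute *demo_attrs[] = {
            &sensor_dev_attr_temp1_input.dev_attr.attr,
            NULL
    };
    ATTRIBUTE_GROUPS(demo);

    static int demo_probe(struct platform_device *pdev)
    {
            static int temp = 42000;        /* millidegrees Celsius */
            struct device *hwdev;

            hwdev = devm_hwmon_device_register_with_groups(&pdev->dev, "demo",
                                                           &temp, demo_groups);
            return PTR_ERR_OR_ZERO(hwdev);
    }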
/linux-4.1.27/arch/arm/xen/
mm.c
87 static void __xen_dma_page_dev_to_cpu(struct device *hwdev, dma_addr_t handle,
93 static void __xen_dma_page_cpu_to_dev(struct device *hwdev, dma_addr_t handle,
99 void __xen_dma_map_page(struct device *hwdev, struct page *page,
103 if (is_device_dma_coherent(hwdev))
108 __xen_dma_page_cpu_to_dev(hwdev, dev_addr, size, dir);
111 void __xen_dma_unmap_page(struct device *hwdev, dma_addr_t handle,
116 if (is_device_dma_coherent(hwdev))
121 __xen_dma_page_dev_to_cpu(hwdev, handle, size, dir);
124 void __xen_dma_sync_single_for_cpu(struct device *hwdev,
127 if (is_device_dma_coherent(hwdev))
129 __xen_dma_page_dev_to_cpu(hwdev, handle, size, dir);
132 void __xen_dma_sync_single_for_device(struct device *hwdev,
135 if (is_device_dma_coherent(hwdev))
137 __xen_dma_page_cpu_to_dev(hwdev, handle, size, dir);
/linux-4.1.27/arch/x86/kernel/
pci-nommu.c
15 check_addr(char *name, struct device *hwdev, dma_addr_t bus, size_t size)
17 if (hwdev && !dma_capable(hwdev, bus, size)) {
18 if (*hwdev->dma_mask >= DMA_BIT_MASK(32))
22 (long long)*hwdev->dma_mask);
56 static int nommu_map_sg(struct device *hwdev, struct scatterlist *sg,
68 if (!check_addr("map_sg", hwdev, s->dma_address, s->length))
pci-swiotlb.c
17 void *x86_swiotlb_alloc_coherent(struct device *hwdev, size_t size,
23 vaddr = dma_generic_alloc_coherent(hwdev, size, dma_handle, flags,
28 return swiotlb_alloc_coherent(hwdev, size, dma_handle, flags);
/linux-4.1.27/drivers/xen/
swiotlb-xen.c
296 xen_swiotlb_alloc_coherent(struct device *hwdev, size_t size,
314 if (dma_alloc_from_coherent(hwdev, size, dma_handle, &ret))
322 ret = xen_alloc_coherent_pages(hwdev, size, dma_handle, flags, attrs);
327 if (hwdev && hwdev->coherent_dma_mask)
328 dma_mask = dma_alloc_coherent_mask(hwdev, flags);
342 xen_free_coherent_pages(hwdev, size, ret, (dma_addr_t)phys, attrs);
352 xen_swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr,
359 if (dma_release_from_coherent(hwdev, order, vaddr))
362 if (hwdev && hwdev->coherent_dma_mask)
363 dma_mask = hwdev->coherent_dma_mask;
373 xen_free_coherent_pages(hwdev, size, vaddr, (dma_addr_t)phys, attrs);
442 static void xen_unmap_single(struct device *hwdev, dma_addr_t dev_addr,
450 xen_dma_unmap_page(hwdev, dev_addr, size, dir, attrs);
454 swiotlb_tbl_unmap_single(hwdev, paddr, size, dir);
470 void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr,
474 xen_unmap_single(hwdev, dev_addr, size, dir, attrs);
489 xen_swiotlb_sync_single(struct device *hwdev, dma_addr_t dev_addr,
498 xen_dma_sync_single_for_cpu(hwdev, dev_addr, size, dir);
502 swiotlb_tbl_sync_single(hwdev, paddr, size, dir, target);
505 xen_dma_sync_single_for_device(hwdev, dev_addr, size, dir);
514 xen_swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr,
517 xen_swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_CPU);
522 xen_swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr,
525 xen_swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_DEVICE);
546 xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
560 xen_arch_need_swiotlb(hwdev, PFN_DOWN(paddr), PFN_DOWN(dev_addr)) ||
561 !dma_capable(hwdev, dev_addr, sg->length) ||
563 phys_addr_t map = swiotlb_tbl_map_single(hwdev,
569 dev_warn(hwdev, "swiotlb buffer is full\n");
572 xen_swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir,
577 xen_dma_map_page(hwdev, pfn_to_page(map >> PAGE_SHIFT),
588 xen_dma_map_page(hwdev, pfn_to_page(paddr >> PAGE_SHIFT),
607 xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
617 xen_unmap_single(hwdev, sg->dma_address, sg_dma_len(sg), dir, attrs);
630 xen_swiotlb_sync_sg(struct device *hwdev, struct scatterlist *sgl,
638 xen_swiotlb_sync_single(hwdev, sg->dma_address,
643 xen_swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
646 xen_swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_CPU);
651 xen_swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
654 xen_swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_DEVICE);
659 xen_swiotlb_dma_mapping_error(struct device *hwdev, dma_addr_t dma_addr)
672 xen_swiotlb_dma_supported(struct device *hwdev, u64 mask)
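
The Xen twist is that a buffer contiguous in pseudo-physical memory may be scattered in machine memory, so the map path bounces not only on !dma_capable() but also when xen_arch_need_swiotlb() or a page-boundary straddle demands it (lines 560-563 above). A condensed sketch of the per-entry decision in xen_swiotlb_map_sg_attrs(); error handling and the xen_dma_map_page() cache maintenance are omitted, and xen_phys_to_bus()/start_dma_addr are internals of swiotlb-xen.c:

    /* Condensed from lines 546-588 above: use the buffer in place when
     * the device can reach it, otherwise bounce it through the pool. */
    for_each_sg(sgl, sg, nelems, i) {
            phys_addr_t paddr = sg_phys(sg);
            dma_addr_t dev_addr = xen_phys_to_bus(paddr);

            if (swiotlb_force ||
                xen_arch_need_swiotlb(hwdev, PFN_DOWN(paddr), PFN_DOWN(dev_addr)) ||
                !dma_capable(hwdev, dev_addr, sg->length) ||
                range_straddles_page_boundary(paddr, sg->length)) {
                    phys_addr_t map = swiotlb_tbl_map_single(hwdev, start_dma_addr,
                                                             paddr, sg->length, dir);
                    sg->dma_address = xen_phys_to_bus(map);   /* bounced */
            } else {
                    sg->dma_address = dev_addr;               /* direct */
            }
            sg_dma_len(sg) = sg->length;
    }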
/linux-4.1.27/lib/
swiotlb.c
135 static dma_addr_t swiotlb_virt_to_bus(struct device *hwdev,
138 return phys_to_dma(hwdev, virt_to_phys(address));
425 phys_addr_t swiotlb_tbl_map_single(struct device *hwdev,
441 mask = dma_get_seg_boundary(hwdev);
517 dev_warn(hwdev, "swiotlb buffer is full (sz: %zd bytes)\n", size);
541 map_single(struct device *hwdev, phys_addr_t phys, size_t size,
544 dma_addr_t start_dma_addr = phys_to_dma(hwdev, io_tlb_start);
546 return swiotlb_tbl_map_single(hwdev, start_dma_addr, phys, size, dir);
552 void swiotlb_tbl_unmap_single(struct device *hwdev, phys_addr_t tlb_addr,
596 void swiotlb_tbl_sync_single(struct device *hwdev, phys_addr_t tlb_addr,
629 swiotlb_alloc_coherent(struct device *hwdev, size_t size,
637 if (hwdev && hwdev->coherent_dma_mask)
638 dma_mask = hwdev->coherent_dma_mask;
642 dev_addr = swiotlb_virt_to_bus(hwdev, ret);
657 phys_addr_t paddr = map_single(hwdev, 0, size, DMA_FROM_DEVICE);
662 dev_addr = phys_to_dma(hwdev, paddr);
666 printk("hwdev DMA mask = 0x%016Lx, dev_addr = 0x%016Lx\n",
671 swiotlb_tbl_unmap_single(hwdev, paddr,
685 swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr,
688 phys_addr_t paddr = dma_to_phys(hwdev, dev_addr);
695 swiotlb_tbl_unmap_single(hwdev, paddr, size, DMA_TO_DEVICE);
777 static void unmap_single(struct device *hwdev, dma_addr_t dev_addr,
780 phys_addr_t paddr = dma_to_phys(hwdev, dev_addr);
785 swiotlb_tbl_unmap_single(hwdev, paddr, size, dir);
801 void swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr,
805 unmap_single(hwdev, dev_addr, size, dir);
820 swiotlb_sync_single(struct device *hwdev, dma_addr_t dev_addr,
824 phys_addr_t paddr = dma_to_phys(hwdev, dev_addr);
829 swiotlb_tbl_sync_single(hwdev, paddr, size, dir, target);
840 swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr,
843 swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_CPU);
848 swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr,
851 swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_DEVICE);
872 swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems,
882 dma_addr_t dev_addr = phys_to_dma(hwdev, paddr);
885 !dma_capable(hwdev, dev_addr, sg->length)) {
886 phys_addr_t map = map_single(hwdev, sg_phys(sg),
891 swiotlb_full(hwdev, sg->length, dir, 0);
892 swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir,
897 sg->dma_address = phys_to_dma(hwdev, map);
907 swiotlb_map_sg(struct device *hwdev, struct scatterlist *sgl, int nelems,
910 return swiotlb_map_sg_attrs(hwdev, sgl, nelems, dir, NULL);
919 swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
928 unmap_single(hwdev, sg->dma_address, sg_dma_len(sg), dir);
934 swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems,
937 return swiotlb_unmap_sg_attrs(hwdev, sgl, nelems, dir, NULL);
949 swiotlb_sync_sg(struct device *hwdev, struct scatterlist *sgl,
957 swiotlb_sync_single(hwdev, sg->dma_address,
962 swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
965 swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_CPU);
970 swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
973 swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_DEVICE);
978 swiotlb_dma_mapping_error(struct device *hwdev, dma_addr_t dma_addr)
980 return (dma_addr == phys_to_dma(hwdev, io_tlb_overflow_buffer));
991 swiotlb_dma_supported(struct device *hwdev, u64 mask)
993 return phys_to_dma(hwdev, io_tlb_end - 1) <= mask;
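
The map path above reduces to one decision: if the device can already reach the buffer, return its bus address unchanged; otherwise copy the data into the io_tlb pool and return the slot's address instead. A condensed sketch of that decision; swiotlb_force handling and the overflow/failure paths are omitted:

    /* Condensed sketch of the swiotlb map decision, cf. map_single()
     * and swiotlb_tbl_map_single() above. */
    static dma_addr_t swiotlb_map_sketch(struct device *hwdev, struct page *page,
                                         unsigned long offset, size_t size,
                                         enum dma_data_direction dir)
    {
            phys_addr_t phys = page_to_phys(page) + offset;
            dma_addr_t dev_addr = phys_to_dma(hwdev, phys);

            if (dma_capable(hwdev, dev_addr, size))
                    return dev_addr;        /* direct: no bounce needed */

            /* Bounce: claim an io_tlb slot, copying for DMA_TO_DEVICE. */
            phys = map_single(hwdev, phys, size, dir);
            return phys_to_dma(hwdev, phys);
    }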
/linux-4.1.27/arch/hexagon/kernel/
dma.c
103 static int check_addr(const char *name, struct device *hwdev,
106 if (hwdev && hwdev->dma_mask && !dma_capable(hwdev, bus, size)) {
107 if (*hwdev->dma_mask >= DMA_BIT_MASK(32))
111 (long long)*hwdev->dma_mask);
117 static int hexagon_map_sg(struct device *hwdev, struct scatterlist *sg,
128 if (!check_addr("map_sg", hwdev, s->dma_address, s->length))
/linux-4.1.27/arch/x86/include/asm/
swiotlb.h
32 extern void *x86_swiotlb_alloc_coherent(struct device *hwdev, size_t size,
dma-mapping.h
60 extern int dma_supported(struct device *hwdev, u64 mask);
/linux-4.1.27/drivers/fmc/
fmc-core.c
In fmc_device_register_n():
164 if (!fmc->hwdev) {
165 pr_err("%s: device nr. %i has no hwdev pointer\n",
171 dev_info(fmc->hwdev, "absent mezzanine in slot %d\n",
176 dev_err(fmc->hwdev, "no eeprom provided for slot %i\n",
181 dev_err(fmc->hwdev, "no eeprom_addr for slot %i\n",
187 dev_err(fmc->hwdev,
210 fmc->dev.parent = fmc->hwdev;
226 dev_err(fmc->hwdev, "Slot %i: Failed in registering "
fmc-dump.c
In fmc_dump_eeprom():
52 pr_info("FMC: %s (%s), slot %i, device %s\n", dev_name(fmc->hwdev),
In fmc_dump_sdb():
90 pr_info("FMC: %s (%s), slot %i, device %s\n", dev_name(fmc->hwdev),
fmc-match.c
In fmc_fill_id_info():
75 dev_info(fmc->hwdev, "mezzanine %i\n", fmc->slot_id); /* header */
fmc-fakedev.c
259 .hwdev = NULL, /* filled at creation time */
In ff_dev_create():
285 fmc->hwdev = &ff->dev;
/linux-4.1.27/drivers/infiniband/hw/ipath/
ipath_user_pages.c
106 dma_addr_t ipath_map_page(struct pci_dev *hwdev, struct page *page,
111 phys = pci_map_page(hwdev, page, offset, size, direction);
114 pci_unmap_page(hwdev, phys, size, direction);
115 phys = pci_map_page(hwdev, page, offset, size, direction);
130 dma_addr_t ipath_map_single(struct pci_dev *hwdev, void *ptr, size_t size,
135 phys = pci_map_single(hwdev, ptr, size, direction);
138 pci_unmap_single(hwdev, phys, size, direction);
139 phys = pci_map_single(hwdev, ptr, size, direction);
/linux-4.1.27/arch/frv/include/asm/
pci.h
35 extern void *pci_alloc_consistent(struct pci_dev *hwdev, size_t size,
38 extern void pci_free_consistent(struct pci_dev *hwdev, size_t size,
/linux-4.1.27/arch/frv/mb93090-mb00/
pci-dma.c
21 void *dma_alloc_coherent(struct device *hwdev, size_t size, dma_addr_t *dma_handle, gfp_t gfp)
34 void dma_free_coherent(struct device *hwdev, size_t size, void *vaddr, dma_addr_t dma_handle)
pci-dma-nommu.c
37 void *dma_alloc_coherent(struct device *hwdev, size_t size, dma_addr_t *dma_handle, gfp_t gfp)
89 void dma_free_coherent(struct device *hwdev, size_t size, void *vaddr, dma_addr_t dma_handle)
/linux-4.1.27/arch/ia64/kernel/
machvec.c
77 machvec_dma_sync_single(struct device *hwdev, dma_addr_t dma_handle, size_t size,
85 machvec_dma_sync_sg(struct device *hwdev, struct scatterlist *sg, int n,
/linux-4.1.27/arch/ia64/sn/pci/pcibr/
pcibr_dma.c
208 pcibr_dma_unmap(struct pci_dev *hwdev, dma_addr_t dma_handle, int direction)
210 struct pcidev_info *pcidev_info = SN_PCIDEV_INFO(hwdev);
357 pcibr_dma_map(struct pci_dev * hwdev, unsigned long phys_addr, size_t size, int dma_flags)
360 struct pcidev_info *pcidev_info = SN_PCIDEV_INFO(hwdev);
363 if (hwdev->dma_mask < 0x7fffffff) {
367 if (hwdev->dma_mask == ~0UL) {
395 pcibr_dma_map_consistent(struct pci_dev * hwdev, unsigned long phys_addr,
399 struct pcidev_info *pcidev_info = SN_PCIDEV_INFO(hwdev);
401 if (hwdev->dev.coherent_dma_mask == ~0UL) {
/linux-4.1.27/drivers/infiniband/hw/qib/
qib_user_pages.c
101 dma_addr_t qib_map_page(struct pci_dev *hwdev, struct page *page,
106 phys = pci_map_page(hwdev, page, offset, size, direction);
109 pci_unmap_page(hwdev, phys, size, direction);
110 phys = pci_map_page(hwdev, page, offset, size, direction);
/linux-4.1.27/drivers/net/ethernet/via/
via-rhine.c
680 static inline int verify_mmio(struct device *hwdev,
695 dev_err(hwdev,
907 static int rhine_init_one_common(struct device *hwdev, u32 quirks,
916 rc = dma_set_mask(hwdev, DMA_BIT_MASK(32));
918 dev_err(hwdev, "32-bit DMA addresses not supported by the card!?\n");
927 SET_NETDEV_DEV(dev, hwdev);
1004 dev_set_drvdata(hwdev, dev);
In rhine_init_one_pci():
1042 struct device *hwdev = &pdev->dev;
1087 dev_err(hwdev, "Insufficient PCI resources, aborting\n");
1103 dev_err(hwdev,
1105 dev_name(hwdev), io_size, memaddr);
1111 rc = verify_mmio(hwdev, pioaddr, ioaddr, quirks);
In alloc_ring():
1162 struct device *hwdev = dev->dev.parent;
1166 ring = dma_alloc_coherent(hwdev,
1176 rp->tx_bufs = dma_alloc_coherent(hwdev,
1181 dma_free_coherent(hwdev,
In free_ring():
1200 struct device *hwdev = dev->dev.parent;
1202 dma_free_coherent(hwdev,
1209 dma_free_coherent(hwdev, PKT_BUF_SZ * TX_RING_SIZE,
In alloc_rbufs():
1219 struct device *hwdev = dev->dev.parent;
1248 dma_map_single(hwdev, skb->data, rp->rx_buf_sz,
1250 if (dma_mapping_error(hwdev, rp->rx_skbuff_dma[i])) {
In free_rbufs():
1264 struct device *hwdev = dev->dev.parent;
1272 dma_unmap_single(hwdev,
In free_tbufs():
1306 struct device *hwdev = dev->dev.parent;
1315 dma_unmap_single(hwdev,
In rhine_start_tx():
1737 struct device *hwdev = dev->dev.parent;
1774 dma_map_single(hwdev, skb->data, skb->len,
1776 if (dma_mapping_error(hwdev, rp->tx_skbuff_dma[entry])) {
In rhine_tx():
1868 struct device *hwdev = dev->dev.parent;
1915 dma_unmap_single(hwdev,
In rhine_rx():
1951 struct device *hwdev = dev->dev.parent;
2013 dma_sync_single_for_cpu(hwdev,
2022 dma_sync_single_for_device(hwdev,
2034 dma_unmap_single(hwdev,
2068 dma_map_single(hwdev, skb->data,
2071 if (dma_mapping_error(hwdev,
In netdev_get_drvinfo():
2226 struct device *hwdev = dev->dev.parent;
2230 strlcpy(info->bus_info, dev_name(hwdev), sizeof(info->bus_info));
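
The rhine paths above show the canonical streaming-DMA discipline: every dma_map_single() is checked with dma_mapping_error() before the descriptor is armed, and buffers are unmapped or synced before the CPU touches them again. The same TX pattern in isolation, with hypothetical names:

    #include <linux/dma-mapping.h>
    #include <linux/skbuff.h>

    /* Hypothetical TX mapping helper following the pattern above. */
    static int demo_map_tx(struct device *hwdev, struct sk_buff *skb,
                           dma_addr_t *handle)
    {
            *handle = dma_map_single(hwdev, skb->data, skb->len, DMA_TO_DEVICE);
            if (dma_mapping_error(hwdev, *handle)) {
                    dev_kfree_skb_any(skb);
                    return -ENOMEM;
            }
            /* ... write *handle into the TX descriptor and kick the device ... */
            return 0;
    }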
/linux-4.1.27/arch/xtensa/kernel/
pci-dma.c
67 void dma_free_coherent(struct device *hwdev, size_t size,
/linux-4.1.27/arch/mips/include/asm/mach-pmcs-msp71xx/
msp_usb.h
In struct msp_usbid_regs:
51 u32 hwdev; /* 0xc: Device HW params */
/linux-4.1.27/drivers/parisc/
sba_iommu.c
862 * @hwdev: instance of PCI owned by the driver that's asking.
868 static void *sba_alloc_consistent(struct device *hwdev, size_t size,
873 if (!hwdev) {
883 *dma_handle = sba_map_single(hwdev, ret, size, 0);
892 * @hwdev: instance of PCI owned by the driver that's asking.
900 sba_free_consistent(struct device *hwdev, size_t size, void *vaddr,
903 sba_unmap_single(hwdev, dma_handle, size, 0);
ccio-dma.c
In ccio_alloc_consistent():
846 if(!hwdev) {
/linux-4.1.27/drivers/isdn/hardware/eicon/
divasmain.c
336 static void *diva_pci_alloc_consistent(struct pci_dev *hwdev,
341 void *addr = pci_alloc_consistent(hwdev, size, dma_handle);
/linux-4.1.27/drivers/net/ethernet/toshiba/
tc35815.c
450 alloc_rxbuf_skb(struct net_device *dev, struct pci_dev *hwdev, dma_addr_t *dma_handle)
458 *dma_handle = pci_map_single(hwdev, skb->data, RX_BUF_SIZE,
460 if (pci_dma_mapping_error(hwdev, *dma_handle)) {
468 static void free_rxbuf_skb(struct pci_dev *hwdev, struct sk_buff *skb, dma_addr_t dma_handle)
470 pci_unmap_single(hwdev, dma_handle, RX_BUF_SIZE,
/linux-4.1.27/arch/sparc/kernel/
ioport.c
428 * hwdev should be valid struct pci_dev pointer for PCI devices.
/linux-4.1.27/drivers/gpu/drm/i915/
i915_gem_gtt.c
In gen8_ppgtt_unmap_pages():
622 struct pci_dev *hwdev = ppgtt->base.dev->pdev;
631 pci_unmap_page(hwdev, ppgtt->pdp.page_directory[i]->daddr, PAGE_SIZE,
646 pci_unmap_page(hwdev, addr, PAGE_SIZE,

Completed in 1040 milliseconds