Lines Matching refs:sgt (scatter-gather table references in the videobuf2 DMA-contig allocator, videobuf2-dma-contig.c)

50 static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt)  in vb2_dc_get_contiguous_size()  argument
53 dma_addr_t expected = sg_dma_address(sgt->sgl); in vb2_dc_get_contiguous_size()
57 for_each_sg(sgt->sgl, s, sgt->nents, i) { in vb2_dc_get_contiguous_size()
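The three hits in vb2_dc_get_contiguous_size() are the contiguity check: walk the DMA-mapped scatterlist from the first entry and count how many bytes are contiguous in DMA address space. The hits only show the 'expected' seed and the for_each_sg() header, so the loop body below is inferred; this is a minimal sketch reconstructed from the visible fragments, not a copy of the file:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Sketch: stop at the first gap between the end of one segment and the
 * DMA address of the next; the bytes counted so far are contiguous. */
static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt)
{
        struct scatterlist *s;
        dma_addr_t expected = sg_dma_address(sgt->sgl);
        unsigned int i;
        unsigned long size = 0;

        /* Walking the DMA side of the table, hence nents (mapped entries). */
        for_each_sg(sgt->sgl, s, sgt->nents, i) {
                if (sg_dma_address(s) != expected)
                        break;
                expected = sg_dma_address(s) + sg_dma_len(s);
                size += sg_dma_len(s);
        }
        return size;
}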
97 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_prepare() local
100 if (!sgt || buf->db_attach) in vb2_dc_prepare()
103 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_prepare()
110 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_finish() local
113 if (!sgt || buf->db_attach) in vb2_dc_finish()
116 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->orig_nents, buf->dma_dir); in vb2_dc_finish()
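The vb2_dc_prepare()/vb2_dc_finish() hits are the cache-maintenance hooks: sync the buffer for the device before DMA and back for the CPU afterwards, skipping buffers that have no scatter-gather table or that were imported through dmabuf (buf->db_attach). Both calls pass orig_nents rather than nents because the sync API takes the entry count originally handed to dma_map_sg(). A sketch of the pattern, with a stand-in struct that only names the fields visible in the hits:

#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Stand-in for the driver's buffer state; not the real struct layout. */
struct vb2_dc_buf_sketch {
        struct device *dev;
        struct sg_table *dma_sgt;
        struct dma_buf_attachment *db_attach;
        enum dma_data_direction dma_dir;
};

static void sketch_prepare(struct vb2_dc_buf_sketch *buf)
{
        struct sg_table *sgt = buf->dma_sgt;

        /* No sgt (coherent allocation) or imported dmabuf: nothing to sync. */
        if (!sgt || buf->db_attach)
                return;

        /* orig_nents, not nents: sync takes the CPU-side entry count. */
        dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->orig_nents,
                               buf->dma_dir);
}

static void sketch_finish(struct vb2_dc_buf_sketch *buf)
{
        struct sg_table *sgt = buf->dma_sgt;

        if (!sgt || buf->db_attach)
                return;

        dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->orig_nents, buf->dma_dir);
}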
214 struct sg_table sgt; member
224 struct sg_table *sgt; in vb2_dc_dmabuf_ops_attach() local
232 sgt = &attach->sgt; in vb2_dc_dmabuf_ops_attach()
236 ret = sg_alloc_table(sgt, buf->sgt_base->orig_nents, GFP_KERNEL); in vb2_dc_dmabuf_ops_attach()
243 wr = sgt->sgl; in vb2_dc_dmabuf_ops_attach()
244 for (i = 0; i < sgt->orig_nents; ++i) { in vb2_dc_dmabuf_ops_attach()
260 struct sg_table *sgt; in vb2_dc_dmabuf_ops_detach() local
265 sgt = &attach->sgt; in vb2_dc_dmabuf_ops_detach()
269 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_dmabuf_ops_detach()
271 sg_free_table(sgt); in vb2_dc_dmabuf_ops_detach()
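In vb2_dc_dmabuf_ops_attach() each dmabuf attachment embeds its own sg_table (the 'member' hit above): the table is allocated with the same number of entries as the exporter's base table (buf->sgt_base) so each importer can map the pages for its own device, and vb2_dc_dmabuf_ops_detach() undoes any mapping and frees the copy. The hits show the allocation and the write cursor; the per-entry copy is inferred. A hedged sketch of that copy-and-release pattern (helper names are mine):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Sketch: duplicate the exporter's base table into a per-attachment copy. */
static int sketch_copy_base_sgt(struct sg_table *dst, struct sg_table *base)
{
        struct scatterlist *rd, *wr;
        unsigned int i;
        int ret;

        ret = sg_alloc_table(dst, base->orig_nents, GFP_KERNEL);
        if (ret)
                return ret;

        rd = base->sgl;
        wr = dst->sgl;
        for (i = 0; i < dst->orig_nents; ++i) {
                sg_set_page(wr, sg_page(rd), rd->length, rd->offset);
                rd = sg_next(rd);
                wr = sg_next(wr);
        }
        return 0;
}

/* Sketch of detach: drop any mapping, then free the private table. */
static void sketch_release_attach_sgt(struct device *dev, struct sg_table *sgt,
                                      enum dma_data_direction dir, bool mapped)
{
        if (mapped)
                dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, dir);
        sg_free_table(sgt);
}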
282 struct sg_table *sgt; in vb2_dc_dmabuf_ops_map() local
286 sgt = &attach->sgt; in vb2_dc_dmabuf_ops_map()
290 return sgt; in vb2_dc_dmabuf_ops_map()
295 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_dmabuf_ops_map()
301 sgt->nents = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_dmabuf_ops_map()
303 if (!sgt->nents) { in vb2_dc_dmabuf_ops_map()
313 return sgt; in vb2_dc_dmabuf_ops_map()
317 struct sg_table *sgt, enum dma_data_direction dma_dir) in vb2_dc_dmabuf_ops_unmap() argument
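vb2_dc_dmabuf_ops_map() maps that per-attachment table for the importer's device: if it is already mapped in the requested direction the cached table is returned as-is, if it is mapped in a different direction it is unmapped first, and dma_map_sg() then stores the mapped entry count in sgt->nents (zero means failure). Later kernels wrap this nents bookkeeping in dma_map_sgtable(), but the listing reflects the older open-coded form. A sketch with the cached direction passed in explicitly instead of living in the attachment struct:

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/* Sketch: *cached plays the role of the attachment's stored dma_dir. */
static struct sg_table *sketch_map_attachment(struct device *dev,
                                              struct sg_table *sgt,
                                              enum dma_data_direction *cached,
                                              enum dma_data_direction dir)
{
        if (*cached == dir)
                return sgt;                     /* already mapped this way */

        if (*cached != DMA_NONE)                /* mapped, wrong direction */
                dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, *cached);

        sgt->nents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, dir);
        if (!sgt->nents)
                return ERR_PTR(-EIO);

        *cached = dir;
        return sgt;
}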
363 struct sg_table *sgt; in vb2_dc_get_base_sgt() local
365 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL); in vb2_dc_get_base_sgt()
366 if (!sgt) { in vb2_dc_get_base_sgt()
371 ret = dma_get_sgtable(buf->dev, sgt, buf->vaddr, buf->dma_addr, in vb2_dc_get_base_sgt()
375 kfree(sgt); in vb2_dc_get_base_sgt()
379 return sgt; in vb2_dc_get_base_sgt()
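vb2_dc_get_base_sgt() builds the base sg_table for a coherent MMAP allocation via dma_get_sgtable(), so the attach path above has something to copy. A sketch with the buffer fields passed as parameters:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Sketch: vaddr/dma_addr/size come from the original dma_alloc_* call. */
static struct sg_table *sketch_get_base_sgt(struct device *dev, void *vaddr,
                                            dma_addr_t dma_addr, size_t size)
{
        struct sg_table *sgt;
        int ret;

        sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
        if (!sgt)
                return NULL;

        ret = dma_get_sgtable(dev, sgt, vaddr, dma_addr, size);
        if (ret < 0) {
                kfree(sgt);
                return NULL;
        }
        return sgt;
}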
416 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_put_userptr() local
420 if (sgt) { in vb2_dc_put_userptr()
428 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_put_userptr()
435 sg_free_table(sgt); in vb2_dc_put_userptr()
436 kfree(sgt); in vb2_dc_put_userptr()
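vb2_dc_put_userptr() tears a USERPTR buffer down: unmap the scatterlist, free the table, then free the table structure. The attrs argument is cut off in the hit; it is an assumption here that the driver skips the CPU sync at unmap time (DMA_ATTR_SKIP_CPU_SYNC) because prepare()/finish() already handle cache maintenance. A sketch:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Sketch: the DMA_ATTR_SKIP_CPU_SYNC flag is an assumption, see above. */
static void sketch_put_userptr_sgt(struct device *dev, struct sg_table *sgt,
                                   enum dma_data_direction dir)
{
        if (!sgt)
                return;

        dma_unmap_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir,
                           DMA_ATTR_SKIP_CPU_SYNC);
        sg_free_table(sgt);
        kfree(sgt);
}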
482 struct sg_table *sgt; in vb2_dc_get_userptr() local
530 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in vb2_dc_get_userptr()
531 if (!sgt) { in vb2_dc_get_userptr()
537 ret = sg_alloc_table_from_pages(sgt, frame_vector_pages(vec), n_pages, in vb2_dc_get_userptr()
548 sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_get_userptr()
550 if (sgt->nents <= 0) { in vb2_dc_get_userptr()
556 contig_size = vb2_dc_get_contiguous_size(sgt); in vb2_dc_get_userptr()
564 buf->dma_addr = sg_dma_address(sgt->sgl); in vb2_dc_get_userptr()
565 buf->dma_sgt = sgt; in vb2_dc_get_userptr()
572 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_get_userptr()
576 sg_free_table(sgt); in vb2_dc_get_userptr()
579 kfree(sgt); in vb2_dc_get_userptr()
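vb2_dc_get_userptr() is the reverse path: build an sg_table over the pinned user pages (frame_vector_pages()), DMA-map it, and accept the buffer only if vb2_dc_get_contiguous_size() reports that the whole requested size is contiguous for the device; the base DMA address is then taken from the first entry. In the sketch below the attrs value and the exact error codes are assumptions, and pages/n_pages/offset/size stand in for the frame-vector bookkeeping the hits do not show; it reuses the contiguity helper sketched at the top of the listing:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Sketch: map pinned user pages and insist on a fully contiguous mapping. */
static int sketch_map_user_pages(struct device *dev, struct sg_table *sgt,
                                 struct page **pages, unsigned int n_pages,
                                 unsigned long offset, unsigned long size,
                                 enum dma_data_direction dir,
                                 dma_addr_t *dma_addr)
{
        unsigned long contig_size;
        int ret;

        ret = sg_alloc_table_from_pages(sgt, pages, n_pages, offset, size,
                                        GFP_KERNEL);
        if (ret)
                return ret;

        sgt->nents = dma_map_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir,
                                      DMA_ATTR_SKIP_CPU_SYNC);
        if (sgt->nents <= 0) {
                sg_free_table(sgt);
                return -EIO;
        }

        contig_size = vb2_dc_get_contiguous_size(sgt);
        if (contig_size < size) {
                dma_unmap_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir,
                                   DMA_ATTR_SKIP_CPU_SYNC);
                sg_free_table(sgt);
                return -EFAULT;
        }

        *dma_addr = sg_dma_address(sgt->sgl);
        return 0;
}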
597 struct sg_table *sgt; in vb2_dc_map_dmabuf() local
611 sgt = dma_buf_map_attachment(buf->db_attach, buf->dma_dir); in vb2_dc_map_dmabuf()
612 if (IS_ERR(sgt)) { in vb2_dc_map_dmabuf()
618 contig_size = vb2_dc_get_contiguous_size(sgt); in vb2_dc_map_dmabuf()
622 dma_buf_unmap_attachment(buf->db_attach, sgt, buf->dma_dir); in vb2_dc_map_dmabuf()
626 buf->dma_addr = sg_dma_address(sgt->sgl); in vb2_dc_map_dmabuf()
627 buf->dma_sgt = sgt; in vb2_dc_map_dmabuf()
636 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_unmap_dmabuf() local
643 if (WARN_ON(!sgt)) { in vb2_dc_unmap_dmabuf()
652 dma_buf_unmap_attachment(buf->db_attach, sgt, buf->dma_dir); in vb2_dc_unmap_dmabuf()
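vb2_dc_map_dmabuf()/vb2_dc_unmap_dmabuf() are the importer side: dma_buf_map_attachment() returns the exporter's mapped table (or an ERR_PTR), the same contiguity check rejects fragmented mappings, and unmap simply hands the table back. A sketch, again reusing the contiguity helper from the top of the listing; the out-parameters stand in for buf->dma_sgt and buf->dma_addr, and the error codes are assumptions:

#include <linux/bug.h>
#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Sketch: map the attachment and record the base DMA address on success. */
static int sketch_map_dmabuf(struct dma_buf_attachment *db_attach,
                             unsigned long size, enum dma_data_direction dir,
                             struct sg_table **out_sgt, dma_addr_t *dma_addr)
{
        struct sg_table *sgt;
        unsigned long contig_size;

        sgt = dma_buf_map_attachment(db_attach, dir);
        if (IS_ERR(sgt))
                return PTR_ERR(sgt);

        contig_size = vb2_dc_get_contiguous_size(sgt);
        if (contig_size < size) {
                dma_buf_unmap_attachment(db_attach, sgt, dir);
                return -EFAULT;
        }

        *dma_addr = sg_dma_address(sgt->sgl);
        *out_sgt = sgt;
        return 0;
}

/* Sketch: give the table back to the exporter. */
static void sketch_unmap_dmabuf(struct dma_buf_attachment *db_attach,
                                struct sg_table *sgt,
                                enum dma_data_direction dir)
{
        if (WARN_ON(!sgt))
                return;
        dma_buf_unmap_attachment(db_attach, sgt, dir);
}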