/linux-4.1.27/drivers/parisc/

  iommu-helpers.h
     21  unsigned long dma_offset = 0, dma_len = 0;  in iommu_fill_pdir() (local)
     46  BUG_ON(pdirp && (dma_len != sg_dma_len(dma_sg)));  in iommu_fill_pdir()
     50  dma_len = sg_dma_len(startsg);  in iommu_fill_pdir()
    105  unsigned long dma_offset, dma_len; /* start/len of DMA stream */  in iommu_coalesce_chunks() (local)
    119  dma_len = startsg->length;  in iommu_coalesce_chunks()
    148  if (unlikely(ALIGN(dma_len + dma_offset + startsg->length, IOVP_SIZE) >  in iommu_coalesce_chunks()
    162  dma_len += startsg->length;  in iommu_coalesce_chunks()
    170  sg_dma_len(contig_sg) = dma_len;  in iommu_coalesce_chunks()
    171  dma_len = ALIGN(dma_len + dma_offset, IOVP_SIZE);  in iommu_coalesce_chunks()
    174  | (iommu_alloc_range(ioc, dev, dma_len) << IOVP_SHIFT)  in iommu_coalesce_chunks()

/linux-4.1.27/drivers/net/wireless/brcm80211/brcmsmac/

  brcms_trace_brcmsmac_tx.h
     76  u16 dma_len),
     78  dma_len),
     85  __field(u16, dma_len)
     93  __entry->dma_len = dma_len;
     98  __entry->ampdu_frames, __entry->dma_len)

  ampdu.h
     37  u16 dma_len;  (member)

  ampdu.c
    509  session->dma_len = 0;  in brcms_c_ampdu_reset_session()
    568  session->dma_len += p->len;  in brcms_c_ampdu_add_frame()
    644  u16 dma_len = session->dma_len;  in brcms_c_ampdu_finalize() (local)
    771  dma_len = min(dma_len, f->ampdu_pld_size);  in brcms_c_ampdu_finalize()
    772  txh->PreloadSize = cpu_to_le16(dma_len);  in brcms_c_ampdu_finalize()

  dma.c
   1325  session->dma_len);  in ampdu_finalize()

/linux-4.1.27/net/rds/

  iw_rdma.c
    256  sg->dma_len = 0;
    268  WARN_ON(sg->dma_len);
    270  sg->dma_len = ib_dma_map_sg(dev, sg->list, sg->len, DMA_BIDIRECTIONAL);
    271  if (unlikely(!sg->dma_len)) {
    280  for (i = 0; i < sg->dma_len; ++i) {
    281  unsigned int dma_len = ib_sg_dma_len(dev, &sg->list[i]);  (local)
    285  sg->bytes += dma_len;
    287  end_addr = dma_addr + dma_len;
    294  if (i < sg->dma_len - 1)
    312  for (i = j = 0; i < sg->dma_len; ++i) {
    [all …]

  ib_rdma.c
    390  unsigned int dma_len = ib_sg_dma_len(dev, &scat[i]);  in rds_ib_map_fmr() (local)
    399  if ((dma_addr + dma_len) & ~PAGE_MASK) {  in rds_ib_map_fmr()
    406  len += dma_len;  in rds_ib_map_fmr()
    420  unsigned int dma_len = ib_sg_dma_len(dev, &scat[i]);  in rds_ib_map_fmr() (local)
    423  for (j = 0; j < dma_len; j += PAGE_SIZE)  in rds_ib_map_fmr()

  iw.h
     57  int dma_len;  (member)

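The iw_rdma.c hits above share one pattern: ib_dma_map_sg() returns the number of DMA segments, which is kept in dma_len and then used as the bound for walking the mapped entries with ib_sg_dma_len(). A minimal sketch of that pattern follows; the container struct and function name are illustrative stand-ins, not the real rds_iw_scatterlist code.

#include <rdma/ib_verbs.h>

struct example_sg {			/* illustrative stand-in */
	struct scatterlist *list;
	unsigned int len;		/* entries in @list */
	int dma_len;			/* entries actually mapped */
	u64 bytes;			/* total mapped bytes */
};

static int example_map_sg(struct ib_device *dev, struct example_sg *sg)
{
	int i;

	WARN_ON(sg->dma_len);		/* must not already be mapped */

	sg->dma_len = ib_dma_map_sg(dev, sg->list, sg->len, DMA_BIDIRECTIONAL);
	if (unlikely(!sg->dma_len))
		return -EBUSY;

	sg->bytes = 0;
	for (i = 0; i < sg->dma_len; ++i) {
		unsigned int dma_len = ib_sg_dma_len(dev, &sg->list[i]);

		/* the real code additionally checks that interior segments
		 * are page-aligned before building an FMR page list */
		sg->bytes += dma_len;
	}
	return 0;
}
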
/linux-4.1.27/arch/arm/mach-rpc/

  dma.c
     35  unsigned int dma_len;  (member)
     64  end = offset + idma->dma_len;  in iomd_get_next_sg()
     74  idma->dma_len -= end - offset;  in iomd_get_next_sg()
     77  if (idma->dma_len == 0) {  in iomd_get_next_sg()
     81  idma->dma_len = idma->dma.sg->length;  in iomd_get_next_sg()

/linux-4.1.27/drivers/net/ethernet/sfc/

  tx.c
    342  unsigned int dma_len;  in efx_enqueue_skb() (local)
    391  dma_len = efx_max_tx_len(efx, dma_addr);  in efx_enqueue_skb()
    392  if (likely(dma_len >= len))  in efx_enqueue_skb()
    393  dma_len = len;  in efx_enqueue_skb()
    396  buffer->len = dma_len;  in efx_enqueue_skb()
    399  len -= dma_len;  in efx_enqueue_skb()
    400  dma_addr += dma_len;  in efx_enqueue_skb()
    930  unsigned dma_len;  in efx_tx_queue_insert() (local)
    944  dma_len = efx_max_tx_len(efx, dma_addr);  in efx_tx_queue_insert()
    947  if (dma_len >= len)  in efx_tx_queue_insert()
    [all …]

  mcdi_port.c
    932  u32 dma_len = action != EFX_STATS_DISABLE ?  in efx_mcdi_mac_stats() (local)
    946  MCDI_SET_DWORD(inbuf, MAC_STATS_IN_DMA_LEN, dma_len);  in efx_mcdi_mac_stats()

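The tx.c hits show the usual splitting loop: a single DMA mapping is consumed in pieces no larger than what a hardware descriptor allows (efx_max_tx_len() in the real driver), one descriptor per piece. A sketch of that loop, with an invented descriptor type and the per-descriptor limit passed in as a parameter:

struct example_tx_desc {		/* illustrative descriptor */
	dma_addr_t addr;
	unsigned int len;
};

static unsigned int example_split_mapping(dma_addr_t dma_addr,
					  unsigned int len,
					  unsigned int max_dma_len,
					  struct example_tx_desc *desc)
{
	unsigned int count = 0;

	while (len) {
		unsigned int dma_len = min(max_dma_len, len);

		desc[count].addr = dma_addr;	/* one descriptor per piece */
		desc[count].len = dma_len;
		count++;

		dma_addr += dma_len;
		len -= dma_len;
	}
	return count;				/* descriptors consumed */
}
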
/linux-4.1.27/drivers/scsi/

  am53c974.c
    268  static u32 pci_esp_dma_length_limit(struct esp *esp, u32 dma_addr, u32 dma_len)  in pci_esp_dma_length_limit() (argument)
    280  if (dma_len > (1U << dma_limit))  in pci_esp_dma_length_limit()
    281  dma_len = (1U << dma_limit);  in pci_esp_dma_length_limit()
    287  end = base + dma_len;  in pci_esp_dma_length_limit()
    290  dma_len = end - base;  in pci_esp_dma_length_limit()
    292  return dma_len;  in pci_esp_dma_length_limit()

  mac53c94.c
    367  u32 dma_len;  in set_dma_cmds() (local)
    381  dma_len = sg_dma_len(scl);  in set_dma_cmds()
    382  if (dma_len > 0xffff)  in set_dma_cmds()
    384  total += dma_len;  in set_dma_cmds()
    385  dcmds->req_count = cpu_to_le16(dma_len);  in set_dma_cmds()

  esp_scsi.c
    519  static u32 esp_dma_length_limit(struct esp *esp, u32 dma_addr, u32 dma_len)  in esp_dma_length_limit() (argument)
    523  if (dma_len > (1U << 24))  in esp_dma_length_limit()
    524  dma_len = (1U << 24);  in esp_dma_length_limit()
    534  if (dma_len > (1U << 16))  in esp_dma_length_limit()
    535  dma_len = (1U << 16);  in esp_dma_length_limit()
    541  end = base + dma_len;  in esp_dma_length_limit()
    544  dma_len = end - base;  in esp_dma_length_limit()
    546  return dma_len;  in esp_dma_length_limit()
   1758  unsigned int dma_len = esp_cur_dma_len(ent, cmd);  in esp_process_event() (local)
   1769  dma_len = esp->ops->dma_length_limit(esp, dma_addr,  in esp_process_event()
    [all …]

  mac_esp.c
    437  static u32 mac_esp_dma_length_limit(struct esp *esp, u32 dma_addr, u32 dma_len)  in mac_esp_dma_length_limit() (argument)
    439  return dma_len > 0xFFFF ? 0xFFFF : dma_len;  in mac_esp_dma_length_limit()

  atari_NCR5380.c
    783  hostdata->dma_len = 0;  in NCR5380_init()
   1074  && !hostdata->dma_len  in NCR5380_main()
   1160  transferred = hostdata->dma_len - NCR5380_dma_residual(instance);  in NCR5380_dma_complete()
   1161  hostdata->dma_len = 0;  in NCR5380_dma_complete()
   1835  hostdata->dma_len = c;  in NCR5380_transfer_dma()
   1898  hostdata->dma_len = (p & SR_IO) ?  in NCR5380_transfer_dma()
   1916  hostdata->dma_len = (p & SR_IO) ?  in NCR5380_transfer_dma()
   2920  hostdata->dma_len = 0;  in NCR5380_bus_reset()

  NCR5380.h
    266  volatile int dma_len; /* requested length of DMA */  (member)

  mesh.c
   1281  u32 dma_len = sg_dma_len(scl);  in set_dma_cmds() (local)
   1284  if (off >= dma_len) {  in set_dma_cmds()
   1285  off -= dma_len;  in set_dma_cmds()
   1288  if (dma_len > 0xffff)  in set_dma_cmds()
   1290  dcmds->req_count = cpu_to_le16(dma_len - off);  in set_dma_cmds()
   1295  dtot += dma_len - off;  in set_dma_cmds()

  esp_scsi.h
    390  u32 dma_len);

  atari_scsi.c
    165  (atari_scsi_host->hostdata))->dma_len)

  NCR5380.c
   1748  …hostdata->dma_len = (p & SR_IO) ? NCR5380_dma_read_setup(instance, d, c) : NCR5380_dma_write_setup…  in NCR5380_transfer_dma()

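Several of the ESP hits above are dma_length_limit() hooks, whose job is to clamp a requested transfer to the controller's counter width and keep it from crossing a naturally aligned DMA window. A generic sketch of that clamping, using an illustrative 16 MB counter and window rather than any particular chip's limits:

static u32 example_dma_length_limit(u32 dma_addr, u32 dma_len)
{
	u32 base, end;

	if (dma_len > (1U << 24))		/* cap at a 24-bit transfer counter */
		dma_len = 1U << 24;

	base = dma_addr & ((1U << 24) - 1);	/* offset inside the 16 MB window */
	end = base + dma_len;
	if (end > (1U << 24))			/* would cross into the next window */
		end = 1U << 24;

	return end - base;
}
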
/linux-4.1.27/drivers/usb/host/whci/

  qset.c
    464  size_t dma_len;  in qset_add_urb_sg() (local)
    492  dma_len = dma_remaining;  in qset_add_urb_sg()
    500  if (std->len + dma_len > QTD_MAX_XFER_SIZE) {  in qset_add_urb_sg()
    501  dma_len = (QTD_MAX_XFER_SIZE / qset->max_packet)  in qset_add_urb_sg()
    505  std->len += dma_len;  in qset_add_urb_sg()
    509  ep = dma_addr + dma_len;  in qset_add_urb_sg()
    529  dma_remaining -= dma_len;  in qset_add_urb_sg()
    530  remaining -= dma_len;  in qset_add_urb_sg()

/linux-4.1.27/drivers/net/ethernet/chelsio/cxgb/

  sge.c
    165  DEFINE_DMA_UNMAP_LEN(dma_len);
    171  DEFINE_DMA_UNMAP_LEN(dma_len);
    513  dma_unmap_len(ce, dma_len),  in free_freelQ_buffers()
    627  if (likely(dma_unmap_len(ce, dma_len))) {  in free_cmdQ_buffers()
    629  dma_unmap_len(ce, dma_len),  in free_cmdQ_buffers()
    830  unsigned int dma_len = q->rx_buffer_size - q->dma_offset;  in refill_free_list() (local)
    841  mapping = pci_map_single(pdev, skb->data, dma_len,  in refill_free_list()
    847  dma_unmap_len_set(ce, dma_len, dma_len);  in refill_free_list()
    850  e->len_gen = V_CMD_LEN(dma_len) | V_CMD_GEN1(q->genbit);  in refill_free_list()
   1055  dma_unmap_len(ce, dma_len),  in get_packet()
    [all …]

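DEFINE_DMA_UNMAP_LEN() and its companions, seen here and in several of the network drivers further down, let a driver remember the mapped address and length per buffer so the completion path can hand them straight back to the unmap call (the fields compile away on architectures that do not need them). A small self-contained sketch of that bookkeeping; the structure and function names are illustrative:

#include <linux/dma-mapping.h>

struct example_buf {
	DEFINE_DMA_UNMAP_ADDR(dma_addr);
	DEFINE_DMA_UNMAP_LEN(dma_len);
};

static int example_map(struct device *dev, struct example_buf *buf,
		       void *data, unsigned int len)
{
	dma_addr_t mapping = dma_map_single(dev, data, len, DMA_TO_DEVICE);

	if (dma_mapping_error(dev, mapping))
		return -ENOMEM;

	/* record what the completion path will need for unmapping */
	dma_unmap_addr_set(buf, dma_addr, mapping);
	dma_unmap_len_set(buf, dma_len, len);
	return 0;
}

static void example_unmap(struct device *dev, struct example_buf *buf)
{
	dma_unmap_single(dev, dma_unmap_addr(buf, dma_addr),
			 dma_unmap_len(buf, dma_len), DMA_TO_DEVICE);
}
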
/linux-4.1.27/drivers/mmc/host/

  bfin_sdh.c
     69  int dma_len;  (member)
    158  host->dma_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, host->dma_dir);  in sdh_setup_data()
    169  for_each_sg(data->sg, sg, host->dma_len, i) {  in sdh_setup_data()
    183  host->dma_len * sizeof(struct dma_desc_array));  in sdh_setup_data()
    185  host->sg_cpu[host->dma_len - 1].cfg &= ~(DMAFLOW | NDSIZE);  in sdh_setup_data()
    186  host->sg_cpu[host->dma_len - 1].cfg |= DI_EN;  in sdh_setup_data()

  tifm_sd.c
    267  unsigned int dma_len, dma_blk_cnt, dma_off;  in tifm_sd_set_dma_data() (local)
    283  dma_len = sg_dma_len(&r_data->sg[host->sg_pos]) - host->block_pos;  in tifm_sd_set_dma_data()
    284  if (!dma_len) {  in tifm_sd_set_dma_data()
    289  dma_len = sg_dma_len(&r_data->sg[host->sg_pos]);  in tifm_sd_set_dma_data()
    292  if (dma_len < t_size) {  in tifm_sd_set_dma_data()
    293  dma_blk_cnt = dma_len / r_data->blksz;  in tifm_sd_set_dma_data()
    304  else if (dma_len) {  in tifm_sd_set_dma_data()

  omap_hsmmc.c
    171  unsigned int dma_len;  (member)
    197  unsigned int dma_len;  (member)
   1290  int dma_len;  in omap_hsmmc_pre_dma_transfer() (local)
   1302  dma_len = dma_map_sg(chan->device->dev, data->sg, data->sg_len,  in omap_hsmmc_pre_dma_transfer()
   1306  dma_len = host->next_data.dma_len;  in omap_hsmmc_pre_dma_transfer()
   1307  host->next_data.dma_len = 0;  in omap_hsmmc_pre_dma_transfer()
   1311  if (dma_len == 0)  in omap_hsmmc_pre_dma_transfer()
   1315  next->dma_len = dma_len;  in omap_hsmmc_pre_dma_transfer()
   1318  host->dma_len = dma_len;  in omap_hsmmc_pre_dma_transfer()

  pxamci.c
     74  unsigned int dma_len;  (member)
    212  host->dma_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len,  in pxamci_setup_data()
    215  for (i = 0; i < host->dma_len; i++) {  in pxamci_setup_data()
    233  host->sg_cpu[host->dma_len - 1].ddadr = DDADR_STOP;  in pxamci_setup_data()

  sunxi-mmc.c
    336  u32 i, dma_len;  in sunxi_mmc_map_dma() (local)
    339  dma_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len,  in sunxi_mmc_map_dma()
    341  if (dma_len == 0) {  in sunxi_mmc_map_dma()

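All of the MMC hosts above follow the same shape: dma_map_sg() returns the number of mapped segments, which is stored in a dma_len field, used as the loop bound while building one hardware descriptor per segment, and kept around for the eventual dma_unmap_sg(). A sketch of that shape with an invented descriptor layout:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

struct example_desc {			/* illustrative hardware descriptor */
	dma_addr_t addr;
	u32 len;
};

static int example_setup_data(struct device *dev, struct scatterlist *sgl,
			      unsigned int sg_len, struct example_desc *descs)
{
	struct scatterlist *sg;
	int dma_len, i;

	dma_len = dma_map_sg(dev, sgl, sg_len, DMA_TO_DEVICE);
	if (dma_len == 0)
		return -EINVAL;

	/* one descriptor per mapped segment */
	for_each_sg(sgl, sg, dma_len, i) {
		descs[i].addr = sg_dma_address(sg);
		descs[i].len = sg_dma_len(sg);
	}
	return dma_len;		/* caller passes this back to dma_unmap_sg() */
}
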
/linux-4.1.27/drivers/crypto/

  talitos.c
    731  int dma_len;  (member)
    808  if (edesc->dma_len)  in ipsec_esp_unmap()
    809  dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,  in ipsec_esp_unmap()
    863  if (edesc->dma_len)  in ipsec_esp_decrypt_swauth_done()
    982  edesc->dma_len, DMA_BIDIRECTIONAL);  in ipsec_esp()
   1032  edesc->dma_len,  in ipsec_esp()
   1075  edesc->dma_len, DMA_BIDIRECTIONAL);  in ipsec_esp()
   1127  int assoc_nents = 0, src_nents, dst_nents, alloc_len, dma_len;  in talitos_edesc_alloc() (local)
   1177  dma_len = (src_nents + dst_nents + 2 + assoc_nents) *  in talitos_edesc_alloc()
   1179  alloc_len += dma_len;  in talitos_edesc_alloc()
    [all …]

/linux-4.1.27/drivers/i2c/busses/

  i2c-imx.c
    193  unsigned int dma_len;  (member)
    351  dma->dma_len, dma->dma_data_dir);  in i2c_imx_dma_callback()
    364  dma->dma_len, dma->dma_data_dir);  in i2c_imx_dma_xfer()
    371  dma->dma_len, dma->dma_transfer_dir,  in i2c_imx_dma_xfer()
    391  dma->dma_len, dma->dma_data_dir);  in i2c_imx_dma_xfer()
    401  dma->dma_len = 0;  in i2c_imx_dma_free()
    613  dma->dma_len = msgs->len - 1;  in i2c_imx_dma_write()
    681  dma->dma_len = msgs->len - 2;  in i2c_imx_dma_read()

/linux-4.1.27/drivers/ata/

  pata_arasan_cf.c
    434  u32 xfer_cnt, sglen, dma_len, xfer_ctr;  in sg_xfer() (local)
    474  dma_len = min(xfer_cnt, FIFO_SIZE);  in sg_xfer()
    475  ret = dma_xfer(acdev, src, dest, dma_len);  in sg_xfer()
    482  src += dma_len;  in sg_xfer()
    484  dest += dma_len;  in sg_xfer()
    486  sglen -= dma_len;  in sg_xfer()
    487  xfer_cnt -= dma_len;  in sg_xfer()

/linux-4.1.27/drivers/block/rsxx/

  dma.c
    647  unsigned int dma_len,  in rsxx_queue_dma() (argument)
    663  dma->sub_page.cnt = (dma_len >> 9);  in rsxx_queue_dma()
    694  unsigned int dma_len;  in rsxx_dma_queue_bio() (local)
    734  dma_len = min(bv_len,  in rsxx_dma_queue_bio()
    739  dma_off, dma_len,  in rsxx_dma_queue_bio()
    747  addr8 += dma_len;  in rsxx_dma_queue_bio()
    748  bv_off += dma_len;  in rsxx_dma_queue_bio()
    749  bv_len -= dma_len;  in rsxx_dma_queue_bio()

/linux-4.1.27/arch/ia64/hp/common/

  sba_iommu.c
   1332  unsigned long dma_offset, dma_len; /* start/len of DMA stream */  in sba_coalesce_chunks() (local)
   1344  dma_len = vcontig_len = vcontig_end = startsg->length;  in sba_coalesce_chunks()
   1371  if (((dma_len + dma_offset + startsg->length + ~iovp_mask) & iovp_mask)  in sba_coalesce_chunks()
   1375  if (dma_len + startsg->length > max_seg_size)  in sba_coalesce_chunks()
   1388  dma_len += startsg->length;  in sba_coalesce_chunks()
   1419  dma_len += vcontig_len;  in sba_coalesce_chunks()
   1432  dma_len = (dma_len + dma_offset + ~iovp_mask) & iovp_mask;  in sba_coalesce_chunks()
   1433  ASSERT(dma_len <= DMA_CHUNK_SIZE);  in sba_coalesce_chunks()
   1434  idx = sba_alloc_range(ioc, dev, dma_len);  in sba_coalesce_chunks()

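iommu_coalesce_chunks() in the parisc code and sba_coalesce_chunks() here implement the same idea: fold successive scatterlist entries into one DMA stream as long as the rounded-up total still fits the IOVA chunk and the device's segment limit, then align the final length to the IOVA page size before allocating the range. A much-simplified sketch, with illustrative constants (EX_IOVP_SIZE, EX_CHUNK_SIZE) in place of the real ones:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

#define EX_IOVP_SIZE	4096UL			/* illustrative IOVA page size */
#define EX_CHUNK_SIZE	(256UL * 1024)		/* illustrative max DMA chunk */

static unsigned long example_coalesce(struct scatterlist *sg, int nents,
				      unsigned long max_seg_size)
{
	unsigned long dma_offset = sg->offset & (EX_IOVP_SIZE - 1);
	unsigned long dma_len = sg->length;
	int i;

	for (i = 1; i < nents; i++) {
		sg = sg_next(sg);

		if (ALIGN(dma_len + dma_offset + sg->length, EX_IOVP_SIZE) >
		    EX_CHUNK_SIZE)
			break;		/* would exceed one IOVA chunk */
		if (dma_len + sg->length > max_seg_size)
			break;		/* would exceed the device segment limit */

		dma_len += sg->length;	/* fold this entry into the stream */
	}

	/* length handed to the IOVA allocator is page-granular */
	return ALIGN(dma_len + dma_offset, EX_IOVP_SIZE);
}
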
/linux-4.1.27/drivers/infiniband/ulp/srp/

  ib_srp.c
   1260  unsigned int dma_len, u32 rkey)  in srp_map_desc() (argument)
   1266  desc->len = cpu_to_be32(dma_len);  in srp_map_desc()
   1268  state->total_len += dma_len;  in srp_map_desc()
   1287  srp_map_desc(state, 0, state->dma_len, fmr->fmr->rkey);  in srp_map_finish_fmr()
   1319  wr.wr.fast_reg.length = state->dma_len;  in srp_map_finish_fr()
   1328  srp_map_desc(state, state->base_dma_addr, state->dma_len,  in srp_map_finish_fr()
   1344  srp_map_desc(state, state->base_dma_addr, state->dma_len,  in srp_finish_mapping()
   1353  state->dma_len = 0;  in srp_finish_mapping()
   1377  unsigned int dma_len = ib_sg_dma_len(ibdev, sg);  in srp_map_sg_entry() (local)
   1381  if (!dma_len)  in srp_map_sg_entry()
    [all …]

  ib_srp.h
    292  u32 dma_len;  (member)

/linux-4.1.27/drivers/net/wireless/p54/

  p54pci.h
     62  __le32 dma_len;  (member)

  p54usb.c
    772  P54U_WRITE(NET2280_DEV_U32, &devreg->dma_len,  in p54u_upload_firmware_net2280()

/linux-4.1.27/drivers/infiniband/ulp/srpt/

  ib_srpt.c
   1093  u32 dma_len;  in srpt_map_sg_to_ib_sge() (local)
   1128  dma_len = ib_sg_dma_len(dev, &sg[0]);  in srpt_map_sg_to_ib_sge()
   1150  if (rsize >= dma_len) {  in srpt_map_sg_to_ib_sge()
   1151  tsize -= dma_len;  in srpt_map_sg_to_ib_sge()
   1152  rsize -= dma_len;  in srpt_map_sg_to_ib_sge()
   1153  raddr += dma_len;  in srpt_map_sg_to_ib_sge()
   1159  dma_len = ib_sg_dma_len(  in srpt_map_sg_to_ib_sge()
   1165  dma_len -= rsize;  in srpt_map_sg_to_ib_sge()
   1197  dma_len = ib_sg_dma_len(dev, &sg[0]);  in srpt_map_sg_to_ib_sge()
   1211  if (rsize >= dma_len) {  in srpt_map_sg_to_ib_sge()
    [all …]

/linux-4.1.27/drivers/media/platform/soc_camera/

  pxa_camera.c
    293  int i, offset, dma_len, xfer_len;  in calculate_dma_sglen() (local)
    298  dma_len = sg_dma_len(sg);  in calculate_dma_sglen()
    301  xfer_len = roundup(min(dma_len - offset, size), 8);  in calculate_dma_sglen()
    339  int dma_len = 0, xfer_len = 0;  in pxa_init_dma_channel() (local)
    362  dma_len = sg_dma_len(sg);  in pxa_init_dma_channel()
    365  xfer_len = roundup(min(dma_len - offset, size), 8);  in pxa_init_dma_channel()
    399  if (xfer_len >= dma_len) {  in pxa_init_dma_channel()
    400  *sg_first_ofs = xfer_len - dma_len;  in pxa_init_dma_channel()

/linux-4.1.27/drivers/infiniband/ulp/iser/

  iser_memory.c
    253  unsigned int dma_len;  in iser_sg_to_page_vec() (local)
    265  dma_len = ib_sg_dma_len(ibdev, sg);  in iser_sg_to_page_vec()
    266  end_addr = start_addr + dma_len;  in iser_sg_to_page_vec()
    267  total_sz += dma_len;  in iser_sg_to_page_vec()

/linux-4.1.27/drivers/net/wireless/ath/ath9k/

  init.c
    260  u32 dma_len;  in ath_descdma_setup() (local)
    263  dma_len = ndesc_skipped * desc_len;  in ath_descdma_setup()
    264  dd->dd_desc_len += dma_len;  in ath_descdma_setup()
    266  ndesc_skipped = ATH_DESC_4KB_BOUND_NUM_SKIPPED(dma_len);  in ath_descdma_setup()

/linux-4.1.27/drivers/scsi/lpfc/

  lpfc_scsi.c
   2272  uint32_t dma_len;  in lpfc_bg_setup_sgl() (local)
   2336  dma_len = sg_dma_len(sgde);  in lpfc_bg_setup_sgl()
   2346  sgl->sge_len = cpu_to_le32(dma_len);  in lpfc_bg_setup_sgl()
   2347  dma_offset += dma_len;  in lpfc_bg_setup_sgl()
   2412  uint32_t dma_len;  in lpfc_bg_setup_sgl_prot() (local)
   2553  dma_len = remainder;  in lpfc_bg_setup_sgl_prot()
   2560  dma_len = protgrp_bytes - subtotal;  in lpfc_bg_setup_sgl_prot()
   2561  split_offset += dma_len;  in lpfc_bg_setup_sgl_prot()
   2564  subtotal += dma_len;  in lpfc_bg_setup_sgl_prot()
   2572  sgl->sge_len = cpu_to_le32(dma_len);  in lpfc_bg_setup_sgl_prot()
    [all …]

/linux-4.1.27/drivers/net/ethernet/brocade/bna/

  bnad.h
    226  DEFINE_DMA_UNMAP_LEN(dma_len);

  bnad.c
    146  dma_unmap_len(&unmap->vectors[vector], dma_len),  in bnad_tx_buff_unmap()
   3068  dma_unmap_len_set(&unmap->vectors[vect_id], dma_len, size);  in bnad_start_xmit()

/linux-4.1.27/drivers/net/ethernet/amd/xgbe/

  xgbe-drv.c
   1831  skb = napi_alloc_skb(napi, rdata->rx.hdr.dma_len);  in xgbe_create_skb()
   1838  copy_len = min(rdata->rx.hdr.dma_len, copy_len);  in xgbe_create_skb()
   1998  rdata->rx.hdr.dma_len,  in xgbe_rx_poll()
   2011  rdata->rx.buf.dma_len,  in xgbe_rx_poll()
   2017  put_len, rdata->rx.buf.dma_len);  in xgbe_rx_poll()

  xgbe.h
    295  unsigned int dma_len;  (member)

  xgbe-desc.c
    302  bd->dma_len = len;  in xgbe_set_buffer_data()

/linux-4.1.27/drivers/block/

  nvme-core.c
    642  int dma_len = sg_dma_len(sg);  in nvme_setup_prps() (local)
    655  dma_len -= (page_size - offset);  in nvme_setup_prps()
    656  if (dma_len) {  in nvme_setup_prps()
    661  dma_len = sg_dma_len(sg);  in nvme_setup_prps()
    699  dma_len -= page_size;  in nvme_setup_prps()
    704  if (dma_len > 0)  in nvme_setup_prps()
    706  BUG_ON(dma_len < 0);  in nvme_setup_prps()
    709  dma_len = sg_dma_len(sg);  in nvme_setup_prps()

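nvme_setup_prps() walks the mapped scatterlist and emits one page-aligned address per page_size step, refreshing dma_len from sg_dma_len() each time it moves to the next entry. A heavily simplified sketch of that walk; it assumes every entry starts page-aligned and omits the real driver's PRP-list chaining and error handling:

#include <linux/scatterlist.h>

static int example_fill_pages(struct scatterlist *sg, int nents,
			      unsigned int page_size,
			      dma_addr_t *pages, int max_pages)
{
	int count = 0;

	while (nents--) {
		dma_addr_t dma_addr = sg_dma_address(sg);
		int dma_len = sg_dma_len(sg);

		/* consume this entry one page at a time */
		while (dma_len > 0) {
			if (count == max_pages)
				return -ENOSPC;
			pages[count++] = dma_addr;
			dma_addr += page_size;
			dma_len -= page_size;
		}
		sg = sg_next(sg);
	}
	return count;		/* number of page entries emitted */
}
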
/linux-4.1.27/drivers/net/ethernet/nvidia/

  forcedeth.c
    722  unsigned int dma_len:31;  (member)
   1827  np->put_rx_ctx->dma_len = skb_tailroom(skb);  in nv_alloc_rx()
   1868  np->put_rx_ctx->dma_len = skb_tailroom(skb);  in nv_alloc_rx_optimized()
   1958  np->tx_skb[i].dma_len = 0;  in nv_init_tx()
   1983  tx_skb->dma_len,  in nv_unmap_txskb()
   1987  tx_skb->dma_len,
   2025  np->tx_skb[i].dma_len = 0;  in nv_drain_tx()
   2240  np->put_tx_ctx->dma_len = bcnt;  in nv_start_xmit()
   2288  np->put_tx_ctx->dma_len = bcnt;
   2389  np->put_tx_ctx->dma_len = bcnt;  in nv_start_xmit_optimized()
    [all …]

/linux-4.1.27/drivers/net/ethernet/broadcom/

  bcmsysport.c
    649  dma_unmap_len(cb, dma_len),  in bcm_sysport_tx_reclaim_one()
    656  ndev->stats.tx_bytes += dma_unmap_len(cb, dma_len);  in bcm_sysport_tx_reclaim_one()
    658  dma_unmap_len(cb, dma_len), DMA_TO_DEVICE);  in bcm_sysport_tx_reclaim_one()
    995  dma_unmap_len_set(cb, dma_len, skb_len);  in bcm_sysport_xmit()

  bcmsysport.h
    627  DEFINE_DMA_UNMAP_LEN(dma_len);

/linux-4.1.27/drivers/scsi/qla2xxx/

  qla_iocb.c
    861  uint32_t dma_len; /* OUT */  (member)
    896  sgx->dma_len = (blk_sz - cumulative_partial);  in qla24xx_get_one_block_sg()
    901  sgx->dma_len = sg_len - sgx->bytes_consumed;  in qla24xx_get_one_block_sg()
    902  sgx->tot_partial += sgx->dma_len;  in qla24xx_get_one_block_sg()
    906  sgx->bytes_consumed += sgx->dma_len;  in qla24xx_get_one_block_sg()
    963  sle_dma_len = sgx.dma_len;  in qla24xx_walk_and_build_sglist_no_difb()

/linux-4.1.27/drivers/net/ethernet/broadcom/genet/

  bcmgenet.h
    480  DEFINE_DMA_UNMAP_LEN(dma_len);

  bcmgenet.c
   1056  dma_unmap_len(tx_cb_ptr, dma_len);  in __bcmgenet_tx_reclaim()
   1059  dma_unmap_len(tx_cb_ptr, dma_len),  in __bcmgenet_tx_reclaim()
   1162  dma_unmap_len_set(tx_cb_ptr, dma_len, skb->len);  in bcmgenet_xmit_single()
   1204  dma_unmap_len_set(tx_cb_ptr, dma_len, frag->size);  in bcmgenet_xmit_frag()

/linux-4.1.27/drivers/net/ethernet/sun/

  sunhme.c
   1930  u32 flags, dma_addr, dma_len;  in happy_meal_tx() (local)
   1953  dma_len = hme_read_desc32(hp, &this->tx_flags);  in happy_meal_tx()
   1955  dma_len &= TXFLAG_SIZE;  in happy_meal_tx()
   1957  dma_unmap_single(hp->dma_dev, dma_addr, dma_len, DMA_TO_DEVICE);  in happy_meal_tx()
   1959  dma_unmap_page(hp->dma_dev, dma_addr, dma_len, DMA_TO_DEVICE);  in happy_meal_tx()

  sungem.c
    654  u32 dma_len;  in gem_tx() (local)
    684  dma_len = le64_to_cpu(txd->control_word) & TXDCTRL_BUFSZ;  in gem_tx()
    686  pci_unmap_page(gp->pdev, dma_addr, dma_len, PCI_DMA_TODEVICE);  in gem_tx()

/linux-4.1.27/drivers/net/ethernet/3com/

  typhoon.c
   1509  int dma_len;  in typhoon_clean_tx() (local)
   1526  dma_len = le16_to_cpu(tx->len);  in typhoon_clean_tx()
   1527  pci_unmap_single(tp->pdev, skb_dma, dma_len,  in typhoon_clean_tx()

/linux-4.1.27/drivers/mtd/nand/

  pxa3xx_nand.c
    560  int dma_len = ALIGN(info->data_size + info->oob_size, 32);  in start_data_dma() (local)
    563  desc->dcmd = DCMD_ENDIRQEN | DCMD_WIDTH4 | DCMD_BURST32 | dma_len;  in start_data_dma()

/linux-4.1.27/drivers/block/mtip32xx/

  mtip32xx.c
   1762  unsigned int dma_len;  in fill_command_sg() (local)
   1769  dma_len = sg_dma_len(sg);  in fill_command_sg()
   1770  if (dma_len > 0x400000)  in fill_command_sg()
   1774  cpu_to_le32((dma_len-1) & 0x3FFFFF);  in fill_command_sg()

/linux-4.1.27/drivers/infiniband/hw/ocrdma/

  ocrdma_verbs.c
    811  u32 dma_len = mr->pbl_size;  in ocrdma_build_pbl_tbl() (local)
    823  va = dma_alloc_coherent(&pdev->dev, dma_len, &pa, GFP_KERNEL);  in ocrdma_build_pbl_tbl()
    829  memset(va, 0, dma_len);  in ocrdma_build_pbl_tbl()