Lines Matching refs:desc (drivers/dma/pl330.c)
358 struct dma_pl330_desc *desc; member
522 struct dma_pl330_desc *desc; member
527 return thrd->req[0].desc == NULL && thrd->req[1].desc == NULL; in _queue_empty()
532 return thrd->req[0].desc != NULL && thrd->req[1].desc != NULL; in _queue_full()
1029 struct dma_pl330_desc *desc; in _trigger() local
1040 if (thrd->req[idx].desc != NULL) { in _trigger()
1044 if (thrd->req[idx].desc != NULL) in _trigger()
1058 desc = req->desc; in _trigger()
1060 ns = desc->rqcfg.nonsecure ? 1 : 0; in _trigger()
1120 struct pl330_config *pcfg = pxs->desc->rqcfg.pcfg; in _ldst_memtomem()
1146 off += _emit_WFP(dry_run, &buf[off], SINGLE, pxs->desc->peri); in _ldst_devtomem()
1147 off += _emit_LDP(dry_run, &buf[off], SINGLE, pxs->desc->peri); in _ldst_devtomem()
1149 off += _emit_FLUSHP(dry_run, &buf[off], pxs->desc->peri); in _ldst_devtomem()
1161 off += _emit_WFP(dry_run, &buf[off], SINGLE, pxs->desc->peri); in _ldst_memtodev()
1163 off += _emit_STP(dry_run, &buf[off], SINGLE, pxs->desc->peri); in _ldst_memtodev()
1164 off += _emit_FLUSHP(dry_run, &buf[off], pxs->desc->peri); in _ldst_memtodev()
1175 switch (pxs->desc->rqtype) { in _bursts()
1275 struct pl330_xfer *x = &pxs->desc->px; in _setup_loops()
1292 struct pl330_xfer *x = &pxs->desc->px; in _setup_xfer()
1323 x = &pxs->desc->px; in _setup_req()
1376 struct dma_pl330_desc *desc) in pl330_submit_req() argument
1393 if (desc->rqtype != DMA_MEM_TO_MEM && in pl330_submit_req()
1394 desc->peri >= pl330->pcfg.num_peri) { in pl330_submit_req()
1397 __func__, __LINE__, desc->peri); in pl330_submit_req()
1410 desc->rqcfg.nonsecure = 0; in pl330_submit_req()
1412 desc->rqcfg.nonsecure = 1; in pl330_submit_req()
1414 ccr = _prepare_ccr(&desc->rqcfg); in pl330_submit_req()
1416 idx = thrd->req[0].desc == NULL ? 0 : 1; in pl330_submit_req()
1419 xs.desc = desc; in pl330_submit_req()
1435 thrd->req[idx].desc = desc; in pl330_submit_req()
1446 static void dma_pl330_rqcb(struct dma_pl330_desc *desc, enum pl330_op_err err) in dma_pl330_rqcb() argument
1451 if (!desc) in dma_pl330_rqcb()
1454 pch = desc->pchan; in dma_pl330_rqcb()
1462 desc->status = DONE; in dma_pl330_rqcb()
1509 dma_pl330_rqcb(thrd->req[1 - thrd->lstenq].desc, err); in pl330_dotask()
1510 dma_pl330_rqcb(thrd->req[thrd->lstenq].desc, err); in pl330_dotask()
1513 thrd->req[0].desc = NULL; in pl330_dotask()
1514 thrd->req[1].desc = NULL; in pl330_dotask()
1594 descdone = thrd->req[active].desc; in pl330_update()
1595 thrd->req[active].desc = NULL; in pl330_update()
1672 thrd->req[0].desc = NULL; in pl330_request_channel()
1673 thrd->req[1].desc = NULL; in pl330_request_channel()
1707 dma_pl330_rqcb(thrd->req[1 - thrd->lstenq].desc, PL330_ERR_ABORT); in pl330_release_channel()
1708 dma_pl330_rqcb(thrd->req[thrd->lstenq].desc, PL330_ERR_ABORT); in pl330_release_channel()
1771 thrd->req[0].desc = NULL; in _reset_thread()
1777 thrd->req[1].desc = NULL; in _reset_thread()
1944 struct dma_pl330_desc *desc; in fill_queue() local
1947 list_for_each_entry(desc, &pch->work_list, node) { in fill_queue()
1950 if (desc->status == BUSY) in fill_queue()
1953 ret = pl330_submit_req(pch->thread, desc); in fill_queue()
1955 desc->status = BUSY; in fill_queue()
1961 desc->status = DONE; in fill_queue()
1963 __func__, __LINE__, desc->txd.cookie); in fill_queue()
1972 struct dma_pl330_desc *desc, *_dt; in pl330_tasklet() local
1979 list_for_each_entry_safe(desc, _dt, &pch->work_list, node) in pl330_tasklet()
1980 if (desc->status == DONE) { in pl330_tasklet()
1982 dma_cookie_complete(&desc->txd); in pl330_tasklet()
1983 list_move_tail(&desc->node, &pch->completed_list); in pl330_tasklet()
2005 desc = list_first_entry(&pch->completed_list, in pl330_tasklet()
2008 callback = desc->txd.callback; in pl330_tasklet()
2009 callback_param = desc->txd.callback_param; in pl330_tasklet()
2012 desc->status = PREP; in pl330_tasklet()
2013 list_move_tail(&desc->node, &pch->work_list); in pl330_tasklet()
2021 desc->status = FREE; in pl330_tasklet()
2022 list_move_tail(&desc->node, &pch->dmac->desc_pool); in pl330_tasklet()
2025 dma_descriptor_unmap(&desc->txd); in pl330_tasklet()
2125 struct dma_pl330_desc *desc; in pl330_terminate_all() local
2136 pch->thread->req[0].desc = NULL; in pl330_terminate_all()
2137 pch->thread->req[1].desc = NULL; in pl330_terminate_all()
2141 list_for_each_entry(desc, &pch->submitted_list, node) { in pl330_terminate_all()
2142 desc->status = FREE; in pl330_terminate_all()
2143 dma_cookie_complete(&desc->txd); in pl330_terminate_all()
2146 list_for_each_entry(desc, &pch->work_list , node) { in pl330_terminate_all()
2147 desc->status = FREE; in pl330_terminate_all()
2148 dma_cookie_complete(&desc->txd); in pl330_terminate_all()
2210 struct dma_pl330_desc *desc) in pl330_get_current_xferred_count() argument
2219 if (desc->rqcfg.src_inc) { in pl330_get_current_xferred_count()
2221 addr = desc->px.src_addr; in pl330_get_current_xferred_count()
2224 addr = desc->px.dst_addr; in pl330_get_current_xferred_count()
2237 struct dma_pl330_desc *desc, *running = NULL; in pl330_tx_status() local
2252 running = pch->thread->req[pch->thread->req_running].desc; in pl330_tx_status()
2255 list_for_each_entry(desc, &pch->work_list, node) { in pl330_tx_status()
2256 if (desc->status == DONE) in pl330_tx_status()
2257 transferred = desc->bytes_requested; in pl330_tx_status()
2258 else if (running && desc == running) in pl330_tx_status()
2260 pl330_get_current_xferred_count(pch, desc); in pl330_tx_status()
2263 residual += desc->bytes_requested - transferred; in pl330_tx_status()
2264 if (desc->txd.cookie == cookie) { in pl330_tx_status()
2265 switch (desc->status) { in pl330_tx_status()
2278 if (desc->last) in pl330_tx_status()
2317 struct dma_pl330_desc *desc, *last = to_desc(tx); in pl330_tx_submit() local
2326 desc = list_entry(last->node.next, struct dma_pl330_desc, node); in pl330_tx_submit()
2328 desc->txd.callback = last->txd.callback; in pl330_tx_submit()
2329 desc->txd.callback_param = last->txd.callback_param; in pl330_tx_submit()
2331 desc->last = false; in pl330_tx_submit()
2333 dma_cookie_assign(&desc->txd); in pl330_tx_submit()
2335 list_move_tail(&desc->node, &pch->submitted_list); in pl330_tx_submit()
2346 static inline void _init_desc(struct dma_pl330_desc *desc) in _init_desc() argument
2348 desc->rqcfg.swap = SWAP_NO; in _init_desc()
2349 desc->rqcfg.scctl = CCTRL0; in _init_desc()
2350 desc->rqcfg.dcctl = CCTRL0; in _init_desc()
2351 desc->txd.tx_submit = pl330_tx_submit; in _init_desc()
2353 INIT_LIST_HEAD(&desc->node); in _init_desc()
2359 struct dma_pl330_desc *desc; in add_desc() local
2363 desc = kcalloc(count, sizeof(*desc), flg); in add_desc()
2364 if (!desc) in add_desc()
2370 _init_desc(&desc[i]); in add_desc()
2371 list_add_tail(&desc[i].node, &pl330->desc_pool); in add_desc()
2381 struct dma_pl330_desc *desc = NULL; in pluck_desc() local
2387 desc = list_entry(pl330->desc_pool.next, in pluck_desc()
2390 list_del_init(&desc->node); in pluck_desc()
2392 desc->status = PREP; in pluck_desc()
2393 desc->txd.callback = NULL; in pluck_desc()
2398 return desc; in pluck_desc()
2405 struct dma_pl330_desc *desc; in pl330_get_desc() local
2408 desc = pluck_desc(pl330); in pl330_get_desc()
2411 if (!desc) { in pl330_get_desc()
2416 desc = pluck_desc(pl330); in pl330_get_desc()
2417 if (!desc) { in pl330_get_desc()
2425 desc->pchan = pch; in pl330_get_desc()
2426 desc->txd.cookie = 0; in pl330_get_desc()
2427 async_tx_ack(&desc->txd); in pl330_get_desc()
2429 desc->peri = peri_id ? pch->chan.chan_id : 0; in pl330_get_desc()
2430 desc->rqcfg.pcfg = &pch->dmac->pcfg; in pl330_get_desc()
2432 dma_async_tx_descriptor_init(&desc->txd, &pch->chan); in pl330_get_desc()
2434 return desc; in pl330_get_desc()
2449 struct dma_pl330_desc *desc = pl330_get_desc(pch); in __pl330_prep_dma_memcpy() local
2451 if (!desc) { in __pl330_prep_dma_memcpy()
2467 fill_px(&desc->px, dst, src, len); in __pl330_prep_dma_memcpy()
2469 return desc; in __pl330_prep_dma_memcpy()
2473 static inline int get_burst_len(struct dma_pl330_desc *desc, size_t len) in get_burst_len() argument
2475 struct dma_pl330_chan *pch = desc->pchan; in get_burst_len()
2481 burst_len >>= desc->rqcfg.brst_size; in get_burst_len()
2488 if (!(len % (burst_len << desc->rqcfg.brst_size))) in get_burst_len()
2501 struct dma_pl330_desc *desc = NULL, *first = NULL; in pl330_prep_dma_cyclic() local
2518 desc = pl330_get_desc(pch); in pl330_prep_dma_cyclic()
2519 if (!desc) { in pl330_prep_dma_cyclic()
2529 desc = list_entry(first->node.next, in pl330_prep_dma_cyclic()
2531 list_move_tail(&desc->node, &pl330->desc_pool); in pl330_prep_dma_cyclic()
2543 desc->rqcfg.src_inc = 1; in pl330_prep_dma_cyclic()
2544 desc->rqcfg.dst_inc = 0; in pl330_prep_dma_cyclic()
2549 desc->rqcfg.src_inc = 0; in pl330_prep_dma_cyclic()
2550 desc->rqcfg.dst_inc = 1; in pl330_prep_dma_cyclic()
2558 desc->rqtype = direction; in pl330_prep_dma_cyclic()
2559 desc->rqcfg.brst_size = pch->burst_sz; in pl330_prep_dma_cyclic()
2560 desc->rqcfg.brst_len = 1; in pl330_prep_dma_cyclic()
2561 desc->bytes_requested = period_len; in pl330_prep_dma_cyclic()
2562 fill_px(&desc->px, dst, src, period_len); in pl330_prep_dma_cyclic()
2565 first = desc; in pl330_prep_dma_cyclic()
2567 list_add_tail(&desc->node, &first->node); in pl330_prep_dma_cyclic()
2572 if (!desc) in pl330_prep_dma_cyclic()
2576 desc->txd.flags = flags; in pl330_prep_dma_cyclic()
2578 return &desc->txd; in pl330_prep_dma_cyclic()
2585 struct dma_pl330_desc *desc; in pl330_prep_dma_memcpy() local
2593 desc = __pl330_prep_dma_memcpy(pch, dst, src, len); in pl330_prep_dma_memcpy()
2594 if (!desc) in pl330_prep_dma_memcpy()
2597 desc->rqcfg.src_inc = 1; in pl330_prep_dma_memcpy()
2598 desc->rqcfg.dst_inc = 1; in pl330_prep_dma_memcpy()
2599 desc->rqtype = DMA_MEM_TO_MEM; in pl330_prep_dma_memcpy()
2612 desc->rqcfg.brst_size = 0; in pl330_prep_dma_memcpy()
2613 while (burst != (1 << desc->rqcfg.brst_size)) in pl330_prep_dma_memcpy()
2614 desc->rqcfg.brst_size++; in pl330_prep_dma_memcpy()
2620 if (desc->rqcfg.brst_size * 8 < pl330->pcfg.data_bus_width) in pl330_prep_dma_memcpy()
2621 desc->rqcfg.brst_len = 1; in pl330_prep_dma_memcpy()
2623 desc->rqcfg.brst_len = get_burst_len(desc, len); in pl330_prep_dma_memcpy()
2624 desc->bytes_requested = len; in pl330_prep_dma_memcpy()
2626 desc->txd.flags = flags; in pl330_prep_dma_memcpy()
2628 return &desc->txd; in pl330_prep_dma_memcpy()
2635 struct dma_pl330_desc *desc; in __pl330_giveback_desc() local
2643 desc = list_entry(first->node.next, in __pl330_giveback_desc()
2645 list_move_tail(&desc->node, &pl330->desc_pool); in __pl330_giveback_desc()
2658 struct dma_pl330_desc *first, *desc = NULL; in pl330_prep_slave_sg() local
2673 desc = pl330_get_desc(pch); in pl330_prep_slave_sg()
2674 if (!desc) { in pl330_prep_slave_sg()
2686 first = desc; in pl330_prep_slave_sg()
2688 list_add_tail(&desc->node, &first->node); in pl330_prep_slave_sg()
2691 desc->rqcfg.src_inc = 1; in pl330_prep_slave_sg()
2692 desc->rqcfg.dst_inc = 0; in pl330_prep_slave_sg()
2693 fill_px(&desc->px, in pl330_prep_slave_sg()
2696 desc->rqcfg.src_inc = 0; in pl330_prep_slave_sg()
2697 desc->rqcfg.dst_inc = 1; in pl330_prep_slave_sg()
2698 fill_px(&desc->px, in pl330_prep_slave_sg()
2702 desc->rqcfg.brst_size = pch->burst_sz; in pl330_prep_slave_sg()
2703 desc->rqcfg.brst_len = 1; in pl330_prep_slave_sg()
2704 desc->rqtype = direction; in pl330_prep_slave_sg()
2705 desc->bytes_requested = sg_dma_len(sg); in pl330_prep_slave_sg()
2709 desc->txd.flags = flg; in pl330_prep_slave_sg()
2710 return &desc->txd; in pl330_prep_slave_sg()