Lines matching refs: desc (drivers/dma/pl330.c)
358 struct dma_pl330_desc *desc; member
522 struct dma_pl330_desc *desc; member
527 return thrd->req[0].desc == NULL && thrd->req[1].desc == NULL; in _queue_empty()
532 return thrd->req[0].desc != NULL && thrd->req[1].desc != NULL; in _queue_full()
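
For orientation: each DMA thread in this driver keeps a two-slot request queue, and a slot counts as free when its desc pointer is NULL, which is exactly what _queue_empty() and _queue_full() above test. A minimal standalone sketch of that check, with the structures pared down to the fields involved (the real struct _pl330_req and struct pl330_thread carry more members):

#include <stdbool.h>
#include <stddef.h>

struct dma_pl330_desc;                    /* opaque in this sketch */

/* Pared-down stand-ins for the driver's structures (illustrative only). */
struct _pl330_req {
        struct dma_pl330_desc *desc;      /* NULL means the slot is free */
};

struct pl330_thread {
        struct _pl330_req req[2];         /* at most two queued requests */
};

static bool queue_empty(struct pl330_thread *thrd)
{
        return thrd->req[0].desc == NULL && thrd->req[1].desc == NULL;
}

static bool queue_full(struct pl330_thread *thrd)
{
        return thrd->req[0].desc != NULL && thrd->req[1].desc != NULL;
}
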
1029 struct dma_pl330_desc *desc; in _trigger() local
1040 if (thrd->req[idx].desc != NULL) { in _trigger()
1044 if (thrd->req[idx].desc != NULL) in _trigger()
1058 desc = req->desc; in _trigger()
1060 ns = desc->rqcfg.nonsecure ? 1 : 0; in _trigger()
1120 struct pl330_config *pcfg = pxs->desc->rqcfg.pcfg; in _ldst_memtomem()
1146 off += _emit_WFP(dry_run, &buf[off], SINGLE, pxs->desc->peri); in _ldst_devtomem()
1147 off += _emit_LDP(dry_run, &buf[off], SINGLE, pxs->desc->peri); in _ldst_devtomem()
1149 off += _emit_FLUSHP(dry_run, &buf[off], pxs->desc->peri); in _ldst_devtomem()
1161 off += _emit_WFP(dry_run, &buf[off], SINGLE, pxs->desc->peri); in _ldst_memtodev()
1163 off += _emit_STP(dry_run, &buf[off], SINGLE, pxs->desc->peri); in _ldst_memtodev()
1164 off += _emit_FLUSHP(dry_run, &buf[off], pxs->desc->peri); in _ldst_memtodev()
1175 switch (pxs->desc->rqtype) { in _bursts()
1278 struct pl330_xfer *x = &pxs->desc->px; in _setup_loops()
1295 struct pl330_xfer *x = &pxs->desc->px; in _setup_xfer()
1326 x = &pxs->desc->px; in _setup_req()
1379 struct dma_pl330_desc *desc) in pl330_submit_req() argument
1396 if (desc->rqtype != DMA_MEM_TO_MEM && in pl330_submit_req()
1397 desc->peri >= pl330->pcfg.num_peri) { in pl330_submit_req()
1400 __func__, __LINE__, desc->peri); in pl330_submit_req()
1413 desc->rqcfg.nonsecure = 0; in pl330_submit_req()
1415 desc->rqcfg.nonsecure = 1; in pl330_submit_req()
1417 ccr = _prepare_ccr(&desc->rqcfg); in pl330_submit_req()
1419 idx = thrd->req[0].desc == NULL ? 0 : 1; in pl330_submit_req()
1422 xs.desc = desc; in pl330_submit_req()
1438 thrd->req[idx].desc = desc; in pl330_submit_req()
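
pl330_submit_req() above builds on the same two-slot queue: it bails out when both slots are occupied and otherwise drops the descriptor into whichever slot is still NULL. A hedged sketch of just that selection step, reusing the simplified types from the previous snippet (peripheral validation, CCR preparation and microcode generation are omitted):

/* Sketch of the slot-selection step in pl330_submit_req(); the real
 * function also validates the peripheral id, prepares the CCR and
 * generates the DMA microcode before storing the descriptor. */
static int submit_req_sketch(struct pl330_thread *thrd,
                             struct dma_pl330_desc *desc)
{
        int idx;

        if (queue_full(thrd))
                return -1;                         /* driver returns -EAGAIN */

        idx = thrd->req[0].desc == NULL ? 0 : 1;   /* prefer slot 0 */
        thrd->req[idx].desc = desc;

        return 0;
}
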
1449 static void dma_pl330_rqcb(struct dma_pl330_desc *desc, enum pl330_op_err err) in dma_pl330_rqcb() argument
1454 if (!desc) in dma_pl330_rqcb()
1457 pch = desc->pchan; in dma_pl330_rqcb()
1465 desc->status = DONE; in dma_pl330_rqcb()
1512 dma_pl330_rqcb(thrd->req[1 - thrd->lstenq].desc, err); in pl330_dotask()
1513 dma_pl330_rqcb(thrd->req[thrd->lstenq].desc, err); in pl330_dotask()
1516 thrd->req[0].desc = NULL; in pl330_dotask()
1517 thrd->req[1].desc = NULL; in pl330_dotask()
1597 descdone = thrd->req[active].desc; in pl330_update()
1598 thrd->req[active].desc = NULL; in pl330_update()
1675 thrd->req[0].desc = NULL; in pl330_request_channel()
1676 thrd->req[1].desc = NULL; in pl330_request_channel()
1710 dma_pl330_rqcb(thrd->req[1 - thrd->lstenq].desc, PL330_ERR_ABORT); in pl330_release_channel()
1711 dma_pl330_rqcb(thrd->req[thrd->lstenq].desc, PL330_ERR_ABORT); in pl330_release_channel()
1774 thrd->req[0].desc = NULL; in _reset_thread()
1780 thrd->req[1].desc = NULL; in _reset_thread()
1947 struct dma_pl330_desc *desc; in fill_queue() local
1950 list_for_each_entry(desc, &pch->work_list, node) { in fill_queue()
1953 if (desc->status == BUSY) in fill_queue()
1956 ret = pl330_submit_req(pch->thread, desc); in fill_queue()
1958 desc->status = BUSY; in fill_queue()
1964 desc->status = DONE; in fill_queue()
1966 __func__, __LINE__, desc->txd.cookie); in fill_queue()
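
fill_queue() above drives the descriptor state machine: anything on the channel's work_list that is not already BUSY is handed to pl330_submit_req(), and on success it is flipped to BUSY. A standalone sketch of that progression, with the kernel list reduced to a plain array and the error handling collapsed (the real code distinguishes a full queue, where it stops, from a rejected request, which it marks DONE and reports):

#include <stddef.h>

/* Mirrors the driver's descriptor states. */
enum desc_status { FREE, PREP, BUSY, DONE };

struct desc_sketch {
        enum desc_status status;
};

/* Stand-in for pl330_submit_req(): 0 on acceptance, negative otherwise. */
extern int push_to_thread(struct desc_sketch *d);

static void fill_queue_sketch(struct desc_sketch *work, size_t n)
{
        for (size_t i = 0; i < n; i++) {
                if (work[i].status == BUSY)     /* already on the hardware */
                        continue;

                if (push_to_thread(&work[i]) == 0)
                        work[i].status = BUSY;  /* accepted by the thread */
                else
                        break;                  /* queue full: retry later */
        }
}
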
1975 struct dma_pl330_desc *desc, *_dt; in pl330_tasklet() local
1982 list_for_each_entry_safe(desc, _dt, &pch->work_list, node) in pl330_tasklet()
1983 if (desc->status == DONE) { in pl330_tasklet()
1985 dma_cookie_complete(&desc->txd); in pl330_tasklet()
1986 list_move_tail(&desc->node, &pch->completed_list); in pl330_tasklet()
2008 desc = list_first_entry(&pch->completed_list, in pl330_tasklet()
2011 callback = desc->txd.callback; in pl330_tasklet()
2012 callback_param = desc->txd.callback_param; in pl330_tasklet()
2015 desc->status = PREP; in pl330_tasklet()
2016 list_move_tail(&desc->node, &pch->work_list); in pl330_tasklet()
2024 desc->status = FREE; in pl330_tasklet()
2025 list_move_tail(&desc->node, &pch->dmac->desc_pool); in pl330_tasklet()
2028 dma_descriptor_unmap(&desc->txd); in pl330_tasklet()
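
The tasklet then recycles what fill_queue() produced: DONE descriptors are cookie-completed and moved to completed_list, and from there a cyclic descriptor goes back onto work_list in the PREP state while a one-shot descriptor is marked FREE and returned to the DMAC's pool. A hedged sketch of that branch (locking, dma_descriptor_unmap() and the exact callback ordering are simplified away):

#include <stdbool.h>

/* Reuses enum desc_status and struct desc_sketch from the previous sketch,
 * adding the two destinations a completed descriptor can take. */
enum completed_dest { RECYCLE_TO_WORK_LIST, RETURN_TO_POOL };

static enum completed_dest retire_desc(struct desc_sketch *d, bool cyclic)
{
        if (cyclic) {
                d->status = PREP;               /* reuse for the next period */
                return RECYCLE_TO_WORK_LIST;
        }

        d->status = FREE;                       /* back to the DMAC pool */
        return RETURN_TO_POOL;
}
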
2128 struct dma_pl330_desc *desc; in pl330_terminate_all() local
2139 pch->thread->req[0].desc = NULL; in pl330_terminate_all()
2140 pch->thread->req[1].desc = NULL; in pl330_terminate_all()
2144 list_for_each_entry(desc, &pch->submitted_list, node) { in pl330_terminate_all()
2145 desc->status = FREE; in pl330_terminate_all()
2146 dma_cookie_complete(&desc->txd); in pl330_terminate_all()
2149 list_for_each_entry(desc, &pch->work_list , node) { in pl330_terminate_all()
2150 desc->status = FREE; in pl330_terminate_all()
2151 dma_cookie_complete(&desc->txd); in pl330_terminate_all()
2213 struct dma_pl330_desc *desc) in pl330_get_current_xferred_count() argument
2222 if (desc->rqcfg.src_inc) { in pl330_get_current_xferred_count()
2224 addr = desc->px.src_addr; in pl330_get_current_xferred_count()
2227 addr = desc->px.dst_addr; in pl330_get_current_xferred_count()
2240 struct dma_pl330_desc *desc, *running = NULL; in pl330_tx_status() local
2255 running = pch->thread->req[pch->thread->req_running].desc; in pl330_tx_status()
2258 list_for_each_entry(desc, &pch->work_list, node) { in pl330_tx_status()
2259 if (desc->status == DONE) in pl330_tx_status()
2260 transferred = desc->bytes_requested; in pl330_tx_status()
2261 else if (running && desc == running) in pl330_tx_status()
2263 pl330_get_current_xferred_count(pch, desc); in pl330_tx_status()
2266 residual += desc->bytes_requested - transferred; in pl330_tx_status()
2267 if (desc->txd.cookie == cookie) { in pl330_tx_status()
2268 switch (desc->status) { in pl330_tx_status()
2281 if (desc->last) in pl330_tx_status()
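
pl330_tx_status() computes the residue by walking work_list and summing, per descriptor, bytes_requested minus what has already moved: the full amount for DONE descriptors, a register-derived count for the one in flight, and zero for everything still queued behind it. A standalone sketch of that accumulation (the driver's loop additionally stops at the descriptor matching the queried cookie and at the last descriptor of a chain; the running-transfer readout is abstracted into a callback):

#include <stddef.h>

struct residue_desc {
        size_t bytes_requested;
        int    done;          /* non-zero once the hardware finished it */
        int    running;       /* non-zero for the descriptor in flight */
};

/* Stand-in for pl330_get_current_xferred_count(): bytes moved so far. */
typedef size_t (*xferred_fn)(const struct residue_desc *d);

static size_t residue_sketch(const struct residue_desc *descs, size_t n,
                             xferred_fn xferred)
{
        size_t residual = 0;

        for (size_t i = 0; i < n; i++) {
                size_t transferred;

                if (descs[i].done)
                        transferred = descs[i].bytes_requested;
                else if (descs[i].running)
                        transferred = xferred(&descs[i]);
                else
                        transferred = 0;

                residual += descs[i].bytes_requested - transferred;
        }

        return residual;
}
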
2320 struct dma_pl330_desc *desc, *last = to_desc(tx); in pl330_tx_submit() local
2329 desc = list_entry(last->node.next, struct dma_pl330_desc, node); in pl330_tx_submit()
2331 desc->txd.callback = last->txd.callback; in pl330_tx_submit()
2332 desc->txd.callback_param = last->txd.callback_param; in pl330_tx_submit()
2334 desc->last = false; in pl330_tx_submit()
2336 dma_cookie_assign(&desc->txd); in pl330_tx_submit()
2338 list_move_tail(&desc->node, &pch->submitted_list); in pl330_tx_submit()
2349 static inline void _init_desc(struct dma_pl330_desc *desc) in _init_desc() argument
2351 desc->rqcfg.swap = SWAP_NO; in _init_desc()
2352 desc->rqcfg.scctl = CCTRL0; in _init_desc()
2353 desc->rqcfg.dcctl = CCTRL0; in _init_desc()
2354 desc->txd.tx_submit = pl330_tx_submit; in _init_desc()
2356 INIT_LIST_HEAD(&desc->node); in _init_desc()
2362 struct dma_pl330_desc *desc; in add_desc() local
2366 desc = kcalloc(count, sizeof(*desc), flg); in add_desc()
2367 if (!desc) in add_desc()
2373 _init_desc(&desc[i]); in add_desc()
2374 list_add_tail(&desc[i].node, &pl330->desc_pool); in add_desc()
2384 struct dma_pl330_desc *desc = NULL; in pluck_desc() local
2390 desc = list_entry(pl330->desc_pool.next, in pluck_desc()
2393 list_del_init(&desc->node); in pluck_desc()
2395 desc->status = PREP; in pluck_desc()
2396 desc->txd.callback = NULL; in pluck_desc()
2401 return desc; in pluck_desc()
2408 struct dma_pl330_desc *desc; in pl330_get_desc() local
2411 desc = pluck_desc(pl330); in pl330_get_desc()
2414 if (!desc) { in pl330_get_desc()
2419 desc = pluck_desc(pl330); in pl330_get_desc()
2420 if (!desc) { in pl330_get_desc()
2428 desc->pchan = pch; in pl330_get_desc()
2429 desc->txd.cookie = 0; in pl330_get_desc()
2430 async_tx_ack(&desc->txd); in pl330_get_desc()
2432 desc->peri = peri_id ? pch->chan.chan_id : 0; in pl330_get_desc()
2433 desc->rqcfg.pcfg = &pch->dmac->pcfg; in pl330_get_desc()
2435 dma_async_tx_descriptor_init(&desc->txd, &pch->chan); in pl330_get_desc()
2437 return desc; in pl330_get_desc()
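
pl330_get_desc() above is a thin wrapper over a preallocated pool: pluck_desc() takes the first free descriptor off desc_pool and marks it PREP, and if the pool is empty the driver allocates one more (add_desc() with a count of one) and plucks again. A minimal sketch of that pool discipline using a fixed array instead of a kernel list, with allocation on exhaustion reduced to returning NULL:

#include <stddef.h>

/* Reuses enum desc_status { FREE, PREP, BUSY, DONE } from the earlier sketch. */
struct pool_desc {
        enum desc_status status;
};

/* Stand-in for pluck_desc(): hand out the first FREE descriptor, marked PREP. */
static struct pool_desc *pluck_desc_sketch(struct pool_desc *pool, size_t n)
{
        for (size_t i = 0; i < n; i++) {
                if (pool[i].status == FREE) {
                        pool[i].status = PREP;
                        return &pool[i];
                }
        }
        return NULL;    /* the driver would kcalloc() one more and retry */
}
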
2452 struct dma_pl330_desc *desc = pl330_get_desc(pch); in __pl330_prep_dma_memcpy() local
2454 if (!desc) { in __pl330_prep_dma_memcpy()
2470 fill_px(&desc->px, dst, src, len); in __pl330_prep_dma_memcpy()
2472 return desc; in __pl330_prep_dma_memcpy()
2476 static inline int get_burst_len(struct dma_pl330_desc *desc, size_t len) in get_burst_len() argument
2478 struct dma_pl330_chan *pch = desc->pchan; in get_burst_len()
2484 burst_len >>= desc->rqcfg.brst_size; in get_burst_len()
2491 if (!(len % (burst_len << desc->rqcfg.brst_size))) in get_burst_len()
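
get_burst_len() sizes a memcpy burst from the DMAC's capabilities: it starts from the data bus width times the per-channel share of the data buffer, expresses that in units of the chosen burst beat size, caps it at the hardware maximum of 16, and then shrinks it until a whole number of bursts covers the length (the modulo test quoted above). A standalone sketch of that computation, with the fields flattened out of pcfg/rqcfg into one parameter struct:

#include <stddef.h>

struct burst_params {
        unsigned int data_bus_width;   /* bits, from pcfg */
        unsigned int data_buf_dep;     /* MFIFO depth, from pcfg */
        unsigned int num_chan;         /* channels sharing the MFIFO */
        unsigned int brst_size;        /* log2 of the burst beat size */
};

static int get_burst_len_sketch(const struct burst_params *p, size_t len)
{
        int burst_len;

        burst_len = p->data_bus_width / 8;              /* bytes per beat */
        burst_len *= p->data_buf_dep / p->num_chan;     /* per-channel share */
        burst_len >>= p->brst_size;                     /* in burst beats */

        if (burst_len > 16)                             /* hardware limit */
                burst_len = 16;

        /* Shrink until a whole number of bursts covers the length. */
        while (burst_len > 1) {
                if (!(len % (size_t)(burst_len << p->brst_size)))
                        break;
                burst_len--;
        }

        return burst_len;
}
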
2504 struct dma_pl330_desc *desc = NULL, *first = NULL; in pl330_prep_dma_cyclic() local
2521 desc = pl330_get_desc(pch); in pl330_prep_dma_cyclic()
2522 if (!desc) { in pl330_prep_dma_cyclic()
2532 desc = list_entry(first->node.next, in pl330_prep_dma_cyclic()
2534 list_move_tail(&desc->node, &pl330->desc_pool); in pl330_prep_dma_cyclic()
2546 desc->rqcfg.src_inc = 1; in pl330_prep_dma_cyclic()
2547 desc->rqcfg.dst_inc = 0; in pl330_prep_dma_cyclic()
2552 desc->rqcfg.src_inc = 0; in pl330_prep_dma_cyclic()
2553 desc->rqcfg.dst_inc = 1; in pl330_prep_dma_cyclic()
2561 desc->rqtype = direction; in pl330_prep_dma_cyclic()
2562 desc->rqcfg.brst_size = pch->burst_sz; in pl330_prep_dma_cyclic()
2563 desc->rqcfg.brst_len = 1; in pl330_prep_dma_cyclic()
2564 desc->bytes_requested = period_len; in pl330_prep_dma_cyclic()
2565 fill_px(&desc->px, dst, src, period_len); in pl330_prep_dma_cyclic()
2568 first = desc; in pl330_prep_dma_cyclic()
2570 list_add_tail(&desc->node, &first->node); in pl330_prep_dma_cyclic()
2575 if (!desc) in pl330_prep_dma_cyclic()
2579 desc->txd.flags = flags; in pl330_prep_dma_cyclic()
2581 return &desc->txd; in pl330_prep_dma_cyclic()
2588 struct dma_pl330_desc *desc; in pl330_prep_dma_memcpy() local
2598 desc = __pl330_prep_dma_memcpy(pch, dst, src, len); in pl330_prep_dma_memcpy()
2599 if (!desc) in pl330_prep_dma_memcpy()
2602 desc->rqcfg.src_inc = 1; in pl330_prep_dma_memcpy()
2603 desc->rqcfg.dst_inc = 1; in pl330_prep_dma_memcpy()
2604 desc->rqtype = DMA_MEM_TO_MEM; in pl330_prep_dma_memcpy()
2617 desc->rqcfg.brst_size = 0; in pl330_prep_dma_memcpy()
2618 while (burst != (1 << desc->rqcfg.brst_size)) in pl330_prep_dma_memcpy()
2619 desc->rqcfg.brst_size++; in pl330_prep_dma_memcpy()
2625 if (desc->rqcfg.brst_size * 8 < pl330->pcfg.data_bus_width) in pl330_prep_dma_memcpy()
2626 desc->rqcfg.brst_len = 1; in pl330_prep_dma_memcpy()
2628 desc->rqcfg.brst_len = get_burst_len(desc, len); in pl330_prep_dma_memcpy()
2629 desc->bytes_requested = len; in pl330_prep_dma_memcpy()
2631 desc->txd.flags = flags; in pl330_prep_dma_memcpy()
2633 return &desc->txd; in pl330_prep_dma_memcpy()
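
For memcpy the burst size itself is derived from the bus width and reduced until it divides the source address, destination address and length alike; rqcfg.brst_size is then stored as the log2 of that byte count, which is what the while loop in pl330_prep_dma_memcpy() above computes. A hedged sketch of that selection:

/* Sketch of the burst-size selection in pl330_prep_dma_memcpy(): pick the
 * widest burst that keeps src, dst and len aligned, then store its log2. */
static unsigned int memcpy_brst_size_sketch(unsigned long src,
                                            unsigned long dst,
                                            unsigned long len,
                                            unsigned int data_bus_width)
{
        unsigned int burst = data_bus_width / 8;   /* start at bus width */
        unsigned int brst_size = 0;

        /* Halve the burst until it divides every memcpy parameter. */
        while ((src | dst | len) & (burst - 1))
                burst /= 2;

        while (burst != (1u << brst_size))         /* brst_size = log2(burst) */
                brst_size++;

        return brst_size;
}
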
2640 struct dma_pl330_desc *desc; in __pl330_giveback_desc() local
2648 desc = list_entry(first->node.next, in __pl330_giveback_desc()
2650 list_move_tail(&desc->node, &pl330->desc_pool); in __pl330_giveback_desc()
2663 struct dma_pl330_desc *first, *desc = NULL; in pl330_prep_slave_sg() local
2678 desc = pl330_get_desc(pch); in pl330_prep_slave_sg()
2679 if (!desc) { in pl330_prep_slave_sg()
2691 first = desc; in pl330_prep_slave_sg()
2693 list_add_tail(&desc->node, &first->node); in pl330_prep_slave_sg()
2696 desc->rqcfg.src_inc = 1; in pl330_prep_slave_sg()
2697 desc->rqcfg.dst_inc = 0; in pl330_prep_slave_sg()
2698 fill_px(&desc->px, in pl330_prep_slave_sg()
2701 desc->rqcfg.src_inc = 0; in pl330_prep_slave_sg()
2702 desc->rqcfg.dst_inc = 1; in pl330_prep_slave_sg()
2703 fill_px(&desc->px, in pl330_prep_slave_sg()
2707 desc->rqcfg.brst_size = pch->burst_sz; in pl330_prep_slave_sg()
2708 desc->rqcfg.brst_len = 1; in pl330_prep_slave_sg()
2709 desc->rqtype = direction; in pl330_prep_slave_sg()
2710 desc->bytes_requested = sg_dma_len(sg); in pl330_prep_slave_sg()
2714 desc->txd.flags = flg; in pl330_prep_slave_sg()
2715 return &desc->txd; in pl330_prep_slave_sg()
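
The slave-sg path sets the increment flags from the transfer direction before filling each px descriptor: the memory side of the transfer increments while the peripheral FIFO address stays fixed. A minimal sketch of that direction handling, with the dmaengine direction enum reduced to the two cases handled here:

#include <stdbool.h>

enum xfer_direction { MEM_TO_DEV, DEV_TO_MEM };   /* reduced for the sketch */

struct inc_flags {
        bool src_inc;
        bool dst_inc;
};

/* Mirrors the per-sg setup in pl330_prep_slave_sg(). */
static struct inc_flags slave_sg_inc_sketch(enum xfer_direction dir)
{
        struct inc_flags f;

        if (dir == MEM_TO_DEV) {
                f.src_inc = true;     /* walk through the memory buffer */
                f.dst_inc = false;    /* device FIFO address is fixed */
        } else {
                f.src_inc = false;    /* device FIFO address is fixed */
                f.dst_inc = true;     /* walk through the memory buffer */
        }

        return f;
}
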