/linux/drivers/dma/
dma-jz4780.c

| Line | Code | Context |
|------|------|---------|
| 119 | `struct virt_dma_desc vdesc;` | member |
| 170 | `struct virt_dma_desc *vdesc)` | to_jz4780_dma_desc() argument |
| 172 | `return container_of(vdesc, struct jz4780_dma_desc, vdesc);` | to_jz4780_dma_desc() |
| 481 | `struct virt_dma_desc *vdesc;` | jz4780_dma_begin() local |
| 487 | `if (!vdesc)` | jz4780_dma_begin() |
| 490 | `list_del(&vdesc->node);` | jz4780_dma_begin() |
| 637 | `struct virt_dma_desc *vdesc;` | jz4780_dma_tx_status() local |
| 649 | `if (vdesc) {` | jz4780_dma_tx_status() |
| 652 | `to_jz4780_dma_desc(vdesc), 0);` | jz4780_dma_tx_status() |
| 659 | `if (vdesc && jzchan->desc && vdesc == &jzchan->desc->vdesc` | jz4780_dma_tx_status() |

[all …]
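Nearly every drivers/dma hit in this list follows the convention that dma-jz4780.c's lines 119, 170 and 172 illustrate: the driver-private descriptor embeds a `struct virt_dma_desc`, and a one-line `container_of()` helper recovers the container from the generic descriptor the virt-dma core hands back. A minimal sketch of that shape; the `my_dma_*` names are illustrative, not taken from any file above:

```c
#include "virt-dma.h"	/* drivers/dma/virt-dma.h */

/* Driver-private descriptor: the generic virt_dma_desc is embedded so
 * the virt-dma core can queue it on the channel's descriptor lists. */
struct my_dma_desc {
	struct virt_dma_desc vdesc;
	size_t len;		/* driver-specific state */
	bool cyclic;
};

/* Driver-private channel wrapping the generic virt_dma_chan. */
struct my_dma_chan {
	struct virt_dma_chan vc;
	struct my_dma_desc *desc;	/* descriptor currently on the hardware */
};

/* Recover the container from the generic descriptor. */
static inline struct my_dma_desc *to_my_dma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct my_dma_desc, vdesc);
}
```

The later sketches in this document reuse these illustrative types.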
|
dma-axi-dmac.c

| Line | Code | Context |
|------|------|---------|
| 109 | `struct virt_dma_desc vdesc;` | member |
| 163 | `return container_of(vdesc, struct axi_dmac_desc, vdesc);` | to_axi_dmac_desc() |
| 206 | `struct virt_dma_desc *vdesc;` | axi_dmac_start_transfer() local |
| 219 | `vdesc = vchan_next_desc(&chan->vchan);` | axi_dmac_start_transfer() |
| 220 | `if (!vdesc)` | axi_dmac_start_transfer() |
| 223 | `desc = to_axi_dmac_desc(vdesc);` | axi_dmac_start_transfer() |
| 278 | `struct axi_dmac_desc, vdesc.node);` | axi_dmac_active_desc() |
| 390 | `vchan_cyclic_callback(&active->vdesc);` | axi_dmac_transfer_done() |
| 397 | `list_del(&active->vdesc.node);` | axi_dmac_transfer_done() |
| 398 | `vchan_cookie_complete(&active->vdesc);` | axi_dmac_transfer_done() |

[all …]
|
idma64.c

| Line | Code | Context |
|------|------|---------|
| 115 | `struct virt_dma_desc *vdesc;` | idma64_start_transfer() local |
| 118 | `vdesc = vchan_next_desc(&idma64c->vchan);` | idma64_start_transfer() |
| 119 | `if (!vdesc) {` | idma64_start_transfer() |
| 124 | `list_del(&vdesc->node);` | idma64_start_transfer() |
| 125 | `idma64c->desc = to_idma64_desc(vdesc);` | idma64_start_transfer() |
| 151 | `vchan_cookie_complete(&desc->vdesc);` | idma64_chan_irq() |
| 226 | `idma64_desc_free(idma64c, to_idma64_desc(vdesc));` | idma64_vdesc_free() |
| 369 | `struct virt_dma_desc *vdesc;` | idma64_tx_status() local |
| 384 | `} else if (vdesc) {` | idma64_tx_status() |
| 385 | `bytes = to_idma64_desc(vdesc)->length;` | idma64_tx_status() |

[all …]
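idma64_start_transfer() (lines 115-125) is the canonical start path shared by most files here: fetch the head of the issued list with `vchan_next_desc()`, bail out if the channel is idle, unlink the descriptor, and downcast it to program the hardware. A hedged sketch reusing the illustrative `my_dma` types from the first example; per the virt-dma contract it runs with the channel lock held:

```c
/* Called with c->vc.lock held. */
static void my_dma_start_transfer(struct my_dma_chan *c)
{
	struct virt_dma_desc *vdesc;

	/* Head of the issued list, or NULL if nothing is queued. */
	vdesc = vchan_next_desc(&c->vc);
	if (!vdesc) {
		c->desc = NULL;
		return;
	}

	/* Take ownership: off the issued list, onto the hardware. */
	list_del(&vdesc->node);
	c->desc = to_my_dma_desc(vdesc);

	/* ... write c->desc into the controller's registers ... */
}
```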
|
fsl-edma-common.c

| Line | Code | Context |
|------|------|---------|
| 147 | `void fsl_edma_free_desc(struct virt_dma_desc *vdesc)` | fsl_edma_free_desc() argument |
| 152 | `fsl_desc = to_fsl_edma_desc(vdesc);` | fsl_edma_free_desc() |
| 271 | `struct virt_dma_desc *vdesc, bool in_progress)` | fsl_edma_desc_residue() argument |
| 317 | `struct virt_dma_desc *vdesc;` | fsl_edma_tx_status() local |
| 332 | `fsl_edma_desc_residue(fsl_chan, vdesc, true);` | fsl_edma_tx_status() |
| 333 | `else if (vdesc)` | fsl_edma_tx_status() |
| 335 | `fsl_edma_desc_residue(fsl_chan, vdesc, false);` | fsl_edma_tx_status() |
| 622 | `struct virt_dma_desc *vdesc;` | fsl_edma_xfer_desc() local |
| 626 | `vdesc = vchan_next_desc(&fsl_chan->vchan);` | fsl_edma_xfer_desc() |
| 627 | `if (!vdesc)` | fsl_edma_xfer_desc() |

[all …]
|
stm32-dma.c

| Line | Code | Context |
|------|------|---------|
| 192 | `struct virt_dma_desc vdesc;` | member |
| 234 | `return container_of(vdesc, struct stm32_dma_desc, vdesc);` | to_stm32_dma_desc() |
| 543 | `struct virt_dma_desc *vdesc;` | stm32_dma_start_transfer() local |
| 554 | `vdesc = vchan_next_desc(&chan->vchan);` | stm32_dma_start_transfer() |
| 555 | `if (!vdesc)` | stm32_dma_start_transfer() |
| 558 | `list_del(&vdesc->node);` | stm32_dma_start_transfer() |
| 560 | `chan->desc = to_stm32_dma_desc(vdesc);` | stm32_dma_start_transfer() |
| 1185 | `struct virt_dma_desc *vdesc;` | stm32_dma_tx_status() local |
| 1199 | `else if (vdesc)` | stm32_dma_tx_status() |
| 1201 | `to_stm32_dma_desc(vdesc), 0);` | stm32_dma_tx_status() |

[all …]
|
st_fdma.c

| Line | Code | Context |
|------|------|---------|
| 29 | `return container_of(vd, struct st_fdma_desc, vdesc);` | to_st_fdma_desc() |
| 76 | `struct virt_dma_desc *vdesc;` | st_fdma_xfer_desc() local |
| 79 | `vdesc = vchan_next_desc(&fchan->vchan);` | st_fdma_xfer_desc() |
| 80 | `if (!vdesc)` | st_fdma_xfer_desc() |
| 83 | `fchan->fdesc = to_st_fdma_desc(vdesc);` | st_fdma_xfer_desc() |
| 144 | `list_del(&fchan->fdesc->vdesc.node);` | st_fdma_irq_handler() |
| 145 | `vchan_cookie_complete(&fchan->fdesc->vdesc);` | st_fdma_irq_handler() |
| 149 | `vchan_cyclic_callback(&fchan->fdesc->vdesc);` | st_fdma_irq_handler() |
| 225 | `static void st_fdma_free_desc(struct virt_dma_desc *vdesc)` | st_fdma_free_desc() argument |
| 230 | `fdesc = to_st_fdma_desc(vdesc);` | st_fdma_free_desc() |

[all …]
|
idma64.h

| Line | Code | Context |
|------|------|---------|
| 114 | `struct virt_dma_desc vdesc;` | member |
| 122 | `static inline struct idma64_desc *to_idma64_desc(struct virt_dma_desc *vdesc)` | to_idma64_desc() argument |
| 124 | `return container_of(vdesc, struct idma64_desc, vdesc);` | to_idma64_desc() |
|
stm32-mdma.c

| Line | Code | Context |
|------|------|---------|
| 251 | `struct virt_dma_desc vdesc;` | member |
| 297 | `return container_of(vdesc, struct stm32_mdma_desc, vdesc);` | to_stm32_mdma_desc() |
| 1118 | `struct virt_dma_desc *vdesc;` | stm32_mdma_start_transfer() local |
| 1123 | `vdesc = vchan_next_desc(&chan->vchan);` | stm32_mdma_start_transfer() |
| 1124 | `if (!vdesc) {` | stm32_mdma_start_transfer() |
| 1129 | `list_del(&vdesc->node);` | stm32_mdma_start_transfer() |
| 1131 | `chan->desc = to_stm32_mdma_desc(vdesc);` | stm32_mdma_start_transfer() |
| 1310 | `struct virt_dma_desc *vdesc;` | stm32_mdma_tx_status() local |
| 1325 | `else if (vdesc)` | stm32_mdma_tx_status() |
| 1327 | `to_stm32_mdma_desc(vdesc), 0);` | stm32_mdma_tx_status() |

[all …]
|
fsl-qdma.c

| Line | Code | Context |
|------|------|---------|
| 177 | `struct virt_dma_desc vdesc;` | member |
| 204 | `struct virt_dma_desc vdesc;` | member |
| 711 | `fsl_comp->vdesc.tx_result.result =` | fsl_qdma_queue_transfer_complete() |
| 715 | `fsl_comp->vdesc.tx_result.result =` | fsl_qdma_queue_transfer_complete() |
| 721 | `fsl_comp->vdesc.tx_result.result =` | fsl_qdma_queue_transfer_complete() |
| 730 | `vchan_cookie_complete(&fsl_comp->vdesc);` | fsl_qdma_queue_transfer_complete() |
| 991 | `struct virt_dma_desc *vdesc;` | fsl_qdma_enqueue_desc() local |
| 1000 | `if (!vdesc)` | fsl_qdma_enqueue_desc() |
| 1002 | `list_del(&vdesc->node);` | fsl_qdma_enqueue_desc() |
| 1003 | `fsl_comp = to_fsl_qdma_comp(vdesc);` | fsl_qdma_enqueue_desc() |

[all …]
|
fsl-edma-common.h

| Line | Code | Context |
|------|------|---------|
| 133 | `struct virt_dma_desc vdesc;` | member |
| 222 | `return container_of(vd, struct fsl_edma_desc, vdesc);` | to_fsl_edma_desc() |
| 228 | `void fsl_edma_free_desc(struct virt_dma_desc *vdesc);` | |
|
mcf-edma.c

| Line | Code | Context |
|------|------|---------|
| 46 | `list_del(&mcf_chan->edesc->vdesc.node);` | mcf_edma_tx_handler() |
| 47 | `vchan_cookie_complete(&mcf_chan->edesc->vdesc);` | mcf_edma_tx_handler() |
| 52 | `vchan_cyclic_callback(&mcf_chan->edesc->vdesc);` | mcf_edma_tx_handler() |
|
fsl-edma.c

| Line | Code | Context |
|------|------|---------|
| 57 | `list_del(&fsl_chan->edesc->vdesc.node);` | fsl_edma_tx_handler() |
| 58 | `vchan_cookie_complete(&fsl_chan->edesc->vdesc);` | fsl_edma_tx_handler() |
| 63 | `vchan_cyclic_callback(&fsl_chan->edesc->vdesc);` | fsl_edma_tx_handler() |
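mcf_edma_tx_handler() and fsl_edma_tx_handler() above make the completion split easy to see: a finished one-shot descriptor is unlinked and retired through `vchan_cookie_complete()`, while a cyclic descriptor stays on the hardware and only fires its period callback through `vchan_cyclic_callback()`. A sketch of that interrupt-side logic with the same illustrative types (my_dma_start_transfer() already did the `list_del()`, so none is needed here):

```c
#include <linux/interrupt.h>

static irqreturn_t my_dma_irq(int irq, void *dev_id)
{
	struct my_dma_chan *c = dev_id;

	spin_lock(&c->vc.lock);
	if (c->desc) {
		if (c->desc->cyclic) {
			/* Cyclic: the descriptor stays active; just signal
			 * the client that one period completed. */
			vchan_cyclic_callback(&c->desc->vdesc);
		} else {
			/* One-shot: retire the cookie, schedule the client
			 * callback, and kick the next issued descriptor. */
			vchan_cookie_complete(&c->desc->vdesc);
			my_dma_start_transfer(c);
		}
	}
	spin_unlock(&c->vc.lock);

	return IRQ_HANDLED;
}
```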
|
/linux/fs/nilfs2/
ioctl.c

| Line | Code | Context |
|------|------|---------|
| 541 | `struct nilfs_vdesc *vdesc,` | nilfs_ioctl_move_inode_block() argument |
| 547 | `if (vdesc->vd_flags == 0)` | nilfs_ioctl_move_inode_block() |
| 549 | `inode, vdesc->vd_offset, vdesc->vd_blocknr,` | nilfs_ioctl_move_inode_block() |
| 550 | `vdesc->vd_vblocknr, &bh);` | nilfs_ioctl_move_inode_block() |
| 553 | `inode, vdesc->vd_blocknr, vdesc->vd_vblocknr, &bh);` | nilfs_ioctl_move_inode_block() |
| 571 | `(unsigned long long)vdesc->vd_ino,` | nilfs_ioctl_move_inode_block() |
| 602 | `struct nilfs_vdesc *vdesc;` | nilfs_ioctl_move_blocks() local |
| 610 | `ino = vdesc->vd_ino;` | nilfs_ioctl_move_blocks() |
| 611 | `cno = vdesc->vd_cno;` | nilfs_ioctl_move_blocks() |
| 635 | `vdesc++;` | nilfs_ioctl_move_blocks() |

[all …]
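This `vdesc` is unrelated to the DMA ones: `struct nilfs_vdesc` (include/uapi/linux/nilfs2_api.h) describes one virtual block for the NILFS2 garbage-collection ioctl, and nilfs_ioctl_move_blocks() walks a user-supplied array of them, as lines 602-635 show. A sketch of that walk under stated assumptions — `move_one_block()` is a hypothetical stand-in for nilfs_ioctl_move_inode_block(), and the inode lookup by (vd_ino, vd_cno) is elided:

```c
#include <linux/fs.h>
#include <linux/nilfs2_api.h>	/* struct nilfs_vdesc */

/* Hypothetical helper standing in for nilfs_ioctl_move_inode_block(). */
static int move_one_block(struct inode *inode, struct nilfs_vdesc *vdesc)
{
	/* Submit the block for GC relocation; cf. lines 541-571. */
	return 0;
}

static int move_blocks(struct inode *inode, struct nilfs_vdesc *vdesc,
		       size_t nmembs)
{
	size_t i;
	int ret;

	for (i = 0; i < nmembs; i++, vdesc++) {
		/* vd_flags == 0 marks a file-data block (located via
		 * vd_offset and vd_blocknr); nonzero marks a B-tree node
		 * block (located via vd_blocknr and vd_vblocknr alone) --
		 * see lines 547-553 above. */
		ret = move_one_block(inode, vdesc);
		if (ret < 0)
			return ret;
	}
	return 0;
}
```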
|
/linux/drivers/dma/hsu/
hsu.c

| Line | Code | Context |
|------|------|---------|
| 110 | `struct virt_dma_desc *vdesc;` | hsu_dma_start_transfer() local |
| 113 | `vdesc = vchan_next_desc(&hsuc->vchan);` | hsu_dma_start_transfer() |
| 114 | `if (!vdesc) {` | hsu_dma_start_transfer() |
| 119 | `list_del(&vdesc->node);` | hsu_dma_start_transfer() |
| 120 | `hsuc->desc = to_hsu_dma_desc(vdesc);` | hsu_dma_start_transfer() |
| 224 | `vchan_cookie_complete(&desc->vdesc);` | hsu_dma_do_irq() |
| 322 | `struct virt_dma_desc *vdesc;` | hsu_dma_tx_status() local |
| 332 | `vdesc = vchan_find_desc(&hsuc->vchan, cookie);` | hsu_dma_tx_status() |
| 337 | `} else if (vdesc) {` | hsu_dma_tx_status() |
| 338 | `bytes = to_hsu_dma_desc(vdesc)->length;` | hsu_dma_tx_status() |

[all …]
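hsu_dma_tx_status() (lines 322-338) demonstrates the residue-reporting idiom: `dma_cookie_status()` answers the already-complete case, and if the cookie is still pending, `vchan_find_desc()` locates the not-yet-started descriptor so its full length can be reported. A sketch under the same illustrative types; `my_dma_desc_residue()` is a hypothetical stand-in for whatever hardware counter read the driver does:

```c
#include "dmaengine.h"	/* private drivers/dma/dmaengine.h:
			 * dma_cookie_status(), dma_set_residue() */

/* Hypothetical hardware readback of bytes left in the running transfer. */
static size_t my_dma_desc_residue(struct my_dma_chan *c)
{
	return c->desc ? c->desc->len : 0;	/* placeholder */
}

static enum dma_status my_dma_tx_status(struct dma_chan *chan,
					dma_cookie_t cookie,
					struct dma_tx_state *state)
{
	struct my_dma_chan *c = container_of(chan, struct my_dma_chan, vc.chan);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	size_t bytes = 0;

	status = dma_cookie_status(chan, cookie, state);
	if (status == DMA_COMPLETE)
		return status;

	spin_lock_irqsave(&c->vc.lock, flags);
	vdesc = vchan_find_desc(&c->vc, cookie);
	if (c->desc && cookie == c->desc->vdesc.tx.cookie)
		bytes = my_dma_desc_residue(c);		/* in flight */
	else if (vdesc)
		bytes = to_my_dma_desc(vdesc)->len;	/* not started yet */
	spin_unlock_irqrestore(&c->vc.lock, flags);

	dma_set_residue(state, bytes);
	return status;
}
```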
|
hsu.h

| Line | Code | Context |
|------|------|---------|
| 68 | `struct virt_dma_desc vdesc;` | member |
| 77 | `static inline struct hsu_dma_desc *to_hsu_dma_desc(struct virt_dma_desc *vdesc)` | to_hsu_dma_desc() argument |
| 79 | `return container_of(vdesc, struct hsu_dma_desc, vdesc);` | to_hsu_dma_desc() |
|
/linux/drivers/staging/ralink-gdma/
ralink-gdma.c

| Line | Code | Context |
|------|------|---------|
| 96 | `struct virt_dma_desc vdesc;` | member |
| 151 | `return container_of(vdesc, struct gdma_dma_desc, vdesc);` | to_gdma_dma_desc() |
| 414 | `struct virt_dma_desc *vdesc;` | gdma_next_desc() local |
| 416 | `vdesc = vchan_next_desc(&chan->vchan);` | gdma_next_desc() |
| 417 | `if (!vdesc) {` | gdma_next_desc() |
| 421 | `chan->desc = to_gdma_dma_desc(vdesc);` | gdma_next_desc() |
| 439 | `vchan_cyclic_callback(&desc->vdesc);` | gdma_dma_chan_irq() |
| 446 | `list_del(&desc->vdesc.node);` | gdma_dma_chan_irq() |
| 659 | `struct virt_dma_desc *vdesc;` | gdma_dma_tx_status() local |
| 683 | `if (vdesc)` | gdma_dma_tx_status() |

[all …]
|
/linux/drivers/staging/mt7621-dma/
hsdma-mt7621.c

| Line | Code | Context |
|------|------|---------|
| 138 | `struct virt_dma_desc vdesc;` | member |
| 178 | `return container_of(vdesc, struct mtk_hsdma_desc, vdesc);` | to_mtk_hsdma_desc() |
| 385 | `struct virt_dma_desc *vdesc;` | gdma_next_desc() local |
| 387 | `vdesc = vchan_next_desc(&chan->vchan);` | gdma_next_desc() |
| 388 | `if (!vdesc) {` | gdma_next_desc() |
| 392 | `chan->desc = to_mtk_hsdma_desc(vdesc);` | gdma_next_desc() |
| 409 | `list_del(&desc->vdesc.node);` | mtk_hsdma_chan_done() |
| 410 | `vchan_cookie_complete(&desc->vdesc);` | mtk_hsdma_chan_done() |
| 478 | `return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);` | mtk_hsdma_prep_dma_memcpy() |
| 493 | `static void mtk_hsdma_desc_free(struct virt_dma_desc *vdesc)` | mtk_hsdma_desc_free() argument |

[all …]
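mtk_hsdma_prep_dma_memcpy() (line 478) shows the submission half of the pattern: the prep callback allocates a driver descriptor, fills it in, and hands its embedded `virt_dma_desc` to `vchan_tx_prep()`, which initializes the `dma_async_tx_descriptor` and parks it on the allocated list until the client submits it. A sketch with the same illustrative types:

```c
#include <linux/slab.h>

static struct dma_async_tx_descriptor *
my_dma_prep_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
		   size_t len, unsigned long flags)
{
	struct my_dma_chan *c = container_of(chan, struct my_dma_chan, vc.chan);
	struct my_dma_desc *desc;

	desc = kzalloc(sizeof(*desc), GFP_NOWAIT);	/* prep may run atomically */
	if (!desc)
		return NULL;

	desc->len = len;
	/* ... fill in hardware descriptor(s) for src -> dst ... */

	/* Wrap the embedded virt_dma_desc; cookie assignment and
	 * submission bookkeeping are handled by the virt-dma core. */
	return vchan_tx_prep(&c->vc, &desc->vdesc, flags);
}
```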
|
/linux/drivers/dma/xilinx/
xilinx_dpdma.c

| Line | Code | Context |
|------|------|---------|
| 198 | `struct virt_dma_desc vdesc;` | member |
| 658 | `if (!vdesc)` | xilinx_dpdma_chan_free_tx_desc() |
| 661 | `desc = to_dpdma_tx_desc(vdesc);` | xilinx_dpdma_chan_free_tx_desc() |
| 829 | `struct virt_dma_desc *vdesc;` | xilinx_dpdma_chan_queue_transfer() local |
| 845 | `vdesc = vchan_next_desc(&chan->vchan);` | xilinx_dpdma_chan_queue_transfer() |
| 846 | `if (!vdesc)` | xilinx_dpdma_chan_queue_transfer() |
| 849 | `desc = to_dpdma_tx_desc(vdesc);` | xilinx_dpdma_chan_queue_transfer() |
| 851 | `list_del(&desc->vdesc.node);` | xilinx_dpdma_chan_queue_transfer() |
| 1052 | `vchan_cyclic_callback(&active->vdesc);` | xilinx_dpdma_chan_done_irq() |
| 1177 | `list_add_tail(&active->vdesc.node,` | xilinx_dpdma_chan_handle_err() |

[all …]
|
/linux/drivers/sh/intc/
virq.c

| Line | Code | Context |
|------|------|---------|
| 122 | `struct irq_desc *vdesc = irq_to_desc(entry->irq);` | intc_virq_handler() local |
| 124 | `if (vdesc) {` | intc_virq_handler() |
| 125 | `handle = (unsigned long)irq_desc_get_handler_data(vdesc);` | intc_virq_handler() |
| 128 | `generic_handle_irq_desc(vdesc);` | intc_virq_handler() |
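Here `vdesc` is not a DMA descriptor at all: intc_virq_handler() demultiplexes a shared SH INTC interrupt by looking up each sub-IRQ's `irq_desc` and re-dispatching it. A minimal sketch of that lookup-and-dispatch step; the surrounding iteration over registered entries and the status-bit test encoded in `handle` are elided:

```c
#include <linux/irq.h>
#include <linux/irqdesc.h>

/* Re-dispatch one virtual IRQ, as the inner loop of
 * intc_virq_handler() does for each registered entry. */
static void dispatch_virq(unsigned int irq)
{
	struct irq_desc *vdesc = irq_to_desc(irq);
	unsigned long handle;

	if (!vdesc)
		return;

	/* The INTC enable/address handle is stashed as handler data. */
	handle = (unsigned long)irq_desc_get_handler_data(vdesc);
	/* ... test the hardware status bit described by 'handle' ... */
	generic_handle_irq_desc(vdesc);
}
```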
|
/linux/drivers/dma/fsl-dpaa2-qdma/
dpaa2-qdma.c

| Line | Code | Context |
|------|------|---------|
| 26 | `return container_of(vd, struct dpaa2_qdma_comp, vdesc);` | to_fsl_qdma_comp() |
| 267 | `return vchan_tx_prep(&dpaa2_chan->vchan, &dpaa2_comp->vdesc, flags);` | dpaa2_qdma_prep_memcpy() |
| 274 | `struct virt_dma_desc *vdesc;` | dpaa2_qdma_issue_pending() local |
| 282 | `vdesc = vchan_next_desc(&dpaa2_chan->vchan);` | dpaa2_qdma_issue_pending() |
| 283 | `if (!vdesc)` | dpaa2_qdma_issue_pending() |
| 285 | `dpaa2_comp = to_fsl_qdma_comp(vdesc);` | dpaa2_qdma_issue_pending() |
| 289 | `list_del(&vdesc->node);` | dpaa2_qdma_issue_pending() |
| 441 | `dpaa2_comp->vdesc);` | dpaa2_qdma_fqdan_cb() |
| 619 | `static void dpaa2_qdma_free_desc(struct virt_dma_desc *vdesc)` | dpaa2_qdma_free_desc() argument |
| 625 | `dpaa2_comp = to_fsl_qdma_comp(vdesc);` | dpaa2_qdma_free_desc() |
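dpaa2_qdma_issue_pending() (lines 274-289) is the standard issue_pending shape: under the channel lock, `vchan_issue_pending()` moves submitted descriptors onto the issued list, and if the hardware is idle the driver immediately starts the head descriptor. A sketch with the illustrative types from above:

```c
static void my_dma_issue_pending(struct dma_chan *chan)
{
	struct my_dma_chan *c = container_of(chan, struct my_dma_chan, vc.chan);
	unsigned long flags;

	spin_lock_irqsave(&c->vc.lock, flags);
	/* Move submitted descriptors to the issued list; returns true
	 * if there is now work queued on the channel. */
	if (vchan_issue_pending(&c->vc) && !c->desc)
		my_dma_start_transfer(c);
	spin_unlock_irqrestore(&c->vc.lock, flags);
}
```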
|
dpaa2-qdma.h

| Line | Code | Context |
|------|------|---------|
| 72 | `struct virt_dma_desc vdesc;` | member |
| 95 | `struct virt_dma_desc vdesc;` | member |
|
/linux/drivers/dma/sf-pdma/
sf-pdma.c

| Line | Code | Context |
|------|------|---------|
| 49 | `return container_of(vd, struct sf_pdma_desc, vdesc);` | to_sf_pdma_desc() |
| 111 | `desc->async_tx = vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);` | sf_pdma_prep_dma_memcpy() |
| 177 | `tx = &chan->desc->vdesc.tx;` | sf_pdma_desc_residue() |
| 277 | `static void sf_pdma_free_desc(struct virt_dma_desc *vdesc)` | sf_pdma_free_desc() argument |
| 281 | `desc = to_sf_pdma_desc(vdesc);` | sf_pdma_free_desc() |
| 299 | `list_del(&chan->desc->vdesc.node);` | sf_pdma_donebh_tasklet() |
| 300 | `vchan_cookie_complete(&chan->desc->vdesc);` | sf_pdma_donebh_tasklet() |
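sf_pdma_free_desc() (line 277) is the channel's `desc_free` hook: the virt-dma core invokes it whenever a `virt_dma_desc` is retired, and the driver downcasts to free the container it allocated in prep. The hook is wired up once at channel setup; a sketch with the illustrative types:

```c
static void my_dma_desc_free(struct virt_dma_desc *vdesc)
{
	/* Free the driver container that embeds this descriptor
	 * (allocated with kzalloc() in my_dma_prep_memcpy()). */
	kfree(to_my_dma_desc(vdesc));
}

static void my_dma_chan_init(struct my_dma_chan *c, struct dma_device *dd)
{
	c->vc.desc_free = my_dma_desc_free;	/* retire hook used by the core */
	vchan_init(&c->vc, dd);
}
```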
|
/linux/drivers/dma/ti/
edma.c

| Line | Code | Context |
|------|------|---------|
| 179 | `struct virt_dma_desc vdesc;` | member |
| 771 | `kfree(container_of(vdesc, struct edma_desc, vdesc));` | edma_desc_free() |
| 778 | `struct virt_dma_desc *vdesc;` | edma_execute() local |
| 785 | `vdesc = vchan_next_desc(&echan->vchan);` | edma_execute() |
| 786 | `if (!vdesc)` | edma_execute() |
| 788 | `list_del(&vdesc->node);` | edma_execute() |
| 789 | `echan->edesc = to_edma_desc(&vdesc->tx);` | edma_execute() |
| 887 | `vchan_terminate_vdesc(&echan->edesc->vdesc);` | edma_terminate_all() |
| 1519 | `vchan_cyclic_callback(&edesc->vdesc);` | edma_completion_handler() |
| 1525 | `vchan_cookie_complete(&edesc->vdesc);` | edma_completion_handler() |

[all …]
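edma_terminate_all() (line 887) shows the teardown rule: the descriptor currently on the hardware must not be freed from the terminate path (its completion callback may still be running), so it is handed to `vchan_terminate_vdesc()` and reaped later via `vchan_synchronize()`, while everything still queued is collected and freed in bulk. A sketch:

```c
static int my_dma_terminate_all(struct dma_chan *chan)
{
	struct my_dma_chan *c = container_of(chan, struct my_dma_chan, vc.chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&c->vc.lock, flags);
	/* ... stop the hardware channel ... */
	if (c->desc) {
		/* In-flight descriptor: defer freeing to the terminated
		 * list, reaped later by vchan_synchronize(). */
		vchan_terminate_vdesc(&c->desc->vdesc);
		c->desc = NULL;
	}
	/* Collect everything still allocated/submitted/issued. */
	vchan_get_all_descriptors(&c->vc, &head);
	spin_unlock_irqrestore(&c->vc.lock, flags);

	vchan_dma_desc_free_list(&c->vc, &head);
	return 0;
}
```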
|
/linux/drivers/dma/lgm/
lgm-dma.c

| Line | Code | Context |
|------|------|---------|
| 270 | `struct virt_dma_desc vdesc;` | member |
| 302 | `return container_of(vdesc, struct dw2_desc_sw, vdesc);` | to_lgm_dma_desc() |
| 688 | `tx = &ds->vdesc.tx;` | ldma_chan_desc_cfg() |
| 967 | `struct dw2_desc_sw *ds = to_lgm_dma_desc(vdesc);` | dma_free_desc_resource() |
| 1023 | `struct virt_dma_desc *vdesc;` | ldma_issue_pending() local |
| 1026 | `vdesc = vchan_next_desc(&c->vchan);` | ldma_issue_pending() |
| 1027 | `if (!vdesc) {` | ldma_issue_pending() |
| 1032 | `list_del(&vdesc->node);` | ldma_issue_pending() |
| 1033 | `c->ds = to_lgm_dma_desc(vdesc);` | ldma_issue_pending() |
| 1053 | `dma_free_desc_resource(&c->ds->vdesc);` | ldma_synchronize() |

[all …]
|
/linux/drivers/dma/dw-axi-dmac/
dw-axi-dmac-platform.c

| Line | Code | Context |
|------|------|---------|
| 296 | `static void vchan_desc_put(struct virt_dma_desc *vdesc)` | vchan_desc_put() argument |
| 298 | `axi_desc_put(vd_to_axi_desc(vdesc));` | vchan_desc_put() |
| 306 | `struct virt_dma_desc *vdesc;` | dma_chan_tx_status() local |
| 321 | `vdesc = vchan_find_desc(&chan->vc, cookie);` | dma_chan_tx_status() |
| 322 | `if (vdesc) {` | dma_chan_tx_status() |
| 323 | `length = vd_to_axi_desc(vdesc)->length;` | dma_chan_tx_status() |
| 324 | `completed_blocks = vd_to_axi_desc(vdesc)->completed_blocks;` | dma_chan_tx_status() |
| 325 | `len = vd_to_axi_desc(vdesc)->hw_desc[0].len;` | dma_chan_tx_status() |
| 329 | `bytes = vd_to_axi_desc(vdesc)->length;` | dma_chan_tx_status() |
|