Lines Matching refs:hwdesc — cross-reference hits for the hwdesc symbol in the TI K3 UDMA driver (drivers/dma/ti/k3-udma.c). Each hit shows the source line number, the matching line, and the enclosing function ("member"/"local" marks declarations).

233 	struct udma_hwdesc hwdesc[];  member
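
For orientation, this is roughly the bookkeeping structure those hits index into. The field names below are exactly the ones that appear in the listing; the comments and the surrounding struct udma_desc shape are inferred from how the driver uses them, so treat this as a sketch rather than the verbatim definition.

    struct udma_hwdesc {
        size_t cppi5_desc_size;               /* size of the CPPI5 descriptor area */
        void *cppi5_desc_vaddr;               /* CPU virtual address of that area */
        dma_addr_t cppi5_desc_paddr;          /* DMA address handed to the hardware */

        /* TR-mode descriptors only: pointers into the area above */
        void *tr_req_base;                    /* first TR request record */
        struct cppi5_tr_resp_t *tr_resp_base; /* first TR response record */
    };

    struct udma_desc {
        /* ... other members elided ... */
        struct udma_hwdesc hwdesc[];          /* one entry per hardware descriptor */
    };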
494 return d->hwdesc[idx].cppi5_desc_paddr; in udma_curr_cppi5_desc_paddr()
499 return d->hwdesc[idx].cppi5_desc_vaddr; in udma_curr_cppi5_desc_vaddr()
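
The two accessors at 494/499 simply index that flexible array; a minimal sketch (the parameter list is inferred from the hits):

    static inline dma_addr_t udma_curr_cppi5_desc_paddr(struct udma_desc *d, int idx)
    {
        return d->hwdesc[idx].cppi5_desc_paddr;
    }

    static inline void *udma_curr_cppi5_desc_vaddr(struct udma_desc *d, int idx)
    {
        return d->hwdesc[idx].cppi5_desc_vaddr;
    }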
535 if (!d->hwdesc[i].cppi5_desc_vaddr) in udma_free_hwdesc()
539 d->hwdesc[i].cppi5_desc_vaddr, in udma_free_hwdesc()
540 d->hwdesc[i].cppi5_desc_paddr); in udma_free_hwdesc()
542 d->hwdesc[i].cppi5_desc_vaddr = NULL; in udma_free_hwdesc()
544 } else if (d->hwdesc[0].cppi5_desc_vaddr) { in udma_free_hwdesc()
545 dma_free_coherent(uc->dma_dev, d->hwdesc[0].cppi5_desc_size, in udma_free_hwdesc()
546 d->hwdesc[0].cppi5_desc_vaddr, in udma_free_hwdesc()
547 d->hwdesc[0].cppi5_desc_paddr); in udma_free_hwdesc()
549 d->hwdesc[0].cppi5_desc_vaddr = NULL; in udma_free_hwdesc()
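
Hits 535-549 outline the two teardown paths. A hedged reconstruction of how they fit together, assuming a use_dma_pool flag and a hwdesc_count field (neither name appears in this listing): pool-backed channels free every per-entry descriptor individually, otherwise a single coherent buffer backs hwdesc[0].

    static void udma_free_hwdesc(struct udma_chan *uc, struct udma_desc *d)
    {
        if (uc->use_dma_pool) {                      /* assumed flag */
            int i;

            for (i = 0; i < d->hwdesc_count; i++) {  /* assumed count field */
                if (!d->hwdesc[i].cppi5_desc_vaddr)
                    continue;

                dma_pool_free(uc->hdesc_pool,
                              d->hwdesc[i].cppi5_desc_vaddr,
                              d->hwdesc[i].cppi5_desc_paddr);
                d->hwdesc[i].cppi5_desc_vaddr = NULL;
            }
        } else if (d->hwdesc[0].cppi5_desc_vaddr) {
            dma_free_coherent(uc->dma_dev, d->hwdesc[0].cppi5_desc_size,
                              d->hwdesc[0].cppi5_desc_vaddr,
                              d->hwdesc[0].cppi5_desc_paddr);
            d->hwdesc[0].cppi5_desc_vaddr = NULL;
        }
    }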
1031 h_desc = d->hwdesc[d->desc_idx].cppi5_desc_vaddr; in udma_cyclic_packet_elapsed()
1039 struct cppi5_host_desc_t *h_desc = d->hwdesc[0].cppi5_desc_vaddr; in udma_fetch_epib()
2740 struct udma_hwdesc *hwdesc; in udma_alloc_tr_desc() local
2758 d = kzalloc(sizeof(*d) + sizeof(d->hwdesc[0]), GFP_NOWAIT); in udma_alloc_tr_desc()
2765 hwdesc = &d->hwdesc[0]; in udma_alloc_tr_desc()
2769 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_alloc_tr_desc()
2770 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_alloc_tr_desc()
2772 &hwdesc->cppi5_desc_paddr); in udma_alloc_tr_desc()
2774 hwdesc->cppi5_desc_size = cppi5_trdesc_calc_size(tr_size, in udma_alloc_tr_desc()
2776 hwdesc->cppi5_desc_size = ALIGN(hwdesc->cppi5_desc_size, in udma_alloc_tr_desc()
2778 hwdesc->cppi5_desc_vaddr = dma_alloc_coherent(uc->ud->dev, in udma_alloc_tr_desc()
2779 hwdesc->cppi5_desc_size, in udma_alloc_tr_desc()
2780 &hwdesc->cppi5_desc_paddr, in udma_alloc_tr_desc()
2784 if (!hwdesc->cppi5_desc_vaddr) { in udma_alloc_tr_desc()
2790 hwdesc->tr_req_base = hwdesc->cppi5_desc_vaddr + tr_size; in udma_alloc_tr_desc()
2792 hwdesc->tr_resp_base = hwdesc->tr_req_base + tr_size * tr_count; in udma_alloc_tr_desc()
2794 tr_desc = hwdesc->cppi5_desc_vaddr; in udma_alloc_tr_desc()
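
Hits 2740-2794 are the TR-mode allocation. For channels without a dma_pool (hits 2774-2794) the driver carves three regions out of one coherent buffer: the CPPI5 TR descriptor header, then tr_count request records, then the response records. A sketch of that layout; the alignment argument (desc_align) and the GFP flags are assumptions, the rest follows the hits.

    /*
     * cppi5_desc_vaddr
     *   +-- TR descriptor header   (tr_size bytes, see hit 2790)
     *   +-- tr_req_base            (tr_count * tr_size bytes of requests)
     *   +-- tr_resp_base           (TR response records)
     */
    hwdesc->cppi5_desc_size = cppi5_trdesc_calc_size(tr_size, tr_count);
    hwdesc->cppi5_desc_size = ALIGN(hwdesc->cppi5_desc_size,
                                    uc->ud->desc_align);       /* desc_align assumed */
    hwdesc->cppi5_desc_vaddr = dma_alloc_coherent(uc->ud->dev,
                                                  hwdesc->cppi5_desc_size,
                                                  &hwdesc->cppi5_desc_paddr,
                                                  GFP_NOWAIT);  /* flag assumed */
    if (!hwdesc->cppi5_desc_vaddr) {
        /* error path elided (hit 2784) */
    }

    /* requests start one header slot in, responses after all requests */
    hwdesc->tr_req_base = hwdesc->cppi5_desc_vaddr + tr_size;
    hwdesc->tr_resp_base = hwdesc->tr_req_base + tr_size * tr_count;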
2893 tr_req = d->hwdesc[0].tr_req_base; in udma_prep_slave_sg_tr()
3025 tr_req = d->hwdesc[0].tr_req_base; in udma_prep_slave_sg_triggered_tr()
3206 d = kzalloc(struct_size(d, hwdesc, sglen), GFP_NOWAIT); in udma_prep_slave_sg_pkt()
3224 struct udma_hwdesc *hwdesc = &d->hwdesc[i]; in udma_prep_slave_sg_pkt() local
3229 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_prep_slave_sg_pkt()
3231 &hwdesc->cppi5_desc_paddr); in udma_prep_slave_sg_pkt()
3232 if (!hwdesc->cppi5_desc_vaddr) { in udma_prep_slave_sg_pkt()
3242 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_prep_slave_sg_pkt()
3243 desc = hwdesc->cppi5_desc_vaddr; in udma_prep_slave_sg_pkt()
3263 hwdesc->cppi5_desc_paddr | asel); in udma_prep_slave_sg_pkt()
3279 h_desc = d->hwdesc[0].cppi5_desc_vaddr; in udma_prep_slave_sg_pkt()
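
Hits 3206-3279 are the packet-mode slave SG path: the descriptor is sized with struct_size() for one udma_hwdesc per scatterlist entry, each entry gets a pool-backed CPPI5 host descriptor, and hwdesc[0] ends up as the head of the chain (hit 3279). A hedged sketch of the per-entry loop; the cppi5_hdesc_link_hbdesc() chaining call and the hwdesc_count assignment are inferred rather than shown, and most CPPI5 field programming is elided.

    d = kzalloc(struct_size(d, hwdesc, sglen), GFP_NOWAIT);
    if (!d)
        return NULL;

    d->hwdesc_count = sglen;                    /* assumed bookkeeping */

    for_each_sg(sgl, sgent, sglen, i) {
        struct udma_hwdesc *hwdesc = &d->hwdesc[i];
        struct cppi5_host_desc_t *desc;

        hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, GFP_NOWAIT,
                                                   &hwdesc->cppi5_desc_paddr);
        if (!hwdesc->cppi5_desc_vaddr) {
            udma_free_hwdesc(uc, d);            /* undo the entries set up so far */
            kfree(d);
            return NULL;
        }

        hwdesc->cppi5_desc_size = uc->config.hdesc_size;
        desc = hwdesc->cppi5_desc_vaddr;

        /* ... descriptor init and buffer attach elided ... */

        /* chain every non-head descriptor onto the previous one (hit 3263) */
        if (h_desc)
            cppi5_hdesc_link_hbdesc(h_desc, hwdesc->cppi5_desc_paddr | asel);
        h_desc = desc;
    }

    /* hwdesc[0] is the head the completion/metadata paths read back (hit 3279) */
    h_desc = d->hwdesc[0].cppi5_desc_vaddr;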
3303 h_desc = d->hwdesc[0].cppi5_desc_vaddr; in udma_attach_metadata()
3331 h_desc = d->hwdesc[0].cppi5_desc_vaddr; in udma_get_metadata_ptr()
3360 h_desc = d->hwdesc[0].cppi5_desc_vaddr; in udma_set_metadata_len()
3474 tr_req = d->hwdesc[0].tr_req_base; in udma_prep_dma_cyclic_tr()
3533 d = kzalloc(struct_size(d, hwdesc, periods), GFP_NOWAIT); in udma_prep_dma_cyclic_pkt()
3549 struct udma_hwdesc *hwdesc = &d->hwdesc[i]; in udma_prep_dma_cyclic_pkt() local
3553 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_prep_dma_cyclic_pkt()
3555 &hwdesc->cppi5_desc_paddr); in udma_prep_dma_cyclic_pkt()
3556 if (!hwdesc->cppi5_desc_vaddr) { in udma_prep_dma_cyclic_pkt()
3565 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_prep_dma_cyclic_pkt()
3566 h_desc = hwdesc->cppi5_desc_vaddr; in udma_prep_dma_cyclic_pkt()
3694 tr_req = d->hwdesc[0].tr_req_base; in udma_prep_dma_memcpy()
5043 struct udma_hwdesc *hwdesc; in udma_setup_rx_flush() local
5060 hwdesc = &rx_flush->hwdescs[0]; in udma_setup_rx_flush()
5062 hwdesc->cppi5_desc_size = cppi5_trdesc_calc_size(tr_size, 1); in udma_setup_rx_flush()
5063 hwdesc->cppi5_desc_size = ALIGN(hwdesc->cppi5_desc_size, in udma_setup_rx_flush()
5066 hwdesc->cppi5_desc_vaddr = devm_kzalloc(dev, hwdesc->cppi5_desc_size, in udma_setup_rx_flush()
5068 if (!hwdesc->cppi5_desc_vaddr) in udma_setup_rx_flush()
5071 hwdesc->cppi5_desc_paddr = dma_map_single(dev, hwdesc->cppi5_desc_vaddr, in udma_setup_rx_flush()
5072 hwdesc->cppi5_desc_size, in udma_setup_rx_flush()
5074 if (dma_mapping_error(dev, hwdesc->cppi5_desc_paddr)) in udma_setup_rx_flush()
5078 hwdesc->tr_req_base = hwdesc->cppi5_desc_vaddr + tr_size; in udma_setup_rx_flush()
5080 hwdesc->tr_resp_base = hwdesc->tr_req_base + tr_size; in udma_setup_rx_flush()
5082 tr_desc = hwdesc->cppi5_desc_vaddr; in udma_setup_rx_flush()
5087 tr_req = hwdesc->tr_req_base; in udma_setup_rx_flush()
5096 dma_sync_single_for_device(dev, hwdesc->cppi5_desc_paddr, in udma_setup_rx_flush()
5097 hwdesc->cppi5_desc_size, DMA_TO_DEVICE); in udma_setup_rx_flush()
5100 hwdesc = &rx_flush->hwdescs[1]; in udma_setup_rx_flush()
5101 hwdesc->cppi5_desc_size = ALIGN(sizeof(struct cppi5_host_desc_t) + in udma_setup_rx_flush()
5106 hwdesc->cppi5_desc_vaddr = devm_kzalloc(dev, hwdesc->cppi5_desc_size, in udma_setup_rx_flush()
5108 if (!hwdesc->cppi5_desc_vaddr) in udma_setup_rx_flush()
5111 hwdesc->cppi5_desc_paddr = dma_map_single(dev, hwdesc->cppi5_desc_vaddr, in udma_setup_rx_flush()
5112 hwdesc->cppi5_desc_size, in udma_setup_rx_flush()
5114 if (dma_mapping_error(dev, hwdesc->cppi5_desc_paddr)) in udma_setup_rx_flush()
5117 desc = hwdesc->cppi5_desc_vaddr; in udma_setup_rx_flush()
5126 dma_sync_single_for_device(dev, hwdesc->cppi5_desc_paddr, in udma_setup_rx_flush()
5127 hwdesc->cppi5_desc_size, DMA_TO_DEVICE); in udma_setup_rx_flush()
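
Hits 5043-5127 are the one-time RX flush setup: hwdescs[0] holds a single-TR descriptor and hwdescs[1] a host descriptor pointing at a dummy receive buffer. Both use the same allocate/map/sync pattern, sketched below; because the descriptors are written once at probe time and only read by the hardware afterwards, a streaming DMA_TO_DEVICE mapping plus one dma_sync_single_for_device() is sufficient. The GFP flag is assumed, and the error labels and CPPI5 field programming are elided.

    hwdesc->cppi5_desc_vaddr = devm_kzalloc(dev, hwdesc->cppi5_desc_size,
                                            GFP_KERNEL);        /* flag assumed */
    if (!hwdesc->cppi5_desc_vaddr)
        return -ENOMEM;

    hwdesc->cppi5_desc_paddr = dma_map_single(dev, hwdesc->cppi5_desc_vaddr,
                                              hwdesc->cppi5_desc_size,
                                              DMA_TO_DEVICE);
    if (dma_mapping_error(dev, hwdesc->cppi5_desc_paddr))
        return -ENOMEM;

    /* ... program the CPPI5 TR or host descriptor fields here ... */

    dma_sync_single_for_device(dev, hwdesc->cppi5_desc_paddr,
                               hwdesc->cppi5_desc_size, DMA_TO_DEVICE);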