Lines matching refs:uc: a cross-reference listing for uc, the struct udma_chan pointer used throughout the TI K3 NAVSS UDMA driver (drivers/dma/ti/k3-udma.c). Each entry gives the source line number, the matching code fragment, and the enclosing function; the trailing argument/local tag records how uc enters that function's scope.
355 static inline u32 udma_tchanrt_read(struct udma_chan *uc, int reg) in udma_tchanrt_read() argument
357 if (!uc->tchan) in udma_tchanrt_read()
359 return udma_read(uc->tchan->reg_rt, reg); in udma_tchanrt_read()
362 static inline void udma_tchanrt_write(struct udma_chan *uc, int reg, u32 val) in udma_tchanrt_write() argument
364 if (!uc->tchan) in udma_tchanrt_write()
366 udma_write(uc->tchan->reg_rt, reg, val); in udma_tchanrt_write()
369 static inline void udma_tchanrt_update_bits(struct udma_chan *uc, int reg, in udma_tchanrt_update_bits() argument
372 if (!uc->tchan) in udma_tchanrt_update_bits()
374 udma_update_bits(uc->tchan->reg_rt, reg, mask, val); in udma_tchanrt_update_bits()
378 static inline u32 udma_rchanrt_read(struct udma_chan *uc, int reg) in udma_rchanrt_read() argument
380 if (!uc->rchan) in udma_rchanrt_read()
382 return udma_read(uc->rchan->reg_rt, reg); in udma_rchanrt_read()
385 static inline void udma_rchanrt_write(struct udma_chan *uc, int reg, u32 val) in udma_rchanrt_write() argument
387 if (!uc->rchan) in udma_rchanrt_write()
389 udma_write(uc->rchan->reg_rt, reg, val); in udma_rchanrt_write()
392 static inline void udma_rchanrt_update_bits(struct udma_chan *uc, int reg, in udma_rchanrt_update_bits() argument
395 if (!uc->rchan) in udma_rchanrt_update_bits()
397 udma_update_bits(uc->rchan->reg_rt, reg, mask, val); in udma_rchanrt_update_bits()
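The per-line match elides each guard's body. A plausible reconstruction of the tchan realtime-register trio, assuming the mainline k3-udma helpers; the rchan variants at lines 378-397 mirror it with uc->rchan, and the no-op fallbacks below are inferred rather than shown in the listing:

/* Generic realtime register access, guarded against a missing tchan */
static inline u32 udma_tchanrt_read(struct udma_chan *uc, int reg)
{
	if (!uc->tchan)
		return 0;	/* no tchan: read back as zero */
	return udma_read(uc->tchan->reg_rt, reg);
}

static inline void udma_tchanrt_write(struct udma_chan *uc, int reg, u32 val)
{
	if (!uc->tchan)
		return;		/* silently drop the write */
	udma_write(uc->tchan->reg_rt, reg, val);
}

static inline void udma_tchanrt_update_bits(struct udma_chan *uc, int reg,
					    u32 mask, u32 val)
{
	if (!uc->tchan)
		return;
	udma_update_bits(uc->tchan->reg_rt, reg, mask, val);
}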
457 static void udma_reset_uchan(struct udma_chan *uc) in udma_reset_uchan() argument
459 memset(&uc->config, 0, sizeof(uc->config)); in udma_reset_uchan()
460 uc->config.remote_thread_id = -1; in udma_reset_uchan()
461 uc->config.mapped_channel_id = -1; in udma_reset_uchan()
462 uc->config.default_flow_id = -1; in udma_reset_uchan()
463 uc->state = UDMA_CHAN_IS_IDLE; in udma_reset_uchan()
466 static void udma_dump_chan_stdata(struct udma_chan *uc) in udma_dump_chan_stdata() argument
468 struct device *dev = uc->ud->dev; in udma_dump_chan_stdata()
472 if (uc->config.dir == DMA_MEM_TO_DEV || uc->config.dir == DMA_MEM_TO_MEM) { in udma_dump_chan_stdata()
477 udma_tchanrt_read(uc, offset)); in udma_dump_chan_stdata()
481 if (uc->config.dir == DMA_DEV_TO_MEM || uc->config.dir == DMA_MEM_TO_MEM) { in udma_dump_chan_stdata()
486 udma_rchanrt_read(uc, offset)); in udma_dump_chan_stdata()
502 static struct udma_desc *udma_udma_desc_from_paddr(struct udma_chan *uc, in udma_udma_desc_from_paddr() argument
505 struct udma_desc *d = uc->terminated_desc; in udma_udma_desc_from_paddr()
516 d = uc->desc; in udma_udma_desc_from_paddr()
529 static void udma_free_hwdesc(struct udma_chan *uc, struct udma_desc *d) in udma_free_hwdesc() argument
531 if (uc->use_dma_pool) { in udma_free_hwdesc()
538 dma_pool_free(uc->hdesc_pool, in udma_free_hwdesc()
545 dma_free_coherent(uc->dma_dev, d->hwdesc[0].cppi5_desc_size, in udma_free_hwdesc()
565 struct udma_chan *uc = to_udma_chan(vd->tx.chan); in udma_purge_desc_work() local
568 udma_free_hwdesc(uc, d); in udma_purge_desc_work()
581 struct udma_chan *uc = to_udma_chan(vd->tx.chan); in udma_desc_free() local
585 if (uc->terminated_desc == d) in udma_desc_free()
586 uc->terminated_desc = NULL; in udma_desc_free()
588 if (uc->use_dma_pool) { in udma_desc_free()
589 udma_free_hwdesc(uc, d); in udma_desc_free()
601 static bool udma_is_chan_running(struct udma_chan *uc) in udma_is_chan_running() argument
606 if (uc->tchan) in udma_is_chan_running()
607 trt_ctl = udma_tchanrt_read(uc, UDMA_CHAN_RT_CTL_REG); in udma_is_chan_running()
608 if (uc->rchan) in udma_is_chan_running()
609 rrt_ctl = udma_rchanrt_read(uc, UDMA_CHAN_RT_CTL_REG); in udma_is_chan_running()
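Filling the elided lines: the channel counts as running if the EN bit is set in either side's realtime CTL register. A minimal reconstruction, assuming the mainline UDMA_CHAN_RT_CTL_EN mask:

static bool udma_is_chan_running(struct udma_chan *uc)
{
	u32 trt_ctl = 0;
	u32 rrt_ctl = 0;

	if (uc->tchan)
		trt_ctl = udma_tchanrt_read(uc, UDMA_CHAN_RT_CTL_REG);
	if (uc->rchan)
		rrt_ctl = udma_rchanrt_read(uc, UDMA_CHAN_RT_CTL_REG);

	/* running if either the TX or the RX half is enabled */
	if (trt_ctl & UDMA_CHAN_RT_CTL_EN || rrt_ctl & UDMA_CHAN_RT_CTL_EN)
		return true;

	return false;
}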
617 static bool udma_is_chan_paused(struct udma_chan *uc) in udma_is_chan_paused() argument
621 switch (uc->config.dir) { in udma_is_chan_paused()
623 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_PEER_RT_EN_REG); in udma_is_chan_paused()
627 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_RT_EN_REG); in udma_is_chan_paused()
631 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_CTL_REG); in udma_is_chan_paused()
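Pause state lives in a different register per direction: the peer RT_EN register for device-facing transfers, the channel CTL register for mem-to-mem. A sketch of the full switch, with the UDMA_PEER_RT_EN_PAUSE and UDMA_CHAN_RT_CTL_PAUSE masks taken from the mainline header:

static bool udma_is_chan_paused(struct udma_chan *uc)
{
	u32 val, pause_mask;

	switch (uc->config.dir) {
	case DMA_DEV_TO_MEM:
		val = udma_rchanrt_read(uc, UDMA_CHAN_RT_PEER_RT_EN_REG);
		pause_mask = UDMA_PEER_RT_EN_PAUSE;
		break;
	case DMA_MEM_TO_DEV:
		val = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_RT_EN_REG);
		pause_mask = UDMA_PEER_RT_EN_PAUSE;
		break;
	case DMA_MEM_TO_MEM:
		val = udma_tchanrt_read(uc, UDMA_CHAN_RT_CTL_REG);
		pause_mask = UDMA_CHAN_RT_CTL_PAUSE;
		break;
	default:
		return false;
	}

	if (val & pause_mask)
		return true;

	return false;
}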
644 static inline dma_addr_t udma_get_rx_flush_hwdesc_paddr(struct udma_chan *uc) in udma_get_rx_flush_hwdesc_paddr() argument
646 return uc->ud->rx_flush.hwdescs[uc->config.pkt_mode].cppi5_desc_paddr; in udma_get_rx_flush_hwdesc_paddr()
649 static int udma_push_to_ring(struct udma_chan *uc, int idx) in udma_push_to_ring() argument
651 struct udma_desc *d = uc->desc; in udma_push_to_ring()
655 switch (uc->config.dir) { in udma_push_to_ring()
657 ring = uc->rflow->fd_ring; in udma_push_to_ring()
661 ring = uc->tchan->t_ring; in udma_push_to_ring()
669 paddr = udma_get_rx_flush_hwdesc_paddr(uc); in udma_push_to_ring()
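Between the fragments, udma_push_to_ring() selects the RX free-descriptor ring or the TX submit ring, resolves the CPPI5 descriptor address, and pushes it; idx == -1 (passed by udma_stop() at line 998) selects the RX flush descriptor. A sketch, where udma_curr_cppi5_desc_paddr() and the write barrier are assumptions based on the mainline driver:

static int udma_push_to_ring(struct udma_chan *uc, int idx)
{
	struct udma_desc *d = uc->desc;
	struct k3_ring *ring = NULL;
	dma_addr_t paddr;

	switch (uc->config.dir) {
	case DMA_DEV_TO_MEM:
		ring = uc->rflow->fd_ring;
		break;
	case DMA_MEM_TO_DEV:
	case DMA_MEM_TO_MEM:
		ring = uc->tchan->t_ring;
		break;
	default:
		return -EINVAL;
	}

	/* RX flush packet: idx == -1 is only passed in case of DEV_TO_MEM */
	if (idx == -1) {
		paddr = udma_get_rx_flush_hwdesc_paddr(uc);
	} else {
		paddr = udma_curr_cppi5_desc_paddr(d, idx);
		wmb(); /* Ensure that writes are not moved over this point */
	}

	return k3_ringacc_ring_push(ring, &paddr);
}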
679 static bool udma_desc_is_rx_flush(struct udma_chan *uc, dma_addr_t addr) in udma_desc_is_rx_flush() argument
681 if (uc->config.dir != DMA_DEV_TO_MEM) in udma_desc_is_rx_flush()
684 if (addr == udma_get_rx_flush_hwdesc_paddr(uc)) in udma_desc_is_rx_flush()
690 static int udma_pop_from_ring(struct udma_chan *uc, dma_addr_t *addr) in udma_pop_from_ring() argument
695 switch (uc->config.dir) { in udma_pop_from_ring()
697 ring = uc->rflow->r_ring; in udma_pop_from_ring()
701 ring = uc->tchan->tc_ring; in udma_pop_from_ring()
718 if (udma_desc_is_rx_flush(uc, *addr)) in udma_pop_from_ring()
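The pop side mirrors the push: read one descriptor address from the completion ring, then filter out the two special cases visible in the listing (teardown-completion markers and the RX flush descriptor). Reconstructed, with cppi5_desc_is_tdcm() and the read barrier assumed from the mainline driver:

static int udma_pop_from_ring(struct udma_chan *uc, dma_addr_t *addr)
{
	struct k3_ring *ring = NULL;
	int ret;

	switch (uc->config.dir) {
	case DMA_DEV_TO_MEM:
		ring = uc->rflow->r_ring;
		break;
	case DMA_MEM_TO_DEV:
	case DMA_MEM_TO_MEM:
		ring = uc->tchan->tc_ring;
		break;
	default:
		return -ENOENT;
	}

	ret = k3_ringacc_ring_pop(ring, addr);
	if (ret)
		return ret;

	rmb(); /* Ensure that reads are not moved before this point */

	/* Teardown completion */
	if (cppi5_desc_is_tdcm(*addr))
		return 0;

	/* Check for flush descriptor */
	if (udma_desc_is_rx_flush(uc, *addr))
		return -ENOENT;

	return 0;
}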
724 static void udma_reset_rings(struct udma_chan *uc) in udma_reset_rings() argument
729 switch (uc->config.dir) { in udma_reset_rings()
731 if (uc->rchan) { in udma_reset_rings()
732 ring1 = uc->rflow->fd_ring; in udma_reset_rings()
733 ring2 = uc->rflow->r_ring; in udma_reset_rings()
738 if (uc->tchan) { in udma_reset_rings()
739 ring1 = uc->tchan->t_ring; in udma_reset_rings()
740 ring2 = uc->tchan->tc_ring; in udma_reset_rings()
754 if (uc->terminated_desc) { in udma_reset_rings()
755 udma_desc_free(&uc->terminated_desc->vd); in udma_reset_rings()
756 uc->terminated_desc = NULL; in udma_reset_rings()
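The middle of udma_reset_rings() is elided; it resets whichever ring pair the switch picked. A sketch using the k3_ringacc reset API (the exact reset-with-occupancy sequence is an assumption):

static void udma_reset_rings(struct udma_chan *uc)
{
	struct k3_ring *ring1 = NULL;
	struct k3_ring *ring2 = NULL;

	switch (uc->config.dir) {
	case DMA_DEV_TO_MEM:
		if (uc->rchan) {
			ring1 = uc->rflow->fd_ring;
			ring2 = uc->rflow->r_ring;
		}
		break;
	case DMA_MEM_TO_DEV:
	case DMA_MEM_TO_MEM:
		if (uc->tchan) {
			ring1 = uc->tchan->t_ring;
			ring2 = uc->tchan->tc_ring;
		}
		break;
	default:
		break;
	}

	if (ring1)
		k3_ringacc_ring_reset_dma(ring1,
					  k3_ringacc_ring_get_occ(ring1));
	if (ring2)
		k3_ringacc_ring_reset(ring2);

	/* make sure we are not leaking memory by a stalled descriptor */
	if (uc->terminated_desc) {
		udma_desc_free(&uc->terminated_desc->vd);
		uc->terminated_desc = NULL;
	}
}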
760 static void udma_reset_counters(struct udma_chan *uc) in udma_reset_counters() argument
764 if (uc->tchan) { in udma_reset_counters()
765 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_reset_counters()
766 udma_tchanrt_write(uc, UDMA_CHAN_RT_BCNT_REG, val); in udma_reset_counters()
768 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_SBCNT_REG); in udma_reset_counters()
769 udma_tchanrt_write(uc, UDMA_CHAN_RT_SBCNT_REG, val); in udma_reset_counters()
771 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_PCNT_REG); in udma_reset_counters()
772 udma_tchanrt_write(uc, UDMA_CHAN_RT_PCNT_REG, val); in udma_reset_counters()
774 if (!uc->bchan) { in udma_reset_counters()
775 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG); in udma_reset_counters()
776 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_BCNT_REG, val); in udma_reset_counters()
780 if (uc->rchan) { in udma_reset_counters()
781 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_reset_counters()
782 udma_rchanrt_write(uc, UDMA_CHAN_RT_BCNT_REG, val); in udma_reset_counters()
784 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_SBCNT_REG); in udma_reset_counters()
785 udma_rchanrt_write(uc, UDMA_CHAN_RT_SBCNT_REG, val); in udma_reset_counters()
787 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_PCNT_REG); in udma_reset_counters()
788 udma_rchanrt_write(uc, UDMA_CHAN_RT_PCNT_REG, val); in udma_reset_counters()
790 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG); in udma_reset_counters()
791 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_BCNT_REG, val); in udma_reset_counters()
794 uc->bcnt = 0; in udma_reset_counters()
797 static int udma_reset_chan(struct udma_chan *uc, bool hard) in udma_reset_chan() argument
799 switch (uc->config.dir) { in udma_reset_chan()
801 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, 0); in udma_reset_chan()
802 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
805 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
806 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, 0); in udma_reset_chan()
809 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
810 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
817 udma_reset_counters(uc); in udma_reset_chan()
824 memcpy(&ucc_backup, &uc->config, sizeof(uc->config)); in udma_reset_chan()
825 uc->ud->ddev.device_free_chan_resources(&uc->vc.chan); in udma_reset_chan()
828 memcpy(&uc->config, &ucc_backup, sizeof(uc->config)); in udma_reset_chan()
829 ret = uc->ud->ddev.device_alloc_chan_resources(&uc->vc.chan); in udma_reset_chan()
837 if (uc->config.dir == DMA_DEV_TO_MEM) in udma_reset_chan()
838 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_reset_chan()
843 uc->state = UDMA_CHAN_IS_IDLE; in udma_reset_chan()
848 static void udma_start_desc(struct udma_chan *uc) in udma_start_desc() argument
850 struct udma_chan_config *ucc = &uc->config; in udma_start_desc()
852 if (uc->ud->match_data->type == DMA_TYPE_UDMA && ucc->pkt_mode && in udma_start_desc()
853 (uc->cyclic || ucc->dir == DMA_DEV_TO_MEM)) { in udma_start_desc()
862 for (i = 0; i < uc->desc->sglen; i++) in udma_start_desc()
863 udma_push_to_ring(uc, i); in udma_start_desc()
865 udma_push_to_ring(uc, 0); in udma_start_desc()
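Put together: for packet-mode cyclic or RX transfers on UDMA hardware every prepared descriptor is pushed up front, otherwise only the first one is. Reconstructed:

static void udma_start_desc(struct udma_chan *uc)
{
	struct udma_chan_config *ucc = &uc->config;

	if (uc->ud->match_data->type == DMA_TYPE_UDMA && ucc->pkt_mode &&
	    (uc->cyclic || ucc->dir == DMA_DEV_TO_MEM)) {
		int i;

		/*
		 * UDMA only: push all descriptors now; PKTDMA uses
		 * pre-linked descriptors and does not support cyclic.
		 */
		for (i = 0; i < uc->desc->sglen; i++)
			udma_push_to_ring(uc, i);
	} else {
		udma_push_to_ring(uc, 0);
	}
}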
869 static bool udma_chan_needs_reconfiguration(struct udma_chan *uc) in udma_chan_needs_reconfiguration() argument
872 if (uc->config.ep_type == PSIL_EP_NATIVE) in udma_chan_needs_reconfiguration()
876 if (memcmp(&uc->static_tr, &uc->desc->static_tr, sizeof(uc->static_tr))) in udma_chan_needs_reconfiguration()
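The two fragments imply the whole predicate: only PDMA endpoints carry static TR configuration, and reconfiguration is needed exactly when the cached copy differs from the next descriptor's:

static bool udma_chan_needs_reconfiguration(struct udma_chan *uc)
{
	/* Only PDMA channels have static TR */
	if (uc->config.ep_type == PSIL_EP_NATIVE)
		return false;

	/* Check if the staticTR configuration has changed for TX */
	if (memcmp(&uc->static_tr, &uc->desc->static_tr, sizeof(uc->static_tr)))
		return true;

	return false;
}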
882 static int udma_start(struct udma_chan *uc) in udma_start() argument
884 struct virt_dma_desc *vd = vchan_next_desc(&uc->vc); in udma_start()
887 uc->desc = NULL; in udma_start()
893 uc->desc = to_udma_desc(&vd->tx); in udma_start()
896 if (udma_is_chan_running(uc) && !udma_chan_needs_reconfiguration(uc)) { in udma_start()
897 udma_start_desc(uc); in udma_start()
902 udma_reset_chan(uc, false); in udma_start()
905 udma_start_desc(uc); in udma_start()
907 switch (uc->desc->dir) { in udma_start()
910 if (uc->config.ep_type == PSIL_EP_PDMA_XY) { in udma_start()
911 u32 val = PDMA_STATIC_TR_Y(uc->desc->static_tr.elcnt) | in udma_start()
912 PDMA_STATIC_TR_X(uc->desc->static_tr.elsize); in udma_start()
914 uc->ud->match_data; in udma_start()
916 if (uc->config.enable_acc32) in udma_start()
918 if (uc->config.enable_burst) in udma_start()
921 udma_rchanrt_write(uc, in udma_start()
925 udma_rchanrt_write(uc, in udma_start()
927 PDMA_STATIC_TR_Z(uc->desc->static_tr.bstcnt, in udma_start()
931 memcpy(&uc->static_tr, &uc->desc->static_tr, in udma_start()
932 sizeof(uc->static_tr)); in udma_start()
935 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
939 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_start()
945 if (uc->config.ep_type == PSIL_EP_PDMA_XY) { in udma_start()
946 u32 val = PDMA_STATIC_TR_Y(uc->desc->static_tr.elcnt) | in udma_start()
947 PDMA_STATIC_TR_X(uc->desc->static_tr.elsize); in udma_start()
949 if (uc->config.enable_acc32) in udma_start()
951 if (uc->config.enable_burst) in udma_start()
954 udma_tchanrt_write(uc, in udma_start()
959 memcpy(&uc->static_tr, &uc->desc->static_tr, in udma_start()
960 sizeof(uc->static_tr)); in udma_start()
964 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_start()
967 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
972 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
974 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
982 uc->state = UDMA_CHAN_IS_ACTIVE; in udma_start()
988 static int udma_stop(struct udma_chan *uc) in udma_stop() argument
990 enum udma_chan_state old_state = uc->state; in udma_stop()
992 uc->state = UDMA_CHAN_IS_TERMINATING; in udma_stop()
993 reinit_completion(&uc->teardown_completed); in udma_stop()
995 switch (uc->config.dir) { in udma_stop()
997 if (!uc->cyclic && !uc->desc) in udma_stop()
998 udma_push_to_ring(uc, -1); in udma_stop()
1000 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_stop()
1005 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_stop()
1008 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_stop()
1013 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_stop()
1018 uc->state = old_state; in udma_stop()
1019 complete_all(&uc->teardown_completed); in udma_stop()
1026 static void udma_cyclic_packet_elapsed(struct udma_chan *uc) in udma_cyclic_packet_elapsed() argument
1028 struct udma_desc *d = uc->desc; in udma_cyclic_packet_elapsed()
1033 udma_push_to_ring(uc, d->desc_idx); in udma_cyclic_packet_elapsed()
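Around the re-push at line 1033, the completed period's host descriptor is restored to its original state and the period index advances modulo sglen. A sketch; cppi5_hdesc_reset_to_original() is assumed from the CPPI5 helper set:

static void udma_cyclic_packet_elapsed(struct udma_chan *uc)
{
	struct udma_desc *d = uc->desc;
	struct cppi5_host_desc_t *h_desc;

	/* recycle the period that just completed and re-arm it */
	h_desc = d->hwdesc[d->desc_idx].cppi5_desc_vaddr;
	cppi5_hdesc_reset_to_original(h_desc);
	udma_push_to_ring(uc, d->desc_idx);
	d->desc_idx = (d->desc_idx + 1) % d->sglen;
}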
1037 static inline void udma_fetch_epib(struct udma_chan *uc, struct udma_desc *d) in udma_fetch_epib() argument
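Only the prototype of udma_fetch_epib() matches; its body is a single copy of the extended packet info block (EPIB) from the first host descriptor into the descriptor's metadata buffer. Assumed shape:

static inline void udma_fetch_epib(struct udma_chan *uc, struct udma_desc *d)
{
	struct cppi5_host_desc_t *h_desc = d->hwdesc[0].cppi5_desc_vaddr;

	memcpy(d->metadata, h_desc->epib, d->metadata_size);
}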
1044 static bool udma_is_desc_really_done(struct udma_chan *uc, struct udma_desc *d) in udma_is_desc_really_done() argument
1049 if (uc->config.ep_type == PSIL_EP_NATIVE || in udma_is_desc_really_done()
1050 uc->config.dir != DMA_MEM_TO_DEV) in udma_is_desc_really_done()
1053 peer_bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG); in udma_is_desc_really_done()
1054 bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_is_desc_really_done()
1058 uc->tx_drain.residue = bcnt - peer_bcnt; in udma_is_desc_really_done()
1059 uc->tx_drain.tstamp = ktime_get(); in udma_is_desc_really_done()
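The elided comparison is peer_bcnt < bcnt: a TX transfer towards a PDMA peer is only "really done" once the peer's byte counter catches up with the UDMA side; until then the residue and a timestamp are stashed for the drain worker. Reconstructed:

static bool udma_is_desc_really_done(struct udma_chan *uc, struct udma_desc *d)
{
	u32 peer_bcnt, bcnt;

	/* Only TX towards PDMA is affected */
	if (uc->config.ep_type == PSIL_EP_NATIVE ||
	    uc->config.dir != DMA_MEM_TO_DEV)
		return true;

	peer_bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG);
	bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG);

	/* Transfer is incomplete, store current residue and time stamp */
	if (peer_bcnt < bcnt) {
		uc->tx_drain.residue = bcnt - peer_bcnt;
		uc->tx_drain.tstamp = ktime_get();
		return false;
	}

	return true;
}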
1068 struct udma_chan *uc = container_of(work, typeof(*uc), in udma_check_tx_completion() local
1076 if (uc->desc) { in udma_check_tx_completion()
1078 residue_diff = uc->tx_drain.residue; in udma_check_tx_completion()
1079 time_diff = uc->tx_drain.tstamp; in udma_check_tx_completion()
1084 desc_done = udma_is_desc_really_done(uc, uc->desc); in udma_check_tx_completion()
1092 time_diff = ktime_sub(uc->tx_drain.tstamp, in udma_check_tx_completion()
1094 residue_diff -= uc->tx_drain.residue; in udma_check_tx_completion()
1103 uc->tx_drain.residue; in udma_check_tx_completion()
1106 schedule_delayed_work(&uc->tx_drain.work, HZ); in udma_check_tx_completion()
1115 if (uc->desc) { in udma_check_tx_completion()
1116 struct udma_desc *d = uc->desc; in udma_check_tx_completion()
1118 uc->bcnt += d->residue; in udma_check_tx_completion()
1119 udma_start(uc); in udma_check_tx_completion()
1130 struct udma_chan *uc = data; in udma_ring_irq_handler() local
1134 if (udma_pop_from_ring(uc, &paddr) || !paddr) in udma_ring_irq_handler()
1137 spin_lock(&uc->vc.lock); in udma_ring_irq_handler()
1141 complete_all(&uc->teardown_completed); in udma_ring_irq_handler()
1143 if (uc->terminated_desc) { in udma_ring_irq_handler()
1144 udma_desc_free(&uc->terminated_desc->vd); in udma_ring_irq_handler()
1145 uc->terminated_desc = NULL; in udma_ring_irq_handler()
1148 if (!uc->desc) in udma_ring_irq_handler()
1149 udma_start(uc); in udma_ring_irq_handler()
1154 d = udma_udma_desc_from_paddr(uc, paddr); in udma_ring_irq_handler()
1160 dev_err(uc->ud->dev, "not matching descriptors!\n"); in udma_ring_irq_handler()
1164 if (d == uc->desc) { in udma_ring_irq_handler()
1166 if (uc->cyclic) { in udma_ring_irq_handler()
1167 udma_cyclic_packet_elapsed(uc); in udma_ring_irq_handler()
1170 if (udma_is_desc_really_done(uc, d)) { in udma_ring_irq_handler()
1171 uc->bcnt += d->residue; in udma_ring_irq_handler()
1172 udma_start(uc); in udma_ring_irq_handler()
1175 schedule_delayed_work(&uc->tx_drain.work, in udma_ring_irq_handler()
1188 spin_unlock(&uc->vc.lock); in udma_ring_irq_handler()
1195 struct udma_chan *uc = data; in udma_udma_irq_handler() local
1198 spin_lock(&uc->vc.lock); in udma_udma_irq_handler()
1199 d = uc->desc; in udma_udma_irq_handler()
1203 if (uc->cyclic) { in udma_udma_irq_handler()
1207 uc->bcnt += d->residue; in udma_udma_irq_handler()
1208 udma_start(uc); in udma_udma_irq_handler()
1213 spin_unlock(&uc->vc.lock); in udma_udma_irq_handler()
1347 static int bcdma_get_bchan(struct udma_chan *uc) in bcdma_get_bchan() argument
1349 struct udma_dev *ud = uc->ud; in bcdma_get_bchan()
1353 if (uc->bchan) { in bcdma_get_bchan()
1355 uc->id, uc->bchan->id); in bcdma_get_bchan()
1363 if (uc->config.tr_trigger_type) in bcdma_get_bchan()
1368 uc->bchan = __udma_reserve_bchan(ud, tpl, -1); in bcdma_get_bchan()
1369 if (IS_ERR(uc->bchan)) { in bcdma_get_bchan()
1370 ret = PTR_ERR(uc->bchan); in bcdma_get_bchan()
1371 uc->bchan = NULL; in bcdma_get_bchan()
1375 uc->tchan = uc->bchan; in bcdma_get_bchan()
1380 static int udma_get_tchan(struct udma_chan *uc) in udma_get_tchan() argument
1382 struct udma_dev *ud = uc->ud; in udma_get_tchan()
1385 if (uc->tchan) { in udma_get_tchan()
1387 uc->id, uc->tchan->id); in udma_get_tchan()
1396 uc->tchan = __udma_reserve_tchan(ud, uc->config.channel_tpl, in udma_get_tchan()
1397 uc->config.mapped_channel_id); in udma_get_tchan()
1398 if (IS_ERR(uc->tchan)) { in udma_get_tchan()
1399 ret = PTR_ERR(uc->tchan); in udma_get_tchan()
1400 uc->tchan = NULL; in udma_get_tchan()
1408 if (uc->config.default_flow_id >= 0) in udma_get_tchan()
1409 tflow_id = uc->config.default_flow_id; in udma_get_tchan()
1411 tflow_id = uc->tchan->id; in udma_get_tchan()
1415 clear_bit(uc->tchan->id, ud->tchan_map); in udma_get_tchan()
1416 uc->tchan = NULL; in udma_get_tchan()
1420 uc->tchan->tflow_id = tflow_id; in udma_get_tchan()
1423 uc->tchan->tflow_id = -1; in udma_get_tchan()
1429 static int udma_get_rchan(struct udma_chan *uc) in udma_get_rchan() argument
1431 struct udma_dev *ud = uc->ud; in udma_get_rchan()
1434 if (uc->rchan) { in udma_get_rchan()
1436 uc->id, uc->rchan->id); in udma_get_rchan()
1445 uc->rchan = __udma_reserve_rchan(ud, uc->config.channel_tpl, in udma_get_rchan()
1446 uc->config.mapped_channel_id); in udma_get_rchan()
1447 if (IS_ERR(uc->rchan)) { in udma_get_rchan()
1448 ret = PTR_ERR(uc->rchan); in udma_get_rchan()
1449 uc->rchan = NULL; in udma_get_rchan()
1456 static int udma_get_chan_pair(struct udma_chan *uc) in udma_get_chan_pair() argument
1458 struct udma_dev *ud = uc->ud; in udma_get_chan_pair()
1461 if ((uc->tchan && uc->rchan) && uc->tchan->id == uc->rchan->id) { in udma_get_chan_pair()
1463 uc->id, uc->tchan->id); in udma_get_chan_pair()
1467 if (uc->tchan) { in udma_get_chan_pair()
1469 uc->id, uc->tchan->id); in udma_get_chan_pair()
1471 } else if (uc->rchan) { in udma_get_chan_pair()
1473 uc->id, uc->rchan->id); in udma_get_chan_pair()
1495 uc->tchan = &ud->tchans[chan_id]; in udma_get_chan_pair()
1496 uc->rchan = &ud->rchans[chan_id]; in udma_get_chan_pair()
1499 uc->tchan->tflow_id = -1; in udma_get_chan_pair()
1504 static int udma_get_rflow(struct udma_chan *uc, int flow_id) in udma_get_rflow() argument
1506 struct udma_dev *ud = uc->ud; in udma_get_rflow()
1509 if (!uc->rchan) { in udma_get_rflow()
1510 dev_err(ud->dev, "chan%d: does not have rchan??\n", uc->id); in udma_get_rflow()
1514 if (uc->rflow) { in udma_get_rflow()
1516 uc->id, uc->rflow->id); in udma_get_rflow()
1520 uc->rflow = __udma_get_rflow(ud, flow_id); in udma_get_rflow()
1521 if (IS_ERR(uc->rflow)) { in udma_get_rflow()
1522 ret = PTR_ERR(uc->rflow); in udma_get_rflow()
1523 uc->rflow = NULL; in udma_get_rflow()
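udma_get_rflow() follows the same check/reserve/unwind shape as the channel getters above; filling in the elided returns and the dev_dbg (whose wording is an assumption):

static int udma_get_rflow(struct udma_chan *uc, int flow_id)
{
	struct udma_dev *ud = uc->ud;
	int ret;

	if (!uc->rchan) {
		dev_err(ud->dev, "chan%d: does not have rchan??\n", uc->id);
		return -EINVAL;
	}

	if (uc->rflow) {
		dev_dbg(ud->dev, "chan%d: already have rflow%d allocated\n",
			uc->id, uc->rflow->id);
		return 0;
	}

	uc->rflow = __udma_get_rflow(ud, flow_id);
	if (IS_ERR(uc->rflow)) {
		ret = PTR_ERR(uc->rflow);
		uc->rflow = NULL;
		return ret;
	}

	return 0;
}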
1530 static void bcdma_put_bchan(struct udma_chan *uc) in bcdma_put_bchan() argument
1532 struct udma_dev *ud = uc->ud; in bcdma_put_bchan()
1534 if (uc->bchan) { in bcdma_put_bchan()
1535 dev_dbg(ud->dev, "chan%d: put bchan%d\n", uc->id, in bcdma_put_bchan()
1536 uc->bchan->id); in bcdma_put_bchan()
1537 clear_bit(uc->bchan->id, ud->bchan_map); in bcdma_put_bchan()
1538 uc->bchan = NULL; in bcdma_put_bchan()
1539 uc->tchan = NULL; in bcdma_put_bchan()
1543 static void udma_put_rchan(struct udma_chan *uc) in udma_put_rchan() argument
1545 struct udma_dev *ud = uc->ud; in udma_put_rchan()
1547 if (uc->rchan) { in udma_put_rchan()
1548 dev_dbg(ud->dev, "chan%d: put rchan%d\n", uc->id, in udma_put_rchan()
1549 uc->rchan->id); in udma_put_rchan()
1550 clear_bit(uc->rchan->id, ud->rchan_map); in udma_put_rchan()
1551 uc->rchan = NULL; in udma_put_rchan()
1555 static void udma_put_tchan(struct udma_chan *uc) in udma_put_tchan() argument
1557 struct udma_dev *ud = uc->ud; in udma_put_tchan()
1559 if (uc->tchan) { in udma_put_tchan()
1560 dev_dbg(ud->dev, "chan%d: put tchan%d\n", uc->id, in udma_put_tchan()
1561 uc->tchan->id); in udma_put_tchan()
1562 clear_bit(uc->tchan->id, ud->tchan_map); in udma_put_tchan()
1564 if (uc->tchan->tflow_id >= 0) in udma_put_tchan()
1565 clear_bit(uc->tchan->tflow_id, ud->tflow_map); in udma_put_tchan()
1567 uc->tchan = NULL; in udma_put_tchan()
1571 static void udma_put_rflow(struct udma_chan *uc) in udma_put_rflow() argument
1573 struct udma_dev *ud = uc->ud; in udma_put_rflow()
1575 if (uc->rflow) { in udma_put_rflow()
1576 dev_dbg(ud->dev, "chan%d: put rflow%d\n", uc->id, in udma_put_rflow()
1577 uc->rflow->id); in udma_put_rflow()
1578 __udma_put_rflow(ud, uc->rflow); in udma_put_rflow()
1579 uc->rflow = NULL; in udma_put_rflow()
1583 static void bcdma_free_bchan_resources(struct udma_chan *uc) in bcdma_free_bchan_resources() argument
1585 if (!uc->bchan) in bcdma_free_bchan_resources()
1588 k3_ringacc_ring_free(uc->bchan->tc_ring); in bcdma_free_bchan_resources()
1589 k3_ringacc_ring_free(uc->bchan->t_ring); in bcdma_free_bchan_resources()
1590 uc->bchan->tc_ring = NULL; in bcdma_free_bchan_resources()
1591 uc->bchan->t_ring = NULL; in bcdma_free_bchan_resources()
1592 k3_configure_chan_coherency(&uc->vc.chan, 0); in bcdma_free_bchan_resources()
1594 bcdma_put_bchan(uc); in bcdma_free_bchan_resources()
1597 static int bcdma_alloc_bchan_resources(struct udma_chan *uc) in bcdma_alloc_bchan_resources() argument
1600 struct udma_dev *ud = uc->ud; in bcdma_alloc_bchan_resources()
1603 ret = bcdma_get_bchan(uc); in bcdma_alloc_bchan_resources()
1607 ret = k3_ringacc_request_rings_pair(ud->ringacc, uc->bchan->id, -1, in bcdma_alloc_bchan_resources()
1608 &uc->bchan->t_ring, in bcdma_alloc_bchan_resources()
1609 &uc->bchan->tc_ring); in bcdma_alloc_bchan_resources()
1620 k3_configure_chan_coherency(&uc->vc.chan, ud->asel); in bcdma_alloc_bchan_resources()
1622 ring_cfg.dma_dev = dmaengine_get_dma_device(&uc->vc.chan); in bcdma_alloc_bchan_resources()
1624 ret = k3_ringacc_ring_cfg(uc->bchan->t_ring, &ring_cfg); in bcdma_alloc_bchan_resources()
1631 k3_ringacc_ring_free(uc->bchan->tc_ring); in bcdma_alloc_bchan_resources()
1632 uc->bchan->tc_ring = NULL; in bcdma_alloc_bchan_resources()
1633 k3_ringacc_ring_free(uc->bchan->t_ring); in bcdma_alloc_bchan_resources()
1634 uc->bchan->t_ring = NULL; in bcdma_alloc_bchan_resources()
1635 k3_configure_chan_coherency(&uc->vc.chan, 0); in bcdma_alloc_bchan_resources()
1637 bcdma_put_bchan(uc); in bcdma_alloc_bchan_resources()
1642 static void udma_free_tx_resources(struct udma_chan *uc) in udma_free_tx_resources() argument
1644 if (!uc->tchan) in udma_free_tx_resources()
1647 k3_ringacc_ring_free(uc->tchan->t_ring); in udma_free_tx_resources()
1648 k3_ringacc_ring_free(uc->tchan->tc_ring); in udma_free_tx_resources()
1649 uc->tchan->t_ring = NULL; in udma_free_tx_resources()
1650 uc->tchan->tc_ring = NULL; in udma_free_tx_resources()
1652 udma_put_tchan(uc); in udma_free_tx_resources()
1655 static int udma_alloc_tx_resources(struct udma_chan *uc) in udma_alloc_tx_resources() argument
1658 struct udma_dev *ud = uc->ud; in udma_alloc_tx_resources()
1662 ret = udma_get_tchan(uc); in udma_alloc_tx_resources()
1666 tchan = uc->tchan; in udma_alloc_tx_resources()
1688 k3_configure_chan_coherency(&uc->vc.chan, uc->config.asel); in udma_alloc_tx_resources()
1689 ring_cfg.asel = uc->config.asel; in udma_alloc_tx_resources()
1690 ring_cfg.dma_dev = dmaengine_get_dma_device(&uc->vc.chan); in udma_alloc_tx_resources()
1702 k3_ringacc_ring_free(uc->tchan->tc_ring); in udma_alloc_tx_resources()
1703 uc->tchan->tc_ring = NULL; in udma_alloc_tx_resources()
1704 k3_ringacc_ring_free(uc->tchan->t_ring); in udma_alloc_tx_resources()
1705 uc->tchan->t_ring = NULL; in udma_alloc_tx_resources()
1707 udma_put_tchan(uc); in udma_alloc_tx_resources()
1712 static void udma_free_rx_resources(struct udma_chan *uc) in udma_free_rx_resources() argument
1714 if (!uc->rchan) in udma_free_rx_resources()
1717 if (uc->rflow) { in udma_free_rx_resources()
1718 struct udma_rflow *rflow = uc->rflow; in udma_free_rx_resources()
1725 udma_put_rflow(uc); in udma_free_rx_resources()
1728 udma_put_rchan(uc); in udma_free_rx_resources()
1731 static int udma_alloc_rx_resources(struct udma_chan *uc) in udma_alloc_rx_resources() argument
1733 struct udma_dev *ud = uc->ud; in udma_alloc_rx_resources()
1739 ret = udma_get_rchan(uc); in udma_alloc_rx_resources()
1744 if (uc->config.dir == DMA_MEM_TO_MEM) in udma_alloc_rx_resources()
1747 if (uc->config.default_flow_id >= 0) in udma_alloc_rx_resources()
1748 ret = udma_get_rflow(uc, uc->config.default_flow_id); in udma_alloc_rx_resources()
1750 ret = udma_get_rflow(uc, uc->rchan->id); in udma_alloc_rx_resources()
1757 rflow = uc->rflow; in udma_alloc_rx_resources()
1762 uc->rchan->id; in udma_alloc_rx_resources()
1775 if (uc->config.pkt_mode) in udma_alloc_rx_resources()
1785 k3_configure_chan_coherency(&uc->vc.chan, uc->config.asel); in udma_alloc_rx_resources()
1786 ring_cfg.asel = uc->config.asel; in udma_alloc_rx_resources()
1787 ring_cfg.dma_dev = dmaengine_get_dma_device(&uc->vc.chan); in udma_alloc_rx_resources()
1806 udma_put_rflow(uc); in udma_alloc_rx_resources()
1808 udma_put_rchan(uc); in udma_alloc_rx_resources()
1845 static int udma_tisci_m2m_channel_config(struct udma_chan *uc) in udma_tisci_m2m_channel_config() argument
1847 struct udma_dev *ud = uc->ud; in udma_tisci_m2m_channel_config()
1850 struct udma_tchan *tchan = uc->tchan; in udma_tisci_m2m_channel_config()
1851 struct udma_rchan *rchan = uc->rchan; in udma_tisci_m2m_channel_config()
1904 static int bcdma_tisci_m2m_channel_config(struct udma_chan *uc) in bcdma_tisci_m2m_channel_config() argument
1906 struct udma_dev *ud = uc->ud; in bcdma_tisci_m2m_channel_config()
1910 struct udma_bchan *bchan = uc->bchan; in bcdma_tisci_m2m_channel_config()
1937 static int udma_tisci_tx_channel_config(struct udma_chan *uc) in udma_tisci_tx_channel_config() argument
1939 struct udma_dev *ud = uc->ud; in udma_tisci_tx_channel_config()
1942 struct udma_tchan *tchan = uc->tchan; in udma_tisci_tx_channel_config()
1948 if (uc->config.pkt_mode) { in udma_tisci_tx_channel_config()
1950 fetch_size = cppi5_hdesc_calc_size(uc->config.needs_epib, in udma_tisci_tx_channel_config()
1951 uc->config.psd_size, 0); in udma_tisci_tx_channel_config()
1961 req_tx.tx_supr_tdpkt = uc->config.notdpkt; in udma_tisci_tx_channel_config()
1964 req_tx.tx_atype = uc->config.atype; in udma_tisci_tx_channel_config()
1965 if (uc->config.ep_type == PSIL_EP_PDMA_XY && in udma_tisci_tx_channel_config()
1980 static int bcdma_tisci_tx_channel_config(struct udma_chan *uc) in bcdma_tisci_tx_channel_config() argument
1982 struct udma_dev *ud = uc->ud; in bcdma_tisci_tx_channel_config()
1985 struct udma_tchan *tchan = uc->tchan; in bcdma_tisci_tx_channel_config()
1992 req_tx.tx_supr_tdpkt = uc->config.notdpkt; in bcdma_tisci_tx_channel_config()
2009 static int udma_tisci_rx_channel_config(struct udma_chan *uc) in udma_tisci_rx_channel_config() argument
2011 struct udma_dev *ud = uc->ud; in udma_tisci_rx_channel_config()
2014 struct udma_rchan *rchan = uc->rchan; in udma_tisci_rx_channel_config()
2015 int fd_ring = k3_ringacc_get_ring_id(uc->rflow->fd_ring); in udma_tisci_rx_channel_config()
2016 int rx_ring = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_tisci_rx_channel_config()
2022 if (uc->config.pkt_mode) { in udma_tisci_rx_channel_config()
2024 fetch_size = cppi5_hdesc_calc_size(uc->config.needs_epib, in udma_tisci_rx_channel_config()
2025 uc->config.psd_size, 0); in udma_tisci_rx_channel_config()
2037 req_rx.rx_atype = uc->config.atype; in udma_tisci_rx_channel_config()
2063 if (uc->config.needs_epib) in udma_tisci_rx_channel_config()
2067 if (uc->config.psd_size) in udma_tisci_rx_channel_config()
2090 static int bcdma_tisci_rx_channel_config(struct udma_chan *uc) in bcdma_tisci_rx_channel_config() argument
2092 struct udma_dev *ud = uc->ud; in bcdma_tisci_rx_channel_config()
2095 struct udma_rchan *rchan = uc->rchan; in bcdma_tisci_rx_channel_config()
2110 static int pktdma_tisci_rx_channel_config(struct udma_chan *uc) in pktdma_tisci_rx_channel_config() argument
2112 struct udma_dev *ud = uc->ud; in pktdma_tisci_rx_channel_config()
2121 req_rx.index = uc->rchan->id; in pktdma_tisci_rx_channel_config()
2125 dev_err(ud->dev, "rchan%d cfg failed %d\n", uc->rchan->id, ret); in pktdma_tisci_rx_channel_config()
2135 flow_req.flow_index = uc->rflow->id; in pktdma_tisci_rx_channel_config()
2137 if (uc->config.needs_epib) in pktdma_tisci_rx_channel_config()
2141 if (uc->config.psd_size) in pktdma_tisci_rx_channel_config()
2150 dev_err(ud->dev, "flow%d config failed: %d\n", uc->rflow->id, in pktdma_tisci_rx_channel_config()
2158 struct udma_chan *uc = to_udma_chan(chan); in udma_alloc_chan_resources() local
2165 uc->dma_dev = ud->dev; in udma_alloc_chan_resources()
2167 if (uc->config.pkt_mode || uc->config.dir == DMA_MEM_TO_MEM) { in udma_alloc_chan_resources()
2168 uc->use_dma_pool = true; in udma_alloc_chan_resources()
2170 if (uc->config.dir == DMA_MEM_TO_MEM) { in udma_alloc_chan_resources()
2171 uc->config.hdesc_size = cppi5_trdesc_calc_size( in udma_alloc_chan_resources()
2173 uc->config.pkt_mode = false; in udma_alloc_chan_resources()
2177 if (uc->use_dma_pool) { in udma_alloc_chan_resources()
2178 uc->hdesc_pool = dma_pool_create(uc->name, ud->ddev.dev, in udma_alloc_chan_resources()
2179 uc->config.hdesc_size, in udma_alloc_chan_resources()
2182 if (!uc->hdesc_pool) { in udma_alloc_chan_resources()
2185 uc->use_dma_pool = false; in udma_alloc_chan_resources()
2195 reinit_completion(&uc->teardown_completed); in udma_alloc_chan_resources()
2196 complete_all(&uc->teardown_completed); in udma_alloc_chan_resources()
2197 uc->state = UDMA_CHAN_IS_IDLE; in udma_alloc_chan_resources()
2199 switch (uc->config.dir) { in udma_alloc_chan_resources()
2202 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-MEM\n", __func__, in udma_alloc_chan_resources()
2203 uc->id); in udma_alloc_chan_resources()
2205 ret = udma_get_chan_pair(uc); in udma_alloc_chan_resources()
2209 ret = udma_alloc_tx_resources(uc); in udma_alloc_chan_resources()
2211 udma_put_rchan(uc); in udma_alloc_chan_resources()
2215 ret = udma_alloc_rx_resources(uc); in udma_alloc_chan_resources()
2217 udma_free_tx_resources(uc); in udma_alloc_chan_resources()
2221 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
2222 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
2225 irq_ring = uc->tchan->tc_ring; in udma_alloc_chan_resources()
2226 irq_udma_idx = uc->tchan->id; in udma_alloc_chan_resources()
2228 ret = udma_tisci_m2m_channel_config(uc); in udma_alloc_chan_resources()
2232 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in udma_alloc_chan_resources()
2233 uc->id); in udma_alloc_chan_resources()
2235 ret = udma_alloc_tx_resources(uc); in udma_alloc_chan_resources()
2239 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
2240 uc->config.dst_thread = uc->config.remote_thread_id; in udma_alloc_chan_resources()
2241 uc->config.dst_thread |= K3_PSIL_DST_THREAD_ID_OFFSET; in udma_alloc_chan_resources()
2243 irq_ring = uc->tchan->tc_ring; in udma_alloc_chan_resources()
2244 irq_udma_idx = uc->tchan->id; in udma_alloc_chan_resources()
2246 ret = udma_tisci_tx_channel_config(uc); in udma_alloc_chan_resources()
2250 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in udma_alloc_chan_resources()
2251 uc->id); in udma_alloc_chan_resources()
2253 ret = udma_alloc_rx_resources(uc); in udma_alloc_chan_resources()
2257 uc->config.src_thread = uc->config.remote_thread_id; in udma_alloc_chan_resources()
2258 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
2261 irq_ring = uc->rflow->r_ring; in udma_alloc_chan_resources()
2262 irq_udma_idx = soc_data->oes.udma_rchan + uc->rchan->id; in udma_alloc_chan_resources()
2264 ret = udma_tisci_rx_channel_config(uc); in udma_alloc_chan_resources()
2268 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in udma_alloc_chan_resources()
2269 __func__, uc->id, uc->config.dir); in udma_alloc_chan_resources()
2279 if (udma_is_chan_running(uc)) { in udma_alloc_chan_resources()
2280 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in udma_alloc_chan_resources()
2281 udma_reset_chan(uc, false); in udma_alloc_chan_resources()
2282 if (udma_is_chan_running(uc)) { in udma_alloc_chan_resources()
2283 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in udma_alloc_chan_resources()
2290 ret = navss_psil_pair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
2293 uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
2297 uc->psil_paired = true; in udma_alloc_chan_resources()
2299 uc->irq_num_ring = k3_ringacc_get_ring_irq_num(irq_ring); in udma_alloc_chan_resources()
2300 if (uc->irq_num_ring <= 0) { in udma_alloc_chan_resources()
2307 ret = request_irq(uc->irq_num_ring, udma_ring_irq_handler, in udma_alloc_chan_resources()
2308 IRQF_TRIGGER_HIGH, uc->name, uc); in udma_alloc_chan_resources()
2310 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in udma_alloc_chan_resources()
2315 if (is_slave_direction(uc->config.dir) && !uc->config.pkt_mode) { in udma_alloc_chan_resources()
2316 uc->irq_num_udma = ti_sci_inta_msi_get_virq(ud->dev, in udma_alloc_chan_resources()
2318 if (uc->irq_num_udma <= 0) { in udma_alloc_chan_resources()
2321 free_irq(uc->irq_num_ring, uc); in udma_alloc_chan_resources()
2326 ret = request_irq(uc->irq_num_udma, udma_udma_irq_handler, 0, in udma_alloc_chan_resources()
2327 uc->name, uc); in udma_alloc_chan_resources()
2330 uc->id); in udma_alloc_chan_resources()
2331 free_irq(uc->irq_num_ring, uc); in udma_alloc_chan_resources()
2335 uc->irq_num_udma = 0; in udma_alloc_chan_resources()
2338 udma_reset_rings(uc); in udma_alloc_chan_resources()
2343 uc->irq_num_ring = 0; in udma_alloc_chan_resources()
2344 uc->irq_num_udma = 0; in udma_alloc_chan_resources()
2346 navss_psil_unpair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
2347 uc->psil_paired = false; in udma_alloc_chan_resources()
2349 udma_free_tx_resources(uc); in udma_alloc_chan_resources()
2350 udma_free_rx_resources(uc); in udma_alloc_chan_resources()
2352 udma_reset_uchan(uc); in udma_alloc_chan_resources()
2354 if (uc->use_dma_pool) { in udma_alloc_chan_resources()
2355 dma_pool_destroy(uc->hdesc_pool); in udma_alloc_chan_resources()
2356 uc->use_dma_pool = false; in udma_alloc_chan_resources()
2364 struct udma_chan *uc = to_udma_chan(chan); in bcdma_alloc_chan_resources() local
2371 uc->config.pkt_mode = false; in bcdma_alloc_chan_resources()
2377 reinit_completion(&uc->teardown_completed); in bcdma_alloc_chan_resources()
2378 complete_all(&uc->teardown_completed); in bcdma_alloc_chan_resources()
2379 uc->state = UDMA_CHAN_IS_IDLE; in bcdma_alloc_chan_resources()
2381 switch (uc->config.dir) { in bcdma_alloc_chan_resources()
2384 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-MEM\n", __func__, in bcdma_alloc_chan_resources()
2385 uc->id); in bcdma_alloc_chan_resources()
2387 ret = bcdma_alloc_bchan_resources(uc); in bcdma_alloc_chan_resources()
2391 irq_ring_idx = uc->bchan->id + oes->bcdma_bchan_ring; in bcdma_alloc_chan_resources()
2392 irq_udma_idx = uc->bchan->id + oes->bcdma_bchan_data; in bcdma_alloc_chan_resources()
2394 ret = bcdma_tisci_m2m_channel_config(uc); in bcdma_alloc_chan_resources()
2398 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in bcdma_alloc_chan_resources()
2399 uc->id); in bcdma_alloc_chan_resources()
2401 ret = udma_alloc_tx_resources(uc); in bcdma_alloc_chan_resources()
2403 uc->config.remote_thread_id = -1; in bcdma_alloc_chan_resources()
2407 uc->config.src_thread = ud->psil_base + uc->tchan->id; in bcdma_alloc_chan_resources()
2408 uc->config.dst_thread = uc->config.remote_thread_id; in bcdma_alloc_chan_resources()
2409 uc->config.dst_thread |= K3_PSIL_DST_THREAD_ID_OFFSET; in bcdma_alloc_chan_resources()
2411 irq_ring_idx = uc->tchan->id + oes->bcdma_tchan_ring; in bcdma_alloc_chan_resources()
2412 irq_udma_idx = uc->tchan->id + oes->bcdma_tchan_data; in bcdma_alloc_chan_resources()
2414 ret = bcdma_tisci_tx_channel_config(uc); in bcdma_alloc_chan_resources()
2418 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in bcdma_alloc_chan_resources()
2419 uc->id); in bcdma_alloc_chan_resources()
2421 ret = udma_alloc_rx_resources(uc); in bcdma_alloc_chan_resources()
2423 uc->config.remote_thread_id = -1; in bcdma_alloc_chan_resources()
2427 uc->config.src_thread = uc->config.remote_thread_id; in bcdma_alloc_chan_resources()
2428 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in bcdma_alloc_chan_resources()
2431 irq_ring_idx = uc->rchan->id + oes->bcdma_rchan_ring; in bcdma_alloc_chan_resources()
2432 irq_udma_idx = uc->rchan->id + oes->bcdma_rchan_data; in bcdma_alloc_chan_resources()
2434 ret = bcdma_tisci_rx_channel_config(uc); in bcdma_alloc_chan_resources()
2438 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in bcdma_alloc_chan_resources()
2439 __func__, uc->id, uc->config.dir); in bcdma_alloc_chan_resources()
2447 if (udma_is_chan_running(uc)) { in bcdma_alloc_chan_resources()
2448 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in bcdma_alloc_chan_resources()
2449 udma_reset_chan(uc, false); in bcdma_alloc_chan_resources()
2450 if (udma_is_chan_running(uc)) { in bcdma_alloc_chan_resources()
2451 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in bcdma_alloc_chan_resources()
2457 uc->dma_dev = dmaengine_get_dma_device(chan); in bcdma_alloc_chan_resources()
2458 if (uc->config.dir == DMA_MEM_TO_MEM && !uc->config.tr_trigger_type) { in bcdma_alloc_chan_resources()
2459 uc->config.hdesc_size = cppi5_trdesc_calc_size( in bcdma_alloc_chan_resources()
2462 uc->hdesc_pool = dma_pool_create(uc->name, ud->ddev.dev, in bcdma_alloc_chan_resources()
2463 uc->config.hdesc_size, in bcdma_alloc_chan_resources()
2466 if (!uc->hdesc_pool) { in bcdma_alloc_chan_resources()
2469 uc->use_dma_pool = false; in bcdma_alloc_chan_resources()
2474 uc->use_dma_pool = true; in bcdma_alloc_chan_resources()
2475 } else if (uc->config.dir != DMA_MEM_TO_MEM) { in bcdma_alloc_chan_resources()
2477 ret = navss_psil_pair(ud, uc->config.src_thread, in bcdma_alloc_chan_resources()
2478 uc->config.dst_thread); in bcdma_alloc_chan_resources()
2482 uc->config.src_thread, uc->config.dst_thread); in bcdma_alloc_chan_resources()
2486 uc->psil_paired = true; in bcdma_alloc_chan_resources()
2489 uc->irq_num_ring = ti_sci_inta_msi_get_virq(ud->dev, irq_ring_idx); in bcdma_alloc_chan_resources()
2490 if (uc->irq_num_ring <= 0) { in bcdma_alloc_chan_resources()
2497 ret = request_irq(uc->irq_num_ring, udma_ring_irq_handler, in bcdma_alloc_chan_resources()
2498 IRQF_TRIGGER_HIGH, uc->name, uc); in bcdma_alloc_chan_resources()
2500 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in bcdma_alloc_chan_resources()
2505 if (is_slave_direction(uc->config.dir)) { in bcdma_alloc_chan_resources()
2506 uc->irq_num_udma = ti_sci_inta_msi_get_virq(ud->dev, in bcdma_alloc_chan_resources()
2508 if (uc->irq_num_udma <= 0) { in bcdma_alloc_chan_resources()
2511 free_irq(uc->irq_num_ring, uc); in bcdma_alloc_chan_resources()
2516 ret = request_irq(uc->irq_num_udma, udma_udma_irq_handler, 0, in bcdma_alloc_chan_resources()
2517 uc->name, uc); in bcdma_alloc_chan_resources()
2520 uc->id); in bcdma_alloc_chan_resources()
2521 free_irq(uc->irq_num_ring, uc); in bcdma_alloc_chan_resources()
2525 uc->irq_num_udma = 0; in bcdma_alloc_chan_resources()
2528 udma_reset_rings(uc); in bcdma_alloc_chan_resources()
2530 INIT_DELAYED_WORK_ONSTACK(&uc->tx_drain.work, in bcdma_alloc_chan_resources()
2535 uc->irq_num_ring = 0; in bcdma_alloc_chan_resources()
2536 uc->irq_num_udma = 0; in bcdma_alloc_chan_resources()
2538 if (uc->psil_paired) in bcdma_alloc_chan_resources()
2539 navss_psil_unpair(ud, uc->config.src_thread, in bcdma_alloc_chan_resources()
2540 uc->config.dst_thread); in bcdma_alloc_chan_resources()
2541 uc->psil_paired = false; in bcdma_alloc_chan_resources()
2543 bcdma_free_bchan_resources(uc); in bcdma_alloc_chan_resources()
2544 udma_free_tx_resources(uc); in bcdma_alloc_chan_resources()
2545 udma_free_rx_resources(uc); in bcdma_alloc_chan_resources()
2547 udma_reset_uchan(uc); in bcdma_alloc_chan_resources()
2549 if (uc->use_dma_pool) { in bcdma_alloc_chan_resources()
2550 dma_pool_destroy(uc->hdesc_pool); in bcdma_alloc_chan_resources()
2551 uc->use_dma_pool = false; in bcdma_alloc_chan_resources()
2560 struct udma_chan *uc = to_udma_chan(chan); in bcdma_router_config() local
2563 if (!uc->bchan) in bcdma_router_config()
2566 if (uc->config.tr_trigger_type != 1 && uc->config.tr_trigger_type != 2) in bcdma_router_config()
2569 trigger_event = uc->ud->soc_data->bcdma_trigger_event_offset; in bcdma_router_config()
2570 trigger_event += (uc->bchan->id * 2) + uc->config.tr_trigger_type - 1; in bcdma_router_config()
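bcdma_router_config() turns a bchan plus trigger type into a global event number: two trigger events per bchan, offset by an SoC-specific base. A sketch; routing the result through chan->route_data (struct k3_event_route_data) is an assumption based on the mainline event-router API:

static int bcdma_router_config(struct dma_chan *chan)
{
	struct k3_event_route_data *router_data = chan->route_data;
	struct udma_chan *uc = to_udma_chan(chan);
	u32 trigger_event;

	if (!uc->bchan)
		return -EINVAL;

	/* trigger type 0 means no trigger; only types 1 and 2 are routed */
	if (uc->config.tr_trigger_type != 1 && uc->config.tr_trigger_type != 2)
		return -EINVAL;

	trigger_event = uc->ud->soc_data->bcdma_trigger_event_offset;
	trigger_event += (uc->bchan->id * 2) + uc->config.tr_trigger_type - 1;

	return router_data->set_event(router_data->priv, trigger_event);
}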
2577 struct udma_chan *uc = to_udma_chan(chan); in pktdma_alloc_chan_resources() local
2587 reinit_completion(&uc->teardown_completed); in pktdma_alloc_chan_resources()
2588 complete_all(&uc->teardown_completed); in pktdma_alloc_chan_resources()
2589 uc->state = UDMA_CHAN_IS_IDLE; in pktdma_alloc_chan_resources()
2591 switch (uc->config.dir) { in pktdma_alloc_chan_resources()
2594 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in pktdma_alloc_chan_resources()
2595 uc->id); in pktdma_alloc_chan_resources()
2597 ret = udma_alloc_tx_resources(uc); in pktdma_alloc_chan_resources()
2599 uc->config.remote_thread_id = -1; in pktdma_alloc_chan_resources()
2603 uc->config.src_thread = ud->psil_base + uc->tchan->id; in pktdma_alloc_chan_resources()
2604 uc->config.dst_thread = uc->config.remote_thread_id; in pktdma_alloc_chan_resources()
2605 uc->config.dst_thread |= K3_PSIL_DST_THREAD_ID_OFFSET; in pktdma_alloc_chan_resources()
2607 irq_ring_idx = uc->tchan->tflow_id + oes->pktdma_tchan_flow; in pktdma_alloc_chan_resources()
2609 ret = pktdma_tisci_tx_channel_config(uc); in pktdma_alloc_chan_resources()
2613 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in pktdma_alloc_chan_resources()
2614 uc->id); in pktdma_alloc_chan_resources()
2616 ret = udma_alloc_rx_resources(uc); in pktdma_alloc_chan_resources()
2618 uc->config.remote_thread_id = -1; in pktdma_alloc_chan_resources()
2622 uc->config.src_thread = uc->config.remote_thread_id; in pktdma_alloc_chan_resources()
2623 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in pktdma_alloc_chan_resources()
2626 irq_ring_idx = uc->rflow->id + oes->pktdma_rchan_flow; in pktdma_alloc_chan_resources()
2628 ret = pktdma_tisci_rx_channel_config(uc); in pktdma_alloc_chan_resources()
2632 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in pktdma_alloc_chan_resources()
2633 __func__, uc->id, uc->config.dir); in pktdma_alloc_chan_resources()
2641 if (udma_is_chan_running(uc)) { in pktdma_alloc_chan_resources()
2642 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in pktdma_alloc_chan_resources()
2643 udma_reset_chan(uc, false); in pktdma_alloc_chan_resources()
2644 if (udma_is_chan_running(uc)) { in pktdma_alloc_chan_resources()
2645 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in pktdma_alloc_chan_resources()
2651 uc->dma_dev = dmaengine_get_dma_device(chan); in pktdma_alloc_chan_resources()
2652 uc->hdesc_pool = dma_pool_create(uc->name, uc->dma_dev, in pktdma_alloc_chan_resources()
2653 uc->config.hdesc_size, ud->desc_align, in pktdma_alloc_chan_resources()
2655 if (!uc->hdesc_pool) { in pktdma_alloc_chan_resources()
2658 uc->use_dma_pool = false; in pktdma_alloc_chan_resources()
2663 uc->use_dma_pool = true; in pktdma_alloc_chan_resources()
2666 ret = navss_psil_pair(ud, uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2669 uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2673 uc->psil_paired = true; in pktdma_alloc_chan_resources()
2675 uc->irq_num_ring = ti_sci_inta_msi_get_virq(ud->dev, irq_ring_idx); in pktdma_alloc_chan_resources()
2676 if (uc->irq_num_ring <= 0) { in pktdma_alloc_chan_resources()
2683 ret = request_irq(uc->irq_num_ring, udma_ring_irq_handler, in pktdma_alloc_chan_resources()
2684 IRQF_TRIGGER_HIGH, uc->name, uc); in pktdma_alloc_chan_resources()
2686 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in pktdma_alloc_chan_resources()
2690 uc->irq_num_udma = 0; in pktdma_alloc_chan_resources()
2692 udma_reset_rings(uc); in pktdma_alloc_chan_resources()
2694 INIT_DELAYED_WORK_ONSTACK(&uc->tx_drain.work, in pktdma_alloc_chan_resources()
2697 if (uc->tchan) in pktdma_alloc_chan_resources()
2700 uc->id, uc->tchan->id, uc->tchan->tflow_id, in pktdma_alloc_chan_resources()
2701 uc->config.remote_thread_id); in pktdma_alloc_chan_resources()
2702 else if (uc->rchan) in pktdma_alloc_chan_resources()
2705 uc->id, uc->rchan->id, uc->rflow->id, in pktdma_alloc_chan_resources()
2706 uc->config.remote_thread_id); in pktdma_alloc_chan_resources()
2710 uc->irq_num_ring = 0; in pktdma_alloc_chan_resources()
2712 navss_psil_unpair(ud, uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2713 uc->psil_paired = false; in pktdma_alloc_chan_resources()
2715 udma_free_tx_resources(uc); in pktdma_alloc_chan_resources()
2716 udma_free_rx_resources(uc); in pktdma_alloc_chan_resources()
2718 udma_reset_uchan(uc); in pktdma_alloc_chan_resources()
2720 dma_pool_destroy(uc->hdesc_pool); in pktdma_alloc_chan_resources()
2721 uc->use_dma_pool = false; in pktdma_alloc_chan_resources()
2729 struct udma_chan *uc = to_udma_chan(chan); in udma_slave_config() local
2731 memcpy(&uc->cfg, cfg, sizeof(uc->cfg)); in udma_slave_config()
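udma_slave_config() does nothing beyond caching the dmaengine slave configuration for later prep calls:

static int udma_slave_config(struct dma_chan *chan,
			     struct dma_slave_config *cfg)
{
	struct udma_chan *uc = to_udma_chan(chan);

	memcpy(&uc->cfg, cfg, sizeof(uc->cfg));

	return 0;
}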
2736 static struct udma_desc *udma_alloc_tr_desc(struct udma_chan *uc, in udma_alloc_tr_desc() argument
2753 dev_err(uc->ud->dev, "Unsupported TR size of %zu\n", tr_size); in udma_alloc_tr_desc()
2768 if (uc->use_dma_pool) { in udma_alloc_tr_desc()
2769 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_alloc_tr_desc()
2770 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_alloc_tr_desc()
2777 uc->ud->desc_align); in udma_alloc_tr_desc()
2778 hwdesc->cppi5_desc_vaddr = dma_alloc_coherent(uc->ud->dev, in udma_alloc_tr_desc()
2796 if (uc->cyclic) in udma_alloc_tr_desc()
2800 ring_id = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_alloc_tr_desc()
2802 ring_id = k3_ringacc_get_ring_id(uc->tchan->tc_ring); in udma_alloc_tr_desc()
2805 cppi5_desc_set_pktids(tr_desc, uc->id, in udma_alloc_tr_desc()
2858 udma_prep_slave_sg_tr(struct udma_chan *uc, struct scatterlist *sgl, in udma_prep_slave_sg_tr() argument
2882 d = udma_alloc_tr_desc(uc, tr_size, num_tr, dir); in udma_prep_slave_sg_tr()
2888 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_slave_sg_tr()
2891 asel = (u64)uc->config.asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_slave_sg_tr()
2900 dev_err(uc->ud->dev, "size %u is not supported\n", in udma_prep_slave_sg_tr()
2902 udma_free_hwdesc(uc, d); in udma_prep_slave_sg_tr()
2942 udma_prep_slave_sg_triggered_tr(struct udma_chan *uc, struct scatterlist *sgl, in udma_prep_slave_sg_triggered_tr() argument
2961 dev_addr = uc->cfg.src_addr; in udma_prep_slave_sg_triggered_tr()
2962 dev_width = uc->cfg.src_addr_width; in udma_prep_slave_sg_triggered_tr()
2963 burst = uc->cfg.src_maxburst; in udma_prep_slave_sg_triggered_tr()
2964 port_window = uc->cfg.src_port_window_size; in udma_prep_slave_sg_triggered_tr()
2966 dev_addr = uc->cfg.dst_addr; in udma_prep_slave_sg_triggered_tr()
2967 dev_width = uc->cfg.dst_addr_width; in udma_prep_slave_sg_triggered_tr()
2968 burst = uc->cfg.dst_maxburst; in udma_prep_slave_sg_triggered_tr()
2969 port_window = uc->cfg.dst_port_window_size; in udma_prep_slave_sg_triggered_tr()
2971 dev_err(uc->ud->dev, "%s: bad direction?\n", __func__); in udma_prep_slave_sg_triggered_tr()
2980 dev_err(uc->ud->dev, in udma_prep_slave_sg_triggered_tr()
2998 dev_err(uc->ud->dev, in udma_prep_slave_sg_triggered_tr()
3012 d = udma_alloc_tr_desc(uc, tr_size, num_tr, dir); in udma_prep_slave_sg_triggered_tr()
3018 if (uc->ud->match_data->type == DMA_TYPE_UDMA) { in udma_prep_slave_sg_triggered_tr()
3021 asel = (u64)uc->config.asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_slave_sg_triggered_tr()
3034 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_slave_sg_triggered_tr()
3036 udma_free_hwdesc(uc, d); in udma_prep_slave_sg_triggered_tr()
3045 uc->config.tr_trigger_type, in udma_prep_slave_sg_triggered_tr()
3092 uc->config.tr_trigger_type, in udma_prep_slave_sg_triggered_tr()
3140 static int udma_configure_statictr(struct udma_chan *uc, struct udma_desc *d, in udma_configure_statictr() argument
3144 if (uc->config.ep_type != PSIL_EP_PDMA_XY) in udma_configure_statictr()
3176 if (uc->config.pkt_mode || !uc->cyclic) { in udma_configure_statictr()
3179 if (uc->cyclic) in udma_configure_statictr()
3184 if (uc->config.dir == DMA_DEV_TO_MEM && in udma_configure_statictr()
3185 d->static_tr.bstcnt > uc->ud->match_data->statictr_z_mask) in udma_configure_statictr()
3195 udma_prep_slave_sg_pkt(struct udma_chan *uc, struct scatterlist *sgl, in udma_prep_slave_sg_pkt() argument
3214 ring_id = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_prep_slave_sg_pkt()
3216 ring_id = k3_ringacc_get_ring_id(uc->tchan->tc_ring); in udma_prep_slave_sg_pkt()
3218 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_slave_sg_pkt()
3221 asel = (u64)uc->config.asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_slave_sg_pkt()
3229 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_prep_slave_sg_pkt()
3233 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
3236 udma_free_hwdesc(uc, d); in udma_prep_slave_sg_pkt()
3242 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_prep_slave_sg_pkt()
3248 cppi5_desc_set_pktids(&desc->hdr, uc->id, in udma_prep_slave_sg_pkt()
3265 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA || in udma_prep_slave_sg_pkt()
3271 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
3274 udma_free_hwdesc(uc, d); in udma_prep_slave_sg_pkt()
3289 struct udma_chan *uc = to_udma_chan(desc->chan); in udma_attach_metadata() local
3294 if (!uc->config.pkt_mode || !uc->config.metadata_size) in udma_attach_metadata()
3297 if (!data || len > uc->config.metadata_size) in udma_attach_metadata()
3300 if (uc->config.needs_epib && len < CPPI5_INFO0_HDESC_EPIB_SIZE) in udma_attach_metadata()
3307 if (uc->config.needs_epib) in udma_attach_metadata()
3312 if (uc->config.needs_epib) in udma_attach_metadata()
3325 struct udma_chan *uc = to_udma_chan(desc->chan); in udma_get_metadata_ptr() local
3328 if (!uc->config.pkt_mode || !uc->config.metadata_size) in udma_get_metadata_ptr()
3333 *max_len = uc->config.metadata_size; in udma_get_metadata_ptr()
3346 struct udma_chan *uc = to_udma_chan(desc->chan); in udma_set_metadata_len() local
3351 if (!uc->config.pkt_mode || !uc->config.metadata_size) in udma_set_metadata_len()
3354 if (payload_len > uc->config.metadata_size) in udma_set_metadata_len()
3357 if (uc->config.needs_epib && payload_len < CPPI5_INFO0_HDESC_EPIB_SIZE) in udma_set_metadata_len()
3362 if (uc->config.needs_epib) { in udma_set_metadata_len()
3384 struct udma_chan *uc = to_udma_chan(chan); in udma_prep_slave_sg() local
3389 if (dir != uc->config.dir && in udma_prep_slave_sg()
3390 (uc->config.dir == DMA_MEM_TO_MEM && !uc->config.tr_trigger_type)) { in udma_prep_slave_sg()
3393 __func__, uc->id, in udma_prep_slave_sg()
3394 dmaengine_get_direction_text(uc->config.dir), in udma_prep_slave_sg()
3400 dev_width = uc->cfg.src_addr_width; in udma_prep_slave_sg()
3401 burst = uc->cfg.src_maxburst; in udma_prep_slave_sg()
3403 dev_width = uc->cfg.dst_addr_width; in udma_prep_slave_sg()
3404 burst = uc->cfg.dst_maxburst; in udma_prep_slave_sg()
3413 if (uc->config.pkt_mode) in udma_prep_slave_sg()
3414 d = udma_prep_slave_sg_pkt(uc, sgl, sglen, dir, tx_flags, in udma_prep_slave_sg()
3416 else if (is_slave_direction(uc->config.dir)) in udma_prep_slave_sg()
3417 d = udma_prep_slave_sg_tr(uc, sgl, sglen, dir, tx_flags, in udma_prep_slave_sg()
3420 d = udma_prep_slave_sg_triggered_tr(uc, sgl, sglen, dir, in udma_prep_slave_sg()
3431 if (udma_configure_statictr(uc, d, dev_width, burst)) { in udma_prep_slave_sg()
3432 dev_err(uc->ud->dev, in udma_prep_slave_sg()
3436 udma_free_hwdesc(uc, d); in udma_prep_slave_sg()
3441 if (uc->config.metadata_size) in udma_prep_slave_sg()
3444 return vchan_tx_prep(&uc->vc, &d->vd, tx_flags); in udma_prep_slave_sg()
3448 udma_prep_dma_cyclic_tr(struct udma_chan *uc, dma_addr_t buf_addr, in udma_prep_dma_cyclic_tr() argument
3463 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_cyclic_tr()
3470 d = udma_alloc_tr_desc(uc, tr_size, periods * num_tr, dir); in udma_prep_dma_cyclic_tr()
3475 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_dma_cyclic_tr()
3479 ((u64)uc->config.asel << K3_ADDRESS_ASEL_SHIFT); in udma_prep_dma_cyclic_tr()
3518 udma_prep_dma_cyclic_pkt(struct udma_chan *uc, dma_addr_t buf_addr, in udma_prep_dma_cyclic_pkt() argument
3541 ring_id = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_prep_dma_cyclic_pkt()
3543 ring_id = k3_ringacc_get_ring_id(uc->tchan->tc_ring); in udma_prep_dma_cyclic_pkt()
3545 if (uc->ud->match_data->type != DMA_TYPE_UDMA) in udma_prep_dma_cyclic_pkt()
3546 buf_addr |= (u64)uc->config.asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_cyclic_pkt()
3553 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_prep_dma_cyclic_pkt()
3557 dev_err(uc->ud->dev, in udma_prep_dma_cyclic_pkt()
3560 udma_free_hwdesc(uc, d); in udma_prep_dma_cyclic_pkt()
3565 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_prep_dma_cyclic_pkt()
3572 cppi5_desc_set_pktids(&h_desc->hdr, uc->id, in udma_prep_dma_cyclic_pkt()
3590 struct udma_chan *uc = to_udma_chan(chan); in udma_prep_dma_cyclic() local
3595 if (dir != uc->config.dir) { in udma_prep_dma_cyclic()
3598 __func__, uc->id, in udma_prep_dma_cyclic()
3599 dmaengine_get_direction_text(uc->config.dir), in udma_prep_dma_cyclic()
3604 uc->cyclic = true; in udma_prep_dma_cyclic()
3607 dev_width = uc->cfg.src_addr_width; in udma_prep_dma_cyclic()
3608 burst = uc->cfg.src_maxburst; in udma_prep_dma_cyclic()
3610 dev_width = uc->cfg.dst_addr_width; in udma_prep_dma_cyclic()
3611 burst = uc->cfg.dst_maxburst; in udma_prep_dma_cyclic()
3613 dev_err(uc->ud->dev, "%s: bad direction?\n", __func__); in udma_prep_dma_cyclic()
3620 if (uc->config.pkt_mode) in udma_prep_dma_cyclic()
3621 d = udma_prep_dma_cyclic_pkt(uc, buf_addr, buf_len, period_len, in udma_prep_dma_cyclic()
3624 d = udma_prep_dma_cyclic_tr(uc, buf_addr, buf_len, period_len, in udma_prep_dma_cyclic()
3636 if (udma_configure_statictr(uc, d, dev_width, burst)) { in udma_prep_dma_cyclic()
3637 dev_err(uc->ud->dev, in udma_prep_dma_cyclic()
3641 udma_free_hwdesc(uc, d); in udma_prep_dma_cyclic()
3646 if (uc->config.metadata_size) in udma_prep_dma_cyclic()
3649 return vchan_tx_prep(&uc->vc, &d->vd, flags); in udma_prep_dma_cyclic()
3656 struct udma_chan *uc = to_udma_chan(chan); in udma_prep_dma_memcpy() local
3663 if (uc->config.dir != DMA_MEM_TO_MEM) { in udma_prep_dma_memcpy()
3666 __func__, uc->id, in udma_prep_dma_memcpy()
3667 dmaengine_get_direction_text(uc->config.dir), in udma_prep_dma_memcpy()
3675 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_memcpy()
3680 d = udma_alloc_tr_desc(uc, tr_size, num_tr, DMA_MEM_TO_MEM); in udma_prep_dma_memcpy()
3689 if (uc->ud->match_data->type != DMA_TYPE_UDMA) { in udma_prep_dma_memcpy()
3690 src |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_memcpy()
3691 dest |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_memcpy()
3735 if (uc->config.metadata_size) in udma_prep_dma_memcpy()
3738 return vchan_tx_prep(&uc->vc, &d->vd, tx_flags); in udma_prep_dma_memcpy()
3743 struct udma_chan *uc = to_udma_chan(chan); in udma_issue_pending() local
3746 spin_lock_irqsave(&uc->vc.lock, flags); in udma_issue_pending()
3749 if (vchan_issue_pending(&uc->vc) && !uc->desc) { in udma_issue_pending()
3755 if (!(uc->state == UDMA_CHAN_IS_TERMINATING && in udma_issue_pending()
3756 udma_is_chan_running(uc))) in udma_issue_pending()
3757 udma_start(uc); in udma_issue_pending()
3760 spin_unlock_irqrestore(&uc->vc.lock, flags); in udma_issue_pending()
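In full, udma_issue_pending() only kicks udma_start() when there is pending work, no active descriptor, and the channel is not still mid-teardown (the guard visible at lines 3755-3757):

static void udma_issue_pending(struct dma_chan *chan)
{
	struct udma_chan *uc = to_udma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&uc->vc.lock, flags);

	/* If we have something pending and no active descriptor, then */
	if (vchan_issue_pending(&uc->vc) && !uc->desc) {
		/*
		 * start a descriptor if the channel is NOT [marked as
		 * terminating _and_ still running (teardown not complete)]
		 */
		if (!(uc->state == UDMA_CHAN_IS_TERMINATING &&
		      udma_is_chan_running(uc)))
			udma_start(uc);
	}

	spin_unlock_irqrestore(&uc->vc.lock, flags);
}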
3767 struct udma_chan *uc = to_udma_chan(chan); in udma_tx_status() local
3771 spin_lock_irqsave(&uc->vc.lock, flags); in udma_tx_status()
3775 if (!udma_is_chan_running(uc)) in udma_tx_status()
3778 if (ret == DMA_IN_PROGRESS && udma_is_chan_paused(uc)) in udma_tx_status()
3784 if (uc->desc && uc->desc->vd.tx.cookie == cookie) { in udma_tx_status()
3787 u32 residue = uc->desc->residue; in udma_tx_status()
3790 if (uc->desc->dir == DMA_MEM_TO_DEV) { in udma_tx_status()
3791 bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_SBCNT_REG); in udma_tx_status()
3793 if (uc->config.ep_type != PSIL_EP_NATIVE) { in udma_tx_status()
3794 peer_bcnt = udma_tchanrt_read(uc, in udma_tx_status()
3800 } else if (uc->desc->dir == DMA_DEV_TO_MEM) { in udma_tx_status()
3801 bcnt = udma_rchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_tx_status()
3803 if (uc->config.ep_type != PSIL_EP_NATIVE) { in udma_tx_status()
3804 peer_bcnt = udma_rchanrt_read(uc, in udma_tx_status()
3811 bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_tx_status()
3814 bcnt -= uc->bcnt; in udma_tx_status()
3815 if (bcnt && !(bcnt % uc->desc->residue)) in udma_tx_status()
3818 residue -= bcnt % uc->desc->residue; in udma_tx_status()
3820 if (!residue && (uc->config.dir == DMA_DEV_TO_MEM || !delay)) { in udma_tx_status()
3833 spin_unlock_irqrestore(&uc->vc.lock, flags); in udma_tx_status()
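A hedged sketch of how a consumer reads back the residue that udma_tx_status() computes from the RT byte counters; my_bytes_left() is an illustrative name.

#include <linux/dmaengine.h>

static size_t my_bytes_left(struct dma_chan *chan, dma_cookie_t cookie)
{
	struct dma_tx_state state;
	enum dma_status status;

	status = dmaengine_tx_status(chan, cookie, &state);
	if (status == DMA_COMPLETE)
		return 0;
	return state.residue;	/* bytes the channel has not yet moved */
}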
3839 struct udma_chan *uc = to_udma_chan(chan); in udma_pause() local
3842 switch (uc->config.dir) { in udma_pause()
3844 udma_rchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_pause()
3849 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_pause()
3854 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_CTL_REG, in udma_pause()
3867 struct udma_chan *uc = to_udma_chan(chan); in udma_resume() local
3870 switch (uc->config.dir) { in udma_resume()
3872 udma_rchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_resume()
3877 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_resume()
3881 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_CTL_REG, in udma_resume()
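A hedged sketch of the consumer entry points for the two callbacks above; dmaengine_pause()/dmaengine_resume() dispatch to udma_pause()/udma_resume(), which flip the peer or channel RT-enable bits depending on direction.

#include <linux/dmaengine.h>

static int my_pause_resume(struct dma_chan *chan, bool pause)
{
	/* both return 0 on success or a negative errno */
	return pause ? dmaengine_pause(chan) : dmaengine_resume(chan);
}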
3893 struct udma_chan *uc = to_udma_chan(chan); in udma_terminate_all() local
3897 spin_lock_irqsave(&uc->vc.lock, flags); in udma_terminate_all()
3899 if (udma_is_chan_running(uc)) in udma_terminate_all()
3900 udma_stop(uc); in udma_terminate_all()
3902 if (uc->desc) { in udma_terminate_all()
3903 uc->terminated_desc = uc->desc; in udma_terminate_all()
3904 uc->desc = NULL; in udma_terminate_all()
3905 uc->terminated_desc->terminated = true; in udma_terminate_all()
3906 cancel_delayed_work(&uc->tx_drain.work); in udma_terminate_all()
3909 uc->paused = false; in udma_terminate_all()
3911 vchan_get_all_descriptors(&uc->vc, &head); in udma_terminate_all()
3912 spin_unlock_irqrestore(&uc->vc.lock, flags); in udma_terminate_all()
3913 vchan_dma_desc_free_list(&uc->vc, &head); in udma_terminate_all()
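A hedged sketch of the canonical virt-dma terminate_all shape visible above: channel state is detached under vc.lock, while the descriptors are freed outside it because desc_free callbacks may schedule work.

#include "virt-dma.h"

static int my_terminate_all(struct dma_chan *chan)
{
	struct virt_dma_chan *vc = to_virt_chan(chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&vc->lock, flags);
	/* stop the hardware and park the in-flight descriptor here */
	vchan_get_all_descriptors(vc, &head);
	spin_unlock_irqrestore(&vc->lock, flags);

	vchan_dma_desc_free_list(vc, &head);	/* no lock held */
	return 0;
}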
3920 struct udma_chan *uc = to_udma_chan(chan); in udma_synchronize() local
3923 vchan_synchronize(&uc->vc); in udma_synchronize()
3925 if (uc->state == UDMA_CHAN_IS_TERMINATING) { in udma_synchronize()
3926 timeout = wait_for_completion_timeout(&uc->teardown_completed, in udma_synchronize()
3929 dev_warn(uc->ud->dev, "chan%d teardown timeout!\n", in udma_synchronize()
3930 uc->id); in udma_synchronize()
3931 udma_dump_chan_stdata(uc); in udma_synchronize()
3932 udma_reset_chan(uc, true); in udma_synchronize()
3936 udma_reset_chan(uc, false); in udma_synchronize()
3937 if (udma_is_chan_running(uc)) in udma_synchronize()
3938 dev_warn(uc->ud->dev, "chan%d refused to stop!\n", uc->id); in udma_synchronize()
3940 cancel_delayed_work_sync(&uc->tx_drain.work); in udma_synchronize()
3941 udma_reset_rings(uc); in udma_synchronize()
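A hedged sketch of the bounded teardown wait in udma_synchronize(); the 100 ms timeout is an assumption, since the driver's actual value is not shown in this listing.

#include <linux/completion.h>
#include <linux/device.h>
#include <linux/jiffies.h>

static void my_wait_teardown(struct completion *done, struct device *dev,
			     int id)
{
	unsigned long left;

	left = wait_for_completion_timeout(done, msecs_to_jiffies(100));
	if (!left)
		dev_warn(dev, "chan%d teardown timeout!\n", id);
	/* on timeout: dump channel state and hard-reset, as done above */
}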
3948 struct udma_chan *uc = to_udma_chan(&vc->chan); in udma_desc_pre_callback() local
3957 udma_fetch_epib(uc, d); in udma_desc_pre_callback()
4019 struct udma_chan *uc = to_udma_chan(chan); in udma_free_chan_resources() local
4023 if (uc->terminated_desc) { in udma_free_chan_resources()
4024 udma_reset_chan(uc, false); in udma_free_chan_resources()
4025 udma_reset_rings(uc); in udma_free_chan_resources()
4028 cancel_delayed_work_sync(&uc->tx_drain.work); in udma_free_chan_resources()
4030 if (uc->irq_num_ring > 0) { in udma_free_chan_resources()
4031 free_irq(uc->irq_num_ring, uc); in udma_free_chan_resources()
4033 uc->irq_num_ring = 0; in udma_free_chan_resources()
4035 if (uc->irq_num_udma > 0) { in udma_free_chan_resources()
4036 free_irq(uc->irq_num_udma, uc); in udma_free_chan_resources()
4038 uc->irq_num_udma = 0; in udma_free_chan_resources()
4042 if (uc->psil_paired) { in udma_free_chan_resources()
4043 navss_psil_unpair(ud, uc->config.src_thread, in udma_free_chan_resources()
4044 uc->config.dst_thread); in udma_free_chan_resources()
4045 uc->psil_paired = false; in udma_free_chan_resources()
4048 vchan_free_chan_resources(&uc->vc); in udma_free_chan_resources()
4049 tasklet_kill(&uc->vc.task); in udma_free_chan_resources()
4051 bcdma_free_bchan_resources(uc); in udma_free_chan_resources()
4052 udma_free_tx_resources(uc); in udma_free_chan_resources()
4053 udma_free_rx_resources(uc); in udma_free_chan_resources()
4054 udma_reset_uchan(uc); in udma_free_chan_resources()
4056 if (uc->use_dma_pool) { in udma_free_chan_resources()
4057 dma_pool_destroy(uc->hdesc_pool); in udma_free_chan_resources()
4058 uc->use_dma_pool = false; in udma_free_chan_resources()
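A hedged consumer-side mirror of the cleanup above: terminating and synchronizing before release guarantees that udma_free_chan_resources() finds no live descriptors or pending drain work.

#include <linux/dmaengine.h>

static void my_release(struct dma_chan *chan)
{
	dmaengine_terminate_sync(chan);	/* terminate_all + synchronize */
	dma_release_channel(chan);	/* ends in udma_free_chan_resources() */
}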
4078 struct udma_chan *uc; in udma_dma_filter_fn() local
4086 uc = to_udma_chan(chan); in udma_dma_filter_fn()
4087 ucc = &uc->config; in udma_dma_filter_fn()
4088 ud = uc->ud; in udma_dma_filter_fn()
4173 dev_dbg(ud->dev, "chan%d: Remote thread: 0x%04x (%s)\n", uc->id, in udma_dma_filter_fn()
4179 dev_dbg(ud->dev, "chan%d: triggered channel (type: %u)\n", uc->id, in udma_dma_filter_fn()
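A hedged sketch of how a filter function such as udma_dma_filter_fn() is invoked; the accept-everything body of my_filter() is illustrative only, a real filter matches channel properties against the param before claiming the channel.

#include <linux/dmaengine.h>

static bool my_filter(struct dma_chan *chan, void *param)
{
	/* inspect chan (cf. uc->config above); return true to claim it */
	return true;
}

static struct dma_chan *my_request(void *param)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);
	return dma_request_channel(mask, my_filter, param);
}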
5135 struct udma_chan *uc = to_udma_chan(chan); in udma_dbg_summary_show_chan() local
5136 struct udma_chan_config *ucc = &uc->config; in udma_dbg_summary_show_chan()
5144 dmaengine_get_direction_text(uc->config.dir)); in udma_dbg_summary_show_chan()
5146 switch (uc->config.dir) { in udma_dbg_summary_show_chan()
5148 if (uc->ud->match_data->type == DMA_TYPE_BCDMA) { in udma_dbg_summary_show_chan()
5149 seq_printf(s, "bchan%d)\n", uc->bchan->id); in udma_dbg_summary_show_chan()
5153 seq_printf(s, "chan%d pair [0x%04x -> 0x%04x], ", uc->tchan->id, in udma_dbg_summary_show_chan()
5157 seq_printf(s, "rchan%d [0x%04x -> 0x%04x], ", uc->rchan->id, in udma_dbg_summary_show_chan()
5159 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA) in udma_dbg_summary_show_chan()
5160 seq_printf(s, "rflow%d, ", uc->rflow->id); in udma_dbg_summary_show_chan()
5163 seq_printf(s, "tchan%d [0x%04x -> 0x%04x], ", uc->tchan->id, in udma_dbg_summary_show_chan()
5165 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA) in udma_dbg_summary_show_chan()
5166 seq_printf(s, "tflow%d, ", uc->tchan->tflow_id); in udma_dbg_summary_show_chan()
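A hedged sketch of a dbg_summary_show callback of the kind shown above; it is wired into struct dma_device and prints one line per in-use channel, here without the driver's direction/thread detail.

#include <linux/dmaengine.h>
#include <linux/seq_file.h>

static void my_dbg_summary_show(struct seq_file *s,
				struct dma_device *dma_dev)
{
	struct dma_chan *chan;

	list_for_each_entry(chan, &dma_dev->channels, device_node) {
		if (chan->client_count)
			seq_printf(s, " %-13s| in use\n",
				   dma_chan_name(chan));
	}
}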
5447 struct udma_chan *uc = &ud->channels[i]; in udma_probe() local
5449 uc->ud = ud; in udma_probe()
5450 uc->vc.desc_free = udma_desc_free; in udma_probe()
5451 uc->id = i; in udma_probe()
5452 uc->bchan = NULL; in udma_probe()
5453 uc->tchan = NULL; in udma_probe()
5454 uc->rchan = NULL; in udma_probe()
5455 uc->config.remote_thread_id = -1; in udma_probe()
5456 uc->config.mapped_channel_id = -1; in udma_probe()
5457 uc->config.default_flow_id = -1; in udma_probe()
5458 uc->config.dir = DMA_MEM_TO_MEM; in udma_probe()
5459 uc->name = devm_kasprintf(dev, GFP_KERNEL, "%s chan%d", in udma_probe()
5462 vchan_init(&uc->vc, &ud->ddev); in udma_probe()
5464 tasklet_setup(&uc->vc.task, udma_vchan_complete); in udma_probe()
5465 init_completion(&uc->teardown_completed); in udma_probe()
5466 INIT_DELAYED_WORK(&uc->tx_drain.work, udma_check_tx_completion); in udma_probe()
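A hedged sketch of the per-channel setup loop above, using the virt-dma helpers; struct my_vchan and the stub callbacks are illustrative stand-ins for the driver's types. Note that vchan_init() already installs a default completion tasklet, so the tasklet_setup() call afterwards deliberately replaces it with the driver's own handler, as the loop above does.

#include <linux/completion.h>
#include <linux/slab.h>
#include <linux/workqueue.h>
#include "virt-dma.h"

struct my_desc {
	struct virt_dma_desc vd;
};

struct my_vchan {
	struct virt_dma_chan vc;
	struct completion teardown_completed;
	struct delayed_work tx_drain_work;
	int id;
};

static void my_desc_free(struct virt_dma_desc *vd)
{
	kfree(container_of(vd, struct my_desc, vd));
}

static void my_vchan_complete(struct tasklet_struct *t)
{
	/* drain vc->desc_completed and run client callbacks */
}

static void my_check_tx_completion(struct work_struct *work)
{
	/* poll byte counters for a drained TX channel; driver specific */
}

static void my_init_chan(struct dma_device *ddev, struct my_vchan *c, int i)
{
	c->id = i;
	c->vc.desc_free = my_desc_free;
	vchan_init(&c->vc, ddev);			/* links chan into ddev */
	tasklet_setup(&c->vc.task, my_vchan_complete);	/* override default */
	init_completion(&c->teardown_completed);
	INIT_DELAYED_WORK(&c->tx_drain_work, my_check_tx_completion);
}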