| /linux/drivers/gpu/drm/nouveau/nvkm/core/ |
| A D | ramht.c |
    36   hash ^= chid << (ramht->bits - 4);  in nvkm_ramht_hash()
    45   co = ho = nvkm_ramht_hash(ramht, chid, handle);  in nvkm_ramht_search()
    47   if (ramht->data[co].chid == chid) {  in nvkm_ramht_search()
    61   int chid, int addr, u32 handle, u32 context)  in nvkm_ramht_update() argument
    68   data->chid = chid;  in nvkm_ramht_update()
    75   data->chid = -1;  in nvkm_ramht_update()
   108   int chid, int addr, u32 handle, u32 context)  in nvkm_ramht_insert() argument
   112   if (nvkm_ramht_search(ramht, chid, handle))  in nvkm_ramht_insert()
   115   co = ho = nvkm_ramht_hash(ramht, chid, handle);  in nvkm_ramht_insert()
   117   if (ramht->data[co].chid < 0) {  in nvkm_ramht_insert()
   [all …]
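The ramht.c hits outline the RAMHT (RAM hash table) scheme used to bind object handles to channels: the handle is XOR-folded down to the table width, the channel id is mixed in with `chid << (bits - 4)`, and the table is probed linearly with `chid == -1` marking a free slot. Below is a minimal userspace sketch of that scheme; the fixed table width, struct layout and helper names are illustrative, not the kernel's exact code.

```c
#include <stdint.h>
#include <stdio.h>

#define RAMHT_BITS 9   /* illustrative table width: 512 slots */

/* A free slot is marked with chid == -1, mirroring data->chid = -1
 * in nvkm_ramht_update(). */
struct ramht_data {
	int      chid;
	uint32_t handle;
	uint32_t context;
};

struct ramht {
	int bits;
	struct ramht_data data[1 << RAMHT_BITS];
};

/* XOR-fold the handle to the table width, then mix in the channel id,
 * in the spirit of nvkm_ramht_hash(). */
static uint32_t ramht_hash(const struct ramht *ramht, int chid, uint32_t handle)
{
	uint32_t hash = 0;

	while (handle) {
		hash ^= handle & ((1u << ramht->bits) - 1);
		handle >>= ramht->bits;
	}
	hash ^= chid << (ramht->bits - 4);
	return hash & ((1u << ramht->bits) - 1);
}

/* Linear probe from the home bucket until the (chid, handle) pair is
 * found or the probe wraps back to where it started. */
static struct ramht_data *ramht_search(struct ramht *ramht, int chid, uint32_t handle)
{
	uint32_t co, ho;

	co = ho = ramht_hash(ramht, chid, handle);
	do {
		if (ramht->data[co].chid == chid &&
		    ramht->data[co].handle == handle)
			return &ramht->data[co];
		if (++co >= (1u << ramht->bits))
			co = 0;
	} while (co != ho);
	return NULL;
}

/* Insert into the first free slot (chid < 0) along the probe sequence. */
static int ramht_insert(struct ramht *ramht, int chid, uint32_t handle, uint32_t context)
{
	uint32_t co, ho;

	if (ramht_search(ramht, chid, handle))
		return -1;                        /* duplicate entry */

	co = ho = ramht_hash(ramht, chid, handle);
	do {
		if (ramht->data[co].chid < 0) {
			ramht->data[co].chid    = chid;
			ramht->data[co].handle  = handle;
			ramht->data[co].context = context;
			return (int)co;
		}
		if (++co >= (1u << ramht->bits))
			co = 0;
	} while (co != ho);
	return -1;                                /* table full */
}

int main(void)
{
	static struct ramht ramht = { .bits = RAMHT_BITS };

	for (unsigned i = 0; i < (1u << RAMHT_BITS); i++)
		ramht.data[i].chid = -1;          /* all slots start free */

	printf("inserted at slot %d\n",
	       ramht_insert(&ramht, 3, 0xbeef0201, 0x80000000u | 3u << 24));
	printf("found again: %d\n", ramht_search(&ramht, 3, 0xbeef0201) != NULL);
	return 0;
}
```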
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/disp/ |
| A D | gp102.c |
    31   gp102_disp_intr_error(struct nv50_disp *disp, int chid)  in gp102_disp_intr_error() argument
    35   u32 mthd = nvkm_rd32(device, 0x6111f0 + (chid * 12));  in gp102_disp_intr_error()
    36   u32 data = nvkm_rd32(device, 0x6111f4 + (chid * 12));  in gp102_disp_intr_error()
    37   u32 unkn = nvkm_rd32(device, 0x6111f8 + (chid * 12));  in gp102_disp_intr_error()
    40   chid, (mthd & 0x0000ffc), data, mthd, unkn);  in gp102_disp_intr_error()
    42   if (chid < ARRAY_SIZE(disp->chan)) {  in gp102_disp_intr_error()
    45   nv50_disp_chan_mthd(disp->chan[chid], NV_DBG_ERROR);  in gp102_disp_intr_error()
    52   nvkm_wr32(device, 0x61009c, (1 << chid));  in gp102_disp_intr_error()
    53   nvkm_wr32(device, 0x6111f0 + (chid * 12), 0x90000000);  in gp102_disp_intr_error()
|
| A D | gf119.c |
    94   u32 stat = nvkm_rd32(device, 0x6101f0 + (chid * 12));  in gf119_disp_intr_error()
    97   u32 data = nvkm_rd32(device, 0x6101f4 + (chid * 12));  in gf119_disp_intr_error()
   104   chid, stat, type, reason ? reason->name : "",  in gf119_disp_intr_error()
   107   if (chid < ARRAY_SIZE(disp->chan)) {  in gf119_disp_intr_error()
   117   nvkm_wr32(device, 0x61009c, (1 << chid));  in gf119_disp_intr_error()
   132   int chid = __ffs(stat); stat &= ~(1 << chid);  in gf119_disp_intr() local
   133   nv50_disp_chan_uevent_send(disp, chid);  in gf119_disp_intr()
   134   nvkm_wr32(device, 0x61008c, 1 << chid);  in gf119_disp_intr()
   141   int chid = ffs(stat) - 1;  in gf119_disp_intr() local
   142   if (chid >= 0)  in gf119_disp_intr()
   [all …]
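The gf119_disp_intr() hits show the usual way a per-channel interrupt status word is fanned out to handlers: take the lowest set bit as the channel id, clear it from the local copy, notify that channel, and acknowledge just that bit in hardware. A hedged, self-contained sketch of that loop follows; the register access is stubbed out, and `__builtin_ctz` plays the role of the kernel's `__ffs`.

```c
#include <stdint.h>
#include <stdio.h>

/* Stand-in for the MMIO write; the real driver uses nvkm_wr32(). */
static void wr32(uint32_t addr, uint32_t val)
{
	printf("wr32(0x%06x, 0x%08x)\n", addr, val);
}

static void channel_uevent_send(int chid)
{
	printf("completion event on channel %d\n", chid);
}

/* Dispatch one bit of 'stat' at a time: lowest set bit -> channel id. */
static void disp_intr_channels(uint32_t stat)
{
	while (stat) {
		int chid = __builtin_ctz(stat);   /* same role as __ffs(stat) */
		stat &= ~(1u << chid);

		channel_uevent_send(chid);
		wr32(0x61008c, 1u << chid);       /* ack just this channel */
	}
}

int main(void)
{
	disp_intr_channels(0x00000112);   /* channels 1, 4 and 8 pending */
	return 0;
}
```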
|
| A D | dmacnv50.c |
    36   struct nv50_disp *disp, int chid, int head, u64 push,  in nv50_disp_dmac_new_() argument
    44   ret = nv50_disp_chan_new_(func, mthd, disp, chid, chid, head, oclass,  in nv50_disp_dmac_new_()
    74   chan->chid.user, -10, handle,  in nv50_disp_dmac_bind()
    75   chan->chid.user << 28 |  in nv50_disp_dmac_bind()
    76   chan->chid.user);  in nv50_disp_dmac_bind()
    84   int ctrl = chan->chid.ctrl;  in nv50_disp_dmac_fini()
    85   int user = chan->chid.user;  in nv50_disp_dmac_fini()
   106   int ctrl = chan->chid.ctrl;  in nv50_disp_dmac_init()
   107   int user = chan->chid.user;  in nv50_disp_dmac_init()
|
| A D | dmacgv100.c |
    31   const u32 soff = (chan->chid.ctrl - 1) * 0x04;  in gv100_disp_dmac_idle()
    45   chan->chid.user, -9, handle,  in gv100_disp_dmac_bind()
    46   chan->chid.user << 25 | 0x00000040);  in gv100_disp_dmac_bind()
    53   const u32 uoff = (chan->chid.ctrl - 1) * 0x1000;  in gv100_disp_dmac_fini()
    54   const u32 coff = chan->chid.ctrl * 0x04;  in gv100_disp_dmac_fini()
    66   const u32 uoff = (chan->chid.ctrl - 1) * 0x1000;  in gv100_disp_dmac_init()
    67   const u32 poff = chan->chid.ctrl * 0x10;  in gv100_disp_dmac_init()
    68   const u32 coff = chan->chid.ctrl * 0x04;  in gv100_disp_dmac_init()
|
| A D | dmacgf119.c |
    34   chan->chid.user, -9, handle,  in gf119_disp_dmac_bind()
    35   chan->chid.user << 27 | 0x00000001);  in gf119_disp_dmac_bind()
    43   int ctrl = chan->chid.ctrl;  in gf119_disp_dmac_fini()
    44   int user = chan->chid.user;  in gf119_disp_dmac_fini()
    65   int ctrl = chan->chid.ctrl;  in gf119_disp_dmac_init()
    66   int user = chan->chid.user;  in gf119_disp_dmac_init()
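Across the three dmac*.c bind routines above, what changes per generation is how the user channel id is packed into the context word handed to RAMHT: bit 28 plus a copy of the id on nv50, bit 27 with a valid bit on gf119, bit 25 with a 0x40 flag on gv100. A hedged sketch of that per-generation packing; the enum and helper names are invented for illustration.

```c
#include <stdint.h>
#include <stdio.h>

enum disp_gen { GEN_NV50, GEN_GF119, GEN_GV100 };

/* Build the RAMHT context word for binding a DMA object to a display
 * channel, following the shifts visible in the dmac*_bind() snippets. */
static uint32_t dmac_bind_context(enum disp_gen gen, int user_chid)
{
	switch (gen) {
	case GEN_NV50:
		return (uint32_t)user_chid << 28 | (uint32_t)user_chid;
	case GEN_GF119:
		return (uint32_t)user_chid << 27 | 0x00000001;
	case GEN_GV100:
		return (uint32_t)user_chid << 25 | 0x00000040;
	}
	return 0;
}

int main(void)
{
	printf("nv50:  0x%08x\n", dmac_bind_context(GEN_NV50, 1));
	printf("gf119: 0x%08x\n", dmac_bind_context(GEN_GF119, 1));
	printf("gv100: 0x%08x\n", dmac_bind_context(GEN_GV100, 1));
	return 0;
}
```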
|
| A D | channv50.c |
    89   mthd->name, chan->chid.user);  in nv50_disp_chan_mthd()
   146   notify->index = chan->chid.user;  in nv50_disp_chan_uevent_ctor()
   164   return 0x640000 + (chan->chid.user * 0x1000);  in nv50_disp_chan_user()
   171   const u32 mask = 0x00010001 << chan->chid.user;  in nv50_disp_chan_intr()
   319   if (chan->chid.user >= 0)  in nv50_disp_chan_dtor()
   320   disp->chan[chan->chid.user] = NULL;  in nv50_disp_chan_dtor()
   354   chan->chid.ctrl = ctrl;  in nv50_disp_chan_new_()
   355   chan->chid.user = user;  in nv50_disp_chan_new_()
   358   if (disp->chan[chan->chid.user]) {  in nv50_disp_chan_new_()
   359   chan->chid.user = -1;  in nv50_disp_chan_new_()
   [all …]
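channv50.c shows the bookkeeping side of chid: a new channel claims slot `disp->chan[chid.user]`, bails out if the slot is already taken, records -1 so the destructor knows nothing was claimed, and the channel's user-visible register window sits at `0x640000 + user * 0x1000`. A compact sketch of that claim/release pattern, with the structure and table size simplified from the original:

```c
#include <stdint.h>
#include <stdio.h>
#include <errno.h>

#define NR_DISP_CHAN 21   /* illustrative table size */

struct disp_chan {
	struct { int ctrl, user; } chid;
};

static struct disp_chan *disp_chan[NR_DISP_CHAN];

/* Claim a user slot for a new channel; chid.user == -1 means "never
 * registered", which the destructor checks before clearing the slot. */
static int chan_claim(struct disp_chan *chan, int ctrl, int user)
{
	chan->chid.ctrl = ctrl;
	chan->chid.user = user;

	if (disp_chan[user]) {
		chan->chid.user = -1;
		return -EBUSY;
	}
	disp_chan[user] = chan;
	return 0;
}

static void chan_release(struct disp_chan *chan)
{
	if (chan->chid.user >= 0)
		disp_chan[chan->chid.user] = NULL;
}

/* Each channel's user-visible registers live in their own 4 KiB page. */
static uint32_t chan_user_base(const struct disp_chan *chan)
{
	return 0x640000 + (uint32_t)chan->chid.user * 0x1000;
}

int main(void)
{
	struct disp_chan core = { 0 }, dup = { 0 };

	printf("claim core: %d\n", chan_claim(&core, 0, 0));
	printf("claim dup:  %d (expected -EBUSY)\n", chan_claim(&dup, 0, 0));
	printf("core user regs @ 0x%06x\n", chan_user_base(&core));
	chan_release(&core);
	return 0;
}
```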
|
| A D | gv100.c |
    97   gv100_disp_exception(struct nv50_disp *disp, int chid)  in gv100_disp_exception() argument
   101   u32 stat = nvkm_rd32(device, 0x611020 + (chid * 12));  in gv100_disp_exception()
   112   if (chid <= 32) {  in gv100_disp_exception()
   113   u32 data = nvkm_rd32(device, 0x611024 + (chid * 12));  in gv100_disp_exception()
   114   u32 code = nvkm_rd32(device, 0x611028 + (chid * 12));  in gv100_disp_exception()
   117   chid, stat, type, reason ? reason->name : "",  in gv100_disp_exception()
   122   chid, stat, type, reason ? reason->name : "", mthd);  in gv100_disp_exception()
   125   if (chid < ARRAY_SIZE(disp->chan) && disp->chan[chid]) {  in gv100_disp_exception()
   128   nv50_disp_chan_mthd(disp->chan[chid], NV_DBG_ERROR);  in gv100_disp_exception()
   135   nvkm_wr32(device, 0x611020 + (chid * 12), 0x90000000);  in gv100_disp_exception()
|
| A D | piocgf119.c |
    35   int ctrl = chan->chid.ctrl;  in gf119_disp_pioc_fini()
    36   int user = chan->chid.user;  in gf119_disp_pioc_fini()
    54   int ctrl = chan->chid.ctrl;  in gf119_disp_pioc_init()
    55   int user = chan->chid.user;  in gf119_disp_pioc_init()
|
| A D | piocnv50.c |
    35   int ctrl = chan->chid.ctrl;  in nv50_disp_pioc_fini()
    36   int user = chan->chid.user;  in nv50_disp_pioc_fini()
    54   int ctrl = chan->chid.ctrl;  in nv50_disp_pioc_init()
    55   int user = chan->chid.user;  in nv50_disp_pioc_init()
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/ |
| A D | dmanv40.c |
    77   int chid;  in nv40_fifo_dma_engine_fini() local
    85   chid = nvkm_rd32(device, 0x003204) & (fifo->base.nr - 1);  in nv40_fifo_dma_engine_fini()
    86   if (chid == chan->base.chid)  in nv40_fifo_dma_engine_fini()
   107   int chid;  in nv40_fifo_dma_engine_init() local
   116   chid = nvkm_rd32(device, 0x003204) & (fifo->base.nr - 1);  in nv40_fifo_dma_engine_init()
   117   if (chid == chan->base.chid)  in nv40_fifo_dma_engine_init()
   156   u32 context = chan->base.chid << 23;  in nv40_fifo_dma_object_ctor()
   171   hash = nvkm_ramht_insert(imem->ramht, object, chan->base.chid, 4,  in nv40_fifo_dma_object_ctor()
   229   args->v0.chid = chan->base.chid;  in nv40_fifo_dma_new()
   230   chan->ramfc = chan->base.chid * 128;  in nv40_fifo_dma_new()
|
| A D | nv04.c |
   159   handled = nvkm_sw_mthd(sw, chid, subc, mthd, data);  in nv04_fifo_swmthd()
   195   !nv04_fifo_swmthd(device, chid, mthd, data)) {  in nv04_fifo_cache_error()
   196   chan = nvkm_fifo_chan_chid(&fifo->base, chid, &flags);  in nv04_fifo_cache_error()
   199   chid, chan ? chan->object.client->name : "unknown",  in nv04_fifo_cache_error()
   220   nv04_fifo_dma_pusher(struct nv04_fifo *fifo, u32 chid)  in nv04_fifo_dma_pusher() argument
   232   chan = nvkm_fifo_chan_chid(&fifo->base, chid, &flags);  in nv04_fifo_dma_pusher()
   243   chid, name, ho_get, dma_get, ho_put, dma_put,  in nv04_fifo_dma_pusher()
   258   chid, name, dma_get, dma_put, state,  in nv04_fifo_dma_pusher()
   279   u32 reassign, chid, get, sem;  in nv04_fifo_intr() local
   288   nv04_fifo_cache_error(fifo, chid, get);  in nv04_fifo_intr()
   [all …]
|
| A D | dmanv04.c |
    52   u32 context = 0x80000000 | chan->base.chid << 24;  in nv04_fifo_dma_object_ctor()
    67   hash = nvkm_ramht_insert(imem->ramht, object, chan->base.chid, 4,  in nv04_fifo_dma_object_ctor()
    84   u32 chid;  in nv04_fifo_dma_fini() local
    91   chid = nvkm_rd32(device, NV03_PFIFO_CACHE1_PUSH1) & mask;  in nv04_fifo_dma_fini()
    92   if (chid == chan->base.chid) {  in nv04_fifo_dma_fini()
   121   nvkm_mask(device, NV04_PFIFO_MODE, 1 << chan->base.chid, 0);  in nv04_fifo_dma_fini()
   132   u32 mask = 1 << chan->base.chid;  in nv04_fifo_dma_init()
   202   args->v0.chid = chan->base.chid;  in nv04_fifo_dma_new()
   203   chan->ramfc = chan->base.chid * 32;  in nv04_fifo_dma_new()
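In dmanv40.c and dmanv04.c above, chid is baked into instance memory in two ways: it is shifted into the RAMHT context word for every object bound on the channel (bit 23 on nv40, bit 24 plus a valid bit on nv04), and it selects the channel's RAMFC slot by a fixed stride (128 bytes per channel on nv40, 32 on nv04). A small sketch of both computations; the helper names are made up for illustration.

```c
#include <stdio.h>

/* Context word stored in RAMHT for an object bound on a channel. */
static unsigned nv04_object_context(int chid)   /* nv04-style */
{
	return 0x80000000u | (unsigned)chid << 24;
}

static unsigned nv40_object_context(int chid)   /* nv40-style */
{
	return (unsigned)chid << 23;
}

/* Per-channel RAMFC (fifo context) offset: fixed stride per channel. */
static unsigned ramfc_offset(int chid, unsigned stride)
{
	return (unsigned)chid * stride;
}

int main(void)
{
	int chid = 5;

	printf("nv04 context: 0x%08x, ramfc @ 0x%03x\n",
	       nv04_object_context(chid), ramfc_offset(chid, 32));
	printf("nv40 context: 0x%08x, ramfc @ 0x%03x\n",
	       nv40_object_context(chid), ramfc_offset(chid, 128));
	return 0;
}
```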
|
| A D | gpfifogk104.c |
    50   nvkm_wr32(device, 0x002634, chan->base.chid);  in gk104_fifo_gpfifo_kick_locked()
    57   cgrp ? cgrp->id : chan->base.chid, client->name);  in gk104_fifo_gpfifo_kick_locked()
    58   nvkm_fifo_recover_chan(&fifo->base, chan->base.chid);  in gk104_fifo_gpfifo_kick_locked()
   198   u32 coff = chan->base.chid * 8;  in gk104_fifo_gpfifo_fini()
   217   u32 coff = chan->base.chid * 8;  in gk104_fifo_gpfifo_init()
   252   gk104_fifo_gpfifo_new_(struct gk104_fifo *fifo, u64 *runlists, u16 *chid,  in gk104_fifo_gpfifo_new_() argument
   280   *chid = chan->base.chid;  in gk104_fifo_gpfifo_new_()
   289   chan->cgrp->id = chan->base.chid;  in gk104_fifo_gpfifo_new_()
   296   usermem = chan->base.chid * 0x200;  in gk104_fifo_gpfifo_new_()
   319   nvkm_wo32(chan->base.inst, 0xe8, chan->base.chid);  in gk104_fifo_gpfifo_new_()
   [all …]
|
| A D | tu102.c |
   179   if (chan->base.chid == chid) {  in tu102_fifo_recover_chid()
   186   if (cgrp->id == chid) {  in tu102_fifo_recover_chid()
   199   tu102_fifo_recover_chan(struct nvkm_fifo *base, int chid)  in tu102_fifo_recover_chan() argument
   204   const u32 stat = nvkm_rd32(device, 0x800004 + (chid * 0x08));  in tu102_fifo_recover_chan()
   215   chan = tu102_fifo_recover_chid(fifo, runl, chid);  in tu102_fifo_recover_chan()
   218   nvkm_fifo_kevent(&fifo->base, chid);  in tu102_fifo_recover_chan()
   222   nvkm_wr32(device, 0x800004 + (chid * 0x08), stat | 0x00000800);  in tu102_fifo_recover_chan()
   223   nvkm_warn(subdev, "channel %d: killed\n", chid);  in tu102_fifo_recover_chan()
   233   if (!status.chan || status.chan->id != chid)  in tu102_fifo_recover_chan()
   332   info->reason, er ? er->name : "", chan ? chan->chid : -1,  in tu102_fifo_fault()
   [all …]
|
| A D | channv50.c |
    90   chan->base.chid, chan->base.object.client->name);  in nv50_fifo_chan_engine_fini()
   192   u32 chid = chan->base.chid;  in nv50_fifo_chan_fini() local
   195   nvkm_mask(device, 0x002600 + (chid * 4), 0x80000000, 0x00000000);  in nv50_fifo_chan_fini()
   197   nvkm_wr32(device, 0x002600 + (chid * 4), 0x00000000);  in nv50_fifo_chan_fini()
   207   u32 chid = chan->base.chid;  in nv50_fifo_chan_init() local
   209   nvkm_wr32(device, 0x002600 + (chid * 4), 0x80000000 | addr);  in nv50_fifo_chan_init()
|
| A D | gpfifogv100.c |
    34   return chan->chid;  in gv100_fifo_gpfifo_submit_token()
   121   struct gk104_fifo *fifo, u64 *runlists, u16 *chid,  in gv100_fifo_gpfifo_new_() argument
   150   *chid = chan->base.chid;  in gv100_fifo_gpfifo_new_()
   160   chan->cgrp->id = chan->base.chid;  in gv100_fifo_gpfifo_new_()
   167   usermem = chan->base.chid * 0x200;  in gv100_fifo_gpfifo_new_()
   202   nvkm_wo32(chan->base.inst, 0x0e8, chan->base.chid);  in gv100_fifo_gpfifo_new_()
   231   &args->v0.chid,  in gv100_fifo_gpfifo_new()
|
| A D | gf100.c |
    66   nvkm_wo32(cur, (nr * 8) + 0, chan->base.chid);  in gf100_fifo_runlist_commit()
   180   u32 chid = chan->base.chid;  in gf100_fifo_recover() local
   184   engine->subdev.name, chid);  in gf100_fifo_recover()
   194   nvkm_fifo_kevent(&fifo->base, chid);  in gf100_fifo_recover()
   302   info->reason, er ? er->name : "", chan ? chan->chid : -1,  in gf100_fifo_fault()
   332   u32 chid = (stat & 0x0000007f);  in gf100_fifo_intr_sched_ctxsw() local
   337   if (chan->base.chid == chid) {  in gf100_fifo_intr_sched_ctxsw()
   412   u32 chid = nvkm_rd32(device, 0x040120 + (unit * 0x2000)) & 0x7f;  in gf100_fifo_intr_pbdma() local
   422   if (nvkm_sw_mthd(device->sw, chid, subc, mthd, data))  in gf100_fifo_intr_pbdma()
   429   chan = nvkm_fifo_chan_chid(&fifo->base, chid, &flags);  in gf100_fifo_intr_pbdma()
   [all …]
|
| A D | base.c |
    37   nvkm_fifo_recover_chan(struct nvkm_fifo *fifo, int chid)  in nvkm_fifo_recover_chan() argument
    43   fifo->func->recover_chan(fifo, chid);  in nvkm_fifo_recover_chan()
   105   nvkm_fifo_chan_chid(struct nvkm_fifo *fifo, int chid, unsigned long *rflags)  in nvkm_fifo_chan_chid() argument
   111   if (chan->chid == chid) {  in nvkm_fifo_chan_chid()
   123   nvkm_fifo_kevent(struct nvkm_fifo *fifo, int chid)  in nvkm_fifo_kevent() argument
   125   nvkm_event_send(&fifo->kevent, 1, chid, NULL, 0);  in nvkm_fifo_kevent()
   136   notify->index = chan->chid;  in nvkm_fifo_kevent_ctor()
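base.c holds the generic helpers: nvkm_fifo_chan_chid() walks the fifo's channel list under the fifo lock and returns the matching channel with the lock still held (the caller gets the saved flags back through rflags), and nvkm_fifo_kevent() signals the fifo's per-channel event indexed by chid. Below is a userspace approximation of the locked-lookup idiom, using a pthread mutex in place of the kernel spinlock; the structure layout is simplified.

```c
#include <pthread.h>
#include <stdio.h>
#include <stddef.h>

struct fifo_chan {
	int chid;
	struct fifo_chan *next;
};

struct fifo {
	pthread_mutex_t lock;
	struct fifo_chan *chans;   /* list of live channels */
};

/* Find a channel by id.  On success the fifo lock is still held, so the
 * channel cannot disappear until the caller unlocks; this mirrors how
 * nvkm_fifo_chan_chid() returns with the spinlock held and hands the
 * saved irq flags back through rflags. */
static struct fifo_chan *fifo_chan_chid(struct fifo *fifo, int chid)
{
	struct fifo_chan *chan;

	pthread_mutex_lock(&fifo->lock);
	for (chan = fifo->chans; chan; chan = chan->next) {
		if (chan->chid == chid)
			return chan;                /* still locked */
	}
	pthread_mutex_unlock(&fifo->lock);
	return NULL;
}

int main(void)
{
	struct fifo_chan c1 = { .chid = 1 }, c0 = { .chid = 0, .next = &c1 };
	struct fifo fifo = { .lock = PTHREAD_MUTEX_INITIALIZER, .chans = &c0 };
	struct fifo_chan *chan = fifo_chan_chid(&fifo, 1);

	if (chan) {
		printf("found channel %d\n", chan->chid);
		pthread_mutex_unlock(&fifo.lock);   /* caller releases the lock */
	}
	return 0;
}
```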
|
| A D | gpfifogf100.c |
    89   nvkm_wr32(device, 0x002634, chan->base.chid);  in gf100_fifo_gpfifo_engine_fini()
    91   if (nvkm_rd32(device, 0x002634) == chan->base.chid)  in gf100_fifo_gpfifo_engine_fini()
    95   chan->base.chid, chan->base.object.client->name);  in gf100_fifo_gpfifo_engine_fini()
   171   u32 coff = chan->base.chid * 8;  in gf100_fifo_gpfifo_fini()
   191   u32 coff = chan->base.chid * 8;  in gf100_fifo_gpfifo_init()
   265   args->v0.chid = chan->base.chid;  in gf100_fifo_gpfifo_new()
   269   usermem = chan->base.chid * 0x1000;  in gf100_fifo_gpfifo_new()
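gpfifogf100.c (and gk104's kick_locked above) shows how a channel is preempted by chid: write the id to register 0x002634, then poll until the register reads back as that bare chid; if it never does, the driver logs a kick timeout and may start channel recovery. A stub-register sketch of that write-and-poll loop follows; the "preempt pending" flag bit and the fake hardware behaviour are assumptions for illustration only.

```c
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

static uint32_t reg_002634;   /* fake backing store for the kick register */
static int      busy_ticks;   /* how long the fake hardware stays busy */

static void wr32(uint32_t val)
{
	reg_002634 = val | 0x00100000;   /* assumed "preempt pending" flag */
	busy_ticks = 3;
}

static uint32_t rd32(void)
{
	if (busy_ticks && --busy_ticks == 0)
		reg_002634 &= ~0x00100000;   /* preempt done: bare chid again */
	return reg_002634;
}

/* Kick (preempt) a channel: write its chid to the register, then poll
 * until the register reads back as exactly that chid, as the
 * gf100_fifo_gpfifo_engine_fini() snippet above polls for. */
static bool gpfifo_kick(int chid)
{
	wr32((uint32_t)chid);

	for (int loops = 0; loops < 2000; loops++) {
		if (rd32() == (uint32_t)chid)
			return true;
	}
	/* The driver logs "kick timeout" here and may recover the channel. */
	return false;
}

int main(void)
{
	printf("kick channel 7: %s\n", gpfifo_kick(7) ? "ok" : "timed out");
	return 0;
}
```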
|
| /linux/drivers/dma/qcom/ |
| A D | gpi.c |
   246   u8 chid;  member
   257   u8 chid;  member
   267   u8 chid;  member
   697   chid = gchan->chid;  in gpi_send_cmd()
   774   for (chid = 0; chid < MAX_CHANNELS_PER_GPII; chid++) {  in gpi_process_ch_ctrl_irq()
   985   chid = imed_event->chid;  in gpi_process_imed_data_event()
  1060   chid = compl_event->chid;  in gpi_process_xfer_compl_event()
  1105   chid = gpi_event->xfer_compl_event.chid;  in gpi_process_events()
  1183   u32 chid = gpi_event->xfer_compl_event.chid;  in gpi_mark_stale_events() local
  1185   if (chid == gchan->chid)  in gpi_mark_stale_events()
   [all …]
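In the Qualcomm GPI DMA driver the chid is not a GPU channel but the index of a channel within a GPII instance: completion and immediate-data events carry an 8-bit chid field, the event loop uses it to route each event to the right gchan, and gpi_mark_stale_events() drops events whose chid matches a channel being torn down. A rough sketch of that routing, with the structs trimmed to the fields the hits show and the channel count treated as an assumption:

```c
#include <stdint.h>
#include <stdio.h>

#define MAX_CHANNELS_PER_GPII 2   /* assumption for this sketch */

/* Only the fields the search hits show: each event names its channel. */
struct xfer_compl_event {
	uint8_t  chid;
	uint32_t length;
};

struct gchan {
	uint8_t chid;
	int     stale;     /* set while the channel is being torn down */
};

static struct gchan gchans[MAX_CHANNELS_PER_GPII] = {
	{ .chid = 0 }, { .chid = 1 },
};

/* Route a completion event to the channel named by its chid field. */
static void process_xfer_compl_event(const struct xfer_compl_event *ev)
{
	uint8_t chid = ev->chid;

	if (chid >= MAX_CHANNELS_PER_GPII || gchans[chid].stale) {
		printf("dropping event for channel %u\n", chid);
		return;
	}
	printf("channel %u: %u bytes completed\n", chid, ev->length);
}

int main(void)
{
	struct xfer_compl_event ev = { .chid = 1, .length = 64 };

	process_xfer_compl_event(&ev);
	gchans[1].stale = 1;          /* gpi_mark_stale_events() analogue */
	process_xfer_compl_event(&ev);
	return 0;
}
```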
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/gr/ |
| A D | nv20.c |
    24   nvkm_wo32(gr->ctxtab, chan->chid * 4, inst >> 4);  in nv20_gr_chan_init()
    36   int chid = -1;  in nv20_gr_chan_fini() local
    40   chid = (nvkm_rd32(device, 0x400148) & 0x1f000000) >> 24;  in nv20_gr_chan_fini()
    41   if (chan->chid == chid) {  in nv20_gr_chan_fini()
    54   nvkm_wo32(gr->ctxtab, chan->chid * 4, 0x00000000);  in nv20_gr_chan_fini()
    86   chan->chid = fifoch->chid;  in nv20_gr_chan_new()
    96   nvkm_wo32(chan->inst, 0x0000, 0x00000001 | (chan->chid << 24));  in nv20_gr_chan_new()
   190   u32 chid = (addr & 0x01f00000) >> 20;  in nv20_gr_intr() local
   199   chan = nvkm_fifo_chan_chid(device->fifo, chid, &flags);  in nv20_gr_intr()
   211   show, msg, nsource, src, nstatus, sta, chid,  in nv20_gr_intr()
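The nv20 graphics interrupt handler recovers the channel id from the faulting address word with a mask and shift ((addr & 0x01f00000) >> 20, five bits starting at bit 20; the nv04 handler further down uses four bits at bit 24), and each channel's context pointer lives at chid * 4 in the context table. A small sketch of that unpacking:

```c
#include <stdint.h>
#include <stdio.h>

/* nv20-style: the channel id sits in bits 24:20 of the PGRAPH address
 * word; nv04 keeps it in bits 27:24 instead. */
static int nv20_addr_to_chid(uint32_t addr)
{
	return (addr & 0x01f00000) >> 20;
}

static int nv04_addr_to_chid(uint32_t addr)
{
	return (addr & 0x0f000000) >> 24;
}

/* Each channel owns one 32-bit slot in the PGRAPH context table. */
static uint32_t ctxtab_offset(int chid)
{
	return (uint32_t)chid * 4;
}

int main(void)
{
	uint32_t addr = 0x00a01234;   /* example interrupt address word */

	printf("nv20 chid %d (ctxtab slot @ 0x%02x)\n",
	       nv20_addr_to_chid(addr), ctxtab_offset(nv20_addr_to_chid(addr)));
	printf("nv04 chid %d\n", nv04_addr_to_chid(addr));
	return 0;
}
```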
|
| A D | nv10.c |
   402   int chid;  member
   553   if (chid < ARRAY_SIZE(gr->chan))  in nv10_gr_channel()
   554   chan = gr->chan[chid];  in nv10_gr_channel()
   937   int chid;  in nv10_gr_context_switch() local
   948   next = gr->chan[chid];  in nv10_gr_context_switch()
   950   nv10_gr_load_context(next, chid);  in nv10_gr_context_switch()
   978   gr->chan[chan->chid] = NULL;  in nv10_gr_chan_dtor()
  1014   chan->chid = fifoch->chid;  in nv10_gr_chan_new()
  1039   gr->chan[chan->chid] = chan;  in nv10_gr_chan_new()
  1090   u32 chid = (addr & 0x01f00000) >> 20;  in nv10_gr_intr() local
   [all …]
|
| A D | nv04.c |
   362   int chid;  member
  1078   if (chid < ARRAY_SIZE(gr->chan))  in nv04_gr_channel()
  1079   chan = gr->chan[chid];  in nv04_gr_channel()
  1119   int chid;  in nv04_gr_context_switch() local
  1130   next = gr->chan[chid];  in nv04_gr_context_switch()
  1132   nv04_gr_load_context(next, chid);  in nv04_gr_context_switch()
  1155   gr->chan[chan->chid] = NULL;  in nv04_gr_chan_dtor()
  1195   chan->chid = fifoch->chid;  in nv04_gr_chan_new()
  1201   gr->chan[chan->chid] = chan;  in nv04_gr_chan_new()
  1281   u32 chid = (addr & 0x0f000000) >> 24;  in nv04_gr_intr() local
   [all …]
|
| /linux/drivers/gpu/drm/nouveau/include/nvkm/core/ |
| A D | ramht.h |
     9   int chid;  member
    26   int chid, int addr, u32 handle, u32 context);
    29   nvkm_ramht_search(struct nvkm_ramht *, int chid, u32 handle);
|