Lines matching refs: parser (each entry lists the source line number, the matching line, and the enclosing function)
678 static void amdgpu_cs_parser_fini(struct amdgpu_cs_parser *parser, int error, in amdgpu_cs_parser_fini() argument
686 amdgpu_bo_list_for_each_entry(e, parser->bo_list) { in amdgpu_cs_parser_fini()
691 ttm_eu_backoff_reservation(&parser->ticket, in amdgpu_cs_parser_fini()
692 &parser->validated); in amdgpu_cs_parser_fini()
695 for (i = 0; i < parser->num_post_deps; i++) { in amdgpu_cs_parser_fini()
696 drm_syncobj_put(parser->post_deps[i].syncobj); in amdgpu_cs_parser_fini()
697 kfree(parser->post_deps[i].chain); in amdgpu_cs_parser_fini()
699 kfree(parser->post_deps); in amdgpu_cs_parser_fini()
701 dma_fence_put(parser->fence); in amdgpu_cs_parser_fini()
703 if (parser->ctx) { in amdgpu_cs_parser_fini()
704 mutex_unlock(&parser->ctx->lock); in amdgpu_cs_parser_fini()
705 amdgpu_ctx_put(parser->ctx); in amdgpu_cs_parser_fini()
707 if (parser->bo_list) in amdgpu_cs_parser_fini()
708 amdgpu_bo_list_put(parser->bo_list); in amdgpu_cs_parser_fini()
710 for (i = 0; i < parser->nchunks; i++) in amdgpu_cs_parser_fini()
711 kvfree(parser->chunks[i].kdata); in amdgpu_cs_parser_fini()
712 kvfree(parser->chunks); in amdgpu_cs_parser_fini()
713 if (parser->job) in amdgpu_cs_parser_fini()
714 amdgpu_job_free(parser->job); in amdgpu_cs_parser_fini()
715 if (parser->uf_entry.tv.bo) { in amdgpu_cs_parser_fini()
716 struct amdgpu_bo *uf = ttm_to_amdgpu_bo(parser->uf_entry.tv.bo); in amdgpu_cs_parser_fini()
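Read in order, the amdgpu_cs_parser_fini() matches trace the teardown path: back off the TTM reservation, drop the syncobj post-dependencies, then release the fence, the context, the BO list, the chunk kdata, the job, and finally the user-fence BO entry. The condensed sketch below stitches together only the matching lines above; the "..." comments mark code the listing elides, and the second parameter name (backoff), the bo_list entry cursor, and the placement of the backoff path are assumptions, not taken from the listing.

	/* Condensed sketch of amdgpu_cs_parser_fini(), assembled from the matching
	 * lines only; "..." comments stand for lines the listing does not show. */
	static void amdgpu_cs_parser_fini(struct amdgpu_cs_parser *parser, int error,
					  bool backoff /* name assumed */)
	{
		struct amdgpu_bo_list_entry *e;	/* cursor, assumed */
		unsigned int i;

		/* ... (elided: decide whether the backoff path runs at all) ... */
		amdgpu_bo_list_for_each_entry(e, parser->bo_list) {
			/* ... (per-entry cleanup elided) ... */
		}
		ttm_eu_backoff_reservation(&parser->ticket,
					   &parser->validated);

		/* Drop the syncobj post-dependencies collected during parsing. */
		for (i = 0; i < parser->num_post_deps; i++) {
			drm_syncobj_put(parser->post_deps[i].syncobj);
			kfree(parser->post_deps[i].chain);
		}
		kfree(parser->post_deps);

		dma_fence_put(parser->fence);

		if (parser->ctx) {
			mutex_unlock(&parser->ctx->lock);
			amdgpu_ctx_put(parser->ctx);
		}
		if (parser->bo_list)
			amdgpu_bo_list_put(parser->bo_list);

		for (i = 0; i < parser->nchunks; i++)
			kvfree(parser->chunks[i].kdata);
		kvfree(parser->chunks);
		if (parser->job)
			amdgpu_job_free(parser->job);
		if (parser->uf_entry.tv.bo) {
			struct amdgpu_bo *uf = ttm_to_amdgpu_bo(parser->uf_entry.tv.bo);
			/* ... (elided: release of the user-fence BO) ... */
		}
	}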
872 struct amdgpu_cs_parser *parser) in amdgpu_cs_ib_fill() argument
874 struct amdgpu_fpriv *fpriv = parser->filp->driver_priv; in amdgpu_cs_ib_fill()
880 for (i = 0, j = 0; i < parser->nchunks && j < parser->job->num_ibs; i++) { in amdgpu_cs_ib_fill()
886 chunk = &parser->chunks[i]; in amdgpu_cs_ib_fill()
887 ib = &parser->job->ibs[j]; in amdgpu_cs_ib_fill()
907 r = amdgpu_ctx_get_entity(parser->ctx, chunk_ib->ip_type, in amdgpu_cs_ib_fill()
914 parser->job->preamble_status |= in amdgpu_cs_ib_fill()
917 if (parser->entity && parser->entity != entity) in amdgpu_cs_ib_fill()
925 parser->entity = entity; in amdgpu_cs_ib_fill()
944 ring = to_amdgpu_ring(parser->entity->rq->sched); in amdgpu_cs_ib_fill()
945 if (parser->job->uf_addr && ring->funcs->no_user_fence) in amdgpu_cs_ib_fill()
948 return amdgpu_ctx_wait_prev_fence(parser->ctx, parser->entity); in amdgpu_cs_ib_fill()
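The amdgpu_cs_ib_fill() matches outline the per-chunk IB loop: each IB chunk is mapped to a scheduler entity via amdgpu_ctx_get_entity(), a preamble flag may be recorded in the job, IBs that target different entities within one submission are rejected, and after the loop a user fence is refused on rings that cannot signal one before waiting on the entity's previous fence. A rough sketch from the matching lines follows; the trailing arguments to amdgpu_ctx_get_entity(), the chunk_ib declaration, the AMDGPU_PREAMBLE_IB_PRESENT flag, and the -EINVAL returns are reconstructed from the driver's API and are assumptions, not part of the listing.

	static int amdgpu_cs_ib_fill(struct amdgpu_device *adev,
				     struct amdgpu_cs_parser *parser)
	{
		struct amdgpu_fpriv *fpriv = parser->filp->driver_priv; /* used by elided code */
		struct drm_sched_entity *entity;	/* declarations assumed */
		struct amdgpu_ring *ring;
		int r, i, j;

		for (i = 0, j = 0; i < parser->nchunks && j < parser->job->num_ibs; i++) {
			struct amdgpu_cs_chunk *chunk;
			struct amdgpu_ib *ib;
			struct drm_amdgpu_cs_chunk_ib *chunk_ib;	/* assumed */

			chunk = &parser->chunks[i];
			ib = &parser->job->ibs[j];
			/* ... (elided: skip non-IB chunks, read chunk->kdata into chunk_ib) ... */

			r = amdgpu_ctx_get_entity(parser->ctx, chunk_ib->ip_type,
						  chunk_ib->ip_instance, chunk_ib->ring,
						  &entity);	/* trailing args assumed */
			if (r)
				return r;

			/* ... (elided: only when the chunk carries the preamble flag) ... */
			parser->job->preamble_status |=
				AMDGPU_PREAMBLE_IB_PRESENT;	/* flag name assumed */

			if (parser->entity && parser->entity != entity)
				return -EINVAL;	/* all IBs must share one entity */

			parser->entity = entity;
			/* ... (elided: allocate/copy the IB, advance j) ... */
		}

		/* Reject a user fence on rings that cannot signal one. */
		ring = to_amdgpu_ring(parser->entity->rq->sched);
		if (parser->job->uf_addr && ring->funcs->no_user_fence)
			return -EINVAL;

		return amdgpu_ctx_wait_prev_fence(parser->ctx, parser->entity);
	}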
1304 static void trace_amdgpu_cs_ibs(struct amdgpu_cs_parser *parser) in trace_amdgpu_cs_ibs() argument
1311 for (i = 0; i < parser->job->num_ibs; i++) in trace_amdgpu_cs_ibs()
1312 trace_amdgpu_cs(parser, i); in trace_amdgpu_cs_ibs()
1319 struct amdgpu_cs_parser parser = {}; in amdgpu_cs_ioctl() local
1329 parser.adev = adev; in amdgpu_cs_ioctl()
1330 parser.filp = filp; in amdgpu_cs_ioctl()
1332 r = amdgpu_cs_parser_init(&parser, data); in amdgpu_cs_ioctl()
1339 r = amdgpu_cs_ib_fill(adev, &parser); in amdgpu_cs_ioctl()
1343 r = amdgpu_cs_dependencies(adev, &parser); in amdgpu_cs_ioctl()
1349 r = amdgpu_cs_parser_bos(&parser, data); in amdgpu_cs_ioctl()
1360 trace_amdgpu_cs_ibs(&parser); in amdgpu_cs_ioctl()
1362 r = amdgpu_cs_vm_handling(&parser); in amdgpu_cs_ioctl()
1366 r = amdgpu_cs_submit(&parser, cs); in amdgpu_cs_ioctl()
1369 amdgpu_cs_parser_fini(&parser, r, reserved_buffers); in amdgpu_cs_ioctl()
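The amdgpu_cs_ioctl() matches give the top-level submission flow on a stack-allocated parser: init, fill the IBs, resolve dependencies, reserve and validate the BOs, trace, run VM handling, submit, then tear down with amdgpu_cs_parser_fini() carrying the error code and the reservation state. The sketch below is assembled from the matching lines; the goto-style error handling, the adev/cs initializers, and the drm_to_adev() helper are assumptions filled in around the elided lines.

	int amdgpu_cs_ioctl(struct drm_device *dev, void *data, struct drm_file *filp)
	{
		struct amdgpu_device *adev = drm_to_adev(dev);	/* helper assumed */
		union drm_amdgpu_cs *cs = data;			/* cast assumed */
		struct amdgpu_cs_parser parser = {};
		bool reserved_buffers = false;
		int r;

		parser.adev = adev;
		parser.filp = filp;

		r = amdgpu_cs_parser_init(&parser, data);
		if (r)
			goto out;

		r = amdgpu_cs_ib_fill(adev, &parser);
		if (r)
			goto out;

		r = amdgpu_cs_dependencies(adev, &parser);
		if (r)
			goto out;

		r = amdgpu_cs_parser_bos(&parser, data);
		if (r)
			goto out;
		reserved_buffers = true;

		trace_amdgpu_cs_ibs(&parser);

		r = amdgpu_cs_vm_handling(&parser);
		if (r)
			goto out;

		r = amdgpu_cs_submit(&parser, cs);

	out:
		amdgpu_cs_parser_fini(&parser, r, reserved_buffers);
		return r;
	}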
1680 int amdgpu_cs_find_mapping(struct amdgpu_cs_parser *parser, in amdgpu_cs_find_mapping() argument
1684 struct amdgpu_fpriv *fpriv = parser->filp->driver_priv; in amdgpu_cs_find_mapping()
1700 if (dma_resv_locking_ctx((*bo)->tbo.base.resv) != &parser->ticket) in amdgpu_cs_find_mapping()
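The amdgpu_cs_find_mapping() matches show the guard that a buffer object resolved from a VA lookup must already be reserved under the parser's own ticket: dma_resv_locking_ctx() on the BO's reservation object is compared against &parser->ticket. A minimal sketch of that check follows; the remaining parameters, the elided VA-mapping lookup, and the -EINVAL error path are assumptions based on the surrounding driver API rather than the listing.

	int amdgpu_cs_find_mapping(struct amdgpu_cs_parser *parser,
				   uint64_t addr, struct amdgpu_bo **bo,
				   struct amdgpu_bo_va_mapping **map)	/* params assumed */
	{
		struct amdgpu_fpriv *fpriv = parser->filp->driver_priv;

		/* ... (elided: look up the VA mapping in fpriv's VM, set *bo and *map) ... */

		/* The BO must be locked with the CS reservation ticket; a BO that was
		 * never part of the validated list cannot be touched safely here. */
		if (dma_resv_locking_ctx((*bo)->tbo.base.resv) != &parser->ticket)
			return -EINVAL;	/* error code assumed */

		/* ... (elided: further validation of the mapping) ... */
		return 0;
	}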