/linux/drivers/gpu/drm/tegra/

gem.c
     30  drm_gem_object_put(&obj->gem);  in tegra_bo_put()
    115  obj->gem.size);  in tegra_bo_pin()
    530  gem->size);  in __tegra_gem_mmap()
    581  gem->size) < 0)  in tegra_gem_prime_map_dma_buf()
    646  err = drm_gem_mmap_obj(gem, gem->size, vma);  in tegra_gem_prime_mmap()
    688  exp_info.priv = gem;  in tegra_gem_prime_export()
    701  if (gem->dev == drm) {  in tegra_gem_prime_import()
    703  return gem;  in tegra_gem_prime_import()
    711  return &bo->gem;  in tegra_gem_prime_import()
    720  if (!gem)  in tegra_gem_lookup()
    [all …]

gem.h
     36  struct drm_gem_object gem;  member
     52  static inline struct tegra_bo *to_tegra_bo(struct drm_gem_object *gem)  in to_tegra_bo() argument
     54  return container_of(gem, struct tegra_bo, gem);  in to_tegra_bo()
     69  void tegra_bo_free_object(struct drm_gem_object *gem);
     75  int __tegra_gem_mmap(struct drm_gem_object *gem, struct vm_area_struct *vma);
     78  struct dma_buf *tegra_gem_prime_export(struct drm_gem_object *gem,

drm.c
    367  if (!gem)  in tegra_gem_mmap()
    370  bo = to_tegra_bo(gem);  in tegra_gem_mmap()
    374  drm_gem_object_put(gem);  in tegra_gem_mmap()
    634  if (!gem)  in tegra_gem_set_tiling()
    637  bo = to_tegra_bo(gem);  in tegra_gem_set_tiling()
    656  if (!gem)  in tegra_gem_get_tiling()
    659  bo = to_tegra_bo(gem);  in tegra_gem_get_tiling()
    698  if (!gem)  in tegra_gem_set_flags()
    701  bo = to_tegra_bo(gem);  in tegra_gem_set_flags()
    720  if (!gem)  in tegra_gem_get_flags()
    [all …]

fb.c
    126  fb->obj[i] = &planes[i]->gem;  in tegra_fb_alloc()
    145  struct drm_gem_object *gem;  in tegra_fb_create() local
    155  gem = drm_gem_object_lookup(file, cmd->handles[i]);  in tegra_fb_create()
    156  if (!gem) {  in tegra_fb_create()
    166  if (gem->size < size) {  in tegra_fb_create()
    171  planes[i] = to_tegra_bo(gem);  in tegra_fb_create()
    184  drm_gem_object_put(&planes[i]->gem);  in tegra_fb_create()
    198  err = drm_gem_mmap_obj(&bo->gem, bo->gem.size, vma);  in tegra_fb_mmap()
    202  return __tegra_gem_mmap(&bo->gem, vma);  in tegra_fb_mmap()
    248  drm_gem_object_put(&bo->gem);  in tegra_fbdev_probe()
    [all …]

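The tegra gem.h hits above (and nouveau_gem.h further down) show the usual DRM embedding idiom: the driver's buffer object contains a struct drm_gem_object by value and converts back to the driver type with container_of(). A minimal kernel-style sketch of that pattern; my_bo and to_my_bo are hypothetical names that do not exist in any driver listed here:

```c
#include <drm/drm_gem.h>

struct my_bo {
	struct drm_gem_object gem;	/* embedded by value, never a pointer */
	void *vaddr;			/* driver-private state follows */
};

/* Recover the driver object from the embedded DRM GEM object. */
static inline struct my_bo *to_my_bo(struct drm_gem_object *gem)
{
	return container_of(gem, struct my_bo, gem);
}
```
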
/linux/drivers/gpu/drm/

drm_gem_ttm_helper.c
     24  const struct drm_gem_object *gem)  in drm_gem_ttm_print_info() argument
     63  int drm_gem_ttm_vmap(struct drm_gem_object *gem,  in drm_gem_ttm_vmap() argument
     66  struct ttm_buffer_object *bo = drm_gem_ttm_of_gem(gem);  in drm_gem_ttm_vmap()
     80  void drm_gem_ttm_vunmap(struct drm_gem_object *gem,  in drm_gem_ttm_vunmap() argument
     97  int drm_gem_ttm_mmap(struct drm_gem_object *gem,  in drm_gem_ttm_mmap() argument
    111  drm_gem_object_put(gem);  in drm_gem_ttm_mmap()
    136  struct drm_gem_object *gem;  in drm_gem_ttm_dumb_map_offset() local
    138  gem = drm_gem_object_lookup(file, handle);  in drm_gem_ttm_dumb_map_offset()
    139  if (!gem)  in drm_gem_ttm_dumb_map_offset()
    142  *offset = drm_vma_node_offset_addr(&gem->vma_node);  in drm_gem_ttm_dumb_map_offset()
    [all …]

drm_gem_vram_helper.c
    190  struct drm_gem_object *gem;  in drm_gem_vram_create() local
    199  gem = dev->driver->gem_create_object(dev, size);  in drm_gem_vram_create()
    200  if (!gem)  in drm_gem_vram_create()
    202  gbo = drm_gem_vram_of_gem(gem);  in drm_gem_vram_create()
    207  gem = &gbo->bo.base;  in drm_gem_vram_create()
    210  if (!gem->funcs)  in drm_gem_vram_create()
    211  gem->funcs = &drm_gem_vram_object_funcs;  in drm_gem_vram_create()
    213  ret = drm_gem_object_init(dev, gem, size);  in drm_gem_vram_create()
    593  struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);  in drm_gem_vram_object_free()
    770  struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);  in drm_gem_vram_object_pin()
    [all …]

drm_fb_cma_helper.c
     44  struct drm_gem_object *gem;  in drm_fb_cma_get_gem_obj() local
     46  gem = drm_gem_fb_get_obj(fb, plane);  in drm_fb_cma_get_gem_obj()
     47  if (!gem)  in drm_fb_cma_get_gem_obj()
     50  return to_drm_gem_cma_obj(gem);  in drm_fb_cma_get_gem_obj()

drm_client.c
    238  drm_gem_vunmap(buffer->gem, &buffer->map);  in drm_client_buffer_delete()
    240  if (buffer->gem)  in drm_client_buffer_delete()
    241  drm_gem_object_put(buffer->gem);  in drm_client_buffer_delete()
    281  buffer->gem = obj;  in drm_client_buffer_create()
    325  ret = drm_gem_vmap(buffer->gem, map);  in drm_client_buffer_vmap()
    347  drm_gem_vunmap(buffer->gem, map);  in drm_client_buffer_vunmap()

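Several of the files above (tegra/drm.c, drm_gem_ttm_helper.c, and nouveau_gem.c below) repeat the same handle-lookup idiom: drm_gem_object_lookup() returns a new reference, or NULL when the handle is unknown, and the caller must drop that reference with drm_gem_object_put() when done. A hedged sketch of the pattern; my_dumb_map_offset() is a hypothetical helper, not an existing kernel function:

```c
#include <linux/errno.h>
#include <linux/types.h>
#include <drm/drm_file.h>
#include <drm/drm_gem.h>
#include <drm/drm_vma_manager.h>

static int my_dumb_map_offset(struct drm_file *file, u32 handle, u64 *offset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(file, handle);	/* takes a reference */
	if (!gem)
		return -ENOENT;

	*offset = drm_vma_node_offset_addr(&gem->vma_node);

	drm_gem_object_put(gem);			/* drop the reference */
	return 0;
}
```
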
/linux/drivers/gpu/drm/i915/

Makefile
    135  gem-y += \
    136  gem/i915_gem_busy.o \
    145  gem/i915_gem_lmem.o \
    146  gem/i915_gem_mman.o \
    147  gem/i915_gem_pages.o \
    148  gem/i915_gem_phys.o \
    149  gem/i915_gem_pm.o \
    156  gem/i915_gem_ttm.o \
    159  gem/i915_gem_wait.o \
    160  gem/i915_gemfs.o
    [all …]

/linux/drivers/gpu/drm/gma500/

gem.c
     26  struct gtt_range *gtt = container_of(obj, struct gtt_range, gem);  in psb_gem_free_object()
     76  r->gem.funcs = &psb_gem_object_funcs;  in psb_gem_create()
     78  if (drm_gem_object_init(dev, &r->gem, size) != 0) {  in psb_gem_create()
     85  mapping_set_gfp_mask(r->gem.filp->f_mapping, GFP_KERNEL | __GFP_DMA32);  in psb_gem_create()
     87  ret = drm_gem_handle_create(file, &r->gem, &handle);  in psb_gem_create()
     90  &r->gem, size);  in psb_gem_create()
     91  drm_gem_object_release(&r->gem);  in psb_gem_create()
     96  drm_gem_object_put(&r->gem);  in psb_gem_create()
    152  r = container_of(obj, struct gtt_range, gem); /* Get the gtt range */  in psb_gem_fault()

gtt.c
    152  pages = drm_gem_get_pages(&gt->gem);  in psb_gtt_attach_pages()
    156  gt->npage = gt->gem.size / PAGE_SIZE;  in psb_gtt_attach_pages()
    173  drm_gem_put_pages(&gt->gem, gt->pages, true, false);  in psb_gtt_detach_pages()
    190  struct drm_device *dev = gt->gem.dev;  in psb_gtt_pin()
    228  struct drm_device *dev = gt->gem.dev;  in psb_gtt_unpin()
    292  gt->gem.dev = dev;  in psb_gtt_alloc_range()

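The gtt.c hits above use the shmem-backed page helpers: drm_gem_get_pages() pins the object's backing pages into an array and drm_gem_put_pages() releases them, optionally marking them dirty or accessed. A rough sketch of that pairing, assuming a gtt_range-like wrapper; my_range, my_attach_pages() and my_detach_pages() are invented names:

```c
#include <linux/err.h>
#include <linux/mm.h>
#include <drm/drm_gem.h>

struct my_range {
	struct drm_gem_object gem;
	struct page **pages;		/* filled by drm_gem_get_pages() */
	int npage;
};

static int my_attach_pages(struct my_range *gt)
{
	struct page **pages = drm_gem_get_pages(&gt->gem);

	if (IS_ERR(pages))
		return PTR_ERR(pages);

	gt->pages = pages;
	gt->npage = gt->gem.size / PAGE_SIZE;
	return 0;
}

static void my_detach_pages(struct my_range *gt)
{
	/* dirty = true, accessed = false, mirroring psb_gtt_detach_pages() */
	drm_gem_put_pages(&gt->gem, gt->pages, true, false);
	gt->pages = NULL;
}
```
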
/linux/Documentation/devicetree/bindings/net/

macb.txt
      4  - compatible: Should be "cdns,[<chip>-]{macb|gem}"
     10  Use "atmel,sama5d2-gem" for the GEM IP (10/100) available on Atmel sama5d2 SoCs.
     11  Use "atmel,sama5d29-gem" for GEM XL IP (10/100) available on Atmel sama5d29 SoCs.
     13  Use "atmel,sama5d3-gem" for the Gigabit IP available on Atmel sama5d3 SoCs.
     14  Use "atmel,sama5d4-gem" for the GEM IP (10/100) available on Atmel sama5d4 SoCs.
     15  Use "cdns,zynq-gem" Xilinx Zynq-7xxx SoC.
     16  Use "cdns,zynqmp-gem" for Zynq Ultrascale+ MPSoC.
     17  Use "sifive,fu540-c000-gem" for SiFive FU540-C000 SoC.
     19  Use "microchip,sama7g5-gem" for Microchip SAMA7G5 gigabit ethernet interface.
     22  For "sifive,fu540-c000-gem", second range is required to specify the
    [all …]

/linux/drivers/gpu/drm/nouveau/

nouveau_gem.c
     90  if (gem->import_attach)  in nouveau_gem_object_del()
    438  struct drm_gem_object *gem;  in validate_init() local
    442  if (!gem) {  in validate_init()
    450  drm_gem_object_put(gem);  in validate_init()
    457  drm_gem_object_put(gem);  in validate_init()
    961  if (!gem)  in nouveau_gem_ioctl_cpu_prep()
    975  drm_gem_object_put(gem);  in nouveau_gem_ioctl_cpu_prep()
    989  if (!gem)  in nouveau_gem_ioctl_cpu_fini()
    994  drm_gem_object_put(gem);  in nouveau_gem_ioctl_cpu_fini()
   1007  if (!gem)  in nouveau_gem_ioctl_info()
    [all …]

nouveau_ttm.c
    167  drm->gem.vram_available >> PAGE_SHIFT);  in nouveau_ttm_init_vram()
    173  drm->gem.vram_available >> PAGE_SHIFT);  in nouveau_ttm_init_vram()
    196  unsigned long size_pages = drm->gem.gart_available >> PAGE_SHIFT;  in nouveau_ttm_init_gtt()
    292  drm->gem.vram_available = drm->client.device.info.ram_user;  in nouveau_ttm_init()
    308  drm->gem.gart_available = drm->client.vmm.vmm.limit;  in nouveau_ttm_init()
    310  drm->gem.gart_available = drm->agp.size;  in nouveau_ttm_init()
    322  NV_INFO(drm, "VRAM: %d MiB\n", (u32)(drm->gem.vram_available >> 20));  in nouveau_ttm_init()
    323  NV_INFO(drm, "GART: %d MiB\n", (u32)(drm->gem.gart_available >> 20));  in nouveau_ttm_init()

nouveau_gem.h
     11  nouveau_gem_object(struct drm_gem_object *gem)  in nouveau_gem_object() argument
     13  return gem ? container_of(gem, struct nouveau_bo, bo.base) : NULL;  in nouveau_gem_object()

/linux/drivers/net/ethernet/sun/

sungem.c
    145  struct gem *gp = netdev_priv(dev);  in _sungem_phy_read()
    178  struct gem *gp = netdev_priv(dev);  in _sungem_phy_write()
    876  struct gem *gp = container_of(napi, struct gem, napi);  in gem_poll()
    927  struct gem *gp = netdev_priv(dev);  in gem_interrupt()
    955  struct gem *gp = netdev_priv(dev);  in gem_poll_controller()
    965  struct gem *gp = netdev_priv(dev);  in gem_tx_timeout()
    993  struct gem *gp = netdev_priv(dev);  in gem_start_xmit()
   2142  struct gem *gp = netdev_priv(dev);  in gem_do_start()
   2181  struct gem *gp = netdev_priv(dev);  in gem_do_stop()
   2226  struct gem *gp = container_of(work, struct gem, reset_task);  in gem_reset_task()
    [all …]

/linux/include/drm/

drm_gem_ttm_helper.h
     19  const struct drm_gem_object *gem);
     20  int drm_gem_ttm_vmap(struct drm_gem_object *gem,
     22  void drm_gem_ttm_vunmap(struct drm_gem_object *gem,
     24  int drm_gem_ttm_mmap(struct drm_gem_object *gem,

/linux/drivers/gpu/drm/qxl/

qxl_gem.c
     68  mutex_lock(&qdev->gem.mutex);  in qxl_gem_object_create()
     69  list_add_tail(&qbo->list, &qdev->gem.objects);  in qxl_gem_object_create()
     70  mutex_unlock(&qdev->gem.mutex);  in qxl_gem_object_create()
    116  INIT_LIST_HEAD(&qdev->gem.objects);  in qxl_gem_init()

qxl_object.c
     45  mutex_lock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
     47  mutex_unlock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
    358  if (list_empty(&qdev->gem.objects))  in qxl_bo_force_delete()
    361  list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {  in qxl_bo_force_delete()
    365  mutex_lock(&qdev->gem.mutex);  in qxl_bo_force_delete()
    367  mutex_unlock(&qdev->gem.mutex);  in qxl_bo_force_delete()

/linux/drivers/gpu/drm/i915/pxp/

intel_pxp.c
    255  spin_lock_irq(&i915->gem.contexts.lock);  in intel_pxp_invalidate()
    256  list_for_each_entry_safe(ctx, cn, &i915->gem.contexts.list, link) {  in intel_pxp_invalidate()
    268  spin_unlock_irq(&i915->gem.contexts.lock);  in intel_pxp_invalidate()
    294  spin_lock_irq(&i915->gem.contexts.lock);  in intel_pxp_invalidate()
    298  spin_unlock_irq(&i915->gem.contexts.lock);  in intel_pxp_invalidate()

/linux/drivers/gpu/drm/rcar-du/

rcar_du_vsp.c
    194  struct drm_gem_cma_object *gem = drm_fb_cma_get_gem_obj(fb, i);  in rcar_du_vsp_map_fb() local
    197  if (gem->sgt) {  in rcar_du_vsp_map_fb()
    208  ret = sg_alloc_table(sgt, gem->sgt->orig_nents,  in rcar_du_vsp_map_fb()
    213  src = gem->sgt->sgl;  in rcar_du_vsp_map_fb()
    215  for (j = 0; j < gem->sgt->orig_nents; ++j) {  in rcar_du_vsp_map_fb()
    222  ret = dma_get_sgtable(rcdu->dev, sgt, gem->vaddr,  in rcar_du_vsp_map_fb()
    223  gem->paddr, gem->base.size);  in rcar_du_vsp_map_fb()

/linux/Documentation/devicetree/bindings/interconnect/

qcom,rpmh.yaml
     33  - qcom,sc7180-gem-noc
     46  - qcom,sc7280-gem-noc
     58  - qcom,sc8180x-gem-noc
     81  - qcom,sm8150-gem-noc
     91  - qcom,sm8250-gem-noc
    101  - qcom,sm8350-gem-noc

/linux/drivers/gpu/drm/radeon/

radeon_prime.c
     61  mutex_lock(&rdev->gem.mutex);  in radeon_gem_prime_import_sg_table()
     62  list_add_tail(&bo->list, &rdev->gem.objects);  in radeon_gem_prime_import_sg_table()
     63  mutex_unlock(&rdev->gem.mutex);  in radeon_gem_prime_import_sg_table()

/linux/drivers/gpu/drm/shmobile/

shmob_drm_plane.c
     43  struct drm_gem_cma_object *gem;  in shmob_drm_plane_compute_base() local
     47  gem = drm_fb_cma_get_gem_obj(fb, 0);  in shmob_drm_plane_compute_base()
     48  splane->dma[0] = gem->paddr + fb->offsets[0]  in shmob_drm_plane_compute_base()
     53  gem = drm_fb_cma_get_gem_obj(fb, 1);  in shmob_drm_plane_compute_base()
     54  splane->dma[1] = gem->paddr + fb->offsets[1]  in shmob_drm_plane_compute_base()

/linux/drivers/gpu/drm/aspeed/

aspeed_gfx_crtc.c
    170  struct drm_gem_cma_object *gem;  in aspeed_gfx_pipe_update() local
    187  gem = drm_fb_cma_get_gem_obj(fb, 0);  in aspeed_gfx_pipe_update()
    188  if (!gem)  in aspeed_gfx_pipe_update()
    190  writel(gem->paddr, priv->base + CRT_ADDR);  in aspeed_gfx_pipe_update()

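The rcar-du, shmobile and aspeed hits above all follow the same CMA scanout pattern: fetch the struct drm_gem_cma_object behind a framebuffer plane with drm_fb_cma_get_gem_obj() and derive the DMA address to program into the display hardware from gem->paddr plus the framebuffer's plane offset. A hedged sketch of that calculation; my_plane_base() is a hypothetical helper and the exact address math varies per driver:

```c
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_cma_helper.h>

static dma_addr_t my_plane_base(struct drm_framebuffer *fb,
				unsigned int x, unsigned int y)
{
	/* CMA GEM object backing plane 0 of the framebuffer */
	struct drm_gem_cma_object *gem = drm_fb_cma_get_gem_obj(fb, 0);

	/* buffer start + plane offset + position within the plane */
	return gem->paddr + fb->offsets[0] +
	       y * fb->pitches[0] + x * fb->format->cpp[0];
}
```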