Searched refs:page_shift (Results 1 – 25 of 117) sorted by relevance

/linux/drivers/infiniband/hw/hns/
hns_roce_alloc.c
68 u32 page_shift, u32 flags) in hns_roce_buf_alloc() argument
77 if (WARN_ON(page_shift < HNS_HW_PAGE_SHIFT)) in hns_roce_buf_alloc()
85 buf->page_shift = page_shift; in hns_roce_buf_alloc()
86 page_size = 1 << buf->page_shift; in hns_roce_buf_alloc()
135 unsigned int page_shift) in hns_roce_get_kmem_bufs() argument
141 if (page_shift > buf->trunk_shift) { in hns_roce_get_kmem_bufs()
143 page_shift, buf->trunk_shift); in hns_roce_get_kmem_bufs()
151 offset += (1 << page_shift); in hns_roce_get_kmem_bufs()
159 unsigned int page_shift) in hns_roce_get_umem_bufs() argument
165 rdma_umem_for_each_dma_block(umem, &biter, 1 << page_shift) { in hns_roce_get_umem_bufs()
hns_roce_mr.c
663 unsigned int page_shift) in mtr_check_direct_pages() argument
665 size_t page_size = 1 << page_shift; in mtr_check_direct_pages()
711 buf_attr->page_shift, in mtr_alloc_bufs()
739 mtr->umem, page_shift); in mtr_map_bufs()
742 mtr->kmem, page_shift); in mtr_map_bufs()
892 unsigned int page_shift; in mtr_init_buf_cfg() local
905 page_shift = HNS_HW_PAGE_SHIFT; in mtr_init_buf_cfg()
913 page_shift = attr->page_shift; in mtr_init_buf_cfg()
915 1 << page_shift); in mtr_init_buf_cfg()
916 cfg->buf_pg_shift = page_shift; in mtr_init_buf_cfg()
[all …]
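
These hns_roce hits show the basic page_shift contract: a shift of n means pages of 1 << n bytes (line 86: page_size = 1 << buf->page_shift), and a buffer is covered by stepping through it in that stride (line 151: offset += (1 << page_shift)). A minimal userspace sketch of that arithmetic follows; demo_walk_pages is an illustrative name, not code from the driver.

#include <stdio.h>

/* Walk a buffer of 'size' bytes in pages of (1 << page_shift) bytes,
 * mirroring page_size = 1 << buf->page_shift and the
 * offset += (1 << page_shift) loop in hns_roce_get_kmem_bufs(). */
static void demo_walk_pages(size_t size, unsigned int page_shift)
{
    size_t page_size = (size_t)1 << page_shift;
    size_t offset;

    for (offset = 0; offset < size; offset += page_size)
        printf("page at offset 0x%zx\n", offset);
}

int main(void)
{
    demo_walk_pages(3 * 4096 + 100, 12);    /* prints four 4 KiB pages */
    return 0;
}
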
/linux/drivers/infiniband/core/
umem_odp.c
61 size_t page_size = 1UL << umem_odp->page_shift; in ib_init_umem_odp()
75 ndmas = (end - start) >> umem_odp->page_shift; in ib_init_umem_odp()
136 umem_odp->page_shift = PAGE_SHIFT; in ib_umem_odp_alloc_implicit()
184 odp_data->page_shift = PAGE_SHIFT; in ib_umem_odp_alloc_child()
247 umem_odp->page_shift = PAGE_SHIFT; in ib_umem_odp_get()
250 umem_odp->page_shift = HPAGE_SHIFT; in ib_umem_odp_get()
356 unsigned int page_shift, hmm_order, pfn_start_idx; in ib_umem_odp_map_dma_and_lock() local
368 page_shift = umem_odp->page_shift; in ib_umem_odp_map_dma_and_lock()
442 if (hmm_order + PAGE_SHIFT < page_shift) { in ib_umem_odp_map_dma_and_lock()
446 __func__, hmm_order, page_shift); in ib_umem_odp_map_dma_and_lock()
[all …]
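
The ib_init_umem_odp() hits size the per-page bookkeeping by shifting the byte length of the range down by page_shift (line 75: ndmas = (end - start) >> umem_odp->page_shift). A standalone sketch of that page-count calculation; the demo_ names and the explicit outward alignment are assumptions for illustration.

#include <stdio.h>
#include <stdint.h>

/* Number of (1 << page_shift)-sized pages covering [start, end),
 * as in ndmas = (end - start) >> umem_odp->page_shift. */
static uint64_t demo_num_pages(uint64_t start, uint64_t end, unsigned int page_shift)
{
    uint64_t page_size = 1ULL << page_shift;

    /* Align outward so a partial first/last page still gets a slot. */
    start &= ~(page_size - 1);
    end = (end + page_size - 1) & ~(page_size - 1);
    return (end - start) >> page_shift;
}

int main(void)
{
    /* An 8 KiB + 1 byte range starting mid-page touches 3 x 4 KiB pages. */
    printf("%llu\n", (unsigned long long)demo_num_pages(0x1800, 0x1800 + 8193, 12));
    return 0;
}
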
/linux/drivers/pci/endpoint/
pci-epc-mem.c
26 unsigned int page_shift = ilog2(mem->window.page_size); in pci_epc_mem_get_order() local
29 size >>= page_shift; in pci_epc_mem_get_order()
53 unsigned int page_shift; in pci_epc_multi_mem_init() local
73 page_shift = ilog2(page_size); in pci_epc_multi_mem_init()
74 pages = windows[i].size >> page_shift; in pci_epc_multi_mem_init()
173 unsigned int page_shift; in pci_epc_mem_alloc_addr() local
188 page_shift = ilog2(mem->window.page_size); in pci_epc_mem_alloc_addr()
190 ((phys_addr_t)pageno << page_shift); in pci_epc_mem_alloc_addr()
238 unsigned int page_shift; in pci_epc_mem_free_addr() local
250 page_shift = ilog2(page_size); in pci_epc_mem_free_addr()
[all …]
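
pci-epc-mem.c recovers page_shift from a window's power-of-two page_size with ilog2() and then converts byte sizes to page counts and page numbers back to physical addresses. A userspace sketch of the same conversions, with a small loop standing in for the kernel's ilog2(); all demo_ names and values are assumed.

#include <stdio.h>
#include <stdint.h>

/* Integer log2 of a power-of-two page size, standing in for ilog2(). */
static unsigned int demo_ilog2(uint64_t v)
{
    unsigned int s = 0;

    while (v >>= 1)
        s++;
    return s;
}

int main(void)
{
    uint64_t page_size = 4096;
    uint64_t win_size = 1 << 20;                      /* 1 MiB window */
    unsigned int page_shift = demo_ilog2(page_size);
    uint64_t pages = win_size >> page_shift;          /* pages = size >> page_shift */
    uint64_t phys_base = 0x90000000ULL;
    unsigned int pageno = 5;

    /* Address of an allocated page, as in pci_epc_mem_alloc_addr(). */
    printf("pages=%llu addr=0x%llx\n",
           (unsigned long long)pages,
           (unsigned long long)(phys_base + ((uint64_t)pageno << page_shift)));
    return 0;
}
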
/linux/tools/testing/selftests/kvm/lib/aarch64/
processor.c
26 unsigned int shift = (vm->pgtable_levels - 1) * (vm->page_shift - 3) + vm->page_shift; in pgd_index()
34 unsigned int shift = 2 * (vm->page_shift - 3) + vm->page_shift; in pud_index()
35 uint64_t mask = (1UL << (vm->page_shift - 3)) - 1; in pud_index()
45 unsigned int shift = (vm->page_shift - 3) + vm->page_shift; in pmd_index()
46 uint64_t mask = (1UL << (vm->page_shift - 3)) - 1; in pmd_index()
56 uint64_t mask = (1UL << (vm->page_shift - 3)) - 1; in pte_index()
57 return (gva >> vm->page_shift) & mask; in pte_index()
62 uint64_t mask = ((1UL << (vm->va_bits - vm->page_shift)) - 1) << vm->page_shift; in pte_addr()
68 unsigned int shift = (vm->pgtable_levels - 1) * (vm->page_shift - 3) + vm->page_shift; in ptrs_per_pgd()
74 return 1 << (vm->page_shift - 3); in ptrs_per_pte()
[all …]
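
The aarch64 selftest helpers derive page-table geometry from page_shift: each level holds 1 << (page_shift - 3) eight-byte descriptors, and a level's index is the VA shifted right by page_shift plus (page_shift - 3) bits per level above the leaf, masked to that width. A compact sketch of those formulas for a 4-level, 4 KiB-granule layout; the demo_ helper and sample address are assumptions.

#include <stdio.h>
#include <stdint.h>

/* Index into a translation-table level; level 0 = PGD ... level 3 = PTE,
 * mirroring pgd_index()/pud_index()/pmd_index()/pte_index() above. */
static uint64_t demo_tt_index(uint64_t gva, unsigned int page_shift,
                              unsigned int levels, unsigned int level)
{
    unsigned int bits_per_level = page_shift - 3;   /* 9 for 4 KiB pages */
    unsigned int shift = page_shift + (levels - 1 - level) * bits_per_level;
    uint64_t mask = (1ULL << bits_per_level) - 1;

    return (gva >> shift) & mask;
}

int main(void)
{
    uint64_t gva = 0x0000ffff80001000ULL;
    unsigned int lvl;

    for (lvl = 0; lvl < 4; lvl++)
        printf("level %u index %llu\n", lvl,
               (unsigned long long)demo_tt_index(gva, 12, 4, lvl));
    return 0;
}
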
/linux/tools/testing/selftests/powerpc/mm/
bad_accesses.c
69 unsigned long i, j, addr, region_shift, page_shift, page_size; in test() local
84 page_shift = 16; in test()
86 page_shift = 12; in test()
103 (1 << page_shift) >> 10, in test()
121 for (j = page_shift - 1; j < 60; j++) { in test()
130 addr = (base | delta) & ~((1 << page_shift) - 1); in test()
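
bad_accesses.c picks page_shift from the MMU mode (16 for 64 KiB pages, 12 for 4 KiB) and rounds candidate addresses down to a page boundary with a mask, as on line 130. The same masking in isolation, with values assumed for illustration:

#include <stdio.h>

int main(void)
{
    unsigned long page_shift = 16;               /* 64 KiB pages */
    unsigned long page_size = 1UL << page_shift;
    unsigned long addr = 0x123456789abcUL;

    /* Round down to the containing page, as in
     * addr = (base | delta) & ~((1 << page_shift) - 1). */
    printf("0x%lx -> 0x%lx (%lu KiB pages)\n",
           addr, addr & ~(page_size - 1), page_size >> 10);
    return 0;
}
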
/linux/arch/powerpc/include/asm/
ultravisor.h
50 u64 page_shift) in uv_page_in() argument
53 page_shift); in uv_page_in()
57 u64 page_shift) in uv_page_out() argument
60 page_shift); in uv_page_out()
75 static inline int uv_page_inval(u64 lpid, u64 gpa, u64 page_shift) in uv_page_inval() argument
77 return ucall_norets(UV_PAGE_INVAL, lpid, gpa, page_shift); in uv_page_inval()
kvm_book3s_uvmem.h
15 unsigned long page_shift);
19 unsigned long page_shift);
54 unsigned long flags, unsigned long page_shift) in kvmppc_h_svm_page_in() argument
61 unsigned long flags, unsigned long page_shift) in kvmppc_h_svm_page_out() argument
iommu.h
165 __u32 page_shift,
170 __u32 page_shift,
291 extern int iommu_tce_check_ioba(unsigned long page_shift,
294 extern int iommu_tce_check_gpa(unsigned long page_shift,
/linux/arch/powerpc/kvm/
book3s_hv_uvmem.c
560 gpa, 0, page_shift); in __kvmppc_svm_page_out()
579 unsigned long page_shift, in kvmppc_svm_page_out() argument
733 unsigned long page_shift, in kvmppc_svm_page_in() argument
771 gpa, 0, page_shift); in kvmppc_svm_page_in()
868 unsigned long page_shift) in kvmppc_share_page() argument
909 page_shift)) { in kvmppc_share_page()
928 unsigned long page_shift) in kvmppc_h_svm_page_in() argument
939 if (page_shift != PAGE_SHIFT) in kvmppc_h_svm_page_in()
961 end = start + (1UL << page_shift); in kvmppc_h_svm_page_in()
1048 if (page_shift != PAGE_SHIFT) in kvmppc_h_svm_page_out()
[all …]
book3s_64_vio.c
135 if ((tbltmp->it_page_shift <= stt->page_shift) && in kvm_spapr_tce_attach_iommu_group()
137 stt->offset << stt->page_shift) && in kvm_spapr_tce_attach_iommu_group()
139 stt->size << stt->page_shift)) { in kvm_spapr_tce_attach_iommu_group()
288 if (!args->size || args->page_shift < 12 || args->page_shift > 34 || in kvm_vm_ioctl_create_spapr_tce()
303 stt->page_shift = args->page_shift; in kvm_vm_ioctl_create_spapr_tce()
370 if (iommu_tce_check_gpa(stt->page_shift, gpa)) in kvmppc_tce_validate()
583 entry = ioba >> stt->page_shift; in kvmppc_h_put_tce()
625 entry = ioba >> stt->page_shift; in kvmppc_h_put_tce_indirect()
728 unsigned long entry = ioba >> stt->page_shift; in kvmppc_h_stuff_tce()
746 kvmppc_tce_put(stt, ioba >> stt->page_shift, tce_value); in kvmppc_h_stuff_tce()
[all …]
book3s_64_vio_hv.c
113 if (iommu_tce_check_gpa(stt->page_shift, gpa)) in kvmppc_rm_tce_validate()
209 idx = (ioba >> stt->page_shift) - stt->offset; in kvmppc_rm_ioba_validate()
310 unsigned long subpages = 1ULL << (stt->page_shift - tbl->it_page_shift); in kvmppc_rm_tce_iommu_unmap()
370 unsigned long subpages = 1ULL << (stt->page_shift - tbl->it_page_shift); in kvmppc_rm_tce_iommu_map()
413 entry = ioba >> stt->page_shift; in kvmppc_rm_h_put_tce()
501 entry = ioba >> stt->page_shift; in kvmppc_rm_h_put_tce_indirect()
613 unsigned long entry = ioba >> stt->page_shift; in kvmppc_rm_h_stuff_tce()
630 for (i = 0; i < npages; ++i, ioba += (1ULL << stt->page_shift)) in kvmppc_rm_h_stuff_tce()
631 kvmppc_rm_tce_put(stt, ioba >> stt->page_shift, tce_value); in kvmppc_rm_h_stuff_tce()
635 iommu_tce_kill_rm(stit->tbl, ioba >> stt->page_shift, npages); in kvmppc_rm_h_stuff_tce()
[all …]
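
Across the book3s TCE code a guest I/O bus address (ioba) becomes a table entry index by shifting right by the table's page_shift, and when the guest TCE page is larger than the host IOMMU page one entry fans out into 1 << (stt->page_shift - tbl->it_page_shift) subpages. A standalone sketch of both conversions; demo_tce_table and the sample values are illustrative, not the kernel's kvmppc_spapr_tce_table.

#include <stdio.h>
#include <stdint.h>

struct demo_tce_table {
    unsigned int page_shift;   /* guest-visible TCE page size, e.g. 16 for 64 KiB */
    uint64_t offset;           /* first entry's page number within the DMA window */
};

int main(void)
{
    struct demo_tce_table stt = { .page_shift = 16, .offset = 0 };
    unsigned int it_page_shift = 12;   /* host IOMMU page size */
    uint64_t ioba = 0x30000;           /* guest I/O bus address */

    uint64_t entry = ioba >> stt.page_shift;                        /* entry = ioba >> stt->page_shift */
    uint64_t subpages = 1ULL << (stt.page_shift - it_page_shift);   /* one 64 KiB entry = 16 x 4 KiB */

    printf("entry %llu, %llu host pages per entry\n",
           (unsigned long long)(entry - stt.offset),
           (unsigned long long)subpages);
    return 0;
}
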
/linux/tools/testing/selftests/vm/
hmm-tests.c
60 unsigned int page_shift; in FIXTURE() local
68 unsigned int page_shift; in FIXTURE() local
236 size = npages << self->page_shift; in TEST_F()
299 size = npages << self->page_shift; in TEST_F()
358 size = npages << self->page_shift; in TEST_F()
406 size = npages << self->page_shift; in TEST_F()
474 size = npages << self->page_shift; in TEST_F()
552 size = npages << self->page_shift; in TEST_F()
644 npages = size >> self->page_shift; in TEST_F()
694 npages = size >> self->page_shift; in TEST_F()
[all …]
/linux/tools/testing/selftests/kvm/lib/
kvm_util.c
238 vm->page_shift = vm_guest_mode_params[mode].page_shift; in vm_create()
706 if ((ptr1 >> vm->page_shift) != ((ptr1 + amt) >> vm->page_shift)) in kvm_memcmp_hva_gva()
708 if ((ptr2 >> vm->page_shift) != ((ptr2 + amt) >> vm->page_shift)) in kvm_memcmp_hva_gva()
711 assert((ptr1 >> vm->page_shift) == ((ptr1 + amt - 1) >> vm->page_shift)); in kvm_memcmp_hva_gva()
712 assert((ptr2 >> vm->page_shift) == ((ptr2 + amt - 1) >> vm->page_shift)); in kvm_memcmp_hva_gva()
936 guest_paddr >> vm->page_shift, npages); in vm_userspace_mem_region_add()
1284 vaddr >> vm->page_shift); in vm_vaddr_alloc()
2231 base = pg = paddr_min >> vm->page_shift; in vm_phy_pages_alloc()
2328 return vm->page_shift; in vm_get_page_shift()
2347 unsigned int page_shift, in vm_calc_num_pages() argument
[all …]
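
kvm_memcmp_hva_gva() caps each comparison chunk so it never crosses a page: two addresses lie on the same page exactly when they agree after shifting right by page_shift, which is what the asserts on lines 711-712 encode. A sketch of that check; demo_same_page is an assumed helper name.

#include <stdio.h>
#include <stdbool.h>
#include <stdint.h>

/* True if [ptr, ptr + amt) stays within one (1 << page_shift)-byte page,
 * matching assert((ptr >> shift) == ((ptr + amt - 1) >> shift)) above. */
static bool demo_same_page(uintptr_t ptr, size_t amt, unsigned int page_shift)
{
    return (ptr >> page_shift) == ((ptr + amt - 1) >> page_shift);
}

int main(void)
{
    printf("%d %d\n",
           demo_same_page(0x1ff0, 16, 12),    /* ends at 0x1fff: same page */
           demo_same_page(0x1ff0, 17, 12));   /* ends at 0x2000: crosses   */
    return 0;
}
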
/linux/tools/testing/selftests/kvm/lib/x86_64/
vmx.c
408 TEST_ASSERT((nested_paddr >> vm->page_shift) <= vm->max_gfn, in nested_pg_map()
416 TEST_ASSERT((paddr >> vm->page_shift) <= vm->max_gfn, in nested_pg_map()
429 pml4e[index[3]].address = vm_alloc_page_table(vm) >> vm->page_shift; in nested_pg_map()
439 pdpe[index[2]].address = vm_alloc_page_table(vm) >> vm->page_shift; in nested_pg_map()
449 pde[index[1]].address = vm_alloc_page_table(vm) >> vm->page_shift; in nested_pg_map()
458 pte[index[0]].address = paddr >> vm->page_shift; in nested_pg_map()
514 i = (region->region.guest_phys_addr >> vm->page_shift) - 1; in nested_map_memslot()
515 last = i + (region->region.memory_size >> vm->page_shift); in nested_map_memslot()
522 (uint64_t)i << vm->page_shift, in nested_map_memslot()
523 (uint64_t)i << vm->page_shift, in nested_map_memslot()
[all …]
processor.c
192 uint64_t *page_table = addr_gpa2hva(vm, pt_pfn << vm->page_shift); in virt_get_pte()
193 int index = vaddr >> (vm->page_shift + level * 9) & 0x1ffu; in virt_get_pte()
212 pte->pfn = paddr >> vm->page_shift; in virt_create_upper_pte()
214 pte->pfn = vm_alloc_page_table(vm) >> vm->page_shift; in virt_create_upper_pte()
244 TEST_ASSERT(sparsebit_is_set(vm->vpages_valid, (vaddr >> vm->page_shift)), in __virt_pg_map()
249 TEST_ASSERT((paddr >> vm->page_shift) <= vm->max_gfn, in __virt_pg_map()
258 pml4e = virt_create_upper_pte(vm, vm->pgd >> vm->page_shift, in __virt_pg_map()
275 pte->pfn = paddr >> vm->page_shift; in __virt_pg_map()
316 (vaddr >> vm->page_shift)), in _vm_get_page_table_entry()
1457 max_gfn = (1ULL << (vm->pa_bits - vm->page_shift)) - 1; in vm_compute_max_gfn()
[all …]
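
The x86_64 selftest walks a 4-level table in which each level consumes 9 VA bits above page_shift (line 193), and a physical address becomes a page-frame number by shifting it right by page_shift (line 212). A sketch of those two conversions, assuming 4 KiB pages and 512-entry tables; the sample addresses are arbitrary.

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    unsigned int page_shift = 12;
    uint64_t vaddr = 0x00007f1234567000ULL;
    uint64_t paddr = 0x000000012345f000ULL;
    int level;

    /* index = vaddr >> (page_shift + level * 9) & 0x1ff, as in virt_get_pte(). */
    for (level = 0; level <= 3; level++)
        printf("level %d index %llu\n", level,
               (unsigned long long)((vaddr >> (page_shift + level * 9)) & 0x1ffULL));

    /* pfn = paddr >> page_shift, as in pte->pfn = paddr >> vm->page_shift. */
    printf("pfn 0x%llx\n", (unsigned long long)(paddr >> page_shift));
    return 0;
}
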
/linux/drivers/net/ethernet/mellanox/mlx5/core/
alloc.c
81 buf->page_shift = (u8)get_order(size) + PAGE_SHIFT; in mlx5_buf_alloc_node()
94 while (t & ((1 << buf->page_shift) - 1)) { in mlx5_buf_alloc_node()
95 --buf->page_shift; in mlx5_buf_alloc_node()
128 buf->page_shift = PAGE_SHIFT; in mlx5_frag_buf_alloc_node()
142 if (frag->map & ((1 << buf->page_shift) - 1)) { in mlx5_frag_buf_alloc_node()
146 &frag->map, buf->page_shift); in mlx5_frag_buf_alloc_node()
295 addr = buf->frags->map + (i << buf->page_shift); in mlx5_fill_page_array()
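
mlx5_buf_alloc_node() starts with a page_shift large enough to cover the whole allocation (get_order(size) + PAGE_SHIFT, line 81) and then shrinks it until the DMA address is aligned to 1 << page_shift (lines 94-95). A userspace sketch of that shrink loop, with a simple order computation standing in for get_order(); the demo_ names, PAGE_SHIFT value and addresses are assumptions.

#include <stdio.h>
#include <stdint.h>

#define DEMO_PAGE_SHIFT 12   /* assume 4 KiB PAGE_SHIFT */

/* Smallest order such that (1 << (order + PAGE_SHIFT)) >= size, like get_order(). */
static unsigned int demo_get_order(size_t size)
{
    unsigned int order = 0;

    while ((1ULL << (order + DEMO_PAGE_SHIFT)) < size)
        order++;
    return order;
}

int main(void)
{
    size_t size = 3 * 4096;              /* needs an order-2 (16 KiB) page_shift */
    uint64_t dma_addr = 0x9000a000ULL;   /* only 8 KiB aligned */
    unsigned int page_shift = demo_get_order(size) + DEMO_PAGE_SHIFT;

    /* Shrink page_shift until dma_addr is (1 << page_shift)-aligned, as in:
     * while (t & ((1 << buf->page_shift) - 1)) --buf->page_shift; */
    while (dma_addr & ((1ULL << page_shift) - 1))
        page_shift--;

    printf("page_shift %u (page size %llu)\n",
           page_shift, (unsigned long long)(1ULL << page_shift));
    return 0;
}
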
/linux/drivers/net/ethernet/mellanox/mlx4/
mr.c
201 mtt->page_shift = MLX4_ICM_PAGE_SHIFT; in mlx4_mtt_init()
204 mtt->page_shift = page_shift; in mlx4_mtt_init()
419 int page_shift, struct mlx4_mr *mr) in mlx4_mr_alloc_reserved() argument
528 int npages, int page_shift, struct mlx4_mr *mr) in mlx4_mr_alloc() argument
538 access, npages, page_shift, mr); in mlx4_mr_alloc()
591 int page_shift, struct mlx4_mpt_entry *mpt_entry) in mlx4_mr_rereg_mem_write() argument
595 err = mlx4_mtt_init(dev, npages, page_shift, &mr->mtt); in mlx4_mr_rereg_mem_write()
601 mpt_entry->entity_size = cpu_to_be32(page_shift); in mlx4_mr_rereg_mem_write()
610 if (mr->mtt.page_shift == 0) in mlx4_mr_rereg_mem_write()
613 if (mr->mtt.order >= 0 && mr->mtt.page_shift == 0) { in mlx4_mr_rereg_mem_write()
[all …]
/linux/arch/powerpc/platforms/pseries/
iommu.c
512 tbl->it_offset = win_addr >> page_shift; in iommu_table_setparms_common()
513 tbl->it_size = window_size >> page_shift; in iommu_table_setparms_common()
514 tbl->it_page_shift = page_shift; in iommu_table_setparms_common()
1073 BUID_LO(buid), page_shift, window_shift); in create_ddw()
1180 u32 page_shift, u32 window_shift) in ddw_property_create() argument
1202 ddwprop->tce_shift = cpu_to_be32(page_shift); in ddw_property_create()
1225 int page_shift; in enable_ddw() local
1325 page_shift = iommu_get_page_shift(query.page_size); in enable_ddw()
1326 if (!page_shift) { in enable_ddw()
1342 (1ULL << (MAX_PHYSMEM_BITS - page_shift))) in enable_ddw()
[all …]
/linux/drivers/mtd/nand/raw/
nand_bbt.c
180 from = ((loff_t)page) << this->page_shift; in read_bbt()
561 int blocktopage = this->bbt_erase_shift - this->page_shift; in search_bbt()
667 (this->bbt_erase_shift - this->page_shift); in get_bbt_block()
695 page = block << (this->bbt_erase_shift - this->page_shift); in get_bbt_block()
819 to = ((loff_t)page) << this->page_shift; in write_bbt()
835 ops.ooblen = (len >> this->page_shift) * mtd->oobsize; in write_bbt()
842 pageoffs = page - (int)(to >> this->page_shift); in write_bbt()
843 offs = pageoffs << this->page_shift; in write_bbt()
870 (len >> this->page_shift)* mtd->oobsize); in write_bbt()
1088 len += (len >> this->page_shift) * mtd->oobsize; in nand_update_bbt()
[all …]
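
In nand_bbt.c, page_shift converts a NAND page index into a byte offset (line 180: from = ((loff_t)page) << this->page_shift) and sizes the out-of-band transfer as one oobsize per page (line 835). A small sketch with assumed geometry (2 KiB pages, 64-byte OOB):

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    unsigned int page_shift = 11;          /* 2 KiB NAND pages (assumed) */
    unsigned int oobsize = 64;             /* OOB bytes per page (assumed) */
    uint64_t page = 1280;                  /* page index of the BBT block */
    size_t len = 4 * (1u << page_shift);   /* four pages of BBT data */

    uint64_t from = page << page_shift;                /* from = ((loff_t)page) << this->page_shift */
    size_t ooblen = (len >> page_shift) * oobsize;     /* ops.ooblen in write_bbt() */

    printf("offset 0x%llx, ooblen %zu\n", (unsigned long long)from, ooblen);
    return 0;
}
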
/linux/arch/powerpc/platforms/powernv/
pci-ioda-tce.c
50 u64 dma_offset, unsigned int page_shift) in pnv_pci_setup_iommu_table() argument
54 tbl->it_page_shift = page_shift; in pnv_pci_setup_iommu_table()
292 __u32 page_shift, __u64 window_size, __u32 levels, in pnv_pci_ioda2_table_alloc_pages() argument
299 unsigned int entries_shift = window_shift - page_shift; in pnv_pci_ioda2_table_alloc_pages()
315 if ((level_shift - 3) * levels + page_shift >= 55) in pnv_pci_ioda2_table_alloc_pages()
349 page_shift); in pnv_pci_ioda2_table_alloc_pages()
pci.h
291 extern unsigned long pnv_pci_ioda2_get_table_size(__u32 page_shift,
321 __u32 page_shift, __u64 window_size, __u32 levels,
332 u64 dma_offset, unsigned int page_shift);
/linux/drivers/infiniband/sw/rdmavt/
mr.c
368 mr->mr.page_shift = PAGE_SHIFT; in rvt_reg_user_mr()
560 u32 ps = 1 << mr->mr.page_shift; in rvt_set_page()
561 u32 mapped_segs = mr->mr.length >> mr->mr.page_shift; in rvt_set_page()
595 mr->mr.page_shift = PAGE_SHIFT; in rvt_map_mr_sg()
772 if (mr->page_shift) { in rvt_lkey_ok()
780 entries_spanned_by_off = off >> mr->page_shift; in rvt_lkey_ok()
781 off -= (entries_spanned_by_off << mr->page_shift); in rvt_lkey_ok()
879 if (mr->page_shift) { in rvt_rkey_ok()
887 entries_spanned_by_off = off >> mr->page_shift; in rvt_rkey_ok()
888 off -= (entries_spanned_by_off << mr->page_shift); in rvt_rkey_ok()
/linux/include/rdma/
ib_umem_odp.h
44 unsigned int page_shift; member
67 umem_odp->page_shift; in ib_umem_odp_num_pages()
/linux/drivers/infiniband/hw/mlx5/
mr.c
838 u64 page_size = 1ULL << page_shift; in get_octo_len()
975 mr->page_shift = order_base_2(page_size); in alloc_cacheable_mr()
1088 wr->page_shift = mr->page_shift; in mlx5_ib_create_xlt_wr()
1118 int page_shift, int flags) in mlx5_ib_update_xlt() argument
1166 wr.page_shift = page_shift; in mlx5_ib_update_xlt()
1217 1 << mr->page_shift), in mlx5_ib_update_mr_pas()
1226 BIT(mr->page_shift)) { in mlx5_ib_update_mr_pas()
1289 mr->page_shift = order_base_2(page_size); in reg_create()
2002 int access_mode, int page_shift) in mlx5_set_umr_free_mkey() argument
2056 int page_shift = 0; in mlx5_ib_alloc_pi_mr() local
[all …]

Completed in 97 milliseconds