Searched refs:map (Results 1 – 25 of 112) sorted by relevance

/xen/tools/libxc/
xc_physdev.c
50 map.domid = domid; in xc_physdev_map_pirq()
52 map.index = index; in xc_physdev_map_pirq()
55 rc = do_physdev_op(xch, PHYSDEVOP_map_pirq, &map, sizeof(map)); in xc_physdev_map_pirq()
58 *pirq = map.pirq; in xc_physdev_map_pirq()
81 map.domid = domid; in xc_physdev_map_pirq_msi()
83 map.index = index; in xc_physdev_map_pirq_msi()
84 map.pirq = *pirq; in xc_physdev_map_pirq_msi()
85 map.bus = bus; in xc_physdev_map_pirq_msi()
86 map.devfn = devfn; in xc_physdev_map_pirq_msi()
90 rc = do_physdev_op(xch, PHYSDEVOP_map_pirq, &map, sizeof(map)); in xc_physdev_map_pirq_msi()
[all …]
xc_core_arm.c
47 xc_core_memory_map_t *map; in xc_core_arch_memory_map_get() local
52 map = malloc(sizeof(*map)); in xc_core_arch_memory_map_get()
53 if ( map == NULL ) in xc_core_arch_memory_map_get()
59 map->addr = 0; in xc_core_arch_memory_map_get()
60 map->size = ((uint64_t)p2m_size) << PAGE_SHIFT; in xc_core_arch_memory_map_get()
62 *mapp = map; in xc_core_arch_memory_map_get()
xc_pagetab.c
35 void *map; in xc_translate_foreign_address() local
85 map = xc_map_foreign_range(xch, dom, PAGE_SIZE, PROT_READ, in xc_translate_foreign_address()
87 if (!map) in xc_translate_foreign_address()
89 memcpy(&pte, map + (paddr & (PAGE_SIZE - 1)), size); in xc_translate_foreign_address()
90 munmap(map, PAGE_SIZE); in xc_translate_foreign_address()
xc_core_x86.c
50 xc_core_memory_map_t *map; in xc_core_arch_memory_map_get() local
55 map = malloc(sizeof(*map)); in xc_core_arch_memory_map_get()
56 if ( map == NULL ) in xc_core_arch_memory_map_get()
62 map->addr = 0; in xc_core_arch_memory_map_get()
63 map->size = ((uint64_t)p2m_size) << PAGE_SHIFT; in xc_core_arch_memory_map_get()
65 *mapp = map; in xc_core_arch_memory_map_get()
xc_dom_x86.c
176 memset(map, 0, sizeof(*map)); in count_pgtables()
182 map->lvls[l].pfn = dom->pfn_alloc_end + map->area.pgtables; in count_pgtables()
198 map->lvls[l].from = map->area.from & ~mask; in count_pgtables()
199 map->lvls[l].to = map->area.to | mask; in count_pgtables()
229 if ( map->lvls[l].from < map->lvls[l].to ) in count_pgtables()
231 ((map->lvls[l].to - map->lvls[l].from) >> bits) + 1; in count_pgtables()
234 map->lvls[l].from, map->lvls[l].to, map->lvls[l].pgtables); in count_pgtables()
235 map->area.pgtables += map->lvls[l].pgtables; in count_pgtables()
263 map->area.pfn = 0; in alloc_pgtables_pv()
371 map = domx86->maps + m; in get_pg_table()
[all …]
/xen/xen/arch/x86/
e820.c
98 (unsigned long long)(map[i].addr + map[i].size) - 1); in print_e820_memory_map()
355 end = PFN_DOWN(e820.map[i].addr + e820.map[i].size); in find_max_pfn()
398 e820.map[i].size = limit - e820.map[i].addr; in clip_to_limit()
402 memmove(&e820.map[i], &e820.map[i+1], in clip_to_limit()
583 memmove(e820->map + i + 1, e820->map + i, in e820_add_range()
622 memmove(&e820->map[i+1], &e820->map[i], in e820_change_range_type()
646 memmove(&e820->map[i+2], &e820->map[i], in e820_change_range_type()
661 if ( (e820->map[i].type != e820->map[i+1].type) || in e820_change_range_type()
662 ((e820->map[i].addr + e820->map[i].size) != e820->map[i+1].addr) ) in e820_change_range_type()
664 e820->map[i].size += e820->map[i+1].size; in e820_change_range_type()
[all …]
setup.c
721 memcpy(map, bootsym(bios_e820map), sizeof(*map) * n); in copy_bios_e820()
1031 if ( (map->base_addr_high == 0) && (map->length_high != 0) ) in __start_xen()
1043 ((u64)map->base_addr_high << 32) | (u64)map->base_addr_low; in __start_xen()
1045 ((u64)map->length_high << 32) | (u64)map->length_low; in __start_xen()
1046 e820_raw.map[e820_raw.nr_map].type = map->type; in __start_xen()
1055 e820_raw.map[0].addr = 0; in __start_xen()
1066 e820_raw.map[0].addr = 0; in __start_xen()
1179 e = (boot_e820.map[i].addr + boot_e820.map[i].size) & ~mask; in __start_xen()
1435 e = (boot_e820.map[i].addr + boot_e820.map[i].size) & ~mask; in __start_xen()
1459 map_e = boot_e820.map[j].addr + boot_e820.map[j].size; in __start_xen()
[all …]
physdev.c
304 physdev_map_pirq_t map; in do_physdev_op() local
311 switch ( map.type ) in do_physdev_op()
315 msi.seg = map.bus >> 16; in do_physdev_op()
319 if ( map.table_base ) in do_physdev_op()
321 msi.seg = map.bus >> 16; in do_physdev_op()
328 msi.bus = map.bus; in do_physdev_op()
329 msi.devfn = map.devfn; in do_physdev_op()
330 msi.entry_nr = map.entry_nr; in do_physdev_op()
331 msi.table_base = map.table_base; in do_physdev_op()
332 ret = physdev_map_pirq(map.domid, map.type, &map.index, &map.pirq, in do_physdev_op()
[all …]
/xen/xen/include/asm-x86/
mpspec.h
41 #define physid_set(physid, map) set_bit(physid, (map).mask) argument
42 #define physid_clear(physid, map) clear_bit(physid, (map).mask) argument
43 #define physid_isset(physid, map) test_bit(physid, (map).mask) argument
44 #define physid_test_and_set(physid, map) test_and_set_bit(physid, (map).mask) argument
46 #define first_physid(map) find_first_bit((map).mask, \ argument
48 #define next_physid(id, map) find_next_bit((map).mask, \ argument
50 #define last_physid(map) ({ \ argument
51 const unsigned long *mask = (map).mask; \
61 #define physids_clear(map) bitmap_zero((map).mask, MAX_APICS) argument
63 #define physids_empty(map) bitmap_empty((map).mask, MAX_APICS) argument
[all …]
/xen/xen/drivers/acpi/apei/
apei-io.c
54 struct apei_iomap *map; in __apei_find_iomap() local
57 if (map->paddr + map->size >= paddr + size && in __apei_find_iomap()
59 return map; in __apei_find_iomap()
70 if (map) in __apei_ioremap_fast()
71 return map->vaddr + (paddr - map->paddr); in __apei_ioremap_fast()
117 if (!map) in apei_pre_map()
122 xfree(map); in apei_pre_map()
130 map->vaddr = vaddr; in apei_pre_map()
136 return map->vaddr + (paddr - map->paddr); in apei_pre_map()
149 if (map) in apei_post_unmap()
[all …]
/xen/tools/xentrace/
mread.c
71 if ( h->map[h->last].buffer in mread64()
81 if ( h->map[bind].buffer in mread64()
98 if(h->map[h->clock].buffer == NULL) in mread64()
103 if(!h->map[h->clock].accessed) in mread64()
108 h->map[h->clock].accessed=0; in mread64()
110 if(h->map[h->clock].buffer) in mread64()
125 if ( h->map[h->clock].buffer == MAP_FAILED ) in mread64()
127 h->map[h->clock].buffer = NULL; in mread64()
136 h->map[bind].accessed=1; in mread64()
137 b=h->map[bind].buffer; in mread64()
[all …]
/xen/tools/firmware/hvmloader/
e820.c
46 if ( memory_map.map[i].type == E820_RESERVED && in memory_map_setup()
48 memory_map.map[i].addr, memory_map.map[i].size) ) in memory_map_setup()
71 uint64_t map_start = memory_map.map[i].addr; in adjust_memory_map()
72 uint64_t map_size = memory_map.map[i].size; in adjust_memory_map()
76 if ( memory_map.map[i].type == E820_RAM && in adjust_memory_map()
84 if ( memory_map.map[i].type == E820_RAM && in adjust_memory_map()
97 memory_map.map[i].addr = GB(4); in adjust_memory_map()
98 memory_map.map[i].size = in adjust_memory_map()
100 memory_map.map[i].addr; in adjust_memory_map()
101 memory_map.map[i].type = E820_RAM; in adjust_memory_map()
[all …]
pci.c
46 if ( memory_map.map[i].type == E820_RESERVED && in check_overlap_all()
48 memory_map.map[i].addr, in check_overlap_all()
49 memory_map.map[i].size) ) in check_overlap_all()
65 end = memory_map.map[i].addr + memory_map.map[i].size; in find_next_rmrr()
67 if ( memory_map.map[i].type == E820_RESERVED && in find_next_rmrr()
487 memory_map.map[next_rmrr].addr, in pci_setup()
488 memory_map.map[next_rmrr].size) ) in pci_setup()
490 base = memory_map.map[next_rmrr].addr + in pci_setup()
491 memory_map.map[next_rmrr].size; in pci_setup()
/xen/xen/include/asm-x86/mach-generic/
mach_apic.h
61 static inline void ioapic_phys_id_map(physid_mask_t *map) in ioapic_phys_id_map() argument
63 *map = phys_cpu_present_map; in ioapic_phys_id_map()
66 static inline int check_apicid_used(const physid_mask_t *map, int apicid) in check_apicid_used() argument
68 return physid_isset(apicid, *map); in check_apicid_used()
76 static inline void set_apicid(int phys_apicid, physid_mask_t *map) in set_apicid() argument
78 physid_set(phys_apicid, *map); in set_apicid()
/xen/tools/libs/gnttab/
freebsd.c
73 struct ioctl_gntdev_map_grant_ref map; in osdep_gnttab_grant_map() local
82 map.refs = malloc(refs_size); in osdep_gnttab_grant_map()
87 if ( map.refs == MAP_FAILED ) in osdep_gnttab_grant_map()
96 map.refs[i].domid = domids[i * domids_stride]; in osdep_gnttab_grant_map()
97 map.refs[i].ref = refs[i]; in osdep_gnttab_grant_map()
100 map.count = count; in osdep_gnttab_grant_map()
109 map.index); in osdep_gnttab_grant_map()
115 notify.index = map.index; in osdep_gnttab_grant_map()
143 unmap_grant.index = map.index; in osdep_gnttab_grant_map()
152 munmap(map.refs, refs_size); in osdep_gnttab_grant_map()
[all …]
linux.c
93 struct ioctl_gntdev_map_grant_ref *map; in osdep_gnttab_grant_map() local
94 unsigned int map_size = sizeof(*map) + (count - 1) * sizeof(map->refs[0]); in osdep_gnttab_grant_map()
103 map = alloca(map_size); in osdep_gnttab_grant_map()
109 if ( map == MAP_FAILED ) in osdep_gnttab_grant_map()
118 map->refs[i].domid = domids[i * domids_stride]; in osdep_gnttab_grant_map()
119 map->refs[i].ref = refs[i]; in osdep_gnttab_grant_map()
122 map->count = count; in osdep_gnttab_grant_map()
131 map->index); in osdep_gnttab_grant_map()
153 notify.index = map->index; in osdep_gnttab_grant_map()
179 unmap_grant.index = map->index; in osdep_gnttab_grant_map()
[all …]
/xen/xen/include/xen/
bitmap.h
88 extern void __bitmap_set(unsigned long *map, unsigned int start, int len);
89 extern void __bitmap_clear(unsigned long *map, unsigned int start, int len);
242 static inline void bitmap_set(unsigned long *map, unsigned int start, in bitmap_set() argument
246 __set_bit(start, map); in bitmap_set()
251 memset((char *)map + start / 8, 0xff, nbits / 8); in bitmap_set()
253 __bitmap_set(map, start, nbits); in bitmap_set()
256 static inline void bitmap_clear(unsigned long *map, unsigned int start, in bitmap_clear() argument
260 __clear_bit(start, map); in bitmap_clear()
265 memset((char *)map + start / 8, 0, nbits / 8); in bitmap_clear()
267 __bitmap_clear(map, start, nbits); in bitmap_clear()
/xen/tools/libxl/
libxl_utils.c
609 bitmap->map = libxl__calloc(NOGC, sizeof(*bitmap->map), sz); in libxl_bitmap_alloc()
618 memset(map, '\0', sizeof(*map)); in libxl_bitmap_init()
623 if (!map) in libxl_bitmap_dispose()
626 free(map->map); in libxl_bitmap_dispose()
627 map->map = NULL; in libxl_bitmap_dispose()
638 memcpy(dptr->map, sptr->map, sz * sizeof(*dptr->map)); in libxl_bitmap_copy()
653 memcpy(dptr->map, sptr->map, sz * sizeof(*dptr->map)); in libxl__bitmap_copy_best_effort()
664 memcpy(dptr->map, sptr->map, sptr->size * sizeof(*sptr->map)); in libxl_bitmap_copy_alloc()
737 or_map->map[i] = (small_map->map[i] | large_map->map[i]); in libxl_bitmap_or()
740 or_map->map[i] = large_map->map[i]; in libxl_bitmap_or()
[all …]
libxl_x86.c
242 struct e820entry map[], in e820_host_sanitize() argument
247 rc = xc_get_machine_memory_map(CTX->xch, map, *nr); in e820_host_sanitize()
253 rc = e820_sanitize(gc, map, nr, b_info->target_memkb, in e820_host_sanitize()
265 struct e820entry map[E820MAX]; in libxl__e820_alloc() local
276 rc = e820_host_sanitize(gc, b_info, map, &nr); in libxl__e820_alloc()
571 struct e820entry map[E820MAX]; in libxl__arch_vnuma_build_vmemrange() local
584 rc = e820_host_sanitize(gc, b_info, map, &nr_e820); in libxl__arch_vnuma_build_vmemrange()
602 if (map[e820_count].type != E820_RAM) { in libxl__arch_vnuma_build_vmemrange()
613 remaining_bytes : map[e820_count].size; in libxl__arch_vnuma_build_vmemrange()
619 map[e820_count].addr += bytes; in libxl__arch_vnuma_build_vmemrange()
[all …]
/xen/xen/drivers/vpci/
header.c
33 bool map; member
39 const struct map_data *map = data; in map_range() local
55 rc = map->map ? map_mmio_regions(map->d, _gfn(s), size, _mfn(s)) in map_range()
66 map->map ? "" : "un", s, e, map->d->domain_id, rc); in map_range()
88 bool map = cmd & PCI_COMMAND_MEMORY; in modify_decoding() local
99 if ( map && !rom_only && vpci_make_msix_hole(pdev) ) in modify_decoding()
112 (map ? PCI_ROM_ADDRESS_ENABLE : 0); in modify_decoding()
114 header->bars[i].enabled = header->rom_enabled = map; in modify_decoding()
121 header->bars[i].enabled = map; in modify_decoding()
136 .map = v->vpci.cmd & PCI_COMMAND_MEMORY, in vpci_process_pending()
[all …]
/xen/xen/xsm/flask/ss/
ebitmap.c
227 u64 map; in ebitmap_read() local
310 rc = next_entry(&map, fp, sizeof(u64)); in ebitmap_read()
316 map = le64_to_cpu(map); in ebitmap_read()
319 while ( map ) in ebitmap_read()
321 n->maps[index++] = map & (-1UL); in ebitmap_read()
322 map = EBITMAP_SHIFT_UNIT_SIZE(map); in ebitmap_read()
/xen/xen/drivers/cpufreq/
cpufreq.c
55 cpumask_var_t map; member
200 if (!zalloc_cpumask_var(&cpufreq_dom->map)) { in cpufreq_add_cpu()
209 firstcpu = cpumask_first(cpufreq_dom->map); in cpufreq_add_cpu()
253 firstcpu = cpumask_first(cpufreq_dom->map); in cpufreq_add_cpu()
262 cpumask_set_cpu(cpu, cpufreq_dom->map); in cpufreq_add_cpu()
299 cpumask_clear_cpu(cpu, cpufreq_dom->map); in cpufreq_add_cpu()
307 if (cpumask_empty(cpufreq_dom->map)) { in cpufreq_add_cpu()
309 free_cpumask_var(cpufreq_dom->map); in cpufreq_add_cpu()
363 cpumask_clear_cpu(cpu, cpufreq_dom->map); in cpufreq_del_cpu()
373 if (cpumask_empty(cpufreq_dom->map)) { in cpufreq_del_cpu()
[all …]
/xen/xen/arch/x86/mm/
p2m-pod.c
689 unsigned long *map = NULL; in p2m_pod_zero_check_superpage() local
766 if ( *(map + j) != 0 ) in p2m_pod_zero_check_superpage()
769 unmap_domain_page(map); in p2m_pod_zero_check_superpage()
804 if ( *(map+j) != 0 ) in p2m_pod_zero_check_superpage()
810 unmap_domain_page(map); in p2m_pod_zero_check_superpage()
888 map[i] = NULL; in p2m_pod_zero_check()
907 if ( !map[i] ) in p2m_pod_zero_check()
940 map[i] = NULL; in p2m_pod_zero_check()
951 if ( !map[i] ) in p2m_pod_zero_check()
960 map[i] = NULL; in p2m_pod_zero_check()
[all …]
/xen/xen/common/compat/
memory.c
22 struct compat_reserved_device_memory_map map; member
30 uint32_t sbdf = PCI_SBDF3(grdm->map.dev.pci.seg, grdm->map.dev.pci.bus, in get_reserved_device_memory()
31 grdm->map.dev.pci.devfn).sbdf; in get_reserved_device_memory()
33 if ( !(grdm->map.flags & XENMEM_RDM_ALL) && (sbdf != id) ) in get_reserved_device_memory()
36 if ( grdm->used_entries < grdm->map.nr_entries ) in get_reserved_device_memory()
379 if ( copy_from_guest(&grdm.map, compat, 1) || in compat_memory_op()
380 !compat_handle_okay(grdm.map.buffer, grdm.map.nr_entries) ) in compat_memory_op()
383 if ( grdm.map.flags & ~XENMEM_RDM_ALL ) in compat_memory_op()
390 if ( !rc && grdm.map.nr_entries < grdm.used_entries ) in compat_memory_op()
392 grdm.map.nr_entries = grdm.used_entries; in compat_memory_op()
[all …]
/xen/tools/tests/mem-sharing/
memshrtool.c
133 void *map; in main() local
140 map = xc_map_foreign_range(xch, domid, 4096, PROT_WRITE, gfn); in main()
141 if( map ) in main()
142 munmap(map, 4096); in main()
143 R((int)!map); in main()
