Lines matching refs: av

All hits below are in glibc's malloc.c, where "av" is the conventional parameter name for the arena (an mstate) a routine operates on. Each line shows the source line number, the matching code, and the enclosing function.
1626 unlink_chunk (mstate av, mchunkptr p) in unlink_chunk() argument
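
unlink_chunk detaches a free chunk from its circular doubly linked bin list, first verifying that both neighbours still point back at it (glibc's "corrupted double-linked list" check). A minimal standalone sketch of that pattern, using a simplified chunk type rather than glibc's mchunkptr:

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-in for malloc_chunk: only the bin list links. */
struct chunk
{
  struct chunk *fd;   /* next chunk in the bin */
  struct chunk *bk;   /* previous chunk in the bin */
};

/* Unlink p from its bin, aborting on corruption, mirroring the
   fd->bk == p && bk->fd == p test in unlink_chunk. */
static void
unlink_checked (struct chunk *p)
{
  if (p->fd->bk != p || p->bk->fd != p)
    {
      fprintf (stderr, "corrupted double-linked list\n");
      abort ();
    }
  p->fd->bk = p->bk;
  p->bk->fd = p->fd;
}

int
main (void)
{
  /* Build a one-chunk circular list headed by 'bin', then empty it. */
  struct chunk bin, a;
  bin.fd = bin.bk = &a;
  a.fd = a.bk = &bin;
  unlink_checked (&a);
  printf ("bin empty: %d\n", bin.fd == &bin && bin.bk == &bin);
  return 0;
}
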
1960 malloc_init_state (mstate av) in malloc_init_state() argument
1968 bin = bin_at (av, i); in malloc_init_state()
1973 if (av != &main_arena) in malloc_init_state()
1975 set_noncontiguous (av); in malloc_init_state()
1976 if (av == &main_arena) in malloc_init_state()
1978 atomic_store_relaxed (&av->have_fastchunks, false); in malloc_init_state()
1980 av->top = initial_top (av); in malloc_init_state()
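
malloc_init_state makes every bin an empty circular list (each header links to itself), marks non-main arenas non-contiguous, clears have_fastchunks, and installs the initial top. A hypothetical cut-down arena type showing the same initialization order; note glibc actually overlays bin headers via the bin_at macro rather than using a plain array:

#include <stdbool.h>
#include <stddef.h>

#define NBINS 128   /* illustrative; glibc's bin layout differs in detail */

struct chunk { struct chunk *fd, *bk; };

/* Hypothetical reduced arena: just bins, a top pointer, and two flags. */
struct arena
{
  struct chunk bins[NBINS];
  struct chunk *top;
  bool have_fastchunks;
  bool contiguous;
};

static void
arena_init (struct arena *av, bool is_main)
{
  /* Every bin starts as an empty circular list: the header links to
     itself, so "bin->fd == bin" means empty. */
  for (size_t i = 0; i < NBINS; i++)
    av->bins[i].fd = av->bins[i].bk = &av->bins[i];

  av->contiguous = is_main;       /* non-main arenas: set_noncontiguous */
  av->have_fastchunks = false;    /* atomic_store_relaxed in glibc */
  /* glibc's initial_top (av) is a dummy chunk inside the arena itself;
     here the first bin header serves as that placeholder. */
  av->top = &av->bins[0];
}

int
main (void)
{
  struct arena a;
  arena_init (&a, true);
  return a.bins[5].fd == &a.bins[5] ? 0 : 1;
}
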
2080 do_check_chunk (mstate av, mchunkptr p) in do_check_chunk() argument
2084 char *max_address = (char *) (av->top) + chunksize (av->top); in do_check_chunk()
2085 char *min_address = max_address - av->system_mem; in do_check_chunk()
2090 if (p != av->top) in do_check_chunk()
2092 if (contiguous (av)) in do_check_chunk()
2095 assert (((char *) p + sz) <= ((char *) (av->top))); in do_check_chunk()
2109 if (contiguous (av) && av->top != initial_top (av)) in do_check_chunk()
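
do_check_chunk derives the arena's valid address window from av->top and av->system_mem (lines 2084-2085) and asserts the chunk lies inside it. The range test in isolation, with a hypothetical arena_view standing in for the relevant mstate fields:

#include <assert.h>
#include <stddef.h>

/* Hypothetical arena view: where the heap currently ends and how much
   memory the arena owns, mirroring av->top and av->system_mem. */
struct arena_view
{
  char *top;           /* start of the top chunk */
  size_t top_size;     /* chunksize (av->top) */
  size_t system_mem;   /* bytes obtained from the OS */
};

/* The do_check_chunk range test: a chunk of size sz at p must lie
   between max_address - system_mem and the end of the top chunk. */
static void
check_in_range (const struct arena_view *av, const char *p, size_t sz)
{
  char *max_address = av->top + av->top_size;
  char *min_address = max_address - av->system_mem;
  assert (p >= min_address);
  assert (p + sz <= max_address);
}

int
main (void)
{
  static char heap[4096];
  struct arena_view av = { heap + 3072, 1024, sizeof heap };
  check_in_range (&av, heap + 128, 64);   /* passes: inside the window */
  return 0;
}
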
2125 do_check_free_chunk (mstate av, mchunkptr p) in do_check_free_chunk() argument
2130 do_check_chunk (av, p); in do_check_free_chunk()
2145 assert (next == av->top || inuse (next)); in do_check_free_chunk()
2160 do_check_inuse_chunk (mstate av, mchunkptr p) in do_check_inuse_chunk() argument
2164 do_check_chunk (av, p); in do_check_inuse_chunk()
2183 do_check_free_chunk (av, prv); in do_check_inuse_chunk()
2186 if (next == av->top) in do_check_inuse_chunk()
2192 do_check_free_chunk (av, next); in do_check_inuse_chunk()
2200 do_check_remalloced_chunk (mstate av, mchunkptr p, INTERNAL_SIZE_T s) in do_check_remalloced_chunk() argument
2206 assert (av == arena_for_chunk (p)); in do_check_remalloced_chunk()
2208 assert (av == &main_arena); in do_check_remalloced_chunk()
2210 assert (av != &main_arena); in do_check_remalloced_chunk()
2213 do_check_inuse_chunk (av, p); in do_check_remalloced_chunk()
2230 do_check_malloced_chunk (mstate av, mchunkptr p, INTERNAL_SIZE_T s) in do_check_malloced_chunk() argument
2233 do_check_remalloced_chunk (av, p, s); in do_check_malloced_chunk()
2261 do_check_malloc_state (mstate av) in do_check_malloc_state() argument
2279 assert (av->top != 0); in do_check_malloc_state()
2282 if (av->top == initial_top (av)) in do_check_malloc_state()
2289 if (av == &main_arena && contiguous (av)) in do_check_malloc_state()
2290 assert ((char *) mp_.sbrk_base + av->system_mem == in do_check_malloc_state()
2291 (char *) av->top + chunksize (av->top)); in do_check_malloc_state()
2302 p = fastbin (av, i); in do_check_malloc_state()
2316 if (av == &main_arena && i > max_fast_bin) in do_check_malloc_state()
2325 do_check_inuse_chunk (av, p); in do_check_malloc_state()
2336 b = bin_at (av, i); in do_check_malloc_state()
2341 unsigned int binbit = get_binmap (av, i); in do_check_malloc_state()
2352 do_check_free_chunk (av, p); in do_check_malloc_state()
2391 (q != av->top && inuse (q) && in do_check_malloc_state()
2394 do_check_inuse_chunk (av, q); in do_check_malloc_state()
2399 check_chunk (av, av->top); in do_check_malloc_state()
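
Among its other walks, do_check_malloc_state cross-checks the binmap against the bins (line 2341): a clear binmap bit must mean an empty bin, while a set bit is only a hint that the bin may be non-empty. A small model of that invariant, with one 64-bit word standing in for glibc's av->binmap[] array:

#include <assert.h>
#include <stdbool.h>

#define NBINS 64

struct chunk { struct chunk *fd, *bk; };

/* Hypothetical arena slice: bins plus a one-word binmap, standing in
   for the av->binmap[] / get_binmap () machinery. */
struct arena
{
  struct chunk bins[NBINS];
  unsigned long long binmap;   /* bit i set => bin i may be non-empty */
};

static bool
bin_empty (const struct arena *av, int i)
{
  return av->bins[i].fd == &av->bins[i];
}

/* The binmap invariant from do_check_malloc_state: a clear bit means
   the bin must be empty; a set bit carries no guarantee. */
static void
check_binmap (const struct arena *av)
{
  for (int i = 0; i < NBINS; i++)
    if (!(av->binmap & (1ULL << i)))
      assert (bin_empty (av, i));
}

int
main (void)
{
  struct arena av = { .binmap = 0 };
  for (int i = 0; i < NBINS; i++)
    av.bins[i].fd = av.bins[i].bk = &av.bins[i];
  check_binmap (&av);   /* all bins empty, all bits clear: consistent */
  return 0;
}
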
2420 sysmalloc_mmap (INTERNAL_SIZE_T nb, size_t pagesize, int extra_flags, mstate av) in sysmalloc_mmap() argument
2496 check_chunk (av, p); in sysmalloc_mmap()
2510 size_t pagesize, int extra_flags, mstate av) in sysmalloc_mmap_fallback() argument
2515 if (contiguous (av)) in sysmalloc_mmap_fallback()
2540 set_noncontiguous (av); in sysmalloc_mmap_fallback()
2547 sysmalloc (INTERNAL_SIZE_T nb, mstate av) in sysmalloc() argument
2579 if (av == NULL in sysmalloc()
2589 mm = sysmalloc_mmap (nb, mp_.hp_pagesize, mp_.hp_flags, av); in sysmalloc()
2594 mm = sysmalloc_mmap (nb, pagesize, 0, av); in sysmalloc()
2601 if (av == NULL) in sysmalloc()
2606 old_top = av->top; in sysmalloc()
2617 assert ((old_top == initial_top (av) && old_size == 0) || in sysmalloc()
2626 if (av != &main_arena) in sysmalloc()
2637 av->system_mem += old_heap->size - old_heap_size; in sysmalloc()
2644 heap->ar_ptr = av; in sysmalloc()
2646 av->system_mem += heap->size; in sysmalloc()
2648 top (av) = chunk_at_offset (heap, sizeof (*heap)); in sysmalloc()
2649 set_head (top (av), (heap->size - sizeof (*heap)) | PREV_INUSE); in sysmalloc()
2665 _int_free (av, old_top, 1); in sysmalloc()
2678 char *mm = sysmalloc_mmap (nb, pagesize, 0, av); in sysmalloc()
2695 if (contiguous (av)) in sysmalloc()
2749 mp_.hp_flags, av); in sysmalloc()
2753 MMAP_AS_MORECORE_SIZE, 0, av); in sysmalloc()
2766 av->system_mem += size; in sysmalloc()
2775 else if (contiguous (av) && old_size && brk < old_end) in sysmalloc()
2806 if (contiguous (av)) in sysmalloc()
2810 av->system_mem += brk - old_end; in sysmalloc()
2895 av->top = (mchunkptr) aligned_brk; in sysmalloc()
2896 set_head (av->top, (snd_brk - aligned_brk + correction) | PREV_INUSE); in sysmalloc()
2897 av->system_mem += correction; in sysmalloc()
2933 _int_free (av, old_top, 1); in sysmalloc()
2941 if ((unsigned long) av->system_mem > (unsigned long) (av->max_system_mem)) in sysmalloc()
2942 av->max_system_mem = av->system_mem; in sysmalloc()
2943 check_malloc_state (av); in sysmalloc()
2946 p = av->top; in sysmalloc()
2954 av->top = remainder; in sysmalloc()
2955 set_head (p, nb | PREV_INUSE | (av != &main_arena ? NON_MAIN_ARENA : 0)); in sysmalloc()
2957 check_malloced_chunk (av, p, nb); in sysmalloc()
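
The tail of sysmalloc (lines 2946-2957), once av->top is large enough, splits the request off the front of the top chunk and keeps the remainder as the new top. A flat-memory sketch of that split, with a hypothetical arena struct and an illustrative MINSIZE:

#include <stdio.h>
#include <stddef.h>

#define MINSIZE 32   /* illustrative minimum chunk size */

/* Hypothetical flat model: the top chunk as a pointer plus a size. */
struct arena
{
  char *top;         /* av->top */
  size_t top_size;   /* chunksize (av->top) */
};

static void *
split_from_top (struct arena *av, size_t nb)
{
  if (av->top_size < nb + MINSIZE)
    return NULL;       /* caller would have to grow the heap first */
  void *p = av->top;   /* p = av->top */
  av->top += nb;       /* remainder becomes the new top ... */
  av->top_size -= nb;  /* ... as in av->top = remainder */
  return p;
}

int
main (void)
{
  static char heap[4096];
  struct arena av = { heap, sizeof heap };
  void *p = split_from_top (&av, 256);
  printf ("p=%p new top=%p size=%zu\n", p, (void *) av.top, av.top_size);
  return 0;
}
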
2977 systrim (size_t pad, mstate av) in systrim() argument
2986 top_size = chunksize (av->top); in systrim()
3008 if (current_brk == (char *) (av->top) + top_size) in systrim()
3032 av->system_mem -= released; in systrim()
3033 set_head (av->top, (top_size - released) | PREV_INUSE); in systrim()
3034 check_malloc_state (av); in systrim()
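
systrim shrinks the top chunk back toward the OS, but only by whole pages and only past the caller's pad (the released amount is then subtracted from av->system_mem, line 3032). A simplified sketch of just the size calculation, assuming illustrative MINSIZE and PAGESIZE constants:

#include <stdio.h>
#include <stddef.h>

#define MINSIZE  32      /* illustrative */
#define PAGESIZE 4096

/* How much of a top chunk of top_size bytes can go back to the OS,
   keeping 'pad' bytes plus one minimal chunk, rounded down to pages. */
static size_t
trim_extra (size_t top_size, size_t pad)
{
  if (top_size <= pad + MINSIZE + PAGESIZE - 1)
    return 0;                                  /* nothing releasable */
  size_t extra = top_size - pad - MINSIZE - 1;
  return (extra / PAGESIZE) * PAGESIZE;        /* page-align downward */
}

int
main (void)
{
  /* Prints 913408: most of a 1 MiB top minus a 128 KiB pad. */
  printf ("release %zu bytes\n", trim_extra (1 << 20, 128 << 10));
  return 0;
}
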
3625 mstate av; in __libc_calloc() local
3648 av = &main_arena; in __libc_calloc()
3650 arena_get (av, sz); in __libc_calloc()
3652 if (av) in __libc_calloc()
3657 oldtop = top (av); in __libc_calloc()
3658 oldtopsize = chunksize (top (av)); in __libc_calloc()
3661 if (av == &main_arena && in __libc_calloc()
3662 oldtopsize < mp_.sbrk_base + av->max_system_mem - (char *) oldtop) in __libc_calloc()
3663 oldtopsize = (mp_.sbrk_base + av->max_system_mem - (char *) oldtop); in __libc_calloc()
3665 if (av != &main_arena) in __libc_calloc()
3679 mem = _int_malloc (av, sz); in __libc_calloc()
3682 av == arena_for_chunk (mem2chunk (mem))); in __libc_calloc()
3686 if (mem == 0 && av != NULL) in __libc_calloc()
3689 av = arena_get_retry (av, sz); in __libc_calloc()
3690 mem = _int_malloc (av, sz); in __libc_calloc()
3693 if (av != NULL) in __libc_calloc()
3694 __libc_lock_unlock (av->mutex); in __libc_calloc()
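
__libc_calloc snapshots top (av) and its size before calling _int_malloc (lines 3657-3663) so it can skip the memset when the returned block was carved entirely from previously untouched top memory, which the OS delivered zeroed. A toy model of that skip; the real test compares the chunk against oldtop/oldtopsize, while this hypothetical bump allocator makes every block fresh, so the clear is always skipped here:

#include <string.h>
#include <stdio.h>
#include <stddef.h>

static _Alignas (max_align_t) char arena_mem[4096];  /* OS-zeroed stand-in */
static char *fake_top = arena_mem;

static void *
alloc_from_top (size_t sz)
{
  void *p = fake_top;
  fake_top += sz;
  return p;
}

static void *
calloc_sketch (size_t sz)
{
  char *oldtop = fake_top;          /* oldtop = top (av) */
  void *mem = alloc_from_top (sz);
  if ((char *) mem >= oldtop)
    return mem;                     /* fresh top memory: already zero */
  memset (mem, 0, sz);              /* recycled memory: must be cleared */
  return mem;
}

int
main (void)
{
  int *p = calloc_sketch (16 * sizeof (int));
  printf ("%d\n", p[0]);            /* prints 0 without any memset */
  return 0;
}
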
3770 _int_malloc (mstate av, size_t bytes) in _int_malloc() argument
3811 if (__glibc_unlikely (av == NULL)) in _int_malloc()
3813 void *p = sysmalloc (nb, av); in _int_malloc()
3841 mfastbinptr *fb = &fastbin (av, idx); in _int_malloc()
3859 check_remalloced_chunk (av, victim, nb); in _int_malloc()
3904 bin = bin_at (av, idx); in _int_malloc()
3915 if (av != &main_arena) in _int_malloc()
3917 check_malloced_chunk (av, victim, nb); in _int_malloc()
3934 if (av != &main_arena) in _int_malloc()
3964 if (atomic_load_relaxed (&av->have_fastchunks)) in _int_malloc()
3965 malloc_consolidate (av); in _int_malloc()
3994 while ((victim = unsorted_chunks (av)->bk) != unsorted_chunks (av)) in _int_malloc()
4001 || __glibc_unlikely (size > av->system_mem)) in _int_malloc()
4004 || __glibc_unlikely (chunksize_nomask (next) > av->system_mem)) in _int_malloc()
4009 || __glibc_unlikely (victim->fd != unsorted_chunks (av))) in _int_malloc()
4023 bck == unsorted_chunks (av) && in _int_malloc()
4024 victim == av->last_remainder && in _int_malloc()
4030 unsorted_chunks (av)->bk = unsorted_chunks (av)->fd = remainder; in _int_malloc()
4031 av->last_remainder = remainder; in _int_malloc()
4032 remainder->bk = remainder->fd = unsorted_chunks (av); in _int_malloc()
4040 (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_malloc()
4044 check_malloced_chunk (av, victim, nb); in _int_malloc()
4053 unsorted_chunks (av)->bk = bck; in _int_malloc()
4054 bck->fd = unsorted_chunks (av); in _int_malloc()
4061 if (av != &main_arena) in _int_malloc()
4076 check_malloced_chunk (av, victim, nb); in _int_malloc()
4090 bck = bin_at (av, victim_index); in _int_malloc()
4096 bck = bin_at (av, victim_index); in _int_malloc()
4147 mark_bin (av, victim_index); in _int_malloc()
4185 bin = bin_at (av, idx); in _int_malloc()
4205 unlink_chunk (av, victim); in _int_malloc()
4211 if (av != &main_arena) in _int_malloc()
4220 bck = unsorted_chunks (av); in _int_malloc()
4234 (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_malloc()
4238 check_malloced_chunk (av, victim, nb); in _int_malloc()
4257 bin = bin_at (av, idx); in _int_malloc()
4259 map = av->binmap[block]; in _int_malloc()
4272 while ((map = av->binmap[block]) == 0); in _int_malloc()
4274 bin = bin_at (av, (block << BINMAPSHIFT)); in _int_malloc()
4292 av->binmap[block] = map &= ~bit; /* Write through */ in _int_malloc()
4307 unlink_chunk (av, victim); in _int_malloc()
4313 if (av != &main_arena) in _int_malloc()
4324 bck = unsorted_chunks (av); in _int_malloc()
4335 av->last_remainder = remainder; in _int_malloc()
4342 (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_malloc()
4346 check_malloced_chunk (av, victim, nb); in _int_malloc()
4369 victim = av->top; in _int_malloc()
4372 if (__glibc_unlikely (size > av->system_mem)) in _int_malloc()
4379 av->top = remainder; in _int_malloc()
4381 (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_malloc()
4384 check_malloced_chunk (av, victim, nb); in _int_malloc()
4392 else if (atomic_load_relaxed (&av->have_fastchunks)) in _int_malloc()
4394 malloc_consolidate (av); in _int_malloc()
4407 void *p = sysmalloc (nb, av); in _int_malloc()
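
The first-chance path of _int_malloc pops the matching fastbin LIFO (line 3841) and re-checks the victim's size class, aborting on mismatch with glibc's "malloc(): memory corruption (fast)". A standalone sketch with a hypothetical 16-byte-granule fastbin_index:

#include <stdio.h>
#include <stdlib.h>
#include <stddef.h>

#define NFASTBINS 10

struct fchunk
{
  size_t size;         /* stand-in for chunksize (victim) */
  struct fchunk *fd;   /* next free chunk in this fastbin */
};

static struct fchunk *fastbins[NFASTBINS];

static size_t
fastbin_index (size_t size)
{
  return size / 16;    /* hypothetical size-class granule */
}

static void *
fastbin_pop (size_t idx)
{
  struct fchunk *victim = fastbins[idx];
  if (victim == NULL)
    return NULL;
  /* Integrity check mirroring _int_malloc: the popped chunk must
     actually belong to the bin it came from. */
  if (fastbin_index (victim->size) != idx)
    {
      fprintf (stderr, "malloc(): memory corruption (fast)\n");
      abort ();
    }
  fastbins[idx] = victim->fd;   /* LIFO: head advances to the next chunk */
  return victim;
}

int
main (void)
{
  static struct fchunk c = { 48, NULL };
  fastbins[fastbin_index (c.size)] = &c;
  printf ("%p\n", fastbin_pop (3));   /* 48 / 16 == 3 */
  return 0;
}
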
4420 _int_free (mstate av, mchunkptr p, int have_lock) in _int_free() argument
4445 check_inuse_chunk(av, p); in _int_free()
4500 && (chunk_at_offset(p, size) != av->top) in _int_free()
4507 >= av->system_mem, 0)) in _int_free()
4515 __libc_lock_lock (av->mutex); in _int_free()
4517 || chunksize (chunk_at_offset (p, size)) >= av->system_mem); in _int_free()
4518 __libc_lock_unlock (av->mutex); in _int_free()
4527 atomic_store_relaxed (&av->have_fastchunks, true); in _int_free()
4529 fb = &fastbin (av, idx); in _int_free()
4576 __libc_lock_lock (av->mutex); in _int_free()
4582 if (__glibc_unlikely (p == av->top)) in _int_free()
4585 if (__builtin_expect (contiguous (av) in _int_free()
4587 >= ((char *) av->top + chunksize(av->top)), 0)) in _int_free()
4595 || __builtin_expect (nextsize >= av->system_mem, 0)) in _int_free()
4607 unlink_chunk (av, p); in _int_free()
4610 if (nextchunk != av->top) { in _int_free()
4616 unlink_chunk (av, nextchunk); in _int_free()
4627 bck = unsorted_chunks(av); in _int_free()
4644 check_free_chunk(av, p); in _int_free()
4655 av->top = p; in _int_free()
4656 check_chunk(av, p); in _int_free()
4673 if (atomic_load_relaxed (&av->have_fastchunks)) in _int_free()
4674 malloc_consolidate(av); in _int_free()
4676 if (av == &main_arena) { in _int_free()
4678 if ((unsigned long)(chunksize(av->top)) >= in _int_free()
4680 systrim(mp_.top_pad, av); in _int_free()
4685 heap_info *heap = heap_for_ptr(top(av)); in _int_free()
4687 assert(heap->ar_ptr == av); in _int_free()
4693 __libc_lock_unlock (av->mutex); in _int_free()
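
The fast path of _int_free sets have_fastchunks (line 4527) and pushes the chunk onto its fastbin after a cheap double-free check: the chunk must not already be the bin head, glibc's "double free or corruption (fasttop)". A sketch of that push; the last call deliberately aborts to show the check firing:

#include <stdio.h>
#include <stdlib.h>

struct fchunk { struct fchunk *fd; };

static struct fchunk *fastbin_head;

static void
fastbin_push (struct fchunk *p)
{
  if (p == fastbin_head)
    {
      fprintf (stderr, "double free or corruption (fasttop)\n");
      abort ();
    }
  p->fd = fastbin_head;   /* LIFO: new free becomes the head */
  fastbin_head = p;
}

int
main (void)
{
  static struct fchunk a, b;
  fastbin_push (&a);
  fastbin_push (&b);   /* fine: list is b -> a */
  fastbin_push (&b);   /* aborts: b is already the fasttop */
  return 0;
}
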
4714 static void malloc_consolidate(mstate av) in malloc_consolidate() argument
4730 atomic_store_relaxed (&av->have_fastchunks, false); in malloc_consolidate()
4732 unsorted_bin = unsorted_chunks(av); in malloc_consolidate()
4742 maxfb = &fastbin (av, NFASTBINS - 1); in malloc_consolidate()
4743 fb = &fastbin (av, 0); in malloc_consolidate()
4754 if ((&fastbin (av, idx)) != fb) in malloc_consolidate()
4758 check_inuse_chunk(av, p); in malloc_consolidate()
4772 unlink_chunk (av, p); in malloc_consolidate()
4775 if (nextchunk != av->top) { in malloc_consolidate()
4780 unlink_chunk (av, nextchunk); in malloc_consolidate()
4802 av->top = p; in malloc_consolidate()
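
malloc_consolidate drains every fastbin in one pass, coalescing each chunk with free neighbours and either queueing the result on the unsorted bin or merging it into av->top (line 4802). A sketch of just the drain-and-requeue loop, with the coalescing steps omitted:

#include <stdio.h>
#include <stddef.h>

#define NFASTBINS 10

struct fchunk { struct fchunk *fd; };

static struct fchunk *fastbins[NFASTBINS];
static struct fchunk *unsorted;   /* stand-in for unsorted_chunks (av) */

static void
consolidate_sketch (void)
{
  for (int i = 0; i < NFASTBINS; i++)
    {
      struct fchunk *p = fastbins[i];
      fastbins[i] = NULL;          /* detach the whole chain at once */
      while (p != NULL)
        {
          struct fchunk *next = p->fd;
          p->fd = unsorted;        /* push onto the unsorted list */
          unsorted = p;
          p = next;
        }
    }
}

int
main (void)
{
  static struct fchunk a, b;
  a.fd = NULL;
  b.fd = &a;
  fastbins[2] = &b;
  consolidate_sketch ();
  for (struct fchunk *p = unsorted; p != NULL; p = p->fd)
    printf ("%p\n", (void *) p);
  return 0;
}
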
4816 _int_realloc (mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize, in _int_realloc() argument
4830 || __builtin_expect (oldsize >= av->system_mem, 0)) in _int_realloc()
4833 check_inuse_chunk (av, oldp); in _int_realloc()
4841 || __builtin_expect (nextsize >= av->system_mem, 0)) in _int_realloc()
4854 if (next == av->top && in _int_realloc()
4858 set_head_size (oldp, nb | (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_realloc()
4859 av->top = chunk_at_offset (oldp, nb); in _int_realloc()
4860 set_head (av->top, (newsize - nb) | PREV_INUSE); in _int_realloc()
4861 check_inuse_chunk (av, oldp); in _int_realloc()
4866 else if (next != av->top && in _int_realloc()
4872 unlink_chunk (av, next); in _int_realloc()
4878 newmem = _int_malloc (av, nb - MALLOC_ALIGN_MASK); in _int_realloc()
4900 _int_free (av, oldp, 1); in _int_realloc()
4901 check_inuse_chunk (av, newp); in _int_realloc()
4915 set_head_size (newp, newsize | (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_realloc()
4923 set_head_size (newp, nb | (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_realloc()
4925 (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_realloc()
4928 _int_free (av, remainder, 1); in _int_realloc()
4931 check_inuse_chunk (av, newp); in _int_realloc()
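
_int_realloc first tries to grow in place, absorbing av->top or a free next chunk (lines 4854-4872), and only then falls back to allocate-copy-free (lines 4878-4900). A reduced decision sketch, with a hypothetical spare_after parameter standing in for the adjacent-space tests:

#include <stdlib.h>
#include <string.h>
#include <stdio.h>

/* Grow in place when enough spare room follows the block; otherwise
   allocate, copy the old payload, and free the old block. */
static void *
realloc_sketch (void *old, size_t oldsize, size_t newsize,
                size_t spare_after)
{
  if (newsize <= oldsize + spare_after)
    return old;                 /* extended in place, no copy needed */

  void *new = malloc (newsize); /* slow path: _int_malloc */
  if (new == NULL)
    return NULL;
  memcpy (new, old, oldsize);   /* copy the old payload */
  free (old);                   /* _int_free (av, oldp, 1) */
  return new;
}

int
main (void)
{
  char *p = malloc (16);
  strcpy (p, "hello");
  p = realloc_sketch (p, 16, 64, 0);   /* no spare room: copies */
  puts (p);
  free (p);
  return 0;
}
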
4940 _int_memalign (mstate av, size_t alignment, size_t bytes) in _int_memalign() argument
4968 m = (char *) (_int_malloc (av, nb + alignment + MINSIZE)); in _int_memalign()
5003 (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_memalign()
5005 set_head_size (p, leadsize | (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_memalign()
5006 _int_free (av, p, 1); in _int_memalign()
5022 (av != &main_arena ? NON_MAIN_ARENA : 0)); in _int_memalign()
5024 _int_free (av, remainder, 1); in _int_memalign()
5028 check_inuse_chunk (av, p); in _int_memalign()
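
_int_memalign over-allocates, rounds the pointer up to the requested power-of-two boundary, and gives the skipped lead back via _int_free (line 5006). A sketch of only the rounding arithmetic; unlike glibc it does not keep the lead as a valid chunk, so the result here cannot be passed to free ():

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* alignment must be a power of two for the mask trick to work. */
static void *
memalign_sketch (size_t alignment, size_t bytes)
{
  char *m = malloc (bytes + alignment - 1);   /* worst-case slack */
  if (m == NULL)
    return NULL;
  /* Round up to the next multiple of alignment. */
  uintptr_t aligned = ((uintptr_t) m + alignment - 1) & ~(alignment - 1);
  return (void *) aligned;
}

int
main (void)
{
  void *p = memalign_sketch (64, 100);
  printf ("%p aligned: %d\n", p, (int) (((uintptr_t) p & 63) == 0));
  return 0;
}
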
5038 mtrim (mstate av, size_t pad) in mtrim() argument
5041 malloc_consolidate (av); in mtrim()
5051 mbinptr bin = bin_at (av, i); in mtrim()
5087 return result | (av == &main_arena ? systrim (pad, av) : 0); in mtrim()
5150 int_mallinfo (mstate av, struct mallinfo2 *m) in int_mallinfo() argument
5160 check_malloc_state (av); in int_mallinfo()
5163 avail = chunksize (av->top); in int_mallinfo()
5172 for (p = fastbin (av, i); in int_mallinfo()
5189 b = bin_at (av, i); in int_mallinfo()
5200 m->uordblks += av->system_mem - avail; in int_mallinfo()
5201 m->arena += av->system_mem; in int_mallinfo()
5203 if (av == &main_arena) in int_mallinfo()
5208 m->keepcost = chunksize (av->top); in int_mallinfo()
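
int_mallinfo sums free space across the top chunk, fastbins, and regular bins, then reports in-use memory as av->system_mem minus that total (line 5200). A sketch of the accounting over a single hypothetical free list:

#include <stdio.h>
#include <stddef.h>

struct fchunk
{
  size_t size;
  struct fchunk *fd;
};

/* Hypothetical arena stats: total OS memory plus one flat free list. */
struct arena_stats
{
  size_t system_mem;
  struct fchunk *free_list;
};

static void
mallinfo_sketch (const struct arena_stats *av, size_t *arena,
                 size_t *fordblks, size_t *uordblks)
{
  size_t avail = 0;
  for (struct fchunk *p = av->free_list; p != NULL; p = p->fd)
    avail += p->size;                  /* free bytes across all bins */
  *arena = av->system_mem;             /* total obtained from the OS */
  *fordblks = avail;                   /* total free */
  *uordblks = av->system_mem - avail;  /* total in use */
}

int
main (void)
{
  static struct fchunk a = { 128, NULL }, b = { 256, &a };
  struct arena_stats av = { 4096, &b };
  size_t arena, freeb, used;
  mallinfo_sketch (&av, &arena, &freeb, &used);
  printf ("arena=%zu free=%zu used=%zu\n", arena, freeb, used);
  return 0;
}
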
5452 mstate av = &main_arena; in __libc_mallopt() local
5457 __libc_lock_lock (av->mutex); in __libc_mallopt()
5463 malloc_consolidate (av); in __libc_mallopt()
5511 __libc_lock_unlock (av->mutex); in __libc_mallopt()
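
__libc_mallopt is the engine behind the public mallopt(3): it takes the main arena's lock, runs malloc_consolidate, applies the requested tunable, and unlocks. Caller-side usage with the real API (mallopt returns 1 on success, 0 on error):

#include <malloc.h>
#include <stdio.h>

int
main (void)
{
  /* Keep up to 256 KiB of free top chunk before trimming back to the OS. */
  if (mallopt (M_TRIM_THRESHOLD, 256 * 1024) == 0)
    fprintf (stderr, "mallopt (M_TRIM_THRESHOLD) failed\n");

  /* Serve requests of 1 MiB and up directly via mmap. */
  if (mallopt (M_MMAP_THRESHOLD, 1024 * 1024) == 0)
    fprintf (stderr, "mallopt (M_MMAP_THRESHOLD) failed\n");

  return 0;
}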