Lines matching refs: ar_ptr (two short sketches of the recurring usage patterns follow the listing)

1244     performs a bit mask operation and indirection through the ar_ptr
1744 #define fastbin(ar_ptr, idx) ((ar_ptr)->fastbinsY[idx])
2644 heap->ar_ptr = av;
3237 mstate ar_ptr;
3244 arena_get (ar_ptr, bytes);
3245 victim = _int_malloc (ar_ptr, bytes);
3246 if (!victim && ar_ptr != NULL)
3248 ar_ptr = arena_get_retry (ar_ptr, bytes);
3249 victim = _int_malloc (ar_ptr, bytes);
3253 if (ar_ptr != NULL)
3254 __libc_lock_unlock (ar_ptr->mutex);
3288 mstate ar_ptr;
3327 arena_get (ar_ptr, bytes);
3329 victim = _int_malloc (ar_ptr, bytes);
3332 if (!victim && ar_ptr != NULL)
3335 ar_ptr = arena_get_retry (ar_ptr, bytes);
3336 victim = _int_malloc (ar_ptr, bytes);
3339 if (ar_ptr != NULL)
3340 __libc_lock_unlock (ar_ptr->mutex);
3345 ar_ptr == arena_for_chunk (mem2chunk (victim)));
3353 mstate ar_ptr;
3390 ar_ptr = arena_for_chunk (p);
3391 _int_free (ar_ptr, p, 0);
3401 mstate ar_ptr;
3431 ar_ptr = NULL;
3435 ar_ptr = arena_for_chunk (oldp);
3485 newp = _int_realloc (ar_ptr, oldp, oldsize, nb);
3487 ar_ptr == arena_for_chunk (mem2chunk (newp)));
3492 __libc_lock_lock (ar_ptr->mutex);
3494 newp = _int_realloc (ar_ptr, oldp, oldsize, nb);
3496 __libc_lock_unlock (ar_ptr->mutex);
3498 ar_ptr == arena_for_chunk (mem2chunk (newp)));
3510 _int_free (ar_ptr, oldp, 0);
3531 mstate ar_ptr;
3568 arena_get (ar_ptr, bytes + alignment + MINSIZE);
3570 p = _int_memalign (ar_ptr, alignment, bytes);
3571 if (!p && ar_ptr != NULL)
3574 ar_ptr = arena_get_retry (ar_ptr, bytes);
3575 p = _int_memalign (ar_ptr, alignment, bytes);
3578 if (ar_ptr != NULL)
3579 __libc_lock_unlock (ar_ptr->mutex);
3582 ar_ptr == arena_for_chunk (mem2chunk (p)));
4687 assert(heap->ar_ptr == av);
5103 mstate ar_ptr = &main_arena;
5106 __libc_lock_lock (ar_ptr->mutex);
5107 result |= mtrim (ar_ptr, s);
5108 __libc_lock_unlock (ar_ptr->mutex);
5110 ar_ptr = ar_ptr->next;
5112 while (ar_ptr != &main_arena);
5217 mstate ar_ptr;
5223 ar_ptr = &main_arena;
5226 __libc_lock_lock (ar_ptr->mutex);
5227 int_mallinfo (ar_ptr, &m);
5228 __libc_lock_unlock (ar_ptr->mutex);
5230 ar_ptr = ar_ptr->next;
5232 while (ar_ptr != &main_arena);
5267 mstate ar_ptr;
5275 for (i = 0, ar_ptr = &main_arena;; i++)
5280 __libc_lock_lock (ar_ptr->mutex);
5281 int_mallinfo (ar_ptr, &mi);
5287 dump_heap (heap_for_ptr (top (ar_ptr)));
5291 __libc_lock_unlock (ar_ptr->mutex);
5292 ar_ptr = ar_ptr->next;
5293 if (ar_ptr == &main_arena)
5729 mstate ar_ptr = &main_arena;
5747 __libc_lock_lock (ar_ptr->mutex);
5752 avail = chunksize (ar_ptr->top);
5757 mchunkptr p = fastbin (ar_ptr, i);
5790 bin = bin_at (ar_ptr, i);
5819 if (ar_ptr != &main_arena)
5822 heap_info *heap = heap_for_ptr (top (ar_ptr));
5833 __libc_lock_unlock (ar_ptr->mutex);
5853 total_system += ar_ptr->system_mem;
5854 total_max_system += ar_ptr->max_system_mem;
5862 ar_ptr->system_mem, ar_ptr->max_system_mem);
5864 if (ar_ptr != &main_arena)
5879 ar_ptr->system_mem, ar_ptr->system_mem);
5880 total_aspace += ar_ptr->system_mem;
5881 total_aspace_mprotect += ar_ptr->system_mem;
5885 ar_ptr = ar_ptr->next;
5887 while (ar_ptr != &main_arena);
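
The allocation entry points in the listing (the hits around source lines 3237-3254, 3288-3345 and 3531-3582) share one shape: obtain a locked arena, try the internal allocator, retry on another arena if that fails while an arena is still held, then drop the lock and assert that the result belongs to the arena that was used. The following is a minimal standalone model of that control flow under stated assumptions, not the glibc code itself: the arena struct, the arena_get/arena_get_retry stand-ins, try_alloc and model_libc_malloc are simplified placeholders for glibc's mstate, arena_get()/arena_get_retry(), _int_malloc() and the public allocation wrappers, and plain pthread mutexes stand in for __libc_lock_lock/__libc_lock_unlock.

/* Minimal model of the lock / allocate / retry / unlock shape seen in
   the allocation entry points above.  Every type and helper here is a
   simplified stand-in, not a glibc definition.  Compile with -pthread. */
#include <pthread.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

typedef struct arena
{
  pthread_mutex_t mutex;        /* models ar_ptr->mutex                 */
  struct arena *next;           /* arenas are linked in a circular list */
} arena;

static arena main_arena = { PTHREAD_MUTEX_INITIALIZER, &main_arena };

/* Stand-in for arena_get(): pick an arena and return it locked.
   The real macro can also fail to find a usable arena; this one never
   does, but the NULL guards below are kept to mirror the listing.      */
static arena *
arena_get (size_t bytes)
{
  (void) bytes;
  pthread_mutex_lock (&main_arena.mutex);
  return &main_arena;
}

/* Stand-in for arena_get_retry(): come back with a (locked) arena to
   try again on; here, trivially the same one.                          */
static arena *
arena_get_retry (arena *ar_ptr, size_t bytes)
{
  (void) bytes;
  return ar_ptr;
}

/* Stand-in for _int_malloc(); just defers to the system allocator.     */
static void *
try_alloc (arena *ar_ptr, size_t bytes)
{
  (void) ar_ptr;
  return malloc (bytes);
}

static void *
model_libc_malloc (size_t bytes)
{
  arena *ar_ptr;
  void *victim;

  ar_ptr = arena_get (bytes);               /* locked on success        */
  victim = try_alloc (ar_ptr, bytes);

  /* First attempt failed but an arena is held: retry.                  */
  if (!victim && ar_ptr != NULL)
    {
      ar_ptr = arena_get_retry (ar_ptr, bytes);
      victim = try_alloc (ar_ptr, bytes);
    }

  if (ar_ptr != NULL)
    pthread_mutex_unlock (&ar_ptr->mutex);  /* __libc_lock_unlock       */

  return victim;
}

int
main (void)
{
  char *p = model_libc_malloc (32);
  printf ("got %p\n", (void *) p);
  free (p);
  return 0;
}

The `if (ar_ptr != NULL)` guards are kept because, per the glibc comments, a retry is attempted only when a usable arena was found in the first place; the assert lines in the listing (e.g. source lines 3345 and 3582) then check that the returned chunk really belongs to the arena that was locked.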
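
The trim and statistics hits near the end of the listing (source lines 5103-5112, 5217-5232, 5267-5293 and 5729-5887) follow a second recurring pattern: start at &main_arena, do the per-arena work under ar_ptr->mutex, step to ar_ptr->next, and stop once the walk comes back around to &main_arena. A standalone sketch of that do/while loop follows, again with a simplified arena type and pthread mutexes standing in for glibc's mstate and __libc_lock_* (the per_arena_work callback is a placeholder for mtrim, int_mallinfo and similar per-arena helpers; the type is repeated so the file stands alone).

/* Model of the circular arena walk used by the trim/statistics code in
   the listing.  "arena" and "per_arena_work" are simplified stand-ins,
   not glibc definitions.  Compile with -pthread.                       */
#include <pthread.h>

typedef struct arena
{
  pthread_mutex_t mutex;
  struct arena *next;           /* circular: ends back at main_arena    */
} arena;

static arena main_arena = { PTHREAD_MUTEX_INITIALIZER, &main_arena };

/* Visit every arena exactly once, holding its lock during the visit,
   mirroring the do { lock; work; unlock; next } while (!= &main_arena)
   shape of the malloc_trim/mallinfo hits above.                        */
static void
for_each_arena (void (*per_arena_work) (arena *))
{
  arena *ar_ptr = &main_arena;
  do
    {
      pthread_mutex_lock (&ar_ptr->mutex);    /* __libc_lock_lock       */
      per_arena_work (ar_ptr);                /* e.g. mtrim, int_mallinfo */
      pthread_mutex_unlock (&ar_ptr->mutex);  /* __libc_lock_unlock     */

      ar_ptr = ar_ptr->next;                  /* advance around the ring */
    }
  while (ar_ptr != &main_arena);              /* stop after a full circle */
}

/* Trivial visitor so the sketch links and runs on its own.             */
static void
visit_noop (arena *ar_ptr)
{
  (void) ar_ptr;
}

int
main (void)
{
  for_each_arena (visit_noop);
  return 0;
}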