Lines matching defs:p (glibc malloc.c)

75 free(void* p);
76 realloc(void* p, size_t n);
87 malloc_usable_size(void* p);
354 The REALLOC_ZERO_BYTES_FREES macro controls the behavior of realloc (p, 0)
355 when p is nonnull. If the macro is nonzero, the realloc call returns NULL;
357 p is freed. Glibc uses a nonzero REALLOC_ZERO_BYTES_FREES, which
361 and it might not even free p.
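The lines above describe glibc's choice of a nonzero REALLOC_ZERO_BYTES_FREES. A minimal caller-side sketch of what that choice means in practice; portable code cannot rely on either outcome, since the C standard leaves realloc (p, 0) to the implementation (see line 361):

    #include <stdio.h>
    #include <stdlib.h>

    int main (void)
    {
      void *p = malloc (16);
      if (p == NULL)
        return 1;
      /* With glibc's nonzero REALLOC_ZERO_BYTES_FREES, this frees p and
         returns NULL.  Other implementations may instead return a live
         zero-size allocation that still has to be freed.  */
      void *q = realloc (p, 0);
      if (q == NULL)
        puts ("realloc (p, 0) returned NULL; p was freed");
      else
        free (q);
      return 0;
    }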
615 free(void* p)
616 Releases the chunk of memory pointed to by p, that had been previously
618 It has no effect if p is null. It can have arbitrary (i.e., bad!)
619 effects if p has already been freed.
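Since free is a no-op on a null pointer but has "arbitrary (i.e., bad!)" effects on an already-freed one, a small sketch of the usual defensive idiom:

    #include <stdlib.h>

    int main (void)
    {
      char *p = malloc (64);
      free (p);      /* releases the chunk; would be a no-op if p were NULL */
      p = NULL;      /* a stray second free (p) is now harmless rather than undefined */
      free (p);
      return 0;
    }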
636 realloc(void* p, size_t n)
638 as does chunk p up to the minimum of (n, p's size) bytes, or null
641 The returned pointer may or may not be the same as p. The algorithm
642 prefers extending p when possible, otherwise it employs the
645 If p is null, realloc is equivalent to malloc.
648 ANSI) and p is NOT freed.
650 if n is for fewer bytes than already held by p, the newly unused
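Because a failed realloc leaves p allocated (line 648 above), callers should not overwrite their only copy of the pointer. A minimal sketch of the usual idiom, using a hypothetical grow_buffer helper:

    #include <stdlib.h>

    /* Hypothetical helper: grow *bufp to n bytes.  On failure the old block
       (and its contents) stays valid and still owned by the caller.  */
    static int grow_buffer (void **bufp, size_t n)
    {
      void *tmp = realloc (*bufp, n);
      if (tmp == NULL)
        return -1;
      *bufp = tmp;   /* may or may not be the same address as before */
      return 0;
    }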
751 malloc_usable_size(void* p);
761 p = malloc(n);
762 assert(malloc_usable_size(p) >= 256);
1119 static void munmap_chunk(mchunkptr p);
1121 static mchunkptr mremap_chunk(mchunkptr p, size_t new_size);
1304 #define chunk2mem(p) ((void*)((char*)(p) + CHUNK_HDR_SZ))
1307 #define chunk2mem_tag(p) ((void*)tag_at ((char*)(p) + CHUNK_HDR_SZ))
1324 #define misaligned_chunk(p) \
1325 ((uintptr_t)(MALLOC_ALIGNMENT == CHUNK_HDR_SZ ? (p) : chunk2mem (p)) \
1375 #define prev_inuse(p) ((p)->mchunk_size & PREV_INUSE)
1382 #define chunk_is_mmapped(p) ((p)->mchunk_size & IS_MMAPPED)
1391 #define chunk_main_arena(p) (((p)->mchunk_size & NON_MAIN_ARENA) == 0)
1394 #define set_non_main_arena(p) ((p)->mchunk_size |= NON_MAIN_ARENA)
1408 #define chunksize(p) (chunksize_nomask (p) & ~(SIZE_BITS))
1411 #define chunksize_nomask(p) ((p)->mchunk_size)
1414 #define next_chunk(p) ((mchunkptr) (((char *) (p)) + chunksize (p)))
1417 #define prev_size(p) ((p)->mchunk_prev_size)
1420 #define set_prev_size(p, sz) ((p)->mchunk_prev_size = (sz))
1423 #define prev_chunk(p) ((mchunkptr) (((char *) (p)) - prev_size (p)))
1426 #define chunk_at_offset(p, s) ((mchunkptr) (((char *) (p)) + (s)))
1428 /* extract p's inuse bit */
1429 #define inuse(p) \
1430 ((((mchunkptr) (((char *) (p)) + chunksize (p)))->mchunk_size) & PREV_INUSE)
1433 #define set_inuse(p) \
1434 ((mchunkptr) (((char *) (p)) + chunksize (p)))->mchunk_size |= PREV_INUSE
1436 #define clear_inuse(p) \
1437 ((mchunkptr) (((char *) (p)) + chunksize (p)))->mchunk_size &= ~(PREV_INUSE)
1441 #define inuse_bit_at_offset(p, s) \
1442 (((mchunkptr) (((char *) (p)) + (s)))->mchunk_size & PREV_INUSE)
1444 #define set_inuse_bit_at_offset(p, s) \
1445 (((mchunkptr) (((char *) (p)) + (s)))->mchunk_size |= PREV_INUSE)
1447 #define clear_inuse_bit_at_offset(p, s) \
1448 (((mchunkptr) (((char *) (p)) + (s)))->mchunk_size &= ~(PREV_INUSE))
1452 #define set_head_size(p, s) ((p)->mchunk_size = (((p)->mchunk_size & SIZE_BITS) | (s)))
1455 #define set_head(p, s) ((p)->mchunk_size = (s))
1458 #define set_foot(p, s) (((mchunkptr) ((char *) (p) + (s)))->mchunk_prev_size = (s))
1465 #define memsize(p) \
1467 chunksize (p) - CHUNK_HDR_SZ : \
1468 chunksize (p) - CHUNK_HDR_SZ + (chunk_is_mmapped (p) ? 0 : SIZE_SZ))
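The macros above all operate on the two-word chunk header. A simplified, self-contained illustration of that layout, reusing the field names from the listing but with assumed sizes and flag values (the real definitions in malloc.c also cover memory tagging and the NON_MAIN_ARENA bit):

    #include <stddef.h>

    struct demo_chunk
    {
      size_t mchunk_prev_size;   /* size of the previous chunk, if it is free */
      size_t mchunk_size;        /* this chunk's size; low bits carry flags    */
    };

    #define DEMO_PREV_INUSE  0x1                     /* previous chunk in use     */
    #define DEMO_IS_MMAPPED  0x2                     /* chunk obtained via mmap   */
    #define DEMO_SIZE_BITS   0x7                     /* the three low flag bits   */
    #define DEMO_HDR_SZ      (2 * sizeof (size_t))   /* stand-in for CHUNK_HDR_SZ */

    /* User memory starts just past the header ...  */
    #define demo_chunk2mem(p)  ((void *) ((char *) (p) + DEMO_HDR_SZ))
    /* ... and the masked size doubles as the distance to the next header.  */
    #define demo_chunksize(p)  ((p)->mchunk_size & ~(size_t) DEMO_SIZE_BITS)
    #define demo_next_chunk(p) ((struct demo_chunk *) ((char *) (p) + demo_chunksize (p)))
    #define demo_prev_inuse(p) ((p)->mchunk_size & DEMO_PREV_INUSE)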
1626 unlink_chunk (mstate av, mchunkptr p)
1628 if (chunksize (p) != prev_size (next_chunk (p)))
1631 mchunkptr fd = p->fd;
1632 mchunkptr bk = p->bk;
1634 if (__builtin_expect (fd->bk != p || bk->fd != p, 0))
1639 if (!in_smallbin_range (chunksize_nomask (p)) && p->fd_nextsize != NULL)
1641 if (p->fd_nextsize->bk_nextsize != p
1642 || p->bk_nextsize->fd_nextsize != p)
1647 if (p->fd_nextsize == p)
1651 fd->fd_nextsize = p->fd_nextsize;
1652 fd->bk_nextsize = p->bk_nextsize;
1653 p->fd_nextsize->bk_nextsize = fd;
1654 p->bk_nextsize->fd_nextsize = fd;
1659 p->fd_nextsize->bk_nextsize = p->bk_nextsize;
1660 p->bk_nextsize->fd_nextsize = p->fd_nextsize;
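The core of unlink_chunk is the classic "safe unlinking" check: before splicing p out of its doubly linked bin list, both neighbours must still point back at p. A generic sketch of that check, detached from malloc's chunk layout:

    /* Simplified doubly linked node; malloc.c aborts with
       "corrupted double-linked list" where this sketch returns -1.  */
    struct node { struct node *fd, *bk; };

    static int safe_unlink (struct node *p)
    {
      struct node *fd = p->fd;
      struct node *bk = p->bk;
      if (fd->bk != p || bk->fd != p)
        return -1;          /* list corruption detected, do not unlink */
      fd->bk = bk;
      bk->fd = fd;
      return 0;
    }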
2003 alloc_perturb (char *p, size_t n)
2006 memset (p, perturb_byte ^ 0xff, n);
2010 free_perturb (char *p, size_t n)
2013 memset (p, perturb_byte, n);
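alloc_perturb and free_perturb only run when perturb_byte is nonzero; callers enable it through the M_PERTURB mallopt knob or the MALLOC_PERTURB_ environment variable. A small usage sketch:

    #include <malloc.h>
    #include <stdlib.h>

    int main (void)
    {
      /* Turn on perturbing: new allocations are filled with
         perturb_byte ^ 0xff and freed memory with perturb_byte.  */
      mallopt (M_PERTURB, 0xAA);

      unsigned char *p = malloc (32);
      if (p != NULL)
        {
          /* p[0] is now 0x55 (0xAA ^ 0xff), which makes reads of
             uninitialized or freed memory much easier to spot.  */
          free (p);
        }
      return 0;
    }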
2023 madvise_thp (void *p, INTERNAL_SIZE_T size)
2033 if (__glibc_unlikely (!PTR_IS_ALIGNED (p, GLRO (dl_pagesize))))
2035 void *q = PTR_ALIGN_DOWN (p, GLRO (dl_pagesize));
2036 size += PTR_DIFF (p, q);
2037 p = q;
2040 __madvise (p, size, MADV_HUGEPAGE);
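madvise_thp must hand madvise a page-aligned start address, so it rounds p down and grows the length by the amount it stepped back. A stand-alone sketch of that fix-up (MADV_HUGEPAGE is Linux-specific; the threshold and tunable checks of the real function are omitted):

    #include <stddef.h>
    #include <stdint.h>
    #include <sys/mman.h>
    #include <unistd.h>

    static void advise_hugepage (void *p, size_t size)
    {
      uintptr_t page    = (uintptr_t) sysconf (_SC_PAGESIZE);   /* power of two */
      uintptr_t addr    = (uintptr_t) p;
      uintptr_t aligned = addr & ~(page - 1);                   /* round down   */

      size += addr - aligned;          /* cover the bytes we stepped back over */
      madvise ((void *) aligned, size, MADV_HUGEPAGE);
    }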
2080 do_check_chunk (mstate av, mchunkptr p)
2082 unsigned long sz = chunksize (p);
2087 if (!chunk_is_mmapped (p))
2090 if (p != av->top)
2094 assert (((char *) p) >= min_address);
2095 assert (((char *) p + sz) <= ((char *) (av->top)));
2103 assert (prev_inuse (p));
2111 assert (((char *) p) < min_address || ((char *) p) >= max_address);
2114 assert (((prev_size (p) + sz) & (GLRO (dl_pagesize) - 1)) == 0);
2116 assert (aligned_OK (chunk2mem (p)));
2125 do_check_free_chunk (mstate av, mchunkptr p)
2127 INTERNAL_SIZE_T sz = chunksize_nomask (p) & ~(PREV_INUSE | NON_MAIN_ARENA);
2128 mchunkptr next = chunk_at_offset (p, sz);
2130 do_check_chunk (av, p);
2133 assert (!inuse (p));
2134 assert (!chunk_is_mmapped (p));
2140 assert (aligned_OK (chunk2mem (p)));
2142 assert (prev_size (next_chunk (p)) == sz);
2144 assert (prev_inuse (p));
2148 assert (p->fd->bk == p);
2149 assert (p->bk->fd == p);
2160 do_check_inuse_chunk (mstate av, mchunkptr p)
2164 do_check_chunk (av, p);
2166 if (chunk_is_mmapped (p))
2170 assert (inuse (p));
2172 next = next_chunk (p);
2178 if (!prev_inuse (p))
2181 mchunkptr prv = prev_chunk (p);
2182 assert (next_chunk (prv) == p);
2200 do_check_remalloced_chunk (mstate av, mchunkptr p, INTERNAL_SIZE_T s)
2202 INTERNAL_SIZE_T sz = chunksize_nomask (p) & ~(PREV_INUSE | NON_MAIN_ARENA);
2204 if (!chunk_is_mmapped (p))
2206 assert (av == arena_for_chunk (p));
2207 if (chunk_main_arena (p))
2213 do_check_inuse_chunk (av, p);
2219 assert (aligned_OK (chunk2mem (p)));
2230 do_check_malloced_chunk (mstate av, mchunkptr p, INTERNAL_SIZE_T s)
2233 do_check_remalloced_chunk (av, p, s);
2245 assert (prev_inuse (p));
2264 mchunkptr p;
2302 p = fastbin (av, i);
2317 assert (p == 0);
2319 while (p != 0)
2321 if (__glibc_unlikely (misaligned_chunk (p)))
2325 do_check_inuse_chunk (av, p);
2326 total += chunksize (p);
2328 assert (fastbin_index (chunksize (p)) == i);
2329 p = REVEAL_PTR (p->fd);
2349 for (p = last (b); p != b; p = p->bk)
2352 do_check_free_chunk (av, p);
2353 size = chunksize (p);
2361 assert (p->bk == b ||
2362 (unsigned long) chunksize (p->bk) >= (unsigned long) chunksize (p));
2366 if (p->fd_nextsize != NULL)
2368 if (p->fd_nextsize == p)
2369 assert (p->bk_nextsize == p);
2372 if (p->fd_nextsize == first (b))
2373 assert (chunksize (p) < chunksize (p->fd_nextsize));
2375 assert (chunksize (p) > chunksize (p->fd_nextsize));
2377 if (p == first (b))
2378 assert (chunksize (p) > chunksize (p->bk_nextsize));
2380 assert (chunksize (p) < chunksize (p->bk_nextsize));
2384 assert (p->bk_nextsize == NULL);
2388 assert (p->fd_nextsize == NULL && p->bk_nextsize == NULL);
2390 for (q = next_chunk (p);
2472 mchunkptr p; /* the allocated/returned chunk */
2477 p = (mchunkptr) (mm + correction);
2478 set_prev_size (p, correction);
2479 set_head (p, (size - correction) | IS_MMAPPED);
2483 p = (mchunkptr) mm;
2484 set_prev_size (p, 0);
2485 set_head (p, size | IS_MMAPPED);
2496 check_chunk (av, p);
2498 return chunk2mem (p);
2563 mchunkptr p; /* the allocated/returned chunk */
2946 p = av->top;
2947 size = chunksize (p);
2953 remainder = chunk_at_offset (p, nb);
2955 set_head (p, nb | PREV_INUSE | (av != &main_arena ? NON_MAIN_ARENA : 0));
2957 check_malloced_chunk (av, p, nb);
2958 return chunk2mem (p);
3043 munmap_chunk (mchunkptr p)
3046 INTERNAL_SIZE_T size = chunksize (p);
3048 assert (chunk_is_mmapped (p));
3050 uintptr_t mem = (uintptr_t) chunk2mem (p);
3051 uintptr_t block = (uintptr_t) p - prev_size (p);
3052 size_t total_size = prev_size (p) + size;
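munmap_chunk relies on prev_size of an mmapped chunk recording the alignment padding placed in front of it at mmap time (lines 2477-2485 above), so the whole mapping can be released in one call. A simplified arithmetic sketch with stand-in parameters:

    #include <stddef.h>
    #include <stdint.h>
    #include <sys/mman.h>

    /* 'chunk' may sit prev_size bytes into its mapping; unmap from the real
       start of the mapping and cover padding + chunk.  Error handling and
       the page-alignment sanity checks of the real code are omitted.  */
    static void demo_munmap_chunk (void *chunk, size_t prev_size, size_t chunk_size)
    {
      uintptr_t block = (uintptr_t) chunk - prev_size;
      munmap ((void *) block, prev_size + chunk_size);
    }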
3074 mremap_chunk (mchunkptr p, size_t new_size)
3077 INTERNAL_SIZE_T offset = prev_size (p);
3078 INTERNAL_SIZE_T size = chunksize (p);
3081 assert (chunk_is_mmapped (p));
3083 uintptr_t block = (uintptr_t) p - offset;
3084 uintptr_t mem = (uintptr_t) chunk2mem(p);
3095 return p;
3105 p = (mchunkptr) (cp + offset);
3107 assert (aligned_OK (chunk2mem (p)));
3109 assert (prev_size (p) == offset);
3110 set_head (p, (new_size - offset) | IS_MMAPPED);
3116 return p;
3354 mchunkptr p; /* chunk corresponding to mem */
3366 p = mem2chunk (mem);
3368 if (chunk_is_mmapped (p)) /* release mmapped memory. */
3373 && chunksize_nomask (p) > mp_.mmap_threshold
3374 && chunksize_nomask (p) <= DEFAULT_MMAP_THRESHOLD_MAX)
3376 mp_.mmap_threshold = chunksize (p);
3381 munmap_chunk (p);
3388 (void)tag_region (chunk2mem (p), memsize (p));
3390 ar_ptr = arena_for_chunk (p);
3391 _int_free (ar_ptr, p, 0);
3532 void *p;
3562 p = _int_memalign (&main_arena, alignment, bytes);
3563 assert (!p || chunk_is_mmapped (mem2chunk (p)) ||
3564 &main_arena == arena_for_chunk (mem2chunk (p)));
3565 return tag_new_usable (p);
3570 p = _int_memalign (ar_ptr, alignment, bytes);
3571 if (!p && ar_ptr != NULL)
3575 p = _int_memalign (ar_ptr, alignment, bytes);
3581 assert (!p || chunk_is_mmapped (mem2chunk (p)) ||
3582 ar_ptr == arena_for_chunk (mem2chunk (p)));
3583 return tag_new_usable (p);
3701 mchunkptr p = mem2chunk (mem);
3707 return tag_new_zero_region (mem, memsize (p));
3709 INTERNAL_SIZE_T csz = chunksize (p);
3712 if (chunk_is_mmapped (p))
3721 if (perturb_byte == 0 && (p == oldtop && csz > oldtopsize))
3813 void *p = sysmalloc (nb, av);
3814 if (p != NULL)
3815 alloc_perturb (p, bytes);
3816 return p;
3886 void *p = chunk2mem (victim);
3887 alloc_perturb (p, bytes);
3888 return p;
3944 void *p = chunk2mem (victim);
3945 alloc_perturb (p, bytes);
3946 return p;
4045 void *p = chunk2mem (victim);
4046 alloc_perturb (p, bytes);
4047 return p;
4077 void *p = chunk2mem (victim);
4078 alloc_perturb (p, bytes);
4079 return p;
4239 void *p = chunk2mem (victim);
4240 alloc_perturb (p, bytes);
4241 return p;
4347 void *p = chunk2mem (victim);
4348 alloc_perturb (p, bytes);
4349 return p;
4385 void *p = chunk2mem (victim);
4386 alloc_perturb (p, bytes);
4387 return p;
4407 void *p = sysmalloc (nb, av);
4408 if (p != NULL)
4409 alloc_perturb (p, bytes);
4410 return p;
4420 _int_free (mstate av, mchunkptr p, int have_lock)
4431 size = chunksize (p);
4437 if (__builtin_expect ((uintptr_t) p > (uintptr_t) -size, 0)
4438 || __builtin_expect (misaligned_chunk (p), 0))
4445 check_inuse_chunk(av, p);
4453 tcache_entry *e = (tcache_entry *) chunk2mem (p);
4481 tcache_put (p, tc_idx);
4500 && (chunk_at_offset(p, size) != av->top)
4504 if (__builtin_expect (chunksize_nomask (chunk_at_offset (p, size))
4506 || __builtin_expect (chunksize (chunk_at_offset (p, size))
4516 fail = (chunksize_nomask (chunk_at_offset (p, size)) <= CHUNK_HDR_SZ
4517 || chunksize (chunk_at_offset (p, size)) >= av->system_mem);
4525 free_perturb (chunk2mem(p), size - CHUNK_HDR_SZ);
4538 if (__builtin_expect (old == p, 0))
4540 p->fd = PROTECT_PTR (&p->fd, old);
4541 *fb = p;
4548 if (__builtin_expect (old == p, 0))
4551 p->fd = PROTECT_PTR (&p->fd, old);
4553 while ((old = catomic_compare_and_exchange_val_rel (fb, p, old2))
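Two protections are visible in the fastbin path above: the cheap double-free check (old == p, i.e. the chunk being freed is already at the top of its fastbin) and the Safe-Linking mangling done by PROTECT_PTR/REVEAL_PTR. A simplified sketch of the latter, assuming the page shift of 12 used upstream:

    #include <stdint.h>

    /* The stored next-pointer is xored with the address of the slot that
       holds it, shifted right by the page shift, so forging a link requires
       knowing a heap address.  */
    static void *demo_protect_ptr (void *slot, void *ptr)
    {
      return (void *) (((uintptr_t) slot >> 12) ^ (uintptr_t) ptr);
    }

    /* Revealing is the same operation applied with the slot's own address.  */
    static void *demo_reveal_ptr (void **slot)
    {
      return demo_protect_ptr (slot, *slot);
    }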
4569 else if (!chunk_is_mmapped(p)) {
4578 nextchunk = chunk_at_offset(p, size);
4582 if (__glibc_unlikely (p == av->top))
4598 free_perturb (chunk2mem(p), size - CHUNK_HDR_SZ);
4601 if (!prev_inuse(p)) {
4602 prevsize = prev_size (p);
4604 p = chunk_at_offset(p, -((long) prevsize));
4605 if (__glibc_unlikely (chunksize(p) != prevsize))
4607 unlink_chunk (av, p);
4631 p->fd = fwd;
4632 p->bk = bck;
4635 p->fd_nextsize = NULL;
4636 p->bk_nextsize = NULL;
4638 bck->fd = p;
4639 fwd->bk = p;
4641 set_head(p, size | PREV_INUSE);
4642 set_foot(p, size);
4644 check_free_chunk(av, p);
4654 set_head(p, size | PREV_INUSE);
4655 av->top = p;
4656 check_chunk(av, p);
4700 munmap_chunk (p);
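The backward-coalescing step above (repeated in malloc_consolidate below) merges a freed chunk with a free left-hand neighbour by stepping back prev_size bytes and adding the sizes, after unlinking the neighbour from its bin. A minimal sketch of just the pointer arithmetic, with simplified field names:

    #include <stddef.h>

    struct demo_chunk { size_t prev_size, size_and_bits; };
    #define DEMO_PREV_INUSE 0x1

    /* If the chunk in front of p is free (PREV_INUSE clear), fold it into p.
       The real code also unlinks the neighbour and verifies that its
       chunksize matches prev_size (p).  */
    static struct demo_chunk *coalesce_backward (struct demo_chunk *p, size_t *sizep)
    {
      if ((p->size_and_bits & DEMO_PREV_INUSE) == 0)
        {
          size_t prevsize = p->prev_size;
          *sizep += prevsize;
          p = (struct demo_chunk *) ((char *) p - prevsize);
        }
      return p;
    }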
4718 mchunkptr p; /* current chunk being consolidated */
4745 p = atomic_exchange_acq (fb, NULL);
4746 if (p != 0) {
4749 if (__glibc_unlikely (misaligned_chunk (p)))
4753 unsigned int idx = fastbin_index (chunksize (p));
4758 check_inuse_chunk(av, p);
4759 nextp = REVEAL_PTR (p->fd);
4762 size = chunksize (p);
4763 nextchunk = chunk_at_offset(p, size);
4766 if (!prev_inuse(p)) {
4767 prevsize = prev_size (p);
4769 p = chunk_at_offset(p, -((long) prevsize));
4770 if (__glibc_unlikely (chunksize(p) != prevsize))
4772 unlink_chunk (av, p);
4785 unsorted_bin->fd = p;
4786 first_unsorted->bk = p;
4789 p->fd_nextsize = NULL;
4790 p->bk_nextsize = NULL;
4793 set_head(p, size | PREV_INUSE);
4794 p->bk = unsorted_bin;
4795 p->fd = first_unsorted;
4796 set_foot(p, size);
4801 set_head(p, size | PREV_INUSE);
4802 av->top = p;
4805 } while ( (p = nextp) != 0);
4944 mchunkptr p; /* corresponding chunk */
4945 char *brk; /* alignment point within p */
4973 p = mem2chunk (m);
4986 if ((unsigned long) (brk - (char *) (p)) < MINSIZE)
4990 leadsize = brk - (char *) (p);
4991 newsize = chunksize (p) - leadsize;
4994 if (chunk_is_mmapped (p))
4996 set_prev_size (newp, prev_size (p) + leadsize);
5005 set_head_size (p, leadsize | (av != &main_arena ? NON_MAIN_ARENA : 0));
5006 _int_free (av, p, 1);
5007 p = newp;
5010 (((unsigned long) (chunk2mem (p))) % alignment) == 0);
5014 if (!chunk_is_mmapped (p))
5016 size = chunksize (p);
5020 remainder = chunk_at_offset (p, nb);
5023 set_head_size (p, nb);
5028 check_inuse_chunk (av, p);
5029 return chunk2mem (p);
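_int_memalign gets its guarantee from plain arithmetic: over-allocate, pick an aligned point far enough into the block that the leading slack can form its own chunk (at least MINSIZE, lines 4986 ff. above), free that lead chunk, and return the aligned remainder. The rounding itself is the standard power-of-two trick:

    #include <stddef.h>
    #include <stdint.h>

    /* Round addr up to the next multiple of alignment; alignment must be a
       power of two for the mask to be valid.  */
    static uintptr_t align_up (uintptr_t addr, size_t alignment)
    {
      return (addr + alignment - 1) & ~((uintptr_t) alignment - 1);
    }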
5053 for (mchunkptr p = last (bin); p != bin; p = p->bk)
5055 INTERNAL_SIZE_T size = chunksize (p);
5060 char *paligned_mem = (char *) (((uintptr_t) p
5064 assert ((char *) chunk2mem (p) + 2 * CHUNK_HDR_SZ
5066 assert ((char *) p + size > paligned_mem);
5069 size -= paligned_mem - (char *) p;
5125 mchunkptr p = mem2chunk (mem);
5127 if (chunk_is_mmapped (p))
5128 return chunksize (p) - CHUNK_HDR_SZ;
5129 else if (inuse (p))
5130 return memsize (p);
5154 mchunkptr p;
5172 for (p = fastbin (av, i);
5173 p != 0;
5174 p = REVEAL_PTR (p->fd))
5176 if (__glibc_unlikely (misaligned_chunk (p)))
5180 fastavail += chunksize (p);
5190 for (p = last (b); p != b; p = p->bk)
5193 avail += chunksize (p);
5757 mchunkptr p = fastbin (ar_ptr, i);
5758 if (p != NULL)
5761 size_t thissize = chunksize (p);
5763 while (p != NULL)
5765 if (__glibc_unlikely (misaligned_chunk (p)))
5769 p = REVEAL_PTR (p->fd);