Lines matching refs: prev_size
1248 as the prev_size of the NEXT chunk. This makes it easier to
1417 #define prev_size(p) ((p)->mchunk_prev_size)
1423 #define prev_chunk(p) ((mchunkptr) (((char *) (p)) - prev_size (p)))
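A minimal sketch of the boundary-tag layout these two macros assume, matching the comment fragment at 1248 (a chunk's size is also recorded at the front of the chunk that follows it, so free() can walk backwards). The struct below is trimmed to the two header fields that prev_size(p) touches; the real malloc_chunk also carries the free-list pointers, so treat this as illustration only.

    #include <stddef.h>

    typedef struct malloc_chunk
    {
      size_t mchunk_prev_size;  /* Size of the previous chunk; meaningful only
                                   while that chunk is free.  */
      size_t mchunk_size;       /* Size of this chunk; low bits carry flags.  */
      /* fd/bk free-list pointers follow here when the chunk is free.  */
    } *mchunkptr;

    #define prev_size(p)  ((p)->mchunk_prev_size)
    #define prev_chunk(p) ((mchunkptr) (((char *) (p)) - prev_size (p)))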
1628 if (chunksize (p) != prev_size (next_chunk (p)))
1629 malloc_printerr ("corrupted size vs. prev_size");
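The check at 1628-1629 enforces that boundary-tag invariant: a free chunk's recorded size must equal the prev_size copy held by the chunk that follows it. A hedged sketch of the check in isolation, assuming next_chunk is the usual forward-navigation macro and chunksize/malloc_printerr are the glibc-internal helpers:

    #define next_chunk(p) ((mchunkptr) (((char *) (p)) + chunksize (p)))

    /* Abort if p's own size disagrees with the copy stored as the
       following chunk's prev_size field.  */
    static void
    check_prev_size (mchunkptr p)
    {
      if (chunksize (p) != prev_size (next_chunk (p)))
        malloc_printerr ("corrupted size vs. prev_size");
    }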
2114 assert (((prev_size (p) + sz) & (GLRO (dl_pagesize) - 1)) == 0);
2142 assert (prev_size (next_chunk (p)) == sz);
2427 following chunk whose prev_size field could be used.
2453 The offset to the start of the mmapped region is stored in the prev_size
3051 uintptr_t block = (uintptr_t) p - prev_size (p);
3052 size_t total_size = prev_size (p) + size;
3077 INTERNAL_SIZE_T offset = prev_size (p);
3109 assert (prev_size (p) == offset);
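Lines 3051-3109 are the mmap path promised by the comment at 2453: for an mmapped chunk, the distance back to the start of the mapping lives in prev_size, so the whole region can be reconstructed before it is unmapped or remapped. A rough sketch of the unmap side, with the page-alignment assertions and error handling left out (munmap_chunk_sketch is an illustrative name, not the glibc function):

    #include <stdint.h>
    #include <sys/mman.h>

    static void
    munmap_chunk_sketch (mchunkptr p)
    {
      size_t size = chunksize (p);
      /* prev_size (p) is the offset of the chunk into its mapping.  */
      uintptr_t block = (uintptr_t) p - prev_size (p);
      size_t total_size = prev_size (p) + size;

      munmap ((void *) block, total_size);
    }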
4006 if (__glibc_unlikely ((prev_size (next) & ~(SIZE_BITS)) != size))
4007 malloc_printerr ("malloc(): mismatching next->prev_size (unsorted)");
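The hardening at 4006-4007 sits in the unsorted-bin scan of _int_malloc. A hedged reconstruction of its immediate neighbourhood, assuming next is derived from the candidate chunk with the usual chunk_at_offset helper (the surrounding loop and the other size checks are omitted):

    mchunkptr next = chunk_at_offset (victim, size);

    /* The size recorded in the following chunk's prev_size field, with the
       low status bits masked off, must match the victim's own size.  */
    if (__glibc_unlikely ((prev_size (next) & ~(SIZE_BITS)) != size))
      malloc_printerr ("malloc(): mismatching next->prev_size (unsorted)");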
4602 prevsize = prev_size (p);
4606 malloc_printerr ("corrupted size vs. prev_size while consolidating");
4767 prevsize = prev_size (p);
4771 malloc_printerr ("corrupted size vs. prev_size in fastbins");
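Both of these hits belong to backward coalescing: _int_free (4602-4606) and malloc_consolidate (4767-4771) read the previous chunk's size out of prev_size, merge the two chunks, and cross-check that value against the previous chunk's own header. A rough sketch of the shared pattern, assuming the usual prev_inuse, chunk_at_offset, and unlink_chunk helpers:

    if (!prev_inuse (p))
      {
        INTERNAL_SIZE_T prevsize = prev_size (p);
        size += prevsize;
        p = chunk_at_offset (p, -((long) prevsize));  /* step back to the free neighbour */
        if (__glibc_unlikely (chunksize (p) != prevsize))
          malloc_printerr ("corrupted size vs. prev_size while consolidating");
        unlink_chunk (av, p);
      }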
4996 set_prev_size (newp, prev_size (p) + leadsize);
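The last hit, 4996, is the memalign path trimming the front of an mmapped chunk: the aligned chunk newp begins leadsize bytes further into the mapping, so the stored offset back to the start of the mapping grows by the same amount. A sketch of that branch under those assumptions (the size bookkeeping around it and the non-mmapped branch are elided):

    mchunkptr newp = (mchunkptr) ((char *) p + leadsize);
    INTERNAL_SIZE_T newsize = chunksize (p) - leadsize;

    if (chunk_is_mmapped (p))
      {
        /* Keep the offset-to-mapping-start bookkeeping consistent.  */
        set_prev_size (newp, prev_size (p) + leadsize);
        set_head (newp, newsize | IS_MMAPPED);
      }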