/* glibc-2.33 source code (excerpt: _int_realloc from malloc/malloc.c) */
/* NOTE(review): the lines from here down to the matching `return
   chunk2mem (newp);` appear to be a stray, duplicated fragment of the
   _int_realloc body -- compare the complete function defined below,
   where the identical code appears inside the function.  At file scope
   this fragment will not compile (it references locals such as newp,
   newsize, av, nb that only exist inside _int_realloc).  It is most
   likely a copy-paste / extraction artifact and should simply be
   deleted; kept byte-identical here pending confirmation.  */
/* Fast path: the existing chunk already satisfies the request;
   any excess is split off later.  */
if ((unsigned long) (oldsize) >= (unsigned long) (nb))
{
/* already big enough; split below */
newp = oldp;
newsize = oldsize;
}
/* Fallback path: allocate a fresh chunk, copy the payload, free the
   old chunk.  (nb - MALLOC_ALIGN_MASK recovers the request size that
   _int_malloc will pad back up to nb.)  */
newmem = _int_malloc (av, nb - MALLOC_ALIGN_MASK);
if (newmem == 0)
return 0; /* propagate failure */
newp = mem2chunk (newmem);
newsize = chunksize (newp);
/*
Avoid copy if newp is next chunk after oldp.
*/
if (newp == next)
{
/* The new chunk is physically contiguous with the old one, so the
   old payload is already in place -- merge instead of copying.  */
newsize += oldsize;
newp = oldp;
}
else
{
/* Distinct chunk: copy the old usable bytes, retag and free the
   old chunk, and return the new one.  */
void *oldmem = chunk2mem (oldp);
newmem = TAG_NEW_USABLE (newmem);
memcpy (newmem, oldmem,
CHUNK_AVAILABLE_SIZE (oldp) - CHUNK_HDR_SZ);
(void) TAG_REGION (chunk2rawmem (oldp), oldsize);
_int_free (av, oldp, 1);
check_inuse_chunk (av, newp);
return chunk2mem (newp);
}
/* Resize the in-use, non-mmapped chunk OLDP (header size OLDSIZE) held
   in arena AV so that it can hold a request already padded/aligned to
   NB bytes (NB includes the chunk header overhead).

   Strategy, in order of preference:
     1. OLDP is already big enough -> reuse it, split off any excess.
     2. OLDP is adjacent to the arena's top chunk and extending into it
        suffices -> grow OLDP in place by moving `top` forward.
     3. The physically-next chunk is free and merging reaches NB ->
        unlink it and coalesce forward, split off any excess.
     4. Otherwise allocate a new chunk, memcpy the payload, free OLDP.

   Returns a pointer to the user memory of the (possibly moved) chunk,
   or 0 if the fallback allocation failed (the original chunk is then
   left untouched).  Aborts via malloc_printerr on corrupted chunk
   headers.  Callers must hold AV's lock; mmapped chunks are handled
   by the callers, never here.  */
void*
_int_realloc(mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
INTERNAL_SIZE_T nb)
{
mchunkptr newp; /* chunk to return */
INTERNAL_SIZE_T newsize; /* its size */
void* newmem; /* corresponding user mem */
mchunkptr next; /* next contiguous chunk after oldp */
mchunkptr remainder; /* extra space at end of newp */
unsigned long remainder_size; /* its size */
/* oldmem size */
/* Integrity check: a size at or below the bare header, or one at or
   beyond everything this arena ever obtained from the system, can only
   come from a corrupted or forged header.  */
if (__builtin_expect (chunksize_nomask (oldp) <= CHUNK_HDR_SZ, 0)
|| __builtin_expect (oldsize >= av->system_mem, 0))
malloc_printerr ("realloc(): invalid old size");
check_inuse_chunk (av, oldp);
/* All callers already filter out mmap'ed chunks. */
assert (!chunk_is_mmapped (oldp));
next = chunk_at_offset (oldp, oldsize);
INTERNAL_SIZE_T nextsize = chunksize (next);
/* Same sanity check for the physically-next chunk's header, since we
   may coalesce with it below.  */
if (__builtin_expect (chunksize_nomask (next) <= CHUNK_HDR_SZ, 0)
|| __builtin_expect (nextsize >= av->system_mem, 0))
malloc_printerr ("realloc(): invalid next size");
/* Case 1: shrinking or same size -- keep the chunk in place.  */
if ((unsigned long) (oldsize) >= (unsigned long) (nb))
{
/* already big enough; split below */
newp = oldp;
newsize = oldsize;
}
else
{
/* Try to expand forward into top */
/* Case 2: requires that after taking NB bytes, top still has at
   least MINSIZE left so it remains a valid chunk.  */
if (next == av->top &&
(unsigned long) (newsize = oldsize + nextsize) >=
(unsigned long) (nb + MINSIZE))
{
set_head_size (oldp, nb | (av != &main_arena ? NON_MAIN_ARENA : 0));
av->top = chunk_at_offset (oldp, nb);
set_head (av->top, (newsize - nb) | PREV_INUSE);
check_inuse_chunk (av, oldp);
return TAG_NEW_USABLE (chunk2rawmem (oldp));
}
/* Try to expand forward into next chunk; split off remainder below */
/* Case 3: next chunk is free and merging reaches NB; the combined
   chunk falls through to the common split logic at the end.  */
else if (next != av->top &&
!inuse (next) &&
(unsigned long) (newsize = oldsize + nextsize) >=
(unsigned long) (nb))
{
newp = oldp;
unlink_chunk (av, next);
}
/* allocate, copy, free */
else
{
/* Case 4: nb - MALLOC_ALIGN_MASK recovers a request size that
   _int_malloc will pad back up to at least nb.  */
newmem = _int_malloc (av, nb - MALLOC_ALIGN_MASK);
if (newmem == 0)
return 0; /* propagate failure */
newp = mem2chunk (newmem);
newsize = chunksize (newp);
/*
Avoid copy if newp is next chunk after oldp.
*/
if (newp == next)
{
/* _int_malloc happened to hand back the chunk directly after
   oldp: the old payload is already in place, so just merge.  */
newsize += oldsize;
newp = oldp;
}
else
{
/* Genuinely new location: copy the old usable payload (minus
   the header), retag the old region, and release it.  */
void *oldmem = chunk2mem (oldp);
newmem = TAG_NEW_USABLE (newmem);
memcpy (newmem, oldmem,
CHUNK_AVAILABLE_SIZE (oldp) - CHUNK_HDR_SZ);
(void) TAG_REGION (chunk2rawmem (oldp), oldsize);
_int_free (av, oldp, 1);
check_inuse_chunk (av, newp);
return chunk2mem (newp);
}
}
}
/* If possible, free extra space in old or extended chunk */
assert ((unsigned long) (newsize) >= (unsigned long) (nb));
remainder_size = newsize - nb;
if (remainder_size < MINSIZE) /* not enough extra to split off */
{
/* Too small to be a standalone chunk: give the caller the whole
   thing and mark the following chunk's PREV_INUSE bit.  */
set_head_size (newp, newsize | (av != &main_arena ? NON_MAIN_ARENA : 0));
set_inuse_bit_at_offset (newp, newsize);
}
else /* split remainder */
{
remainder = chunk_at_offset (newp, nb);
/* Clear any user-space tags before writing the header. */
remainder = TAG_REGION (remainder, remainder_size);
set_head_size (newp, nb | (av != &main_arena ? NON_MAIN_ARENA : 0));
set_head (remainder, remainder_size | PREV_INUSE |
(av != &main_arena ? NON_MAIN_ARENA : 0));
/* Mark remainder as inuse so free() won't complain */
set_inuse_bit_at_offset (remainder, remainder_size);
_int_free (av, remainder, 1);
}
check_inuse_chunk (av, newp);
return TAG_NEW_USABLE (chunk2rawmem (newp));
}