#if defined(__clang_analyzer__) && !defined(SDL_DISABLE_ANALYZE_MACROS)
#define SDL_DISABLE_ANALYZE_MACROS 1
#include "../SDL_internal.h"
#if defined(HAVE_MALLOC)
void *SDL_calloc(size_t nmemb, size_t size)
{
    return calloc(nmemb, size);
}
#define LACKS_SYS_TYPES_H
#define LACKS_STRINGS_H
#define LACKS_STRING_H
#define LACKS_STDLIB_H
#define WIN32_LEAN_AND_MEAN
#define HAVE_MORECORE 0
#define LACKS_UNISTD_H
#define LACKS_SYS_PARAM_H
#define LACKS_SYS_MMAN_H
#define LACKS_STRING_H
#define LACKS_STRINGS_H
#define LACKS_SYS_TYPES_H
#define LACKS_ERRNO_H
#define LACKS_FCNTL_H
#define MALLOC_FAILURE_ACTION
#define MMAP_CLEARS 0
#if defined(DARWIN) || defined(_DARWIN)
#ifndef HAVE_MORECORE
#define HAVE_MORECORE 0
#ifndef LACKS_SYS_TYPES_H
#include <sys/types.h>
#define MAX_SIZE_T (~(size_t)0)
#define ONLY_MSPACES 0
#ifndef MALLOC_ALIGNMENT
#define MALLOC_ALIGNMENT ((size_t)8U)
#define ABORT abort()
#ifndef ABORT_ON_ASSERT_FAILURE
#define ABORT_ON_ASSERT_FAILURE 1
#ifndef PROCEED_ON_ERROR
#define PROCEED_ON_ERROR 0
#define MMAP_CLEARS 1
#define HAVE_MREMAP 1
#define HAVE_MREMAP 0
#ifndef MALLOC_FAILURE_ACTION
#define MALLOC_FAILURE_ACTION errno = ENOMEM;
#ifndef HAVE_MORECORE
#define HAVE_MORECORE 0
#define HAVE_MORECORE 1
#define MORECORE_CONTIGUOUS 0
#define MORECORE sbrk
#ifndef MORECORE_CONTIGUOUS
#define MORECORE_CONTIGUOUS 1
#ifndef DEFAULT_GRANULARITY
#if MORECORE_CONTIGUOUS
#define DEFAULT_GRANULARITY (0)
#define DEFAULT_GRANULARITY ((size_t)64U * (size_t)1024U)
#ifndef DEFAULT_TRIM_THRESHOLD
#ifndef MORECORE_CANNOT_TRIM
#define DEFAULT_TRIM_THRESHOLD ((size_t)2U * (size_t)1024U * (size_t)1024U)
#define DEFAULT_TRIM_THRESHOLD MAX_SIZE_T
#ifndef DEFAULT_MMAP_THRESHOLD
#define DEFAULT_MMAP_THRESHOLD ((size_t)256U * (size_t)1024U)
#define DEFAULT_MMAP_THRESHOLD MAX_SIZE_T
#ifndef USE_BUILTIN_FFS
#define USE_BUILTIN_FFS 0
#ifndef USE_DEV_RANDOM
#define USE_DEV_RANDOM 0
#define NO_MALLINFO 0
#ifndef MALLINFO_FIELD_TYPE
#define MALLINFO_FIELD_TYPE size_t
#define memset SDL_memset
#define memcpy SDL_memcpy
#define malloc SDL_malloc
#define calloc SDL_calloc
#define realloc SDL_realloc
#define free SDL_free
#define M_TRIM_THRESHOLD (-1)
#define M_GRANULARITY (-2)
#define M_MMAP_THRESHOLD (-3)
#ifdef HAVE_USR_INCLUDE_MALLOC_H
#include "/usr/include/malloc.h"
#ifndef USE_DL_PREFIX
#define dlcalloc calloc
#define dlfree free
#define dlmalloc malloc
#define dlmemalign memalign
#define dlrealloc realloc
#define dlvalloc valloc
#define dlpvalloc pvalloc
#define dlmallinfo mallinfo
#define dlmallopt mallopt
#define dlmalloc_trim malloc_trim
#define dlmalloc_stats malloc_stats
#define dlmalloc_usable_size malloc_usable_size
#define dlmalloc_footprint malloc_footprint
#define dlmalloc_max_footprint malloc_max_footprint
#define dlindependent_calloc independent_calloc
#define dlindependent_comalloc independent_comalloc
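/*
  Note: when USE_DL_PREFIX is defined, the mappings above are skipped and
  the entry points keep their dl-prefixed names (dlmalloc, dlfree, ...),
  which lets this allocator coexist with the platform malloc in the same
  program instead of replacing it.
*/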
typedef void *mspace;
mspace create_mspace(size_t capacity, int locked);
size_t destroy_mspace(mspace msp);
mspace create_mspace_with_base(void *base, size_t capacity, int locked);
void *mspace_malloc(mspace msp, size_t bytes);
void mspace_free(mspace msp, void *mem);
void *mspace_realloc(mspace msp, void *mem, size_t newsize);
void *mspace_calloc(mspace msp, size_t n_elements, size_t elem_size);
void *mspace_memalign(mspace msp, size_t alignment, size_t bytes);
void **mspace_independent_calloc(mspace msp, size_t n_elements,
                                 size_t elem_size, void *chunks[]);
void **mspace_independent_comalloc(mspace msp, size_t n_elements,
                                   size_t sizes[], void *chunks[]);
size_t mspace_footprint(mspace msp);
size_t mspace_max_footprint(mspace msp);
struct mallinfo mspace_mallinfo(mspace msp);
void mspace_malloc_stats(mspace msp);
int mspace_trim(mspace msp, size_t pad);
int mspace_mallopt(int, int);
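/*
  Minimal mspace round trip (an illustrative sketch, not part of dlmalloc
  itself; assumes this file is compiled with MSPACES enabled):

    mspace ms = create_mspace(0, 0);   -- default capacity, no locking
    void *p = mspace_malloc(ms, 128);
    mspace_free(ms, p);
    destroy_mspace(ms);                -- releases the whole space at once
*/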
#pragma warning( disable : 4146 )
#ifndef LACKS_STDIO_H
#ifndef LACKS_ERRNO_H
#ifndef LACKS_STDLIB_H
#if ABORT_ON_ASSERT_FAILURE
#define assert(x) if(!(x)) ABORT
#ifndef LACKS_STRING_H
#ifndef LACKS_STRINGS_H
#include <strings.h>
#ifndef LACKS_SYS_MMAN_H
#include <sys/mman.h>
#ifndef LACKS_FCNTL_H
#ifndef LACKS_UNISTD_H
#if !defined(__FreeBSD__) && !defined(__OpenBSD__) && !defined(__NetBSD__)
extern void *sbrk(ptrdiff_t);
#ifndef malloc_getpagesize
#  ifdef _SC_PAGESIZE
#    ifndef _SC_PAGE_SIZE
#      define _SC_PAGE_SIZE _SC_PAGESIZE
#  ifdef _SC_PAGE_SIZE
#    define malloc_getpagesize sysconf(_SC_PAGE_SIZE)
#  if defined(BSD) || defined(DGUX) || defined(HAVE_GETPAGESIZE)
extern size_t getpagesize();
#    define malloc_getpagesize getpagesize()
#    define malloc_getpagesize getpagesize()
#  ifndef LACKS_SYS_PARAM_H
#    include <sys/param.h>
#  ifdef EXEC_PAGESIZE
#    define malloc_getpagesize EXEC_PAGESIZE
#    define malloc_getpagesize NBPG
#    define malloc_getpagesize (NBPG * CLSIZE)
#    define malloc_getpagesize NBPC
#    define malloc_getpagesize PAGESIZE
#    define malloc_getpagesize ((size_t)4096U)
#define SIZE_T_SIZE (sizeof(size_t))
#define SIZE_T_BITSIZE (sizeof(size_t) << 3)
#define SIZE_T_ZERO ((size_t)0)
#define SIZE_T_ONE ((size_t)1)
#define SIZE_T_TWO ((size_t)2)
#define TWO_SIZE_T_SIZES (SIZE_T_SIZE<<1)
#define FOUR_SIZE_T_SIZES (SIZE_T_SIZE<<2)
#define SIX_SIZE_T_SIZES (FOUR_SIZE_T_SIZES+TWO_SIZE_T_SIZES)
#define HALF_MAX_SIZE_T (MAX_SIZE_T / 2U)
#define CHUNK_ALIGN_MASK (MALLOC_ALIGNMENT - SIZE_T_ONE)
#define is_aligned(A) (((size_t)((A)) & (CHUNK_ALIGN_MASK)) == 0)
#define align_offset(A)\
 ((((size_t)(A) & CHUNK_ALIGN_MASK) == 0)? 0 :\
  ((MALLOC_ALIGNMENT - ((size_t)(A) & CHUNK_ALIGN_MASK)) & CHUNK_ALIGN_MASK))
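/*
  Worked example (assuming the default MALLOC_ALIGNMENT of 8, so
  CHUNK_ALIGN_MASK == 7): for A == 0x1004, (A & 7) == 4, so
  align_offset(A) == (8 - 4) & 7 == 4, bumping A to the aligned 0x1008;
  for an already-aligned A == 0x1008 the offset is 0.
*/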
#define MFAIL ((void*)(MAX_SIZE_T))
#define CMFAIL ((char*)(MFAIL))
#define IS_MMAPPED_BIT (SIZE_T_ZERO)
#define USE_MMAP_BIT (SIZE_T_ZERO)
#define CALL_MMAP(s) MFAIL
#define CALL_MUNMAP(a, s) (-1)
#define DIRECT_MMAP(s) MFAIL
#define IS_MMAPPED_BIT (SIZE_T_ONE)
#define USE_MMAP_BIT (SIZE_T_ONE)
#define CALL_MUNMAP(a, s) munmap((a), (s))
#define MMAP_PROT (PROT_READ|PROT_WRITE)
#if !defined(MAP_ANONYMOUS) && defined(MAP_ANON)
#define MAP_ANONYMOUS MAP_ANON
#ifdef MAP_ANONYMOUS
#define MMAP_FLAGS (MAP_PRIVATE|MAP_ANONYMOUS)
#define CALL_MMAP(s) mmap(0, (s), MMAP_PROT, MMAP_FLAGS, -1, 0)
#define MMAP_FLAGS (MAP_PRIVATE)
static int dev_zero_fd = -1;
#define CALL_MMAP(s) ((dev_zero_fd < 0) ? \
           (dev_zero_fd = open("/dev/zero", O_RDWR), \
            mmap(0, (s), MMAP_PROT, MMAP_FLAGS, dev_zero_fd, 0)) : \
            mmap(0, (s), MMAP_PROT, MMAP_FLAGS, dev_zero_fd, 0))
#define DIRECT_MMAP(s) CALL_MMAP(s)
void *ptr = VirtualAlloc(0, size, MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE);
return (ptr != 0) ? ptr : MFAIL;
void *ptr = VirtualAlloc(0, size, MEM_RESERVE | MEM_COMMIT | MEM_TOP_DOWN,
                         PAGE_READWRITE);
return (ptr != 0) ? ptr : MFAIL;
MEMORY_BASIC_INFORMATION minfo;
if (VirtualQuery(cptr, &minfo, sizeof(minfo)) == 0)
    return -1;
if (minfo.BaseAddress != cptr || minfo.AllocationBase != cptr ||
    minfo.State != MEM_COMMIT || minfo.RegionSize > size)
    return -1;
if (VirtualFree(cptr, 0, MEM_RELEASE) == 0)
    return -1;
cptr += minfo.RegionSize;
size -= minfo.RegionSize;
#define CALL_MMAP(s) win32mmap(s)
#define CALL_MUNMAP(a, s) win32munmap((a), (s))
#define DIRECT_MMAP(s) win32direct_mmap(s)
#if HAVE_MMAP && HAVE_MREMAP
#define CALL_MREMAP(addr, osz, nsz, mv) mremap((addr), (osz), (nsz), (mv))
#define CALL_MREMAP(addr, osz, nsz, mv) MFAIL
#define CALL_MORECORE(S) MORECORE(S)
#define CALL_MORECORE(S) MFAIL
#define USE_NONCONTIGUOUS_BIT (4U)
#define EXTERN_BIT (8U)
#include <pthread.h>
#define MLOCK_T pthread_mutex_t
#define INITIAL_LOCK(l) pthread_mutex_init(l, NULL)
#define ACQUIRE_LOCK(l) pthread_mutex_lock(l)
#define RELEASE_LOCK(l) pthread_mutex_unlock(l)
static MLOCK_T morecore_mutex = PTHREAD_MUTEX_INITIALIZER;
#define MLOCK_T long
#ifdef InterlockedCompareExchangePointer
if (!InterlockedCompareExchange(sl, 1, 0))
if (!InterlockedCompareExchange((void **) sl, (void *) 1, (void *) 0))
InterlockedExchange(sl, 0);
#define INITIAL_LOCK(l) *(l)=0
#define ACQUIRE_LOCK(l) win32_acquire_lock(l)
#define RELEASE_LOCK(l) win32_release_lock(l)
static MLOCK_T morecore_mutex;
#define USE_LOCK_BIT (2U)
#define USE_LOCK_BIT (0U)
#define INITIAL_LOCK(l)
#if USE_LOCKS && HAVE_MORECORE
#define ACQUIRE_MORECORE_LOCK() ACQUIRE_LOCK(&morecore_mutex);
#define RELEASE_MORECORE_LOCK() RELEASE_LOCK(&morecore_mutex);
#define ACQUIRE_MORECORE_LOCK()
#define RELEASE_MORECORE_LOCK()
#define ACQUIRE_MAGIC_INIT_LOCK() ACQUIRE_LOCK(&magic_init_mutex);
#define RELEASE_MAGIC_INIT_LOCK() RELEASE_LOCK(&magic_init_mutex);
#define ACQUIRE_MAGIC_INIT_LOCK()
#define RELEASE_MAGIC_INIT_LOCK()
#define MCHUNK_SIZE (sizeof(mchunk))
#define CHUNK_OVERHEAD (TWO_SIZE_T_SIZES)
#define CHUNK_OVERHEAD (SIZE_T_SIZE)
#define MMAP_CHUNK_OVERHEAD (TWO_SIZE_T_SIZES)
#define MMAP_FOOT_PAD (FOUR_SIZE_T_SIZES)
#define MIN_CHUNK_SIZE\
  ((MCHUNK_SIZE + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)
#define chunk2mem(p) ((void*)((char*)(p) + TWO_SIZE_T_SIZES))
#define mem2chunk(mem) ((mchunkptr)((char*)(mem) - TWO_SIZE_T_SIZES))
#define align_as_chunk(A) (mchunkptr)((A) + align_offset(chunk2mem(A)))
#define MAX_REQUEST ((-MIN_CHUNK_SIZE) << 2)
#define MIN_REQUEST (MIN_CHUNK_SIZE - CHUNK_OVERHEAD - SIZE_T_ONE)
#define pad_request(req) \
   (((req) + CHUNK_OVERHEAD + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)
#define request2size(req) \
  (((req) < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(req))
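/*
  Worked example (assuming a 64-bit build without FOOTERS, so
  SIZE_T_SIZE == 8, CHUNK_OVERHEAD == 8, CHUNK_ALIGN_MASK == 7):
  MCHUNK_SIZE is 4 * 8 == 32, hence MIN_CHUNK_SIZE == 32 and
  MIN_REQUEST == 32 - 8 - 1 == 23.  Then
    request2size(13)  == MIN_CHUNK_SIZE == 32      (below MIN_REQUEST)
    request2size(100) == (100 + 8 + 7) & ~7 == 112
  so every user request maps to an 8-byte-aligned chunk size that also
  leaves room for the size/status header.
*/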
#define PINUSE_BIT (SIZE_T_ONE)
#define CINUSE_BIT (SIZE_T_TWO)
#define INUSE_BITS (PINUSE_BIT|CINUSE_BIT)
#define FENCEPOST_HEAD (INUSE_BITS|SIZE_T_SIZE)
#define cinuse(p) ((p)->head & CINUSE_BIT)
#define pinuse(p) ((p)->head & PINUSE_BIT)
#define chunksize(p) ((p)->head & ~(INUSE_BITS))
#define clear_pinuse(p) ((p)->head &= ~PINUSE_BIT)
#define clear_cinuse(p) ((p)->head &= ~CINUSE_BIT)
#define chunk_plus_offset(p, s) ((mchunkptr)(((char*)(p)) + (s)))
#define chunk_minus_offset(p, s) ((mchunkptr)(((char*)(p)) - (s)))
#define next_chunk(p) ((mchunkptr)( ((char*)(p)) + ((p)->head & ~INUSE_BITS)))
#define prev_chunk(p) ((mchunkptr)( ((char*)(p)) - ((p)->prev_foot) ))
#define next_pinuse(p) ((next_chunk(p)->head) & PINUSE_BIT)
#define get_foot(p, s) (((mchunkptr)((char*)(p) + (s)))->prev_foot)
#define set_foot(p, s) (((mchunkptr)((char*)(p) + (s)))->prev_foot = (s))
#define set_size_and_pinuse_of_free_chunk(p, s)\
  ((p)->head = (s|PINUSE_BIT), set_foot(p, s))
#define set_free_with_pinuse(p, s, n)\
  (clear_pinuse(n), set_size_and_pinuse_of_free_chunk(p, s))
#define is_mmapped(p)\
  (!((p)->head & PINUSE_BIT) && ((p)->prev_foot & IS_MMAPPED_BIT))
#define overhead_for(p)\
 (is_mmapped(p)? MMAP_CHUNK_OVERHEAD : CHUNK_OVERHEAD)
#define calloc_must_clear(p) (!is_mmapped(p))
#define calloc_must_clear(p) (1)
#define leftmost_child(t) ((t)->child[0] != 0? (t)->child[0] : (t)->child[1])
#define is_mmapped_segment(S) ((S)->sflags & IS_MMAPPED_BIT)
#define is_extern_segment(S) ((S)->sflags & EXTERN_BIT)
#define NSMALLBINS (32U)
#define NTREEBINS (32U)
#define SMALLBIN_SHIFT (3U)
#define SMALLBIN_WIDTH (SIZE_T_ONE << SMALLBIN_SHIFT)
#define TREEBIN_SHIFT (8U)
#define MIN_LARGE_SIZE (SIZE_T_ONE << TREEBIN_SHIFT)
#define MAX_SMALL_SIZE (MIN_LARGE_SIZE - SIZE_T_ONE)
#define MAX_SMALL_REQUEST (MAX_SMALL_SIZE - CHUNK_ALIGN_MASK - CHUNK_OVERHEAD)
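/*
  In concrete terms: chunks smaller than MIN_LARGE_SIZE (1 << 8 == 256
  bytes) live in the 32 small bins, spaced SMALLBIN_WIDTH (8) bytes
  apart, so a 40-byte chunk hashes to small bin 40 >> 3 == 5.  Chunks of
  256 bytes or more go to the 32 tree bins.
*/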
#define is_global(M) ((M) == &_gm_)
#define is_initialized(M) ((M)->top != 0)
#define use_lock(M) ((M)->mflags & USE_LOCK_BIT)
#define enable_lock(M) ((M)->mflags |= USE_LOCK_BIT)
#define disable_lock(M) ((M)->mflags &= ~USE_LOCK_BIT)
#define use_mmap(M) ((M)->mflags & USE_MMAP_BIT)
#define enable_mmap(M) ((M)->mflags |= USE_MMAP_BIT)
#define disable_mmap(M) ((M)->mflags &= ~USE_MMAP_BIT)
#define use_noncontiguous(M) ((M)->mflags & USE_NONCONTIGUOUS_BIT)
#define disable_contiguous(M) ((M)->mflags |= USE_NONCONTIGUOUS_BIT)
#define set_lock(M,L)\
 ((M)->mflags = (L)?\
  ((M)->mflags | USE_LOCK_BIT) :\
  ((M)->mflags & ~USE_LOCK_BIT))
#define page_align(S)\
 (((S) + (mparams.page_size)) & ~(mparams.page_size - SIZE_T_ONE))
#define granularity_align(S)\
  (((S) + (mparams.granularity)) & ~(mparams.granularity - SIZE_T_ONE))
#define is_page_aligned(S)\
   (((size_t)(S) & (mparams.page_size - SIZE_T_ONE)) == 0)
#define is_granularity_aligned(S)\
   (((size_t)(S) & (mparams.granularity - SIZE_T_ONE)) == 0)
#define segment_holds(S, A)\
  ((char*)(A) >= S->base && (char*)(A) < S->base + S->size)
msegmentptr sp = &m->seg;
if (addr >= sp->base && addr < sp->base + sp->size)
if ((sp = sp->next) == 0)
msegmentptr sp = &m->seg;
if ((char *) sp >= ss->base && (char *) sp < ss->base + ss->size)
if ((sp = sp->next) == 0)
#ifndef MORECORE_CANNOT_TRIM
#define should_trim(M,s) ((s) > (M)->trim_check)
#define should_trim(M,s) (0)
#define TOP_FOOT_SIZE\
  (align_offset(chunk2mem(0))+pad_request(sizeof(struct malloc_segment))+MIN_CHUNK_SIZE)
#define GLOBALLY_INITIALIZE() (mparams.page_size == 0 && init_mparams())
#define PREACTION(M) ((GLOBALLY_INITIALIZE() || use_lock(M))? ACQUIRE_LOCK(&(M)->mutex) : 0)
#define POSTACTION(M) { if (use_lock(M)) RELEASE_LOCK(&(M)->mutex); }
#define PREACTION(M) (0)
#define POSTACTION(M)
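/*
  The public entry points bracket their critical sections with these
  macros, along the lines of (sketch):

    if (!PREACTION(gm)) {
        ... operate on the malloc state ...
        POSTACTION(gm);
    }

  PREACTION also triggers the one-time parameter setup through
  GLOBALLY_INITIALIZE() on the first call.
*/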
#if PROCEED_ON_ERROR
int malloc_corruption_error_count;
static void reset_on_error(mstate m);
#define CORRUPTION_ERROR_ACTION(m) reset_on_error(m)
#define USAGE_ERROR_ACTION(m, p)
#ifndef CORRUPTION_ERROR_ACTION
#define CORRUPTION_ERROR_ACTION(m) ABORT
#ifndef USAGE_ERROR_ACTION
#define USAGE_ERROR_ACTION(m,p) ABORT
#define check_free_chunk(M,P)
#define check_inuse_chunk(M,P)
#define check_malloced_chunk(M,P,N)
#define check_mmapped_chunk(M,P)
#define check_malloc_state(M)
#define check_top_chunk(M,P)
#define check_free_chunk(M,P) do_check_free_chunk(M,P)
#define check_inuse_chunk(M,P) do_check_inuse_chunk(M,P)
#define check_top_chunk(M,P) do_check_top_chunk(M,P)
#define check_malloced_chunk(M,P,N) do_check_malloced_chunk(M,P,N)
#define check_mmapped_chunk(M,P) do_check_mmapped_chunk(M,P)
#define check_malloc_state(M) do_check_malloc_state(M)
static void do_check_any_chunk(mstate m, mchunkptr p);
static void do_check_top_chunk(mstate m, mchunkptr p);
static void do_check_mmapped_chunk(mstate m, mchunkptr p);
static void do_check_inuse_chunk(mstate m, mchunkptr p);
static void do_check_free_chunk(mstate m, mchunkptr p);
static void do_check_malloced_chunk(mstate m, void *mem, size_t s);
static void do_check_tree(mstate m, tchunkptr t);
static void do_check_treebin(mstate m, bindex_t i);
static void do_check_smallbin(mstate m, bindex_t i);
static void do_check_malloc_state(mstate m);
static int bin_find(mstate m, mchunkptr x);
static size_t traverse_and_check(mstate m);
#define is_small(s) (((s) >> SMALLBIN_SHIFT) < NSMALLBINS)
#define small_index(s) ((s) >> SMALLBIN_SHIFT)
#define small_index2size(i) ((i) << SMALLBIN_SHIFT)
#define MIN_SMALL_INDEX (small_index(MIN_CHUNK_SIZE))
#define smallbin_at(M, i) ((sbinptr)((char*)&((M)->smallbins[(i)<<1])))
#define treebin_at(M,i) (&((M)->treebins[i]))
#if defined(__GNUC__) && defined(i386)
#define compute_tree_index(S, I)\
{\
  size_t X = S >> TREEBIN_SHIFT;\
  if (X == 0)\
    I = 0;\
  else if (X > 0xFFFF)\
    I = NTREEBINS-1;\
  else {\
    unsigned int K;\
    __asm__("bsrl %1,%0\n\t" : "=r" (K) : "rm" (X));\
    I = (bindex_t)((K << 1) + ((S >> (K + (TREEBIN_SHIFT-1)) & 1)));\
  }\
}
#define compute_tree_index(S, I)\
{\
  size_t X = S >> TREEBIN_SHIFT;\
  if (X == 0)\
    I = 0;\
  else if (X > 0xFFFF)\
    I = NTREEBINS-1;\
  else {\
    unsigned int Y = (unsigned int)X;\
    unsigned int N = ((Y - 0x100) >> 16) & 8;\
    unsigned int K = (((Y <<= N) - 0x1000) >> 16) & 4;\
    N += K;\
    N += K = (((Y <<= K) - 0x4000) >> 16) & 2;\
    K = 14 - N + ((Y <<= K) >> 15);\
    I = (K << 1) + ((S >> (K + (TREEBIN_SHIFT-1)) & 1));\
  }\
}
#define bit_for_tree_index(i) \
   (i == NTREEBINS-1)? (SIZE_T_BITSIZE-1) : (((i) >> 1) + TREEBIN_SHIFT - 2)
#define leftshift_for_tree_index(i) \
   ((i == NTREEBINS-1)? 0 : \
    ((SIZE_T_BITSIZE-SIZE_T_ONE) - (((i) >> 1) + TREEBIN_SHIFT - 2)))
#define minsize_for_tree_index(i) \
   ((SIZE_T_ONE << (((i) >> 1) + TREEBIN_SHIFT)) | \
   (((size_t)((i) & SIZE_T_ONE)) << (((i) >> 1) + TREEBIN_SHIFT - 1)))
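/*
  Illustrative mapping (derived from the macros above): each pair of
  tree bins spans one power of two starting at MIN_LARGE_SIZE, with
  compute_tree_index yielding
     256..383  -> bin 0      384..511  -> bin 1
     512..767  -> bin 2      768..1023 -> bin 3
  i.e. bin 2k holds the lower half and bin 2k+1 the upper half of the
  range [256 << k, 256 << (k+1)).
*/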
#define idx2bit(i) ((binmap_t)(1) << (i))
#define mark_smallmap(M,i) ((M)->smallmap |= idx2bit(i))
#define clear_smallmap(M,i) ((M)->smallmap &= ~idx2bit(i))
#define smallmap_is_marked(M,i) ((M)->smallmap & idx2bit(i))
#define mark_treemap(M,i) ((M)->treemap |= idx2bit(i))
#define clear_treemap(M,i) ((M)->treemap &= ~idx2bit(i))
#define treemap_is_marked(M,i) ((M)->treemap & idx2bit(i))
#if defined(__GNUC__) && defined(i386)
#define compute_bit2idx(X, I)\
{\
  unsigned int J;\
  __asm__("bsfl %1,%0\n\t" : "=r" (J) : "rm" (X));\
  I = (bindex_t)J;\
}
#define compute_bit2idx(X, I) I = ffs(X)-1
#define compute_bit2idx(X, I)\
{\
  unsigned int Y = X - 1;\
  unsigned int K = Y >> (16-4) & 16;\
  unsigned int N = K;        Y >>= K;\
  N += K = Y >> (8-3) &  8;  Y >>= K;\
  N += K = Y >> (4-2) &  4;  Y >>= K;\
  N += K = Y >> (2-1) &  2;  Y >>= K;\
  N += K = Y >> (1-0) &  1;  Y >>= K;\
  I = (bindex_t)(N + Y);\
}
#define least_bit(x) ((x) & -(x))
#define left_bits(x) ((x<<1) | -(x<<1))
#define same_or_left_bits(x) ((x) | -(x))
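/*
  Two's-complement bit tricks used when searching the bin maps.  For a
  32-bit x == 0x28 (binary 101000), least_bit(x) == 0x08: x & -x keeps
  only the lowest set bit.  For a single-bit x == 0x08,
  left_bits(x) == 0xFFFFFFF0 (every bit strictly above it) and
  same_or_left_bits(x) == 0xFFFFFFF8, which is how the allocator finds
  the nearest non-empty bin at or above a target index in O(1).
*/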
#define ok_address(M, a) ((char*)(a) >= (M)->least_addr)
#define ok_next(p, n) ((char*)(p) < (char*)(n))
#define ok_cinuse(p) cinuse(p)
#define ok_pinuse(p) pinuse(p)
#define ok_address(M, a) (1)
#define ok_next(b, n) (1)
#define ok_cinuse(p) (1)
#define ok_pinuse(p) (1)
#if (FOOTERS && !INSECURE)
#define ok_magic(M) ((M)->magic == mparams.magic)
#define ok_magic(M) (1)
#if defined(__GNUC__) && __GNUC__ >= 3
#define RTCHECK(e) __builtin_expect(e, 1)
#define RTCHECK(e) (e)
#define RTCHECK(e) (1)
#define mark_inuse_foot(M,p,s)
#define set_inuse(M,p,s)\
  ((p)->head = (((p)->head & PINUSE_BIT)|s|CINUSE_BIT),\
  ((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT)
#define set_inuse_and_pinuse(M,p,s)\
  ((p)->head = (s|PINUSE_BIT|CINUSE_BIT),\
  ((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT)
#define set_size_and_pinuse_of_inuse_chunk(M, p, s)\
  ((p)->head = (s|PINUSE_BIT|CINUSE_BIT))
#define mark_inuse_foot(M,p,s)\
  (((mchunkptr)((char*)(p) + (s)))->prev_foot = ((size_t)(M) ^ mparams.magic))
#define get_mstate_for(p)\
  ((mstate)(((mchunkptr)((char*)(p) +\
    (chunksize(p))))->prev_foot ^ mparams.magic))
#define set_inuse(M,p,s)\
  ((p)->head = (((p)->head & PINUSE_BIT)|s|CINUSE_BIT),\
  (((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT), \
  mark_inuse_foot(M,p,s))
#define set_inuse_and_pinuse(M,p,s)\
  ((p)->head = (s|PINUSE_BIT|CINUSE_BIT),\
  (((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT),\
  mark_inuse_foot(M,p,s))
#define set_size_and_pinuse_of_inuse_chunk(M, p, s)\
  ((p)->head = (s|PINUSE_BIT|CINUSE_BIT),\
  mark_inuse_foot(M, p, s))
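/*
  With FOOTERS enabled, each in-use chunk's trailing prev_foot field
  stores (mstate ^ mparams.magic).  Since (m ^ magic) ^ magic == m,
  get_mstate_for() recovers the owning malloc_state from a chunk alone,
  and a corrupted or foreign pointer is unlikely to decode to a state
  whose own magic field matches (see ok_magic above).
*/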
#if MORECORE_CONTIGUOUS
#if (FOOTERS && !INSECURE)
if ((fd = open("/dev/urandom", O_RDONLY)) >= 0 &&
    read(fd, buf, sizeof(buf)) == sizeof(buf)) {
    s = *((size_t *) buf);
s = (size_t) 0x58585858U;
SYSTEM_INFO system_info;
GetSystemInfo(&system_info);
if ((sizeof(size_t) != sizeof(char *)) ||
    (sizeof(int) < 4) ||
switch (param_number) {
do_check_any_chunk(mstate m, mchunkptr p)
do_check_top_chunk(mstate m, mchunkptr p)
assert(sz == m->topsize);
do_check_mmapped_chunk(mstate m, mchunkptr p)
do_check_inuse_chunk(mstate m, mchunkptr p)
do_check_any_chunk(m, p);
do_check_mmapped_chunk(m, p);
do_check_free_chunk(mstate m, mchunkptr p)
do_check_any_chunk(m, p);
if (p != m->dv && p != m->top) {
assert(next->prev_foot == sz);
do_check_malloced_chunk(mstate m, void *mem, size_t s)
do_check_inuse_chunk(m, p);
do_check_tree(mstate m, tchunkptr t)
do_check_any_chunk(m, ((mchunkptr) u));
assert(u->index == tindex);
if (u->parent == 0) {
    assert(u->child[0] == 0);
    assert(u->child[1] == 0);
assert(u->parent->child[0] == u ||
       u->parent->child[1] == u ||
       *((tbinptr *) (u->parent)) == u);
if (u->child[0] != 0) {
    assert(u->child[0]->parent == u);
    assert(u->child[0] != u);
    do_check_tree(m, u->child[0]);
if (u->child[1] != 0) {
    assert(u->child[1]->parent == u);
    assert(u->child[1] != u);
    do_check_tree(m, u->child[1]);
if (u->child[0] != 0 && u->child[1] != 0) {
int empty = (m->treemap & (1U << i)) == 0;
do_check_tree(m, t);
do_check_smallbin(mstate m, bindex_t i)
mchunkptr p = b->bk;
unsigned int empty = (m->smallmap & (1U << i)) == 0;
for (; p != b; p = p->bk) {
do_check_free_chunk(m, p);
do_check_inuse_chunk(m, q);
bin_find(mstate m, mchunkptr x)
} while ((p = p->fd) != b);
while (t != 0 && chunksize(t) != size) {
if (u == (tchunkptr) x)
} while ((u = u->fd) != t);
traverse_and_check(mstate m)
msegmentptr s = &m->seg;
mchunkptr lastq = 0;
do_check_inuse_chunk(m, q);
assert(q == m->dv || bin_find(m, q));
do_check_free_chunk(m, q);
do_check_malloc_state(mstate m)
do_check_smallbin(m, i);
do_check_treebin(m, i);
if (m->dvsize != 0) {
    do_check_any_chunk(m, m->dv);
assert(bin_find(m, m->dv) == 0);
do_check_top_chunk(m, m->top);
assert(bin_find(m, m->top) == 0);
total = traverse_and_check(m);
assert(m->footprint <= m->max_footprint);
struct mallinfo nm = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
msegmentptr s = &m->seg;
nm.hblkhd = m->footprint - sum;
nm.usmblks = m->max_footprint;
nm.uordblks = m->footprint - mfree;
#ifndef LACKS_STDIO_H
msegmentptr s = &m->seg;
#ifndef LACKS_STDIO_H
maxfp = m->max_footprint;
#ifndef LACKS_STDIO_H
fprintf(stderr, "max system bytes = %10lu\n", (unsigned long) (maxfp));
fprintf(stderr, "system bytes     = %10lu\n", (unsigned long) (fp));
fprintf(stderr, "in use bytes     = %10lu\n", (unsigned long) (used));
#define insert_small_chunk(M, P, S) {\
  bindex_t I = small_index(S);\
  mchunkptr B = smallbin_at(M, I);\
  assert(S >= MIN_CHUNK_SIZE);\
  if (!smallmap_is_marked(M, I))\
    mark_smallmap(M, I);\
  else if (RTCHECK(ok_address(M, B->fd)))\
    CORRUPTION_ERROR_ACTION(M);\
#define unlink_small_chunk(M, P, S) {\
  mchunkptr F = P->fd;\
  mchunkptr B = P->bk;\
  bindex_t I = small_index(S);\
  assert(chunksize(P) == small_index2size(I));\
    clear_smallmap(M, I);\
  else if (RTCHECK((F == smallbin_at(M,I) || ok_address(M, F)) &&\
                   (B == smallbin_at(M,I) || ok_address(M, B)))) {\
    CORRUPTION_ERROR_ACTION(M);\
#define unlink_first_small_chunk(M, B, P, I) {\
  mchunkptr F = P->fd;\
  assert(chunksize(P) == small_index2size(I));\
    clear_smallmap(M, I);\
  else if (RTCHECK(ok_address(M, F))) {\
    CORRUPTION_ERROR_ACTION(M);\
#define replace_dv(M, P, S) {\
  size_t DVS = M->dvsize;\
  mchunkptr DV = M->dv;\
  assert(is_small(DVS));\
  insert_small_chunk(M, DV, DVS);\
#define insert_large_chunk(M, X, S) {\
  compute_tree_index(S, I);\
  H = treebin_at(M, I);\
  X->child[0] = X->child[1] = 0;\
  if (!treemap_is_marked(M, I)) {\
    mark_treemap(M, I);\
    X->parent = (tchunkptr)H;\
    size_t K = S << leftshift_for_tree_index(I);\
    if (chunksize(T) != S) {\
      tchunkptr* C = &(T->child[(K >> (SIZE_T_BITSIZE-SIZE_T_ONE)) & 1]);\
      else if (RTCHECK(ok_address(M, C))) {\
      CORRUPTION_ERROR_ACTION(M);\
      tchunkptr F = T->fd;\
      if (RTCHECK(ok_address(M, T) && ok_address(M, F))) {\
      CORRUPTION_ERROR_ACTION(M);\
#define unlink_large_chunk(M, X) {\
  tchunkptr XP = X->parent;\
  tchunkptr F = X->fd;\
  if (RTCHECK(ok_address(M, F))) {\
    CORRUPTION_ERROR_ACTION(M);\
  if (((R = *(RP = &(X->child[1]))) != 0) ||\
      ((R = *(RP = &(X->child[0]))) != 0)) {\
    while ((*(CP = &(R->child[1])) != 0) ||\
           (*(CP = &(R->child[0])) != 0)) {\
    if (RTCHECK(ok_address(M, RP)))\
      CORRUPTION_ERROR_ACTION(M);\
    tbinptr* H = treebin_at(M, X->index);\
    if ((*H = R) == 0) \
      clear_treemap(M, X->index);\
    else if (RTCHECK(ok_address(M, XP))) {\
      if (XP->child[0] == X) \
      CORRUPTION_ERROR_ACTION(M);\
    if (RTCHECK(ok_address(M, R))) {\
      if ((C0 = X->child[0]) != 0) {\
        if (RTCHECK(ok_address(M, C0))) {\
        CORRUPTION_ERROR_ACTION(M);\
      if ((C1 = X->child[1]) != 0) {\
        if (RTCHECK(ok_address(M, C1))) {\
        CORRUPTION_ERROR_ACTION(M);\
      CORRUPTION_ERROR_ACTION(M);\
#define insert_chunk(M, P, S)\
  if (is_small(S)) insert_small_chunk(M, P, S)\
  else { tchunkptr TP = (tchunkptr)(P); insert_large_chunk(M, TP, S); }
#define unlink_chunk(M, P, S)\
  if (is_small(S)) unlink_small_chunk(M, P, S)\
  else { tchunkptr TP = (tchunkptr)(P); unlink_large_chunk(M, TP); }
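/*
  insert_chunk/unlink_chunk are the single dispatch point for all bin
  maintenance: with the default TREEBIN_SHIFT of 8, a freed 200-byte
  chunk satisfies is_small() and goes onto a doubly linked small-bin
  list, while a 2000-byte chunk is cast to tchunkptr and placed in the
  best-fit trie of tree bins.
*/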
#define internal_malloc(m, b) mspace_malloc(m, b)
#define internal_free(m, mem) mspace_free(m,mem);
#define internal_malloc(m, b)\
   (m == gm)? dlmalloc(b) : mspace_malloc(m, b)
#define internal_free(m, mem)\
   if (m == gm) dlfree(mem); else mspace_free(m,mem);
#define internal_malloc(m, b) dlmalloc(b)
#define internal_free(m, mem) dlfree(mem)
mchunkptr p = (mchunkptr) (mm + offset);
if (mm < m->least_addr)
if ((m->footprint += mmsize) > m->max_footprint)
    m->max_footprint = m->footprint;
char *cp = (char *) CALL_MREMAP((char *) oldp - offset,
                                oldmmsize, newmmsize, 1);
mchunkptr newp = (mchunkptr) (cp + offset);
if (cp < m->least_addr)
if ((m->footprint += newmmsize - oldmmsize) > m->max_footprint)
    m->max_footprint = m->footprint;
p = (mchunkptr) ((char *) p + offset);
bin->fd = bin->bk = bin;
#if PROCEED_ON_ERROR
reset_on_error(mstate m)
++malloc_corruption_error_count;
m->smallbins = m->treebins = 0;
m->dvsize = m->topsize = 0;
size_t psize = (char *) oldfirst - (char *) p;
size_t qsize = psize - nb;
assert((char *) oldfirst > (char *) q);
if (oldfirst == m->top) {
    size_t tsize = m->topsize += qsize;
} else if (oldfirst == m->dv) {
    size_t dsize = m->dvsize += qsize;
char *old_top = (char *) m->top;
char *old_end = oldsp->base + oldsp->size;
char *asp = rawsp + offset;
mchunkptr sp = (mchunkptr) csp;
msegmentptr ss = (msegmentptr) (chunk2mem(sp));
mchunkptr p = tnext;
m->seg.base = tbase;
m->seg.size = tsize;
m->seg.sflags = mmapped;
if ((char *) (&(nextp->head)) < old_end)
if (csp != old_top) {
    mchunkptr q = (mchunkptr) old_top;
    size_t psize = csp - old_top;
size_t ssize = end - br;
if ((m->footprint += tsize) > m->max_footprint)
    m->max_footprint = m->footprint;
m->seg.base = m->least_addr = tbase;
m->seg.size = tsize;
m->seg.sflags = mmap_flag;
init_top(m, mn, (size_t) ((tbase + tsize) - (char *) mn) - TOP_FOOT_SIZE);
msegmentptr sp = &m->seg;
while (sp != 0 && tbase != sp->base + sp->size)
init_top(m, m->top, m->topsize + tsize);
if (tbase < m->least_addr)
    m->least_addr = tbase;
while (sp != 0 && sp->base != tbase + tsize)
char *oldbase = sp->base;
if (nb < m->topsize) {
    size_t rsize = m->topsize -= nb;
    mchunkptr p = m->top;
size_t released = 0;
msegmentptr pred = &m->seg;
msegmentptr sp = pred->next;
char *base = sp->base;
size_t size = sp->size;
msegmentptr next = sp->next;
tchunkptr tp = (tchunkptr) p;
m->footprint -= size;
size_t released = 0;
if (m->topsize > pad) {
    size_t extra = ((m->topsize - pad + (unit - SIZE_T_ONE)) / unit -
                    SIZE_T_ONE) * unit;
size_t newsize = sp->size - extra;
if ((CALL_MREMAP(sp->base, sp->size, newsize, 0) != MFAIL)
    || (CALL_MUNMAP(sp->base + newsize, extra) == 0)) {
if (old_br == sp->base + sp->size) {
if (rel_br != CMFAIL && new_br < old_br)
    released = old_br - new_br;
if (released != 0) {
    sp->size -= released;
    m->footprint -= released;
    init_top(m, m->top, m->topsize - released);
return (released != 0) ? 1 : 0;
if ((rsize = trem) == 0)
if (rt != 0 && rt != t)
if (t == 0 && v == 0) {
if (leftbits != 0) {
if (v != 0 && rsize < (size_t) (m->dvsize - nb)) {
else if (oldsize >= nb) {
    size_t rsize = oldsize - nb;
} else if (next == m->top && oldsize + m->topsize > nb) {
    size_t newsize = oldsize + m->topsize;
    size_t newtopsize = newsize - nb;
    m->topsize = newtopsize;
memcpy(newmem, oldmem, (oc < bytes) ? oc : bytes);
if ((alignment & (alignment - SIZE_T_ONE)) != 0) {
    while (a < alignment)
if ((((size_t) (mem)) % alignment) != 0) {
char *pos = ((size_t) (br - (char *) (p)) >=
             MIN_CHUNK_SIZE) ? br : br + alignment;
mchunkptr newp = (mchunkptr) pos;
size_t leadsize = pos - (char *) (p);
size_t newsize = chunksize(p) - leadsize;
newp->prev_foot = p->prev_foot + leadsize;
size_t remainder_size = size - nb;
set_inuse(m, remainder, remainder_size);
ialloc(mstate m, size_t n_elements, size_t *sizes, int opts, void *chunks[])
size_t element_size;
size_t contents_size;
size_t remainder_size;
mchunkptr array_chunk;
if (n_elements == 0)
if (n_elements == 0)
array_size = request2size(n_elements * (sizeof(void *)));
contents_size = n_elements * element_size;
for (i = 0; i != n_elements; ++i)
size = contents_size + array_size;
size_t array_chunk_size;
array_chunk_size = remainder_size - contents_size;
marray = (void **) (chunk2mem(array_chunk));
remainder_size = contents_size;
if (i != n_elements - 1) {
    if (element_size != 0)
        size = element_size;
remainder_size -= size;
if (marray != chunks) {
    if (element_size != 0) {
        assert(remainder_size == element_size);
for (i = 0; i != n_elements; ++i)
smallbits = gm->smallmap >> idx;
if ((smallbits & 0x3U) != 0) {
    idx += ~smallbits & 1;
else if (nb > gm->dvsize) {
    if (smallbits != 0) {
else if (gm->treemap != 0
if (nb <= gm->dvsize) {
    size_t rsize = gm->dvsize - nb;
    mchunkptr p = gm->dv;
size_t dvs = gm->dvsize;
else if (nb < gm->topsize) {
    size_t rsize = gm->topsize -= nb;
    mchunkptr p = gm->top;
mstate fm = get_mstate_for(p);
size_t prevsize = p->prev_foot;
prevsize &= ~IS_MMAPPED_BIT;
if (CALL_MUNMAP((char *) p - prevsize, psize) == 0)
    fm->footprint -= psize;
if (next == fm->top) {
    size_t tsize = fm->topsize += psize;
} else if (next == fm->dv) {
    size_t dsize = fm->dvsize += psize;
if (n_elements != 0) {
    req = n_elements * elem_size;
    if (((n_elements | elem_size) & ~(size_t) 0xffff) &&
        (req / n_elements != elem_size))
        req = MAX_SIZE_T;       /* force downstream failure on overflow */
}
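/*
  Overflow guard, concretely: on a 32-bit build, n_elements == 0x20000
  and elem_size == 0x20000 multiply to 0x400000000, which wraps to 0.
  Both operands have bits above 0xffff, and 0 / 0x20000 != 0x20000, so
  req becomes MAX_SIZE_T and the allocation fails instead of silently
  returning a too-small block.  The cheap (a | b) & ~0xffff pre-test
  skips the division whenever both factors are below 2^16, in which
  case the product cannot overflow a 32-bit size_t.
*/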
#ifdef REALLOC_ZERO_BYTES_FREES
mstate m = get_mstate_for(mem2chunk(oldmem));
size_t sz = elem_size;
return ialloc(gm, n_elements, &sz, 3, chunks);
return ialloc(gm, n_elements, sizes, 0, chunks);
return gm->footprint;
return gm->max_footprint;
init_user_mstate(char *tbase, size_t tsize)
m->seg.base = m->least_addr = tbase;
m->seg.size = m->footprint = m->max_footprint = tsize;
create_mspace(size_t capacity, int locked)
char *tbase = (char *) (CALL_MMAP(tsize));
m = init_user_mstate(tbase, tsize);
create_mspace_with_base(void *base, size_t capacity, int locked)
m = init_user_mstate((char *) base, capacity);
destroy_mspace(mspace msp)
mstate ms = (mstate) msp;
msegmentptr sp = &ms->seg;
char *base = sp->base;
size_t size = sp->size;
flag_t flag = sp->sflags;
mspace_malloc(mspace msp, size_t bytes)
mstate ms = (mstate) msp;
smallbits = ms->smallmap >> idx;
if ((smallbits & 0x3U) != 0) {
    idx += ~smallbits & 1;
else if (nb > ms->dvsize) {
    if (smallbits != 0) {
else if (ms->treemap != 0
if (ms->treemap != 0 && (mem = tmalloc_large(ms, nb)) != 0) {
if (nb <= ms->dvsize) {
    size_t rsize = ms->dvsize - nb;
    mchunkptr p = ms->dv;
size_t dvs = ms->dvsize;
else if (nb < ms->topsize) {
    size_t rsize = ms->topsize -= nb;
    mchunkptr p = ms->top;
mspace_free(mspace msp, void *mem)
mstate fm = get_mstate_for(p);
mstate fm = (mstate) msp;
size_t prevsize = p->prev_foot;
prevsize &= ~IS_MMAPPED_BIT;
if (CALL_MUNMAP((char *) p - prevsize, psize) == 0)
    fm->footprint -= psize;
if (next == fm->top) {
    size_t tsize = fm->topsize += psize;
} else if (next == fm->dv) {
    size_t dsize = fm->dvsize += psize;
mspace_calloc(mspace msp, size_t n_elements, size_t elem_size)
mstate ms = (mstate) msp;
if (n_elements != 0) {
    req = n_elements * elem_size;
    if (((n_elements | elem_size) & ~(size_t) 0xffff) &&
        (req / n_elements != elem_size))
mspace_realloc(mspace msp, void *oldmem, size_t bytes)
return mspace_malloc(msp, bytes);
#ifdef REALLOC_ZERO_BYTES_FREES
mspace_free(msp, oldmem);
mstate ms = get_mstate_for(p);
mstate ms = (mstate) msp;
mspace_memalign(mspace msp, size_t alignment, size_t bytes)
mstate ms = (mstate) msp;
mspace_independent_calloc(mspace msp, size_t n_elements,
                          size_t elem_size, void *chunks[])
size_t sz = elem_size;
mstate ms = (mstate) msp;
return ialloc(ms, n_elements, &sz, 3, chunks);
mspace_independent_comalloc(mspace msp, size_t n_elements,
                            size_t sizes[], void *chunks[])
mstate ms = (mstate) msp;
return ialloc(ms, n_elements, sizes, 0, chunks);
mspace_trim(mspace msp, size_t pad)
mstate ms = (mstate) msp;
mspace_malloc_stats(mspace msp)
mstate ms = (mstate) msp;
mspace_footprint(mspace msp)
mstate ms = (mstate) msp;
mspace_max_footprint(mspace msp)
mstate ms = (mstate) msp;
mspace_mallinfo(mspace msp)
mstate ms = (mstate) msp;
mspace_mallopt(int param_number, int value)