diff -r 17c9f1627baf Doc/c-api/memory.rst
--- a/Doc/c-api/memory.rst Wed Jun 12 09:28:20 2013 +0300
+++ b/Doc/c-api/memory.rst Wed Jun 12 23:46:28 2013 +0200
@@ -155,6 +155,23 @@ versions and is therefore deprecated in
 :c:func:`PyMem_NEW`, :c:func:`PyMem_RESIZE`, :c:func:`PyMem_DEL`.
 
 
+Custom memory allocators
+========================
+
+.. c:function:: void PyMem_GetAllocators(void **ctx_p, void* (**malloc_p) (void *ctx, size_t size), void* (**realloc_p) (void *ctx, void *ptr, size_t size), void (**free_p) (void *ctx, void *ptr))
+
+   Get internal functions of :c:func:`PyMem_Malloc`, :c:func:`PyMem_Realloc`
+   and :c:func:`PyMem_Free`. *\*ctx_p* is an arbitrary user value.
+
+.. c:function:: void PyMem_SetAllocators(void *ctx, void* (*malloc) (void *ctx, size_t size), void* (*realloc) (void *ctx, void *ptr, size_t size), void (*free) (void *ctx, void *ptr))
+
+   Set internal functions of :c:func:`PyMem_Malloc`, :c:func:`PyMem_Realloc`
+   and :c:func:`PyMem_Free`. *ctx* is an arbitrary user value.
+
+   ``malloc(ctx, 0)`` and ``realloc(ctx, ptr, 0)`` must not return *NULL*: a
+   *NULL* result would be treated as an error.
+
+
 .. _memoryexamples:
 
 Examples
diff -r 17c9f1627baf Include/objimpl.h
--- a/Include/objimpl.h Wed Jun 12 09:28:20 2013 +0300
+++ b/Include/objimpl.h Wed Jun 12 23:46:28 2013 +0200
@@ -106,41 +106,53 @@ PyAPI_FUNC(Py_ssize_t) _Py_GetAllocatedB
 #ifndef Py_LIMITED_API
 PyAPI_FUNC(void) _PyObject_DebugMallocStats(FILE *out);
 #endif /* #ifndef Py_LIMITED_API */
-#ifdef PYMALLOC_DEBUG   /* WITH_PYMALLOC && PYMALLOC_DEBUG */
-PyAPI_FUNC(void *) _PyObject_DebugMalloc(size_t nbytes);
-PyAPI_FUNC(void *) _PyObject_DebugRealloc(void *p, size_t nbytes);
-PyAPI_FUNC(void) _PyObject_DebugFree(void *p);
-PyAPI_FUNC(void) _PyObject_DebugDumpAddress(const void *p);
-PyAPI_FUNC(void) _PyObject_DebugCheckAddress(const void *p);
-PyAPI_FUNC(void *) _PyObject_DebugMallocApi(char api, size_t nbytes);
-PyAPI_FUNC(void *) _PyObject_DebugReallocApi(char api, void *p, size_t nbytes);
-PyAPI_FUNC(void) _PyObject_DebugFreeApi(char api, void *p);
-PyAPI_FUNC(void) _PyObject_DebugCheckAddressApi(char api, const void *p);
-PyAPI_FUNC(void *) _PyMem_DebugMalloc(size_t nbytes);
-PyAPI_FUNC(void *) _PyMem_DebugRealloc(void *p, size_t nbytes);
-PyAPI_FUNC(void) _PyMem_DebugFree(void *p);
-#define PyObject_MALLOC         _PyObject_DebugMalloc
-#define PyObject_Malloc         _PyObject_DebugMalloc
-#define PyObject_REALLOC        _PyObject_DebugRealloc
-#define PyObject_Realloc        _PyObject_DebugRealloc
-#define PyObject_FREE           _PyObject_DebugFree
-#define PyObject_Free           _PyObject_DebugFree
+#endif
 
-#else   /* WITH_PYMALLOC && ! PYMALLOC_DEBUG */
+/* Macros */
 #define PyObject_MALLOC         PyObject_Malloc
 #define PyObject_REALLOC        PyObject_Realloc
 #define PyObject_FREE           PyObject_Free
-#endif
+#define PyObject_Del            PyObject_Free
+#define PyObject_DEL            PyObject_Free
 
-#else   /* ! WITH_PYMALLOC */
-#define PyObject_MALLOC         PyMem_MALLOC
-#define PyObject_REALLOC        PyMem_REALLOC
-#define PyObject_FREE           PyMem_FREE
+/* Get internal functions of PyObject_Malloc(), PyObject_Realloc() and
+   PyObject_Free(). *ctx_p is an arbitrary user value. */
+PyAPI_FUNC(void) PyObject_GetAllocators(
+    void **ctx_p,
+    void* (**malloc_p) (void *ctx, size_t size),
+    void* (**realloc_p) (void *ctx, void *ptr, size_t size),
+    void (**free_p) (void *ctx, void *ptr)
+    );
 
-#endif /* WITH_PYMALLOC */
+/* Set internal functions of PyObject_Malloc(), PyObject_Realloc() and
+   PyObject_Free(). ctx is an arbitrary user value.
 
-#define PyObject_Del            PyObject_Free
-#define PyObject_DEL            PyObject_FREE
+   malloc(ctx, 0) and realloc(ctx, ptr, 0) must not return NULL: a NULL
+   result would be treated as an error. */
+PyAPI_FUNC(void) PyObject_SetAllocators(
+    void *ctx,
+    void* (*malloc) (void *ctx, size_t size),
+    void* (*realloc) (void *ctx, void *ptr, size_t size),
+    void (*free) (void *ctx, void *ptr)
+    );
+
+/* Get internal functions allocating and deallocating arenas for
+   PyObject_Malloc(), PyObject_Realloc() and PyObject_Free().
+   *ctx_p is an arbitrary user value. */
+PyAPI_FUNC(void) PyObject_GetArenaAllocators(
+    void **ctx_p,
+    void* (**malloc_p) (void *ctx, size_t size),
+    void (**free_p) (void *ctx, void *ptr, size_t size)
+    );
+
+/* Set internal functions allocating and deallocating arenas for
+   PyObject_Malloc(), PyObject_Realloc() and PyObject_Free().
+   ctx is an arbitrary user value. */
+PyAPI_FUNC(void) PyObject_SetArenaAllocators(
+    void *ctx,
+    void* (*malloc) (void *ctx, size_t size),
+    void (*free) (void *ctx, void *ptr, size_t size)
+    );
 
 /*
  * Generic object allocator interface
diff -r 17c9f1627baf Include/pymem.h
--- a/Include/pymem.h Wed Jun 12 09:28:20 2013 +0300
+++ b/Include/pymem.h Wed Jun 12 23:46:28 2013 +0200
@@ -53,17 +53,7 @@ PyAPI_FUNC(void *) PyMem_Malloc(size_t);
 PyAPI_FUNC(void *) PyMem_Realloc(void *, size_t);
 PyAPI_FUNC(void) PyMem_Free(void *);
 
-/* Starting from Python 1.6, the wrappers Py_{Malloc,Realloc,Free} are
-   no longer supported. They used to call PyErr_NoMemory() on failure. */
-
 /* Macros. */
-#ifdef PYMALLOC_DEBUG
-/* Redirect all memory operations to Python's debugging allocator. */
-#define PyMem_MALLOC            _PyMem_DebugMalloc
-#define PyMem_REALLOC           _PyMem_DebugRealloc
-#define PyMem_FREE              _PyMem_DebugFree
-
-#else   /* ! PYMALLOC_DEBUG */
 
 /* PyMem_MALLOC(0) means malloc(1). Some systems would return NULL
    for malloc(0), which would be treated as an error. Some platforms
@@ -71,13 +61,9 @@ PyAPI_FUNC(void) PyMem_Free(void *);
    pymalloc. To solve these problems, allocate an extra byte. */
 /* Returns NULL to indicate error if a negative size or size larger than
    Py_ssize_t can represent is supplied. Helps prevents security holes. */
-#define PyMem_MALLOC(n)         ((size_t)(n) > (size_t)PY_SSIZE_T_MAX ? NULL \
-                                : malloc((n) ? (n) : 1))
-#define PyMem_REALLOC(p, n)     ((size_t)(n) > (size_t)PY_SSIZE_T_MAX ? NULL \
-                                : realloc((p), (n) ? (n) : 1))
-#define PyMem_FREE              free
-
-#endif  /* PYMALLOC_DEBUG */
+#define PyMem_MALLOC(n)         PyMem_Malloc(n)
+#define PyMem_REALLOC(p, n)     PyMem_Realloc(p, n)
+#define PyMem_FREE(p)           PyMem_Free(p)
 
 /*
  * Type-oriented memory interface
@@ -115,6 +101,27 @@ PyAPI_FUNC(void) PyMem_Free(void *);
 #define PyMem_Del               PyMem_Free
 #define PyMem_DEL               PyMem_FREE
 
+/* Get internal functions of PyMem_Malloc(), PyMem_Realloc() and PyMem_Free().
+   *ctx_p is an arbitrary user value. */
+PyAPI_FUNC(void) PyMem_GetAllocators(
+    void **ctx_p,
+    void* (**malloc_p) (void *ctx, size_t size),
+    void* (**realloc_p) (void *ctx, void *ptr, size_t size),
+    void (**free_p) (void *ctx, void *ptr)
+    );
+
+/* Set internal functions of PyMem_Malloc(), PyMem_Realloc() and PyMem_Free().
+   ctx is an arbitrary user value.
+
+   malloc(ctx, 0) and realloc(ctx, ptr, 0) must not return NULL: a NULL
+   result would be treated as an error. */
+PyAPI_FUNC(void) PyMem_SetAllocators(
+    void *ctx,
+    void* (*malloc) (void *ctx, size_t size),
+    void* (*realloc) (void *ctx, void *ptr, size_t size),
+    void (*free) (void *ctx, void *ptr)
+    );
+
 #ifdef __cplusplus
 }
 #endif
diff -r 17c9f1627baf Objects/object.c
--- a/Objects/object.c Wed Jun 12 09:28:20 2013 +0300
+++ b/Objects/object.c Wed Jun 12 23:46:28 2013 +0200
@@ -1859,26 +1859,6 @@ PyTypeObject *_PyCapsule_hack = &PyCapsu
 
 Py_ssize_t (*_Py_abstract_hack)(PyObject *) = PyObject_Size;
 
-/* Python's malloc wrappers (see pymem.h) */
-
-void *
-PyMem_Malloc(size_t nbytes)
-{
-    return PyMem_MALLOC(nbytes);
-}
-
-void *
-PyMem_Realloc(void *p, size_t nbytes)
-{
-    return PyMem_REALLOC(p, nbytes);
-}
-
-void
-PyMem_Free(void *p)
-{
-    PyMem_FREE(p);
-}
-
 void
 _PyObject_DebugTypeStats(FILE *out)
 {
diff -r 17c9f1627baf Objects/obmalloc.c
--- a/Objects/obmalloc.c Wed Jun 12 09:28:20 2013 +0300
+++ b/Objects/obmalloc.c Wed Jun 12 23:46:28 2013 +0200
@@ -1,5 +1,341 @@
 #include "Python.h"
 
+/* Python's malloc wrappers (see pymem.h) */
+
+static int _PyMem_init;
+
+static struct {
+    void *ctx;
+    void* (*malloc) (void*, size_t);
+    void* (*realloc) (void*, void*, size_t);
+    void (*free) (void*, void*);
+} _PyMem = {0};
+
+static struct {
+    void *ctx;
+    void* (*malloc) (void*, size_t);
+    void* (*realloc) (void*, void*, size_t);
+    void (*free) (void*, void*);
+} _PyObject = {0};
+
+static struct {
+    void *ctx;
+    void* (*malloc) (void*, size_t);
+    void (*free) (void*, void*, size_t);
+} _PyObject_Arena = {0};
+
+typedef struct {
+    char api_id;
+    void *ctx;
+    void* (*malloc) (void*, size_t);
+    void* (*realloc) (void*, void*, size_t);
+    void (*free) (void*, void*);
+} debug_alloc_api_t;
+
+static struct {
+    debug_alloc_api_t mem;
+    debug_alloc_api_t object;
+} _PyMem_Debug;
+
+
+/* Forward declaration */
+
+#ifdef PYMALLOC_DEBUG   /* WITH_PYMALLOC && PYMALLOC_DEBUG */
+static void* _PyMem_DebugMalloc(void *ctx, size_t size);
+static void _PyMem_DebugFree(void *ctx, void *p);
+static void* _PyMem_DebugRealloc(void *ctx, void *ptr, size_t size);
+
+static void _PyObject_DebugDumpAddress(const void *p);
+static void _PyMem_DebugCheckAddress(char api_id, const void *p);
+
+/* We tag each block with an API ID in order to tag API violations */
+#define _PYMALLOC_MEM_ID 'm'   /* the PyMem_Malloc() API */
+#define _PYMALLOC_OBJ_ID 'o'   /* The PyObject_Malloc() API */
+#endif
+
+#ifdef WITH_PYMALLOC
+static void* _PyObject_Malloc(void *ctx, size_t size);
+static void _PyObject_Free(void *ctx, void *p);
+static void* _PyObject_Realloc(void *ctx, void *ptr, size_t size);
+#endif
+
+
+static void *
+_PyMem_Malloc(void *ctx, size_t size)
+{
+    /* PyMem_Malloc(0) means malloc(1). Some systems would return NULL
+       for malloc(0), which would be treated as an error. Some platforms would
+       return a pointer with no memory behind it, which would break pymalloc.
+       To solve these problems, allocate an extra byte. */
+    if (size == 0)
+        size = 1;
+    return malloc(size);
+}
+
+static void *
+_PyMem_Realloc(void *ctx, void *ptr, size_t size)
+{
+    if (size == 0)
+        size = 1;
+    return realloc(ptr, size);
+}
+
+static void
+_PyMem_Free(void *ctx, void *ptr)
+{
+    free(ptr);
+}
+
+#ifdef ARENAS_USE_MMAP
+static void *
+_PyObject_ArenaMmap(void *ctx, size_t size)
+{
+    void *ptr;
+    ptr = mmap(NULL, size, PROT_READ|PROT_WRITE,
+               MAP_PRIVATE|MAP_ANONYMOUS, -1, 0);
+    if (ptr == MAP_FAILED)
+        return NULL;
+    assert(ptr != NULL);
+    return ptr;
+}
+
+static void
+_PyObject_ArenaMunmap(void *ctx, void *ptr, size_t size)
+{
+    munmap(ptr, size);
+}
+#else
+static void *
+_PyObject_ArenaMalloc(void *ctx, size_t size)
+{
+    return malloc(size);
+}
+
+static void
+_PyObject_ArenaFree(void *ctx, void *ptr, size_t size)
+{
+    free(ptr);
+}
+#endif
+
+static void
+_PyMem_InitDefaultAllocators(void)
+{
+    _PyMem_init = 1;
+
+    _PyMem.ctx = NULL;
+    _PyMem.malloc = _PyMem_Malloc;
+    _PyMem.realloc = _PyMem_Realloc;
+    _PyMem.free = _PyMem_Free;
+
+    _PyObject.ctx = NULL;
+#ifdef WITH_PYMALLOC
+    _PyObject.malloc = _PyObject_Malloc;
+    _PyObject.free = _PyObject_Free;
+    _PyObject.realloc = _PyObject_Realloc;
+#else
+    _PyObject.malloc = _PyMem_Malloc;
+    _PyObject.free = _PyMem_Free;
+    _PyObject.realloc = _PyMem_Realloc;
+#endif
+
+    _PyObject_Arena.ctx = NULL;
+#ifdef ARENAS_USE_MMAP
+    _PyObject_Arena.malloc = _PyObject_ArenaMmap;
+    _PyObject_Arena.free = _PyObject_ArenaMunmap;
+#else
+    _PyObject_Arena.malloc = _PyObject_ArenaMalloc;
+    _PyObject_Arena.free = _PyObject_ArenaFree;
+#endif
+}
+
+static void
+_PyMem_InitDefaultHook(void)
+{
+#ifdef PYMALLOC_DEBUG
+    _PyMem_Debug.mem.api_id = _PYMALLOC_MEM_ID;
+    PyMem_GetAllocators(&_PyMem_Debug.mem.ctx,
+                        &_PyMem_Debug.mem.malloc,
+                        &_PyMem_Debug.mem.realloc,
+                        &_PyMem_Debug.mem.free);
+
+    _PyMem_Debug.object.api_id = _PYMALLOC_OBJ_ID;
+    PyObject_GetAllocators(&_PyMem_Debug.object.ctx,
+                           &_PyMem_Debug.object.malloc,
+                           &_PyMem_Debug.object.realloc,
+                           &_PyMem_Debug.object.free);
+
+    PyMem_SetAllocators(&_PyMem_Debug.mem,
+                        _PyMem_DebugMalloc,
+                        _PyMem_DebugRealloc,
+                        _PyMem_DebugFree);
+
+    PyObject_SetAllocators(&_PyMem_Debug.object,
+                           _PyMem_DebugMalloc,
+                           _PyMem_DebugRealloc,
+                           _PyMem_DebugFree);
+#endif
+}
+
+#define _PyMem_InitAllocators() \
+    do { \
+        if (!_PyMem_init) { \
+            _PyMem_InitDefaultAllocators(); \
+            _PyMem_InitDefaultHook(); \
+        } \
+    } while (0)
+
+void
+PyMem_GetAllocators(void **ctx_p,
+                    void* (**malloc_p) (void *ctx, size_t size),
+                    void* (**realloc_p) (void *ctx, void *ptr, size_t size),
+                    void (**free_p) (void *ctx, void *ptr))
+{
+    _PyMem_InitAllocators();
+
+    *ctx_p = _PyMem.ctx;
+    *malloc_p = _PyMem.malloc;
+    *realloc_p = _PyMem.realloc;
+    *free_p = _PyMem.free;
+}
+
+void
+PyMem_SetAllocators(void *ctx,
+                    void* (*malloc) (void *ctx, size_t size),
+                    void* (*realloc) (void *ctx, void *ptr, size_t size),
+                    void (*free) (void *ctx, void *ptr))
+{
+    _PyMem_InitAllocators();
+
+    _PyMem.ctx = ctx;
+    _PyMem.malloc = malloc;
+    _PyMem.realloc = realloc;
+    _PyMem.free = free;
+}
+
+void
+PyObject_GetAllocators(void **ctx_p,
+                       void* (**malloc_p) (void *ctx, size_t size),
+                       void* (**realloc_p) (void *ctx, void *ptr, size_t size),
+                       void (**free_p) (void *ctx, void *ptr))
+{
+    _PyMem_InitAllocators();
+
+    *ctx_p = _PyObject.ctx;
+    *malloc_p = _PyObject.malloc;
+    *realloc_p = _PyObject.realloc;
+    *free_p = _PyObject.free;
+}
+
+void
+PyObject_SetAllocators(void *ctx,
+                       void* (*malloc) (void *ctx, size_t size),
+                       void* (*realloc) (void *ctx, void *ptr, size_t size),
+                       void (*free) (void *ctx, void *ptr))
+{
+    _PyMem_InitAllocators();
+
+    _PyObject.ctx = ctx;
+    _PyObject.malloc = malloc;
+    _PyObject.realloc = realloc;
+    _PyObject.free = free;
+}
+
+void
+PyObject_GetArenaAllocators(void **ctx_p,
+                            void* (**malloc_p) (void *ctx, size_t size),
+                            void (**free_p) (void *ctx, void *ptr, size_t size))
+{
+    _PyMem_InitAllocators();
+
+    *malloc_p = _PyObject_Arena.malloc;
+    *free_p = _PyObject_Arena.free;
+    *ctx_p = _PyObject_Arena.ctx;
+}
+
+void
+PyObject_SetArenaAllocators(void *ctx,
+                            void* (*malloc) (void *ctx, size_t size),
+                            void (*free) (void *ctx, void *ptr, size_t size))
+{
+    _PyMem_InitAllocators();
+
+    _PyObject_Arena.malloc = malloc;
+    _PyObject_Arena.free = free;
+    _PyObject_Arena.ctx = ctx;
+}
+
+void *
+PyMem_Malloc(size_t size)
+{
+    _PyMem_InitAllocators();
+
+    /*
+     * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
+     * Most python internals blindly use a signed Py_ssize_t to track
+     * things without checking for overflows or negatives.
+     * As size_t is unsigned, checking for size < 0 is not required.
+     */
+    if (size > (size_t)PY_SSIZE_T_MAX)
+        return NULL;
+
+    return _PyMem.malloc(_PyMem.ctx, size);
+}
+
+void *
+PyMem_Realloc(void *ptr, size_t size)
+{
+    _PyMem_InitAllocators();
+
+    if (size > (size_t)PY_SSIZE_T_MAX)
+        return NULL;
+
+    return _PyMem.realloc(_PyMem.ctx, ptr, size);
+}
+
+void
+PyMem_Free(void *ptr)
+{
+    _PyMem_InitAllocators();
+    _PyMem.free(_PyMem.ctx, ptr);
+}
+
+void *
+PyObject_Malloc(size_t size)
+{
+    _PyMem_InitAllocators();
+
+    /*
+     * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
+     * Most python internals blindly use a signed Py_ssize_t to track
+     * things without checking for overflows or negatives.
+     * As size_t is unsigned, checking for size < 0 is not required.
+     */
+    if (size > (size_t)PY_SSIZE_T_MAX)
+        return NULL;
+
+    return _PyObject.malloc(_PyObject.ctx, size);
+}
+
+void *
+PyObject_Realloc(void *ptr, size_t size)
+{
+    _PyMem_InitAllocators();
+
+    if (size > (size_t)PY_SSIZE_T_MAX)
+        return NULL;
+
+    return _PyObject.realloc(_PyObject.ctx, ptr, size);
+}
+
+void
+PyObject_Free(void *ptr)
+{
+    _PyMem_InitAllocators();
+    _PyObject.free(_PyObject.ctx, ptr);
+}
+
+
 #ifdef WITH_PYMALLOC
 
 #ifdef HAVE_MMAP
@@ -545,7 +881,6 @@ new_arena(void)
     struct arena_object* arenaobj;
     uint excess;        /* number of bytes above pool alignment */
     void *address;
-    int err;
 
 #ifdef PYMALLOC_DEBUG
     if (Py_GETENV("PYTHONMALLOCSTATS"))
@@ -567,11 +902,12 @@ new_arena(void)
             return NULL;                /* overflow */
 #endif
         nbytes = numarenas * sizeof(*arenas);
-        arenaobj = (struct arena_object *)realloc(arenas, nbytes);
+        arenaobj = (struct arena_object *)PyMem_Realloc(arenas, nbytes);
         if (arenaobj == NULL)
            return NULL;
         arenas = arenaobj;
 
+
         /* We might need to fix pointers that were copied.  However,
          * new_arena only gets called when all the pages in the
          * previous arenas are full.  Thus, there are *no* pointers
@@ -598,15 +934,8 @@ new_arena(void)
         arenaobj = unused_arena_objects;
         unused_arena_objects = arenaobj->nextarena;
         assert(arenaobj->address == 0);
-#ifdef ARENAS_USE_MMAP
-        address = mmap(NULL, ARENA_SIZE, PROT_READ|PROT_WRITE,
-                       MAP_PRIVATE|MAP_ANONYMOUS, -1, 0);
-        err = (address == MAP_FAILED);
-#else
-        address = malloc(ARENA_SIZE);
-        err = (address == 0);
-#endif
-        if (err) {
+        address = _PyObject_Arena.malloc(_PyObject_Arena.ctx, ARENA_SIZE);
+        if (address == NULL) {
             /* The allocation failed: return NULL after putting the
              * arenaobj back.
             */
@@ -769,9 +1098,8 @@ int Py_ADDRESS_IN_RANGE(void *P, poolp p
  * Unless the optimizer reorders everything, being too smart...
  */
 
-#undef PyObject_Malloc
-void *
-PyObject_Malloc(size_t nbytes)
+static void *
+_PyObject_Malloc(void *ctx, size_t nbytes)
 {
     block *bp;
     poolp pool;
@@ -788,17 +1116,6 @@ PyObject_Malloc(size_t nbytes)
 #endif
 
     /*
-     * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
-     * Most python internals blindly use a signed Py_ssize_t to track
-     * things without checking for overflows or negatives.
-     * As size_t is unsigned, checking for nbytes < 0 is not required.
-     */
-    if (nbytes > PY_SSIZE_T_MAX) {
-        _Py_AllocatedBlocks--;
-        return NULL;
-    }
-
-    /*
      * This implicitly redirects malloc(0).
      */
     if ((nbytes - 1) < SMALL_REQUEST_THRESHOLD) {
@@ -970,10 +1287,8 @@ redirect:
      * last chance to serve the request) or when the max memory limit
      * has been reached.
      */
-    if (nbytes == 0)
-        nbytes = 1;
     {
-        void *result = malloc(nbytes);
+        void *result = PyMem_Malloc(nbytes);
         if (!result)
             _Py_AllocatedBlocks--;
         return result;
@@ -982,9 +1297,8 @@ redirect:
 
 /* free */
 
-#undef PyObject_Free
-void
-PyObject_Free(void *p)
+static void
+_PyObject_Free(void *ctx, void *p)
 {
     poolp pool;
     block *lastfree;
@@ -1093,11 +1407,8 @@ PyObject_Free(void *p)
             unused_arena_objects = ao;
 
             /* Free the entire arena. */
-#ifdef ARENAS_USE_MMAP
-            munmap((void *)ao->address, ARENA_SIZE);
-#else
-            free((void *)ao->address);
-#endif
+            _PyObject_Arena.free(_PyObject_Arena.ctx,
+                                 (void *)ao->address, ARENA_SIZE);
 
             ao->address = 0;                        /* mark unassociated */
             --narenas_currently_allocated;
@@ -1206,7 +1517,7 @@ PyObject_Free(void *p)
 redirect:
 #endif
     /* We didn't allocate this address. */
-    free(p);
+    PyMem_Free(p);
 }
 
 /* realloc.  If p is NULL, this acts like malloc(nbytes).  Else if nbytes==0,
@@ -1214,9 +1525,8 @@
  * return a non-NULL result.
  */
 
-#undef PyObject_Realloc
-void *
-PyObject_Realloc(void *p, size_t nbytes)
+static void *
+_PyObject_Realloc(void *ctx, void *p, size_t nbytes)
 {
     void *bp;
     poolp pool;
@@ -1226,16 +1536,7 @@ PyObject_Realloc(void *p, size_t nbytes)
 #endif
 
     if (p == NULL)
-        return PyObject_Malloc(nbytes);
-
-    /*
-     * Limit ourselves to PY_SSIZE_T_MAX bytes to prevent security holes.
-     * Most python internals blindly use a signed Py_ssize_t to track
-     * things without checking for overflows or negatives.
-     * As size_t is unsigned, checking for nbytes < 0 is not required.
-     */
-    if (nbytes > PY_SSIZE_T_MAX)
-        return NULL;
+        return _PyObject_Malloc(ctx, nbytes);
 
 #ifdef WITH_VALGRIND
     /* Treat running_on_valgrind == -1 the same as 0 */
@@ -1263,10 +1564,10 @@ PyObject_Realloc(void *p, size_t nbytes)
             }
             size = nbytes;
         }
-        bp = PyObject_Malloc(nbytes);
+        bp = _PyObject_Malloc(ctx, nbytes);
         if (bp != NULL) {
             memcpy(bp, p, size);
-            PyObject_Free(p);
+            _PyObject_Free(ctx, p);
         }
         return bp;
     }
@@ -1284,14 +1585,14 @@ PyObject_Realloc(void *p, size_t nbytes)
      * at p.  Instead we punt:  let C continue to manage this block. */
     if (nbytes)
-        return realloc(p, nbytes);
+        return PyMem_Realloc(p, nbytes);
     /* C doesn't define the result of realloc(p, 0) (it may or may not
      * return NULL then), but Python's docs promise that nbytes==0 never
      * returns NULL.  We don't pass 0 to realloc(), to avoid that endcase
      * to begin with.  Even then, we can't be sure that realloc() won't
      * return NULL.
      */
-    bp = realloc(p, 1);
+    bp = PyMem_Realloc(p, 1);
     return bp ? bp : p;
 }
@@ -1301,24 +1602,6 @@
 /* pymalloc not enabled:  Redirect the entry points to malloc.  These will
  * only be used by extensions that are compiled with pymalloc enabled. */
 
-void *
-PyObject_Malloc(size_t n)
-{
-    return PyMem_MALLOC(n);
-}
-
-void *
-PyObject_Realloc(void *p, size_t n)
-{
-    return PyMem_REALLOC(p, n);
-}
-
-void
-PyObject_Free(void *p)
-{
-    PyMem_FREE(p);
-}
-
 Py_ssize_t
 _Py_GetAllocatedBlocks(void)
 {
@@ -1344,10 +1627,6 @@ Py_ssize_t
 #define DEADBYTE       0xDB    /* dead (newly freed) memory */
 #define FORBIDDENBYTE  0xFB    /* untouchable bytes at each end of a block */
 
-/* We tag each block with an API ID in order to tag API violations */
-#define _PYMALLOC_MEM_ID 'm'   /* the PyMem_Malloc() API */
-#define _PYMALLOC_OBJ_ID 'o'   /* The PyObject_Malloc() API */
-
 static size_t serialno = 0;     /* incremented on each debug {m,re}alloc */
 
 /* serialno is always incremented via calling this routine.  The point is
@@ -1430,58 +1709,18 @@ p[2*S: 2*S+n]
 p[2*S+n: 2*S+n+S]
     Copies of FORBIDDENBYTE.  Used to catch over- writes and reads.
 p[2*S+n+S: 2*S+n+2*S]
-    A serial number, incremented by 1 on each call to _PyObject_DebugMalloc
-    and _PyObject_DebugRealloc.
+    A serial number, incremented by 1 on each call to _PyMem_DebugMalloc
+    and _PyMem_DebugRealloc.
     This is a big-endian size_t.
     If "bad memory" is detected later, the serial number gives an
     excellent way to set a breakpoint on the next run, to capture the
     instant at which this block was passed out.
 */
 
-/* debug replacements for the PyMem_* memory API */
-void *
-_PyMem_DebugMalloc(size_t nbytes)
+static void *
+_PyMem_DebugMalloc(void *ctx, size_t nbytes)
 {
-    return _PyObject_DebugMallocApi(_PYMALLOC_MEM_ID, nbytes);
-}
-void *
-_PyMem_DebugRealloc(void *p, size_t nbytes)
-{
-    return _PyObject_DebugReallocApi(_PYMALLOC_MEM_ID, p, nbytes);
-}
-void
-_PyMem_DebugFree(void *p)
-{
-    _PyObject_DebugFreeApi(_PYMALLOC_MEM_ID, p);
-}
-
-/* debug replacements for the PyObject_* memory API */
-void *
-_PyObject_DebugMalloc(size_t nbytes)
-{
-    return _PyObject_DebugMallocApi(_PYMALLOC_OBJ_ID, nbytes);
-}
-void *
-_PyObject_DebugRealloc(void *p, size_t nbytes)
-{
-    return _PyObject_DebugReallocApi(_PYMALLOC_OBJ_ID, p, nbytes);
-}
-void
-_PyObject_DebugFree(void *p)
-{
-    _PyObject_DebugFreeApi(_PYMALLOC_OBJ_ID, p);
-}
-void
-_PyObject_DebugCheckAddress(const void *p)
-{
-    _PyObject_DebugCheckAddressApi(_PYMALLOC_OBJ_ID, p);
-}
-
-
-/* generic debug memory api, with an "id" to identify the API in use */
-void *
-_PyObject_DebugMallocApi(char id, size_t nbytes)
-{
+    debug_alloc_api_t *api = (debug_alloc_api_t *)ctx;
     uchar *p;           /* base address of malloc'ed block */
     uchar *tail;        /* p + 2*SST + nbytes == pointer to tail pad bytes */
     size_t total;       /* nbytes + 4*SST */
@@ -1492,14 +1731,14 @@ void *
         /* overflow:  can't represent total as a size_t */
         return NULL;
 
-    p = (uchar *)PyObject_Malloc(total);
+    p = (uchar *)api->malloc(api->ctx, total);
     if (p == NULL)
         return NULL;
 
     /* at p, write size (SST bytes), id (1 byte), pad (SST-1 bytes) */
     write_size_t(p, nbytes);
-    p[SST] = (uchar)id;
-    memset(p + SST + 1 , FORBIDDENBYTE, SST-1);
+    p[SST] = (uchar)api->api_id;
+    memset(p + SST + 1, FORBIDDENBYTE, SST-1);
 
     if (nbytes > 0)
         memset(p + 2*SST, CLEANBYTE, nbytes);
@@ -1517,25 +1756,27 @@ void *
    Then fills the original bytes with DEADBYTE.
    Then calls the underlying free.
 */
-void
-_PyObject_DebugFreeApi(char api, void *p)
+static void
+_PyMem_DebugFree(void *ctx, void *p)
 {
+    debug_alloc_api_t *api = (debug_alloc_api_t *)ctx;
     uchar *q = (uchar *)p - 2*SST;  /* address returned from malloc */
     size_t nbytes;
 
     if (p == NULL)
         return;
-    _PyObject_DebugCheckAddressApi(api, p);
+    _PyMem_DebugCheckAddress(api->api_id, p);
     nbytes = read_size_t(q);
     nbytes += 4*SST;
     if (nbytes > 0)
         memset(q, DEADBYTE, nbytes);
-    PyObject_Free(q);
+    api->free(api->ctx, q);
 }
 
-void *
-_PyObject_DebugReallocApi(char api, void *p, size_t nbytes)
+static void *
+_PyMem_DebugRealloc(void *ctx, void *p, size_t nbytes)
 {
+    debug_alloc_api_t *api = (debug_alloc_api_t *)ctx;
     uchar *q = (uchar *)p;
     uchar *tail;
     size_t total;       /* nbytes + 4*SST */
@@ -1543,9 +1784,9 @@ void *
     int i;
 
     if (p == NULL)
-        return _PyObject_DebugMallocApi(api, nbytes);
+        return _PyMem_DebugMalloc(ctx, nbytes);
 
-    _PyObject_DebugCheckAddressApi(api, p);
+    _PyMem_DebugCheckAddress(api->api_id, p);
     bumpserialno();
     original_nbytes = read_size_t(q - 2*SST);
     total = nbytes + 4*SST;
@@ -1562,12 +1803,12 @@ void *
      * case we didn't get the chance to mark the old memory with DEADBYTE,
      * but we live with that.
      */
-    q = (uchar *)PyObject_Realloc(q - 2*SST, total);
+    q = (uchar *)api->realloc(api->ctx, q - 2*SST, total);
     if (q == NULL)
         return NULL;
 
     write_size_t(q, nbytes);
-    assert(q[SST] == (uchar)api);
+    assert(q[SST] == (uchar)api->api_id);
     for (i = 1; i < SST; ++i)
         assert(q[SST + i] == FORBIDDENBYTE);
     q += 2*SST;
@@ -1589,8 +1830,8 @@ void *
  * and call Py_FatalError to kill the program.
  * The API id, is also checked.
  */
- void
-_PyObject_DebugCheckAddressApi(char api, const void *p)
+static void
+_PyMem_DebugCheckAddress(char api, const void *p)
 {
     const uchar *q = (const uchar *)p;
     char msgbuf[64];
@@ -1642,7 +1883,7 @@ error:
 }
 
 /* Display info to stderr about the memory block at p. */
-void
+static void
 _PyObject_DebugDumpAddress(const void *p)
 {
     const uchar *q = (const uchar *)p;
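
Example usage (not part of the patch): the sketch below shows how an embedding application could use the proposed PyMem_GetAllocators() / PyMem_SetAllocators() API to wrap the default allocator and count how many allocations Python performs through the PyMem family. The hook_t, hook_malloc, hook_realloc and hook_free names are illustrative only and are assumptions of this example, not part of the patch; the hooks have to be installed before Python starts allocating through PyMem_Malloc(), typically before Py_Initialize(). The same pattern applies to PyObject_SetAllocators() and, for arena allocation, PyObject_SetArenaAllocators().

#include <Python.h>
#include <stdio.h>

/* Illustrative wrapper around the default PyMem allocator: counts the
   number of successful PyMem_Malloc() calls.  Assumes the API added by
   this patch (PyMem_GetAllocators / PyMem_SetAllocators). */
typedef struct {
    void *orig_ctx;
    void* (*orig_malloc) (void *ctx, size_t size);
    void* (*orig_realloc) (void *ctx, void *ptr, size_t size);
    void (*orig_free) (void *ctx, void *ptr);
    size_t nalloc;
} hook_t;

static void *
hook_malloc(void *ctx, size_t size)
{
    hook_t *hook = (hook_t *)ctx;
    void *ptr = hook->orig_malloc(hook->orig_ctx, size);
    if (ptr != NULL)
        hook->nalloc++;
    return ptr;
}

static void *
hook_realloc(void *ctx, void *ptr, size_t size)
{
    hook_t *hook = (hook_t *)ctx;
    /* realloc(ctx, NULL, size) behaves like malloc; it is not counted
       here to keep the sketch short. */
    return hook->orig_realloc(hook->orig_ctx, ptr, size);
}

static void
hook_free(void *ctx, void *ptr)
{
    hook_t *hook = (hook_t *)ctx;
    hook->orig_free(hook->orig_ctx, ptr);
}

static hook_t hook;

int
main(void)
{
    /* Save the previous allocator and install the counting hooks before
       Python allocates any memory through the PyMem API. */
    PyMem_GetAllocators(&hook.orig_ctx, &hook.orig_malloc,
                        &hook.orig_realloc, &hook.orig_free);
    PyMem_SetAllocators(&hook, hook_malloc, hook_realloc, hook_free);

    Py_Initialize();
    Py_Finalize();

    printf("PyMem_Malloc() calls served: %zu\n", hook.nalloc);
    return 0;
}

Note that Get/Set exchange whatever functions are currently installed, so in a PYMALLOC_DEBUG build such a wrapper ends up stacked on top of the debug allocator installed by _PyMem_InitDefaultHook().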