@@ -909,6 +909,7 @@ static int running_on_valgrind = -1;
 */
 #define ARENA_BITS              18
 #define ARENA_SIZE              (1 << ARENA_BITS)    /* 256 KiB */
+#define ARENA_SIZE_MASK         (ARENA_SIZE - 1)
 
 #ifdef WITH_MEMORY_LIMITS
 #define MAX_ARENAS              (SMALL_MEMORY_LIMIT / ARENA_SIZE)
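Since ARENA_SIZE is a power of two, the new ARENA_SIZE_MASK isolates an address's byte offset within its 256 KiB arena-sized region. A minimal standalone sketch of the masking (the `main` harness and the sample address are illustrative, not part of the patch):

    #include <stdint.h>
    #include <stdio.h>

    #define ARENA_BITS       18
    #define ARENA_SIZE       (1 << ARENA_BITS)   /* 256 KiB */
    #define ARENA_SIZE_MASK  (ARENA_SIZE - 1)

    int main(void)
    {
        uintptr_t p = (uintptr_t)0x7f55deadbeef;           /* illustrative address */
        uintptr_t offset = p & ARENA_SIZE_MASK;            /* offset inside region */
        uintptr_t base = p & ~(uintptr_t)ARENA_SIZE_MASK;  /* region base address */
        printf("base=%#lx offset=%#lx\n", (unsigned long)base, (unsigned long)offset);
        return 0;
    }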
@@ -1235,18 +1236,21 @@ _Py_GetAllocatedBlocks(void)
 }
 
 /*==========================================================================*/
-/* radix tree for tracking arena coverage
+/* radix tree for tracking arena usage
 
-   key format (2^20 arena size)
-     15 -> MAP1
-     15 -> MAP2
-     14 -> MAP3
+   bit allocation for keys (2^20 arena size)
+
+   64-bit pointers:
+     16 -> ignored (BITS - PHYSICAL_BITS)
+     10 -> MAP_TOP
+     10 -> MAP_MID
+      8 -> MAP_BOT
      20 -> ideal aligned arena
    ----
      64
 
-   key format (2^20 arena size)
-     12 -> MAP3
+   32-bit pointers:
+     12 -> MAP_BOT
      20 -> ideal aligned arena
    ----
      32
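To make the 64-bit table concrete, here is a sketch that splits a pointer into its radix tree indices under the comment's example layout. Note the table is an example with a 2^20 arena size while this file defines ARENA_BITS as 18; the `EX_*` names, `main` harness, and sample address are illustrative only, and the patch derives the real field widths from ARENA_BITS and PHYSICAL_BITS:

    #include <stdint.h>
    #include <stdio.h>

    /* Example layout from the comment: 16 ignored, 10 top, 10 mid, 8 bot, 20 arena. */
    #define EX_ARENA_BITS 20
    #define EX_BOT_BITS    8
    #define EX_MID_BITS   10
    #define EX_TOP_BITS   10

    int main(void)
    {
        uintptr_t p = (uintptr_t)0x00007f0012345678;  /* illustrative address */
        uintptr_t key = p >> EX_ARENA_BITS;           /* drop the in-arena offset */
        unsigned bot = key & ((1u << EX_BOT_BITS) - 1);
        unsigned mid = (key >> EX_BOT_BITS) & ((1u << EX_MID_BITS) - 1);
        unsigned top = (key >> (EX_BOT_BITS + EX_MID_BITS)) & ((1u << EX_TOP_BITS) - 1);
        /* The remaining 16 high bits are ignored; the code asserts via
         * HIGH_BITS() that they are the same for every valid pointer. */
        printf("top=%u mid=%u bot=%u\n", top, mid, bot);
        return 0;
    }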
@@ -1264,11 +1268,9 @@ _Py_GetAllocatedBlocks(void)
 */
 #define PHYSICAL_BITS 48
 
-/* need more layers of radix tree */
+/* use the top and mid layers of the radix tree */
 #define USE_INTERIOR_NODES
 
-#define arena_root_t arena_map1_t
-
 #elif SIZEOF_VOID_P == 4
 
 #define BITS 32
@@ -1281,130 +1283,123 @@ _Py_GetAllocatedBlocks(void)
 
 #endif /* SIZEOF_VOID_P */
 
-#define ARENA_MASK (ARENA_SIZE - 1)
-
 /* arena_coverage_t members require this to be true */
 #if ARENA_BITS >= 32
 # error "arena size must be < 2^32"
 #endif
 
 #ifdef USE_INTERIOR_NODES
-/* bits used for MAP1 and MAP2 nodes */
+/* number of bits used for MAP_TOP and MAP_MID nodes */
 #define INTERIOR_BITS ((PHYSICAL_BITS - ARENA_BITS + 2) / 3)
 #else
 #define INTERIOR_BITS 0
 #endif
 
-#define MAP1_BITS   INTERIOR_BITS
-#define MAP1_LENGTH (1 << MAP1_BITS)
-#define MAP1_MASK   (MAP3_LENGTH - 1)
+#define MAP_TOP_BITS   INTERIOR_BITS
+#define MAP_TOP_LENGTH (1 << MAP_TOP_BITS)
+#define MAP_TOP_MASK   (MAP_TOP_LENGTH - 1)
 
-#define MAP2_BITS   INTERIOR_BITS
-#define MAP2_LENGTH (1 << MAP2_BITS)
-#define MAP2_MASK   (MAP2_LENGTH - 1)
+#define MAP_MID_BITS   INTERIOR_BITS
+#define MAP_MID_LENGTH (1 << MAP_MID_BITS)
+#define MAP_MID_MASK   (MAP_MID_LENGTH - 1)
 
-#define MAP3_BITS   (PHYSICAL_BITS - ARENA_BITS - 2*INTERIOR_BITS)
-#define MAP3_LENGTH (1 << MAP3_BITS)
-#define MAP3_MASK   (MAP3_LENGTH - 1)
+#define MAP_BOT_BITS   (PHYSICAL_BITS - ARENA_BITS - 2*INTERIOR_BITS)
+#define MAP_BOT_LENGTH (1 << MAP_BOT_BITS)
+#define MAP_BOT_MASK   (MAP_BOT_LENGTH - 1)
 
-#define MAP3_SHIFT ARENA_BITS
-#define MAP2_SHIFT (MAP3_BITS + MAP3_SHIFT)
-#define MAP1_SHIFT (MAP2_BITS + MAP2_SHIFT)
+#define MAP_BOT_SHIFT ARENA_BITS
+#define MAP_MID_SHIFT (MAP_BOT_BITS + MAP_BOT_SHIFT)
+#define MAP_TOP_SHIFT (MAP_MID_BITS + MAP_MID_SHIFT)
 
 #define AS_UINT(p) ((uintptr_t)(p))
-#define MAP3_INDEX(p) ((AS_UINT(p) >> MAP3_SHIFT) & MAP3_MASK)
-#define MAP2_INDEX(p) ((AS_UINT(p) >> MAP2_SHIFT) & MAP2_MASK)
-#define MAP1_INDEX(p) ((AS_UINT(p) >> MAP1_SHIFT) & MAP1_MASK)
+#define MAP_BOT_INDEX(p) ((AS_UINT(p) >> MAP_BOT_SHIFT) & MAP_BOT_MASK)
+#define MAP_MID_INDEX(p) ((AS_UINT(p) >> MAP_MID_SHIFT) & MAP_MID_MASK)
+#define MAP_TOP_INDEX(p) ((AS_UINT(p) >> MAP_TOP_SHIFT) & MAP_TOP_MASK)
 
 #if PHYSICAL_BITS > BITS
+/* Return the non-physical bits of the pointer.  These should be the same
+ * for all valid pointers if PHYSICAL_BITS is set correctly. */
 #define HIGH_BITS(p) (AS_UINT(p) >> PHYSICAL_BITS)
 #else
 #define HIGH_BITS(p) 0
 #endif
 
-/* See arena_map_mark_used() for the meaning of these members. */
+/* This is the leaf of the radix tree.  See arena_map_mark_used() for the
+ * meaning of these members. */
 typedef struct {
     int32_t tail_hi;
     int32_t tail_lo;
 } arena_coverage_t;
 
-typedef struct arena_map3 {
+typedef struct arena_map_bot {
     /* The members tail_hi and tail_lo are accessed together.  So, it
      * is better to have them as an array of structs, rather than two
      * arrays.
      */
-    arena_coverage_t arenas[MAP3_LENGTH];
-} arena_map3_t;
+    arena_coverage_t arenas[MAP_BOT_LENGTH];
+} arena_map_bot_t;
 
 #ifdef USE_INTERIOR_NODES
-typedef struct arena_map2 {
-    struct arena_map3 *ptrs[MAP2_LENGTH];
-} arena_map2_t;
+typedef struct arena_map_mid {
+    struct arena_map_bot *ptrs[MAP_MID_LENGTH];
+} arena_map_mid_t;
 
-typedef struct arena_map1 {
-    struct arena_map2 *ptrs[MAP1_LENGTH];
-} arena_map1_t;
+typedef struct arena_map_top {
+    struct arena_map_mid *ptrs[MAP_TOP_LENGTH];
+} arena_map_top_t;
 #endif
 
-/* The root of tree (MAP1) and contains all MAP2 nodes.  Note that by
- * initializing like this, the memory should be in the BSS.  The OS will
- * only map pages as the MAP2 nodes get used (OS pages are demand loaded
- * as needed).
+/* The root of the radix tree.  Note that by initializing like this, the
+ * memory should be in the BSS.  The OS will only map pages in as the
+ * MAP_MID nodes get used (OS pages are demand loaded as needed).
 */
 #ifdef USE_INTERIOR_NODES
-static arena_map1_t arena_map_root;
-
-/* number of used radix tree nodes */
-static int arena_map1_count;
-static int arena_map2_count;
+static arena_map_top_t arena_map_root;
+/* accounting for the number of used interior nodes */
+static int arena_map_top_count;
+static int arena_map_mid_count;
+#else
+static arena_map_bot_t arena_map_root;
+#endif
 
-/* Return a pointer to a MAP3 node, return NULL if it doesn't exist
- * or it cannot be created */
-static arena_map3_t *
+/* Return a pointer to a bottom tree node, return NULL if it doesn't exist
+ * or it cannot be created */
+static arena_map_bot_t *
 arena_map_get(block *p, int create)
 {
+#ifdef USE_INTERIOR_NODES
     /* sanity check that PHYSICAL_BITS is correct */
     assert(HIGH_BITS(p) == HIGH_BITS(&arena_map_root));
-    int i1 = MAP1_INDEX(p);
+    int i1 = MAP_TOP_INDEX(p);
     if (arena_map_root.ptrs[i1] == NULL) {
         if (!create) {
             return NULL;
         }
-        arena_map2_t *n = PyMem_RawCalloc(1, sizeof(arena_map2_t));
+        arena_map_mid_t *n = PyMem_RawCalloc(1, sizeof(arena_map_mid_t));
         if (n == NULL) {
             return NULL;
         }
        arena_map_root.ptrs[i1] = n;
-        arena_map1_count++;
+        arena_map_top_count++;
     }
-    int i2 = MAP2_INDEX(p);
+    int i2 = MAP_MID_INDEX(p);
     if (arena_map_root.ptrs[i1]->ptrs[i2] == NULL) {
         if (!create) {
             return NULL;
         }
-        arena_map3_t *n = PyMem_RawCalloc(1, sizeof(arena_map3_t));
+        arena_map_bot_t *n = PyMem_RawCalloc(1, sizeof(arena_map_bot_t));
         if (n == NULL) {
             return NULL;
         }
         arena_map_root.ptrs[i1]->ptrs[i2] = n;
-        arena_map2_count++;
+        arena_map_mid_count++;
     }
     return arena_map_root.ptrs[i1]->ptrs[i2];
-}
-
-#else /* !USE_INTERIOR_NODES */
-static arena_map3_t arena_map_root;
-
-/* Return a pointer to a MAP3 node, return NULL if it doesn't exist
- * or it cannot be created */
-static arena_map3_t *
-arena_map_get(block *p, int create)
-{
+#else
     return &arena_map_root;
-}
-
 #endif
+}
 
 
 /* The radix tree only tracks arenas.  So, for 16 MiB arenas, we throw
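For the constants in this file (ARENA_BITS == 18, PHYSICAL_BITS == 48), every level of the tree ends up with 1024 entries, so each node is 8 KiB: the static root costs one small chunk of BSS and the interior nodes are only calloc'd on demand. A compile-time sketch of that arithmetic (C11; the `EX_*` names are recomputed here for illustration, not taken from the patch):

    enum {
        EX_ARENA_BITS    = 18,
        EX_PHYSICAL_BITS = 48,
        EX_INTERIOR_BITS = (EX_PHYSICAL_BITS - EX_ARENA_BITS + 2) / 3,
        EX_BOT_BITS      = EX_PHYSICAL_BITS - EX_ARENA_BITS - 2 * EX_INTERIOR_BITS,
    };

    _Static_assert(EX_INTERIOR_BITS == 10, "top and mid nodes have 1024 entries");
    _Static_assert(EX_BOT_BITS == 10, "bot nodes have 1024 entries");
    /* 1024 pointers, or 1024 8-byte arena_coverage_t entries, is 8 KiB per
     * node on a 64-bit build; the root therefore costs 8 KiB of BSS. */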
@@ -1436,22 +1431,22 @@ arena_map_mark_used(uintptr_t arena_base, int is_used)
 {
     /* sanity check that PHYSICAL_BITS is correct */
     assert(HIGH_BITS(arena_base) == HIGH_BITS(&arena_map_root));
-    arena_map3_t *n_hi = arena_map_get((block *)arena_base, is_used);
+    arena_map_bot_t *n_hi = arena_map_get((block *)arena_base, is_used);
     if (n_hi == NULL) {
         assert(is_used);  /* otherwise node should already exist */
         return 0;  /* failed to allocate space for node */
     }
-    int i3 = MAP3_INDEX((block *)arena_base);
-    int32_t tail = (int32_t)(arena_base & ARENA_MASK);
+    int i3 = MAP_BOT_INDEX((block *)arena_base);
+    int32_t tail = (int32_t)(arena_base & ARENA_SIZE_MASK);
     if (tail == 0) {
         /* is ideal arena address */
         n_hi->arenas[i3].tail_hi = is_used ? -1 : 0;
     }
     else {
         /* arena_base address is not ideal (aligned to arena size) and
-         * so it potentially covers two MAP3 nodes.  Get the MAP3 node
-         * for the next arena.  Note that it might be in different MAP1
-         * and MAP2 nodes as well so we need to call arena_map_get()
+         * so it potentially covers two MAP_BOT nodes.  Get the MAP_BOT node
+         * for the next arena.  Note that it might be in different MAP_TOP
+         * and MAP_MID nodes as well so we need to call arena_map_get()
          * again (do the full tree traversal).
         */
         n_hi->arenas[i3].tail_hi = is_used ? tail : 0;
@@ -1461,13 +1456,13 @@ arena_map_mark_used(uintptr_t arena_base, int is_used)
          * must overflow to 0.  However, that would mean arena_base was
          * "ideal" and we should not be in this case. */
         assert(arena_base < arena_base_next);
-        arena_map3_t *n_lo = arena_map_get((block *)arena_base_next, is_used);
+        arena_map_bot_t *n_lo = arena_map_get((block *)arena_base_next, is_used);
         if (n_lo == NULL) {
             assert(is_used);  /* otherwise should already exist */
             n_hi->arenas[i3].tail_hi = 0;
             return 0;  /* failed to allocate space for node */
         }
-        int i3_next = MAP3_INDEX(arena_base_next);
+        int i3_next = MAP_BOT_INDEX(arena_base_next);
         n_lo->arenas[i3_next].tail_lo = is_used ? tail : 0;
     }
     return 1;
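A worked example of what arena_map_mark_used() stores, with illustrative numbers (ARENA_SIZE == 0x40000, matching ARENA_BITS == 18):

    /* Suppose an arena is allocated at arena_base == ...0x10000, so
     * tail == arena_base & ARENA_SIZE_MASK == 0x10000 (not ideally aligned).
     *
     *   leaf for the region containing arena_base:   tail_hi = 0x10000
     *       (offsets >= 0x10000 in this region belong to the arena)
     *   leaf for the next region (arena_base_next):  tail_lo = 0x10000
     *       (offsets <  0x10000 in that region belong to the arena)
     *
     * An ideally aligned arena instead stores tail_hi = -1 and never
     * touches a second leaf; unmarking (is_used == 0) resets the fields
     * to 0. */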
@@ -1476,17 +1471,17 @@ arena_map_mark_used(uintptr_t arena_base, int is_used)
 /* Return true if 'p' is a pointer inside an obmalloc arena.
  * _PyObject_Free() calls this so it needs to be very fast. */
 static int
-arena_map_is_marked(block *p)
+arena_map_is_used(block *p)
 {
-    arena_map3_t *n = arena_map_get(p, 0);
+    arena_map_bot_t *n = arena_map_get(p, 0);
     if (n == NULL) {
         return 0;
     }
-    int i3 = MAP3_INDEX(p);
+    int i3 = MAP_BOT_INDEX(p);
     /* ARENA_BITS must be < 32 so that the tail is a non-negative int32_t. */
     int32_t hi = n->arenas[i3].tail_hi;
     int32_t lo = n->arenas[i3].tail_lo;
-    int32_t tail = (int32_t)(AS_UINT(p) & ARENA_MASK);
+    int32_t tail = (int32_t)(AS_UINT(p) & ARENA_SIZE_MASK);
     return (tail < lo) || (tail >= hi && hi != 0);
 }
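The return expression is the subtle part. Here is a standalone sketch that reimplements the test with the tail_hi/tail_lo values from the example above (the `ex_covered` helper, the mask constant, and the addresses are illustrative; the real code reads hi and lo out of the radix tree leaf):

    #include <assert.h>
    #include <stdint.h>

    #define EX_ARENA_SIZE_MASK 0x3ffff  /* ARENA_SIZE == 256 KiB */

    static int ex_covered(uintptr_t p, int32_t hi, int32_t lo)
    {
        int32_t tail = (int32_t)(p & EX_ARENA_SIZE_MASK);
        return (tail < lo) || (tail >= hi && hi != 0);
    }

    int main(void)
    {
        /* Unaligned arena starting at offset 0x10000 of region A and
         * ending at offset 0x10000 of the following region B. */
        assert( ex_covered(0x7f5500010000, 0x10000, 0));  /* region A, inside */
        assert(!ex_covered(0x7f550000ffff, 0x10000, 0));  /* region A, before arena */
        assert( ex_covered(0x7f5500040000, 0, 0x10000));  /* region B, inside */
        assert(!ex_covered(0x7f5500050000, 0, 0x10000));  /* region B, past arena */
        /* Ideally aligned arena: tail_hi == -1 covers the whole region. */
        assert( ex_covered(0x7f5500080123, -1, 0));
        return 0;
    }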
@@ -1606,7 +1601,7 @@ new_arena(void)
 static bool
 address_in_range(void *p, poolp pool)
 {
-    return arena_map_is_marked(p);
+    return arena_map_is_used(p);
 }
@@ -1954,7 +1949,7 @@ insert_to_freepool(poolp pool)
     ao->nextarena = unused_arena_objects;
     unused_arena_objects = ao;
 
-    /* mark arena as not under control of obmalloc */
+    /* mark arena region as not under control of obmalloc */
     arena_map_mark_used(ao->address, 0);
 
     /* Free the entire arena. */
@@ -2200,8 +2195,6 @@ _PyObject_Realloc(void *ctx, void *ptr, size_t nbytes)
     return PyMem_RawRealloc(ptr, nbytes);
 }
 
-
-
 #else   /* ! WITH_PYMALLOC */
 
 /*==========================================================================*/
@@ -2903,8 +2896,8 @@ _PyObject_DebugMallocStats(FILE *out)
     (void)printone(out, "# arenas highwater mark", narenas_highwater);
     (void)printone(out, "# arenas allocated current", narenas);
 #ifdef USE_INTERIOR_NODES
-    (void)printone(out, "# arena map level 1 nodes", arena_map1_count);
-    (void)printone(out, "# arena map level 2 nodes", arena_map2_count);
+    (void)printone(out, "# arena map top nodes", arena_map_top_count);
+    (void)printone(out, "# arena map mid nodes", arena_map_mid_count);
     fputc('\n', out);
 #endif
@@ -2930,5 +2923,4 @@ _PyObject_DebugMallocStats(FILE *out)
     return 1;
 }
 
-
 #endif /* #ifdef WITH_PYMALLOC */