8000 Allocate `rb_shape_tree` statically · ruby/ruby@7c22330 · GitHub
[go: up one dir, main page]

Skip to content

Commit 7c22330

Browse files
committed
Allocate rb_shape_tree statically
There is no point allocating it during init, it adds a useless indirection.
1 parent de4b910 commit 7c22330

File tree

4 files changed

+47
-50
lines changed

4 files changed

+47
-50
lines changed

shape.c

Lines changed: 41 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -48,8 +48,8 @@ redblack_left(redblack_node_t *node)
4848
return LEAF;
4949
}
5050
else {
51-
RUBY_ASSERT(node->l < rb_shape_tree->cache_size);
52-
redblack_node_t *left = &rb_shape_tree->shape_cache[node->l - 1];
51+
RUBY_ASSERT(node->l < rb_shape_tree.cache_size);
52+
redblack_node_t *left = &rb_shape_tree.shape_cache[node->l - 1];
5353
return left;
5454
}
5555
}
@@ -61,8 +61,8 @@ redblack_right(redblack_node_t *node)
6161
return LEAF;
6262
}
6363
else {
64-
RUBY_ASSERT(node->r < rb_shape_tree->cache_size);
65-
redblack_node_t *right = &rb_shape_tree->shape_cache[node->r - 1];
64+
RUBY_ASSERT(node->r < rb_shape_tree.cache_size);
65+
redblack_node_t *right = &rb_shape_tree.shape_cache[node->r - 1];
6666
return right;
6767
}
6868
}
@@ -120,7 +120,7 @@ redblack_id_for(redblack_node_t *node)
120120
return 0;
121121
}
122122
else {
123-
redblack_node_t *redblack_nodes = rb_shape_tree->shape_cache;
123+
redblack_node_t *redblack_nodes = rb_shape_tree.shape_cache;
124124
redblack_id_t id = (redblack_id_t)(node - redblack_nodes);
125125
return id + 1;
126126
}
@@ -129,16 +129,16 @@ redblack_id_for(redblack_node_t *node)
129129
static redblack_node_t *
130130
redblack_new(char color, ID key, rb_shape_t *value, redblack_node_t *left, redblack_node_t *right)
131131
{
132-
if (rb_shape_tree->cache_size + 1 >= REDBLACK_CACHE_SIZE) {
132+
if (rb_shape_tree.cache_size + 1 >= REDBLACK_CACHE_SIZE) {
133133
// We're out of cache, just quit
134134
return LEAF;
135135
}
136136

137137
RUBY_ASSERT(left == LEAF || left->key < key);
138138
RUBY_ASSERT(right == LEAF || right->key > key);
139139

140-
redblack_node_t *redblack_nodes = rb_shape_tree->shape_cache;
141-
redblack_node_t *node = &redblack_nodes[(rb_shape_tree->cache_size)++];
140+
redblack_node_t *redblack_nodes = rb_shape_tree.shape_cache;
141+
redblack_node_t *node = &redblack_nodes[(rb_shape_tree.cache_size)++];
142142
node->key = key;
143143
node->value = (rb_shape_t *)((uintptr_t)value | color);
144144
node->l = redblack_id_for(left);
@@ -288,20 +288,20 @@ redblack_insert(redblack_node_t *tree, ID key, rb_shape_t *value)
288288
}
289289
#endif
290290

291-
rb_shape_tree_t *rb_shape_tree = NULL;
291+
rb_shape_tree_t rb_shape_tree = { 0 };
292292
static VALUE shape_tree_obj = Qfalse;
293293

294294
rb_shape_t *
295295
rb_shape_get_root_shape(void)
296296
{
297-
return rb_shape_tree->root_shape;
297+
return rb_shape_tree.root_shape;
298298
}
299299

300300
static void
301301
shape_tree_mark(void *data)
302302
{
303303
rb_shape_t *cursor = rb_shape_get_root_shape();
304-
rb_shape_t *end = RSHAPE(rb_shape_tree->next_shape_id - 1);
304+
rb_shape_t *end = RSHAPE(rb_shape_tree.next_shape_id - 1);
305305
while (cursor < end) {
306306
if (cursor->edges && !SINGLE_CHILD_P(cursor->edges)) {
307307
rb_gc_mark_movable(cursor->edges);
@@ -314,7 +314,7 @@ static void
314314
shape_tree_compact(void *data)
315315
{
316316
rb_shape_t *cursor = rb_shape_get_root_shape();
317-
rb_shape_t *end = RSHAPE(rb_shape_tree->next_shape_id - 1);
317+
rb_shape_t *end = RSHAPE(rb_shape_tree.next_shape_id - 1);
318318
while (cursor < end) {
319319
if (cursor->edges && !SINGLE_CHILD_P(cursor->edges)) {
320320
cursor->edges = rb_gc_location(cursor->edges);
@@ -326,7 +326,7 @@ shape_tree_compact(void *data)
326326
static size_t
327327
shape_tree_memsize(const void *data)
328328
{
329-
return rb_shape_tree->cache_size * sizeof(redblack_node_t);
329+
return rb_shape_tree.cache_size * sizeof(redblack_node_t);
330330
}
331331

332332
static const rb_data_type_t shape_tree_type = {
@@ -349,14 +349,14 @@ static inline shape_id_t
349349
raw_shape_id(rb_shape_t *shape)
35035 F438 0
{
351351
RUBY_ASSERT(shape);
352-
return (shape_id_t)(shape - rb_shape_tree->shape_list);
352+
return (shape_id_t)(shape - rb_shape_tree.shape_list);
353353
}
354354

355355
static inline shape_id_t
356356
shape_id(rb_shape_t *shape, shape_id_t previous_shape_id)
357357
{
358358
RUBY_ASSERT(shape);
359-
shape_id_t raw_id = (shape_id_t)(shape - rb_shape_tree->shape_list);
359+
shape_id_t raw_id = (shape_id_t)(shape - rb_shape_tree.shape_list);
360360
return raw_id | (previous_shape_id & SHAPE_ID_FLAGS_MASK);
361361
}
362362

@@ -373,7 +373,7 @@ rb_shape_each_shape_id(each_shape_callback callback, void *data)
373373
{
374374
rb_shape_t *start = rb_shape_get_root_shape();
375375
rb_shape_t *cursor = start;
376-
rb_shape_t *end = RSHAPE(rb_shape_tree->next_shape_id);
376+
rb_shape_t *end = RSHAPE(rb_shape_tree.next_shape_id);
377377
while (cursor < end) {
378378
callback((shape_id_t)(cursor - start), data);
379379
cursor += 1;
@@ -414,14 +414,14 @@ rb_shape_depth(shape_id_t shape_id)
414414
static rb_shape_t *
415415
shape_alloc(void)
416416
{
417-
shape_id_t shape_id = (shape_id_t)RUBY_ATOMIC_FETCH_ADD(rb_shape_tree->next_shape_id, 1);
417+
shape_id_t shape_id = (shape_id_t)RUBY_ATOMIC_FETCH_ADD(rb_shape_tree.next_shape_id, 1);
418418

419419
if (shape_id == (MAX_SHAPE_ID + 1)) {
420420
// TODO: Make an OutOfShapesError ??
421421
rb_bug("Out of shapes");
422422
}
423423

424-
return &rb_shape_tree->shape_list[shape_id];
424+
return &rb_shape_tree.shape_list[shape_id];
425425
}
426426

427427
static rb_shape_t *
@@ -485,7 +485,7 @@ redblack_cache_ancestors(rb_shape_t *shape)
485485
static attr_index_t
486486
shape_grow_capa(attr_index_t current_capa)
487487
{
488-
const attr_index_t *capacities = rb_shape_tree->capacities;
488+
const attr_index_t *capacities = rb_shape_tree.capacities;
489489

490490
// First try to use the next size that will be embeddable in a larger object slot.
491491
attr_index_t capa;
@@ -564,7 +564,7 @@ get_next_shape_internal_atomic(rb_shape_t *shape, ID id, enum shape_type shape_t
564564
if (!res) {
565565
// If we're not allowed to create a new variation, of if we're out of shapes
566566
// we return TOO_COMPLEX_SHAPE.
567-
if (!new_variations_allowed || rb_shape_tree->next_shape_id > MAX_SHAPE_ID) {
567+
if (!new_variations_allowed || rb_shape_tree.next_shape_id > MAX_SHAPE_ID) {
568568
res = NULL;
569569
}
570570
else {
@@ -640,7 +640,7 @@ get_next_shape_internal(rb_shape_t *shape, ID id, enum shape_type shape_type, bo
640640
if (!res) {
641641
// If we're not allowed to create a new variation, of if we're out of shapes
642642
// we return TOO_COMPLEX_SHAPE.
643-
if (!new_variations_allowed || rb_shape_tree->next_shape_id > MAX_SHAPE_ID) {
643+
if (!new_variations_allowed || rb_shape_tree.next_shape_id > MAX_SHAPE_ID) {
644644
res = NULL;
645645
}
646646
else {
@@ -1238,7 +1238,7 @@ rb_shape_verify_consistency(VALUE obj, shape_id_t shape_id)
12381238

12391239
uint8_t flags_heap_index = rb_shape_heap_index(shape_id);
12401240
if (RB_TYPE_P(obj, T_OBJECT)) {
1241-
size_t shape_id_slot_size = rb_shape_tree->capacities[flags_heap_index - 1] * sizeof(VALUE) + sizeof(struct RBasic);
1241+
size_t shape_id_slot_size = rb_shape_tree.capacities[flags_heap_index - 1] * sizeof(VALUE) + sizeof(struct RBasic);
12421242
size_t actual_slot_size = rb_gc_obj_slot_size(obj);
12431243

12441244
if (shape_id_slot_size != actual_slot_size) {
@@ -1388,15 +1388,15 @@ rb_shape_root_shape(VALUE self)
13881388
static VALUE
13891389
rb_shape_shapes_available(VALUE self)
13901390
{
1391-
return INT2NUM(MAX_SHAPE_ID - (rb_shape_tree->next_shape_id - 1));
1391+
return INT2NUM(MAX_SHAPE_ID - (rb_shape_tree.next_shape_id - 1));
13921392
}
13931393

13941394
static VALUE
13951395
rb_shape_exhaust(int argc, VALUE *argv, VALUE self)
13961396
{
13971397
rb_check_arity(argc, 0, 1);
13981398
int offset = argc == 1 ? NUM2INT(argv[0]) : 0;
1399-
rb_shape_tree->next_shape_id = MAX_SHAPE_ID - offset + 1;
1399+
rb_shape_tree.next_shape_id = MAX_SHAPE_ID - offset + 1;
14001400
return Qnil;
14011401
}
14021402

@@ -1452,7 +1452,7 @@ static VALUE
14521452
rb_shape_find_by_id(VALUE mod, VALUE id)
14531453
{
14541454
shape_id_t shape_id = NUM2UINT(id);
1455-
if (shape_id >= rb_shape_tree->next_shape_id) {
1455+
if (shape_id >= rb_shape_tree.next_shape_id) {
14561456
rb_raise(rb_eArgError, "Shape ID %d is out of bounds\n", shape_id);
14571457
}
14581458
return shape_id_t_to_rb_cShape(shape_id);
@@ -1466,8 +1466,6 @@ rb_shape_find_by_id(VALUE mod, VALUE id)
14661466
void
14671467
Init_default_shapes(void)
14681468
{
1469-
rb_shape_tree = xcalloc(1, sizeof(rb_shape_tree_t));
1470-
14711469
size_t *heap_sizes = rb_gc_heap_sizes();
14721470
size_t heaps_count = 0;
14731471
while (heap_sizes[heaps_count]) {
@@ -1479,23 +1477,23 @@ Init_default_shapes(void)
14791477
for (index = 0; index < heaps_count; index++) {
14801478
capacities[index] = (heap_sizes[index] - sizeof(struct RBasic)) / sizeof(VALUE);
14811479
}
1482-
rb_shape_tree->capacities = capacities;
1480+
rb_shape_tree.capacities = capacities;
14831481

14841482
#ifdef HAVE_MMAP
14851483
size_t shape_list_mmap_size = rb_size_mul_or_raise(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t), rb_eRuntimeError);
1486-
rb_shape_tree->shape_list = (rb_shape_t *)mmap(NULL, shape_list_mmap_size,
1484+
rb_shape_tree.shape_list = (rb_shape_t *)mmap(NULL, shape_list_mmap_size,
14871485
PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
1488-
if (rb_shape_tree->shape_list == MAP_FAILED) {
1489-
rb_shape_tree->shape_list = 0;
1486+
if (rb_shape_tree.shape_list == MAP_FAILED) {
1487+
rb_shape_tree.shape_list = 0;
14901488
}
14911489
else {
1492-
ruby_annotate_mmap(rb_shape_tree->shape_list, shape_list_mmap_size, "Ruby:Init_default_shapes:shape_list");
1490+
ruby_annotate_mmap(rb_shape_tree.shape_list, shape_list_mmap_size, "Ruby:Init_default_shapes:shape_list");
14931491
}
14941492
#else
1495-
rb_shape_tree->shape_list = xcalloc(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t));
1493+
rb_shape_tree.shape_list = xcalloc(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t));
14961494
#endif
14971495

1498-
if (!rb_shape_tree->shape_list) {
1496+
if (!rb_shape_tree.shape_list) {
14991497
rb_memerror();
15001498
}
15011499

@@ -1505,19 +1503,19 @@ Init_default_shapes(void)
15051503

15061504
#ifdef HAVE_MMAP
15071505
size_t shape_cache_mmap_size = rb_size_mul_or_raise(REDBLACK_CACHE_SIZE, sizeof(redblack_node_t), rb_eRuntimeError);
1508-
rb_shape_tree->shape_cache = (redblack_node_t *)mmap(NULL, shape_cache_mmap_size,
1506+
rb_shape_tree.shape_cache = (redblack_node_t *)mmap(NULL, shape_cache_mmap_size,
15091507
PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
1510-
rb_shape_tree->cache_size = 0;
1508+
rb_shape_tree.cache_size = 0;
15111509

15121510
// If mmap fails, then give up on the redblack tree cache.
15131511
// We set the cache size such that the redblack node allocators think
15141512
// the cache is full.
1515-
if (rb_shape_tree->shape_cache == MAP_FAILED) {
1516-
rb_shape_tree->shape_cache = 0;
1517-
rb_shape_tree->cache_size = REDBLACK_CACHE_SIZE;
1513+
if (rb_shape_tree.shape_cache == MAP_FAILED) {
1514+
rb_shape_tree.shape_cache = 0;
1515+
rb_shape_tree.cache_size = REDBLACK_CACHE_SIZE;
15181516
}
15191517
else {
1520-
ruby_annotate_mmap(rb_shape_tree->shape_cache, shape_cache_mmap_size, "Ruby:Init_default_shapes:shape_cache");
1518+
ruby_annotate_mmap(rb_shape_tree.shape_cache, shape_cache_mmap_size, "Ruby:Init_default_shapes:shape_cache");
15211519
}
15221520
#endif
15231521

@@ -1528,8 +1526,8 @@ Init_default_shapes(void)
15281526
rb_shape_t *root = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
15291527
root->capacity = 0;
15301528
root->type = SHAPE_ROOT;
1531-
rb_shape_tree->root_shape = root;
1532-
RUBY_ASSERT(raw_shape_id(rb_shape_tree->root_shape) == ROOT_SHAPE_ID);
1529+
rb_shape_tree.root_shape = root;
1530+
RUBY_ASSERT(raw_shape_id(rb_shape_tree.root_shape) == ROOT_SHAPE_ID);
15331531

15341532
rb_shape_t *root_with_obj_id = rb_shape_alloc_with_parent_id(0, ROOT_SHAPE_ID);
15351533
root_with_obj_id->type = SHAPE_OBJ_ID;
@@ -1541,8 +1539,7 @@ Init_default_shapes(void)
15411539
void
15421540
rb_shape_free_all(void)
15431541
{
1544-
xfree((void *)rb_shape_tree->capacities);
1545-
xfree(rb_shape_tree);
1542+
xfree((void *)rb_shape_tree.capacities);
15461543
}
15471544

15481545
void

shape.h

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ typedef struct {
9494
} rb_shape_tree_t;
9595

9696
RUBY_SYMBOL_EXPORT_BEGIN
97-
RUBY_EXTERN rb_shape_tree_t *rb_shape_tree;
97+
RUBY_EXTERN rb_shape_tree_t rb_shape_tree;
9898
RUBY_SYMBOL_EXPORT_END
9999

100100
union rb_attr_index_cache {
@@ -151,7 +151,7 @@ RSHAPE(shape_id_t shape_id)
151151
uint32_t offset = (shape_id & SHAPE_ID_OFFSET_MASK);
152152
RUBY_ASSERT(offset != INVALID_SHAPE_ID);
153153

154-
return &rb_shape_tree->shape_list[offset];
154+
return &rb_shape_tree.shape_list[offset];
155155
}
156156

157157
int32_t rb_shape_id_offset(void);
@@ -240,7 +240,7 @@ RSHAPE_EMBEDDED_CAPACITY(shape_id_t shape_id)
240240
{
241241
uint8_t heap_index = rb_shape_heap_index(shape_id);
242242
if (heap_index) {
243-
return rb_shape_tree->capacities[heap_index - 1];
243+
return rb_shape_tree.capacities[heap_index - 1];
244244
}
245245
return 0;
246246
}

vm.c

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -736,8 +736,8 @@ vm_stat(int argc, VALUE *argv, VALUE self)
736736
SET(constant_cache_invalidations, ruby_vm_constant_cache_invalidations);
737737
SET(constant_cache_misses, ruby_vm_constant_cache_misses);
738738
SET(global_cvar_state, ruby_vm_global_cvar_state);
739-
SET(next_shape_id, (rb_serial_t)rb_shape_tree->next_shape_id);
740-
SET(shape_cache_size, (rb_serial_t)rb_shape_tree->cache_size);
739+
SET(next_shape_id, (rb_serial_t)rb_shape_tree.next_shape_id);
740+
SET(shape_cache_size, (rb_serial_t)rb_shape_tree.cache_size);
741741
#undef SET
742742

743743
#if USE_DEBUG_COUNTER

yjit.c

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -778,7 +778,7 @@ VALUE
778778
rb_object_shape_count(void)
779779
{
780780
// next_shape_id starts from 0, so it's the same as the count
781-
return ULONG2NUM((unsigned long)rb_shape_tree->next_shape_id);
781+
return ULONG2NUM((unsigned long)rb_shape_tree.next_shape_id);
782782
}
783783

784784
bool

0 commit comments

Comments (0)