8000 shape.h: allow better inlining of shape access code · ruby/ruby@310b996 · GitHub
Commit 310b996

shape.h: allow better inlining of shape access code
By turning `RSHAPE` into a `static inline` function, we allow all shape lookup code to be fully inlined.
1 parent 51a41c1 commit 310b996
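
In short: `RSHAPE` was previously a macro aliasing the exported `rb_shape_lookup()`, so every shape lookup from another compilation unit went through an opaque out-of-line call. Defining `RSHAPE` as a `static inline` in shape.h, and reading the tree through the plain `rb_shape_tree` pointer instead of the `GET_SHAPE_TREE()` wrapper, makes the lookup body visible at every call site, where it typically compiles down to a mask and an indexed load. A condensed before/after of the pattern, simplified from the shape.h diff below:

```c
/* Before: callers only see a declaration, so the lookup stays an
 * out-of-line call into shape.o that the optimizer cannot see through. */
rb_shape_t *rb_shape_lookup(shape_id_t shape_id); /* defined in shape.c */
#define RSHAPE rb_shape_lookup

/* After: the definition lives in the header, so every caller can inline it. */
static inline rb_shape_t *
RSHAPE(shape_id_t shape_id)
{
    uint32_t offset = (shape_id & SHAPE_ID_OFFSET_MASK);
    RUBY_ASSERT(offset != INVALID_SHAPE_ID);

    return &rb_shape_tree->shape_list[offset]; /* mask + indexed load, no call */
}
```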

File tree: 4 files changed, +57 -60 lines

shape.c

Lines changed: 44 additions & 47 deletions
@@ -48,8 +48,8 @@ redblack_left(redblack_node_t *node)
         return LEAF;
     }
     else {
-        RUBY_ASSERT(node->l < GET_SHAPE_TREE()->cache_size);
-        redblack_node_t *left = &GET_SHAPE_TREE()->shape_cache[node->l - 1];
+        RUBY_ASSERT(node->l < rb_shape_tree->cache_size);
+        redblack_node_t *left = &rb_shape_tree->shape_cache[node->l - 1];
         return left;
     }
 }
@@ -61,8 +61,8 @@ redblack_right(redblack_node_t *node)
         return LEAF;
     }
     else {
-        RUBY_ASSERT(node->r < GET_SHAPE_TREE()->cache_size);
-        redblack_node_t *right = &GET_SHAPE_TREE()->shape_cache[node->r - 1];
+        RUBY_ASSERT(node->r < rb_shape_tree->cache_size);
+        redblack_node_t *right = &rb_shape_tree->shape_cache[node->r - 1];
         return right;
     }
 }
@@ -120,7 +120,7 @@ redblack_id_for(redblack_node_t *node)
         return 0;
     }
     else {
-        redblack_node_t *redblack_nodes = GET_SHAPE_TREE()->shape_cache;
+        redblack_node_t *redblack_nodes = rb_shape_tree->shape_cache;
         redblack_id_t id = (redblack_id_t)(node - redblack_nodes);
         return id + 1;
     }
@@ -129,16 +129,16 @@ redblack_id_for(redblack_node_t *node)
 static redblack_node_t *
 redblack_new(char color, ID key, rb_shape_t *value, redblack_node_t *left, redblack_node_t *right)
 {
-    if (GET_SHAPE_TREE()->cache_size + 1 >= REDBLACK_CACHE_SIZE) {
+    if (rb_shape_tree->cache_size + 1 >= REDBLACK_CACHE_SIZE) {
         // We're out of cache, just quit
         return LEAF;
     }

     RUBY_ASSERT(left == LEAF || left->key < key);
     RUBY_ASSERT(right == LEAF || right->key > key);

-    redblack_node_t *redblack_nodes = GET_SHAPE_TREE()->shape_cache;
-    redblack_node_t *node = &redblack_nodes[(GET_SHAPE_TREE()->cache_size)++];
+    redblack_node_t *redblack_nodes = rb_shape_tree->shape_cache;
+    redblack_node_t *node = &redblack_nodes[(rb_shape_tree->cache_size)++];
     node->key = key;
     node->value = (rb_shape_t *)((uintptr_t)value | color);
     node->l = redblack_id_for(left);
@@ -288,20 +288,20 @@ redblack_insert(redblack_node_t *tree, ID key, rb_shape_t *value)
 }
 #endif

-rb_shape_tree_t *rb_shape_tree_ptr = NULL;
+rb_shape_tree_t *rb_shape_tree = NULL; // extern
 static VALUE shape_tree_obj = Qfalse;

 rb_shape_t *
 rb_shape_get_root_shape(void)
 {
-    return GET_SHAPE_TREE()->root_shape;
+    return rb_shape_tree->root_shape;
 }

 static void
 shape_tree_mark(void *data)
 {
     rb_shape_t *cursor = rb_shape_get_root_shape();
-    rb_shape_t *end = RSHAPE(GET_SHAPE_TREE()->next_shape_id - 1);
+    rb_shape_t *end = RSHAPE(rb_shape_tree->next_shape_id - 1);
     while (cursor < end) {
         if (cursor->edges && !SINGLE_CHILD_P(cursor->edges)) {
             rb_gc_mark_movable(cursor->edges);
@@ -314,7 +314,7 @@ static void
 shape_tree_compact(void *data)
 {
     rb_shape_t *cursor = rb_shape_get_root_shape();
-    rb_shape_t *end = RSHAPE(GET_SHAPE_TREE()->next_shape_id - 1);
+    rb_shape_t *end = RSHAPE(rb_shape_tree->next_shape_id - 1);
     while (cursor < end) {
         if (cursor->edges && !SINGLE_CHILD_P(cursor->edges)) {
             cursor->edges = rb_gc_location(cursor->edges);
@@ -326,7 +326,7 @@ shape_tree_compact(void *data)
 static size_t
 shape_tree_memsize(const void *data)
 {
-    return GET_SHAPE_TREE()->cache_size * sizeof(redblack_node_t);
+    return rb_shape_tree->cache_size * sizeof(redblack_node_t);
 }

 static const rb_data_type_t shape_tree_type = {
@@ -349,14 +349,14 @@ static inline shape_id_t
 raw_shape_id(rb_shape_t *shape)
 {
     RUBY_ASSERT(shape);
-    return (shape_id_t)(shape - GET_SHAPE_TREE()->shape_list);
+    return (shape_id_t)(shape - rb_shape_tree->shape_list);
 }

 static inline shape_id_t
 shape_id(rb_shape_t *shape, shape_id_t previous_shape_id)
 {
     RUBY_ASSERT(shape);
-    shape_id_t raw_id = (shape_id_t)(shape - GET_SHAPE_TREE()->shape_list);
+    shape_id_t raw_id = (shape_id_t)(shape - rb_shape_tree->shape_list);
     return raw_id | (previous_shape_id & SHAPE_ID_FLAGS_MASK);
 }

@@ -373,7 +373,7 @@ rb_shape_each_shape_id(each_shape_callback callback, void *data)
 {
     rb_shape_t *start = rb_shape_get_root_shape();
     rb_shape_t *cursor = start;
-    rb_shape_t *end = RSHAPE(GET_SHAPE_TREE()->next_shape_id);
+    rb_shape_t *end = RSHAPE(rb_shape_tree->next_shape_id);
     while (cursor < end) {
         callback((shape_id_t)(cursor - start), data);
         cursor += 1;
@@ -383,10 +383,7 @@ rb_shape_each_shape_id(each_shape_callback callback, void *data)
 RUBY_FUNC_EXPORTED rb_shape_t *
 rb_shape_lookup(shape_id_t shape_id)
 {
-    uint32_t offset = (shape_id & SHAPE_ID_OFFSET_MASK);
-    RUBY_ASSERT(offset != INVALID_SHAPE_ID);
-
-    return &GET_SHAPE_TREE()->shape_list[offset];
+    return RSHAPE(shape_id);
 }

 RUBY_FUNC_EXPORTED shape_id_t
@@ -416,14 +413,14 @@ rb_shape_depth(shape_id_t shape_id)
 static rb_shape_t *
 shape_alloc(void)
 {
-    shape_id_t shape_id = (shape_id_t)RUBY_ATOMIC_FETCH_ADD(GET_SHAPE_TREE()->next_shape_id, 1);
+    shape_id_t shape_id = (shape_id_t)RUBY_ATOMIC_FETCH_ADD(rb_shape_tree->next_shape_id, 1);

     if (shape_id == (MAX_SHAPE_ID + 1)) {
         // TODO: Make an OutOfShapesError ??
         rb_bug("Out of shapes");
     }

-    return &GET_SHAPE_TREE()->shape_list[shape_id];
+    return &rb_shape_tree->shape_list[shape_id];
 }

 static rb_shape_t *
@@ -487,7 +484,7 @@ redblack_cache_ancestors(rb_shape_t *shape)
 static attr_index_t
 shape_grow_capa(attr_index_t current_capa)
 {
-    const attr_index_t *capacities = GET_SHAPE_TREE()->capacities;
+    const attr_index_t *capacities = rb_shape_tree->capacities;

     // First try to use the next size that will be embeddable in a larger object slot.
     attr_index_t capa;
@@ -566,7 +563,7 @@ get_next_shape_internal_atomic(rb_shape_t *shape, ID id, enum shape_type shape_t
     if (!res) {
         // If we're not allowed to create a new variation, of if we're out of shapes
         // we return TOO_COMPLEX_SHAPE.
-        if (!new_variations_allowed || GET_SHAPE_TREE()->next_shape_id > MAX_SHAPE_ID) {
+        if (!new_variations_allowed || rb_shape_tree->next_shape_id > MAX_SHAPE_ID) {
             res = NULL;
         }
         else {
@@ -642,7 +639,7 @@ get_next_shape_internal(rb_shape_t *shape, ID id, enum shape_type shape_type, bo
     if (!res) {
         // If we're not allowed to create a new variation, of if we're out of shapes
         // we return TOO_COMPLEX_SHAPE.
-        if (!new_variations_allowed || GET_SHAPE_TREE()->next_shape_id > MAX_SHAPE_ID) {
+        if (!new_variations_allowed || rb_shape_tree->next_shape_id > MAX_SHAPE_ID) {
             res = NULL;
         }
         else {
@@ -1239,7 +1236,7 @@ rb_shape_verify_consistency(VALUE obj, shape_id_t shape_id)

     uint8_t flags_heap_index = rb_shape_heap_index(shape_id);
     if (RB_TYPE_P(obj, T_OBJECT)) {
-        size_t shape_id_slot_size = GET_SHAPE_TREE()->capacities[flags_heap_index - 1] * sizeof(VALUE) + sizeof(struct RBasic);
+        size_t shape_id_slot_size = rb_shape_tree->capacities[flags_heap_index - 1] * sizeof(VALUE) + sizeof(struct RBasic);
         size_t actual_slot_size = rb_gc_obj_slot_size(obj);

         if (shape_id_slot_size != actual_slot_size) {
@@ -1389,15 +1386,15 @@ rb_shape_root_shape(VALUE self)
 static VALUE
 rb_shape_shapes_available(VALUE self)
 {
-    return INT2NUM(MAX_SHAPE_ID - (GET_SHAPE_TREE()->next_shape_id - 1));
+    return INT2NUM(MAX_SHAPE_ID - (rb_shape_tree->next_shape_id - 1));
 }

 static VALUE
 rb_shape_exhaust(int argc, VALUE *argv, VALUE self)
 {
     rb_check_arity(argc, 0, 1);
     int offset = argc == 1 ? NUM2INT(argv[0]) : 0;
-    GET_SHAPE_TREE()->next_shape_id = MAX_SHAPE_ID - offset + 1;
+    rb_shape_tree->next_shape_id = MAX_SHAPE_ID - offset + 1;
     return Qnil;
 }

@@ -1453,7 +1450,7 @@ static VALUE
 rb_shape_find_by_id(VALUE mod, VALUE id)
 {
     shape_id_t shape_id = NUM2UINT(id);
-    if (shape_id >= GET_SHAPE_TREE()->next_shape_id) {
+    if (shape_id >= rb_shape_tree->next_shape_id) {
         rb_raise(rb_eArgError, "Shape ID %d is out of bounds\n", shape_id);
     }
     return shape_id_t_to_rb_cShape(shape_id);
@@ -1467,7 +1464,7 @@ rb_shape_find_by_id(VALUE mod, VALUE id)
 void
 Init_default_shapes(void)
 {
-    rb_shape_tree_ptr = xcalloc(1, sizeof(rb_shape_tree_t));
+    rb_shape_tree = xcalloc(1, sizeof(rb_shape_tree_t));

     size_t *heap_sizes = rb_gc_heap_sizes();
     size_t heaps_count = 0;
@@ -1480,23 +1477,23 @@ Init_default_shapes(void)
     for (index = 0; index < heaps_count; index++) {
         capacities[index] = (heap_sizes[index] - sizeof(struct RBasic)) / sizeof(VALUE);
     }
-    GET_SHAPE_TREE()->capacities = capacities;
+    rb_shape_tree->capacities = capacities;

 #ifdef HAVE_MMAP
     size_t shape_list_mmap_size = rb_size_mul_or_raise(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t), rb_eRuntimeError);
-    rb_shape_tree_ptr->shape_list = (rb_shape_t *)mmap(NULL, shape_list_mmap_size,
+    rb_shape_tree->shape_list = (rb_shape_t *)mmap(NULL, shape_list_mmap_size,
                                                        PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
-    if (GET_SHAPE_TREE()->shape_list == MAP_FAILED) {
-        GET_SHAPE_TREE()->shape_list = 0;
+    if (rb_shape_tree->shape_list == MAP_FAILED) {
+        rb_shape_tree->shape_list = 0;
     }
     else {
-        ruby_annotate_mmap(rb_shape_tree_ptr->shape_list, shape_list_mmap_size, "Ruby:Init_default_shapes:shape_list");
+        ruby_annotate_mmap(rb_shape_tree->shape_list, shape_list_mmap_size, "Ruby:Init_default_shapes:shape_list");
     }
 #else
-    GET_SHAPE_TREE()->shape_list = xcalloc(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t));
+    rb_shape_tree->shape_list = xcalloc(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t));
 #endif

-    if (!GET_SHAPE_TREE()->shape_list) {
+    if (!rb_shape_tree->shape_list) {
         rb_memerror();
     }

@@ -1506,19 +1503,19 @@ Init_default_shapes(void)

 #ifdef HAVE_MMAP
     size_t shape_cache_mmap_size = rb_size_mul_or_raise(REDBLACK_CACHE_SIZE, sizeof(redblack_node_t), rb_eRuntimeError);
-    rb_shape_tree_ptr->shape_cache = (redblack_node_t *)mmap(NULL, shape_cache_mmap_size,
+    rb_shape_tree->shape_cache = (redblack_node_t *)mmap(NULL, shape_cache_mmap_size,
                                                              PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
-    rb_shape_tree_ptr->cache_size = 0;
+    rb_shape_tree->cache_size = 0;

     // If mmap fails, then give up on the redblack tree cache.
     // We set the cache size such that the redblack node allocators think
     // the cache is full.
-    if (GET_SHAPE_TREE()->shape_cache == MAP_FAILED) {
-        GET_SHAPE_TREE()->shape_cache = 0;
-        GET_SHAPE_TREE()->cache_size = REDBLACK_CACHE_SIZE;
+    if (rb_shape_tree->shape_cache == MAP_FAILED) {
+        rb_shape_tree->shape_cache = 0;
+        rb_shape_tree->cache_size = REDBLACK_CACHE_SIZE;
     }
     else {
-        ruby_annotate_mmap(rb_shape_tree_ptr->shape_cache, shape_cache_mmap_size, "Ruby:Init_default_shapes:shape_cache");
+        ruby_annotate_mmap(rb_shape_tree->shape_cache, shape_cache_mmap_size, "Ruby:Init_default_shapes:shape_cache");
     }
 #endif

@@ -1529,8 +1526,8 @@ Init_default_shapes(void)
     rb_shape_t *root = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
     root->capacity = 0;
     root->type = SHAPE_ROOT;
-    GET_SHAPE_TREE()->root_shape = root;
-    RUBY_ASSERT(raw_shape_id(GET_SHAPE_TREE()->root_shape) == ROOT_SHAPE_ID);
+    rb_shape_tree->root_shape = root;
+    RUBY_ASSERT(raw_shape_id(rb_shape_tree->root_shape) == ROOT_SHAPE_ID);

     rb_shape_t *root_with_obj_id = rb_shape_alloc_with_parent_id(0, ROOT_SHAPE_ID);
     root_with_obj_id->type = SHAPE_OBJ_ID;
@@ -1542,8 +1539,8 @@ Init_default_shapes(void)
 void
 rb_shape_free_all(void)
 {
-    xfree((void *)GET_SHAPE_TREE()->capacities);
-    xfree(GET_SHAPE_TREE());
+    xfree((void *)rb_shape_tree->capacities);
+    xfree(rb_shape_tree);
 }

 void

shape.h

Lines changed: 10 additions & 10 deletions
@@ -92,7 +92,7 @@ typedef struct {
     redblack_node_t *shape_cache;
     unsigned int cache_size;
 } rb_shape_tree_t;
-RUBY_EXTERN rb_shape_tree_t *rb_shape_tree_ptr;
+RUBY_EXTERN rb_shape_tree_t *rb_shape_tree;

 union rb_attr_index_cache {
     uint64_t pack;
@@ -102,13 +102,6 @@ union rb_attr_index_cache {
     } unpack;
 };

-static inline rb_shape_tree_t *
-rb_current_shape_tree(void)
-{
-    return rb_shape_tree_ptr;
-}
-#define GET_SHAPE_TREE() rb_current_shape_tree()
-
 static inline shape_id_t
 RBASIC_SHAPE_ID(VALUE obj)
 {
@@ -148,7 +141,14 @@ RBASIC_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
 #endif
 }

-#define RSHAPE rb_shape_lookup
+static inline rb_shape_t *
+RSHAPE(shape_id_t shape_id)
+{
+    uint32_t offset = (shape_id & SHAPE_ID_OFFSET_MASK);
+    RUBY_ASSERT(offset != INVALID_SHAPE_ID);
+
+    return &rb_shape_tree->shape_list[offset];
+}

 int32_t rb_shape_id_offset(void);

@@ -237,7 +237,7 @@ RSHAPE_EMBEDDED_CAPACITY(shape_id_t shape_id)
 {
     uint8_t heap_index = rb_shape_heap_index(shape_id);
     if (heap_index) {
-        return GET_SHAPE_TREE()->capacities[heap_index - 1];
+        return rb_shape_tree->capacities[heap_index - 1];
     }
     return 0;
 }
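
With `RSHAPE` visible in the header, helpers layered on top of it compose into straight-line code. A hypothetical caller, for illustration only (not part of this commit):

```c
/* Hypothetical example: since RSHAPE is now a static inline, the compiler
 * can fold the table lookup and the field access together instead of
 * emitting a call to the exported rb_shape_lookup(). */
static inline attr_index_t
shape_capacity(shape_id_t shape_id)
{
    return RSHAPE(shape_id)->capacity;
}
```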

vm.c

Lines changed: 2 additions & 2 deletions
@@ -736,8 +736,8 @@ vm_stat(int argc, VALUE *argv, VALUE self)
     SET(constant_cache_invalidations, ruby_vm_constant_cache_invalidations);
     SET(constant_cache_misses, ruby_vm_constant_cache_misses);
     SET(global_cvar_state, ruby_vm_global_cvar_state);
-    SET(next_shape_id, (rb_serial_t)GET_SHAPE_TREE()->next_shape_id);
-    SET(shape_cache_size, (rb_serial_t)GET_SHAPE_TREE()->cache_size);
+    SET(next_shape_id, (rb_serial_t)rb_shape_tree->next_shape_id);
+    SET(shape_cache_size, (rb_serial_t)rb_shape_tree->cache_size);
 #undef SET

 #if USE_DEBUG_COUNTER

yjit.c

Lines changed: 1 addition & 1 deletion
@@ -778,7 +778,7 @@ VALUE
 rb_object_shape_count(void)
 {
     // next_shape_id starts from 0, so it's the same as the count
-    return ULONG2NUM((unsigned long)GET_SHAPE_TREE()->next_shape_id);
+    return ULONG2NUM((unsigned long)rb_shape_tree->next_shape_id);
 }

 bool
