author    | Jean Boussier <[email protected]> | 2025-06-11 14:32:35 +0200
committer | Jean Boussier <[email protected]> | 2025-06-11 16:38:38 +0200
commit    | 95201299fd7bf0918dfbd8c127ce2b5b33ffa537 (patch)
tree      | b6659cd0932fa3f0cb71c8cf8c2aba382d86cf6b
parent    | 4463ac264dc44979ea74bbca3de58ae72d5eea71 (diff)
Refactor the last references to `rb_shape_t`
The type isn't opaque because Ruby isn't often compiled with LTO,
so for optimization purposes it's better to allow as much inlining
as possible.

However, ideally only `shape.c` and `shape.h` should deal with the
actual struct, and everything else should just deal with the opaque
`shape_id_t`.
Notes:
Merged: https://github.com/ruby/ruby/pull/13586
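For context, the layout the commit message argues for can be sketched outside Ruby's tree roughly as follows (illustration only: `node_id_t`, `NODE_TYPE`, and the other names are hypothetical stand-ins for `shape_id_t` and the `RSHAPE_*` accessors, not Ruby APIs). The struct stays visible in the header so the accessors can be inlined even without LTO, while call sites only ever hold the integer handle.

/* Illustration only -- a stand-alone mock of the handle/accessor split,
 * not Ruby source. The struct stays visible so the accessors can inline
 * without LTO, but callers are expected to hold only the opaque id. */
#include <stdint.h>
#include <stdio.h>

typedef uint32_t node_id_t;                 /* plays the role of shape_id_t */

struct node {                               /* plays the role of rb_shape_t */
    node_id_t parent_id;
    int type;
};

static struct node node_table[16];          /* stand-in for the shape pool */

static inline struct node *NODE(node_id_t id)     { return &node_table[id]; }
static inline int NODE_TYPE(node_id_t id)         { return NODE(id)->type; }
static inline node_id_t NODE_PARENT(node_id_t id) { return NODE(id)->parent_id; }

int
main(void)
{
    node_table[1] = (struct node){ .parent_id = 0, .type = 2 };

    /* Call sites pass the id around and go through the accessors;
     * they never name `struct node` directly. */
    printf("type=%d parent=%u\n", NODE_TYPE(1), (unsigned)NODE_PARENT(1));
    return 0;
}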
-rw-r--r-- | ext/objspace/objspace_dump.c |  11
-rw-r--r-- | internal/class.h             |   2
-rw-r--r-- | internal/variable.h          |   2
-rw-r--r-- | object.c                     |   2
-rw-r--r-- | shape.c                      |  25
-rw-r--r-- | shape.h                      |  17
-rw-r--r-- | variable.c                   | 103
-rw-r--r-- | vm_insnhelper.c              |   9
8 files changed, 96 insertions, 75 deletions
diff --git a/ext/objspace/objspace_dump.c b/ext/objspace/objspace_dump.c
index ac8bafaea9..83b434c3a1 100644
--- a/ext/objspace/objspace_dump.c
+++ b/ext/objspace/objspace_dump.c
@@ -792,22 +792,21 @@ shape_id_i(shape_id_t shape_id, void *data)
         return;
     }
 
-    rb_shape_t *shape = RSHAPE(shape_id);
     dump_append(dc, "{\"address\":");
-    dump_append_ref(dc, (VALUE)shape);
+    dump_append_ref(dc, (VALUE)RSHAPE(shape_id));
 
     dump_append(dc, ", \"type\":\"SHAPE\", \"id\":");
     dump_append_sizet(dc, shape_id);
 
-    if (shape->type != SHAPE_ROOT) {
+    if (RSHAPE_TYPE(shape_id) != SHAPE_ROOT) {
         dump_append(dc, ", \"parent_id\":");
-        dump_append_lu(dc, shape->parent_id);
+        dump_append_lu(dc, RSHAPE_PARENT(shape_id));
     }
 
     dump_append(dc, ", \"depth\":");
     dump_append_sizet(dc, rb_shape_depth(shape_id));
 
-    switch((enum shape_type)shape->type) {
+    switch (RSHAPE_TYPE(shape_id)) {
       case SHAPE_ROOT:
         dump_append(dc, ", \"shape_type\":\"ROOT\"");
         break;
@@ -815,7 +814,7 @@ shape_id_i(shape_id_t shape_id, void *data)
         dump_append(dc, ", \"shape_type\":\"IVAR\"");
 
         dump_append(dc, ",\"edge_name\":");
-        dump_append_id(dc, shape->edge_name);
+        dump_append_id(dc, RSHAPE_EDGE_NAME(shape_id));
         break;
 
       case SHAPE_OBJ_ID:
diff --git a/internal/class.h b/internal/class.h
index 620c7e9c53..5601978292 100644
--- a/internal/class.h
+++ b/internal/class.h
@@ -576,7 +576,7 @@ RCLASS_FIELDS_COUNT(VALUE obj)
         return count;
     }
     else {
-        return RSHAPE(RBASIC_SHAPE_ID(obj))->next_field_index;
+        return RSHAPE_LEN(RBASIC_SHAPE_ID(obj));
     }
 }
 
diff --git a/internal/variable.h b/internal/variable.h
index a0608b22d1..8da6c678a5 100644
--- a/internal/variable.h
+++ b/internal/variable.h
@@ -13,7 +13,7 @@
 #include "constant.h" /* for rb_const_entry_t */
 #include "ruby/internal/stdbool.h" /* for bool */
 #include "ruby/ruby.h" /* for VALUE */
-#include "shape.h" /* for rb_shape_t */
+#include "shape.h" /* for shape_id_t */
 
 /* variable.c */
 void rb_gc_mark_global_tbl(void);
diff --git a/object.c b/object.c
--- a/object.c
+++ b/object.c
@@ -340,7 +340,7 @@ rb_obj_copy_ivar(VALUE dest, VALUE obj)
     shape_id_t dest_shape_id = src_shape_id;
     shape_id_t initial_shape_id = RBASIC_SHAPE_ID(dest);
 
-    RUBY_ASSERT(RSHAPE(initial_shape_id)->type == SHAPE_ROOT);
+    RUBY_ASSERT(RSHAPE_TYPE_P(initial_shape_id, SHAPE_ROOT));
 
     dest_shape_id = rb_shape_rebuild(initial_shape_id, src_shape_id);
     if (UNLIKELY(rb_shape_too_complex_p(dest_shape_id))) {
diff --git a/shape.c b/shape.c
--- a/shape.c
+++ b/shape.c
@@ -1184,6 +1184,31 @@ rb_shape_memsize(shape_id_t shape_id)
     return memsize;
 }
 
+bool
+rb_shape_foreach_field(shape_id_t initial_shape_id, rb_shape_foreach_transition_callback func, void *data)
+{
+    RUBY_ASSERT(!rb_shape_too_complex_p(initial_shape_id));
+
+    rb_shape_t *shape = RSHAPE(initial_shape_id);
+    if (shape->type == SHAPE_ROOT) {
+        return true;
+    }
+
+    shape_id_t parent_id = shape_id(RSHAPE(shape->parent_id), initial_shape_id);
+    if (rb_shape_foreach_field(parent_id, func, data)) {
+        switch (func(shape_id(shape, initial_shape_id), data)) {
+          case ST_STOP:
+            return false;
+          case ST_CHECK:
+          case ST_CONTINUE:
+            break;
+          default:
+            rb_bug("unreachable");
+        }
+    }
+    return true;
+}
+
 #if RUBY_DEBUG
 bool
 rb_shape_verify_consistency(VALUE obj, shape_id_t shape_id)
diff --git a/shape.h b/shape.h
--- a/shape.h
+++ b/shape.h
@@ -158,6 +158,9 @@ shape_id_t rb_shape_get_next_iv_shape(shape_id_t shape_id, ID id);
 bool rb_shape_get_iv_index(shape_id_t shape_id, ID id, attr_index_t *value);
 bool rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t *value, shape_id_t *shape_id_hint);
 
+typedef int rb_shape_foreach_transition_callback(shape_id_t shape_id, void *data);
+bool rb_shape_foreach_field(shape_id_t shape_id, rb_shape_foreach_transition_callback func, void *data);
+
 shape_id_t rb_shape_transition_frozen(VALUE obj);
 shape_id_t rb_shape_transition_complex(VALUE obj);
 shape_id_t rb_shape_transition_remove_ivar(VALUE obj, ID id, shape_id_t *removed_shape_id);
@@ -211,10 +214,22 @@ rb_shape_root(size_t heap_id)
     return ROOT_SHAPE_ID | ((heap_index + 1) << SHAPE_ID_HEAP_INDEX_OFFSET);
 }
 
+static inline shape_id_t
+RSHAPE_PARENT(shape_id_t shape_id)
+{
+    return RSHAPE(shape_id)->parent_id;
+}
+
+static inline enum shape_type
+RSHAPE_TYPE(shape_id_t shape_id)
+{
+    return RSHAPE(shape_id)->type;
+}
+
 static inline bool
 RSHAPE_TYPE_P(shape_id_t shape_id, enum shape_type type)
 {
-    return RSHAPE(shape_id)->type == type;
+    return RSHAPE_TYPE(shape_id) == type;
 }
 
 static inline attr_index_t
diff --git a/variable.c b/variable.c
index a54bebcec0..38249b4e82 100644
--- a/variable.c
+++ b/variable.c
@@ -1303,7 +1303,7 @@ VALUE
 rb_obj_field_get(VALUE obj, shape_id_t target_shape_id)
 {
     RUBY_ASSERT(!SPECIAL_CONST_P(obj));
-    RUBY_ASSERT(RSHAPE(target_shape_id)->type == SHAPE_IVAR || RSHAPE(target_shape_id)->type == SHAPE_OBJ_ID);
+    RUBY_ASSERT(RSHAPE_TYPE_P(target_shape_id, SHAPE_IVAR) || RSHAPE_TYPE_P(target_shape_id, SHAPE_OBJ_ID));
 
     if (rb_shape_too_complex_p(target_shape_id)) {
         st_table *fields_hash;
@@ -1325,7 +1325,7 @@ rb_obj_field_get(VALUE obj, shape_id_t target_shape_id)
             break;
         }
         VALUE value = Qundef;
-        st_lookup(fields_hash, RSHAPE(target_shape_id)->edge_name, &value);
+        st_lookup(fields_hash, RSHAPE_EDGE_NAME(target_shape_id), &value);
 
 #if RUBY_DEBUG
         if (UNDEF_P(value)) {
@@ -1337,7 +1337,7 @@ rb_obj_field_get(VALUE obj, shape_id_t target_shape_id)
         return value;
     }
 
-    attr_index_t attr_index = RSHAPE(target_shape_id)->next_field_index - 1;
+    attr_index_t attr_index = RSHAPE_INDEX(target_shape_id);
     VALUE *fields;
     switch (BUILTIN_TYPE(obj)) {
       case T_CLASS:
@@ -1505,7 +1505,7 @@ rb_ivar_delete(VALUE obj, ID id, VALUE undef)
         goto too_complex;
     }
 
-    RUBY_ASSERT(RSHAPE(next_shape_id)->next_field_index == RSHAPE(old_shape_id)->next_field_index - 1);
+    RUBY_ASSERT(RSHAPE_LEN(next_shape_id) == RSHAPE_LEN(old_shape_id) - 1);
 
     VALUE *fields;
     switch(BUILTIN_TYPE(obj)) {
@@ -1526,9 +1526,9 @@ rb_ivar_delete(VALUE obj, ID id, VALUE undef)
 
     RUBY_ASSERT(removed_shape_id != INVALID_SHAPE_ID);
 
-    attr_index_t new_fields_count = RSHAPE(next_shape_id)->next_field_index;
+    attr_index_t new_fields_count = RSHAPE_LEN(next_shape_id);
 
-    attr_index_t removed_index = RSHAPE(removed_shape_id)->next_field_index - 1;
+    attr_index_t removed_index = RSHAPE_INDEX(removed_shape_id);
     val = fields[removed_index];
     size_t trailing_fields = new_fields_count - removed_index;
 
@@ -1810,8 +1810,8 @@ generic_fields_lookup_ensure_size(st_data_t *k, st_data_t *v, st_data_t u, int e
 
     if (!existing || fields_lookup->resize) {
         if (existing) {
-            RUBY_ASSERT(RSHAPE(fields_lookup->shape_id)->type == SHAPE_IVAR || RSHAPE(fields_lookup->shape_id)->type == SHAPE_OBJ_ID);
-            RUBY_ASSERT(RSHAPE_CAPACITY(RSHAPE(fields_lookup->shape_id)->parent_id) < RSHAPE_CAPACITY(fields_lookup->shape_id));
+            RUBY_ASSERT(RSHAPE_TYPE_P(fields_lookup->shape_id, SHAPE_IVAR) || RSHAPE_TYPE_P(fields_lookup->shape_id, SHAPE_OBJ_ID));
+            RUBY_ASSERT(RSHAPE_CAPACITY(RSHAPE_PARENT(fields_lookup->shape_id)) < RSHAPE_CAPACITY(fields_lookup->shape_id));
         }
         else {
             FL_SET_RAW((VALUE)*k, FL_EXIVAR);
@@ -2186,57 +2186,42 @@ struct iv_itr_data {
     bool ivar_only;
 };
 
-/*
- * Returns a flag to stop iterating depending on the result of +callback+.
- */
-static bool
-iterate_over_shapes_with_callback(rb_shape_t *shape, rb_ivar_foreach_callback_func *callback, struct iv_itr_data *itr_data)
+static int
+iterate_over_shapes_callback(shape_id_t shape_id, void *data)
 {
-    switch ((enum shape_type)shape->type) {
-      case SHAPE_ROOT:
-        return false;
-      case SHAPE_OBJ_ID:
-        if (itr_data->ivar_only) {
-            return iterate_over_shapes_with_callback(RSHAPE(shape->parent_id), callback, itr_data);
-        }
-        // fallthrough
-      case SHAPE_IVAR:
-        ASSUME(callback);
-        if (iterate_over_shapes_with_callback(RSHAPE(shape->parent_id), callback, itr_data)) {
-            return true;
-        }
+    struct iv_itr_data *itr_data = data;
 
-        VALUE * iv_list;
-        switch (BUILTIN_TYPE(itr_data->obj)) {
-          case T_OBJECT:
-            RUBY_ASSERT(!rb_shape_obj_too_complex_p(itr_data->obj));
-            iv_list = ROBJECT_FIELDS(itr_data->obj);
-            break;
-          case T_CLASS:
-          case T_MODULE:
-            RUBY_ASSERT(!rb_shape_obj_too_complex_p(itr_data->obj));
-            iv_list = RCLASS_PRIME_FIELDS(itr_data->obj);
-            break;
-          default:
-            iv_list = itr_data->fields_tbl->as.shape.fields;
-            break;
-        }
-        VALUE val = iv_list[shape->next_field_index - 1];
-        if (!UNDEF_P(val)) {
-            switch (callback(shape->edge_name, val, itr_data->arg)) {
-              case ST_CHECK:
-              case ST_CONTINUE:
-                break;
-              case ST_STOP:
-                return true;
-              default:
-                rb_bug("unreachable");
-            }
-        }
-        return false;
+    if (itr_data->ivar_only && !RSHAPE_TYPE_P(shape_id, SHAPE_IVAR)) {
+        return ST_CONTINUE;
+    }
+
+    VALUE *iv_list;
+    switch (BUILTIN_TYPE(itr_data->obj)) {
+      case T_OBJECT:
+        RUBY_ASSERT(!rb_shape_obj_too_complex_p(itr_data->obj));
+        iv_list = ROBJECT_FIELDS(itr_data->obj);
+        break;
+      case T_CLASS:
+      case T_MODULE:
+        RUBY_ASSERT(!rb_shape_obj_too_complex_p(itr_data->obj));
+        iv_list = RCLASS_PRIME_FIELDS(itr_data->obj);
+        break;
       default:
-        UNREACHABLE_RETURN(false);
+        iv_list = itr_data->fields_tbl->as.shape.fields;
+        break;
     }
+
+    VALUE val = iv_list[RSHAPE_INDEX(shape_id)];
+    return itr_data->func(RSHAPE_EDGE_NAME(shape_id), val, itr_data->arg);
+}
+
+/*
+ * Returns a flag to stop iterating depending on the result of +callback+.
+ */
+static void
+iterate_over_shapes(shape_id_t shape_id, rb_ivar_foreach_callback_func *callback, struct iv_itr_data *itr_data)
+{
+    rb_shape_foreach_field(shape_id, iterate_over_shapes_callback, itr_data);
 }
 
 static int
@@ -2262,7 +2247,7 @@ obj_fields_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg, b
         rb_st_foreach(ROBJECT_FIELDS_HASH(obj), each_hash_iv, (st_data_t)&itr_data);
     }
     else {
-        iterate_over_shapes_with_callback(RSHAPE(shape_id), func, &itr_data);
+        iterate_over_shapes(shape_id, func, &itr_data);
     }
 }
 
@@ -2285,7 +2270,7 @@ gen_fields_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg, b
         rb_st_foreach(fields_tbl->as.complex.table, each_hash_iv, (st_data_t)&itr_data);
     }
     else {
-        iterate_over_shapes_with_callback(RSHAPE(shape_id), func, &itr_data);
+        iterate_over_shapes(shape_id, func, &itr_data);
     }
 }
 
@@ -2306,7 +2291,7 @@ class_fields_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg,
         rb_st_foreach(RCLASS_WRITABLE_FIELDS_HASH(obj), each_hash_iv, (st_data_t)&itr_data);
     }
     else {
-        iterate_over_shapes_with_callback(RSHAPE(shape_id), func, &itr_data);
+        iterate_over_shapes(shape_id, func, &itr_data);
     }
 }
 
@@ -2344,7 +2329,7 @@ rb_copy_generic_ivar(VALUE dest, VALUE obj)
         shape_id_t initial_shape_id = rb_obj_shape_id(dest);
 
         if (!rb_shape_canonical_p(src_shape_id)) {
-            RUBY_ASSERT(RSHAPE(initial_shape_id)->type == SHAPE_ROOT);
+            RUBY_ASSERT(RSHAPE_TYPE_P(initial_shape_id, SHAPE_ROOT));
 
             dest_shape_id = rb_shape_rebuild(initial_shape_id, src_shape_id);
             if (UNLIKELY(rb_shape_too_complex_p(dest_shape_id))) {
diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index 24709eee2e..f09220a2ea 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -1455,9 +1455,7 @@ vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_i
         RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
     }
     else if (dest_shape_id != INVALID_SHAPE_ID) {
-        rb_shape_t *dest_shape = RSHAPE(dest_shape_id);
-
-        if (shape_id == dest_shape->parent_id && dest_shape->edge_name == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
+        if (shape_id == RSHAPE_PARENT(dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
             RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
         }
         else {
@@ -1498,10 +1496,9 @@ vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t i
         VM_ASSERT(!rb_ractor_shareable_p(obj));
     }
     else if (dest_shape_id != INVALID_SHAPE_ID) {
-        rb_shape_t *dest_shape = RSHAPE(dest_shape_id);
-        shape_id_t source_shape_id = dest_shape->parent_id;
+        shape_id_t source_shape_id = RSHAPE_PARENT(dest_shape_id);
 
-        if (shape_id == source_shape_id && dest_shape->edge_name == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
+        if (shape_id == source_shape_id && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
             RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
 
             RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
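The `rb_shape_foreach_field` / `rb_shape_foreach_transition_callback` pair introduced in `shape.h` and `shape.c` above walks from the root of the shape transition chain down to the given shape, invoking the callback once per field and stopping early when the callback returns `ST_STOP`. A stand-alone sketch of that parent-first, stop-early traversal (illustration only; `foreach_field`, `walk_cb`, and the hard-coded parent table are hypothetical, not Ruby code):

/* Illustration only -- mimics the parent-first, stop-early walk that
 * rb_shape_foreach_field performs, using a tiny hard-coded tree. */
#include <stdbool.h>
#include <stdio.h>

enum { WALK_CONTINUE, WALK_STOP };          /* stand-ins for ST_CONTINUE / ST_STOP */

typedef int walk_cb(int node_id, void *data);

static const int parent_of[] = { 0, 0, 1, 2 };   /* node 0 is the root */

/* Visit ancestors before the node itself; return false as soon as a
 * callback asks to stop, true when the whole chain was visited. */
static bool
foreach_field(int node_id, walk_cb *cb, void *data)
{
    if (node_id == 0) return true;          /* the root carries no field */
    if (!foreach_field(parent_of[node_id], cb, data)) return false;
    return cb(node_id, data) != WALK_STOP;
}

static int
print_field(int node_id, void *data)
{
    (void)data;
    printf("field #%d\n", node_id);
    return WALK_CONTINUE;
}

int
main(void)
{
    foreach_field(3, print_field, NULL);    /* reports fields 1, 2, 3 in order */
    return 0;
}

Recursing into the parent before invoking the callback reports the fields oldest-first, i.e. in the order the transitions were added, which is the order `iterate_over_shapes_callback` in `variable.c` now receives them.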