author     Jean Boussier <[email protected]>  2023-10-30 12:29:59 +0100
committer  Peter Zhu <[email protected]>      2023-10-31 12:07:54 -0400
commit     4aacc559d99988f395eced3534c7a6938bd356c8
tree       a33af6b2ae7ae803053dad304687b321cdd73e93
parent     85ad1025328989bb4e10436aed121b9136b0c8bf
Handle running out of shapes in `Object#dup`
There are a handful of call sites that may transition to OBJ_TOO_COMPLEX_SHAPE when we have just run out of shapes, but that weren't handling that case properly.
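
For context, the scenario looks like this from Ruby. This is only a sketch mirroring the new test added below; RubyVM::Shape.shapes_available is only available in debug builds.

    # Sketch of the scenario this commit handles (mirrors the new test below).
    # RubyVM::Shape.shapes_available requires a debug build.
    class A
      def initialize
        init # setting the ivars in a helper avoids right-sizing the object
      end

      def init
        @a = @b = @c = @d = @e = @f = 1
      end
    end

    a = A.new

    # Exhaust the shape pool with unrelated ivar transitions.
    o = Object.new
    i = 0
    while RubyVM::Shape.shapes_available > 1
      o.instance_variable_set(:"@i#{i}", 1)
      i += 1
    end

    # Copying a's ivars now needs shape transitions that can no longer be
    # allocated; with this commit the copy falls back to the hash-backed
    # OBJ_TOO_COMPLEX representation instead of failing.
    a.dup

The C-side changes below make rb_shape_rebuild_shape propagate OBJ_TOO_COMPLEX_SHAPE_ID, let rb_obj_copy_ivar evacuate the ivars into an st_table in that case, and stop vm_setivar_slowpath from populating the inline cache for such objects.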
-rw-r--r--  object.c                   11
-rw-r--r--  shape.c                    15
-rw-r--r--  test/ruby/test_shapes.rb   26
-rw-r--r--  vm_insnhelper.c            22
4 files changed, 60 insertions, 14 deletions
diff --git a/object.c b/object.c
index 5cb0e0a650..73fbe78edc 100644
--- a/object.c
+++ b/object.c
@@ -326,9 +326,18 @@ rb_obj_copy_ivar(VALUE dest, VALUE obj)
RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT);
shape_to_set_on_dest = rb_shape_rebuild_shape(initial_shape, src_shape);
+ if (UNLIKELY(rb_shape_id(shape_to_set_on_dest) == OBJ_TOO_COMPLEX_SHAPE_ID)) {
+ st_table * table = rb_st_init_numtable_with_size(src_num_ivs);
+
+ rb_ivar_foreach(obj, rb_obj_evacuate_ivs_to_hash_table, (st_data_t)table);
+ rb_shape_set_too_complex(dest);
+ ROBJECT(dest)->as.heap.ivptr = (VALUE *)table;
+
+ return;
+ }
}
- RUBY_ASSERT(src_num_ivs <= shape_to_set_on_dest->capacity);
+ RUBY_ASSERT(src_num_ivs <= shape_to_set_on_dest->capacity || rb_shape_id(shape_to_set_on_dest) == OBJ_TOO_COMPLEX_SHAPE_ID);
if (initial_shape->capacity < shape_to_set_on_dest->capacity) {
rb_ensure_iv_list_size(dest, initial_shape->capacity, shape_to_set_on_dest->capacity);
dest_buf = ROBJECT_IVPTR(dest);
diff --git a/shape.c b/shape.c
index 165b077934..016d0f848d 100644
--- a/shape.c
+++ b/shape.c
@@ -695,8 +695,9 @@ rb_shape_transition_shape_capa_create(rb_shape_t* shape, size_t new_capacity)
ID edge_name = rb_make_temporary_id(new_capacity);
bool dont_care;
rb_shape_t * new_shape = get_next_shape_internal(shape, edge_name, SHAPE_CAPACITY_CHANGE, &dont_care, true);
- RUBY_ASSERT(rb_shape_id(new_shape) != OBJ_TOO_COMPLEX_SHAPE_ID);
- new_shape->capacity = (uint32_t)new_capacity;
+ if (rb_shape_id(new_shape) != OBJ_TOO_COMPLEX_SHAPE_ID) {
+ new_shape->capacity = (uint32_t)new_capacity;
+ }
return new_shape;
}
@@ -819,12 +820,18 @@ rb_shape_traverse_from_new_root(rb_shape_t *initial_shape, rb_shape_t *dest_shap
rb_shape_t *
rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape)
{
+ RUBY_ASSERT(rb_shape_id(initial_shape) != OBJ_TOO_COMPLEX_SHAPE_ID);
+ RUBY_ASSERT(rb_shape_id(dest_shape) != OBJ_TOO_COMPLEX_SHAPE_ID);
+
rb_shape_t * midway_shape;
RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT);
if (dest_shape->type != initial_shape->type) {
midway_shape = rb_shape_rebuild_shape(initial_shape, rb_shape_get_parent(dest_shape));
+ if (UNLIKELY(rb_shape_id(midway_shape) == OBJ_TOO_COMPLEX_SHAPE_ID)) {
+ return midway_shape;
+ }
}
else {
midway_shape = initial_shape;
@@ -837,7 +844,9 @@ rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape)
midway_shape = rb_shape_transition_shape_capa(midway_shape);
}
- midway_shape = rb_shape_get_next_iv_shape(midway_shape, dest_shape->edge_name);
+ if (LIKELY(rb_shape_id(midway_shape) != OBJ_TOO_COMPLEX_SHAPE_ID)) {
+ midway_shape = rb_shape_get_next_iv_shape(midway_shape, dest_shape->edge_name);
+ }
break;
case SHAPE_ROOT:
case SHAPE_FROZEN:
diff --git a/test/ruby/test_shapes.rb b/test/ruby/test_shapes.rb
index af2af5c778..6d98daf233 100644
--- a/test/ruby/test_shapes.rb
+++ b/test/ruby/test_shapes.rb
@@ -226,6 +226,32 @@ class TestShapes < Test::Unit::TestCase
end;
end
+ def test_run_out_of_shape_rb_obj_copy_ivar
+ assert_separately([], "#{<<~"begin;"}\n#{<<~'end;'}")
+ begin;
+ class A
+ def initialize
+ init # Avoid right sizing
+ end
+
+ def init
+ @a = @b = @c = @d = @e = @f = 1
+ end
+ end
+
+ a = A.new
+
+ o = Object.new
+ i = 0
+ while RubyVM::Shape.shapes_available > 1
+ o.instance_variable_set(:"@i#{i}", 1)
+ i += 1
+ end
+
+ a.dup
+ end;
+ end
+
def test_use_all_shapes_module
assert_separately([], "#{<<~"begin;"}\n#{<<~'end;'}")
begin;
diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index 2fc9b96114..1e514b7008 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -1382,18 +1382,20 @@ vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic,
{
rb_ivar_set(obj, id, val);
shape_id_t next_shape_id = rb_shape_get_shape_id(obj);
- rb_shape_t *next_shape = rb_shape_get_shape_by_id(next_shape_id);
- attr_index_t index;
+ if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
+ rb_shape_t *next_shape = rb_shape_get_shape_by_id(next_shape_id);
+ attr_index_t index;
- if (rb_shape_get_iv_index(next_shape, id, &index)) { // based off the hash stored in the transition tree
- if (index >= MAX_IVARS) {
- rb_raise(rb_eArgError, "too many instance variables");
- }
+ if (rb_shape_get_iv_index(next_shape, id, &index)) { // based off the hash stored in the transition tree
+ if (index >= MAX_IVARS) {
+ rb_raise(rb_eArgError, "too many instance variables");
+ }
- populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
- }
- else {
- rb_bug("didn't find the id");
+ populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
+ }
+ else {
+ rb_bug("didn't find the id");
+ }
}
return val;