-rw-r--r--  gc.c   | 33
-rw-r--r--  iseq.c | 49
2 files changed, 51 insertions(+), 31 deletions(-)
diff --git a/gc.c b/gc.c
--- a/gc.c
+++ b/gc.c
@@ -7015,11 +7015,22 @@ gc_mark_imemo(rb_objspace_t *objspace, VALUE obj)
       case imemo_callinfo:
         return;
       case imemo_callcache:
-        {
-            const struct rb_callcache *cc = (const struct rb_callcache *)obj;
-            // should not mark klass here
-            gc_mark(objspace, (VALUE)vm_cc_cme(cc));
-        }
+        /* cc is a callcache.
+         *
+         * cc->klass (klass) should not be marked because when the klass is
+         * freed, cc->klass is cleared by `vm_cc_invalidate()`.
+         *
+         * cc->cme (cme) should not be marked because cc is invalidated
+         * when cme is freed.
+         * - klass marks cme if klass uses cme.
+         * - the caller class's ccs->cme marks cc->cme.
+         * - if the klass no longer refers to the cc, the cc has been
+         *   invalidated by `vm_cc_invalidate()` and cc->cme is never
+         *   accessed again.
+         * - with multiple Ractors, cme is collected by global GC, so
+         *   this is safe as long as GC does not interleave with
+         *   accesses to cc and cme.
+         */
         return;
       case imemo_constcache:
         {
@@ -10122,12 +10133,14 @@ gc_ref_update_imemo(rb_objspace_t *objspace, VALUE obj)
                 if (!is_live_object(objspace, cc->klass)) {
                     *((VALUE *)(&cc->klass)) = (VALUE)0;
                 }
-            }
 
-            if (cc->cme_) {
-                TYPED_UPDATE_IF_MOVED(objspace, struct rb_callable_method_entry_struct *, cc->cme_);
-                if (!is_live_object(objspace, (VALUE)cc->cme_)) {
-                    *((struct rb_callable_method_entry_struct **)(&cc->cme_)) = (struct rb_callable_method_entry_struct *)0;
+                // cc->cme_ is available if cc->klass is given
+
+                if (cc->cme_) {
+                    TYPED_UPDATE_IF_MOVED(objspace, struct rb_callable_method_entry_struct *, cc->cme_);
+                    if (!is_live_object(objspace, (VALUE)cc->cme_)) {
+                        *((struct rb_callable_method_entry_struct **)(&cc->cme_)) = (struct rb_callable_method_entry_struct *)0;
+                    }
                 }
             }
         }
diff --git a/iseq.c b/iseq.c
--- a/iseq.c
+++ b/iseq.c
@@ -282,6 +282,29 @@ rb_iseq_mark_and_move_each_value(const rb_iseq_t *iseq, VALUE *original_iseq)
     }
 }
 
+static bool
+cc_is_active(const struct rb_callcache *cc, bool reference_updating)
+{
+    if (cc) {
+        if (reference_updating) {
+            cc = (const struct rb_callcache *)rb_gc_location((VALUE)cc);
+        }
+
+        if (vm_cc_markable(cc)) {
+            if (cc->klass) { // cc is not invalidated
+                const struct rb_callable_method_entry_struct *cme = vm_cc_cme(cc);
+                if (reference_updating) {
+                    cme = (const struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cme);
+                }
+                if (!METHOD_ENTRY_INVALIDATED(cme)) {
+                    return true;
+                }
+            }
+        }
+    }
+    return false;
+}
+
 void
 rb_iseq_mark_and_move(rb_iseq_t *iseq, bool reference_updating)
 {
@@ -310,27 +333,11 @@ rb_iseq_mark_and_move(rb_iseq_t *iseq, bool reference_updating)
 
             if (cds[i].ci) rb_gc_mark_and_move_ptr(&cds[i].ci);
 
-            const struct rb_callcache *cc = cds[i].cc;
-            if (cc) {
-                if (reference_updating) {
-                    cc = (const struct rb_callcache *)rb_gc_location((VALUE)cc);
-                }
-
-                if (vm_cc_markable(cc)) {
-                    VM_ASSERT((cc->flags & VM_CALLCACHE_ON_STACK) == 0);
-
-                    const struct rb_callable_method_entry_struct *cme = vm_cc_cme(cc);
-                    if (reference_updating) {
-                        cme = (const struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cme);
-                    }
-
-                    if (cc->klass && !METHOD_ENTRY_INVALIDATED(cme)) {
-                        rb_gc_mark_and_move_ptr(&cds[i].cc);
-                    }
-                    else {
-                        cds[i].cc = rb_vm_empty_cc();
-                    }
-                }
+            if (cc_is_active(cds[i].cc, reference_updating)) {
+                rb_gc_mark_and_move_ptr(&cds[i].cc);
+            }
+            else {
+                cds[i].cc = rb_vm_empty_cc();
             }
         }
     }
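
The reasoning in the new gc_mark_imemo comment is easier to see outside of CRuby. Below is a minimal, self-contained C sketch of the invariant that the comment and the cc_is_active() helper rely on: invalidating a call cache clears its klass slot, and the cme is only ever reached through caches whose klass is still set, so the GC never needs to mark the cme of an invalidated cache. All toy_* names are hypothetical stand-ins, not CRuby's real types or functions.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Hypothetical stand-in for rb_callable_method_entry_struct. */
    struct toy_cme {
        bool invalidated;
    };

    /* Hypothetical stand-in for rb_callcache: klass doubles as the
     * "still valid" flag, exactly as cc->klass does in the real code. */
    struct toy_callcache {
        const void *klass;          /* NULL once the cache is invalidated */
        const struct toy_cme *cme;  /* dereferenced only while klass != NULL */
    };

    /* Plays the role of vm_cc_invalidate(): clearing klass is the single
     * signal that this cache's cme must no longer be accessed. */
    static void toy_cc_invalidate(struct toy_callcache *cc)
    {
        cc->klass = NULL;
    }

    /* Plays the role of cc_is_active() from the iseq.c hunk (minus the
     * compaction/rb_gc_location handling): a cache is kept only when it
     * has not been invalidated and its cme is still valid. */
    static bool toy_cc_is_active(const struct toy_callcache *cc)
    {
        if (cc && cc->klass) {            /* cc is not invalidated */
            if (!cc->cme->invalidated) {
                return true;
            }
        }
        return false;
    }

    int main(void)
    {
        struct toy_cme cme = { .invalidated = false };
        int klass_placeholder;
        struct toy_callcache cc = { .klass = &klass_placeholder, .cme = &cme };

        printf("before invalidation: active = %d\n", toy_cc_is_active(&cc)); /* 1 */

        /* Freeing the klass (or the cme) invalidates the cache first, so a
         * marking pass that checks toy_cc_is_active() never follows
         * cc->cme into freed memory. */
        toy_cc_invalidate(&cc);
        printf("after invalidation:  active = %d\n", toy_cc_is_active(&cc)); /* 0 */

        return 0;
    }

Note how the patch uses one predicate for both GC passes: during marking, cc_is_active() decides whether cds[i].cc is worth keeping, and during reference updating (with the rb_gc_location() calls that the sketch omits) it decides whether the slot can be reset to rb_vm_empty_cc().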