diff options
author | 卜部昌平 <[email protected]> | 2019-10-03 12:26:41 +0900 |
---|---|---|
committer | 卜部昌平 <[email protected]> | 2019-10-03 12:45:24 +0900 |
commit | eb92159d72fc711387f7e17ffbaca1678f23fd47 (patch) | |
tree | 13c5177b80fbd50c7113eee5aca5158652f24f1b /vm_insnhelper.c | |
parent | ef697388becedf36966a2edcdcf88baca342b9e2 (diff) |
Revert https://github.com/ruby/ruby/pull/2486
This reverts commits: 10d6a3aca7 8ba48c1b85 fba8627dc1 dd883de5ba
6c6a25feca 167e6b48f1 7cb96d41a5 3207979278 595b3c4fdd 1521f7cf89
c11c5e69ac cf33608203 3632a812c0 f56506be0d 86427a3219 .
The reason for the revert is that we observe an ABA problem around
the inline method cache. When a cache lookup misses, we search for a
method entry. And if the entry is identical to what was cached
before, we reuse the cache. But the commits we are reverting here
introduced situations where a method entry is freed, then the
identical memory region is used for another method entry. An
inline method cache cannot detect that ABA.
Here is code that reproduces such a situation:
```ruby
require 'prime'
class << Integer
alias org_sqrt sqrt
def sqrt(n)
raise
end
GC.stress = true
Prime.each(7*37){} rescue nil # <- Here we populate CC
class << Object.new; end
# This adjacent remove-then-alias maneuver
# frees a method entry, then immediately
# reuses it for another.
remove_method :sqrt
alias sqrt org_sqrt
end
Prime.each(7*37).to_a # <- SEGV
```
Diffstat (limited to 'vm_insnhelper.c')
-rw-r--r-- | vm_insnhelper.c | 52 |
1 files changed, 29 insertions, 23 deletions
diff --git a/vm_insnhelper.c b/vm_insnhelper.c index ef5a6db1af..127c8c3f9e 100644 --- a/vm_insnhelper.c +++ b/vm_insnhelper.c @@ -19,7 +19,8 @@ #include "ruby/config.h" #include "debug_counter.h" -extern void rb_method_entry_spoof(const rb_method_entry_t *me); +extern rb_method_definition_t *rb_method_definition_create(rb_method_type_t type, ID mid); +extern void rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts); extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2); extern VALUE rb_make_no_method_exception(VALUE exc, VALUE format, VALUE obj, int argc, const VALUE *argv, int priv); @@ -578,8 +579,8 @@ vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, return val; } -PUREFUNC(static const rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar)); -static const rb_callable_method_entry_t * +PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar)); +static rb_callable_method_entry_t * check_method_entry(VALUE obj, int can_be_svar) { if (obj == Qfalse) return NULL; @@ -590,7 +591,7 @@ check_method_entry(VALUE obj, int can_be_svar) switch (imemo_type(obj)) { case imemo_ment: - return (const rb_callable_method_entry_t *)obj; + return (rb_callable_method_entry_t *)obj; case imemo_cref: return NULL; case imemo_svar: @@ -609,7 +610,7 @@ MJIT_STATIC const rb_callable_method_entry_t * rb_vm_frame_method_entry(const rb_control_frame_t *cfp) { const VALUE *ep = cfp->ep; - const rb_callable_method_entry_t *me; + rb_callable_method_entry_t *me; while (!VM_ENV_LOCAL_P(ep)) { if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me; @@ -620,7 +621,7 @@ rb_vm_frame_method_entry(const rb_control_frame_t *cfp) } static rb_cref_t * -method_entry_cref(const rb_callable_method_entry_t *me) +method_entry_cref(rb_callable_method_entry_t *me) { switch (me->def->type) { case 
VM_METHOD_TYPE_ISEQ: @@ -644,7 +645,7 @@ check_cref(VALUE obj, int can_be_svar) switch (imemo_type(obj)) { case imemo_ment: - return method_entry_cref((const rb_callable_method_entry_t *)obj); + return method_entry_cref((rb_callable_method_entry_t *)obj); case imemo_cref: return (rb_cref_t *)obj; case imemo_svar: @@ -1390,6 +1391,9 @@ calccall(const struct rb_call_info *ci, const struct rb_call_cache *cc, const rb else if (LIKELY(cc->me != me)) { return vm_call_general; /* normal cases */ } + else if (UNLIKELY(cc->def != me->def)) { + return vm_call_general; /* cc->me was refined elsewhere */ + } /* "Calling a formerly-public method, which is now privatised, with an * explicit receiver" is the only situation we have to check here. A * formerly-private method now publicised is an absolutely safe thing. @@ -1412,6 +1416,7 @@ rb_vm_search_method_slowpath(const struct rb_call_info *ci, struct rb_call_cache GET_GLOBAL_METHOD_STATE(), RCLASS_SERIAL(klass), me, + me ? me->def : NULL, calccall(ci, cc, me), }; VM_ASSERT(callable_method_entry_p(cc->me)); @@ -2573,31 +2578,32 @@ find_defined_class_by_owner(VALUE current_class, VALUE target_owner) return current_class; /* maybe module function */ } -static const void* -aliased_callable_method_entry0(const rb_method_entry_t *me) +static const rb_callable_method_entry_t * +aliased_callable_method_entry(const rb_callable_method_entry_t *me) { const rb_method_entry_t *orig_me = me->def->body.alias.original_me; const rb_callable_method_entry_t *cme; - if (orig_me->defined_class != 0) { - VM_ASSERT(callable_class_p(orig_me->defined_class)); - return orig_me; - } - else { + if (orig_me->defined_class == 0) { VALUE defined_class = find_defined_class_by_owner(me->defined_class, orig_me->owner); VM_ASSERT(RB_TYPE_P(orig_me->owner, T_MODULE)); cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class); - const rb_method_entry_t *ret = - rb_method_entry_from_template((const rb_method_entry_t*)me, cme); - 
rb_method_entry_spoof(ret); - return ret; + + if (me->def->alias_count + me->def->complemented_count == 0) { + RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme); + } + else { + rb_method_definition_t *def = + rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id); + rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme); + } + } + else { + cme = (const rb_callable_method_entry_t *)orig_me; } -} -static const rb_callable_method_entry_t* -aliased_callable_method_entry(const rb_callable_method_entry_t *me) -{ - return aliased_callable_method_entry0((const void*)me); + VM_ASSERT(callable_method_entry_p(cme)); + return cme; } static const rb_callable_method_entry_t * |