-rw-r--r--   benchmark/struct_accessor.yml    25
-rw-r--r--   test/ruby/test_settracefunc.rb   49
-rw-r--r--   vm_callinfo.h                     9
-rw-r--r--   vm_eval.c                         8
-rw-r--r--   vm_insnhelper.c                  49
5 files changed, 118 insertions(+), 22 deletions(-)
diff --git a/benchmark/struct_accessor.yml b/benchmark/struct_accessor.yml
new file mode 100644
index 0000000000..61176cfdd4
--- /dev/null
+++ b/benchmark/struct_accessor.yml
@@ -0,0 +1,25 @@
+prelude: |
+ C = Struct.new(:x) do
+ class_eval <<-END
+ def r
+ #{'x;'*256}
+ end
+ def w
+ #{'self.x = nil;'*256}
+ end
+ def rm
+ m = method(:x)
+ #{'m.call;'*256}
+ end
+ def wm
+ m = method(:x=)
+ #{'m.call(nil);'*256}
+ end
+ END
+ end
+ obj = C.new(nil)
+benchmark:
+ member_reader: "obj.r"
+ member_writer: "obj.w"
+ member_reader_method: "obj.rm"
+ member_writer_method: "obj.wm"
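For reference, a minimal Ruby sketch of what the YAML prelude above generates, with the 256x unrolling collapsed to a single call per method; the constant and method names simply mirror the benchmark and are not part of the change:

  C = Struct.new(:x) do
    def r;  x;                     end  # direct member read  (optimized STRUCT_AREF dispatch)
    def w;  self.x = nil;          end  # direct member write (optimized STRUCT_ASET dispatch)
    def rm; method(:x).call;       end  # same reader through Method#call (vm_call0 path)
    def wm; method(:x=).call(nil); end  # same writer through Method#call (vm_call0 path)
  end
  obj = C.new(nil)
  obj.r; obj.w; obj.rm; obj.wm
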
diff --git a/test/ruby/test_settracefunc.rb b/test/ruby/test_settracefunc.rb
index 8647723ed3..8fcc909d83 100644
--- a/test/ruby/test_settracefunc.rb
+++ b/test/ruby/test_settracefunc.rb
@@ -955,6 +955,55 @@ CODE
assert_equal(expected*2, events)
end
+ def test_tracepoint_struct
+ c = Struct.new(:x) do
+ alias y x
+ alias y= x=
+ end
+ obj = c.new
+
+ ar_meth = obj.method(:x)
+ aw_meth = obj.method(:x=)
+ aar_meth = obj.method(:y)
+ aaw_meth = obj.method(:y=)
+ events = []
+ trace = TracePoint.new(:c_call, :c_return){|tp|
+ next if !target_thread?
+ next if tp.path != __FILE__
+ next if tp.method_id == :call
+ case tp.event
+ when :c_call
+ assert_raise(RuntimeError) {tp.return_value}
+ events << [tp.event, tp.method_id, tp.callee_id]
+ when :c_return
+ events << [tp.event, tp.method_id, tp.callee_id, tp.return_value]
+ end
+ }
+ test_proc = proc do
+ obj.x = 1
+ obj.x
+ obj.y = 2
+ obj.y
+ aw_meth.call(1)
+ ar_meth.call
+ aaw_meth.call(2)
+ aar_meth.call
+ end
+ test_proc.call # populate call caches
+ trace.enable(&test_proc)
+ expected = [
+ [:c_call, :x=, :x=],
+ [:c_return, :x=, :x=, 1],
+ [:c_call, :x, :x],
+ [:c_return, :x, :x, 1],
+ [:c_call, :x=, :y=],
+ [:c_return, :x=, :y=, 2],
+ [:c_call, :x, :y],
+ [:c_return, :x, :y, 2],
+ ]
+ assert_equal(expected*2, events)
+ end
+
class XYZZYException < Exception; end
def method_test_tracepoint_raised_exception err
raise err
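In user-facing terms, the new test checks that Struct member readers and writers now fire :c_call/:c_return events, with method_id reporting the defining member accessor and callee_id the name actually called (e.g. an alias). A self-contained sketch of that behavior, assuming a Ruby built with this change; Point and pt are illustrative names only:

  Point = Struct.new(:x) { alias y x }
  pt = Point.new(0)

  events = []
  tp = TracePoint.new(:c_call, :c_return) do |t|
    events << [t.event, t.method_id, t.callee_id] if t.path == __FILE__
  end
  tp.enable do
    pt.x = 1   # writer:         [:c_call, :x=, :x=], [:c_return, :x=, :x=]
    pt.y       # aliased reader: [:c_call, :x,  :y ], [:c_return, :x,  :y ]
  end
  pp events
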
diff --git a/vm_callinfo.h b/vm_callinfo.h
index fb6998b8b3..8437f2176c 100644
--- a/vm_callinfo.h
+++ b/vm_callinfo.h
@@ -300,6 +300,7 @@ struct rb_callcache {
#define VM_CALLCACHE_UNMARKABLE FL_FREEZE
#define VM_CALLCACHE_ON_STACK FL_EXIVAR
+/* VM_CALLCACHE_IVAR used for IVAR/ATTRSET/STRUCT_AREF/STRUCT_ASET methods */
#define VM_CALLCACHE_IVAR IMEMO_FL_USER0
#define VM_CALLCACHE_BF IMEMO_FL_USER1
#define VM_CALLCACHE_SUPER IMEMO_FL_USER2
@@ -488,6 +489,12 @@ vm_cc_call_set(const struct rb_callcache *cc, vm_call_handler call)
}
static inline void
+set_vm_cc_ivar(const struct rb_callcache *cc)
+{
+ *(VALUE *)&cc->flags |= VM_CALLCACHE_IVAR;
+}
+
+static inline void
vm_cc_attr_index_set(const struct rb_callcache *cc, attr_index_t index, shape_id_t dest_shape_id)
{
uintptr_t *attr_value = (uintptr_t *)&cc->aux_.attr.value;
@@ -498,7 +505,7 @@ vm_cc_attr_index_set(const struct rb_callcache *cc, attr_index_t index, shape_id
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
VM_ASSERT(cc != vm_cc_empty());
*attr_value = (attr_index_t)(index + 1) | ((uintptr_t)(dest_shape_id) << SHAPE_FLAG_SHIFT);
- *(VALUE *)&cc->flags |= VM_CALLCACHE_IVAR;
+ set_vm_cc_ivar(cc);
}
static inline bool
diff --git a/vm_eval.c b/vm_eval.c
index a1336cf17a..937f0582c4 100644
--- a/vm_eval.c
+++ b/vm_eval.c
@@ -299,11 +299,15 @@ vm_call0_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const
}
case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
vm_call_check_arity(calling, 0, argv);
- ret = vm_call_opt_struct_aref0(ec, calling);
+ VM_CALL_METHOD_ATTR(ret,
+ vm_call_opt_struct_aref0(ec, calling),
+ (void)0);
goto success;
case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
vm_call_check_arity(calling, 1, argv);
- ret = vm_call_opt_struct_aset0(ec, calling, argv[0]);
+ VM_CALL_METHOD_ATTR(ret,
+ vm_call_opt_struct_aset0(ec, calling, argv[0]),
+ (void)0);
goto success;
default:
rb_bug("vm_call0: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
diff --git a/vm_insnhelper.c b/vm_insnhelper.c
index 50ca9902fd..b72167f75d 100644
--- a/vm_insnhelper.c
+++ b/vm_insnhelper.c
@@ -4342,6 +4342,19 @@ vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
NOINLINE(static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
const struct rb_callinfo *ci, const struct rb_callcache *cc));
+#define VM_CALL_METHOD_ATTR(var, func, nohook) \
+ if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
+ EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
+ vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
+ var = func; \
+ EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
+ vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
+ } \
+ else { \
+ nohook; \
+ var = func; \
+ }
+
static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
const struct rb_callinfo *ci, const struct rb_callcache *cc)
@@ -4356,35 +4369,33 @@ vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb
case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
return vm_call_opt_block_call(ec, cfp, calling);
- case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
+ case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
CALLER_SETUP_ARG(cfp, calling, ci, 0);
rb_check_arity(calling->argc, 0, 0);
- CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
- return vm_call_opt_struct_aref(ec, cfp, calling);
- case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
+ VALUE v;
+ VM_CALL_METHOD_ATTR(v,
+ vm_call_opt_struct_aref(ec, cfp, calling),
+ set_vm_cc_ivar(cc); \
+ CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
+ return v;
+ }
+ case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
CALLER_SETUP_ARG(cfp, calling, ci, 1);
rb_check_arity(calling->argc, 1, 1);
- CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
- return vm_call_opt_struct_aset(ec, cfp, calling);
+
+ VALUE v;
+ VM_CALL_METHOD_ATTR(v,
+ vm_call_opt_struct_aset(ec, cfp, calling),
+ set_vm_cc_ivar(cc); \
+ CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
+ return v;
+ }
default:
rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
}
}
-#define VM_CALL_METHOD_ATTR(var, func, nohook) \
- if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
- EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
- vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
- var = func; \
- EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
- vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
- } \
- else { \
- nohook; \
- var = func; \
- }
-
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
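A note on the caching interplay: the relocated VM_CALL_METHOD_ATTR macro installs the fastpath handler and calls set_vm_cc_ivar only in its no-hook branch, and marking the call cache with VM_CALLCACHE_IVAR appears to be what lets the VM discard such caches when :c_call/:c_return hooks are enabled later; that is the situation the test's "populate call caches" step exercises. A sketch of that scenario, with illustrative names (Rec, hot):

  Rec = Struct.new(:v)
  r = Rec.new(0)
  hot = proc { r.v = 1; r.v }

  hot.call    # warm up: both accessor call sites get their optimized handlers cached

  seen = []
  TracePoint.new(:c_call) { |t| seen << t.method_id if [:v, :v=].include?(t.method_id) }.enable(&hot)
  p seen      # expected: [:v=, :v] even though the caches were populated before tracing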