module RubyVM::MJIT
  class ExitCompiler
    def initialize
      @gc_refs = [] # TODO: GC offsets?
    end

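    # Exits back to the interpreter share a common epilogue: pop SP, EC, and
    # CFP (the callee-saved registers the compiled code's entry is assumed to
    # push) and return Qundef in RAX, which presumably tells the VM's JIT
    # dispatch to resume the frame in the interpreter from the PC/SP saved to
    # the CFP. The leave exit instead returns whatever #leave left in RAX.
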
    # Used for invalidating a block on entry.
    # @param pc [Integer]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def compile_entry_exit(pc, ctx, asm, cause:)
      # Increment per-insn exit counter
      incr_insn_exit(pc, asm)

      # Fix pc/sp offsets for the interpreter
      save_pc_and_sp(pc, ctx, asm, reset_sp_offset: false)

      # Restore callee-saved registers
      asm.comment("#{cause}: entry exit")
      asm.pop(SP)
      asm.pop(EC)
      asm.pop(CFP)

      asm.mov(:rax, Qundef)
      asm.ret
    end

    # Set to cfp->jit_return by default for the leave insn
    # @param asm [RubyVM::MJIT::Assembler]
    def compile_leave_exit(asm)
      asm.comment('default cfp->jit_return')

      # Restore callee-saved registers
      asm.pop(SP)
      asm.pop(EC)
      asm.pop(CFP)

      # :rax is written by #leave
      asm.ret
    end

    # Fire cfunc events on invalidation by TracePoint
    # @param asm [RubyVM::MJIT::Assembler]
    def compile_full_cfunc_return(asm)
      # This chunk of code expects the EC register to be filled properly and
      # RAX to contain the return value of the C method.

      asm.comment('full cfunc return')
      asm.mov(C_ARG_OPNDS[0], EC)
      asm.mov(C_ARG_OPNDS[1], :rax)
      asm.call(C.rb_full_cfunc_return)

      # TODO: count the exit

      # Restore callee-saved registers
      asm.pop(SP)
      asm.pop(EC)
      asm.pop(CFP)

      asm.mov(:rax, Qundef)
      asm.ret
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def compile_side_exit(jit, ctx, asm)
      # Increment per-insn exit counter
      incr_insn_exit(jit.pc, asm)

      # Fix pc/sp offsets for the interpreter
      save_pc_and_sp(jit.pc, ctx.dup, asm) # dup to avoid sp_offset update

      # Restore callee-saved registers
      asm.comment("exit to interpreter on #{pc_to_insn(jit.pc).name}")
      asm.pop(SP)
      asm.pop(EC)
      asm.pop(CFP)

      asm.mov(:rax, Qundef)
      asm.ret
    end

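    # Assembles a branch stub: code emitted for a branch target that is not
    # compiled yet. When hit, it calls rb_mjit_branch_stub_hit with the stub
    # object, the current sp_offset, and which target (0 or 1) was taken; the
    # C helper is expected to compile the target and return the address of
    # the code to jump to.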
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    # @param branch_stub [RubyVM::MJIT::BranchStub]
    # @param target0_p [TrueClass,FalseClass]
    def compile_branch_stub(ctx, asm, branch_stub, target0_p)
      # Call rb_mjit_branch_stub_hit
      asm.comment("branch stub hit: #{branch_stub.iseq.body.location.label}@#{C.rb_iseq_path(branch_stub.iseq)}:#{iseq_lineno(branch_stub.iseq, target0_p ? branch_stub.target0.pc : branch_stub.target1.pc)}")
      asm.mov(:rdi, to_value(branch_stub))
      asm.mov(:esi, ctx.sp_offset)
      asm.mov(:edx, target0_p ? 1 : 0)
      asm.call(C.rb_mjit_branch_stub_hit)

      # Jump to the address returned by rb_mjit_branch_stub_hit
      asm.jmp(:rax)
    end

    private

    def pc_to_insn(pc)
      Compiler.decode_insn(C.VALUE.new(pc).*)
    end

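    # Emits code that bumps the per-instruction exit counter for the insn at
    # `pc`, only when stats collection is enabled. The counters appear to live
    # in the C-side mjit_insn_exits table indexed by opcode (insn.bin); the
    # increment is a plain memory add, not atomic yet (see the TODO below).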
    # @param pc [Integer]
    # @param asm [RubyVM::MJIT::Assembler]
    def incr_insn_exit(pc, asm)
      if C.mjit_opts.stats
        insn = Compiler.decode_insn(C.VALUE.new(pc).*)
        asm.comment("increment insn exit: #{insn.name}")
        asm.mov(:rax, (C.mjit_insn_exits + insn.bin).to_i)
        asm.add([:rax], 1) # TODO: lock
      end
    end

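    # Writes the interpreter-visible PC and SP back to the CFP. The JIT is
    # assumed to track stack growth virtually in ctx.sp_offset, so cfp->sp
    # lags the real stack top by sp_offset VALUE slots until this sync runs;
    # resetting sp_offset to 0 afterwards records that they now match, unless
    # reset_sp_offset: false is requested.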
    # @param pc [Integer]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def save_pc_and_sp(pc, ctx, asm, reset_sp_offset: true)
      # Update pc (TODO: manage PC offset?)
      asm.comment("save PC#{' and SP' if ctx.sp_offset != 0} to CFP")
      asm.mov(:rax, pc) # rax = pc
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:pc)], :rax) # cfp->pc = rax

      # Update sp
      if ctx.sp_offset != 0
        asm.add(SP, C.VALUE.size * ctx.sp_offset) # sp += stack_size
        asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], SP) # cfp->sp = sp
        if reset_sp_offset
          ctx.sp_offset = 0
        end
      end
    end

    def to_value(obj)
      @gc_refs << obj
      C.to_value(obj)
    end

    def iseq_lineno(iseq, pc)
      C.rb_iseq_line_no(iseq, (pc - iseq.body.iseq_encoded.to_i) / C.VALUE.size)
    end
  end
end