Change call_ptr to take a *const u8 instead of requiring pre-wrapping… · kddnewton/ruby@d2e2e56 · GitHub
[go: up one dir, main page]

Skip to content

Commit d2e2e56

Browse files
authored
Change call_ptr to take a *const u8 instead of requiring pre-wrapping in a CodePtr (ruby#184)
1 parent f85efad commit d2e2e56

File tree

3 files changed

+25
-43
lines changed

3 files changed

+25
-43
lines changed

yjit/src/asm/x86_64/mod.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1007,13 +1007,13 @@ pub fn call_rel32(cb: &mut CodeBlock, rel32: i32) {
10071007
}
10081008

10091009
/// call - Call a pointer, encode with a 32-bit offset if possible
1010-
pub fn call_ptr(cb: &mut CodeBlock, scratch_opnd: X86Opnd, dst_ptr: CodePtr) {
1010+
pub fn call_ptr(cb: &mut CodeBlock, scratch_opnd: X86Opnd, dst_ptr: *const u8) {
10111011
if let X86Opnd::Reg(scratch_reg) = scratch_opnd {
10121012
// Pointer to the end of this call instruction
10131013
let end_ptr = cb.get_ptr(cb.write_pos + 5);
10141014

10151015
// Compute the jump offset
1016-
let rel64: i64 = dst_ptr.into_i64() - end_ptr.into_i64();
1016+
let rel64: i64 = dst_ptr as i64 - end_ptr.into_i64();
10171017

10181018
// If the offset fits in 32-bit
10191019
if rel64 >= i32::MIN.into() && rel64 <= i32::MAX.into() {
@@ -1022,7 +1022,7 @@ pub fn call_ptr(cb: &mut CodeBlock, scratch_opnd: X86Opnd, dst_ptr: CodePtr) {
10221022
}
10231023

10241024
// Move the pointer into the scratch register and call
1025-
mov(cb, scratch_opnd, const_ptr_opnd(dst_ptr.raw_ptr()));
1025+
mov(cb, scratch_opnd, const_ptr_opnd(dst_ptr));
10261026
call(cb, scratch_opnd);
10271027
} else {
10281028
unreachable!();

yjit/src/codegen.rs

Lines changed: 21 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -441,7 +441,7 @@ fn gen_exit(exit_pc: *mut VALUE, ctx: &Context, cb: &mut CodeBlock) -> CodePtr
441441
#[cfg(feature = "stats")]
442442
if get_option!(gen_stats) {
443443
mov(cb, RDI, const_ptr_opnd(exit_pc));
444-
call_ptr(cb, RSI, CodePtr(yjit_count_side_exit_op));
444+
call_ptr(cb, RSI, yjit_count_side_exit_op as *const u8);
445445
}
446446

447447
pop(cb, REG_SP);
@@ -1424,8 +1424,7 @@ fn gen_newarray(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb:
14241424
mov(cb, C_ARG_REGS[0], REG_EC);
14251425
mov(cb, C_ARG_REGS[1], imm_opnd(n.into()));
14261426
lea(cb, C_ARG_REGS[2], values_ptr);
1427-
let ary_new = CodePtr::from(rb_ec_ary_new_from_values as *mut u8);
1428-
call_ptr(cb, REG0, ary_new);
1427+
call_ptr(cb, REG0, rb_ec_ary_new_from_values as *const u8);
14291428

14301429
ctx.stack_pop(n as usize);
14311430
let stack_ret = ctx.stack_push(Type::Array);
@@ -1444,8 +1443,7 @@ fn gen_duparray(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb:
14441443

14451444
// call rb_ary_resurrect(VALUE ary);
14461445
jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], ary);
1447-
let ary_res = CodePtr::from(rb_ary_resurrect as *mut u8);
1448-
call_ptr(cb, REG0, ary_res);
1446+
call_ptr(cb, REG0, rb_ary_resurrect as *const u8);
14491447

14501448
let stack_ret = ctx.stack_push(Type::Array);
14511449
mov(cb, stack_ret, RAX);
@@ -1463,8 +1461,7 @@ fn gen_duphash(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb: &
14631461

14641462
// call rb_hash_resurrect(VALUE hash);
14651463
jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], hash);
1466-
let hash_res = CodePtr::from(rb_hash_resurrect as *mut u8);
1467-
call_ptr(cb, REG0, hash_res);
1464+
call_ptr(cb, REG0, rb_hash_resurrect as *const u8);
14681465

14691466
let stack_ret = ctx.stack_push(Type::Hash);
14701467
mov(cb, stack_ret, RAX);
@@ -1487,8 +1484,7 @@ fn gen_splatarray(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb
14871484
// Call rb_vm_splat_array(flag, ary)
14881485
jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], flag);
14891486
mov(cb, C_ARG_REGS[1], ary_opnd);
1490-
let splat_array = CodePtr::from(rb_vm_splat_array as *mut u8);
1491-
call_ptr(cb, REG1, splat_array);
1487+
call_ptr(cb, REG1, rb_vm_splat_array as *const u8);
14921488

14931489
let stack_ret = ctx.stack_push(Type::Array);
14941490
mov(cb, stack_ret, RAX);
@@ -1508,8 +1504,7 @@ fn gen_newrange(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb:
15081504
mov(cb, C_ARG_REGS[0], ctx.stack_opnd(1));
15091505
mov(cb, C_ARG_REGS[1], ctx.stack_opnd(0));
15101506
mov(cb, C_ARG_REGS[2], uimm_opnd(flag.into()));
1511-
let range_new = CodePtr::from(rb_range_new as *mut u8);
1512-
call_ptr(cb, REG0, range_new);
1507+
call_ptr(cb, REG0, rb_range_new as *const u8);
15131508

15141509
ctx.stack_pop(2);
15151510
let stack_ret = ctx.stack_push(Type::UnknownHeap);
@@ -1807,8 +1802,7 @@ fn gen_newhash(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb: &
18071802
if num != 0 {
18081803
// val = rb_hash_new_with_size(num / 2);
18091804
mov(cb, C_ARG_REGS[0], imm_opnd(num / 2));
1810-
let hn_code_ptr = CodePtr::from(rb_hash_new_with_size as *mut u8);
1811-
call_ptr(cb, REG0, hn_code_ptr);
1805+
call_ptr(cb, REG0, rb_hash_new_with_size as *const u8);
18121806

18131807
// save the allocated hash as we want to push it after insertion
18141808
push(cb, RAX);
@@ -1818,8 +1812,7 @@ fn gen_newhash(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb: &
18181812
mov(cb, C_ARG_REGS[0], imm_opnd(num));
18191813
lea(cb, C_ARG_REGS[1], ctx.stack_opnd((num - 1).try_into().unwrap()));
18201814
mov(cb, C_ARG_REGS[2], RAX);
1821-
let bi_code_opnd = CodePtr::from(rb_hash_bulk_insert as *mut u8);
1822-
call_ptr(cb, REG0, bi_code_opnd);
1815+
call_ptr(cb, REG0, rb_hash_bulk_insert as *const u8);
18231816

18241817
pop(cb, RAX); // alignment
18251818
pop(cb, RAX);
@@ -1830,8 +1823,7 @@ fn gen_newhash(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb: &
18301823
}
18311824
else {
18321825
// val = rb_hash_new();
1833-
let hn_code_ptr = CodePtr::from(rb_hash_new as *mut u8);
1834-
call_ptr(cb, REG0, hn_code_ptr);
1826+
call_ptr(cb, REG0, rb_hash_new as *const u8);
18351827

18361828
let stack_ret = ctx.stack_push(Type::Hash);
18371829
mov(cb, stack_ret, RAX);
@@ -1849,8 +1841,7 @@ fn gen_putstring(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb:
18491841

18501842
mov(cb, C_ARG_REGS[0], REG_EC);
18511843
jit_mov_gc_ptr(jit, cb, C_ARG_REGS[1], put_val);
1852-
let str_resurrect = CodePtr::from(rb_ec_str_resurrect as *mut u8);
1853-
call_ptr(cb, REG0, str_resurrect);
1844+
call_ptr(cb, REG0, rb_ec_str_resurrect as *const u8);
18541845

18551846
let stack_top = ctx.stack_push(Type::String);
18561847
mov(cb, stack_top, RAX);
@@ -1980,8 +1971,7 @@ fn gen_set_ivar(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, recv:
19801971
mov(cb, C_ARG_REGS[0], recv_opnd);
19811972
mov(cb, C_ARG_REGS[1], imm_opnd(ivar_index.into()));
19821973
mov(cb, C_ARG_REGS[2], val_opnd);
1983-
let set_ivar_idx = CodePtr::from(rb_vm_set_ivar_idx as *mut u8);
1984-
call_ptr(cb, REG0, set_ivar_idx);
1974+
call_ptr(cb, REG0, rb_vm_set_ivar_idx as *const u8);
19851975

19861976
let out_opnd = ctx.stack_push(Type::Unknown);
19871977
mov(cb, out_opnd, RAX);
@@ -1999,8 +1989,8 @@ fn gen_get_ivar(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb:
19991989
let comptime_val_klass = comptime_receiver.class_of();
20001990
let starting_context = ctx.clone(); // make a copy for use with jit_chain_guard
20011991

2002-
let custom_allocator = unsafe { rb_get_alloc_func(comptime_val_klass).unwrap() as *mut u8 };
2003-
let allocate_instance = rb_class_allocate_instance as *mut u8;
1992+
let custom_allocator = unsafe { rb_get_alloc_func(comptime_val_klass).unwrap() as *const u8 };
1993+
let allocate_instance = rb_class_allocate_instance as *const u8;
20041994

20051995
// If the class uses the default allocator, instances should all be T_OBJECT
20061996
// NOTE: This assumes nobody changes the allocator of the class after allocation.
@@ -2017,8 +2007,7 @@ fn gen_get_ivar(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb:
20172007

20182008
mov(cb, C_ARG_REGS[0], REG0);
20192009
mov(cb, C_ARG_REGS[1], uimm_opnd(ivar_name));
2020-
let ivar_get = CodePtr::from(rb_ivar_get as *mut u8);
2021-
call_ptr(cb, REG1, ivar_get);
2010+
call_ptr(cb, REG1, rb_ivar_get as *const u8);
20222011

20232012
if reg0_opnd != InsnOpnd::SelfOpnd {
20242013
ctx.stack_pop(1);
@@ -2171,8 +2160,7 @@ fn gen_setinstancevariable(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeB
21712160
mov(cb, C_ARG_REGS[4], const_ptr_opnd(ic as *const u8));
21722161
let iseq = VALUE(jit.iseq as usize);
21732162
jit_mov_gc_ptr(jit, cb, C_ARG_REGS[0], iseq);
2174-
let vm_setinstancevar = CodePtr::from(rb_vm_setinstancevariable as *mut u8);
2175-
call_ptr(cb, REG0, vm_setinstancevar);
2163+
call_ptr(cb, REG0, rb_vm_setinstancevariable as *const u8);
21762164

21772165
KeepCompiling
21782166
}
@@ -2196,8 +2184,7 @@ fn gen_defined(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb: &
21962184
mov(cb, C_ARG_REGS[2], uimm_opnd(op_type.into()));
21972185
jit_mov_gc_ptr(jit, cb, C_ARG_REGS[3], obj);
21982186
mov(cb, C_ARG_REGS[4], v_opnd);
2199-
let vm_defined = CodePtr::from(rb_vm_defined as *mut u8);
2200-
call_ptr(cb, REG0, vm_defined);
2187+
call_ptr(cb, REG0, rb_vm_defined as *const u8);
22012188

22022189
// if (vm_defined(ec, GET_CFP(), op_type, obj, v)) {
22032190
// val = pushval;
@@ -2286,8 +2273,7 @@ fn gen_concatstrings(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock,
22862273
// call rb_str_concat_literals(long n, const VALUE *strings);
22872274
mov(cb, C_ARG_REGS[0], imm_opnd(n.into()));
22882275
lea(cb, C_ARG_REGS[1], values_ptr);
2289-
let str_concat_literals = CodePtr::from(rb_str_concat_literals as *mut u8);
2290-
call_ptr(cb, REG0, str_concat_literals);
2276+
call_ptr(cb, REG0, rb_str_concat_literals as *const u8);
22912277

22922278
ctx.stack_pop(n.as_usize());
22932279
let stack_ret = ctx.stack_push(Type::String);
@@ -2485,8 +2471,7 @@ fn gen_equality_specialized(jit: &mut JITState, ctx: &mut Context, cb: &mut Code
24852471
}
24862472

24872473
// Call rb_str_eql_internal(a, b)
2488-
let str_eql = CodePtr::from(rb_str_eql_internal as *mut u8);
2489-
call_ptr(cb, REG0, str_eql);
2474+
call_ptr(cb, REG0, rb_str_eql_internal as *const u8);
24902475

24912476
// Push the output on the stack
24922477
cb.write_label(ret);
@@ -2893,8 +2878,7 @@ fn gen_opt_mod(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb: &
28932878
// Call rb_vm_opt_mod(VALUE recv, VALUE obj)
28942879
mov(cb, C_ARG_REGS[0], arg0);
28952880
mov(cb, C_ARG_REGS[1], arg1);
2896-
let vm_mod = CodePtr::from(rb_vm_opt_mod as *mut u8);
2897-
call_ptr(cb, REG0, vm_mod);
2881+
call_ptr(cb, REG0, rb_vm_opt_mod as *const u8);
28982882

28992883
// If val == Qundef, bail to do a method call
29002884
cmp(cb, RAX, imm_opnd(Qundef.as_i64()));
@@ -4731,8 +4715,7 @@ fn gen_getglobal(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb:
47314715

47324716
mov(cb, C_ARG_REGS[0], imm_opnd(gid.as_i64()));
47334717

4734-
let gvar_get = CodePtr::from(rb_gvar_get as *mut u8);
4735-
call_ptr(cb, REG0, gvar_get);
4718+
call_ptr(cb, REG0, rb_gvar_get as *const u8);
47364719

47374720
let top = ctx.stack_push(Type::Unknown);
47384721
mov(cb, top, RAX);
@@ -4754,8 +4737,7 @@ fn gen_setglobal(jit: &mut JITState, ctx: &mut Context, cb: &mut CodeBlock, ocb:
47544737

47554738
mov(cb, C_ARG_REGS[1], val);
47564739

4757-
let gvar_set = CodePtr::from(rb_gvar_set as *mut u8);
4758-
call_ptr(cb, REG0, gvar_set);
4740+
call_ptr(cb, REG0, rb_gvar_set as *const u8);
47594741

47604742
KeepCompiling
47614743
}

yjit/src/core.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1380,7 +1380,7 @@ fn get_branch_target(
13801380
mov(ocb, C_ARG_REGS[2], REG_EC);
13811381
mov(ocb, C_ARG_REGS[1], uimm_opnd(target_idx as u64));
13821382
mov(ocb, C_ARG_REGS[0], const_ptr_opnd(branch_ptr as *const u8));
1383-
call_ptr(ocb, REG0, CodePtr::from(branch_stub_hit as *mut u8));
1383+
call_ptr(ocb, REG0, branch_stub_hit as *mut u8);
13841384

13851385
// Jump to the address returned by the
13861386
// branch_stub_hit call

0 commit comments

Comments
 (0)
0