author     Kevin Newton <[email protected]>          2022-08-05 16:52:23 -0400
committer  Takashi Kokubun <[email protected]>  2022-08-29 08:47:08 -0700
commit     8278d722907dc134e9a3436d5542d7dc168d8925 (patch)
tree       418029a7e2786cbd827dba8348e2a3fcb17bdb97 /yjit/src/asm/arm64
parent     8fffff536db7d603c6caef80d11c0926d59b1001 (diff)
Left and right shift for IR (https://github.com/Shopify/ruby/pull/374)
* Left and right shift for IR

* Update yjit/src/backend/x86_64/mod.rs

Co-authored-by: Alan Wu <[email protected]>
Co-authored-by: Maxime Chevalier-Boisvert <[email protected]>
Diffstat (limited to 'yjit/src/asm/arm64')
-rw-r--r--  yjit/src/asm/arm64/inst/sbfm.rs  33
-rw-r--r--  yjit/src/asm/arm64/mod.rs        21
2 files changed, 52 insertions, 2 deletions
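
Background for the diff below: AArch64 has no dedicated ASR-immediate encoding; ASR is an alias of SBFM in which immr carries the shift amount and imms selects the register's top bit (31 for 32-bit, 63 for 64-bit), so the sign bit is replicated into the vacated high bits. The following standalone Rust sketch is not part of the patch; it only illustrates the arithmetic-vs-logical distinction the new instruction gives the IR:

// Illustration only: an arithmetic shift right preserves the sign bit,
// which is what ASR provides and a logical shift (LSR) would not.
fn main() {
    let x: i64 = -32;
    assert_eq!(x >> 2, -8);                              // arithmetic: sign-extended
    assert_eq!((x as u64) >> 2, 0x3fff_ffff_ffff_fff8);  // logical: zero-filled
}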
diff --git a/yjit/src/asm/arm64/inst/sbfm.rs b/yjit/src/asm/arm64/inst/sbfm.rs
index 4fbb567ed0..6f69e58043 100644
--- a/yjit/src/asm/arm64/inst/sbfm.rs
+++ b/yjit/src/asm/arm64/inst/sbfm.rs
@@ -31,6 +31,18 @@ pub struct SBFM {
}
impl SBFM {
+ /// ASR
+ /// https://developer.arm.com/documentation/ddi0596/2020-12/Base-Instructions/ASR--immediate---Arithmetic-Shift-Right--immediate---an-alias-of-SBFM-?lang=en
+ pub fn asr(rd: u8, rn: u8, shift: u8, num_bits: u8) -> Self {
+ let (imms, n) = if num_bits == 64 {
+ (0b111111, true)
+ } else {
+ (0b011111, false)
+ };
+
+ Self { rd, rn, immr: shift, imms, n, sf: num_bits.into() }
+ }
+
/// SXTW
/// https://developer.arm.com/documentation/ddi0596/2021-12/Base-Instructions/SXTW--Sign-Extend-Word--an-alias-of-SBFM-?lang=en
pub fn sxtw(rd: u8, rn: u8) -> Self {
@@ -44,13 +56,16 @@ const FAMILY: u32 = 0b1001;
impl From<SBFM> for u32 {
/// Convert an instruction into a 32-bit value.
fn from(inst: SBFM) -> Self {
+ let immr = (inst.immr as u32) & ((1 << 6) - 1);
+ let imms = (inst.imms as u32) & ((1 << 6) - 1);
+
0
| ((inst.sf as u32) << 31)
| (FAMILY << 25)
| (1 << 24)
| ((inst.n as u32) << 22)
- | ((inst.immr as u32) << 16)
- | ((inst.imms as u32) << 10)
+ | (immr << 16)
+ | (imms << 10)
| ((inst.rn as u32) << 5)
| inst.rd as u32
}
@@ -69,6 +84,20 @@ mod tests {
use super::*;
#[test]
+ fn test_asr_32_bits() {
+ let inst = SBFM::asr(0, 1, 2, 32);
+ let result: u32 = inst.into();
+ assert_eq!(0x13027c20, result);
+ }
+
+ #[test]
+ fn test_asr_64_bits() {
+ let inst = SBFM::asr(10, 11, 5, 64);
+ let result: u32 = inst.into();
+ assert_eq!(0x9345fd6a, result);
+ }
+
+ #[test]
fn test_sxtw() {
let inst = SBFM::sxtw(0, 1);
let result: u32 = inst.into();
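
Aside (not part of the commit): the From<SBFM> conversion above packs the fields as sf | opc(00) | 100110 | N | immr | imms | Rn | Rd, and the new 6-bit masks on immr/imms keep an out-of-range shift from clobbering the N bit. A standalone re-derivation, checked against the two test vectors above and assuming the sf field is simply 1 for 64-bit and 0 for 32-bit:

// Sketch of the ASR-as-SBFM encoding; mirrors the patch's field layout
// but is not the patch's code.
fn encode_asr(rd: u32, rn: u32, shift: u32, num_bits: u32) -> u32 {
    let (sf, n, imms) = if num_bits == 64 {
        (1u32, 1u32, 0b111111u32)
    } else {
        (0, 0, 0b011111)
    };
    (sf << 31)                   // register size
        | (0b1001 << 25)         // bitfield instruction family
        | (1 << 24)
        | (n << 22)
        | ((shift & 0x3f) << 16) // immr: shift amount, masked to 6 bits
        | (imms << 10)           // imms: top bit index (31 or 63)
        | (rn << 5)
        | rd
}

fn main() {
    assert_eq!(encode_asr(0, 1, 2, 32), 0x13027c20);   // matches test_asr_32_bits
    assert_eq!(encode_asr(10, 11, 5, 64), 0x9345fd6a); // matches test_asr_64_bits
}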
diff --git a/yjit/src/asm/arm64/mod.rs b/yjit/src/asm/arm64/mod.rs
index d114f64a22..68be36c256 100644
--- a/yjit/src/asm/arm64/mod.rs
+++ b/yjit/src/asm/arm64/mod.rs
@@ -166,6 +166,22 @@ pub fn ands(cb: &mut CodeBlock, rd: A64Opnd, rn: A64Opnd, rm: A64Opnd) {
cb.write_bytes(&bytes);
}
+/// ASR - arithmetic shift right rn by shift, put the result in rd, don't update
+/// flags
+pub fn asr(cb: &mut CodeBlock, rd: A64Opnd, rn: A64Opnd, shift: A64Opnd) {
+ let bytes: [u8; 4] = match (rd, rn, shift) {
+ (A64Opnd::Reg(rd), A64Opnd::Reg(rn), A64Opnd::UImm(shift)) => {
+ assert!(rd.num_bits == rn.num_bits, "rd and rn must be of the same size.");
+ assert!(uimm_fits_bits(shift, 6), "The shift operand must be 6 bits or less.");
+
+ SBFM::asr(rd.reg_no, rn.reg_no, shift.try_into().unwrap(), rd.num_bits).into()
+ },
+ _ => panic!("Invalid operand combination to asr instruction."),
+ };
+
+ cb.write_bytes(&bytes);
+}
+
/// Whether or not the offset between two instructions fits into the branch with
/// or without link instruction. If it doesn't, then we have to load the value
/// into a register first.
@@ -904,6 +920,11 @@ mod tests {
}
#[test]
+ fn test_asr() {
+ check_bytes("b4fe4a93", |cb| asr(cb, X20, X21, A64Opnd::new_uimm(10)));
+ }
+
+ #[test]
fn test_bcond() {
check_bytes("01200054", |cb| bcond(cb, Condition::NE, A64Opnd::new_imm(0x400)));
}
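
For completeness, a hypothetical caller of the new assembler helper (X0/X1 and A64Opnd::new_uimm are the existing operand helpers used by the tests above; this is a usage sketch, not code from the patch):

// Sketch: emit "asr x0, x1, #3" into a CodeBlock. Flags are untouched,
// and the helper asserts that the shift fits in 6 bits and that both
// registers have the same width.
fn emit_sample(cb: &mut CodeBlock) {
    asr(cb, X0, X1, A64Opnd::new_uimm(3));
}

Routing ASR through SBFM, as the patch does, keeps the backend to a single bitfield encoder (shared with SXTW) rather than adding a one-off opcode per alias.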