diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs
index e8bb2f6fe6194..f1adf60a3ee4d 100644
--- a/compiler/rustc_codegen_llvm/src/asm.rs
+++ b/compiler/rustc_codegen_llvm/src/asm.rs
@@ -542,107 +542,20 @@ fn xmm_reg_index(reg: InlineAsmReg) -> Option<u32> {
 
 /// If the register is an AArch64 integer register then return its index.
 fn a64_reg_index(reg: InlineAsmReg) -> Option<u32> {
-    // Unlike `a64_vreg_index`, we can't subtract `x0` to get the u32 because
-    // `x19` and `x29` are missing and the integer constants for the
-    // `x0`..`x30` enum variants don't all match the register number. E.g. the
-    // integer constant for `x18` is 18, but the constant for `x20` is 19.
-    Some(match reg {
-        InlineAsmReg::AArch64(r) => {
-            use AArch64InlineAsmReg::*;
-            match r {
-                x0 => 0,
-                x1 => 1,
-                x2 => 2,
-                x3 => 3,
-                x4 => 4,
-                x5 => 5,
-                x6 => 6,
-                x7 => 7,
-                x8 => 8,
-                x9 => 9,
-                x10 => 10,
-                x11 => 11,
-                x12 => 12,
-                x13 => 13,
-                x14 => 14,
-                x15 => 15,
-                x16 => 16,
-                x17 => 17,
-                x18 => 18,
-                // x19 is reserved
-                x20 => 20,
-                x21 => 21,
-                x22 => 22,
-                x23 => 23,
-                x24 => 24,
-                x25 => 25,
-                x26 => 26,
-                x27 => 27,
-                x28 => 28,
-                // x29 is reserved
-                x30 => 30,
-                _ => return None,
-            }
-        }
-        InlineAsmReg::Arm64EC(r) => {
-            use Arm64ECInlineAsmReg::*;
-            match r {
-                x0 => 0,
-                x1 => 1,
-                x2 => 2,
-                x3 => 3,
-                x4 => 4,
-                x5 => 5,
-                x6 => 6,
-                x7 => 7,
-                x8 => 8,
-                x9 => 9,
-                x10 => 10,
-                x11 => 11,
-                x12 => 12,
-                // x13 is reserved
-                // x14 is reserved
-                x15 => 15,
-                x16 => 16,
-                x17 => 17,
-                // x18 is reserved
-                // x19 is reserved
-                x20 => 20,
-                x21 => 21,
-                x22 => 22,
-                // x23 is reserved
-                // x24 is reserved
-                x25 => 25,
-                x26 => 26,
-                x27 => 27,
-                // x28 is reserved
-                // x29 is reserved
-                x30 => 30,
-                _ => return None,
-            }
-        }
-        _ => return None,
-    })
+    match reg {
+        InlineAsmReg::AArch64(r) => r.reg_index(),
+        InlineAsmReg::Arm64EC(r) => r.reg_index(),
+        _ => None,
+    }
 }
 
 /// If the register is an AArch64 vector register then return its index.
 fn a64_vreg_index(reg: InlineAsmReg) -> Option<u32> {
     match reg {
-        InlineAsmReg::AArch64(reg) => {
-            use AArch64InlineAsmReg::*;
-            if reg as u32 >= v0 as u32 && reg as u32 <= v31 as u32 {
-                return Some(reg as u32 - v0 as u32);
-            }
-        }
-        InlineAsmReg::Arm64EC(reg) => {
-            use Arm64ECInlineAsmReg::*;
-            if reg as u32 >= v0 as u32 && reg as u32 <= v15 as u32 {
-                return Some(reg as u32 - v0 as u32);
-            }
-        }
-        _ => {}
+        InlineAsmReg::AArch64(reg) => reg.vreg_index(),
+        InlineAsmReg::Arm64EC(reg) => reg.vreg_index(),
+        _ => None,
     }
-    None
 }
 
 /// Converts a register class to an LLVM constraint code.
diff --git a/compiler/rustc_target/src/asm/aarch64.rs b/compiler/rustc_target/src/asm/aarch64.rs
index 078066821d78f..7a8f9b58c41d4 100644
--- a/compiler/rustc_target/src/asm/aarch64.rs
+++ b/compiler/rustc_target/src/asm/aarch64.rs
@@ -186,12 +186,66 @@ impl AArch64InlineAsmReg {
         _arch: InlineAsmArch,
         modifier: Option<char>,
     ) -> fmt::Result {
-        let (prefix, index) = if (self as u32) < Self::v0 as u32 {
-            (modifier.unwrap_or('x'), self as u32 - Self::x0 as u32)
+        let (prefix, index) = if let Some(index) = self.reg_index() {
+            (modifier.unwrap_or('x'), index)
+        } else if let Some(index) = self.vreg_index() {
+            (modifier.unwrap_or('v'), index)
         } else {
-            (modifier.unwrap_or('v'), self as u32 - Self::v0 as u32)
+            return out.write_str(self.name());
         };
         assert!(index < 32);
         write!(out, "{prefix}{index}")
     }
+
+    /// If the register is an integer register then return its index.
+    pub fn reg_index(self) -> Option<u32> {
+        // Unlike `vreg_index`, we can't subtract `x0` to get the u32 because
+        // `x19` and `x29` are missing and the integer constants for the
+        // `x0`..`x30` enum variants don't all match the register number. E.g. the
+        // integer constant for `x18` is 18, but the constant for `x20` is 19.
+        use AArch64InlineAsmReg::*;
+        Some(match self {
+            x0 => 0,
+            x1 => 1,
+            x2 => 2,
+            x3 => 3,
+            x4 => 4,
+            x5 => 5,
+            x6 => 6,
+            x7 => 7,
+            x8 => 8,
+            x9 => 9,
+            x10 => 10,
+            x11 => 11,
+            x12 => 12,
+            x13 => 13,
+            x14 => 14,
+            x15 => 15,
+            x16 => 16,
+            x17 => 17,
+            x18 => 18,
+            // x19 is reserved
+            x20 => 20,
+            x21 => 21,
+            x22 => 22,
+            x23 => 23,
+            x24 => 24,
+            x25 => 25,
+            x26 => 26,
+            x27 => 27,
+            x28 => 28,
+            // x29 is reserved
+            x30 => 30,
+            _ => return None,
+        })
+    }
+
+    /// If the register is a vector register then return its index.
+    pub fn vreg_index(self) -> Option<u32> {
+        use AArch64InlineAsmReg::*;
+        if self as u32 >= v0 as u32 && self as u32 <= v31 as u32 {
+            return Some(self as u32 - v0 as u32);
+        }
+        None
+    }
 }
diff --git a/compiler/rustc_target/src/asm/arm64ec.rs b/compiler/rustc_target/src/asm/arm64ec.rs
index 08af0f800d3f3..86fa5e175621f 100644
--- a/compiler/rustc_target/src/asm/arm64ec.rs
+++ b/compiler/rustc_target/src/asm/arm64ec.rs
@@ -99,12 +99,66 @@ impl Arm64ECInlineAsmReg {
         _arch: InlineAsmArch,
         modifier: Option<char>,
     ) -> fmt::Result {
-        let (prefix, index) = if (self as u32) < Self::v0 as u32 {
-            (modifier.unwrap_or('x'), self as u32 - Self::x0 as u32)
+        let (prefix, index) = if let Some(index) = self.reg_index() {
+            (modifier.unwrap_or('x'), index)
+        } else if let Some(index) = self.vreg_index() {
+            (modifier.unwrap_or('v'), index)
         } else {
-            (modifier.unwrap_or('v'), self as u32 - Self::v0 as u32)
+            return out.write_str(self.name());
         };
         assert!(index < 32);
         write!(out, "{prefix}{index}")
     }
+
+    /// If the register is an integer register then return its index.
+    pub fn reg_index(self) -> Option<u32> {
+        // Unlike `vreg_index`, we can't subtract `x0` to get the u32 because
+        // `x13`, `x19`, `x29`, etc. are missing and the integer constants for the
+        // `x0`..`x30` enum variants don't all match the register number. E.g. the
+        // integer constant for `x12` is 12, but the constant for `x15` is 13.
+        use Arm64ECInlineAsmReg::*;
+        Some(match self {
+            x0 => 0,
+            x1 => 1,
+            x2 => 2,
+            x3 => 3,
+            x4 => 4,
+            x5 => 5,
+            x6 => 6,
+            x7 => 7,
+            x8 => 8,
+            x9 => 9,
+            x10 => 10,
+            x11 => 11,
+            x12 => 12,
+            // x13 is reserved
+            // x14 is reserved
+            x15 => 15,
+            x16 => 16,
+            x17 => 17,
+            // x18 is reserved
+            // x19 is reserved
+            x20 => 20,
+            x21 => 21,
+            x22 => 22,
+            // x23 is reserved
+            // x24 is reserved
+            x25 => 25,
+            x26 => 26,
+            x27 => 27,
+            // x28 is reserved
+            // x29 is reserved
+            x30 => 30,
+            _ => return None,
+        })
+    }
+
+    /// If the register is a vector register then return its index.
+    pub fn vreg_index(self) -> Option<u32> {
+        use Arm64ECInlineAsmReg::*;
+        if self as u32 >= v0 as u32 && self as u32 <= v15 as u32 {
+            return Some(self as u32 - v0 as u32);
+        }
+        None
+    }
 }
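For context on why `reg_index` spells out every arm instead of subtracting discriminants: a minimal sketch of the pitfall the doc comment describes. `ToyReg` below is a made-up stand-in, not the real `AArch64InlineAsmReg`; once a reserved register (here x19) is omitted from the enum, the discriminants of later variants stop matching the architectural register numbers, so only an explicit table is safe. The `vreg_index` subtraction remains valid because `v0`..`v31` have no gaps.

```rust
// Hypothetical illustration only; ToyReg is not part of rustc_target.
#[allow(non_camel_case_types)]
#[derive(Clone, Copy)]
enum ToyReg {
    x18, // discriminant 0, architectural register number 18
    // x19 is reserved, so it gets no variant
    x20, // discriminant 1, architectural register number 20
}

impl ToyReg {
    // Explicit table, mirroring the approach reg_index takes in the diff.
    fn reg_index(self) -> u32 {
        match self {
            ToyReg::x18 => 18,
            ToyReg::x20 => 20,
        }
    }
}

fn main() {
    // Discriminant arithmetic is off by one for every register past the gap:
    let naive = ToyReg::x20 as u32 - ToyReg::x18 as u32 + 18;
    assert_eq!(naive, 19); // not the register number 20
    // The explicit mapping returns the architectural number:
    assert_eq!(ToyReg::x20.reg_index(), 20);
}
```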