Cranelift: x64, aarch64, s390x, riscv64: ensure addresses are I64s. (#5972)
* Cranelift: x64, aarch64, s390x, riscv64: ensure addresses are I64s.

  @avanhatt has been looking at our address-mode lowering and found an example where feeding an `I32`-typed address into a load or store can violate assumptions and produce incorrect codegen. This should never be reachable in practice, because all producers on 64-bit architectures use 64-bit types for addresses. However, our IR is insufficiently constrained and also allows loads/stores with `I32`-typed addresses, which is nonsensical on a 64-bit architecture.

  Initially I had thought we should tighten either the instruction definitions' accepted types or the CLIF verifier to reject this, but both are target-independent, and we don't want to bake an assumption of 64-bit-ness into the compiler core. Instead, this PR tightens the specific backends' lowerings to reject loads/stores of `I32`-typed addresses.

  tl;dr: no security implications, since all producers use I64-typed addresses (and must, for correct operation); but we currently also accept I32-typed addresses, and that breaks other assumptions.

* Allow R64 as well as I64 types.

* Add an explicit extractor to match 64-bit address types.
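For illustration, here is a minimal, hypothetical CLIF sketch (not taken from the PR or its test suite; the function name is made up) of the kind of input the IR currently accepts but that these backends now refuse to lower: on a 64-bit target, the load's address operand is `i32`-typed rather than `i64` or `r64`.

;; Hypothetical example: the address operand v0 has type i32.
function %load_through_i32_addr(i32) -> i64 {
block0(v0: i32):
    ;; A 64-bit backend expects an i64 (or r64) address here; with this change
    ;; the lowerings reject the i32 address instead of producing incorrect code.
    v1 = load.i64 v0
    return v1
}

Concretely, the diff below adds a `ty_addr64` extractor (matching `I64` and `R64`) that the riscv64, s390x, and x64 ISLE rules now require on address operands, and adds type asserts to the aarch64 address-mode helpers.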
@@ -513,10 +513,14 @@ impl Context for IsleContext<'_, '_, MInst, AArch64Backend> {
     }
 
     fn amode(&mut self, ty: Type, addr: Value, offset: u32) -> AMode {
+        let addr_ty = self.value_type(addr);
+        assert!(addr_ty == I64 || addr_ty == R64);
         lower_address(self.lower_ctx, ty, addr, offset as i32)
     }
 
     fn pair_amode(&mut self, addr: Value, offset: u32) -> PairAMode {
+        let addr_ty = self.value_type(addr);
+        assert!(addr_ty == I64 || addr_ty == R64);
         lower_pair_address(self.lower_ctx, addr, offset as i32)
     }
 
@@ -697,63 +697,63 @@
 
 ;;;;; Rules for `uload8`;;;;;;;;;
 (rule
-  (lower (uload8 flags p offset))
+  (lower (uload8 flags p @ (value_type (ty_addr64 _)) offset))
   (gen_load p offset (int_load_op $false 8) flags $I64))
 ;;;;; Rules for `sload8`;;;;;;;;;
 (rule
-  (lower (sload8 flags p offset))
+  (lower (sload8 flags p @ (value_type (ty_addr64 _)) offset))
   (gen_load p offset (int_load_op $true 8) flags $I64))
 ;;;;; Rules for `uload16`;;;;;;;;;
 (rule
-  (lower (uload16 flags p offset))
+  (lower (uload16 flags p @ (value_type (ty_addr64 _)) offset))
   (gen_load p offset (int_load_op $false 16) flags $I64))
 
 ;;;;; Rules for `iload16`;;;;;;;;;
 (rule
-  (lower (sload16 flags p offset))
+  (lower (sload16 flags p @ (value_type (ty_addr64 _)) offset))
   (gen_load p offset (int_load_op $true 16) flags $I64))
 
 ;;;;; Rules for `uload32`;;;;;;;;;
 (rule
-  (lower (uload32 flags p offset))
+  (lower (uload32 flags p @ (value_type (ty_addr64 _)) offset))
   (gen_load p offset (int_load_op $false 32) flags $I64))
 
 ;;;;; Rules for `iload16`;;;;;;;;;
 (rule
-  (lower (sload32 flags p offset))
+  (lower (sload32 flags p @ (value_type (ty_addr64 _)) offset))
   (gen_load p offset (int_load_op $true 32) flags $I64))
 
 (rule
-  (lower (has_type ty (load flags p offset)))
+  (lower (has_type ty (load flags p @ (value_type (ty_addr64 _)) offset)))
   (gen_load p offset (load_op ty) flags ty)
 )
 ;;;; for I128
 (rule 1
-  (lower (has_type $I128 (load flags p offset)))
+  (lower (has_type $I128 (load flags p @ (value_type (ty_addr64 _)) offset)))
   (gen_load_128 p offset flags))
 
 ;;;;; Rules for `istore8`;;;;;;;;;
 (rule
-  (lower (istore8 flags x p offset))
+  (lower (istore8 flags x p @ (value_type (ty_addr64 _)) offset))
   (gen_store p offset (StoreOP.Sb) flags x))
 ;;;;; Rules for `istore16`;;;;;;;;;
 (rule
-  (lower (istore16 flags x p offset))
+  (lower (istore16 flags x p @ (value_type (ty_addr64 _)) offset))
   (gen_store p offset (StoreOP.Sh) flags x))
 
 ;;;;; Rules for `istore32`;;;;;;;;;
 (rule
-  (lower (istore32 flags x p offset))
+  (lower (istore32 flags x p @ (value_type (ty_addr64 _)) offset))
   (gen_store p offset (StoreOP.Sw) flags x))
 
 ;;;;; Rules for `store`;;;;;;;;;
 (rule
-  (lower (store flags x @ (value_type ty) p offset))
+  (lower (store flags x @ (value_type ty) p @ (value_type (ty_addr64 _)) offset))
   (gen_store p offset (store_op ty) flags x))
 
 ;;; special for I128
 (rule 1
-  (lower (store flags x @ (value_type $I128 ) p offset))
+  (lower (store flags x @ (value_type $I128 ) p @ (value_type (ty_addr64 _)) offset))
   (gen_store_128 p offset flags x))
 
 (decl gen_icmp (IntCC ValueRegs ValueRegs Type) Reg)
@@ -1811,10 +1811,10 @@
 
 (decl lower_address (MemFlags Value Offset32) MemArg)
 
-(rule (lower_address flags addr (i64_from_offset offset))
+(rule (lower_address flags addr @ (value_type (ty_addr64 _)) (i64_from_offset offset))
       (memarg_reg_plus_off addr offset 0 flags))
 
-(rule 1 (lower_address flags (iadd x y) (i64_from_offset 0))
+(rule 1 (lower_address flags (has_type (ty_addr64 _) (iadd x y)) (i64_from_offset 0))
       (memarg_reg_plus_reg x y 0 flags))
 
 (rule 1 (lower_address flags
@@ -1828,10 +1828,10 @@
 
 (decl lower_address_bias (MemFlags Value Offset32 u8) MemArg)
 
-(rule (lower_address_bias flags addr (i64_from_offset offset) bias)
+(rule (lower_address_bias flags addr @ (value_type $I64) (i64_from_offset offset) bias)
      (memarg_reg_plus_off addr offset bias flags))
 
-(rule 1 (lower_address_bias flags (iadd x y) (i64_from_offset 0) bias)
+(rule 1 (lower_address_bias flags (has_type $I64 (iadd x y)) (i64_from_offset 0) bias)
      (memarg_reg_plus_reg x y bias flags))
 
 
@@ -1114,7 +1114,7 @@
         (Amode.ImmRegRegShift off sum index shift flags)))
 
 ;; Finally, define the toplevel `to_amode`.
-(rule (to_amode flags base offset)
+(rule (to_amode flags base @ (value_type (ty_addr64 _)) offset)
       (amode_finalize (amode_add (amode_initial flags offset) base)))
 
 ;; If an amode has no registers at all and only offsets (a constant
@@ -412,6 +412,14 @@ macro_rules! isle_common_prelude_methods {
             }
         }
 
+        #[inline]
+        fn ty_addr64(&mut self, ty: Type) -> Option<Type> {
+            match ty {
+                I64 | R64 => Some(ty),
+                _ => None,
+            }
+        }
+
         #[inline]
         fn u64_from_imm64(&mut self, imm: Imm64) -> u64 {
             imm.bits() as u64
@@ -347,6 +347,10 @@
 (decl ty_vec128_int (Type) Type)
 (extern extractor ty_vec128_int ty_vec128_int)
 
+;; An extractor that only matches types that can be a 64-bit address.
+(decl ty_addr64 (Type) Type)
+(extern extractor ty_addr64 ty_addr64)
+
 ;; A pure constructor that matches everything except vectors with size 32X2.
 (decl pure partial not_vec32x2 (Type) Type)
 (extern constructor not_vec32x2 not_vec32x2)