Fix optimization rules for narrow types: wrap i8 results to 8 bits. (#5409)

* Fix optimization rules for narrow types: wrap i8 results to 8 bits.

This fixes #5405.

In the egraph mid-end's optimization rules, we were rewriting e.g. an
`imul` of two `iconst`s to an `iconst` of the product, but without
masking off the high bits beyond the result type's width. This produced
`iconst`s with high bits set beyond their type's width, which is not legal.
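For illustration, a minimal Rust sketch of the wrapping the fix performs; `mask_to_width` and its signature are hypothetical stand-ins for the PR's actual `imm64_masked` constructor:

```rust
/// Hypothetical helper: wrap a folded 64-bit constant to a narrow
/// integer type's width (`bits` is 8, 16, 32, or 64).
fn mask_to_width(value: u64, bits: u32) -> u64 {
    if bits >= 64 {
        value // shifting a u64 by 64 would overflow, so pass through
    } else {
        value & ((1u64 << bits) - 1)
    }
}

fn main() {
    // The i8 failure mode: folding `imul 0xC8, 0x02` yields 0x190,
    // which has bits set beyond the 8-bit width...
    let folded = 0xC8u64.wrapping_mul(0x02);
    assert_eq!(folded, 0x190);
    // ...so the resulting iconst must be wrapped to 0x90 to stay legal.
    assert_eq!(mask_to_width(folded, 8), 0x90);
}
```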

In addition, this PR adds some optimizations to the algebraic rules to
recognize comparisons of a value with itself, e.g. `x == x` (and likewise
for all other integer comparison operators), and resolve them to 1 or 0
as appropriate; a sketch of the resulting truth table follows below.
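A sketch of the truth table these rules encode, using a local stand-in for Cranelift's `IntCC` condition codes rather than the actual enum from `cranelift-codegen`:

```rust
/// Stand-in for Cranelift's `IntCC`, covering the codes the new rules handle.
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum IntCC {
    Equal, NotEqual,
    UnsignedGreaterThan, UnsignedGreaterThanOrEqual,
    SignedGreaterThan, SignedGreaterThanOrEqual,
    UnsignedLessThan, UnsignedLessThanOrEqual,
    SignedLessThan, SignedLessThanOrEqual,
}

/// What `icmp cc x, x` folds to: equality and the loose ("or equal")
/// inequalities are always true; disequality and strict inequalities
/// are always false.
fn icmp_self(cc: IntCC) -> u64 {
    use IntCC::*;
    match cc {
        Equal | UnsignedGreaterThanOrEqual | SignedGreaterThanOrEqual
        | UnsignedLessThanOrEqual | SignedLessThanOrEqual => 1,
        NotEqual | UnsignedGreaterThan | SignedGreaterThan
        | UnsignedLessThan | SignedLessThan => 0,
    }
}

fn main() {
    assert_eq!(icmp_self(IntCC::Equal), 1);
    assert_eq!(icmp_self(IntCC::UnsignedLessThan), 0);
}
```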

* Review feedback.

* Review feedback, again.
Authored by Chris Fallin, 2022-12-09 14:29:25 -08:00; committed by GitHub.
Parent e913cf3647, commit 244dce93f6.
6 changed files with 120 additions and 15 deletions.


@@ -207,3 +207,36 @@
(rule (simplify
       (select ty (uextend _ c @ (icmp _ _ _ _)) x y))
      (select ty c x y))
+
+;; `x == x` is always true for integers; `x != x` is false. Strict
+;; inequalities are false, and loose inequalities are true.
+(rule (simplify
+       (icmp ty (IntCC.Equal) x x))
+      (iconst ty (imm64 1)))
+(rule (simplify
+       (icmp ty (IntCC.NotEqual) x x))
+      (iconst ty (imm64 0)))
+(rule (simplify
+       (icmp ty (IntCC.UnsignedGreaterThan) x x))
+      (iconst ty (imm64 0)))
+(rule (simplify
+       (icmp ty (IntCC.UnsignedGreaterThanOrEqual) x x))
+      (iconst ty (imm64 1)))
+(rule (simplify
+       (icmp ty (IntCC.SignedGreaterThan) x x))
+      (iconst ty (imm64 0)))
+(rule (simplify
+       (icmp ty (IntCC.SignedGreaterThanOrEqual) x x))
+      (iconst ty (imm64 1)))
+(rule (simplify
+       (icmp ty (IntCC.UnsignedLessThan) x x))
+      (iconst ty (imm64 0)))
+(rule (simplify
+       (icmp ty (IntCC.UnsignedLessThanOrEqual) x x))
+      (iconst ty (imm64 1)))
+(rule (simplify
+       (icmp ty (IntCC.SignedLessThan) x x))
+      (iconst ty (imm64 0)))
+(rule (simplify
+       (icmp ty (IntCC.SignedLessThanOrEqual) x x))
+      (iconst ty (imm64 1)))
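
The next file's hunks thread the result type into each folded constant via `imm64_masked`. This matters even for subtraction: assuming the `u64_sub` helper uses wrapping 64-bit arithmetic (as its `u64_` prefix suggests), a small negative result sets all 64 bits. A hedged sketch:

```rust
fn main() {
    // Folding `isub 1, 2` at type i8: wrapping 64-bit subtraction gives
    // 0xFFFF_FFFF_FFFF_FFFF, which is not a legal i8 iconst payload...
    let (k1, k2) = (1u64, 2u64);
    let wrapped = k1.wrapping_sub(k2);
    assert_eq!(wrapped, u64::MAX);
    // ...masking to the type's width yields the legal 8-bit 0xFF (i.e. -1).
    assert_eq!(wrapped & 0xFF, 0xFF);
}
```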


@@ -4,56 +4,56 @@
       (iadd (fits_in_64 ty)
             (iconst ty (u64_from_imm64 k1))
             (iconst ty (u64_from_imm64 k2))))
-      (subsume (iconst ty (imm64 (u64_add k1 k2)))))
+      (subsume (iconst ty (imm64_masked ty (u64_add k1 k2)))))

(rule (simplify
       (isub (fits_in_64 ty)
             (iconst ty (u64_from_imm64 k1))
             (iconst ty (u64_from_imm64 k2))))
-      (subsume (iconst ty (imm64 (u64_sub k1 k2)))))
+      (subsume (iconst ty (imm64_masked ty (u64_sub k1 k2)))))

(rule (simplify
       (imul (fits_in_64 ty)
             (iconst ty (u64_from_imm64 k1))
             (iconst ty (u64_from_imm64 k2))))
-      (subsume (iconst ty (imm64 (u64_mul k1 k2)))))
+      (subsume (iconst ty (imm64_masked ty (u64_mul k1 k2)))))

(rule (simplify
       (sdiv (fits_in_64 ty)
             (iconst ty (u64_from_imm64 k1))
             (iconst ty (u64_from_imm64 k2))))
      (if-let d (u64_sdiv k1 k2))
-      (subsume (iconst ty (imm64 d))))
+      (subsume (iconst ty (imm64_masked ty d))))

(rule (simplify
       (udiv (fits_in_64 ty)
             (iconst ty (u64_from_imm64 k1))
             (iconst ty (u64_from_imm64 k2))))
      (if-let d (u64_udiv k1 k2))
-      (subsume (iconst ty (imm64 d))))
+      (subsume (iconst ty (imm64_masked ty d))))

(rule (simplify
       (bor (fits_in_64 ty)
            (iconst ty (u64_from_imm64 k1))
            (iconst ty (u64_from_imm64 k2))))
-      (subsume (iconst ty (imm64 (u64_or k1 k2)))))
+      (subsume (iconst ty (imm64_masked ty (u64_or k1 k2)))))

(rule (simplify
       (band (fits_in_64 ty)
             (iconst ty (u64_from_imm64 k1))
             (iconst ty (u64_from_imm64 k2))))
-      (subsume (iconst ty (imm64 (u64_and k1 k2)))))
+      (subsume (iconst ty (imm64_masked ty (u64_and k1 k2)))))

(rule (simplify
       (bxor (fits_in_64 ty)
             (iconst ty (u64_from_imm64 k1))
             (iconst ty (u64_from_imm64 k2))))
-      (subsume (iconst ty (imm64 (u64_xor k1 k2)))))
+      (subsume (iconst ty (imm64_masked ty (u64_xor k1 k2)))))

(rule (simplify
       (bnot (fits_in_64 ty)
             (iconst ty (u64_from_imm64 k))))
-      (subsume (iconst ty (imm64 (u64_not k)))))
+      (subsume (iconst ty (imm64_masked ty (u64_not k)))))

;; Canonicalize via commutativity: push immediates to the right.
;;
@@ -99,23 +99,23 @@
(rule (simplify (isub ty
                      (isub ty x (iconst ty (u64_from_imm64 k1)))
                      (iconst ty (u64_from_imm64 k2))))
-      (isub ty x (iconst ty (imm64 (u64_add k1 k2)))))
+      (isub ty x (iconst ty (imm64_masked ty (u64_add k1 k2)))))

(rule (simplify (isub ty
                      (isub ty (iconst ty (u64_from_imm64 k1)) x)
                      (iconst ty (u64_from_imm64 k2))))
-      (isub ty (iconst ty (imm64 (u64_sub k1 k2))) x))
+      (isub ty (iconst ty (imm64_masked ty (u64_sub k1 k2))) x))

(rule (simplify (isub ty
                      (iadd ty x (iconst ty (u64_from_imm64 k1)))
                      (iconst ty (u64_from_imm64 k2))))
-      (isub ty x (iconst ty (imm64 (u64_sub k2 k1)))))
+      (isub ty x (iconst ty (imm64_masked ty (u64_sub k2 k1)))))

(rule (simplify (iadd ty
                      (isub ty x (iconst ty (u64_from_imm64 k1)))
                      (iconst ty (u64_from_imm64 k2))))
-      (iadd ty x (iconst ty (imm64 (u64_sub k2 k1)))))
+      (iadd ty x (iconst ty (imm64_masked ty (u64_sub k2 k1)))))

(rule (simplify (iadd ty
                      (isub ty (iconst ty (u64_from_imm64 k1)) x)
                      (iconst ty (u64_from_imm64 k2))))
-      (isub ty (iconst ty (imm64 (u64_add k1 k2))) x))
+      (isub ty (iconst ty (imm64_masked ty (u64_add k1 k2))) x))

(rule (simplify
       (imul ty (imul ty x k1 @ (iconst ty _)) k2 @ (iconst ty _)))
@@ -138,4 +138,3 @@
y)
;; TODO: fadd, fsub, fmul, fdiv, fneg, fabs
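
The reassociation rules in the hunks above need the same masking when they merge constants; a worked i8 example, again assuming wrapping u64 helpers:

```rust
fn main() {
    // `(x - 0x80) - 0x90` merges to `x - (0x80 + 0x90)`; the raw sum
    // 0x110 overflows 8 bits...
    let (k1, k2) = (0x80u64, 0x90u64);
    let merged = k1.wrapping_add(k2);
    assert_eq!(merged, 0x110);
    // ...but masked to the i8 width it becomes 0x10, which is equivalent
    // since 8-bit subtraction wraps mod 256.
    assert_eq!(merged & 0xFF, 0x10);
}
```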