More work on sketch for isel and some TODO items derived from it.
@@ -1,3 +1,6 @@
+- `and` combinator in input.
+- inputs to external extractors? "polarity" of args?
+- "extractor macros" rather than full rule reversal? (rule ...) and (pattern ...)?
 - Document semantics carefully, especially wrt extractors.
 - Build out an initial set of bindings for Cranelift LowerCtx with extractors
   for instruction info.
cranelift/isle/isle_examples/test3.isle (new file, 57 lines)
@@ -0,0 +1,57 @@
+(type Opcode extern (enum
+  Iadd
+  Isub
+  Load
+  Store))
+
+(type Inst (primitive Inst))
+(type Reg (primitive Reg))
+(type u32 (primitive u32))
+
+(decl Op (Opcode) Inst)
+(extractor Op get_opcode)
+
+(decl InputToReg (Inst u32) Reg)
+(constructor InputToReg put_input_in_reg)
+
+(type MachInst (enum
+  (Add (a Reg) (b Reg))
+  (Sub (a Reg) (b Reg))))
+
+(decl Lower (Inst) MachInst)
+
+;; These can be made nicer by defining some extractors -- see below.
+(rule
+  (Lower inst @ (Op (Opcode.Iadd)))
+  (MachInst.Add (InputToReg inst 0) (InputToReg inst 1)))
+(rule
+  (Lower inst @ (Op (Opcode.Isub)))
+  (MachInst.Sub (InputToReg inst 0) (InputToReg inst 1)))
+
+;; Extractors that give syntax sugar for (Iadd ra rb), etc.
+;;
+;; Note that this is somewhat simplistic: it directly connects inputs to
+;; MachInst regs; really we'd want to return a VReg or InstInput that we can use
+;; another extractor to connect to another (producer) inst.
+;;
+;; Also, note that while it looks a little indirect, a verification effort could
+;; define equivalences across the `rule` LHS/RHS pairs, and the types ensure that
+;; we are dealing (at the semantic level) with pure value equivalences of
+;; "terms", not arbitrary side-effecting calls.
+
+(decl Iadd (Reg Reg) Inst)
+(decl Isub (Reg Reg) Inst)
+(rule
+  inst @ (Op Opcode.Iadd)
+  (Iadd (InputToReg inst 0) (InputToReg inst 1)))
+(rule
+  inst @ (Op Opcode.Isub)
+  (Isub (InputToReg inst 0) (InputToReg inst 1)))
+
+;; Now the nice syntax-sugar that "end-user" backend authors can write:
+(rule
+  (Lower (Iadd ra rb))
+  (MachInst.Add ra rb))
+(rule
+  (Lower (Isub ra rb))
+  (MachInst.Sub ra rb))
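A note on the external declarations above: `(extractor Op get_opcode)` and `(constructor InputToReg put_input_in_reg)` are not given bodies in ISLE; the compiler is expected to surface them as trait methods (see the `fn {}(&mut self, {}) -> Option<{}>;` template in the codegen change below) that the embedder implements on its lowering context. A rough sketch of what such a binding could look like for this example follows; the trait name, wrapper types, and exact signatures are assumptions for illustration, not the actual generated interface.

// Hypothetical sketch only; names and signatures are assumptions, and the
// real Cranelift LowerCtx bindings are still a TODO item above.
pub enum Opcode { Iadd, Isub, Load, Store }

#[derive(Clone, Copy)]
pub struct Inst(pub u32);

#[derive(Clone, Copy)]
pub struct Reg(pub u32);

pub trait Context {
    // Reverse direction of `(decl Op (Opcode) Inst)`, bound by
    // `(extractor Op get_opcode)`: given an Inst, recover its Opcode.
    fn get_opcode(&mut self, inst: Inst) -> Option<Opcode>;

    // Forward direction of `(decl InputToReg (Inst u32) Reg)`, bound by
    // `(constructor InputToReg put_input_in_reg)`.
    fn put_input_in_reg(&mut self, inst: Inst, input: u32) -> Option<Reg>;
}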
@@ -666,7 +666,7 @@ impl<'a> Codegen<'a> {
         let ret = self.type_name(term.ret_ty, /* by_ref = */ None);
         writeln!(
             code,
-            "fn {}(&mut self, {}) -> Option<{}>;",
+            " fn {}(&mut self, {}) -> Option<{}>;",
             ctor_name,
             args.join(", "),
             ret,
@@ -700,14 +700,19 @@ impl<'a> Codegen<'a> {
             writeln!(code, "pub enum {} {{", name)?;
             for variant in variants {
                 let name = &self.typeenv.syms[variant.name.index()];
+                if variant.fields.is_empty() {
+                    writeln!(code, " {},", name)?;
+                } else {
                 writeln!(code, " {} {{", name)?;
                 for field in &variant.fields {
                     let name = &self.typeenv.syms[field.name.index()];
-                    let ty_name = self.typeenv.types[field.ty.index()].name(&self.typeenv);
+                    let ty_name =
+                        self.typeenv.types[field.ty.index()].name(&self.typeenv);
                     writeln!(code, " {}: {},", name, ty_name)?;
                 }
                 writeln!(code, " }},")?;
                 }
+                }
             writeln!(code, "}}")?;
             }
         _ => {}
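To illustrate the effect of the `variant.fields.is_empty()` branch: a variant with no fields is now emitted as a plain unit variant rather than a braced variant with an empty body. A sketch of the kind of output this produces, using placeholder type names rather than the exact generated code:

// Illustrative shapes only; the real generated names and attributes may differ.
// Field-bearing variants keep the struct-like form:
pub enum MachInstSketch {
    Add { a: u32, b: u32 },
    Sub { a: u32, b: u32 },
}

// Fieldless variants are now written without an empty `{ ... }` body:
pub enum FlagSketch {
    On,
    Off,
}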
@@ -796,10 +801,15 @@ impl<'a> Codegen<'a> {
         for (&termid, trie) in &self.functions_by_input {
             let termdata = &self.termenv.terms[termid.index()];

-            // Skip terms that are enum variants or that have external constructors.
+            // Skip terms that are enum variants or that have external
+            // constructors/extractors.
             match &termdata.kind {
                 &TermKind::EnumVariant { .. } => continue,
-                &TermKind::Regular { constructor, .. } if constructor.is_some() => continue,
+                &TermKind::Regular {
+                    constructor,
+                    extractor,
+                    ..
+                } if constructor.is_some() || extractor.is_some() => continue,
                 _ => {}
             }

@@ -851,7 +861,11 @@ impl<'a> Codegen<'a> {
             // Skip terms that are enum variants or that have external extractors.
             match &termdata.kind {
                 &TermKind::EnumVariant { .. } => continue,
-                &TermKind::Regular { extractor, .. } if extractor.is_some() => continue,
+                &TermKind::Regular {
+                    constructor,
+                    extractor,
+                    ..
+                } if constructor.is_some() || extractor.is_some() => continue,
                 _ => {}
             }

@@ -949,6 +963,13 @@ impl<'a> Codegen<'a> {
                     self.type_name(ty, None),
                     self.typeenv.syms[variantinfo.name.index()]
                 );
+                if input_fields.is_empty() {
+                    writeln!(
+                        code,
+                        "{}let {} = {};",
+                        indent, outputname, full_variant_name
+                    )?;
+                } else {
                 writeln!(
                     code,
                     "{}let {} = {} {{",
@@ -958,6 +979,7 @@ impl<'a> Codegen<'a> {
                     writeln!(code, "{} {},", indent, input_field)?;
                 }
                 writeln!(code, "{}}};", indent)?;
+                }
                 self.define_val(&output, ctx, /* is_ref = */ false);
             }
             &ExprInst::Construct {
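The matching `input_fields.is_empty()` case above changes how a fieldless variant is constructed in generated rule bodies. Roughly, with placeholder names (the actual binding names come from the codegen's `outputname` and `full_variant_name`):

// Illustrative only; not the literal generated output.
enum FlagSketch { On, Off }

fn construct_sketch() -> FlagSketch {
    // Previously the template always opened a brace list, producing something
    // like `let output0 = FlagSketch::On {` followed by `};` on the next line.
    // With the new special case, a unit variant is emitted as a single line:
    let output0 = FlagSketch::On;
    output0
}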
@@ -1089,14 +1111,15 @@ impl<'a> Codegen<'a> {
                 let variant = &variants[variant.index()];
                 let variantname = &self.typeenv.syms[variant.name.index()];
                 let args = self.match_variant_binders(variant, &arg_tys[..], id, ctx);
+                let args = if args.is_empty() {
+                    "".to_string()
+                } else {
+                    format!("{{ {} }}", args.join(", "))
+                };
                 writeln!(
                     code,
-                    "{}if let {}::{} {{ {} }} = {} {{",
-                    indent,
-                    ty_name,
-                    variantname,
-                    args.join(", "),
-                    input
+                    "{}if let {}::{} {} = {} {{",
+                    indent, ty_name, variantname, args, input
                 )?;
                 Ok(false)
             }
@@ -1339,13 +1362,15 @@ impl<'a> Codegen<'a> {
             let variantinfo = &variants[variant.index()];
             let variantname = &self.typeenv.syms[variantinfo.name.index()];
             let fields = self.match_variant_binders(variantinfo, arg_tys, id, ctx);
+            let fields = if fields.is_empty() {
+                "".to_string()
+            } else {
+                format!("{{ {} }}", fields.join(", "))
+            };
             writeln!(
                 code,
-                "{} &{}::{} {{ {} }} => {{",
-                indent,
-                input_ty_name,
-                variantname,
-                fields.join(", ")
+                "{} &{}::{} {} => {{",
+                indent, input_ty_name, variantname, fields,
             )?;
             let subindent = format!("{} ", indent);
             self.generate_body(code, depth + 1, node, &subindent, ctx)?;
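The two hunks above apply the same empty-fields treatment to the pattern side: both the generated `if let` (pattern path) and the generated match arm (extractor path) now omit the `{ ... }` binder list when a variant has no fields. An illustrative shape of the emitted patterns, with placeholder names:

// Illustrative only; binder names in real output come from match_variant_binders.
struct RegSketch(u32);

enum MachInstSketch {
    Add { a: RegSketch, b: RegSketch },
    Nop,
}

fn pattern_sketch(x: &MachInstSketch) -> u32 {
    // Field-bearing variant: binders still appear inside braces.
    if let MachInstSketch::Add { a, b } = x {
        return a.0 + b.0;
    }
    match x {
        // Fieldless variant: no empty `{ }` is interpolated anymore,
        // so the arm is just `&Ty::Variant => ...`.
        &MachInstSketch::Nop => 0,
        _ => 0,
    }
}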
@@ -324,13 +324,18 @@ impl ExprSequence {
        vars: &HashMap<VarId, (Option<TermId>, Value)>,
        gen_final_construct: bool,
    ) -> (Option<TermId>, Vec<Value>) {
+        log::trace!(
+            "gen_expr: expr {:?} gen_final_construct {}",
+            expr,
+            gen_final_construct
+        );
        match expr {
            &Expr::ConstInt(ty, val) => (None, vec![self.add_const_int(ty, val)]),
            &Expr::Let(_ty, ref bindings, ref subexpr) => {
                let mut vars = vars.clone();
                for &(var, _var_ty, ref var_expr) in bindings {
                    let (var_value_term, var_value) =
-                        self.gen_expr(typeenv, termenv, &*var_expr, &vars, false);
+                        self.gen_expr(typeenv, termenv, &*var_expr, &vars, true);
                    let var_value = var_value[0];
                    vars.insert(var, (var_value_term, var_value));
                }
@@ -343,9 +348,11 @@ impl ExprSequence {
            &Expr::Term(ty, term, ref arg_exprs) => {
                let termdata = &termenv.terms[term.index()];
                let mut arg_values_tys = vec![];
+                log::trace!("Term gen_expr term {}", term.index());
                for (arg_ty, arg_expr) in termdata.arg_tys.iter().cloned().zip(arg_exprs.iter()) {
+                    log::trace!("generating for arg_expr {:?}", arg_expr);
                    arg_values_tys.push((
-                        self.gen_expr(typeenv, termenv, &*arg_expr, &vars, false).1[0],
+                        self.gen_expr(typeenv, termenv, &*arg_expr, &vars, true).1[0],
                        arg_ty,
                    ));
                }
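The `log::trace!` calls added in these two hunks go through the `log` facade, so they print nothing unless whatever drives the ISLE compiler installs a logger. One way to see the output while debugging, assuming an `env_logger`-style backend is wired into a small test driver (an assumption; this commit does not add such a driver):

// Hypothetical debug driver; `env_logger` is an assumed dev-dependency here.
fn main() {
    // Installs a logger so the `log::trace!` calls above produce output;
    // run with `RUST_LOG=trace` to enable the trace level.
    env_logger::init();

    // ... invoke ISLE compilation of a test input (e.g. test3.isle) here ...
}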
@@ -383,7 +390,21 @@ pub fn lower_rule(
     let ruledata = &termenv.rules[rule.index()];
     let mut vars = HashMap::new();

+    log::trace!(
+        "lower_rule: ruledata {:?} forward {}",
+        ruledata,
+        is_forward_dir
+    );
+
     if is_forward_dir {
+        let can_do_forward = match &ruledata.lhs {
+            &Pattern::Term(..) => true,
+            _ => false,
+        };
+        if !can_do_forward {
+            return None;
+        }
+
         let lhs_root_term = pattern_seq.gen_pattern(None, tyenv, termenv, &ruledata.lhs, &mut vars);
         let root_term = match lhs_root_term {
             Some(t) => t,
@@ -407,6 +428,14 @@ pub fn lower_rule(
         expr_seq.add_return(output_ty, rhs_root_vals[0]);
         Some((pattern_seq, expr_seq, root_term))
     } else {
+        let can_reverse = match &ruledata.rhs {
+            &Expr::Term(..) => true,
+            _ => false,
+        };
+        if !can_reverse {
+            return None;
+        }
+
         let arg = pattern_seq.add_arg(0, ruledata.lhs.ty());
         let _ = pattern_seq.gen_pattern(Some(arg), tyenv, termenv, &ruledata.lhs, &mut vars);
         let (rhs_root_term, rhs_root_vals) = expr_seq.gen_expr(
@@ -184,6 +184,13 @@ impl<'a> Parser<'a> {
     }

     fn parse_type_variant(&mut self) -> ParseResult<Variant> {
+        if self.is_sym() {
+            let name = self.parse_ident()?;
+            Ok(Variant {
+                name,
+                fields: vec![],
+            })
+        } else {
         self.lparen()?;
         let name = self.parse_ident()?;
         let mut fields = vec![];
@@ -193,6 +200,7 @@ impl<'a> Parser<'a> {
         self.rparen()?;
         Ok(Variant { name, fields })
     }
+        }

     fn parse_type_field(&mut self) -> ParseResult<Field> {
         self.lparen()?;