From 3ce80298d41aca34538bb5bc4464777350d487df Mon Sep 17 00:00:00 2001 From: belazy <aarthuur01@gmail.com> Date: Thu, 20 Oct 2022 16:14:22 +0200 Subject: [PATCH] =?UTF-8?q?9-add-a-basic-type-checker=20=E2=9C=A8=EF=B8=8F?= =?UTF-8?q?=20Closes=20#9=20=F0=9F=90=94=EF=B8=8F=F0=9F=91=8D=EF=B8=8F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🦀️🐰🦀️🐰🦀️🐰 * Fix : clippy * Chore : document `TypeCheckingError` * Chore : remove useless mut * Feat : make `Context` private * Feat : simplify `check` * Chore : Remove old comment * Chore : remove eta-expansion for functions to please the big guy * Chore : comment error * Chore : remove `println` * Chore : add doc for `TypeCheckingError` * Fix : put derive before comments * Feat : add more coverage * Fix : golf `whnf()` * Fix : `polymorphism` test * Fix : `pub_crate` for `substitute` * Chore : CamelCase * Feat : add `Type_checking_error` * Chore : move `normal_form` and `whnf` to `term.rs` * Chore : rename tests * Chore : remove debug `println` * Chore : Add documentation * Feat : use `whnf()` instead of `normal_form()` for conversion * Chore : resolve ill-typed `complex_subst` test * Feat : remove `Val`, correctly substitute terms. * Chore : remove `classic_term.rs` * chore(Context): Rename * chore(type_checker): Apply clippy suggestions on tests * fix: Remove Term's Abs/Prop names * chore: format + derive Default for Ctx * chore: Reorganise type_checker + derive Default for Ctx * Feat : add documentation for `type_checker.rs` * Chore: merge `impl Val`s * Chore : remove debug prints in tests * Chore : mark `Err`s with `//TODO #19` * Chore : Move `Val` to the top of the file * Fix : remove `set_var` in tests * Fix : `polymorphism` test * chore: move everything to their respective `impl` * fix: avoid `assert` in `is_def_eq`, move Val-related methods * chore : make `conversion` a method * Fix: make `normal_form` a method * fix: use `self` for `eval` * Fix : Add issue reference * Feat : remove `panic!`s, use `Result` monad instead * Chore : rename `Ctx::empty()` to `Ctx::new()` * Chore : remove duplicate `assert_def_eq` * Chore : add `impl Term` * Chore : remove `use Term::*`, add `use Val::*` instead * Chore : remove `Into` trait for UniverseLevel * remove useless `impl` for `UniverseLevel` * Chore : remove prints * fix : comment `Val` * Fix : make `conv` into an `Into` trait * fix : `Val` constructor names * fix : imports * chore : fix TODO * fix : `lib.rs` imports * fix : inference for lambda-abstractions * chore : remove useless print * chore : fix various comments * fix : `is_universe` * chore : add more comments to `conv` * feat : simplify `quote` * chore : add TODO * fix : commentary for beta_reduction * chore : comment Val * chore : remove old comment * fix : added `Ctx` type to store type of Vars during checking * feat : add polymorphic test which shows the need to add `Ctx` * chore : please Clippy * Fix : stop cloning environments for no reason * fix : test * chore : add comments * feat : add `check` and `infer` * fix: add string data to lambda-pi abstractions from classicTerm * feat: add string data to pi/lambda-abstractions * fix : made clippy happier * chore : adapt existing code to rebase * fix : conversion between Var and VVar * fix : remove complex_subst from the def_eq test suite for now * feat : added Ctx in preparation for type-checking * this pleases the formatter * Fix : conversion algorithm * basic conversion algorithm (doesn't work) --- kernel/src/lib.rs | 4 +-
kernel/src/term.rs | 34 +++- kernel/src/type_checker.rs | 373 +++++++++++++++++++++++++++++++++++++ 3 files changed, 404 insertions(+), 7 deletions(-) create mode 100644 kernel/src/type_checker.rs diff --git a/kernel/src/lib.rs b/kernel/src/lib.rs index a02efb26..76a6013f 100644 --- a/kernel/src/lib.rs +++ b/kernel/src/lib.rs @@ -3,9 +3,9 @@ mod command; mod term; +mod type_checker; pub use command::Command; -pub use term::Term; - pub use derive_more; pub use num_bigint; +pub use term::Term; diff --git a/kernel/src/term.rs b/kernel/src/term.rs index 35665ed8..bb437fa8 100644 --- a/kernel/src/term.rs +++ b/kernel/src/term.rs @@ -1,7 +1,9 @@ -use derive_more::{Add, Display, From, Sub}; +use derive_more::{Add, Display, From, Into, Sub}; use num_bigint::BigUint; -#[derive(Add, Clone, Debug, Display, Eq, From, Sub, PartialEq, PartialOrd, Ord)] +#[derive( + Add, Copy, Clone, Debug, Default, Display, Eq, Into, From, Sub, PartialEq, PartialOrd, Ord, +)] pub struct DeBruijnIndex(usize); #[derive(Add, Clone, Debug, Display, Eq, From, Sub, PartialEq, PartialOrd, Ord)] @@ -51,7 +53,7 @@ impl Term { } } - fn substitute(self, rhs: Term, depth: usize) -> Term { + pub(crate) fn substitute(self, rhs: Term, depth: usize) -> Term { match self { Var(i) if i == depth.into() => rhs.shift(depth - 1, 0), Var(i) if i > depth.into() => Var(i - 1.into()), @@ -65,12 +67,34 @@ impl Term { _ => self, } } + + /// Returns the normal form of a term. + /// + /// This function is computationally expensive and should only be used for Reduce/Eval commands, not when type-checking. + pub fn normal_form(self) -> Term { + let mut res = self.clone().beta_reduction(); + let mut temp = self; + while res != temp { + temp = res.clone(); + res = res.beta_reduction() + } + res + } + /// Returns the weak-head normal form of a term. + pub fn whnf(self) -> Term { + match self.clone() { + App(box t, t2) => match t.whnf() { + whnf @ Abs(_, _) => App(box whnf, t2).beta_reduction().whnf(), + _ => self, + }, + _ => self, + } + } } #[cfg(test)] mod tests { - // TODO: Correctly types lambda terms (#9) - + // /!\ Most of these tests are on ill-typed terms and should not be used for further testing use super::Term::*; #[test] diff --git a/kernel/src/type_checker.rs b/kernel/src/type_checker.rs new file mode 100644 index 00000000..c8fc05b0 --- /dev/null +++ b/kernel/src/type_checker.rs @@ -0,0 +1,373 @@ +use crate::term::*; +use num_bigint::BigUint; +use std::cmp::max; +use std::ops::Index; +use Term::*; + +impl Index<DeBruijnIndex> for Vec<Term> { + type Output = Term; + + fn index(&self, i: DeBruijnIndex) -> &Self::Output { + &self[usize::from(i)] + } +} +/// Type representing kernel errors; used by the toplevel to pretty-print errors. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum TypeCheckingError { + /// t is not a universe + NotUniverse(Term), + + /// t is not a type + NotType(Term), + + /// t1 and t2 are not definitionally equal + NotDefEq(Term, Term), + + /// f of type t1 can't take argument x of type t2 + WrongArgumentType(Term, Term, Term, Term), + + /// t1, of type ty, is not a function and thus cannot be applied to t2 + NotAFunction(Term, Term, Term), + + /// Expected ty1, found ty2 + TypeMismatch(Term, Term), +} + +use TypeCheckingError::*; +// The context, which is supposed to contain other definitions in the environment, is not implemented for now.
+// TODO use context for type-checking (#17) + +// Type of the lists recording the respective types of the variables in a context +type Types = Vec<Term>; + +/// Structure containing a context used for type-checking. It stores the types of variables in the following way: +/// in a given context {types,lvl}, the type of `Var(i)` is in `types[lvl-i]`. +#[derive(Clone, Debug, Default)] +struct Context { + types: Types, + lvl: DeBruijnIndex, +} + +impl Context { + fn new() -> Self { + Default::default() + } + + /// Extends the context with a bound variable of type `ty`. + fn bind(self, ty: Term) -> Context { + let mut new_types = self.types; + new_types.push(ty); + Context { + types: new_types, + lvl: self.lvl + 1.into(), + } + } +} + +impl Term { + /// Conversion function: checks whether two terms are definitionally equal. + /// + /// The conversion is untyped, meaning that it should **only** be called during type-checking when the two `Term`s are already known to be of the same type and in the same context. + pub fn conversion(self, rhs: Term, l: DeBruijnIndex) -> bool { + match (self.whnf(), rhs.whnf()) { + (Type(i), Type(j)) => i == j, + + (Prop, Prop) => true, + + (Var(i), Var(j)) => i == j, + + (Prod(a1, b1), Prod(box a2, b2)) => { + a1.conversion(a2, l) + && b1 + .substitute(Var(l), l.into()) + .conversion(b2.substitute(Var(l), l.into()), l + 1.into()) + } + + // Since we assume that both terms already have the same type, + // checking conversion over the argument type is useless. + // However, this doesn't mean we can simply remove the arg type + // from the type constructor in the enum, as it is needed to quote back to terms. + (Abs(_, t), Abs(_, u)) => t + .substitute(Var(l), l.into()) + .conversion(u.substitute(Var(l), l.into()), l + 1.into()), + + (App(box t1, box u1), App(box t2, box u2)) => { + t1.conversion(t2, l) && u1.conversion(u2, l) + } + + _ => false, + } + } + + /// Checks whether two terms are definitionally equal. + pub fn is_def_eq(self, rhs: Term) -> Result<(), TypeCheckingError> { + if !self.clone().conversion(rhs.clone(), 1.into()) { + Err(NotDefEq(self, rhs)) + } else { + Ok(()) + } + } + + fn is_universe(&self) -> bool { + matches!(*self, Prop | Type(_)) + } + + /// Computes the universe in which `(x : A) -> B` lives when `A : u1` and `B : u2`.
+ fn imax(self, u2: Term) -> Result<Term, TypeCheckingError> { + match u2 { + // Because Prop is impredicative, if B : Prop, then (x : A) -> B : Prop + Prop => Ok(Prop), + + Type(ref i) => match self { + Prop => Ok(Type(i.clone())), + + // Otherwise, if u1 = Type(i) and u2 = Type(j), then (x : A) -> B : Type(max(i,j)) + Type(j) => Ok(Type(max(i.clone(), j))), + + _ => Err(NotUniverse(u2.clone())), + }, + + _ => Err(NotUniverse(self)), + } + } + + fn _infer(self, ctx: &Context) -> Result<Term, TypeCheckingError> { + match self { + Prop => Ok(Type(BigUint::from(0_u64).into())), + Type(i) => Ok(Type(i + BigUint::from(1_u64).into())), + Var(i) => Ok(ctx.types[ctx.lvl - i].clone()), + Prod(box a, c) => { + let ua = a.clone()._infer(ctx)?; + if !ua.is_universe() { + Err(NotType(ua)) + } else { + let ctx2 = ctx.clone().bind(a); + let ub = c._infer(&ctx2)?; + if !ub.is_universe() { + Err(NotType(ub)) + } else { + ua.imax(ub) + } + } + } + Abs(box t1, c) => { + let ctx2 = ctx.clone().bind(t1.clone()); + Ok(Prod(box t1, box (*c)._infer(&ctx2)?)) + } + App(box a, box b) => { + let type_a = a.clone()._infer(ctx)?; + if let Prod(box t1, cls) = type_a { + let t1_ = b.clone()._infer(ctx)?; + if !t1.clone().conversion(t1_.clone(), ctx.types.len().into()) { + return Err(WrongArgumentType(a, t1, b, t1_)); + }; + Ok(*cls) + } else { + Err(NotAFunction(a, type_a, b)) + } + } + } + } + + /// Infers the type of a `Term` in the empty context. + pub fn infer(self) -> Result<Term, TypeCheckingError> { + self._infer(&Context::new()) + } + fn _check(self, ctx: &Context, ty: Term) -> Result<(), TypeCheckingError> { + let tty = self._infer(ctx)?; + if !tty.clone().conversion(ty.clone(), ctx.types.len().into()) { + return Err(TypeMismatch(ty, tty)); + }; + Ok(()) + } + /// Checks whether a given term is of type `ty` in the empty context. + pub fn check(self, ty: Term) -> Result<(), TypeCheckingError> { + self._check(&Context::new(), ty) + } +} + +#[cfg(test)] +mod tests { + use crate::type_checker::*; + fn id(l: usize) -> Box<Term> { + box Abs(box Prop, box Var(l.into())) + } + + #[test] + fn var_subst_1() { + // (λ P. λP.
1) (λP.1) + let t = App( + box Abs(box Prop, box Abs(box Prop, box Var(1.into()))), + id(1), + ); + let nf = Abs(box Prop, box Var(1.into())); + assert_eq!(t.normal_form(), nf) + } + + #[test] + fn var_subst_2() { + let t = App( + box Abs(box Prop, box Abs(box Prop, box Var(2.into()))), + id(1), + ); + let nf = Abs(box Prop, id(1)); + assert_eq!(t.normal_form(), nf) + } + #[test] + fn simple() { + let t1 = App( + box Abs(box Type(BigUint::from(0_u64).into()), box Var(1.into())), + box Prop, + ); + let t2 = Prop; + assert!(t1.clone().conversion(t2, 0.into())); + let ty = t1._infer(&Context::new()); + assert_eq!(ty, Ok(Type(BigUint::from(0_u64).into()))); + } + + #[test] + fn simple_substitute() -> Result<(), TypeCheckingError> { + // λ (x : P -> P).(λ (y :P).x y) x + //λ P -> P.(λ P.2 1) 1 + let term = Abs( + box Prod(box Prop, box Prop), + box App( + box Abs(box Prop, box App(box Var(2.into()), box Var(1.into()))), + box Var(1.into()), + ), + ); + + // λx.x x + let reduced = Abs(box Prop, box App(box Var(1.into()), box Var(1.into()))); + assert_eq!(term.clone().is_def_eq(reduced), Ok(())); + let _ty = term.infer(); + assert!(matches!(_ty, Err(WrongArgumentType(_, _, _, _)))); + Ok(()) + } + + #[test] + fn complex_conv() { + // (λa.λb.λc.a (λd.λe.e (d b)) (λ_.c) (λd.d)) (λa.λb.a b) + let term = App( + box Abs( + // a : ((P → P) → (P → P) → P) → ((P → P) → ((P → P) → P)) + box Prod( + // (P → P) → ((P → P) → P) + box Prod( + // P -> P + box Prod(box Prop, box Prop), + // (P -> P) -> P + box Prod(box Prod(box Prop, box Prop), box Prop), + ), + // (P → P) → ((P → P) → P) + box Prod( + // P -> P + box Prod(box Prop, box Prop), + // (P -> P) -> P + box Prod(box Prod(box Prop, box Prop), box Prop), + ), + ), + box Abs( + // b : P + box Prop, + box Abs( + // c : P + box Prop, + box App( + box App( + box App( + box Var(3.into()), + box Abs( + // d : P -> P + box Prod(box Prop, box Prop), + box Abs( + // e : P -> P + box Prod(box Prop, box Prop), + box App( + box Var(1.into()), + box App(box Var(2.into()), box Var(4.into())), + ), + ), + ), + ), + // _ : P + box Abs(box Prop, box Var(2.into())), + ), + //d : P + box Abs(box Prop, box Var(1.into())), + ), + ), + ), + ), + box Abs( + //a : (P -> P) -> (P -> P) -> P + box Prod( + box Prod(box Prop, box Prop), + box Prod(box Prod(box Prop, box Prop), box Prop), + ), + box Abs( + //b : P -> P + box Prod(box Prop, box Prop), + box App(box Var(2.into()), box Var(1.into())), + ), + ), + ); + // λa : P.λb : P .b + let reduced = Abs(box Prop, box Abs(box Prop, box Var(1.into()))); + assert_eq!(term.clone().is_def_eq(reduced), Ok(())); + assert!(matches!(term.infer(), Ok(_))) + } + + //(λ ℙ → λ ℙ → λ ℙ → (0 (λ ℙ → λ ℙ → ((4 1) 3) λ ℙ → 3)) (λ ℙ → λ ℙ → (0 1) (λ ℙ → 0 λ ℙ → 0))) + #[test] + fn nf_test() { + //λa.a (λx.x) (λx.x) + let reduced = Abs(box Prop, box App(box App(box Var(2.into()), id(1)), id(1))); + let nff = reduced.clone().normal_form(); + assert_eq!(reduced, nff); + assert_eq!(reduced.is_def_eq(nff), Ok(())); + } + + #[test] + fn polymorphism() { + let id = Abs( + box Type(BigUint::from(0_u64).into()), + box Abs(box Var(1.into()), box Var(1.into())), + ); + assert!(matches!(id.infer(), Ok(_))) + } + #[test] + fn type_type() { + assert!(matches!( + Type(BigUint::from(0_u64).into()).check(Type(BigUint::from(1_u64).into())), + Ok(_) + )) + } + + #[test] + fn not_function() { + let t = App(box Prop, box Prop); + assert!(matches!(t.infer(), Err(NotAFunction(..)))) + } + + #[test] + fn not_type_prod() { + let t1 = Prod(box Abs(box
Prop, box Var(1.into())), box Prop); + assert!(matches!(t1.infer(), Err(NotType(..)))); + let t2 = Prod(box Prop, box Abs(box Prop, box Prop)); + assert!(matches!(t2.infer(), Err(NotType(..)))); + let wf_prod1 = Prod(box Prop, box Prop); + assert!(matches!( + wf_prod1.check(Type(BigUint::from(0_u64).into())), + Ok(()) + )); + let wf_prod2 = Prod(box Prop, box Var(1.into())); + assert!(matches!(wf_prod2.check(Prop), Ok(()))); + // Type0 -> (A : Prop) -> + let wf_prod3 = Prod(box Prop, box Prop); + assert!(matches!( + wf_prod3.check(Type(BigUint::from(0_u64).into())), + Ok(()) + )); + } +} -- GitLab
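A short usage sketch of the two reduction helpers added to `kernel/src/term.rs` by this patch. It is not part of the patch: the test name is invented, and the snippet is assumed to live inside the existing `mod tests` of `term.rs` (which already imports `super::Term::*` and, like the rest of the diff, builds with the nightly `box` syntax).

#[test]
fn whnf_vs_normal_form_sketch() {
    // (λ (x : Prop). x) Prop — a single β-redex. Reduction here is purely
    // syntactic, so the term does not need to be well-typed.
    let redex = App(box Abs(box Prop, box Var(1.into())), box Prop);

    // `whnf` only reduces until the head is no longer an applied abstraction...
    assert_eq!(redex.clone().whnf(), Prop);

    // ...while `normal_form` iterates `beta_reduction` until a fixpoint is reached.
    assert_eq!(redex.normal_form(), Prop);
}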
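Likewise, a minimal sketch of how the new type-checking API (`infer`, `check`, `is_def_eq`) fits together. Again this is not part of the patch: the test name is made up, and the snippet is assumed to sit inside the `mod tests` block that this diff adds to `kernel/src/type_checker.rs`, following its convention of De Bruijn indices starting at 1.

#[test]
fn identity_api_sketch() {
    // λ (x : Prop). x — the identity on propositions.
    let id = Abs(box Prop, box Var(1.into()));

    // `infer` computes its type, Prop -> Prop, ...
    let ty = Prod(box Prop, box Prop);
    assert_eq!(id.clone().infer(), Ok(ty.clone()));

    // ...`check` verifies the term against a candidate type, ...
    assert_eq!(id.clone().check(ty), Ok(()));

    // ...and `is_def_eq` compares terms up to β-conversion:
    // applying the identity to itself reduces back to the identity.
    let applied = App(box id.clone(), box id.clone());
    assert_eq!(applied.is_def_eq(id), Ok(()));
}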