
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (4)
  • Resolve "proost fonctionality and UI" ✨️ · 36190e2a
    v-lafeychine authored, aalbert committed
    Closes #22 and #7 🐔️👍️
    Approved-by: aalbert <augustin.albert@bleu-azure.fr>
    Approved-by: belazy <aarthuur01@gmail.com>
    Approved-by: loutr <loutr@crans.org>
    
    🦀️🍰🦀️🍰🦀️🍰
    
    * Chore : add test
    
    * Fix : shift vars when binding lambda-abstractions
    
    * fix(kernel): some tests were wrong
    
    * HOT fix(kernel): command definition fixed
    
    * HOT fix(parser): fix parenthesis
    
    * HOT fix(parser): order of fun
    
    * feat(parser): resolve threads
    
    * chore(parser): Merge renamed_rules on Error
    
    * chore(command): Merge Define/DefineCheckType + Add tests
    
    * chore(parser): apply suggestions
    
    * feat(parser): add location conversion from pest to proost's kernel
    
    * Apply 1 suggestion(s) to 1 file(s)
    
    * fix(all): resolve thread and change Pos for Loc (range position) +
    conversion from position to range position in parser
    
    * fix(all): resolve threads
    
    * feat(kernel): Add tests in commands.rs
    
    * feat(parser, error): add new errors for parser, pos struct, first use of pos and pretty print of parsing errors
    
    * feat(parser, kernel): new kernel error: cannot parse
    
    * feat(proost): new ui v1
    
    * feat(proost): new interface!
    
    * feat(parser, kernel): change grammar and pretty printing
  • Resolve "Bug rev parser" ✨️ · a1da3922
    aalbert authored
    Closes #30 🐔️👍️
    
    🦀️🍰🦀️🍰🦀️🍰
    
    * fix(parser)
  • Resolve "Remove unknow file "z" (sorry)" ✨️ · 504e1b98
    aalbert authored
    Closes #31 🐔️👍️
    
    🦀️🍰🦀️🍰🦀️🍰
    
    * fix: remove 'z' file
  • v-lafeychine
.gitlab-ci.yml

-image: vlafeychine/rust
 .push-zamok: &push-zamok
   - eval $(ssh-agent -s)
   - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
   - [ -d ${ZAMOK_SOURCE} ] &&
     rsync -e "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" -rqz ${ZAMOK_SOURCE}/. v-lafeychine@zamok.crans.org:www/proost/${ZAMOK_TARGET} || true

-stages: [check, build, tests, docs]
+default:
+  image: vlafeychine/rust
+  interruptible: true
+  cache:
+    paths:
+      - .cache/cargo/
+      - target/

 variables:
   CARGO_HOME: "$CI_PROJECT_DIR/.cache/cargo/"
   RUSTFLAGS: "-D warnings"

-cache:
-  key: "$CI_JOB_STAGE-$CI_COMMIT_REF_SLUG"
-  paths:
-    - .cache/cargo/
-    - target/
-default:
-  interruptible: true
+stages: [ check, build, tests, docs ]

 format:
+  cache: []
   stage: check
-  cache: []
   script:
     - cargo fmt --check

@@ -39,7 +37,6 @@ build:
 tests:
   stage: tests
   variables:
     CARGO_INCREMENTAL: 0
     EXCLUDE_BR_REGEXP: "#\\[|unreachable!|assert(_eq)?!"
@@ -50,34 +47,35 @@ tests:
     ZAMOK_SOURCE: coverage
     ZAMOK_TARGET: coverage/${CI_COMMIT_REF_SLUG}
+  cache:
+    paths:
+      - .cache/cargo/
   script:
     - cargo test
-    - ${GRCOV} --ignore "*cargo*" -t cobertura -o ./coverage.xml &&
-      xsltproc --novalid --output ./target/coverage.xml ./.gitlab-cov.xsl ./coverage.xml || true
-    - ${GRCOV} --ignore "*cargo*" -t lcov -o ./target/coverage.lcov &&
-      genhtml --branch --no-function-coverage --precision 2 ./target/coverage.lcov -o ./coverage || true
-    - find ./target \( -name "*.gcda" -or -name "*.gcno" \) -delete
+    - ${GRCOV} --ignore "*cargo*" -t cobertura -o ./coverage_raw.xml &&
+      xsltproc --novalid --output ./coverage.xml ./.gitlab-cov.xsl ./coverage_raw.xml || true
+    - ${GRCOV} --ignore "*cargo*" -t lcov -o ./coverage.lcov &&
+      genhtml --branch --no-function-coverage --precision 2 ./coverage.lcov -o ./coverage || true
     - *push-zamok
   artifacts:
     reports:
       coverage_report:
         coverage_format: cobertura
-        path: ./target/coverage.xml
+        path: ./coverage.xml
   coverage: '/^ branches...: \d+.\d+%/'

 docs:
   stage: docs
   variables:
     ZAMOK_SOURCE: target/doc
     ZAMOK_TARGET: doc
   only:
-    refs:
-      - main
+    refs: [ main ]
   script:
     - cargo doc --no-deps
...
Cargo.lock

@@ -94,6 +94,17 @@ dependencies = [
  "winapi",
 ]

+[[package]]
+name = "colored"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd"
+dependencies = [
+ "atty",
+ "lazy_static",
+ "winapi",
+]
+
 [[package]]
 name = "convert_case"
 version = "0.4.0"
@@ -261,6 +272,12 @@ dependencies = [
  "num-bigint",
 ]

+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
 [[package]]
 name = "libc"
 version = "0.2.133"
@@ -361,9 +378,9 @@ dependencies = [
 [[package]]
 name = "pest"
-version = "2.3.1"
+version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb779fcf4bb850fbbb0edc96ff6cf34fd90c4b1a112ce042653280d9a7364048"
+checksum = "dbc7bc69c062e492337d74d59b120c274fd3d261b6bf6d3207d499b4b379c41a"
 dependencies = [
  "thiserror",
  "ucd-trie",
@@ -371,9 +388,9 @@ dependencies = [
 [[package]]
 name = "pest_derive"
-version = "2.3.1"
+version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "502b62a6d0245378b04ffe0a7fb4f4419a4815fce813bd8a0ec89a56e07d67b1"
+checksum = "60b75706b9642ebcb34dab3bc7750f811609a0eb1dd8b88c2d15bf628c1c65b2"
 dependencies = [
  "pest",
  "pest_generator",
@@ -381,9 +398,9 @@ dependencies = [
 [[package]]
 name = "pest_generator"
-version = "2.3.1"
+version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "451e629bf49b750254da26132f1a5a9d11fd8a95a3df51d15c4abd1ba154cb6c"
+checksum = "f4f9272122f5979a6511a749af9db9bfc810393f63119970d7085fed1c4ea0db"
 dependencies = [
  "pest",
  "pest_meta",
@@ -394,9 +411,9 @@ dependencies = [
 [[package]]
 name = "pest_meta"
-version = "2.3.1"
+version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bcec162c71c45e269dfc3fc2916eaeb97feab22993a21bcce4721d08cd7801a6"
+checksum = "4c8717927f9b79515e565a64fe46c38b8cd0427e64c40680b14a7365ab09ac8d"
 dependencies = [
  "once_cell",
  "pest",
@@ -441,6 +458,8 @@ name = "proost"
 version = "0.1.0"
 dependencies = [
  "clap",
+ "colored",
+ "kernel",
  "parser",
  "rustyline",
 ]
...
flake.nix

@@ -53,7 +53,7 @@
       copyToRoot = pkgs.buildEnv {
         name = "proost-dependencies";
-        paths = (with pkgs; [ coreutils findutils gcc gnugrep gnused grcov lcov libxslt openssh rsync rust-ci ])
+        paths = (with pkgs; [ coreutils gcc gnugrep gnused grcov lcov libxslt openssh rsync rust-ci ])
           ++ (with pkgs.dockerTools; [ binSh caCertificates fakeNss ]);
         pathsToLink = [ "/bin" "/etc" ];
       };
@@ -70,7 +70,7 @@
       commands = [{
         name = "coverage";
         command = let
-          excl_br_regexp = "#\\[|unreachable!()|assert(_eq)?!";
+          excl_br_regexp = "#\\[|unreachable!|assert(_eq)?!";
           excl_regexp = "//!|///|#\\[|use";
           env = "CARGO_INCREMENTAL=0"
             + " RUSTFLAGS=\"-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort\""
...
kernel/src/command.rs

-use crate::Term;
-use derive_more::Display;
+use crate::{Environment, KernelError, Term};

-#[derive(Debug, Display, Eq, PartialEq)]
+#[derive(Debug, Eq, PartialEq)]
 pub enum Command {
-    #[display(fmt = "Define {} := {}.", _0, _1)]
-    Define(String, Term),
-    #[display(fmt = "Check {} : {}.", _0, _1)]
+    Define(String, Option<Term>, Term),
     CheckType(Term, Term),
-    #[display(fmt = "Type {}.", _0)]
     GetType(Term),
-    #[display(fmt = "Define {} : {} := {}.", _0, _1, _2)]
-    DefineCheckType(String, Term, Term),
 }
+
+impl Command {
+    // TODO (#19)
+    pub fn process(self, env: &mut Environment) -> Result<Option<Term>, KernelError> {
+        match self {
+            Command::Define(s, None, term) => term
+                .infer(env)
+                .and_then(|t| env.insert(s, term, t).map(|_| None)),
+            Command::Define(s, Some(t), term) => term
+                .check(&t, env)
+                .and_then(|_| env.insert(s, term, t).map(|_| None)),
+            Command::CheckType(t1, t2) => t1.check(&t2, env).map(|_| None),
+            Command::GetType(t) => t.infer(env).map(Some),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{num_bigint::BigUint, Command, Environment, Term};
+
+    fn simple_env() -> Environment {
+        Environment::new()
+            .insert(
+                "x".to_string(),
+                Term::Type(BigUint::from(0_u64).into()),
+                Term::Prop,
+            )
+            .unwrap()
+            .clone()
+    }
+
+    #[test]
+    fn failed_untyped_define() {
+        let cmd = Command::Define("x".to_string(), None, Term::Prop);
+        let mut env = simple_env();
+
+        assert!(cmd.process(&mut env).is_err());
+        assert_eq!(env, simple_env());
+    }
+
+    #[test]
+    fn successful_untyped_define() {
+        let cmd = Command::Define("y".to_string(), None, Term::Prop);
+        let mut env = simple_env();
+
+        assert!(cmd.process(&mut env).is_ok());
+        assert_eq!(
+            env,
+            *(simple_env()
+                .insert(
+                    "y".to_string(),
+                    Term::Prop,
+                    Term::Type(BigUint::from(0_u64).into())
+                )
+                .unwrap())
+        );
+    }
+
+    #[test]
+    fn failed_typed_define() {
+        let cmd = Command::Define(
+            "y".to_string(),
+            Some(Term::Type(BigUint::from(1_u64).into())),
+            Term::Prop,
+        );
+        let mut env = simple_env();
+
+        assert!(cmd.process(&mut env).is_err());
+        assert_eq!(env, simple_env());
+    }
+
+    #[test]
+    fn successful_typed_define() {
+        let cmd = Command::Define(
+            "y".to_string(),
+            Some(Term::Type(BigUint::from(0_u64).into())),
+            Term::Prop,
+        );
+        let mut env = simple_env();
+
+        assert!(cmd.process(&mut env).is_ok());
+        assert_eq!(
+            env,
+            *(simple_env()
+                .insert(
+                    "y".to_string(),
+                    Term::Prop,
+                    Term::Type(BigUint::from(0_u64).into())
+                )
+                .unwrap())
+        );
+    }
+
+    #[test]
+    fn failed_checktype() {
+        let cmd = Command::CheckType(Term::Prop, Term::Prop);
+        let mut env = simple_env();
+
+        assert!(cmd.process(&mut env).is_err());
+        assert!(env == simple_env());
+    }
+
+    #[test]
+    fn successful_checktype() {
+        let cmd = Command::CheckType(Term::Prop, Term::Type(BigUint::from(0_u64).into()));
+        let mut env = simple_env();
+
+        assert!(cmd.process(&mut env).is_ok());
+        assert_eq!(env, simple_env());
+    }
+
+    #[test]
+    fn failed_gettype() {
+        let cmd = Command::GetType(Term::Const("y".to_string()));
+        let mut env = simple_env();
+
+        assert!(cmd.process(&mut env).is_err());
+        assert!(env == simple_env());
+    }
+
+    #[test]
+    fn successful_gettype() {
+        let cmd = Command::GetType(Term::Prop);
+        let mut env = simple_env();
+
+        assert!(cmd.process(&mut env).is_ok());
+        assert_eq!(env, simple_env());
+    }
+}
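Taken together, the new Command::Define(String, Option<Term>, Term) carries an optional ascribed type, and process() either infers the definition's type or checks it against the ascription before inserting it into the environment. A minimal usage sketch, assuming only the kernel API visible in this diff (the snippet itself is not part of the change):

    use kernel::{Command, Environment, Term};

    fn main() {
        let mut env = Environment::new();

        // "def x := Prop": no ascription, so the type of Prop is inferred (Type 0)
        let define = Command::Define("x".to_string(), None, Term::Prop);
        assert!(define.process(&mut env).is_ok());

        // "check Prop : Prop": fails, since Prop lives in Type 0, not in Prop
        let check = Command::CheckType(Term::Prop, Term::Prop);
        assert!(check.process(&mut env).is_err());
    }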
kernel/src/environment.rs

+use crate::error::KernelError;
 use crate::term::Term;
 use derive_more::From;
-use std::collections::HashMap;
+use std::collections::{hash_map, HashMap};

 /// Global Environment, contains the term and type of every definitions, denoted by their strings.
-#[derive(Clone, Default, From)]
+#[derive(Clone, Default, Debug, Eq, PartialEq, From)]
 pub struct Environment(HashMap<String, (Term, Term)>);

-// TODO #19
-#[derive(Debug, Clone)]
-pub enum EnvError {
-    AlreadyDefined(String),
-}

 impl Environment {
     /// Creates an empty environment.
     pub fn new() -> Self {
         Self::default()
     }

-    /// Creates a new environment binding s with (t,ty)
-    pub fn insert(self, s: String, t: Term, ty: Term) -> Result<Self, EnvError> {
-        match self.0.clone().get(&s) {
-            Some(_) => Err(EnvError::AlreadyDefined(s)),
-            None => {
-                let mut res = self.0;
-                res.insert(s, (t, ty));
-                Ok(res.into())
-            }
+    /// Creates a new environment binding s with (t1,t2)
+    pub fn insert(&mut self, s: String, t1: Term, t2: Term) -> Result<&Self, KernelError> {
+        if let hash_map::Entry::Vacant(e) = self.0.entry(s.clone()) {
+            e.insert((t1, t2));
+            Ok(self)
+        } else {
+            Err(KernelError::AlreadyDefined(s))
         }
     }
...
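The reworked insert takes &mut self and goes through the entry API, so a name can only be bound once; rebinding now surfaces as KernelError::AlreadyDefined instead of the old EnvError. A small sketch of the intended behaviour, assuming the re-exports added in kernel/src/lib.rs below:

    use kernel::num_bigint::BigUint;
    use kernel::{Environment, KernelError, Term};

    fn main() {
        let mut env = Environment::new();
        let ty = Term::Type(BigUint::from(0_u64).into());

        // first binding of "foo" succeeds
        assert!(env.insert("foo".to_string(), Term::Prop, ty.clone()).is_ok());

        // a second binding of the same name is rejected rather than overwritten
        assert!(matches!(
            env.insert("foo".to_string(), Term::Prop, ty),
            Err(KernelError::AlreadyDefined(_))
        ));
    }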
kernel/src/error.rs (new file)

+use crate::term::Term;
+use derive_more::Display;
+
+// TODO #19
+#[derive(Clone, Debug, Display, PartialEq, Eq)]
+#[display(fmt = "{}:{}-{}:{}", line1, column1, line2, column2)]
+// Line/column position
+pub struct Loc {
+    pub line1: usize,
+    pub column1: usize,
+    pub line2: usize,
+    pub column2: usize,
+}
+
+impl Loc {
+    pub fn new(x1: usize, y1: usize, x2: usize, y2: usize) -> Loc {
+        Loc {
+            line1: x1,
+            column1: y1,
+            line2: x2,
+            column2: y2,
+        }
+    }
+}
+
+// TODO #19
+/// Type representing kernel errors, is used by the toplevel to pretty-print errors.
+#[derive(Clone, Debug, Display, PartialEq, Eq)]
+pub enum KernelError {
+    // cannot parse command
+    #[display(fmt = "cannot parse: {}", _1)]
+    CannotParse(Loc, String),
+
+    // s is already defined
+    #[display(fmt = "{} is already defined", _0)]
+    AlreadyDefined(String),
+
+    /// s is not defined
+    #[display(fmt = "{} is not defined", _0)]
+    ConstNotFound(String),
+
+    /// t is not a universe
+    #[display(fmt = "{} is not a universe", _0)]
+    NotUniverse(Term),
+
+    /// t is not a type
+    #[display(fmt = "{} is not a type", _0)]
+    NotType(Term),
+
+    /// t1 and t2 are not definitionally equal
+    #[display(fmt = "{} and {} are not definitionaly equal", _0, _1)]
+    NotDefEq(Term, Term),
+
+    /// f of type t1 cannot be applied to x of type t2
+    #[display(fmt = "{} : {} cannot be applied to {} : {}", _0, _1, _2, _3)]
+    WrongArgumentType(Term, Term, Term, Term),
+
+    /// t1 of type ty is not a function so cannot be applied to t2
+    #[display(
+        fmt = "{} : {} is not a function so cannot be applied to {}",
+        _0,
+        _1,
+        _2
+    )]
+    NotAFunction(Term, Term, Term),
+
+    /// Expected ty1, found ty2
+    #[display(fmt = "expected {}, found {}", _0, _1)]
+    TypeMismatch(Term, Term),
+}
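Since both the parser and the type checker now funnel into this single KernelError type, and derive_more::Display gives each variant a message, the toplevel can print any failure uniformly. A hedged illustration of what those messages look like, using only variants defined above:

    use kernel::{KernelError, Loc, Term};

    fn main() {
        // a parse error carries the range it covers (line 1, columns 5 to 9 here)
        let parse_err = KernelError::CannotParse(Loc::new(1, 5, 1, 9), "unexpected token".to_string());
        println!("{}", parse_err); // cannot parse: unexpected token

        // type-checking errors display the offending terms via their Display impls
        let ty_err = KernelError::NotDefEq(Term::Prop, Term::Const("foo".to_string()));
        println!("{}", ty_err); // Prop and foo are not definitionaly equal (spelling as in the format string)
    }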
kernel/src/lib.rs

@@ -5,10 +5,14 @@
 mod command;
 mod environment;
+mod error;
 mod term;
 mod type_checker;

-pub use command::Command;
 pub use derive_more;
 pub use num_bigint;
+
+pub use command::Command;
+pub use environment::Environment;
+pub use error::{KernelError, Loc};
 pub use term::Term;
kernel/src/term.rs

@@ -18,19 +18,19 @@ pub enum Term {
     #[display(fmt = "{}", _0)]
     Const(String),

-    #[display(fmt = "\u{02119}")]
+    #[display(fmt = "Prop")]
     Prop,

-    #[display(fmt = "Type({})", _0)]
+    #[display(fmt = "Type {}", _0)]
     Type(UniverseLevel),

-    #[display(fmt = "({} {})", _0, _1)]
+    #[display(fmt = "{} {}", _0, _1)]
     App(Box<Term>, Box<Term>),

-    #[display(fmt = "\u{003BB}{} \u{02192} {}", _0, _1)]
+    #[display(fmt = "\u{003BB} {} \u{02192} {}", _0, _1)]
     Abs(Box<Term>, Box<Term>),

-    #[display(fmt = "\u{02200}{} \u{02192} {}", _0, _1)]
+    #[display(fmt = "\u{03A0} {} \u{02192} {}", _0, _1)]
     Prod(Box<Term>, Box<Term>),
 }
@@ -51,7 +51,7 @@ impl Term {
         }
     }

-    fn shift(&self, offset: usize, depth: usize) -> Term {
+    pub(crate) fn shift(&self, offset: usize, depth: usize) -> Term {
         match self {
             Var(i) if *i > depth.into() => Var(*i + offset.into()),
             App(box t1, box t2) => App(box t1.shift(offset, depth), box t2.shift(offset, depth)),
@@ -307,9 +307,8 @@ mod tests {
     #[test]
     fn beta_red_const() {
         let id_prop = Prod(box Prop, box Prod(box Var(1.into()), box Var(1.into())));
-        let env = Environment::new()
-            .insert("foo".into(), id_prop.clone(), Prop)
-            .unwrap();
+        let mut env = Environment::new();
+        env.insert("foo".into(), id_prop.clone(), Prop).unwrap();

         assert_eq!(Const("foo".into()).beta_reduction(&env), id_prop);
     }
...
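The Display changes swap the old ℙ/∀ notation for plain Prop, Type n and a Π arrow, which is what the new REPL prints back. A rough sketch of how a term renders under these formats (the exact output for variables is an assumption, since the Var formatter is outside the shown hunk):

    use kernel::Term;

    fn main() {
        // identity-like term in de Bruijn notation; prints roughly: λ Prop → Π 1 → 1
        let t = Term::Abs(
            Box::new(Term::Prop),
            Box::new(Term::Prod(
                Box::new(Term::Var(1.into())),
                Box::new(Term::Var(1.into())),
            )),
        );
        println!("{}", t);
    }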
kernel/src/type_checker.rs

 use crate::environment::Environment;
+use crate::error::KernelError;
 use crate::term::{DeBruijnIndex, Term};
 use num_bigint::BigUint;
 use std::cmp::max;
@@ -13,31 +14,6 @@ impl Index<DeBruijnIndex> for Vec<Term> {
     }
 }

-// TODO #19
-/// Type representing kernel errors, is used by the toplevel to pretty-print errors.
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub enum TypeCheckingError {
-    /// Constant s has not been found in the current context
-    ConstNotFound(String),
-    /// t is not a universe
-    NotUniverse(Term),
-    /// t1 and t2 are not definitionally equal
-    NotDefEq(Term, Term),
-    /// f of type t1 can't take argument x of type t2
-    WrongArgumentType(Term, Term, Term, Term),
-    /// t1 is of type ty is not a function, and thus cannot be applied to t2
-    NotAFunction(Term, Term, Term),
-    /// Expected ty1, found ty2
-    TypeMismatch(Term, Term),
-}
-
-use TypeCheckingError::*;
-
 /// Type of lists of tuples representing the respective types of each variables
 type Types = Vec<Term>;
@@ -99,19 +75,23 @@
                 t1.conversion(&t2, env, lvl) && u1.conversion(&u2, env, lvl)
             }

+            (app @ App(box Abs(_, _), box _), u) | (u, app @ App(box Abs(_, _), box _)) => {
+                app.beta_reduction(env).conversion(&u, env, lvl)
+            }
+
             _ => false,
         }
     }

     /// Checks whether two terms are definitionally equal.
-    pub fn is_def_eq(&self, rhs: &Term, env: &Environment) -> Result<(), TypeCheckingError> {
+    pub fn is_def_eq(&self, rhs: &Term, env: &Environment) -> Result<(), KernelError> {
         self.conversion(rhs, env, 1.into())
             .then_some(())
-            .ok_or_else(|| NotDefEq(self.clone(), rhs.clone()))
+            .ok_or_else(|| KernelError::NotDefEq(self.clone(), rhs.clone()))
     }

     /// Computes universe the universe in which `(x : A) -> B` lives when `A : u1` and `B : u2`.
-    fn imax(&self, rhs: &Term) -> Result<Term, TypeCheckingError> {
+    fn imax(&self, rhs: &Term) -> Result<Term, KernelError> {
         match rhs {
             // Because Prop is impredicative, if B : Prop, then (x : A) -> b : Prop
             Prop => Ok(Prop),
@@ -122,14 +102,14 @@
                 // else if u1 = Type(i) and u2 = Type(j), then (x : A) -> B : Type(max(i,j))
                 Type(j) => Ok(Type(max(i.clone(), j.clone()))),

-                _ => Err(NotUniverse(rhs.clone())),
+                _ => Err(KernelError::NotUniverse(self.clone())),
             },

-            _ => Err(NotUniverse(self.clone())),
+            _ => Err(KernelError::NotUniverse(rhs.clone())),
         }
     }

-    fn _infer(&self, env: &Environment, ctx: &mut Context) -> Result<Term, TypeCheckingError> {
+    fn _infer(&self, env: &Environment, ctx: &mut Context) -> Result<Term, KernelError> {
         match self {
             Prop => Ok(Type(BigUint::from(0_u64).into())),
             Type(i) => Ok(Type(i.clone() + BigUint::from(1_u64).into())),
@@ -137,18 +117,18 @@
             Const(s) => match env.get_type(s) {
                 Some(ty) => Ok(ty),
-                None => Err(ConstNotFound(s.clone())),
+                None => Err(KernelError::ConstNotFound(s.clone())),
             },

             Prod(box t, u) => {
                 let univ_t = t._infer(env, ctx)?;
                 let univ_u = u._infer(env, ctx.clone().bind(t))?;

-                univ_t.imax(&univ_u)
+                univ_t.normal_form(env).imax(&univ_u.normal_form(env))
             }

             Abs(box t, u) => {
-                let u = u._infer(env, ctx.clone().bind(t))?;
+                let u = u._infer(env, ctx.clone().bind(&t.shift(1, 0)))?;
                 Ok(Prod(box t.clone(), box u))
             }
@@ -160,27 +140,29 @@
                     typ_lhs
                         .conversion(&typ_rhs, env, ctx.types.len().into())
                         .then_some(*cls)
-                        .ok_or_else(|| WrongArgumentType(t.clone(), typ_lhs, u.clone(), typ_rhs))
+                        .ok_or_else(|| {
+                            KernelError::WrongArgumentType(t.clone(), typ_lhs, u.clone(), typ_rhs)
+                        })
                 }

-                x => Err(NotAFunction(t.clone(), x, u.clone())),
+                x => Err(KernelError::NotAFunction(t.clone(), x, u.clone())),
             },
         }
     }

     /// Infers the type of a `Term` in a given context.
-    pub fn infer(&self, env: &Environment) -> Result<Term, TypeCheckingError> {
+    pub fn infer(&self, env: &Environment) -> Result<Term, KernelError> {
         self._infer(env, &mut Context::new())
     }

     /// Checks whether a given term is of type `ty` in a given context.
-    pub fn check(&self, ty: &Term, env: &Environment) -> Result<(), TypeCheckingError> {
+    pub fn check(&self, ty: &Term, env: &Environment) -> Result<(), KernelError> {
         let ctx = &mut Context::new();
         let tty = self._infer(env, ctx)?;

         tty.conversion(ty, env, ctx.types.len().into())
             .then_some(())
-            .ok_or_else(|| TypeMismatch(ty.clone(), tty))
+            .ok_or_else(|| KernelError::TypeMismatch(tty, ty.clone()))
     }
 }
@@ -399,11 +381,10 @@ mod tests {
     #[test]
     fn env_test() {
-        let id_prop = Prod(box Prop, box Prod(box Var(1.into()), box Var(1.into())));
+        let id_prop = Prod(box Prop, box Prod(box Var(1.into()), box Var(2.into())));
         let t = Abs(box Prop, box Abs(box Var(1.into()), box Var(1.into())));
-        let env = Environment::new()
-            .insert("foo".into(), id_prop.clone(), Prop)
-            .unwrap();
+        let mut env = Environment::new();
+        env.insert("foo".into(), id_prop.clone(), Prop).unwrap();

         assert!(t.check(&Const("foo".into()), &env).is_ok());
         assert!(id_prop.is_def_eq(&Const("foo".into()), &env).is_ok());
@@ -424,11 +405,30 @@
     #[test]
     fn infer_const() {
         let id_prop = Prod(box Prop, box Prod(box Var(1.into()), box Var(1.into())));
-        let env = Environment::new()
-            .insert("foo".into(), id_prop, Prop)
-            .unwrap();
+        let mut env = Environment::new();
+        env.insert("foo".into(), id_prop, Prop).unwrap();

         assert!(Const("foo".into()).infer(&env).is_ok());
         assert!(Const("foo".into()).infer(&Environment::new()).is_err());
     }
+
+    #[test]
+    fn prod_var() {
+        let ty = Prod(box Prop, box Prod(box Var(1.into()), box Var(2.into())));
+        let t = Abs(box Prop, box Abs(box Var(1.into()), box Var(1.into())));
+        assert!(t.check(&ty, &Environment::new()).is_ok())
+    }
+
+    #[test]
+    fn univ_reduc() {
+        let ty = App(
+            box Abs(box Prop, box Type(BigUint::from(0_u64).into())),
+            box Prod(box Prop, box Var(1.into())),
+        );
+        assert!(ty.conversion(
+            &Type(BigUint::from(0_u64).into()),
+            &Environment::new(),
+            0.into()
+        ))
+    }
 }
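With the error type unified, infer and check are the kernel's public entry points, and conversion now also reduces an applied abstraction on the fly (the new App(Abs(..), _) arm). A small external usage sketch, grounded in the behaviour exercised by the tests above:

    use kernel::num_bigint::BigUint;
    use kernel::{Environment, Term};

    fn main() {
        let env = Environment::new();

        // Prop : Type 0
        assert_eq!(
            Term::Prop.infer(&env).unwrap(),
            Term::Type(BigUint::from(0_u64).into())
        );

        // an unknown constant is reported as a KernelError (ConstNotFound)
        assert!(Term::Const("foo".to_string()).infer(&env).is_err());
    }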
parser/Cargo.toml

@@ -7,5 +7,5 @@ license = "GPL-3.0-or-later"
 [dependencies]
 kernel = { path = "../kernel" }
-pest = "2.0"
-pest_derive = "2.0"
+pest = "2.4"
+pest_derive = "2.4"
parser/src/lib.rs

 //! Fast parser for λ-terms and commands using pest.
 //!
-//! Provides the function `parse_commands` to parse both files and single commands.
+//! Provides the functions to parse both files and single commands.

 #![feature(box_syntax)]
@@ -9,4 +9,4 @@ extern crate pest_derive;
 mod parser;

-pub use self::parser::parse_commands;
+pub use self::parser::{parse_file, parse_line};
parser/src/parser.rs

 use kernel::num_bigint::BigUint;
-use kernel::{Command, Term};
-use pest::error::{Error, ErrorVariant};
+use kernel::{Command, KernelError, Loc, Term};
+use pest::error::{Error, LineColLocation};
 use pest::iterators::Pair;
-use pest::Parser;
+use pest::{Parser, Span};
+use std::cmp::Ordering;
 use std::collections::VecDeque;

 #[derive(Parser)]
 #[grammar = "term.pest"]
 struct CommandParser;

-fn build_term_from_expr(
-    pair: Pair<Rule>,
-    known_vars: &mut VecDeque<String>,
-    //defined_vars: issue #18 TODO use a hash map of known variables
-) -> Result<Term, Box<Error<Rule>>> {
-    match pair.as_rule() {
-        Rule::Prop => Ok(Term::Prop),
-        Rule::Type => Ok(Term::Type(
-            pair.into_inner()
-                .as_str()
-                .parse::<BigUint>()
-                .unwrap()
-                .into(),
-        )),
+// convert pest location to kernel location
+fn convert_span(span: Span) -> Loc {
+    let (x1, y1) = span.start_pos().line_col();
+    let (x2, y2) = span.end_pos().line_col();
+    Loc::new(x1, y1, x2, y2)
+}
+
+fn build_term_from_expr(pair: Pair<Rule>, known_vars: &mut VecDeque<String>) -> Term {
+    // location to be used in a future version
+    let _loc = convert_span(pair.as_span());
+
+    match pair.as_rule() {
+        Rule::Prop => Term::Prop,
+        Rule::Type => Term::Type(pair.into_inner().fold(BigUint::from(0_u64).into(), |_, x| {
+            x.as_str().parse::<BigUint>().unwrap().into()
+        })),
         Rule::Var => {
-            let span = pair.as_span();
+            //issue #18 TODO use a hash map of known variables
             let name = pair.into_inner().as_str().to_string();
             match known_vars.iter().position(|x| *x == name) {
-                Some(i) => Ok(Term::Var((i + 1).into())),
-                None => Err(box Error::new_from_span(
-                    ErrorVariant::CustomError {
-                        message: String::from("free variable: ") + &name,
-                    },
-                    span,
-                )),
+                Some(i) => Term::Var((i + 1).into()),
+                None => Term::Const(name),
             }
         }
-        Rule::Prod => {
-            let mut iter = pair
-                .into_inner()
-                .map(|x| build_term_from_expr(x, known_vars))
-                .rev();
-            let t = iter.next().unwrap()?;
-            iter.try_fold(t, |acc, x| Ok(Term::Prod(box x?, box acc)))
-        }
         Rule::App => {
             let mut iter = pair
                 .into_inner()
                 .map(|x| build_term_from_expr(x, known_vars));
-            let t = iter.next().unwrap()?;
-            iter.try_fold(t, |acc, x| Ok(Term::App(box acc, box x?)))
+            let t = iter.next().unwrap();
+            iter.fold(t, |acc, x| Term::App(box acc, box x))
         }
         Rule::Abs => {
-            let iter = pair.into_inner();
-            let mut iter2 = iter.clone().rev();
+            let mut iter = pair.into_inner();
+            let pair = iter.next_back().unwrap();
+            let mut terms = Vec::new();
             for pair in iter {
-                if pair.as_rule() == Rule::arg_abs {
-                    let name = pair.into_inner().next().unwrap().as_str().to_string();
-                    known_vars.push_front(name);
+                let mut iter = pair.into_inner().rev();
+                let t = build_term_from_expr(iter.next().unwrap(), known_vars);
+                for pair in iter.rev() {
+                    known_vars.push_front(pair.as_str().to_string());
+                    terms.push(t.clone());
                 }
             }
-            let t = build_term_from_expr(iter2.next().unwrap(), known_vars)?;
-            iter2.try_fold(t, |acc, x| {
+            let t = build_term_from_expr(pair, known_vars);
+            terms.into_iter().rev().fold(t, |acc, x| {
                 known_vars.pop_front();
-                let mut iter3 = x.into_inner();
-                iter3.next();
-                let t = build_term_from_expr(iter3.next().unwrap(), known_vars)?;
-                Ok(Term::Abs(box t, box acc))
+                Term::Abs(box x, box acc)
             })
         }
         Rule::dProd => {
-            let iter = pair.into_inner();
-            let mut iter2 = iter.clone().rev();
+            let mut iter = pair.into_inner();
+            let pair = iter.next_back().unwrap();
+            let mut terms = Vec::new();
             for pair in iter {
-                if pair.as_rule() == Rule::arg_dprod {
-                    let name = pair.into_inner().next().unwrap().as_str().to_string();
-                    known_vars.push_front(name);
+                let mut iter = pair.into_inner().rev();
+                let t = build_term_from_expr(iter.next().unwrap(), known_vars);
+                for pair in iter.rev() {
+                    known_vars.push_front(pair.as_str().to_string());
+                    terms.push(t.clone());
                 }
             }
-            let t = build_term_from_expr(iter2.next().unwrap(), known_vars)?;
-            iter2.try_fold(t, |acc, x| {
+            let t = build_term_from_expr(pair, known_vars);
+            terms.into_iter().rev().fold(t, |acc, x| {
                 known_vars.pop_front();
-                let mut iter3 = x.into_inner();
-                iter3.next();
-                let t = build_term_from_expr(iter3.next().unwrap(), known_vars)?;
-                Ok(Term::Prod(box t, box acc))
+                Term::Prod(box x, box acc)
             })
         }
+        Rule::Prod => {
+            let mut iter = pair.into_inner();
+            let pair = iter.next_back().unwrap();
+            let mut terms = Vec::new();
+            for pair in iter {
+                let t = build_term_from_expr(pair, known_vars);
+                known_vars.push_front("_".to_string());
+                terms.push(t);
+            }
+            let t = build_term_from_expr(pair, known_vars);
+            terms.into_iter().rev().fold(t, |acc, x| {
+                known_vars.pop_front();
+                Term::Prod(box x, box acc)
+            })
+        }
-        term => panic!("Unexpected term: {:?}", term),
+        term => unreachable!("Unexpected term: {:?}", term),
     }
 }

-fn build_command_from_expr(pair: Pair<Rule>) -> Result<Command, Box<Error<Rule>>> {
+fn build_command_from_expr(pair: Pair<Rule>) -> Command {
+    // location to be used in a future version
+    let _loc = convert_span(pair.as_span());
+
     match pair.as_rule() {
         Rule::GetType => {
             let mut iter = pair.into_inner();
-            let t = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new())?;
-            Ok(Command::GetType(t))
+            let t = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new());
+            Command::GetType(t)
         }
         Rule::CheckType => {
             let mut iter = pair.into_inner();
-            let t1 = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new())?;
-            let t2 = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new())?;
-            Ok(Command::CheckType(t1, t2))
+            let t1 = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new());
+            let t2 = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new());
+            Command::CheckType(t1, t2)
         }
         Rule::Define => {
             let mut iter = pair.into_inner();
             let s = iter.next().unwrap().as_str().to_string();
-            let t = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new())?;
-            Ok(Command::Define(s, t))
+            let term = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new());
+            Command::Define(s, None, term)
         }
         Rule::DefineCheckType => {
             let mut iter = pair.into_inner();
             let s = iter.next().unwrap().as_str().to_string();
-            let t1 = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new())?;
-            let t2 = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new())?;
-            Ok(Command::DefineCheckType(s, t1, t2))
+            let t = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new());
+            let term = build_term_from_expr(iter.next().unwrap(), &mut VecDeque::new());
+            Command::Define(s, Some(t), term)
         }
-        Rule::error_dot => Err(box Error::new_from_pos(
-            ErrorVariant::CustomError {
-                message: String::from("missing dot"),
-            },
-            pair.as_span().end_pos(),
-        )),
-        command => panic!("Unexpected command: {:?}", command),
+        command => unreachable!("Unexpected command: {:?}", command),
     }
 }

+// convert pest error to kernel error
+fn convert_error(err: Error<Rule>) -> KernelError {
+    // renaming error messages
+    let err = err.renamed_rules(|rule| match *rule {
+        Rule::string | Rule::Var => "variable".to_owned(),
+        Rule::number => "number".to_owned(),
+        Rule::Define => "def var := term".to_owned(),
+        Rule::CheckType => "check term : term".to_owned(),
+        Rule::GetType => "check term".to_owned(),
+        Rule::DefineCheckType => "def var : term := term".to_owned(),
+        Rule::Abs => "abstraction".to_owned(),
+        Rule::dProd => "dependent product".to_owned(),
+        Rule::Prod => "product".to_owned(),
+        Rule::App => "application".to_owned(),
+        Rule::Prop => "Prop".to_owned(),
+        Rule::Type => "Type".to_owned(),
+        _ => "".to_owned(),
+    });
+
+    // extracting the location from the pest output
+    let loc = match err.line_col {
+        LineColLocation::Pos((x, y)) => {
+            let mut right = y;
+            let mut left = 1;
+            let chars = err.line().chars();
+            let mut i = 0;
+            for c in chars {
+                i += 1;
+                if char::is_whitespace(c) {
+                    match i.cmp(&y) {
+                        Ordering::Less => left = i + 1,
+                        Ordering::Equal => left = y,
+                        Ordering::Greater => break,
+                    }
+                } else {
+                    right = i
+                }
+            }
+            if i < y {
+                left = y;
+                right = y;
+            }
+            Loc::new(x, left, x, right)
+        }
+        LineColLocation::Span((x1, y1), (x2, y2)) => Loc::new(x1, y1, x2, y2),
+    };
+
+    // extracting the message from the pest output
+    let message = err.to_string();
+    let mut chars = message.lines().next_back().unwrap().chars();
+    for _ in 0..4 {
+        chars.next();
+    }
+
+    KernelError::CannotParse(loc, chars.as_str().to_string())
+}
+
+/// Parse a text input and try to convert it into a command.
+///
+/// If unsuccessful, a box containing the first error that was encountered is returned.
+pub fn parse_line(line: &str) -> Result<Command, KernelError> {
+    match CommandParser::parse(Rule::command, line) {
+        Ok(mut pairs) => Ok(build_command_from_expr(pairs.next().unwrap())),
+        Err(err) => Err(convert_error(err)),
+    }
+}

 /// Parse a text input and try to convert it into a vector of commands.
 ///
 /// If unsuccessful, a box containing the first error that was encountered is returned.
-pub fn parse_commands(file: &str) -> Result<Vec<Command>, Box<Error<Rule>>> {
-    CommandParser::parse(Rule::File, file)?
-        .map(build_command_from_expr)
-        .collect()
+pub fn parse_file(file: &str) -> Result<Vec<Command>, KernelError> {
+    match CommandParser::parse(Rule::file, file) {
+        Ok(pairs) => Ok(pairs.into_iter().map(build_command_from_expr).collect()),
+        Err(err) => Err(convert_error(err)),
+    }
 }
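In short, parse_line handles one REPL input and parse_file a whole file, both returning kernel values directly: unknown identifiers become Term::Const (the kernel reports them later if they are really undefined), and pest failures are translated into KernelError::CannotParse with a Loc. A sketch of the new entry points, assuming the public API exactly as exported above:

    use kernel::{Command, KernelError};
    use parser::{parse_file, parse_line};

    fn main() {
        // a single line; the trailing dot required by the old grammar is gone
        match parse_line("def id := fun x : Prop => x") {
            Ok(Command::Define(name, None, _term)) => assert_eq!(name, "id"),
            other => panic!("unexpected parse result: {:?}", other),
        }

        // a file is a newline-separated sequence of commands
        assert!(parse_file("check Prop : Type\ncheck Prop").is_ok());

        // syntax errors come back as CannotParse, carrying a location for the caret display
        assert!(matches!(
            parse_line("def oops :="),
            Err(KernelError::CannotParse(_, _))
        ));
    }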
parser/src/term.pest

@@ -2,39 +2,35 @@ WHITESPACE = _{ WHITE_SPACE }
 COMMENT = _{ "#" ~ (!"\n" ~ ANY)* }

 number = @{ ASCII_DIGIT+ }
 string = @{ !keywords ~ ASCII_ALPHA ~ ( "_" | ASCII_ALPHANUMERIC )* }
-keywords = @{ ( "fun" | "type" | "def" | "check" | "Prop" | "Type" ) ~ !ASCII_ALPHANUMERIC }
+keywords = @{ ( "fun" | "def" | "check" | "Prop" | "Type" ) ~ !ASCII_ALPHANUMERIC }
 eoi = _{ !ANY }

 Term = _{ Abs | dProd | Prod | App | Var | Prop | Type | "(" ~ Term ~ ")" }
 term_prod = _{ App | Abs | dProd | Var | Prop | Type | "(" ~ term_prod ~ ")" }
-term_app = _{ Abs | dProd | Var | Prop | Type | "(" ~ Prod ~ ")" | "(" ~ term_app ~ ")" }
-term_dprod = _{ App | Abs | dProd | Var | Prop | Type | "(" ~ Prod ~ ")" | "(" ~ term_dprod ~ ")" }
-term_abs = _{ App | Abs | dProd | Var | Prop | Type | "(" ~ Prod ~ ")" | "(" ~ term_abs ~ ")" }
+term_app = _{ Abs | Var | Prop | Type | "(" ~ Prod ~ ")" | "(" ~ dProd ~ ")" | "(" ~ term_app ~ ")" }
+term_dprod = _{ App | Abs | Prod | dProd | Var | Prop | Type | "(" ~ term_dprod ~ ")" }
+term_abs = _{ App | Abs | Prod | dProd | Var | Prop | Type | "(" ~ term_abs ~ ")" }

 Abs = { ( "fun" ~ ( arg_abs_par ~ ( "," ~ arg_abs_par )* ) ~ "=>" ~ Term ) }
-Abs_ = { ( "fun" ~ ( string ~ ( "," ~ string )* ) ~ "=>" ~ Term ) }
-arg_abs = { ( string ~ ":" ~ term_abs ) }
-arg_abs_par = _{ arg_abs | "(" ~ arg_abs ~ ")" }
+arg_abs_par = _{ arg_abs | "(" ~ arg_abs_par ~ ")" }
+arg_abs = { string+ ~ ":" ~ term_abs }

 dProd = { "(" ~ ( arg_dprod_par ~ ( "," ~ arg_dprod_par )* ) ~ ")" ~ "->" ~ Term }
-dProd_ = { "(" ~ ( string ~ ( "," ~ string )* ) ~ ")" ~ "->" ~ Term }
-arg_dprod = { ( string ~ ":" ~ term_dprod ) }
-arg_dprod_par = _{ arg_dprod | "(" ~ arg_dprod ~ ")" }
+arg_dprod_par = _{ arg_dprod | "(" ~ arg_dprod_par ~ ")" }
+arg_dprod = { string+ ~ ":" ~ term_dprod }

 App = { term_app ~ term_app+ }
 Prod = { term_prod ~ ( "->" ~ term_prod )+ }
 Prop = { "Prop" }
-Type = { "Type(" ~ number ~ ")" }
+Type = { "Type(" ~ number ~ ")" | "Type" ~ number? }
 Var = { string }

-Command = _{ ( Define | CheckType | GetType | DefineCheckType ) ~ "." }
+Command = _{ Define | CheckType | GetType | DefineCheckType }
 Define = { "def" ~ string ~ ":=" ~ Term }
 DefineCheckType = { "def" ~ string ~ ":" ~ Term ~ ":=" ~ Term }
 CheckType = { "check" ~ Term ~ ":" ~ Term }
-GetType = { "type" ~ Term }
+GetType = { "check" ~ Term }

-Error = _{ error_dot }
-error_dot = { ( Define | CheckType | GetType | DefineCheckType ) }
-File = _{ SOI ~ ( NEWLINE* ~ ( Command | Error ) )* ~ NEWLINE* ~ eoi }
+command = _{ SOI ~ Command ~ eoi }
+file = _{ SOI ~ NEWLINE* ~ (Command ~ NEWLINE+)* ~ Command? ~ eoi }
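The grammar changes are easiest to see through the inputs they accept: the command dot is gone, check subsumes the old type keyword, Type works with or without parentheses, and a binder can annotate several names at once. A few illustrative inputs run through parse_line (the exact strings are examples, not tests from the repository):

    use parser::parse_line;

    fn main() {
        // Type, Type 1 and Type(1) are all accepted by the new Type rule
        assert!(parse_line("check Type").is_ok());
        assert!(parse_line("check Type 1").is_ok());
        assert!(parse_line("check Type(1)").is_ok());

        // arg_abs is now string+ so several names can share one annotation
        assert!(parse_line("def fst := fun x y : Prop => x").is_ok());

        // "check t : ty" is CheckType, a bare "check t" is GetType
        assert!(parse_line("check Prop : Type").is_ok());
        assert!(parse_line("check Prop").is_ok());
    }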
proost/Cargo.toml

@@ -7,6 +7,8 @@ description = "A simple proof assistant written in Rust"
 [dependencies]
 parser = { path = "../parser" }
+kernel = { path = "../kernel" }
 clap = { version = "4.0.10", features = ["derive"]}
 rustyline = "10.0.0"
+colored = "2"
proost/src/main.rs

 #![feature(box_syntax)]

+mod process;
+
 use clap::Parser;
-use parser::parse_commands;
+use kernel::Environment;
+use process::*;
 use rustyline::error::ReadlineError;
 use rustyline::Editor;
 use std::error::Error;
 use std::fs;

+// clap configuration
 #[derive(Parser)]
 #[command(author, version, about, long_about = None)]
-pub struct Args {
+struct Args {
     files: Vec<String>,
-    #[arg(short, long, action)]
-    banner: bool,
 }

+// constants fetching
 const VERSION: &str = env!("CARGO_PKG_VERSION");
 const NAME: &str = env!("CARGO_PKG_NAME");
@@ -24,11 +27,9 @@ fn main() -> Result<(), Box<dyn Error>> {
     if !args.files.is_empty() {
         for path in args.files.iter() {
             match fs::read_to_string(path) {
-                Ok(contents) => {
-                    let _ = parse_commands(&contents);
-                }
+                Ok(_contents) => (),
                 Err(_) => {
-                    println!("Error: No such file or directory: {}", path);
+                    println!("no such file or directory: {}", path);
                 }
             }
         }
@@ -37,25 +38,16 @@
     let mut rl_err: Option<ReadlineError> = None;
     let mut rl = Editor::<()>::new()?;

-    if args.banner {
-        println!("#Insert banner here#\n# This is a test #")
-    }
     println!("Welcome to {} {}", NAME, VERSION);

+    let mut env = Environment::new();
+
     loop {
-        let readline = rl.readline(">> ");
+        let readline = rl.readline("\u{00BB} ");
         match readline {
             Ok(line) if !line.is_empty() => {
                 rl.add_history_entry(line.as_str());
-                match parse_commands(line.as_str()) {
-                    Ok(commands) => {
-                        for command in commands {
-                            println!("{}", command);
-                        }
-                    }
-                    Err(err) => println!("{}", *err),
-                }
+                print_repl(process_line(line.as_str(), &mut env));
             }
             Ok(_) => (),
             Err(ReadlineError::Interrupted) => {}
...
proost/src/process.rs (new file)

+use colored::*;
+use kernel::{Environment, KernelError, Term};
+use parser::*;
+
+pub fn process_line(line: &str, env: &mut Environment) -> Result<Option<Term>, KernelError> {
+    parse_line(line)?.process(env)
+}
+
+pub fn print_repl(res: Result<Option<Term>, KernelError>) {
+    match res {
+        Ok(None) => println!("{}", "\u{2713}".green()),
+        Ok(Some(t)) => {
+            for line in t.to_string().lines() {
+                println!("{} {}", "\u{2713}".green(), line)
+            }
+        }
+        Err(err) => {
+            let string = match err {
+                KernelError::CannotParse(loc, message) => {
+                    if loc.column1 == loc.column2 {
+                        format!("{:0w1$}^\n{m}", "", w1 = loc.column1 - 1, m = message)
+                    } else {
+                        format!(
+                            "{:0w1$}^{:-<w2$}^\n{m}",
+                            "",
+                            "",
+                            w1 = loc.column1 - 1,
+                            w2 = loc.column2 - loc.column1 - 1,
+                            m = message
+                        )
+                    }
+                }
+                _ => err.to_string(),
+            };
+            for line in string.lines() {
+                println!("{} {}", "\u{2717}".red(), line)
+            }
+        }
+    }
+}
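For context, these two helpers are what the rustyline loop in main.rs calls on every input line: process_line parses and runs a command against the shared Environment, and print_repl renders a green check mark on success (plus the term for check queries) or a red cross with a caret underline for parse errors. A hypothetical standalone driver showing the same wiring (the run function and its import path are illustrative, not part of the change):

    // assumed to live next to process.rs inside the proost binary crate
    use crate::process::{print_repl, process_line};
    use kernel::Environment;

    // feed a few lines through the same path as the interactive REPL
    fn run(lines: &[&str]) {
        let mut env = Environment::new();
        for line in lines {
            // ✓ (green) on success, ✗ (red) plus a caret underline on parse errors
            print_repl(process_line(line, &mut env));
        }
    }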