Compare commits

...

2 commits

9 changed files with 134 additions and 21 deletions

1
Cargo.lock generated
View file

@ -503,6 +503,7 @@ dependencies = [
name = "lang" name = "lang"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"ariadne",
"chumsky", "chumsky",
"clap", "clap",
"indexmap", "indexmap",

View file

@ -7,10 +7,11 @@ edition = "2021"
[dependencies] [dependencies]
logos = "0.14" logos = "0.14"
chumsky = "1.0.0-alpha.6" chumsky = {version= "1.0.0-alpha.6", features=["label"]}
petgraph = { workspace = true} petgraph = { workspace = true}
indexmap = "2.2.6" indexmap = "2.2.6"
clap = { version = "4", features = ["derive"] } clap = { version = "4", features = ["derive"] }
ariadne = "0.4.0"
[lints] [lints]
workspace = true workspace = true

View file

@ -0,0 +1,87 @@
use std::{collections::HashMap, fs};
use ariadne::{sources, Label, Report, Source};
use chumsky::{
error::{self, Rich},
ParseResult,
};
use indexmap::IndexMap;
use crate::{
parser::{ast::File, Span},
tokens::Token,
};
/// Compilation stage during which a batch of errors was produced.
///
/// Fieldless and cheap, so it derives `Clone`/`Copy` — it is used (moved)
/// as part of the `(filename, Stage)` key in the error map.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Stage {
    /// Tokenization of the raw source text.
    Lex,
    /// Parsing of the token stream.
    Parse,
}

impl Stage {
    /// All stages, in pipeline order; used to pre-seed one error bucket
    /// per `(file, stage)` pair at collector construction time.
    fn variants() -> [Stage; 2] {
        [Stage::Lex, Stage::Parse]
    }
}
/// Accumulates lex/parse errors per source file and pipeline stage so they
/// can all be reported in one pass at the end of a run.
pub struct ErrorCollector<'filename, 'tokens, 'src> {
    // filename -> full source text; used as the ariadne source cache when
    // printing reports.
    files: HashMap<&'filename str, &'src str>,
    // One bucket of rich errors per (filename, stage) pair. IndexMap keeps
    // insertion (registration) order, so reports come out deterministically.
    raw_errors: IndexMap<(&'filename str, Stage), Vec<error::Rich<'tokens, Token<'src>, Span>>>,
}
impl<'filename, 'tokens, 'src> ErrorCollector<'filename, 'tokens, 'src> {
pub fn new(files: Vec<(&'filename str, &'src str)>) -> Self {
Self {
files: HashMap::from_iter(files.clone()),
raw_errors: files
.iter()
.flat_map(|(name, _)| Stage::variants().map(|s| (name, s)))
.map(|(name, stage)| ((*name, stage), Vec::new()))
.collect(),
}
}
pub fn insert_many(
&mut self,
file: &'filename str,
curr_stage: Stage,
mut errs: Vec<error::Rich<'tokens, Token<'src>, Span>>,
) {
let err_vec = self
.raw_errors
.get_mut(&(file, curr_stage))
.expect("filename should exist");
err_vec.append(&mut errs);
}
pub fn analyze_and_report(self) {
let ErrorCollector { files, raw_errors } = self;
todo!()
}
pub fn report_raw(self) {
let ErrorCollector { files, raw_errors } = self;
for ((file, stage), errs) in raw_errors.into_iter() {
for err in errs {
Report::build(ariadne::ReportKind::Error, file, err.span().start)
.with_message(format!("error at stage {stage:?}, {:?}", err.reason()))
.with_label(
Label::new((file, err.span().into_range())).with_message(format!(
"found: {:?}",
err.found().expect("errors should have a reason")
)),
)
.with_help(format!(
"expected: {:?}",
err.expected().collect::<Vec<_>>()
))
.finish()
.print((file, Source::from(files[file])));
}
}
}
}
/// A source location: a filename paired with a span inside that file.
// NOTE(review): unused in this file as shown — presumably intended for the
// cross-file analysis in `analyze_and_report`; confirm before relying on it.
#[derive(Debug, PartialEq, Eq)]
struct Loc<'filename>(&'filename str, Span);

View file

@ -1,5 +1,3 @@
pub mod err_reporting;
pub mod parser; pub mod parser;
pub mod tokens; pub mod tokens;
pub mod err_reporting {
pub struct GlobalReporter {}
}

View file

@ -1,17 +1,32 @@
use std::{fs, path::PathBuf}; use std::{fs, path::PathBuf};
use clap::Parser; use clap::Parser;
use lang::parser::parse; use lang::{err_reporting::ErrorCollector, parser::parse};
#[derive(Parser)] #[derive(Parser)]
struct Args { struct Args {
file: PathBuf, file: PathBuf,
} }
#[allow(clippy::unwrap_used)]
fn main() { fn main() {
let args = Args::parse(); let args = Args::parse();
let f = fs::read_to_string(args.file).expect("failed to read file"); let n = args.file.clone();
let f = fs::read_to_string(n.clone()).expect("failed to read file");
let mut err_collector = ErrorCollector::new(vec![(n.to_str().unwrap(), &f)]);
println!("file: {f}\n"); println!("file: {f}\n");
println!("parsed: {:?}", parse(&f)) let parse_res = parse(&f);
err_collector.insert_many(
args.file.to_str().unwrap(),
lang::err_reporting::Stage::Parse,
parse_res
.errors()
.into_iter()
.map(|e| e.to_owned())
.collect::<Vec<_>>(),
);
err_collector.report_raw();
println!("res: {:?}", parse_res);
} }

View file

@ -21,8 +21,8 @@ pub mod ast;
mod tests; mod tests;
use self::ast::{Expr, File}; use self::ast::{Expr, File};
type Span = SimpleSpan; pub type Span = SimpleSpan;
type Spanned<T> = (T, Span); pub type Spanned<T> = (T, Span);
pub fn parse<'src>(src: &'src str) -> ParseResult<File<'_>, Rich<'_, Token<'_>>> { pub fn parse<'src>(src: &'src str) -> ParseResult<File<'_>, Rich<'_, Token<'_>>> {
let toks: Vec<_> = Token::lexer(src) let toks: Vec<_> = Token::lexer(src)
@ -43,24 +43,29 @@ fn expr_parser<'tokens, 'src: 'tokens, I: ValueInput<'tokens, Token = Token<'src
Token::VarIdent(name) => (Expr::Var as fn(_) -> _, name), Token::VarIdent(name) => (Expr::Var as fn(_) -> _, name),
Token::InputIdent(name) => (Expr::InputVar as fn(_) -> _, name) Token::InputIdent(name) => (Expr::InputVar as fn(_) -> _, name)
} }
.map_with(|(item_type, name), extra| item_type((name, extra.span()))); .map_with(|(item_type, name), extra| item_type((name, extra.span())))
.labelled("variable");
let attrset = word let attrset = word
.map_with(|n, e| (n, e.span())) .map_with(|n, e| (n, e.span()))
.labelled("attr name")
.then_ignore(just(Token::Colon)) .then_ignore(just(Token::Colon))
.then(expr) .then(expr)
.labelled("attr body")
.separated_by(just(Token::Comma)) .separated_by(just(Token::Comma))
.collect::<Vec<_>>() .collect::<Vec<_>>()
.map(IndexMap::from_iter) .map(IndexMap::from_iter)
.delimited_by(just(Token::BracketOpen), just(Token::BracketClose)) .delimited_by(just(Token::BracketOpen), just(Token::BracketClose))
.map_with(|v, e| (v, e.span())); .map_with(|v, e| (v, e.span()))
.labelled("attrset");
let node = word let node = word
.map_with(|v, e| (v, e.span())) .map_with(|v, e| (v, e.span()))
.then(attrset.clone().or_not()) .then(attrset.clone().or_not())
.map(|(name, params)| Expr::Node(name, params)) .map(|(name, params)| Expr::Node(name, params))
.or(var) .or(var)
.or(attrset.map(Expr::AttrSet)); .or(attrset.map(Expr::AttrSet))
.labelled("node");
let pipeline = node let pipeline = node
.clone() .clone()

View file

@ -39,7 +39,7 @@ fn test_parse_node_with_params() {
} }
fn test_parse_multiple_top_level_complex() { fn test_parse_multiple_top_level_complex() {
const INPUT: &str = r#"def main = meow const INPUT: &str = r"def main = meow
| uwu | uwu
[ foo: @bar [ foo: @bar
, hello: world @| test [ more: params ] | yay , hello: world @| test [ more: params ] | yay
@ -52,7 +52,7 @@ def test = meow
[ hello: $foo [ hello: $foo
, world: @bar , world: @bar
]; ];
"#; ";
assert_eq!( assert_eq!(
parse(INPUT).unwrap(), parse(INPUT).unwrap(),
File { File {

View file

@ -11,7 +11,10 @@ fn test_chrom_abb() {
let chan_r = chromabb.color_matrix( let chan_r = chromabb.color_matrix(
StandardInput::SourceGraphic, StandardInput::SourceGraphic,
ColorMatrixType::Matrix(Box::new([ ColorMatrixType::Matrix(Box::new([
1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 1., 0., 0., 0., 0., //
0., 0., 0., 0., 0., //
0., 0., 0., 0., 0., //
0., 0., 0., 1., 0.,
])), ])),
); );
let offset_r = chromabb.offset(chan_r, 25., 0.); let offset_r = chromabb.offset(chan_r, 25., 0.);
@ -20,7 +23,10 @@ fn test_chrom_abb() {
let chan_b = chromabb.color_matrix( let chan_b = chromabb.color_matrix(
StandardInput::SourceGraphic, StandardInput::SourceGraphic,
ColorMatrixType::Matrix(Box::new([ ColorMatrixType::Matrix(Box::new([
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0., //
0., 0., 0., 0., 0., //
0., 0., 1., 0., 0., //
0., 0., 0., 1., 0.,
])), ])),
); );
let offset_b = chromabb.offset(chan_b, -25., 0.); let offset_b = chromabb.offset(chan_b, -25., 0.);
@ -31,7 +37,10 @@ fn test_chrom_abb() {
let chan_g = chromabb.color_matrix( let chan_g = chromabb.color_matrix(
StandardInput::SourceGraphic, StandardInput::SourceGraphic,
ColorMatrixType::Matrix(Box::new([ ColorMatrixType::Matrix(Box::new([
0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0., //
0., 1., 0., 0., 0., //
0., 0., 0., 0., 0., //
0., 0., 0., 1., 0.,
])), ])),
); );
chromabb.composite_arithmetic(composite_rb, chan_g, 0., 1., 1., 0.); chromabb.composite_arithmetic(composite_rb, chan_g, 0., 1., 1., 0.);

View file

@ -1,4 +1 @@
def main = meow | test; meow gay | uwu
def test = meow [ hello: $foo, world: @bar];
def uwu = owo;