svg-filters & basic parser #15

Merged
schrottkatze merged 67 commits from schrottkatze/iowo:svg-filters into main 2024-07-08 18:29:05 +00:00
8 changed files with 122 additions and 18 deletions
Showing only changes of commit 84448af714

Cargo.lock (generated)
View file

@@ -503,6 +503,7 @@ dependencies = [
name = "lang"
version = "0.1.0"
dependencies = [
"ariadne",
"chumsky",
"clap",
"indexmap",

View file

@@ -7,10 +7,11 @@ edition = "2021"
[dependencies]
logos = "0.14"
chumsky = "1.0.0-alpha.6"
chumsky = {version= "1.0.0-alpha.6", features=["label"]}
petgraph = { workspace = true}
indexmap = "2.2.6"
clap = { version = "4", features = ["derive"] }
ariadne = "0.4.0"
[lints]
workspace = true

View file

@@ -0,0 +1,87 @@
use std::{collections::HashMap, fs};
use ariadne::{sources, Label, Report, Source};
use chumsky::{
error::{self, Rich},
ParseResult,
};
use indexmap::IndexMap;
use crate::{
parser::{ast::File, Span},
tokens::Token,
};
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum Stage {
Lex,
Parse,
}
impl Stage {
fn variants() -> [Stage; 2] {
[Stage::Lex, Stage::Parse]
}
}
pub struct ErrorCollector<'filename, 'tokens, 'src> {
files: HashMap<&'filename str, &'src str>,
raw_errors: IndexMap<(&'filename str, Stage), Vec<error::Rich<'tokens, Token<'src>, Span>>>,
}
impl<'filename, 'tokens, 'src> ErrorCollector<'filename, 'tokens, 'src> {
pub fn new(files: Vec<(&'filename str, &'src str)>) -> Self {
Self {
files: HashMap::from_iter(files.clone()),
raw_errors: files
.iter()
.flat_map(|(name, _)| Stage::variants().map(|s| (name, s)))
.map(|(name, stage)| ((*name, stage), Vec::new()))
.collect(),
}
}
pub fn insert_many(
&mut self,
file: &'filename str,
curr_stage: Stage,
mut errs: Vec<error::Rich<'tokens, Token<'src>, Span>>,
) {
let err_vec = self
.raw_errors
.get_mut(&(file, curr_stage))
.expect("filename should exist");
err_vec.append(&mut errs);
}
pub fn analyze_and_report(self) {
let ErrorCollector { files, raw_errors } = self;
todo!()
}
pub fn report_raw(self) {
let ErrorCollector { files, raw_errors } = self;
for ((file, stage), errs) in raw_errors.into_iter() {
for err in errs {
Report::build(ariadne::ReportKind::Error, file, err.span().start)
.with_message(format!("error at stage {stage:?}, {:?}", err.reason()))
.with_label(
Label::new((file, err.span().into_range())).with_message(format!(
"found: {:?}",
err.found().expect("errors should have a reason")
)),
)
.with_help(format!(
"expected: {:?}",
err.expected().collect::<Vec<_>>()
))
.finish()
.print((file, Source::from(files[file])));
}
}
}
}
#[derive(Debug, PartialEq, Eq)]
struct Loc<'filename>(&'filename str, Span);
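
For orientation, a minimal usage sketch of the new ErrorCollector (an editorial illustration, not code from the PR; the file name "example.iowo" is invented, everything else is taken from this file and from the main.rs changes below):

// Editorial sketch mirroring how main.rs wires the collector up.
use lang::{err_reporting::{ErrorCollector, Stage}, parser::parse};

fn report_parse_errors() {
    let src = std::fs::read_to_string("example.iowo").unwrap(); // made-up path
    let mut errs = ErrorCollector::new(vec![("example.iowo", src.as_str())]);
    let res = parse(&src);
    // collect this file's parse errors under the Parse stage
    errs.insert_many(
        "example.iowo",
        Stage::Parse,
        res.errors().map(|e| e.to_owned()).collect(),
    );
    // print each error with ariadne, tagged with its stage and span
    errs.report_raw();
}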

View file

@@ -1,5 +1,3 @@
pub mod err_reporting;
pub mod parser;
pub mod tokens;
pub mod err_reporting {
pub struct GlobalReporter {}
}

View file

@@ -1,17 +1,32 @@
use std::{fs, path::PathBuf};
use clap::Parser;
use lang::parser::parse;
use lang::{err_reporting::ErrorCollector, parser::parse};
#[derive(Parser)]
struct Args {
file: PathBuf,
}
#[allow(clippy::unwrap_used)]
fn main() {
let args = Args::parse();
let f = fs::read_to_string(args.file).expect("failed to read file");
let n = args.file.clone();
let f = fs::read_to_string(n.clone()).expect("failed to read file");
let mut err_collector = ErrorCollector::new(vec![(n.to_str().unwrap(), &f)]);
println!("file: {f}\n");
println!("parsed: {:?}", parse(&f))
let parse_res = parse(&f);
err_collector.insert_many(
args.file.to_str().unwrap(),
lang::err_reporting::Stage::Parse,
parse_res
.errors()
.into_iter()
.map(|e| e.to_owned())
.collect::<Vec<_>>(),
);
err_collector.report_raw();
println!("res: {:?}", parse_res);
}

View file

@@ -21,8 +21,8 @@ pub mod ast;
mod tests;
use self::ast::{Expr, File};
type Span = SimpleSpan;
type Spanned<T> = (T, Span);
pub type Span = SimpleSpan;
pub type Spanned<T> = (T, Span);
pub fn parse<'src>(src: &'src str) -> ParseResult<File<'_>, Rich<'_, Token<'_>>> {
let toks: Vec<_> = Token::lexer(src)
@@ -43,24 +43,29 @@ fn expr_parser<'tokens, 'src: 'tokens, I: ValueInput<'tokens, Token = Token<'src
Token::VarIdent(name) => (Expr::Var as fn(_) -> _, name),
Token::InputIdent(name) => (Expr::InputVar as fn(_) -> _, name)
}
.map_with(|(item_type, name), extra| item_type((name, extra.span())));
.map_with(|(item_type, name), extra| item_type((name, extra.span())))
.labelled("variable");
let attrset = word
.map_with(|n, e| (n, e.span()))
.labelled("attr name")
.then_ignore(just(Token::Colon))
.then(expr)
.labelled("attr body")
.separated_by(just(Token::Comma))
.collect::<Vec<_>>()
.map(IndexMap::from_iter)
.delimited_by(just(Token::BracketOpen), just(Token::BracketClose))
.map_with(|v, e| (v, e.span()));
.map_with(|v, e| (v, e.span()))
.labelled("attrset");
let node = word
.map_with(|v, e| (v, e.span()))
.then(attrset.clone().or_not())
.map(|(name, params)| Expr::Node(name, params))
.or(var)
.or(attrset.map(Expr::AttrSet));
.or(attrset.map(Expr::AttrSet))
.labelled("node");
let pipeline = node
.clone()
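
A note on how the labelled parsers connect to the error reporting added in this commit (sketched here, not taken from the PR text): the labels attached above travel with the Rich errors, and with chumsky's label feature enabled they can show up in err.expected(), which ErrorCollector::report_raw prints in its help line. The broken input below is invented for illustration:

// Editorial sketch; the input string is deliberately malformed.
let res = lang::parser::parse("def main = meow [");
for err in res.errors() {
    // with features = ["label"], expected() may yield labels such as
    // "attr name" or "node" instead of bare token names
    println!("expected: {:?}", err.expected().collect::<Vec<_>>());
}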

View file

@@ -39,7 +39,7 @@ fn test_parse_node_with_params() {
}
fn test_parse_multiple_top_level_complex() {
const INPUT: &str = r#"def main = meow
const INPUT: &str = r"def main = meow
| uwu
[ foo: @bar
, hello: world @| test [ more: params ] | yay
@@ -52,7 +52,7 @@ def test = meow
[ hello: $foo
, world: @bar
];
"#;
";
assert_eq!(
parse(INPUT).unwrap(),
File {

View file

@@ -1,4 +1 @@
def main = meow | test;
def test = meow [ hello: $foo, world: @bar];
def uwu = owo;
meow gay | uwu