forked from katzen-cafe/iowo
Compare commits
No commits in common. "ace69b0094e454ff3617300d17cb2b7ef6870e96" and "ae60db77216343d7f259a733ccd2c1e53f95dd0c" have entirely different histories.
ace69b0094...ae60db7721
9 changed files with 21 additions and 134 deletions
Cargo.lock (generated), 1 change
@@ -503,7 +503,6 @@ dependencies = [
 name = "lang"
 version = "0.1.0"
 dependencies = [
- "ariadne",
  "chumsky",
  "clap",
  "indexmap",

@@ -7,11 +7,10 @@ edition = "2021"
 
 [dependencies]
 logos = "0.14"
-chumsky = {version= "1.0.0-alpha.6", features=["label"]}
+chumsky = "1.0.0-alpha.6"
 petgraph = { workspace = true}
 indexmap = "2.2.6"
 clap = { version = "4", features = ["derive"] }
-ariadne = "0.4.0"
 
 [lints]
 workspace = true

@@ -1,87 +0,0 @@
-use std::{collections::HashMap, fs};
-
-use ariadne::{sources, Label, Report, Source};
-use chumsky::{
-    error::{self, Rich},
-    ParseResult,
-};
-use indexmap::IndexMap;
-
-use crate::{
-    parser::{ast::File, Span},
-    tokens::Token,
-};
-
-#[derive(Debug, PartialEq, Eq, Hash)]
-pub enum Stage {
-    Lex,
-    Parse,
-}
-
-impl Stage {
-    fn variants() -> [Stage; 2] {
-        [Stage::Lex, Stage::Parse]
-    }
-}
-
-pub struct ErrorCollector<'filename, 'tokens, 'src> {
-    files: HashMap<&'filename str, &'src str>,
-    raw_errors: IndexMap<(&'filename str, Stage), Vec<error::Rich<'tokens, Token<'src>, Span>>>,
-}
-
-impl<'filename, 'tokens, 'src> ErrorCollector<'filename, 'tokens, 'src> {
-    pub fn new(files: Vec<(&'filename str, &'src str)>) -> Self {
-        Self {
-            files: HashMap::from_iter(files.clone()),
-            raw_errors: files
-                .iter()
-                .flat_map(|(name, _)| Stage::variants().map(|s| (name, s)))
-                .map(|(name, stage)| ((*name, stage), Vec::new()))
-                .collect(),
-        }
-    }
-
-    pub fn insert_many(
-        &mut self,
-        file: &'filename str,
-        curr_stage: Stage,
-        mut errs: Vec<error::Rich<'tokens, Token<'src>, Span>>,
-    ) {
-        let err_vec = self
-            .raw_errors
-            .get_mut(&(file, curr_stage))
-            .expect("filename should exist");
-        err_vec.append(&mut errs);
-    }
-
-    pub fn analyze_and_report(self) {
-        let ErrorCollector { files, raw_errors } = self;
-        todo!()
-    }
-
-    pub fn report_raw(self) {
-        let ErrorCollector { files, raw_errors } = self;
-
-        for ((file, stage), errs) in raw_errors.into_iter() {
-            for err in errs {
-                Report::build(ariadne::ReportKind::Error, file, err.span().start)
-                    .with_message(format!("error at stage {stage:?}, {:?}", err.reason()))
-                    .with_label(
-                        Label::new((file, err.span().into_range())).with_message(format!(
-                            "found: {:?}",
-                            err.found().expect("errors should have a reason")
-                        )),
-                    )
-                    .with_help(format!(
-                        "expected: {:?}",
-                        err.expected().collect::<Vec<_>>()
-                    ))
-                    .finish()
-                    .print((file, Source::from(files[file])));
-            }
-        }
-    }
-}
-
-#[derive(Debug, PartialEq, Eq)]
-struct Loc<'filename>(&'filename str, Span);

@@ -1,3 +1,5 @@
-pub mod err_reporting;
 pub mod parser;
 pub mod tokens;
+pub mod err_reporting {
+    pub struct GlobalReporter {}
+}

@@ -1,32 +1,17 @@
 use std::{fs, path::PathBuf};
 
 use clap::Parser;
-use lang::{err_reporting::ErrorCollector, parser::parse};
+use lang::parser::parse;
 
 #[derive(Parser)]
 struct Args {
     file: PathBuf,
 }
 
-#[allow(clippy::unwrap_used)]
 fn main() {
     let args = Args::parse();
-    let n = args.file.clone();
-    let f = fs::read_to_string(n.clone()).expect("failed to read file");
-    let mut err_collector = ErrorCollector::new(vec![(n.to_str().unwrap(), &f)]);
+    let f = fs::read_to_string(args.file).expect("failed to read file");
 
     println!("file: {f}\n");
-    let parse_res = parse(&f);
-    err_collector.insert_many(
-        args.file.to_str().unwrap(),
-        lang::err_reporting::Stage::Parse,
-        parse_res
-            .errors()
-            .into_iter()
-            .map(|e| e.to_owned())
-            .collect::<Vec<_>>(),
-    );
-
-    err_collector.report_raw();
-    println!("res: {:?}", parse_res);
+    println!("parsed: {:?}", parse(&f))
 }

@@ -21,8 +21,8 @@ pub mod ast;
 mod tests;
 use self::ast::{Expr, File};
 
-pub type Span = SimpleSpan;
-pub type Spanned<T> = (T, Span);
+type Span = SimpleSpan;
+type Spanned<T> = (T, Span);
 
 pub fn parse<'src>(src: &'src str) -> ParseResult<File<'_>, Rich<'_, Token<'_>>> {
     let toks: Vec<_> = Token::lexer(src)
@@ -43,29 +43,24 @@ fn expr_parser<'tokens, 'src: 'tokens, I: ValueInput<'tokens, Token = Token<'src
         Token::VarIdent(name) => (Expr::Var as fn(_) -> _, name),
         Token::InputIdent(name) => (Expr::InputVar as fn(_) -> _, name)
     }
-    .map_with(|(item_type, name), extra| item_type((name, extra.span())))
-    .labelled("variable");
+    .map_with(|(item_type, name), extra| item_type((name, extra.span())));
 
     let attrset = word
         .map_with(|n, e| (n, e.span()))
-        .labelled("attr name")
         .then_ignore(just(Token::Colon))
         .then(expr)
-        .labelled("attr body")
         .separated_by(just(Token::Comma))
         .collect::<Vec<_>>()
         .map(IndexMap::from_iter)
         .delimited_by(just(Token::BracketOpen), just(Token::BracketClose))
-        .map_with(|v, e| (v, e.span()))
-        .labelled("attrset");
+        .map_with(|v, e| (v, e.span()));
 
     let node = word
         .map_with(|v, e| (v, e.span()))
         .then(attrset.clone().or_not())
         .map(|(name, params)| Expr::Node(name, params))
         .or(var)
-        .or(attrset.map(Expr::AttrSet))
-        .labelled("node");
+        .or(attrset.map(Expr::AttrSet));
 
     let pipeline = node
         .clone()

@@ -39,7 +39,7 @@ fn test_parse_node_with_params() {
 }
 
 fn test_parse_multiple_top_level_complex() {
-    const INPUT: &str = r"def main = meow
+    const INPUT: &str = r#"def main = meow
 | uwu
 [ foo: @bar
 , hello: world @| test [ more: params ] | yay
@@ -52,7 +52,7 @@ def test = meow
 [ hello: $foo
 , world: @bar
 ];
-";
+"#;
     assert_eq!(
         parse(INPUT).unwrap(),
         File {

@@ -11,10 +11,7 @@ fn test_chrom_abb() {
     let chan_r = chromabb.color_matrix(
         StandardInput::SourceGraphic,
         ColorMatrixType::Matrix(Box::new([
-            1., 0., 0., 0., 0., //
-            0., 0., 0., 0., 0., //
-            0., 0., 0., 0., 0., //
-            0., 0., 0., 1., 0.,
+            1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0.,
         ])),
     );
     let offset_r = chromabb.offset(chan_r, 25., 0.);
@@ -23,10 +20,7 @@ fn test_chrom_abb() {
     let chan_b = chromabb.color_matrix(
         StandardInput::SourceGraphic,
         ColorMatrixType::Matrix(Box::new([
-            0., 0., 0., 0., 0., //
-            0., 0., 0., 0., 0., //
-            0., 0., 1., 0., 0., //
-            0., 0., 0., 1., 0.,
+            0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 1., 0.,
         ])),
     );
     let offset_b = chromabb.offset(chan_b, -25., 0.);
@@ -37,10 +31,7 @@ fn test_chrom_abb() {
     let chan_g = chromabb.color_matrix(
         StandardInput::SourceGraphic,
         ColorMatrixType::Matrix(Box::new([
-            0., 0., 0., 0., 0., //
-            0., 1., 0., 0., 0., //
-            0., 0., 0., 0., 0., //
-            0., 0., 0., 1., 0.,
+            0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0.,
         ])),
     );
     chromabb.composite_arithmetic(composite_rb, chan_g, 0., 1., 1., 0.);

@@ -1 +1,4 @@
-meow gay | uwu
+def main = meow | test;
+def test = meow [ hello: $foo, world: @bar];
+
+def uwu = owo;