lang: basic parser

Schrottkatze 2024-04-03 00:08:00 +02:00
parent ae86ae29ab
commit ca84af4e1b
Signed by: schrottkatze
SSH key fingerprint: SHA256:hXb3t1vINBFCiDCmhRABHX5ocdbLiKyCdKI4HK2Rbbc
11 changed files with 362 additions and 33 deletions

crates/lang/Cargo.toml

@@ -7,6 +7,10 @@ edition = "2021"
[dependencies]
logos = "0.14"
chumsky = "1.0.0-alpha.6"
petgraph = { workspace = true }
indexmap = "2.2.6"
clap = { version = "4", features = ["derive"] }

[lints]
workspace = true

crates/lang/src/lib.rs

@@ -1 +1,5 @@
pub mod parser;
pub mod tokens;
pub mod err_reporting {
    pub struct GlobalReporter {}
}

crates/lang/src/main.rs Normal file

@@ -0,0 +1,17 @@
use std::{fs, path::PathBuf};

use clap::Parser;
use lang::parser::parse;

#[derive(Parser)]
struct Args {
    file: PathBuf,
}

fn main() {
    let args = Args::parse();
    let f = fs::read_to_string(args.file).expect("failed to read file");

    println!("file: {f}\n");
    println!("parsed: {:?}", parse(&f))
}

crates/lang/src/parser.rs Normal file

@@ -0,0 +1,134 @@
use std::ops::Range;

use chumsky::{
    error::Rich,
    extra,
    input::{Stream, ValueInput},
    prelude::*,
    primitive::just,
    recursive::recursive,
    select,
    span::SimpleSpan,
    IterParser, Parser,
};
use indexmap::IndexMap;
use logos::{Logos, Source};

use crate::tokens::Token;

pub mod ast;

use self::ast::{Expr, File};

type Span = SimpleSpan;
type Spanned<T> = (T, Span);

pub fn parse<'src>(src: &'src str) -> ParseResult<File<'_>, Rich<'_, Token<'_>>> {
    // lex with logos, then hand the spanned tokens to chumsky as a stream
    let toks: Vec<_> = Token::lexer(src)
        .spanned()
        .into_iter()
        .map(|(t, s)| (t.expect("TODO: add lexer error(s)"), Span::from(s)))
        .collect();
    let tok_stream = Stream::from_iter(toks).spanned((src.len()..src.len()).into());
    expr_parser().parse(tok_stream)
}

fn expr_parser<'tokens, 'src: 'tokens, I: ValueInput<'tokens, Token = Token<'src>, Span = Span>>(
) -> impl Parser<'tokens, I, File<'src>, extra::Err<Rich<'tokens, Token<'src>, Span>>> {
    let word = select! { Token::Word(word) => word };

    let expr = recursive(|expr| {
        // variable references: `$name` (Var) and `@name` (InputVar)
        let var = select! {
            Token::VarIdent(name) => (Expr::Var as fn(_) -> _, name),
            Token::InputIdent(name) => (Expr::InputVar as fn(_) -> _, name)
        }
        .map_with(|(item_type, name), extra| item_type((name, extra.span())));

        // attribute sets: `[ key: expr, ... ]`, collected into an IndexMap
        let attrset = word
            .map_with(|n, e| (n, e.span()))
            .then_ignore(just(Token::Colon))
            .then(expr)
            .separated_by(just(Token::Comma))
            .collect::<Vec<_>>()
            .map(IndexMap::from_iter)
            .delimited_by(just(Token::BracketOpen), just(Token::BracketClose))
            .map_with(|v, e| (v, e.span()));

        // a node: a word with an optional parameter attrset, or a bare variable/attrset
        let node = word
            .map_with(|v, e| (v, e.span()))
            .then(attrset.clone().or_not())
            .map(|(name, params)| Expr::Node(name, params))
            .or(var)
            .or(attrset.map(Expr::AttrSet));

        // pipelines: nodes joined by pipe tokens, folded right-associatively
        let pipeline = node
            .clone()
            .then(choice((
                just(Token::Pipe).to(Expr::SimplePipe as fn(_, _) -> _),
                just(Token::MappingPipe).to(Expr::MappingPipe as fn(_, _) -> _),
                just(Token::NullPipe).to(Expr::NullPipe as fn(_, _) -> _),
            )))
            .repeated()
            .foldr(node, |(curr, pipe), next| {
                pipe(Box::new(curr), Box::new(next))
            });

        pipeline
    });

    // declarations: `def <name> = <expr>;`
    let decl = just(Token::Def).ignore_then(
        word.map_with(|n, e| (n, e.span()))
            .then_ignore(just(Token::Equals))
            .then(expr.clone().map_with(|expr, extra| (expr, extra.span())))
            .then_ignore(just(Token::SemiColon)),
    );

    // a file is either a single top-level expression (stored as an implicit `main`
    // declaration) or a sequence of `def` declarations
    expr.map_with(|expr, extra| File {
        decls: IndexMap::from_iter([(("main", (0..0).into()), (expr, extra.span()))]),
    })
    .or(decl.repeated().collect::<Vec<_>>().map(|decls| File {
        decls: IndexMap::from_iter(decls),
    }))
}
#[cfg(test)]
mod tests {
    use crate::parser::ast::{Expr, File};
    use crate::parser::parse;
    use crate::tokens::Token;
    use chumsky::input::Stream;
    use chumsky::prelude::*;
    use indexmap::IndexMap;
    use logos::Logos;

    #[test]
    fn test_parse_node_with_params() {
        const INPUT: &str = "meow [ hello: $foo, world: @bar]";

        assert_eq!(
            parse(INPUT).unwrap(),
            File {
                decls: IndexMap::from_iter([(
                    ("main", (0..0).into()),
                    (
                        Expr::Node(
                            ("meow", (0..4).into()),
                            Some((
                                IndexMap::from_iter([
                                    (
                                        ("hello", (7..12).into()),
                                        Expr::Var(("foo", (14..18).into()))
                                    ),
                                    (
                                        ("world", (20..25).into()),
                                        Expr::InputVar(("bar", (27..31).into()))
                                    )
                                ]),
                                (5..32).into()
                            ))
                        ),
                        (0..0).into()
                    )
                )])
            }
        );
    }
}

crates/lang/src/parser/ast.rs Normal file

@@ -0,0 +1,36 @@
use std::collections::{BTreeMap, HashMap};

use indexmap::IndexMap;

use super::Spanned;

#[derive(Debug, PartialEq, Eq)]
pub struct File<'src> {
    pub decls: IndexMap<Spanned<&'src str>, Spanned<Expr<'src>>>,
}

#[derive(Debug, PartialEq, Eq)]
pub enum Expr<'src> {
    Node(
        Spanned<&'src str>,
        Option<Spanned<IndexMap<Spanned<&'src str>, Expr<'src>>>>,
    ),
    SimplePipe(Box<Expr<'src>>, Box<Expr<'src>>),
    NamingPipe(
        Box<Expr<'src>>,
        (Vec<Spanned<&'src str>>, Vec<Spanned<&'src str>>),
        Box<Expr<'src>>,
    ),
    MappingPipe(Box<Expr<'src>>, Box<Expr<'src>>),
    NullPipe(Box<Expr<'src>>, Box<Expr<'src>>),
    MultiPipe(IndexMap<Spanned<&'src str>, Expr<'src>>),
    LetIn(
        IndexMap<Spanned<&'src str>, Box<Expr<'src>>>,
        Box<Expr<'src>>,
    ),
    // $
    Var(Spanned<&'src str>),
    // @
    InputVar(Spanned<&'src str>),
    AttrSet(Spanned<IndexMap<Spanned<&'src str>, Expr<'src>>>),
}

crates/lang/src/tokens.rs

@@ -1,8 +1,15 @@
use logos::Logos;
#[derive(Logos, Debug, PartialEq, Eq)]
#[derive(Logos, Debug, PartialEq, Eq, Clone)]
#[logos(skip r"[ \t\n\f]+")]
pub enum Token<'a> {
    // HACK: this isn't actually supposed to be in the language.
    // I can't figure out how to automatically choose between a top-level declaration
    // and a top-level expression, so declarations need the `def` keyword until I can.
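    // For illustration (the names are placeholders, not part of the language):
    //     def main = meow [ hello: $foo ];   // declaration form, requires `def`
    //     meow [ hello: $foo ]               // bare expression, wrapped in an implicit `main` decl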
#[token("def")]
Def,
#[regex("[a-zA-Z0-9_\\-]+", |lex| lex.slice())]
Word(&'a str),
#[regex("\\$[a-zA-Z0-9_\\-]+", |lex| &lex.slice()[1..])]
@@ -27,6 +34,8 @@ pub enum Token<'a> {
    Equals,
    #[token(":")]
    Colon,
    #[token(";")]
    SemiColon,
    #[token("[")]
    BracketOpen,
    #[token("]")]