Compare commits


3 commits

Author SHA1 Message Date
2d59a7f560
lang: start implementing parser combinators (i have no idea what i'm doing)
also, the current test.owo crashes for some reason. this is a headache.
manual/imperative parsers are a nightmare.
2024-04-12 01:02:07 +02:00
9da157ff4a
lang: massive amounts of parser and ast pain
2024-04-11 03:23:03 +02:00
881a987b2f
flake.lock: Update
Flake lock file updates:

• Updated input 'devenv':
    'github:cachix/devenv/18ef9849d1ecac7a9a7920eb4f2e4adcf67a8c3a' (2024-01-09)
  → 'github:cachix/devenv/a71323c618664a6b7a39bc183b0ce22ac8511cf9' (2024-04-08)
• Added input 'devenv/cachix':
    'github:cachix/cachix/661bbb7f8b55722a0406456b15267b5426a3bda6' (2024-03-15)
• Added input 'devenv/cachix/devenv':
    'github:cachix/devenv/2ee4450b0f4b95a1b90f2eb5ffea98b90e48c196' (2024-02-23)
• Added input 'devenv/cachix/devenv/flake-compat':
    follows 'devenv/cachix/flake-compat'
• Added input 'devenv/cachix/devenv/nix':
    'github:domenkozar/nix/ecd0af0c1f56de32cbad14daa1d82a132bf298f8' (2024-02-22)
• Added input 'devenv/cachix/devenv/nix/flake-compat':
    'github:edolstra/flake-compat/35bb57c0c8d8b62bbfd284272c928ceb64ddbde9' (2023-01-17)
• Added input 'devenv/cachix/devenv/nix/nixpkgs':
    follows 'devenv/cachix/devenv/nixpkgs'
• Added input 'devenv/cachix/devenv/nix/nixpkgs-regression':
    'github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2' (2022-01-24)
• Added input 'devenv/cachix/devenv/nixpkgs':
    'github:NixOS/nixpkgs/9201b5ff357e781bf014d0330d18555695df7ba8' (2023-08-23)
• Added input 'devenv/cachix/devenv/poetry2nix':
    'github:nix-community/poetry2nix/d5006be9c2c2417dafb2e2e5034d83fabd207ee3' (2023-08-24)
• Added input 'devenv/cachix/devenv/poetry2nix/flake-utils':
    'github:numtide/flake-utils/919d646de7be200f3bf08cb76ae1f09402b6f9b4' (2023-07-11)
• Added input 'devenv/cachix/devenv/poetry2nix/flake-utils/systems':
    'github:nix-systems/default/da67096a3b9bf56a91d16901293e51ba5b49a27e' (2023-04-09)
• Added input 'devenv/cachix/devenv/poetry2nix/nix-github-actions':
    'github:nix-community/nix-github-actions/165b1650b753316aa7f1787f3005a8d2da0f5301' (2023-07-09)
• Added input 'devenv/cachix/devenv/poetry2nix/nix-github-actions/nixpkgs':
    follows 'devenv/cachix/devenv/poetry2nix/nixpkgs'
• Added input 'devenv/cachix/devenv/poetry2nix/nixpkgs':
    follows 'devenv/cachix/devenv/nixpkgs'
• Added input 'devenv/cachix/devenv/pre-commit-hooks':
    follows 'devenv/cachix/pre-commit-hooks'
• Added input 'devenv/cachix/flake-compat':
    'github:edolstra/flake-compat/0f9255e01c2351cc7d116c072cb317785dd33b33' (2023-10-04)
• Added input 'devenv/cachix/nixpkgs':
    follows 'devenv/nixpkgs'
• Added input 'devenv/cachix/pre-commit-hooks':
    'github:cachix/pre-commit-hooks.nix/5df5a70ad7575f6601d91f0efec95dd9bc619431' (2024-02-15)
• Added input 'devenv/cachix/pre-commit-hooks/flake-compat':
    'github:edolstra/flake-compat/0f9255e01c2351cc7d116c072cb317785dd33b33' (2023-10-04)
• Added input 'devenv/cachix/pre-commit-hooks/flake-utils':
    'github:numtide/flake-utils/4022d587cbbfd70fe950c1e2083a02621806a725' (2023-12-04)
• Added input 'devenv/cachix/pre-commit-hooks/flake-utils/systems':
    'github:nix-systems/default/da67096a3b9bf56a91d16901293e51ba5b49a27e' (2023-04-09)
• Added input 'devenv/cachix/pre-commit-hooks/gitignore':
    'github:hercules-ci/gitignore.nix/43e1aa1308018f37118e34d3a9cb4f5e75dc11d5' (2023-12-29)
• Added input 'devenv/cachix/pre-commit-hooks/gitignore/nixpkgs':
    follows 'devenv/cachix/pre-commit-hooks/nixpkgs'
• Added input 'devenv/cachix/pre-commit-hooks/nixpkgs':
    follows 'devenv/cachix/nixpkgs'
• Added input 'devenv/cachix/pre-commit-hooks/nixpkgs-stable':
    'github:NixOS/nixpkgs/3dc440faeee9e889fe2d1b4d25ad0f430d449356' (2024-01-10)
• Updated input 'devenv/flake-compat':
    'github:edolstra/flake-compat/35bb57c0c8d8b62bbfd284272c928ceb64ddbde9' (2023-01-17)
  → 'github:edolstra/flake-compat/0f9255e01c2351cc7d116c072cb317785dd33b33' (2023-10-04)
• Updated input 'devenv/nix':
    'github:domenkozar/nix/7c91803598ffbcfe4a55c44ac6d49b2cf07a527f' (2023-02-16)
  → 'github:domenkozar/nix/c5bbf14ecbd692eeabf4184cc8d50f79c2446549' (2024-03-15)
• Added input 'devenv/nix/flake-compat':
    'github:edolstra/flake-compat/35bb57c0c8d8b62bbfd284272c928ceb64ddbde9' (2023-01-17)
• Removed input 'devenv/nix/lowdown-src'
• Updated input 'devenv/nixpkgs':
    'github:NixOS/nixpkgs/126f49a01de5b7e35a43fd43f891ecf6d3a51459' (2023-03-15)
  → 'github:cachix/devenv-nixpkgs/829e73affeadfb4198a7105cbe3a03153d13edc9' (2024-03-12)
• Updated input 'devenv/pre-commit-hooks':
    'github:cachix/pre-commit-hooks.nix/ea96f0c05924341c551a797aaba8126334c505d2' (2024-01-08)
  → 'github:cachix/pre-commit-hooks.nix/e35aed5fda3cc79f88ed7f1795021e559582093a' (2024-04-02)
• Updated input 'devenv/pre-commit-hooks/flake-utils':
    'github:numtide/flake-utils/a1720a10a6cfe8234c0e93907ffe81be440f4cef' (2023-05-31)
  → 'github:numtide/flake-utils/b1d9ab70662946ef0850d488da1c9019f3a9752a' (2024-03-11)
• Updated input 'devenv/pre-commit-hooks/gitignore':
    'github:hercules-ci/gitignore.nix/a20de23b925fd8264fd7fad6454652e142fd7f73' (2022-08-14)
  → 'github:hercules-ci/gitignore.nix/637db329424fd7e46cf4185293b9cc8c88c95394' (2024-02-28)
• Updated input 'devenv/pre-commit-hooks/nixpkgs-stable':
    'github:NixOS/nixpkgs/c37ca420157f4abc31e26f436c1145f8951ff373' (2023-06-03)
  → 'github:NixOS/nixpkgs/614b4613980a522ba49f0d194531beddbb7220d3' (2024-03-17)
• Updated input 'fenix':
    'github:nix-community/fenix/93e89638c15512db65e931f26ce36edf8cfbb4a5' (2024-01-10)
  → 'github:nix-community/fenix/99c6241db5ca5363c05c8f4acbdf3a4e8fc42844' (2024-04-06)
• Updated input 'fenix/nixpkgs':
    'github:nixos/nixpkgs/46ae0210ce163b3cba6c7da08840c1d63de9c701' (2024-01-06)
  → 'github:nixos/nixpkgs/fd281bd6b7d3e32ddfa399853946f782553163b5' (2024-04-03)
• Updated input 'fenix/rust-analyzer-src':
    'github:rust-lang/rust-analyzer/ae6e73772432cfe35bb0ff6de6fdcfa908642b67' (2024-01-09)
  → 'github:rust-lang/rust-analyzer/8e581ac348e223488622f4d3003cb2bd412bf27e' (2024-04-03)
• Updated input 'nixpkgs':
    'github:NixOS/nixpkgs/317484b1ead87b9c1b8ac5261a8d2dd748a0492d' (2024-01-08)
  → 'github:NixOS/nixpkgs/ff0dbd94265ac470dda06a657d5fe49de93b4599' (2024-04-06)
2024-04-08 23:27:56 +02:00
16 changed files with 1306 additions and 218 deletions

Cargo.lock (generated, 52 changes)

@ -174,9 +174,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]] [[package]]
name = "chumsky" name = "chumsky"
version = "1.0.0-alpha.6" version = "1.0.0-alpha.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9c28d4e5dd9a9262a38b231153591da6ce1471b818233f4727985d3dd0ed93c" checksum = "c7b80276986f86789dc56ca6542d53bba9cda3c66091ebbe7bd96fc1bdf20f1f"
dependencies = [ dependencies = [
"hashbrown", "hashbrown",
"regex-automata", "regex-automata",
@ -237,6 +237,12 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]]
name = "countme"
version = "3.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636"
[[package]] [[package]]
name = "crc32fast" name = "crc32fast"
version = "1.3.2" version = "1.3.2"
@ -322,6 +328,12 @@ dependencies = [
"windows-sys 0.48.0", "windows-sys 0.48.0",
] ]
[[package]]
name = "ego-tree"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a68a4904193147e0a8dec3314640e6db742afd5f6e634f428a6af230d9b3591"
[[package]] [[package]]
name = "either" name = "either"
version = "1.9.0" version = "1.9.0"
@ -506,9 +518,11 @@ dependencies = [
"ariadne", "ariadne",
"chumsky", "chumsky",
"clap", "clap",
"ego-tree",
"indexmap", "indexmap",
"logos", "logos",
"petgraph", "petgraph",
"rowan",
] ]
[[package]] [[package]]
@ -589,6 +603,15 @@ version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
[[package]]
name = "memoffset"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a"
dependencies = [
"autocfg",
]
[[package]] [[package]]
name = "miniz_oxide" name = "miniz_oxide"
version = "0.7.1" version = "0.7.1"
@ -863,6 +886,25 @@ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]]
name = "rowan"
version = "0.15.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
dependencies = [
"countme",
"hashbrown",
"memoffset",
"rustc-hash",
"text-size",
]
[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]] [[package]]
name = "ryu" name = "ryu"
version = "1.0.16" version = "1.0.16"
@ -973,6 +1015,12 @@ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]]
name = "text-size"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233"
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "1.0.55" version = "1.0.55"


@ -7,11 +7,13 @@ edition = "2021"
[dependencies] [dependencies]
logos = "0.14" logos = "0.14"
chumsky = {version= "1.0.0-alpha.6", features=["label"]} chumsky = {version= "1.0.0-alpha.7", features=["label"]}
petgraph = { workspace = true} petgraph = { workspace = true}
indexmap = "2.2.6" indexmap = "2.2.6"
clap = { version = "4", features = ["derive"] } clap = { version = "4", features = ["derive"] }
ariadne = "0.4.0" ariadne = "0.4.0"
ego-tree = "0.6.2"
rowan = "0.15.15"
[lints] [lints]
workspace = true workspace = true


@ -1,7 +1,10 @@
use std::{fs, path::PathBuf}; use std::{fs, path::PathBuf};
use clap::Parser; use clap::Parser;
use lang::{err_reporting::ErrorCollector, parser::parse}; use lang::{
err_reporting::ErrorCollector,
parser::ast::lossless::{lex, parser},
};
#[derive(Parser)] #[derive(Parser)]
struct Args { struct Args {
@ -13,20 +16,24 @@ fn main() {
let args = Args::parse(); let args = Args::parse();
let n = args.file.clone(); let n = args.file.clone();
let f = fs::read_to_string(n.clone()).expect("failed to read file"); let f = fs::read_to_string(n.clone()).expect("failed to read file");
let mut err_collector = ErrorCollector::new(vec![(n.to_str().unwrap(), &f)]); println!("toks: {:?}", lex::lex(&f));
let parse_res = parser::parse(&f);
println!("parse: {:?}", parse_res);
// dbg!(lex::lex(&f));
// let mut err_collector = ErrorCollector::new(vec![(n.to_str().unwrap(), &f)]);
println!("file: {f}\n"); // println!("file: {f}\n");
let parse_res = parse(&f); // let parse_res = parse(&f);
err_collector.insert_many( // err_collector.insert_many(
args.file.to_str().unwrap(), // args.file.to_str().unwrap(),
lang::err_reporting::Stage::Parse, // lang::err_reporting::Stage::Parse,
parse_res // parse_res
.errors() // .errors()
.into_iter() // .into_iter()
.map(|e| e.to_owned()) // .map(|e| e.to_owned())
.collect::<Vec<_>>(), // .collect::<Vec<_>>(),
); // );
err_collector.report_raw(); // err_collector.report_raw();
println!("res: {:?}", parse_res); // println!("res: {:?}", parse_res);
} }


@ -1,5 +1,3 @@
use std::ops::Range;
use chumsky::{ use chumsky::{
error::Rich, error::Rich,
input::{Stream, ValueInput}, input::{Stream, ValueInput},
@ -10,19 +8,22 @@ use chumsky::{
IterParser, IterParser,
}; };
use indexmap::IndexMap; use indexmap::IndexMap;
use logos::{Logos, Source}; use logos::Logos;
use crate::tokens::Token; use crate::tokens::Token;
pub mod ast; pub mod ast;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
use self::ast::{Expr, Expression, File}; use self::ast::{
raw_ast::{RawExpr, RawExpression},
File,
};
pub type Span = SimpleSpan; pub type Span = SimpleSpan;
pub type Spanned<T> = (T, Span); pub type Spanned<T> = (T, Span);
pub fn parse<'src>(src: &'src str) -> ParseResult<File<'_>, Rich<'_, Token<'_>>> { pub fn parse(src: &str) -> ParseResult<File<'_>, Rich<'_, Token<'_>>> {
let toks: Vec<_> = Token::lexer(src) let toks: Vec<_> = Token::lexer(src)
.spanned() .spanned()
.map(|(t, s)| (t.expect("TODO: add lexer error(s)"), Span::from(s))) .map(|(t, s)| (t.expect("TODO: add lexer error(s)"), Span::from(s)))
@ -35,22 +36,39 @@ pub(crate) fn parser<
'src: 'tokens, 'src: 'tokens,
I: ValueInput<'tokens, Token = Token<'src>, Span = Span>, I: ValueInput<'tokens, Token = Token<'src>, Span = Span>,
>() -> impl Parser<'tokens, I, File<'src>, extra::Err<Rich<'tokens, Token<'src>, Span>>> { >() -> impl Parser<'tokens, I, File<'src>, extra::Err<Rich<'tokens, Token<'src>, Span>>> {
let word = select! { Token::Word(word) => word }; let word = select! { Token::Word(word) = e => (word, e.span())};
let expr = recursive(|expr| { let expr = recursive(|expr| {
let lit = select! { let lit = select! {
Token::Int(i) = e => Expression::new(Expr::Lit(ast::Lit::Int(i.parse().unwrap())), e.span()), Token::Int(i) = e => RawExpression::new(RawExpr::Lit(ast::Lit::Int(i.parse().expect("TODO: handle better"))), e.span()),
Token::Float(f) = e => Expression::new(Expr::Lit(ast::Lit::Float(f.parse().unwrap())), e.span()), Token::Float(f) = e => RawExpression::new(RawExpr::Lit(ast::Lit::Float(f.parse().expect("TODO: handle better"))), e.span()),
Token::String(s) = e => RawExpression::new(RawExpr::Lit(ast::Lit::String(s.strip_prefix('"').expect("a").strip_suffix('"').expect("b"))), e.span())
}; };
let mat = just(Token::Mat)
.ignore_then(select! { Token::Dimensions(dimensions) = e => (dimensions, e.span())})
.then(
lit.separated_by(just(Token::Comma))
.collect::<Vec<_>>()
.separated_by(just(Token::Semicolon))
.collect::<Vec<_>>()
.delimited_by(just(Token::BracketOpen), just(Token::BracketClose)),
)
.map_with(|(dimensions, data), e| {
// TODO: Validation and proper error handling/reporting
// (validation = validating the matrix dimensions)
RawExpression::new(
RawExpr::Matrix(dimensions, data.into_iter().flatten().collect()),
e.span(),
)
});
let var = select! { let var = select! {
Token::VarIdent(name) => (Expr::Var as fn(_) -> _, name), Token::VarIdent(name) => (RawExpr::Var as fn(_) -> _, name),
Token::InputIdent(name) => (Expr::InputVar as fn(_) -> _, name) Token::InputIdent(name) => (RawExpr::InputVar as fn(_) -> _, name)
} }
.map_with(|(item_type, name), extra| Expression::new(item_type(name), extra.span())) .map_with(|(item_type, name), extra| RawExpression::new(item_type(name), extra.span()))
.labelled("variable"); .labelled("variable");
let attrset = word let attrset = word
.map_with(|n, e| (n, e.span()))
.labelled("attr name") .labelled("attr name")
.then_ignore(just(Token::Colon)) .then_ignore(just(Token::Colon))
.then(expr) .then(expr)
@ -63,57 +81,72 @@ pub(crate) fn parser<
.labelled("attrset"); .labelled("attrset");
let node = word let node = word
.map_with(|v, e| (v, e.span())) .repeated()
.collect()
.then(attrset.clone().or_not()) .then(attrset.clone().or_not())
.map_with(|(name, params), extra| { .map_with(|(name, params), extra| {
Expression::new(Expr::Node(name, params), extra.span()) RawExpression::new(RawExpr::Node(name, params), extra.span())
}) })
.or(var) // .or(var)
.or(attrset // .or(attrset
.map_with(|attrset, extra| Expression::new(Expr::AttrSet(attrset), extra.span()))) // .map_with(|attrset, extra| Expression::new(Expr::AttrSet(attrset), extra.span())))
// .or(lit)
// .or(mat)
.labelled("node"); .labelled("node");
let atom = var
.or(lit)
.or(mat)
.or(attrset.map_with(|attrset, extra| {
RawExpression::new(RawExpr::AttrSet(attrset), extra.span())
}))
.or(node.clone());
#[allow(clippy::let_and_return)] #[allow(clippy::let_and_return)]
let pipeline = node let pipeline = atom
.clone() .clone()
.then(choice(( .then(choice((
just(Token::Pipe).to(Expr::SimplePipe as fn(_, _) -> _), just(Token::Pipe).to(RawExpr::SimplePipe as fn(_, _) -> _),
just(Token::MappingPipe).to(Expr::MappingPipe as fn(_, _) -> _), just(Token::MappingPipe).to(RawExpr::MappingPipe as fn(_, _) -> _),
just(Token::NullPipe).to(Expr::NullPipe as fn(_, _) -> _), just(Token::NullPipe).to(RawExpr::NullPipe as fn(_, _) -> _),
))) )))
.repeated() .repeated()
.foldr_with(node, |(curr, pipe), next, extra| { .foldr_with(atom, |(curr, pipe), next, extra| {
Expression::new(pipe(Box::new(curr), Box::new(next)), extra.span()) RawExpression::new(pipe(curr, next), extra.span())
}); });
pipeline pipeline
}); });
let decl = just(Token::Def).ignore_then( let decls = just(Token::Def)
word.map_with(|n, e| (n, e.span())) .ignore_then(
.then_ignore(just(Token::Equals)) word.then_ignore(just(Token::Equals))
.then(expr.clone().map(|expr| expr)) .then(expr.clone().map(|expr| expr))
.then_ignore(just(Token::SemiColon)), .then_ignore(just(Token::Semicolon)),
); )
.repeated()
expr.map(|expr| File { .collect::<Vec<_>>()
decls: IndexMap::from_iter([(("main", (0..0).into()), expr)]), .map(|decls| File {
})
.or(decl.repeated().collect::<Vec<_>>().map(|decls| File {
decls: IndexMap::from_iter(decls), decls: IndexMap::from_iter(decls),
})) });
let single_expr = expr.map(|expr| File {
decls: IndexMap::from_iter([(("main", (0..0).into()), expr)]),
});
just(Token::Def).rewind().ignore_then(decls).or(single_expr)
// single_expr.or(decls)
// expr.map(|expr| File {
// decls: IndexMap::from_iter([(("main", (0..0).into()), expr)]),
// })
// .or(decl.repeated().collect::<Vec<_>>().map(|decls| File {
// decls: IndexMap::from_iter(decls),
// }))
} }
pub mod asg { pub mod asg {
use petgraph::graph::DiGraph; use petgraph::graph::DiGraph;
use super::Spanned; use super::Spanned;
pub struct Asg<'src> {
graph: DiGraph<AsgNode<'src>, String>,
}
enum AsgNode<'src> {
Node(Spanned<&'src str>),
}
} }


@ -2,51 +2,14 @@ use std::collections::{BTreeMap, HashMap};
use indexmap::IndexMap; use indexmap::IndexMap;
use super::{Span, Spanned}; use super::Spanned;
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub struct File<'src> { pub struct File<'src> {
pub decls: IndexMap<Spanned<&'src str>, Expression<'src>>, pub decls: IndexMap<Spanned<&'src str>, raw_ast::RawExpression<'src>>,
} }
#[derive(Debug, PartialEq)] pub mod raw_ast;
pub struct Expression<'src> {
pub expr: Expr<'src>,
pub span: Span,
}
impl<'src> Expression<'src> {
pub fn new(expr: Expr<'src>, span: Span) -> Self {
Self { expr, span }
}
}
#[derive(Debug, PartialEq)]
pub enum Expr<'src> {
Node(
Spanned<&'src str>,
Option<Spanned<IndexMap<Spanned<&'src str>, Expression<'src>>>>,
),
SimplePipe(Box<Expression<'src>>, Box<Expression<'src>>),
// NamingPipe(
// Box<Expression<'src>>,
// (Vec<Spanned<&'src str>>, Vec<Spanned<&'src str>>),
// Box<Expression<'src>>,
// ),
MappingPipe(Box<Expression<'src>>, Box<Expression<'src>>),
NullPipe(Box<Expression<'src>>, Box<Expression<'src>>),
MultiPipe(IndexMap<Spanned<&'src str>, Expression<'src>>),
// LetIn(
// IndexMap<Spanned<&'src str>, Box<Expression<'src>>>,
// Box<Expression<'src>>,
// ),
// $
Var(&'src str),
// @
InputVar(&'src str),
AttrSet(Spanned<IndexMap<Spanned<&'src str>, Expression<'src>>>),
Lit(Lit<'src>),
}
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub enum Lit<'src> { pub enum Lit<'src> {
@ -55,3 +18,7 @@ pub enum Lit<'src> {
Float(f64), Float(f64),
String(&'src str), String(&'src str),
} }
pub mod lossless;
pub mod ast_tree;


@ -0,0 +1,31 @@
use ego_tree::Tree;
use crate::parser::Spanned;
use super::{File, Lit};
pub struct Ast<'src> {
tree: Tree<AstNode<'src>>,
}
struct AstNode<'src> {
kind: NodeKind<'src>,
}
enum NodeKind<'src> {
Decl,
Ident(&'src str),
Instr,
Expr,
MappingPipe,
NullPipe,
MultiPipe,
Var(&'src str),
InputVar(&'src str),
AttrSet,
Attr,
Lit(Lit<'src>),
Matrix,
Dimensions(u16, u16),
MatrixRow,
}


@ -0,0 +1,19 @@
use self::lex::SyntaxKind;
pub mod parser;
pub mod lex;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum Lang {}
impl rowan::Language for Lang {
type Kind = SyntaxKind;
#[allow(unsafe_code)]
fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind {
assert!(raw.0 <= SyntaxKind::ROOT as u16);
unsafe { std::mem::transmute::<u16, SyntaxKind>(raw.0) }
}
fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind {
kind.into()
}
}
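A minimal sketch of how this Lang implementation gets used (not part of the change set itself): the GreenNode built by the parser module below can be wrapped into a typed rowan syntax tree; the syntax helper name here is hypothetical.

use rowan::SyntaxNode;

// Hypothetical helper: view the parser's untyped GreenNode as a typed
// SyntaxNode<Lang>, so the tree can be walked with SyntaxKind-aware
// kind() and children() accessors.
fn syntax(parse: &parser::Parse) -> SyntaxNode<Lang> {
    SyntaxNode::new_root(parse.green_node.clone())
}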


@ -0,0 +1,118 @@
use logos::Logos;
use crate::parser::Span;
pub fn lex(src: &str) -> Vec<(SyntaxKind, &str)> {
let mut lex = SyntaxKind::lexer(src);
let mut r = Vec::new();
while let Some(tok_res) = lex.next() {
r.push((tok_res.unwrap_or(SyntaxKind::LEX_ERR), lex.slice()))
}
r.reverse();
r
}
#[derive(Logos, Debug, PartialEq, Eq, Clone, Copy, Hash, PartialOrd, Ord)]
#[repr(u16)]
#[allow(non_camel_case_types)]
pub enum SyntaxKind {
#[token("def")]
DEF_KW = 0,
#[token("let")]
LET_KW,
#[token("in")]
IN_KW,
#[token("mat")]
MAT_KW,
#[regex("[\\d]+x[\\d]+")]
PAT_DIMENSIONS,
#[regex("[\\d]+")]
INT_NUM,
#[regex("[+-]?([\\d]+\\.[\\d]*|[\\d]*\\.[\\d]+)")]
FLOAT_NUM,
#[regex(r#""([^"\\]|\\["\\bnfrt]|u[a-fA-F0-9]{4})*""#)]
STRING,
MATRIX,
DECL,
LIST,
MAT_BODY,
PARENTHESIZED_EXPR,
EXPR,
#[token("(")]
L_PAREN,
#[token(")")]
R_PAREN,
#[token("{")]
L_CURLY,
#[token("}")]
R_CURLY,
#[token("[")]
L_BRACK,
#[token("]")]
R_BRACK,
#[token("<")]
L_ANGLE,
#[token(">")]
R_ANGLE,
#[token("+")]
PLUS,
#[token("-")]
MINUS,
#[token("*")]
STAR,
#[token("/")]
SLASH,
#[token("%")]
PERCENT,
#[token("^")]
CARET,
INSTR,
INSTR_NAME,
INSTR_PARAMS,
ATTR_SET,
ATTR,
ATTR_NAME,
ATTR_VALUE,
#[regex("[a-zA-Z_]+[a-zA-Z_\\-\\d]*")]
IDENT,
#[regex("\\$[a-zA-Z0-9_\\-]+")]
VAR,
#[regex("\\@[a-zA-Z0-9_\\-]+")]
INPUT_VAR,
#[token("$")]
DOLLAR,
#[token("@")]
AT,
#[token(",")]
COMMA,
#[token("|")]
PIPE,
#[token("@|")]
MAPPING_PIPE,
#[token("!|")]
NULL_PIPE,
#[token("=")]
EQ,
#[token(":")]
COLON,
#[token(";")]
SEMICOLON,
#[token(".")]
DOT,
#[token("!")]
BANG,
#[regex("[ \\t\\f]+")]
WHITESPACE,
#[token("\n")]
NEWLINE,
PARSE_ERR,
LEX_ERR,
ROOT,
}
impl From<SyntaxKind> for rowan::SyntaxKind {
fn from(kind: SyntaxKind) -> Self {
Self(kind as u16)
}
}
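A small usage sketch (not part of the change set), assuming it runs inside this module: lex returns (SyntaxKind, &str) pairs in reverse source order, so the parser can treat the Vec as a stack, with tokens.last() as the current token and tokens.pop() to advance.

fn lex_demo() {
    let toks = lex("def main = meow 5;");
    // Tokens come back reversed; iterate back-to-front to print source order.
    for (kind, text) in toks.iter().rev() {
        println!("{kind:?} {text:?}");
    }
}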


@ -0,0 +1,453 @@
use std::borrow::Borrow;
use rowan::{
Checkpoint, GreenNode, GreenNodeBuilder, GreenNodeData, GreenTokenData, Language, NodeOrToken,
};
use crate::parser::{
ast::lossless::{lex::SyntaxKind::*, Lang},
Span,
};
use super::lex::{self, SyntaxKind};
mod parsers {
use rowan::GreenNode;
use crate::parser::ast::lossless::lex::SyntaxKind;
use super::SyntaxError;
struct ParseResult {
green_node: GreenNode,
errors: Vec<SyntaxError>,
}
trait Parser {
fn parse<'src>(input: &[(SyntaxKind, &'src str)]) -> ParseResult;
}
}
#[derive(PartialEq, Eq)]
pub struct Parse {
pub green_node: GreenNode,
}
impl std::fmt::Debug for Parse {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
debug_print_green_node(NodeOrToken::Node(self.green_node.borrow()), f, 0)
}
}
fn debug_print_green_node(
node: NodeOrToken<&GreenNodeData, &GreenTokenData>,
f: &mut std::fmt::Formatter<'_>,
lvl: i32,
) -> std::fmt::Result {
for _ in 0..lvl {
f.write_str(" ")?;
}
match node {
NodeOrToken::Node(n) => {
writeln!(f, "{:?} {{", Lang::kind_from_raw(node.kind()));
for c in n.children() {
debug_print_green_node(c, f, lvl + 1)?;
}
for _ in 0..lvl {
f.write_str(" ")?;
}
f.write_str("}\n")
}
NodeOrToken::Token(t) => {
writeln!(f, "{:?} {:?};", Lang::kind_from_raw(t.kind()), t.text())
}
}
}
#[derive(Debug)]
struct Parser<'src> {
tokens: Vec<(SyntaxKind, &'src str)>,
builder: GreenNodeBuilder<'src>,
errors: Vec<SyntaxError>,
}
#[derive(Debug, PartialEq, Eq)]
enum SyntaxError {
Expected(SyntaxKind),
AttrExpectedValue,
/// guessed if there's a newline and attr on next line without comma
/// should then suggest comma after attr
ExpectedCommaBetweenAttrs,
}
pub fn parse(src: &str) -> Parse {
let mut tokens = lex::lex(src);
Parser {
tokens,
builder: GreenNodeBuilder::new(),
errors: Vec::new(),
}
.parse()
}
impl Parser<'_> {
fn parse(mut self) -> Parse {
self.start_node(ROOT);
match self.expr(None) {
expr::ExprRes::Ok => (),
expr::ExprRes::Eof => (),
expr::ExprRes::NoExpr => todo!(),
}
self.builder.finish_node();
Parse {
green_node: self.builder.finish(),
}
}
fn start_node(&mut self, kind: SyntaxKind) {
self.builder.start_node(kind.into());
}
fn finish_node(&mut self) {
self.builder.finish_node();
}
/// Advance one token, adding it to the current branch of the tree builder.
fn bump(&mut self) {
let (kind, text) = self.tokens.pop().unwrap();
self.builder.token(kind.into(), text);
}
fn syntax_err(&mut self, err: SyntaxError) {
let (_, text) = self.tokens.pop().unwrap();
self.builder.token(PARSE_ERR.into(), text);
self.errors.push(err);
}
fn syntax_err_by_checkpoint(&mut self, checkpoint: Checkpoint, err: SyntaxError) {
self.builder.start_node_at(checkpoint, PARSE_ERR.into());
self.finish_node();
self.errors.push(err);
}
fn expected(&mut self, expected: SyntaxKind) {
self.syntax_err(SyntaxError::Expected(expected))
}
/// Peek at the first unprocessed token
fn current(&self) -> Option<SyntaxKind> {
self.tokens.last().map(|(kind, _)| *kind)
}
fn next(&self) -> Option<SyntaxKind> {
self.tokens
.get(self.tokens.len() - 2)
.map(|(kind, _)| *kind)
}
fn skip_ws(&mut self) {
while self.current() == Some(WHITESPACE) || self.current() == Some(NEWLINE) {
self.bump()
}
}
fn skip_ws_without_newlines(&mut self) {
while self.current() == Some(WHITESPACE) {
self.bump()
}
}
}
mod expr {
use rowan::Checkpoint;
use super::{attrset::AttrsetRes, instr::NodeRes, Parser};
use crate::parser::{ast::lossless::lex::SyntaxKind::*, Span};
impl Parser<'_> {
pub(super) fn expr(&mut self, start: Option<Checkpoint>) -> ExprRes {
self.skip_ws();
let start = start.unwrap_or_else(|| self.builder.checkpoint());
match self.current() {
Some(IDENT) => {
let expr_res = match self.instr() {
NodeRes::Ok => ExprRes::Ok,
NodeRes::Eof => ExprRes::Eof,
};
self.builder.start_node_at(start, EXPR.into());
self.finish_node();
expr_res
}
Some(_) => self.atom(Some(start)),
None => ExprRes::Eof,
}
}
pub(super) fn atom(&mut self, start: Option<Checkpoint>) -> ExprRes {
self.skip_ws();
let start = start.unwrap_or_else(|| self.builder.checkpoint());
match self.current() {
Some(INT_NUM | FLOAT_NUM | STRING) => {
self.bump();
self.builder.start_node_at(start, EXPR.into());
self.finish_node();
ExprRes::Ok
}
Some(L_CURLY) => match self.attrset(start) {
AttrsetRes::Ok => ExprRes::Ok,
AttrsetRes::Eof => ExprRes::Eof,
},
Some(L_PAREN) => {
self.builder.start_node_at(start, PARENTHESIZED_EXPR.into());
self.bump();
self.expr(None);
self.skip_ws();
match self.current() {
Some(R_PAREN) => ExprRes::Ok,
Some(_) => todo!(),
None => ExprRes::Eof,
}
}
Some(_) => ExprRes::NoExpr,
None => ExprRes::Eof,
}
}
}
pub enum ExprRes {
Ok,
Eof,
/// isnt an expression
NoExpr,
}
}
mod attrset {
use chumsky::container::Container;
use rowan::Checkpoint;
use super::{expr::ExprRes, instr::NodeRes, Parser};
use crate::parser::{
ast::lossless::{lex::SyntaxKind::*, parser::SyntaxError},
Span,
};
impl Parser<'_> {
pub(super) fn attrset(&mut self, checkpoint: Checkpoint) -> AttrsetRes {
assert_eq!(self.current(), Some(L_CURLY));
self.bump();
self.skip_ws();
match self.current() {
Some(R_CURLY) => {
self.builder.start_node_at(checkpoint, ATTR_SET.into());
self.bump();
self.finish_node();
AttrsetRes::Ok
}
Some(_) => {
self.builder.start_node_at(checkpoint, ATTR_SET.into());
let res = match self.attrs() {
AttrRes::Eof => AttrsetRes::Eof,
AttrRes::RCurly | AttrRes::Ok => {
println!("curr: {:?}", self.current());
AttrsetRes::Ok
}
};
self.finish_node();
res
}
None => AttrsetRes::Eof,
}
// self.start_node(ATTR);
}
fn attrs(&mut self) -> AttrRes {
let mut res = AttrRes::Ok;
while res == AttrRes::Ok {
println!("it: {:?}", self.tokens.last());
match self.attr() {
AttrRes::Ok => {
self.skip_ws_without_newlines();
println!(
"a: {:?}, {:?}",
self.tokens.last(),
self.tokens.get(self.tokens.len() - 2)
);
println!("errs: {:?}", self.errors);
res = AttrRes::Ok;
let checkpoint_previous_end = self.builder.checkpoint();
res = match self.current() {
Some(COMMA) => {
self.bump();
AttrRes::Ok
}
Some(R_CURLY) => {
self.bump();
res = AttrRes::Ok;
break;
}
Some(NEWLINE) => {
self.skip_ws();
println!(
"b: {:?}, {:?}",
self.tokens.last(),
self.tokens.get(self.tokens.len() - 2)
);
match self.current() {
Some(COMMA) => {
self.bump();
AttrRes::Ok
}
Some(R_CURLY) => {
self.bump();
res = AttrRes::Ok;
break;
}
Some(IDENT) => {
println!("wtf");
self.syntax_err_by_checkpoint(
checkpoint_previous_end,
SyntaxError::ExpectedCommaBetweenAttrs,
);
// self.syntax_err(SyntaxError::ExpectedCommaBetweenAttrs);
AttrRes::Ok
}
Some(_) => {
self.bump();
AttrRes::Ok
}
None => {
res = AttrRes::Eof;
break;
}
}
}
Some(_) => {
self.bump();
println!(
"c: {:?}, {:?}",
self.tokens.last(),
self.tokens.get(self.tokens.len() - 2)
);
AttrRes::Ok
}
None => {
res = AttrRes::Eof;
break;
}
}
}
AttrRes::Eof => {
res = AttrRes::Eof;
break;
}
AttrRes::RCurly => {
res = AttrRes::RCurly;
break;
}
}
}
println!("toks_left: {:?}", self.tokens);
res
}
fn attr(&mut self) -> AttrRes {
self.skip_ws();
self.start_node(ATTR);
self.start_node(ATTR_NAME);
match self.current() {
Some(IDENT) => self.bump(),
Some(R_CURLY) => return AttrRes::Ok,
Some(_) => self.expected(IDENT),
None => return AttrRes::Eof,
}
self.finish_node();
self.skip_ws();
match self.current() {
Some(COLON) => self.bump(),
Some(R_CURLY) => {
self.expected(COLON);
return AttrRes::RCurly;
}
Some(_) => self.expected(COLON),
None => return AttrRes::Eof,
}
self.skip_ws();
self.start_node(ATTR_VALUE);
match self.expr(None) {
ExprRes::Ok => self.bump(),
ExprRes::Eof => return AttrRes::Eof,
ExprRes::NoExpr => match self.current() {
Some(COMMA) => self.syntax_err(SyntaxError::AttrExpectedValue),
Some(R_CURLY) => {
self.syntax_err(SyntaxError::AttrExpectedValue);
return AttrRes::RCurly;
}
Some(_) => self.expected(EXPR),
None => unreachable!(),
},
}
self.finish_node();
self.finish_node();
AttrRes::Ok
}
}
#[derive(PartialEq, Eq)]
pub enum AttrsetRes {
Ok,
Eof,
}
#[derive(PartialEq, Eq)]
enum AttrRes {
Ok,
Eof,
RCurly,
}
}
mod instr {
use super::Parser;
use crate::parser::{
ast::lossless::{lex::SyntaxKind::*, parser::expr::ExprRes},
Span,
};
impl Parser<'_> {
pub(super) fn instr(&mut self) -> NodeRes {
assert_eq!(self.current(), Some(IDENT));
self.skip_ws();
self.start_node(INSTR);
self.instr_name();
// used to count positionals
let mut i = 0;
let params_checkpoint = self.builder.checkpoint();
loop {
match self.expr(None) {
ExprRes::Ok => {
i += 1;
continue;
}
ExprRes::NoExpr | ExprRes::Eof => break,
}
}
if i >= 1 {
self.builder
.start_node_at(params_checkpoint, INSTR_PARAMS.into());
self.finish_node();
}
self.finish_node();
NodeRes::Ok
}
fn instr_name(&mut self) {
self.start_node(INSTR_NAME);
while self.current() == Some(IDENT) {
self.bump();
self.skip_ws_without_newlines();
}
self.finish_node();
}
}
pub(super) enum NodeRes {
Ok,
Eof,
}
}
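The parser above relies on rowan's checkpoint mechanism: a checkpoint taken before tokens are emitted lets start_node_at wrap those tokens in a node retroactively, once the parser knows what it has been reading. A standalone sketch of that pattern (not part of the change set), reusing the SyntaxKinds from the lex module:

fn checkpoint_demo() -> rowan::GreenNode {
    let mut builder = rowan::GreenNodeBuilder::new();
    builder.start_node(ROOT.into());
    let cp = builder.checkpoint();          // remember the current position
    builder.token(INT_NUM.into(), "5");     // emit a token first...
    builder.start_node_at(cp, EXPR.into()); // ...then wrap it in an EXPR node
    builder.finish_node();                  // EXPR
    builder.finish_node();                  // ROOT
    builder.finish()
}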


@ -0,0 +1,50 @@
use indexmap::IndexMap;
use super::super::Spanned;
use super::super::Span;
use super::Lit;
#[derive(Debug, PartialEq)]
pub struct RawExpression<'src> {
pub expr: Box<RawExpr<'src>>,
pub span: Span,
}
impl<'src> RawExpression<'src> {
pub fn new(expr: RawExpr<'src>, span: Span) -> Self {
Self {
expr: Box::new(expr),
span,
}
}
}
#[derive(Debug, PartialEq)]
pub enum RawExpr<'src> {
Node(
Vec<Spanned<&'src str>>,
Option<Spanned<IndexMap<Spanned<&'src str>, RawExpression<'src>>>>,
),
SimplePipe(RawExpression<'src>, RawExpression<'src>),
// NamingPipe(
// Box<Expression<'src>>,
// (Vec<Spanned<&'src str>>, Vec<Spanned<&'src str>>),
// Box<Expression<'src>>,
// ),
MappingPipe(RawExpression<'src>, RawExpression<'src>),
NullPipe(RawExpression<'src>, RawExpression<'src>),
MultiPipe(IndexMap<Spanned<&'src str>, RawExpression<'src>>),
// LetIn(
// IndexMap<Spanned<&'src str>, Box<Expression<'src>>>,
// Box<Expression<'src>>,
// ),
// $
Var(&'src str),
// @
InputVar(&'src str),
AttrSet(Spanned<IndexMap<Spanned<&'src str>, RawExpression<'src>>>),
Lit(Lit<'src>),
Matrix(Spanned<(u16, u16)>, Vec<RawExpression<'src>>),
List(Vec<RawExpression<'src>>),
}


@ -1,4 +1,4 @@
use crate::parser::ast::{Expr, File}; use crate::parser::ast::File;
use crate::parser::parse; use crate::parser::parse;
use crate::tokens::Token; use crate::tokens::Token;
use chumsky::input::Stream; use chumsky::input::Stream;


@ -14,6 +14,14 @@ pub enum Token<'a> {
Let, Let,
#[token("in")] #[token("in")]
In, In,
#[token("mat")]
Mat,
#[regex("[\\d]+x[\\d]+", |lex| {
let (x, y) = lex.slice().split_once('x').expect("shouldn't fail to split");
// TODO: handle overflows etc
(x.parse().expect("should only match valid u16s"), y.parse().expect("should only match valid u16s"))
})]
Dimensions((u16, u16)),
#[regex("[\\d]+", |lex| lex.slice())] #[regex("[\\d]+", |lex| lex.slice())]
Int(&'a str), Int(&'a str),
#[regex("[+-]?([\\d]+\\.[\\d]*|[\\d]*\\.[\\d]+)", |lex| lex.slice())] #[regex("[+-]?([\\d]+\\.[\\d]*|[\\d]*\\.[\\d]+)", |lex| lex.slice())]
@ -30,12 +38,11 @@ pub enum Token<'a> {
Mult, Mult,
#[token("/")] #[token("/")]
Div, Div,
#[regex("[a-zA-Z_]+[a-zA-Z0-9_\\-]*", |lex| lex.slice())] // TODO: figure out how to allow numbers in words?
#[regex("[a-zA-Z_]+[a-zA-Z_\\-\\d]*", |lex| lex.slice().trim())]
Word(&'a str), Word(&'a str),
#[regex("\\$[a-zA-Z0-9_\\-]+", |lex| &lex.slice()[1..])] #[regex("\\$[a-zA-Z0-9_\\-]+", |lex| &lex.slice()[1..])]
VarIdent(&'a str), VarIdent(&'a str),
#[token("@..")]
InputSpread,
#[regex("\\@[a-zA-Z0-9_\\-]+", |lex| &lex.slice()[1..])] #[regex("\\@[a-zA-Z0-9_\\-]+", |lex| &lex.slice()[1..])]
InputIdent(&'a str), InputIdent(&'a str),
#[token(",")] #[token(",")]
@ -55,7 +62,7 @@ pub enum Token<'a> {
#[token(":")] #[token(":")]
Colon, Colon,
#[token(";")] #[token(";")]
SemiColon, Semicolon,
#[token("[")] #[token("[")]
BracketOpen, BracketOpen,
#[token("]")] #[token("]")]


@ -34,7 +34,7 @@ lexer_test! {
lexer_test! { lexer_test! {
test_lex_subgroup, test_lex_subgroup,
"subgroup(first, second) = a | b { in1: $first } | c { in1: $second }", "subgroup(first, second) = a | b [ $first ] | c [ $second ]",
[ [
Token::Word("subgroup"), Token::Word("subgroup"),
Token::ParenOpen, Token::ParenOpen,
@ -46,18 +46,14 @@ lexer_test! {
Token::Word("a"), Token::Word("a"),
Token::Pipe, Token::Pipe,
Token::Word("b"), Token::Word("b"),
Token::BraceOpen, Token::BracketOpen,
Token::Word("in1"),
Token::Colon,
Token::VarIdent("first"), Token::VarIdent("first"),
Token::BraceClose, Token::BracketClose,
Token::Pipe, Token::Pipe,
Token::Word("c"), Token::Word("c"),
Token::BraceOpen, Token::BracketOpen,
Token::Word("in1"),
Token::Colon,
Token::VarIdent("second"), Token::VarIdent("second"),
Token::BraceClose Token::BracketClose
] ]
} }


@ -1,18 +1,43 @@
{ {
"nodes": { "nodes": {
"devenv": { "cachix": {
"inputs": { "inputs": {
"flake-compat": "flake-compat", "devenv": "devenv_2",
"nix": "nix", "flake-compat": "flake-compat_2",
"nixpkgs": "nixpkgs", "nixpkgs": [
"devenv",
"nixpkgs"
],
"pre-commit-hooks": "pre-commit-hooks" "pre-commit-hooks": "pre-commit-hooks"
}, },
"locked": { "locked": {
"lastModified": 1704835383, "lastModified": 1710475558,
"narHash": "sha256-SoC0rYR9iHW0dVOEmxNEfa8vk9dTK86P5iXTgHafmwM=", "narHash": "sha256-egKrPCKjy/cE+NqCj4hg2fNX/NwLCf0bRDInraYXDgs=",
"owner": "cachix",
"repo": "cachix",
"rev": "661bbb7f8b55722a0406456b15267b5426a3bda6",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "cachix",
"type": "github"
}
},
"devenv": {
"inputs": {
"cachix": "cachix",
"flake-compat": "flake-compat_4",
"nix": "nix_2",
"nixpkgs": "nixpkgs_2",
"pre-commit-hooks": "pre-commit-hooks_2"
},
"locked": {
"lastModified": 1712724616,
"narHash": "sha256-qs9uEbrOpp6oXcDOp5cpilyU52t78ZpEPATtaHRVLIU=",
"owner": "cachix", "owner": "cachix",
"repo": "devenv", "repo": "devenv",
"rev": "18ef9849d1ecac7a9a7920eb4f2e4adcf67a8c3a", "rev": "d1a11d14dbe96a03c7f9068e4d3af05f283734e0",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -21,17 +46,48 @@
"type": "github" "type": "github"
} }
}, },
"devenv_2": {
"inputs": {
"flake-compat": [
"devenv",
"cachix",
"flake-compat"
],
"nix": "nix",
"nixpkgs": "nixpkgs",
"poetry2nix": "poetry2nix",
"pre-commit-hooks": [
"devenv",
"cachix",
"pre-commit-hooks"
]
},
"locked": {
"lastModified": 1708704632,
"narHash": "sha256-w+dOIW60FKMaHI1q5714CSibk99JfYxm0CzTinYWr+Q=",
"owner": "cachix",
"repo": "devenv",
"rev": "2ee4450b0f4b95a1b90f2eb5ffea98b90e48c196",
"type": "github"
},
"original": {
"owner": "cachix",
"ref": "python-rewrite",
"repo": "devenv",
"type": "github"
}
},
"fenix": { "fenix": {
"inputs": { "inputs": {
"nixpkgs": "nixpkgs_2", "nixpkgs": "nixpkgs_3",
"rust-analyzer-src": "rust-analyzer-src" "rust-analyzer-src": "rust-analyzer-src"
}, },
"locked": { "locked": {
"lastModified": 1704867811, "lastModified": 1712730246,
"narHash": "sha256-pG4O1vPpNSMjz7p/5x+/OH4tXC0thzAPbJ55kI/W5dU=", "narHash": "sha256-iB8bFj+07RHpmt+XuGGvYQk2Iwm12u6+DklGq/+Tg5s=",
"owner": "nix-community", "owner": "nix-community",
"repo": "fenix", "repo": "fenix",
"rev": "93e89638c15512db65e931f26ce36edf8cfbb4a5", "rev": "d402ae4a5e5676722290470f61a5e8e3155b5487",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -56,16 +112,116 @@
"type": "github" "type": "github"
} }
}, },
"flake-compat_2": {
"flake": false,
"locked": {
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-compat_3": {
"flake": false,
"locked": {
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-compat_4": {
"flake": false,
"locked": {
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-compat_5": {
"flake": false,
"locked": {
"lastModified": 1673956053,
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-utils": { "flake-utils": {
"inputs": { "inputs": {
"systems": "systems" "systems": "systems"
}, },
"locked": { "locked": {
"lastModified": 1685518550, "lastModified": 1689068808,
"narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=", "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
"owner": "numtide", "owner": "numtide",
"repo": "flake-utils", "repo": "flake-utils",
"rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef", "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_3": {
"inputs": {
"systems": "systems_3"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -78,16 +234,17 @@
"inputs": { "inputs": {
"nixpkgs": [ "nixpkgs": [
"devenv", "devenv",
"cachix",
"pre-commit-hooks", "pre-commit-hooks",
"nixpkgs" "nixpkgs"
] ]
}, },
"locked": { "locked": {
"lastModified": 1660459072, "lastModified": 1703887061,
"narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", "narHash": "sha256-gGPa9qWNc6eCXT/+Z5/zMkyYOuRZqeFZBDbopNZQkuY=",
"owner": "hercules-ci", "owner": "hercules-ci",
"repo": "gitignore.nix", "repo": "gitignore.nix",
"rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", "rev": "43e1aa1308018f37118e34d3a9cb4f5e75dc11d5",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -96,53 +253,109 @@
"type": "github" "type": "github"
} }
}, },
"lowdown-src": { "gitignore_2": {
"flake": false, "inputs": {
"nixpkgs": [
"devenv",
"pre-commit-hooks",
"nixpkgs"
]
},
"locked": { "locked": {
"lastModified": 1633514407, "lastModified": 1709087332,
"narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", "narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=",
"owner": "kristapsdz", "owner": "hercules-ci",
"repo": "lowdown", "repo": "gitignore.nix",
"rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", "rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "kristapsdz", "owner": "hercules-ci",
"repo": "lowdown", "repo": "gitignore.nix",
"type": "github" "type": "github"
} }
}, },
"nix": { "nix": {
"inputs": { "inputs": {
"lowdown-src": "lowdown-src", "flake-compat": "flake-compat",
"nixpkgs": [ "nixpkgs": [
"devenv",
"cachix",
"devenv", "devenv",
"nixpkgs" "nixpkgs"
], ],
"nixpkgs-regression": "nixpkgs-regression" "nixpkgs-regression": "nixpkgs-regression"
}, },
"locked": { "locked": {
"lastModified": 1676545802, "lastModified": 1708577783,
"narHash": "sha256-EK4rZ+Hd5hsvXnzSzk2ikhStJnD63odF7SzsQ8CuSPU=", "narHash": "sha256-92xq7eXlxIT5zFNccLpjiP7sdQqQI30Gyui2p/PfKZM=",
"owner": "domenkozar", "owner": "domenkozar",
"repo": "nix", "repo": "nix",
"rev": "7c91803598ffbcfe4a55c44ac6d49b2cf07a527f", "rev": "ecd0af0c1f56de32cbad14daa1d82a132bf298f8",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "domenkozar", "owner": "domenkozar",
"ref": "relaxed-flakes", "ref": "devenv-2.21",
"repo": "nix",
"type": "github"
}
},
"nix-github-actions": {
"inputs": {
"nixpkgs": [
"devenv",
"cachix",
"devenv",
"poetry2nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1688870561,
"narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=",
"owner": "nix-community",
"repo": "nix-github-actions",
"rev": "165b1650b753316aa7f1787f3005a8d2da0f5301",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "nix-github-actions",
"type": "github"
}
},
"nix_2": {
"inputs": {
"flake-compat": "flake-compat_5",
"nixpkgs": [
"devenv",
"nixpkgs"
],
"nixpkgs-regression": "nixpkgs-regression_2"
},
"locked": {
"lastModified": 1710500156,
"narHash": "sha256-zvCqeUO2GLOm7jnU23G4EzTZR7eylcJN+HJ5svjmubI=",
"owner": "domenkozar",
"repo": "nix",
"rev": "c5bbf14ecbd692eeabf4184cc8d50f79c2446549",
"type": "github"
},
"original": {
"owner": "domenkozar",
"ref": "devenv-2.21",
"repo": "nix", "repo": "nix",
"type": "github" "type": "github"
} }
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1678875422, "lastModified": 1692808169,
"narHash": "sha256-T3o6NcQPwXjxJMn2shz86Chch4ljXgZn746c2caGxd8=", "narHash": "sha256-x9Opq06rIiwdwGeK2Ykj69dNc2IvUH1fY55Wm7atwrE=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "126f49a01de5b7e35a43fd43f891ecf6d3a51459", "rev": "9201b5ff357e781bf014d0330d18555695df7ba8",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -168,45 +381,93 @@
"type": "github" "type": "github"
} }
}, },
"nixpkgs-stable": { "nixpkgs-regression_2": {
"locked": { "locked": {
"lastModified": 1685801374, "lastModified": 1643052045,
"narHash": "sha256-otaSUoFEMM+LjBI1XL/xGB5ao6IwnZOXc47qhIgJe8U=", "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "c37ca420157f4abc31e26f436c1145f8951ff373", "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "NixOS", "owner": "NixOS",
"ref": "nixos-23.05", "repo": "nixpkgs",
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
"type": "github"
}
},
"nixpkgs-stable": {
"locked": {
"lastModified": 1704874635,
"narHash": "sha256-YWuCrtsty5vVZvu+7BchAxmcYzTMfolSPP5io8+WYCg=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "3dc440faeee9e889fe2d1b4d25ad0f430d449356",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.11",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-stable_2": {
"locked": {
"lastModified": 1710695816,
"narHash": "sha256-3Eh7fhEID17pv9ZxrPwCLfqXnYP006RKzSs0JptsN84=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "614b4613980a522ba49f0d194531beddbb7220d3",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.11",
"repo": "nixpkgs", "repo": "nixpkgs",
"type": "github" "type": "github"
} }
}, },
"nixpkgs_2": { "nixpkgs_2": {
"locked": { "locked": {
"lastModified": 1704538339, "lastModified": 1710236354,
"narHash": "sha256-1734d3mQuux9ySvwf6axRWZRBhtcZA9Q8eftD6EZg6U=", "narHash": "sha256-vWrciFdq49vve43g4pbi7NjmL4cwG1ifXnQx+dU3T5E=",
"owner": "nixos", "owner": "cachix",
"repo": "nixpkgs", "repo": "devenv-nixpkgs",
"rev": "46ae0210ce163b3cba6c7da08840c1d63de9c701", "rev": "829e73affeadfb4198a7105cbe3a03153d13edc9",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "nixos", "owner": "cachix",
"ref": "nixos-unstable", "ref": "rolling",
"repo": "nixpkgs", "repo": "devenv-nixpkgs",
"type": "github" "type": "github"
} }
}, },
"nixpkgs_3": { "nixpkgs_3": {
"locked": { "locked": {
"lastModified": 1704722960, "lastModified": 1712608508,
"narHash": "sha256-mKGJ3sPsT6//s+Knglai5YflJUF2DGj7Ai6Ynopz0kI=", "narHash": "sha256-vMZ5603yU0wxgyQeHJryOI+O61yrX2AHwY6LOFyV1gM=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "4cba8b53da471aea2ab2b0c1f30a81e7c451f4b6",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_4": {
"locked": {
"lastModified": 1712608508,
"narHash": "sha256-vMZ5603yU0wxgyQeHJryOI+O61yrX2AHwY6LOFyV1gM=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "317484b1ead87b9c1b8ac5261a8d2dd748a0492d", "rev": "4cba8b53da471aea2ab2b0c1f30a81e7c451f4b6",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -216,26 +477,77 @@
"type": "github" "type": "github"
} }
}, },
"poetry2nix": {
"inputs": {
"flake-utils": "flake-utils",
"nix-github-actions": "nix-github-actions",
"nixpkgs": [
"devenv",
"cachix",
"devenv",
"nixpkgs"
]
},
"locked": {
"lastModified": 1692876271,
"narHash": "sha256-IXfZEkI0Mal5y1jr6IRWMqK8GW2/f28xJenZIPQqkY0=",
"owner": "nix-community",
"repo": "poetry2nix",
"rev": "d5006be9c2c2417dafb2e2e5034d83fabd207ee3",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "poetry2nix",
"type": "github"
}
},
"pre-commit-hooks": { "pre-commit-hooks": {
"inputs": { "inputs": {
"flake-compat": [ "flake-compat": "flake-compat_3",
"devenv", "flake-utils": "flake-utils_2",
"flake-compat"
],
"flake-utils": "flake-utils",
"gitignore": "gitignore", "gitignore": "gitignore",
"nixpkgs": [ "nixpkgs": [
"devenv", "devenv",
"cachix",
"nixpkgs" "nixpkgs"
], ],
"nixpkgs-stable": "nixpkgs-stable" "nixpkgs-stable": "nixpkgs-stable"
}, },
"locked": { "locked": {
"lastModified": 1704725188, "lastModified": 1708018599,
"narHash": "sha256-qq8NbkhRZF1vVYQFt1s8Mbgo8knj+83+QlL5LBnYGpI=", "narHash": "sha256-M+Ng6+SePmA8g06CmUZWi1AjG2tFBX9WCXElBHEKnyM=",
"owner": "cachix", "owner": "cachix",
"repo": "pre-commit-hooks.nix", "repo": "pre-commit-hooks.nix",
"rev": "ea96f0c05924341c551a797aaba8126334c505d2", "rev": "5df5a70ad7575f6601d91f0efec95dd9bc619431",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"type": "github"
}
},
"pre-commit-hooks_2": {
"inputs": {
"flake-compat": [
"devenv",
"flake-compat"
],
"flake-utils": "flake-utils_3",
"gitignore": "gitignore_2",
"nixpkgs": [
"devenv",
"nixpkgs"
],
"nixpkgs-stable": "nixpkgs-stable_2"
},
"locked": {
"lastModified": 1712055707,
"narHash": "sha256-4XLvuSIDZJGS17xEwSrNuJLL7UjDYKGJSbK1WWX2AK8=",
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"rev": "e35aed5fda3cc79f88ed7f1795021e559582093a",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -248,18 +560,18 @@
"inputs": { "inputs": {
"devenv": "devenv", "devenv": "devenv",
"fenix": "fenix", "fenix": "fenix",
"nixpkgs": "nixpkgs_3", "nixpkgs": "nixpkgs_4",
"systems": "systems_2" "systems": "systems_4"
} }
}, },
"rust-analyzer-src": { "rust-analyzer-src": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1704833483, "lastModified": 1712663608,
"narHash": "sha256-Ox01mpYmjapNYaqOu4fMS/4Ma9NLd2rVNz6d4rJmcf4=", "narHash": "sha256-tN9ZL6kGppmHg84lxlpAlaN+kXWNctKK7Yitq/iXDEw=",
"owner": "rust-lang", "owner": "rust-lang",
"repo": "rust-analyzer", "repo": "rust-analyzer",
"rev": "ae6e73772432cfe35bb0ff6de6fdcfa908642b67", "rev": "a5feb4f05f09adca661c869b1bf2324898cbaa43",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -298,6 +610,36 @@
"repo": "default", "repo": "default",
"type": "github" "type": "github"
} }
},
"systems_3": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_4": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
} }
}, },
"root": "root", "root": "root",


@ -11,37 +11,55 @@
extra-substituters = "https://devenv.cachix.org"; extra-substituters = "https://devenv.cachix.org";
}; };
outputs = { self, nixpkgs, devenv, systems, ... } @ inputs: outputs = {
let self,
nixpkgs,
devenv,
systems,
...
} @ inputs: let
forEachSystem = nixpkgs.lib.genAttrs (import systems); forEachSystem = nixpkgs.lib.genAttrs (import systems);
in in {
{ devShells =
devShells = forEachSystem forEachSystem
(system: (system: let
let
pkgs = nixpkgs.legacyPackages.${system}; pkgs = nixpkgs.legacyPackages.${system};
in in {
{
default = devenv.lib.mkShell { default = devenv.lib.mkShell {
inherit inputs pkgs; inherit inputs pkgs;
modules = [ modules = [
({pkgs, config, ...}: { ({
pkgs,
config,
...
}: {
languages.rust = { languages.rust = {
enable = true; enable = true;
channel = "nightly"; channel = "nightly";
components = [
"rustc"
"cargo"
"clippy"
"rustfmt"
"rust-src"
];
}; };
pre-commit.hooks = { pre-commit.hooks = {
clippy.enable = true; clippy.enable = false;
rustfmt.enable = true; rustfmt.enable = true;
}; };
packages = with pkgs; [ packages = with pkgs; [
just nushell just
nushell
ripgrep ripgrep
typst typst-lsp typst
typst-lsp
mold mold
cargo-nextest cargo-watch cargo-nextest
cargo-watch
rust-analyzer
]; ];
}) })
]; ];


@ -1,7 +1,4 @@
def blend1 = [ meow mew meow 5 3.14 "uwu" {
open "test.png", meow: test 24
open "test2.png" another: hi "hello",
] } "awa"
| blend multiply 0.6
def blend2 = open "test.png" | blend multiply 0.6 [ open test2.png ]