forked from katzen-cafe/iowo
Compare commits
108 commits
Author | SHA1 | Date | |
---|---|---|---|
e581a8e7cf | |||
d809d3b52d | |||
ef1a9f5029 | |||
662cb8ba0e | |||
fcf91f25e3 | |||
958857cb58 | |||
883b0c804e | |||
f7d05ead2c | |||
cee9b97dbf | |||
e5ccebe679 | |||
3164328568 | |||
c564d0f24c | |||
b8720b2df9 | |||
af6886214b | |||
ac75978c01 | |||
9b1f6a1dc1 | |||
fed8cf2466 | |||
91f766c18e | |||
becc4b4041 | |||
21bcf62ea5 | |||
34ddaacb58 | |||
ec2ff5778b | |||
a3ab844ba7 | |||
a693b57447 | |||
3412eb9395 | |||
ccc6d4f532 | |||
54401d2a21 | |||
18309ec919 | |||
0705702d4a | |||
31a044577a | |||
911339fc2a | |||
619b7acf94 | |||
b7bc0366c2 | |||
734a734f09 | |||
dddbcccf72 | |||
26996fbd00 | |||
d9a07c8898 | |||
db9228dec4 | |||
56ec11e143 | |||
1e9648966f | |||
a2695a2a11 | |||
dc44244e7b | |||
1e0741e600 | |||
3eee768ce1 | |||
eb7806572b | |||
1c6180aabc | |||
37651a83bc | |||
3e2c5946c8 | |||
1a533eb788 | |||
7bc603f7e7 | |||
d6bc644fb6 | |||
cfefab9fd0 | |||
0de076ace1 | |||
946ac879a7 | |||
f6da90a354 | |||
ed151c2e3c | |||
4bcaf945d7 | |||
29cdcfbe0c | |||
afd493be16 | |||
30f17773a8 | |||
db2643359c | |||
9af71ed3f4 | |||
8a541546d9 | |||
4df0118aa4 | |||
ba0da33509 | |||
9510d9254c | |||
e62b50a51a | |||
2bea3994c2 | |||
86b1481943 | |||
06c9094227 | |||
381ab45edc | |||
6d8b79e8f7 | |||
be637846b1 | |||
1711d17fa6 | |||
f7b61f9e0e | |||
2d59a7f560 | |||
9da157ff4a | |||
881a987b2f | |||
bfd4b3765f | |||
198c74c7ae | |||
8d7401531e | |||
b6e304fa78 | |||
ace69b0094 | |||
84448af714 | |||
ae60db7721 | |||
de008263ca | |||
ca84af4e1b | |||
ae86ae29ab | |||
02c5e9e159 | |||
0197df5ee2 | |||
919a3bb377 | |||
9ae8c2fbd3 | |||
9727ef82ca | |||
c31a158d9b | |||
aeeee54200 | |||
dc7d76dc26 | |||
e17fffb66b | |||
f59062cf88 | |||
384fef5a81 | |||
77bcb54b5e | |||
d87033d320 | |||
bf60bdd814 | |||
5368951254 | |||
a42ec014e5 | |||
01b1880089 | |||
56848a1b05 | |||
69f0baf425 | |||
98850ee1e9 |
93 changed files with 6457 additions and 270 deletions
1444
Cargo.lock
generated
1444
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
|
@ -3,13 +3,19 @@ members = [
|
||||||
"crates/app",
|
"crates/app",
|
||||||
"crates/eval",
|
"crates/eval",
|
||||||
"crates/ir",
|
"crates/ir",
|
||||||
"crates/prowocessing",
|
"crates/lang",
|
||||||
|
"crates/svg-filters",
|
||||||
|
"crates/prowocessing",
|
||||||
|
"crates/executor-poc",
|
||||||
|
"crates/pawarser",
|
||||||
|
"crates/json-pawarser",
|
||||||
]
|
]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
clap = { version = "4", features = ["derive"] }
|
clap = { version = "4", features = ["derive"] }
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
|
petgraph = "0.6.4"
|
||||||
|
|
||||||
# to enable all the lints below, this must be present in a workspace member's Cargo.toml:
|
# to enable all the lints below, this must be present in a workspace member's Cargo.toml:
|
||||||
# [lints]
|
# [lints]
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
name = "app"
|
name = "app"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
default-run = "app"
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
|
|
@ -5,7 +5,9 @@ use std::{
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
use super::error::Config;
|
use crate::error_reporting::{report_serde_json_err, report_serde_ron_err};
|
||||||
|
|
||||||
|
use super::error::{self, Config};
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
pub struct Configs {
|
pub struct Configs {
|
||||||
|
@ -40,14 +42,20 @@ pub(super) fn find_config_file() -> Result<PathBuf, Config> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Configs {
|
impl Configs {
|
||||||
pub fn read(p: PathBuf) -> Result<Self, Config> {
|
pub fn read(p: PathBuf) -> Result<Self, error::Config> {
|
||||||
match p
|
match p
|
||||||
.extension()
|
.extension()
|
||||||
.map(|v| v.to_str().expect("config path to be UTF-8"))
|
.map(|v| v.to_str().expect("config path to be UTF-8"))
|
||||||
{
|
{
|
||||||
Some("ron") => Ok(serde_json::from_str(&fs::read_to_string(p)?)?),
|
Some("ron") => {
|
||||||
Some("json") => Ok(ron::from_str(&fs::read_to_string(p)?)?),
|
let f = fs::read_to_string(p)?;
|
||||||
e => Err(Config::UnknownExtension(e.map(str::to_owned))),
|
ron::from_str(&f).or_else(|e| report_serde_ron_err(&f, &e))
|
||||||
|
}
|
||||||
|
Some("json") => {
|
||||||
|
let f = fs::read_to_string(p)?;
|
||||||
|
serde_json::from_str(&f).or_else(|e| report_serde_json_err(&f, &e))
|
||||||
|
}
|
||||||
|
e => Err(error::Config::UnknownExtension(e.map(str::to_owned))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -25,7 +25,7 @@ fn report_serde_err(src: &str, line: usize, col: usize, msg: String) -> ! {
|
||||||
.finish()
|
.finish()
|
||||||
.eprint(("test", Source::from(src)))
|
.eprint(("test", Source::from(src)))
|
||||||
.expect("writing error to stderr failed");
|
.expect("writing error to stderr failed");
|
||||||
process::exit(1);
|
process::exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Reconstruct a byte offset from the line + column numbers typical from serde crates
|
/// Reconstruct a byte offset from the line + column numbers typical from serde crates
|
||||||
|
|
13
crates/executor-poc/Cargo.toml
Normal file
13
crates/executor-poc/Cargo.toml
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
[package]
|
||||||
|
name = "executor-poc"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
image = "0.25.1"
|
||||||
|
indexmap = "2.2.6"
|
||||||
|
nalgebra = "0.33.0"
|
||||||
|
petgraph.workspace = true
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
128
crates/executor-poc/src/lib.rs
Normal file
128
crates/executor-poc/src/lib.rs
Normal file
|
@ -0,0 +1,128 @@
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
use instructions::Instruction;
|
||||||
|
use petgraph::graph::DiGraph;
|
||||||
|
use types::Type;
|
||||||
|
|
||||||
|
trait Node {
|
||||||
|
fn inputs() -> IndexMap<String, Type>;
|
||||||
|
fn outputs() -> IndexMap<String, Type>;
|
||||||
|
}
|
||||||
|
|
||||||
|
struct NodeGraph {
|
||||||
|
graph: DiGraph<Instruction, TypedEdge>,
|
||||||
|
}
|
||||||
|
|
||||||
|
struct TypedEdge {
|
||||||
|
from: String,
|
||||||
|
to: String,
|
||||||
|
typ: Type,
|
||||||
|
}
|
||||||
|
|
||||||
|
mod instructions {
|
||||||
|
//! This is the lowest level of the IR, the one the executor will use.
|
||||||
|
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use indexmap::{indexmap, IndexMap};
|
||||||
|
pub enum Instruction {
|
||||||
|
// File handling
|
||||||
|
LoadFile,
|
||||||
|
SaveFile,
|
||||||
|
|
||||||
|
ColorMatrix,
|
||||||
|
PosMatrix,
|
||||||
|
|
||||||
|
Blend,
|
||||||
|
SplitChannels,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Instruction {
|
||||||
|
fn inputs(&self) -> IndexMap<String, Type> {
|
||||||
|
match self {
|
||||||
|
Instruction::LoadFile => indexmap! {
|
||||||
|
"path" => Type::Path
|
||||||
|
},
|
||||||
|
Instruction::SaveFile => indexmap! {
|
||||||
|
"path" => Type::Path
|
||||||
|
},
|
||||||
|
|
||||||
|
Instruction::ColorMatrix => indexmap! {
|
||||||
|
"image" => Type::ImageData,
|
||||||
|
"matrix" => Type::Mat(4,5)
|
||||||
|
},
|
||||||
|
Instruction::PosMatrix => indexmap! {
|
||||||
|
"image" => Type::ImageData,
|
||||||
|
"matrix" => Type::Mat(2, 3),
|
||||||
|
},
|
||||||
|
|
||||||
|
Instruction::Blend => todo!(),
|
||||||
|
Instruction::SplitChannels => todo!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fn outputs(&self) -> IndexMap<String, Type> {
|
||||||
|
match self {
|
||||||
|
Instruction::LoadFile => indexmap! {
|
||||||
|
"image" => Type::ImageData
|
||||||
|
},
|
||||||
|
Instruction::SaveFile => indexmap! {},
|
||||||
|
|
||||||
|
Instruction::ColorMatrix => indexmap! {
|
||||||
|
"resut" => Type::ImageData
|
||||||
|
},
|
||||||
|
Instruction::PosMatrix => todo!(),
|
||||||
|
|
||||||
|
Instruction::Blend => todo!(),
|
||||||
|
Instruction::SplitChannels => todo!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
mod types {
|
||||||
|
pub enum Type {
|
||||||
|
// TODO: later do lower level type system for this stuff?
|
||||||
|
// Image(Size, PixelType),
|
||||||
|
// // image data for processing.
|
||||||
|
// // always PixelType::Rgba32F
|
||||||
|
// ImageData(Size),
|
||||||
|
// // stuff that's still to be generated, not sized and no pixeltype
|
||||||
|
// ProceduralImage,
|
||||||
|
ImageData,
|
||||||
|
Text,
|
||||||
|
Integer,
|
||||||
|
Float,
|
||||||
|
Double,
|
||||||
|
Path,
|
||||||
|
Bool,
|
||||||
|
Vec(
|
||||||
|
// length,
|
||||||
|
u8,
|
||||||
|
),
|
||||||
|
Mat(
|
||||||
|
// Rows
|
||||||
|
u8,
|
||||||
|
// Columns
|
||||||
|
u8,
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
// pub struct Size {
|
||||||
|
// width: u16,
|
||||||
|
// height: u16,
|
||||||
|
// }
|
||||||
|
|
||||||
|
// Pixel types. Taken from variants [here](https://docs.rs/image/latest/image/pub enum.DynamicImage.html).
|
||||||
|
// pub enum PixelType {
|
||||||
|
// Luma8,
|
||||||
|
// LumaA8,
|
||||||
|
// Rgb8,
|
||||||
|
// Rgba8,
|
||||||
|
// Luma16,
|
||||||
|
// LumaA16,
|
||||||
|
// Rgb16,
|
||||||
|
// Rgba16,
|
||||||
|
// Rgb32F,
|
||||||
|
// #[default]
|
||||||
|
// Rgba32F,
|
||||||
|
// }
|
||||||
|
}
|
13
crates/json-pawarser/Cargo.toml
Normal file
13
crates/json-pawarser/Cargo.toml
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
[package]
|
||||||
|
name = "json-pawarser"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
logos = "0.14.2"
|
||||||
|
enumset = "1.1.3"
|
||||||
|
rowan = "0.15.15"
|
||||||
|
pawarser = { path = "../pawarser" }
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
78
crates/json-pawarser/src/grammar.rs
Normal file
78
crates/json-pawarser/src/grammar.rs
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
use array::array;
|
||||||
|
use enumset::{enum_set, EnumSet};
|
||||||
|
use pawarser::parser::ParserBuilder;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
syntax_error::SyntaxError,
|
||||||
|
syntax_kind::{lex, SyntaxKind},
|
||||||
|
};
|
||||||
|
|
||||||
|
use self::object::object;
|
||||||
|
|
||||||
|
mod array;
|
||||||
|
mod object;
|
||||||
|
|
||||||
|
pub(crate) type Parser<'src> = pawarser::Parser<'src, SyntaxKind, SyntaxError>;
|
||||||
|
pub(crate) type CompletedMarker = pawarser::CompletedMarker<SyntaxKind, SyntaxError>;
|
||||||
|
|
||||||
|
const BASIC_VALUE_TOKENS: EnumSet<SyntaxKind> =
|
||||||
|
enum_set!(SyntaxKind::BOOL | SyntaxKind::NULL | SyntaxKind::NUMBER | SyntaxKind::STRING);
|
||||||
|
|
||||||
|
pub fn value(p: &mut Parser) -> bool {
|
||||||
|
if BASIC_VALUE_TOKENS.contains(p.current()) {
|
||||||
|
p.do_bump();
|
||||||
|
return true;
|
||||||
|
} else {
|
||||||
|
object(p).or_else(|| array(p)).is_some()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::{
|
||||||
|
test_utils::{check_parser, gen_checks},
|
||||||
|
value,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn value_lit() {
|
||||||
|
gen_checks! {value;
|
||||||
|
r#""helo world""# => r#"ROOT { STRING "\"helo world\""; }"#,
|
||||||
|
"42" => r#"ROOT { NUMBER "42"; }"#,
|
||||||
|
"null" => r#"ROOT { NULL "null"; }"#,
|
||||||
|
"true" => r#"ROOT { BOOL "true"; }"#,
|
||||||
|
"false" => r#"ROOT { BOOL "false"; }"#
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test_utils {
|
||||||
|
use pawarser::parser::ParserBuilder;
|
||||||
|
|
||||||
|
use crate::syntax_kind::{lex, SyntaxKind};
|
||||||
|
|
||||||
|
use super::Parser;
|
||||||
|
|
||||||
|
macro_rules! gen_checks {
|
||||||
|
($fn_to_test:ident; $($in:literal => $out:literal),+) => {
|
||||||
|
$(crate::grammar::test_utils::check_parser($in, |p| { $fn_to_test(p); }, $out);)+
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(super) use gen_checks;
|
||||||
|
|
||||||
|
pub(super) fn check_parser(input: &str, parser_fn: fn(&mut Parser), expected_output: &str) {
|
||||||
|
let toks = lex(input);
|
||||||
|
let mut p: Parser = ParserBuilder::new(toks)
|
||||||
|
.add_meaningless(SyntaxKind::WHITESPACE)
|
||||||
|
.add_meaningless(SyntaxKind::NEWLINE)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
parser_fn(&mut p);
|
||||||
|
|
||||||
|
let out = p.finish();
|
||||||
|
|
||||||
|
assert_eq!(format!("{out:?}").trim_end(), expected_output);
|
||||||
|
}
|
||||||
|
}
|
36
crates/json-pawarser/src/grammar/array.rs
Normal file
36
crates/json-pawarser/src/grammar/array.rs
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
use crate::{syntax_error::SyntaxError, syntax_kind::SyntaxKind};
|
||||||
|
|
||||||
|
use super::{value, CompletedMarker, Parser};
|
||||||
|
|
||||||
|
pub(super) fn array(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
let array_start = p.start("array");
|
||||||
|
|
||||||
|
if !p.eat(SyntaxKind::BRACKET_OPEN) {
|
||||||
|
array_start.abandon(p);
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let el = p.start("arr_el");
|
||||||
|
value(p);
|
||||||
|
el.complete(p, SyntaxKind::ELEMENT);
|
||||||
|
|
||||||
|
while p.at(SyntaxKind::COMMA) {
|
||||||
|
let potential_trailing_comma = p.start("potential_trailing_comma");
|
||||||
|
|
||||||
|
p.eat(SyntaxKind::COMMA);
|
||||||
|
let maybe_el = p.start("arr_el");
|
||||||
|
if !value(p) {
|
||||||
|
maybe_el.abandon(p);
|
||||||
|
potential_trailing_comma.complete(p, SyntaxKind::TRAILING_COMMA);
|
||||||
|
} else {
|
||||||
|
maybe_el.complete(p, SyntaxKind::ELEMENT);
|
||||||
|
potential_trailing_comma.abandon(p);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(if !p.eat(SyntaxKind::BRACKET_CLOSE) {
|
||||||
|
array_start.error(p, SyntaxError::UnclosedArray)
|
||||||
|
} else {
|
||||||
|
array_start.complete(p, SyntaxKind::ARRAY)
|
||||||
|
})
|
||||||
|
}
|
92
crates/json-pawarser/src/grammar/object.rs
Normal file
92
crates/json-pawarser/src/grammar/object.rs
Normal file
|
@ -0,0 +1,92 @@
|
||||||
|
use crate::{grammar::value, syntax_error::SyntaxError, syntax_kind::SyntaxKind};
|
||||||
|
|
||||||
|
use super::{CompletedMarker, Parser, BASIC_VALUE_TOKENS};
|
||||||
|
|
||||||
|
pub(super) fn object(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
let obj_start = p.start("object");
|
||||||
|
|
||||||
|
if !p.eat(SyntaxKind::BRACE_OPEN) {
|
||||||
|
obj_start.abandon(p);
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
member(p);
|
||||||
|
while p.at(SyntaxKind::COMMA) {
|
||||||
|
// not always an error, later configurable
|
||||||
|
let potential_trailing_comma = p.start("potential_trailing_comma");
|
||||||
|
p.eat(SyntaxKind::COMMA);
|
||||||
|
|
||||||
|
if member(p).is_none() {
|
||||||
|
potential_trailing_comma.complete(p, SyntaxKind::TRAILING_COMMA);
|
||||||
|
} else {
|
||||||
|
potential_trailing_comma.abandon(p);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(if p.eat(SyntaxKind::BRACE_CLOSE) {
|
||||||
|
obj_start.complete(p, SyntaxKind::OBJECT)
|
||||||
|
} else {
|
||||||
|
obj_start.error(p, SyntaxError::UnclosedObject)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn member(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
let member_start = p.start("member");
|
||||||
|
|
||||||
|
if p.at(SyntaxKind::BRACE_CLOSE) {
|
||||||
|
member_start.abandon(p);
|
||||||
|
return None;
|
||||||
|
} else if p.at(SyntaxKind::STRING) {
|
||||||
|
let member_name_start = p.start("member_name");
|
||||||
|
p.eat(SyntaxKind::STRING);
|
||||||
|
member_name_start.complete(p, SyntaxKind::MEMBER_NAME);
|
||||||
|
} else {
|
||||||
|
return todo!("handle other tokens: {:?}", p.current());
|
||||||
|
}
|
||||||
|
|
||||||
|
if !p.eat(SyntaxKind::COLON) {
|
||||||
|
todo!("handle wrong tokens")
|
||||||
|
}
|
||||||
|
|
||||||
|
let member_value_start = p.start("member_value_start");
|
||||||
|
if value(p) {
|
||||||
|
member_value_start.complete(p, SyntaxKind::MEMBER_VALUE);
|
||||||
|
Some(member_start.complete(p, SyntaxKind::MEMBER))
|
||||||
|
} else {
|
||||||
|
member_value_start.abandon(p);
|
||||||
|
let e = member_start.error(p, SyntaxError::MemberMissingValue);
|
||||||
|
Some(
|
||||||
|
e.precede(p, "member but failed already")
|
||||||
|
.complete(p, SyntaxKind::MEMBER),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use crate::grammar::{
|
||||||
|
object::{member, object},
|
||||||
|
test_utils::gen_checks,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn object_basic() {
|
||||||
|
gen_checks! {object;
|
||||||
|
r#"{"a": "b"}"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { STRING "\"b\""; } } BRACE_CLOSE "}"; } }"#,
|
||||||
|
r#"{"a": 42}"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { NUMBER "42"; } } BRACE_CLOSE "}"; } }"#,
|
||||||
|
r#"{"a": "b""# => r#"ROOT { PARSE_ERR: UnclosedObject { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { STRING "\"b\""; } } } }"#,
|
||||||
|
r#"{"a": }"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { PARSE_ERR: MemberMissingValue { MEMBER_NAME { STRING "\"a\""; } COLON ":"; } } WHITESPACE " "; BRACE_CLOSE "}"; } }"#,
|
||||||
|
r#"{"a":"# => r#"ROOT { PARSE_ERR: UnclosedObject { BRACE_OPEN "{"; MEMBER { PARSE_ERR: MemberMissingValue { MEMBER_NAME { STRING "\"a\""; } COLON ":"; } } } }"#,
|
||||||
|
r#"{"a":true,}"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; MEMBER_VALUE { BOOL "true"; } } TRAILING_COMMA { COMMA ","; } BRACE_CLOSE "}"; } }"#
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn member_basic() {
|
||||||
|
gen_checks! {member;
|
||||||
|
r#""a": "b""# => r#"ROOT { MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { STRING "\"b\""; } } }"#,
|
||||||
|
r#""a": 42"# => r#"ROOT { MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { NUMBER "42"; } } }"#,
|
||||||
|
r#""a":"# => r#"ROOT { MEMBER { PARSE_ERR: MemberMissingValue { MEMBER_NAME { STRING "\"a\""; } COLON ":"; } } }"#
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
3
crates/json-pawarser/src/lib.rs
Normal file
3
crates/json-pawarser/src/lib.rs
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
mod grammar;
|
||||||
|
mod syntax_error;
|
||||||
|
mod syntax_kind;
|
11
crates/json-pawarser/src/syntax_error.rs
Normal file
11
crates/json-pawarser/src/syntax_error.rs
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
use crate::syntax_kind::SyntaxKind;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
|
pub enum SyntaxError {
|
||||||
|
UnclosedObject,
|
||||||
|
UnclosedArray,
|
||||||
|
DisallowedKeyType(SyntaxKind),
|
||||||
|
MemberMissingValue,
|
||||||
|
UnexpectedTrailingComma,
|
||||||
|
}
|
||||||
|
impl pawarser::parser::SyntaxError for SyntaxError {}
|
117
crates/json-pawarser/src/syntax_kind.rs
Normal file
117
crates/json-pawarser/src/syntax_kind.rs
Normal file
|
@ -0,0 +1,117 @@
|
||||||
|
use logos::Logos;
|
||||||
|
|
||||||
|
pub fn lex(src: &str) -> Vec<(SyntaxKind, &str)> {
|
||||||
|
let mut lex = SyntaxKind::lexer(src);
|
||||||
|
let mut r = Vec::new();
|
||||||
|
|
||||||
|
while let Some(tok_res) = lex.next() {
|
||||||
|
r.push((tok_res.unwrap_or(SyntaxKind::LEX_ERR), lex.slice()))
|
||||||
|
}
|
||||||
|
|
||||||
|
r
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(enumset::EnumSetType, Debug, Logos, PartialEq, Eq, Clone, Copy, Hash)]
|
||||||
|
#[repr(u16)]
|
||||||
|
#[enumset(no_super_impls)]
|
||||||
|
#[allow(non_camel_case_types)]
|
||||||
|
pub enum SyntaxKind {
|
||||||
|
OBJECT,
|
||||||
|
MEMBER,
|
||||||
|
MEMBER_NAME,
|
||||||
|
MEMBER_VALUE,
|
||||||
|
|
||||||
|
ARRAY,
|
||||||
|
ELEMENT,
|
||||||
|
|
||||||
|
// SyntaxKinds for future json5/etc support
|
||||||
|
TRAILING_COMMA,
|
||||||
|
|
||||||
|
// Tokens
|
||||||
|
// Regexes adapted from [the logos handbook](https://logos.maciej.codes/examples/json_borrowed.html)
|
||||||
|
#[token("true")]
|
||||||
|
#[token("false")]
|
||||||
|
BOOL,
|
||||||
|
#[token("{")]
|
||||||
|
BRACE_OPEN,
|
||||||
|
#[token("}")]
|
||||||
|
BRACE_CLOSE,
|
||||||
|
#[token("[")]
|
||||||
|
BRACKET_OPEN,
|
||||||
|
#[token("]")]
|
||||||
|
BRACKET_CLOSE,
|
||||||
|
#[token(":")]
|
||||||
|
COLON,
|
||||||
|
#[token(",")]
|
||||||
|
COMMA,
|
||||||
|
#[token("null")]
|
||||||
|
NULL,
|
||||||
|
#[regex(r"-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?")]
|
||||||
|
NUMBER,
|
||||||
|
#[regex(r#""([^"\\]|\\["\\bnfrt]|u[a-fA-F0-9]{4})*""#)]
|
||||||
|
STRING,
|
||||||
|
|
||||||
|
// Whitespace tokens
|
||||||
|
#[regex("[ \\t\\f]+")]
|
||||||
|
WHITESPACE,
|
||||||
|
#[token("\n")]
|
||||||
|
NEWLINE,
|
||||||
|
|
||||||
|
// Error SyntaxKinds
|
||||||
|
LEX_ERR,
|
||||||
|
PARSE_ERR,
|
||||||
|
|
||||||
|
// Meta SyntaxKinds
|
||||||
|
ROOT,
|
||||||
|
EOF,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl pawarser::parser::SyntaxElement for SyntaxKind {
|
||||||
|
const SYNTAX_EOF: Self = Self::EOF;
|
||||||
|
|
||||||
|
const SYNTAX_ERROR: Self = Self::PARSE_ERR;
|
||||||
|
const SYNTAX_ROOT: Self = Self::ROOT;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<SyntaxKind> for rowan::SyntaxKind {
|
||||||
|
fn from(kind: SyntaxKind) -> Self {
|
||||||
|
Self(kind as u16)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<rowan::SyntaxKind> for SyntaxKind {
|
||||||
|
fn from(raw: rowan::SyntaxKind) -> Self {
|
||||||
|
assert!(raw.0 <= SyntaxKind::EOF as u16);
|
||||||
|
#[allow(unsafe_code, reason = "The transmute is necessary here")]
|
||||||
|
unsafe {
|
||||||
|
std::mem::transmute::<u16, SyntaxKind>(raw.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use crate::syntax_kind::{lex, SyntaxKind};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn simple_object() {
|
||||||
|
const TEST_DATA: &str = r#"{"hello_world": "meow", "some_num":7.42}"#;
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
dbg!(lex(TEST_DATA)),
|
||||||
|
vec![
|
||||||
|
(SyntaxKind::BRACE_OPEN, "{"),
|
||||||
|
(SyntaxKind::STRING, "\"hello_world\""),
|
||||||
|
(SyntaxKind::COLON, ":"),
|
||||||
|
(SyntaxKind::WHITESPACE, " "),
|
||||||
|
(SyntaxKind::STRING, "\"meow\""),
|
||||||
|
(SyntaxKind::COMMA, ","),
|
||||||
|
(SyntaxKind::WHITESPACE, " "),
|
||||||
|
(SyntaxKind::STRING, "\"some_num\""),
|
||||||
|
(SyntaxKind::COLON, ":"),
|
||||||
|
(SyntaxKind::NUMBER, "7.42"),
|
||||||
|
(SyntaxKind::BRACE_CLOSE, "}")
|
||||||
|
]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
25
crates/lang/Cargo.toml
Normal file
25
crates/lang/Cargo.toml
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
[package]
|
||||||
|
name = "lang"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
logos = "0.14"
|
||||||
|
petgraph = { workspace = true}
|
||||||
|
indexmap = "2.2.6"
|
||||||
|
clap = { version = "4", features = ["derive"] }
|
||||||
|
ariadne = "0.4.0"
|
||||||
|
ego-tree = "0.6.2"
|
||||||
|
rowan = "0.15.15"
|
||||||
|
drop_bomb = "0.1.5"
|
||||||
|
enumset = "1.1.3"
|
||||||
|
indoc = "2"
|
||||||
|
dashmap = "5.5.3"
|
||||||
|
crossbeam = "0.8.4"
|
||||||
|
owo-colors = {version = "4", features = ["supports-colors"]}
|
||||||
|
strip-ansi-escapes = "0.2.0"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
80
crates/lang/src/ast.rs
Normal file
80
crates/lang/src/ast.rs
Normal file
|
@ -0,0 +1,80 @@
|
||||||
|
use crate::lst_parser::syntax_kind::SyntaxKind::*;
|
||||||
|
use crate::SyntaxNode;
|
||||||
|
use rowan::Language;
|
||||||
|
|
||||||
|
// Heavily modified version of https://github.com/rust-analyzer/rowan/blob/e2d2e93e16c5104b136d0bc738a0d48346922200/examples/s_expressions.rs#L250-L266
|
||||||
|
macro_rules! ast_nodes {
|
||||||
|
($($ast:ident, $kind:ident);+) => {
|
||||||
|
$(
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||||
|
#[repr(transparent)]
|
||||||
|
pub struct $ast(SyntaxNode);
|
||||||
|
impl rowan::ast::AstNode for $ast {
|
||||||
|
type Language = crate::Lang;
|
||||||
|
|
||||||
|
fn can_cast(kind: <Self::Language as Language>::Kind) -> bool {
|
||||||
|
kind == $kind
|
||||||
|
}
|
||||||
|
|
||||||
|
fn cast(node: SyntaxNode) -> Option<Self> {
|
||||||
|
if node.kind() == $kind {
|
||||||
|
Some(Self(node))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn syntax(&self) -> &SyntaxNode {
|
||||||
|
&self.0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)+
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
ast_nodes!(
|
||||||
|
Def, DEF;
|
||||||
|
DefName, DEF_NAME;
|
||||||
|
DefBody, DEF_BODY;
|
||||||
|
|
||||||
|
Mod, MODULE;
|
||||||
|
ModName, MODULE_NAME;
|
||||||
|
ModBody, MODULE_BODY;
|
||||||
|
|
||||||
|
Use, USE;
|
||||||
|
UsePat, USE_PAT;
|
||||||
|
PatItem, PAT_ITEM;
|
||||||
|
PatGlob, PAT_GLOB;
|
||||||
|
PatGroup, PAT_GROUP;
|
||||||
|
|
||||||
|
Literal, LITERAL;
|
||||||
|
IntLit, INT_NUM;
|
||||||
|
FloatLit, FLOAT_NUM;
|
||||||
|
StringLit, STRING;
|
||||||
|
|
||||||
|
Matrix, MATRIX;
|
||||||
|
MatrixRow, MAT_ROW;
|
||||||
|
Vector, VEC;
|
||||||
|
List, LIST;
|
||||||
|
CollectionItem, COLLECTION_ITEM;
|
||||||
|
|
||||||
|
ParenthesizedExpr, PARENTHESIZED_EXPR;
|
||||||
|
Expression, EXPR;
|
||||||
|
|
||||||
|
Pipeline, PIPELINE;
|
||||||
|
|
||||||
|
Instruction, INSTR;
|
||||||
|
InstructionName, INSTR_NAME;
|
||||||
|
InstructionParams, INSTR_PARAMS;
|
||||||
|
|
||||||
|
AttributeSet, ATTR_SET;
|
||||||
|
Attribute, ATTR;
|
||||||
|
AttributeName, ATTR_NAME;
|
||||||
|
AttributeValue, ATTR_VALUE;
|
||||||
|
|
||||||
|
ParseError, PARSE_ERR;
|
||||||
|
LexError, LEX_ERR;
|
||||||
|
|
||||||
|
Root, ROOT;
|
||||||
|
Eof, EOF
|
||||||
|
);
|
25
crates/lang/src/lib.rs
Normal file
25
crates/lang/src/lib.rs
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
#![feature(type_alias_impl_trait, lint_reasons, box_into_inner)]
|
||||||
|
|
||||||
|
use crate::lst_parser::syntax_kind::SyntaxKind;
|
||||||
|
|
||||||
|
pub mod ast;
|
||||||
|
pub mod lst_parser;
|
||||||
|
pub mod world;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
pub enum Lang {}
|
||||||
|
impl rowan::Language for Lang {
|
||||||
|
type Kind = SyntaxKind;
|
||||||
|
#[allow(unsafe_code)]
|
||||||
|
fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind {
|
||||||
|
assert!(raw.0 <= SyntaxKind::ROOT as u16);
|
||||||
|
unsafe { std::mem::transmute::<u16, SyntaxKind>(raw.0) }
|
||||||
|
}
|
||||||
|
fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind {
|
||||||
|
kind.into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type SyntaxNode = rowan::SyntaxNode<Lang>;
|
||||||
|
pub type SyntaxToken = rowan::SyntaxNode<Lang>;
|
||||||
|
pub type SyntaxElement = rowan::NodeOrToken<SyntaxNode, SyntaxToken>;
|
169
crates/lang/src/lst_parser.rs
Normal file
169
crates/lang/src/lst_parser.rs
Normal file
|
@ -0,0 +1,169 @@
|
||||||
|
use drop_bomb::DropBomb;
|
||||||
|
|
||||||
|
use self::{
|
||||||
|
error::SyntaxError,
|
||||||
|
events::{Event, NodeKind},
|
||||||
|
input::Input,
|
||||||
|
syntax_kind::SyntaxKind,
|
||||||
|
};
|
||||||
|
use std::cell::Cell;
|
||||||
|
|
||||||
|
pub mod syntax_kind;
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
||||||
|
|
||||||
|
pub mod error;
|
||||||
|
pub mod events;
|
||||||
|
pub mod grammar;
|
||||||
|
pub mod input;
|
||||||
|
pub mod output;
|
||||||
|
|
||||||
|
const PARSER_STEP_LIMIT: u32 = 4096;
|
||||||
|
|
||||||
|
pub struct Parser<'src, 'toks> {
|
||||||
|
input: Input<'src, 'toks>,
|
||||||
|
pos: usize,
|
||||||
|
events: Vec<Event>,
|
||||||
|
steps: Cell<u32>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'src, 'toks> Parser<'src, 'toks> {
|
||||||
|
pub fn new(input: Input<'src, 'toks>) -> Self {
|
||||||
|
Self {
|
||||||
|
input,
|
||||||
|
pos: 0,
|
||||||
|
events: Vec::new(),
|
||||||
|
steps: Cell::new(0),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn finish(self) -> Vec<Event> {
|
||||||
|
self.events
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
|
||||||
|
self.step();
|
||||||
|
self.input.kind(self.pos + n)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn eat_succeeding_ws(&mut self) {
|
||||||
|
self.push_ev(Event::Eat {
|
||||||
|
count: self.input.meaningless_tail_len(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn current(&self) -> SyntaxKind {
|
||||||
|
self.step();
|
||||||
|
self.input.kind(self.pos)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn start(&mut self, name: &str) -> Marker {
|
||||||
|
let pos = self.events.len();
|
||||||
|
self.push_ev(Event::tombstone());
|
||||||
|
Marker::new(pos, name)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn at(&self, kind: SyntaxKind) -> bool {
|
||||||
|
self.nth_at(0, kind)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool {
|
||||||
|
if !self.at(kind) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
self.do_bump();
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool {
|
||||||
|
self.nth(n) == kind
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_bump(&mut self) {
|
||||||
|
self.push_ev(Event::Eat {
|
||||||
|
count: self.input.preceding_meaningless(self.pos),
|
||||||
|
});
|
||||||
|
self.pos += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
fn push_ev(&mut self, event: Event) {
|
||||||
|
self.events.push(event)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn step(&self) {
|
||||||
|
let steps = self.steps.get();
|
||||||
|
assert!(steps <= PARSER_STEP_LIMIT, "the parser seems stuck...");
|
||||||
|
self.steps.set(steps + 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) struct Marker {
|
||||||
|
pos: usize,
|
||||||
|
bomb: DropBomb,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Marker {
|
||||||
|
pub(crate) fn new(pos: usize, name: &str) -> Self {
|
||||||
|
Self {
|
||||||
|
pos,
|
||||||
|
bomb: DropBomb::new(format!("Marker {name} must be completed or abandoned")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn close_node(mut self, p: &mut Parser, kind: NodeKind) -> CompletedMarker {
|
||||||
|
self.bomb.defuse();
|
||||||
|
match &mut p.events[self.pos] {
|
||||||
|
Event::Start { kind: slot, .. } => *slot = kind.clone(),
|
||||||
|
_ => unreachable!(),
|
||||||
|
}
|
||||||
|
|
||||||
|
p.push_ev(Event::Finish);
|
||||||
|
|
||||||
|
CompletedMarker {
|
||||||
|
pos: self.pos,
|
||||||
|
kind,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn complete(self, p: &mut Parser<'_, '_>, kind: SyntaxKind) -> CompletedMarker {
|
||||||
|
self.close_node(p, NodeKind::Syntax(kind))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn error(self, p: &mut Parser, kind: SyntaxError) -> CompletedMarker {
|
||||||
|
self.close_node(p, NodeKind::Error(kind))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn abandon(mut self, p: &mut Parser<'_, '_>) {
|
||||||
|
self.bomb.defuse();
|
||||||
|
if self.pos == p.events.len() - 1 {
|
||||||
|
match p.events.pop() {
|
||||||
|
Some(Event::Start {
|
||||||
|
kind: NodeKind::Syntax(SyntaxKind::TOMBSTONE),
|
||||||
|
forward_parent: None,
|
||||||
|
}) => (),
|
||||||
|
_ => unreachable!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) struct CompletedMarker {
|
||||||
|
pos: usize,
|
||||||
|
kind: NodeKind,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CompletedMarker {
|
||||||
|
pub(crate) fn precede(self, p: &mut Parser<'_, '_>, name: &str) -> Marker {
|
||||||
|
let new_pos = p.start(name);
|
||||||
|
|
||||||
|
match &mut p.events[self.pos] {
|
||||||
|
Event::Start { forward_parent, .. } => {
|
||||||
|
*forward_parent = Some(new_pos.pos - self.pos);
|
||||||
|
}
|
||||||
|
_ => unreachable!(),
|
||||||
|
}
|
||||||
|
|
||||||
|
new_pos
|
||||||
|
}
|
||||||
|
}
|
15
crates/lang/src/lst_parser/error.rs
Normal file
15
crates/lang/src/lst_parser/error.rs
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
use crate::lst_parser::syntax_kind::SyntaxKind;
|
||||||
|
|
||||||
|
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||||
|
pub enum SyntaxError {
|
||||||
|
Expected(Vec<SyntaxKind>),
|
||||||
|
PipelineNeedsSink,
|
||||||
|
// if there was two space seperated items in a list
|
||||||
|
SpaceSepInList,
|
||||||
|
SemicolonInList,
|
||||||
|
CommaInMatOrVec,
|
||||||
|
UnterminatedTopLevelItem,
|
||||||
|
UnclosedModuleBody,
|
||||||
|
UnfinishedPath,
|
||||||
|
PathSepContainsSemicolon,
|
||||||
|
}
|
70
crates/lang/src/lst_parser/events.rs
Normal file
70
crates/lang/src/lst_parser/events.rs
Normal file
|
@ -0,0 +1,70 @@
|
||||||
|
use crate::lst_parser::syntax_kind::SyntaxKind;
|
||||||
|
|
||||||
|
use super::error::SyntaxError;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum Event {
|
||||||
|
Start {
|
||||||
|
kind: NodeKind,
|
||||||
|
forward_parent: Option<usize>,
|
||||||
|
},
|
||||||
|
Finish,
|
||||||
|
Eat {
|
||||||
|
count: usize,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum NodeKind {
|
||||||
|
Syntax(SyntaxKind),
|
||||||
|
Error(SyntaxError),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl NodeKind {
|
||||||
|
pub fn is_syntax(&self) -> bool {
|
||||||
|
matches!(self, Self::Syntax(_))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_error(&self) -> bool {
|
||||||
|
matches!(self, Self::Error(_))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<SyntaxKind> for NodeKind {
|
||||||
|
fn from(value: SyntaxKind) -> Self {
|
||||||
|
NodeKind::Syntax(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<SyntaxError> for NodeKind {
|
||||||
|
fn from(value: SyntaxError) -> Self {
|
||||||
|
NodeKind::Error(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialEq<SyntaxKind> for NodeKind {
|
||||||
|
fn eq(&self, other: &SyntaxKind) -> bool {
|
||||||
|
match self {
|
||||||
|
NodeKind::Syntax(s) => s == other,
|
||||||
|
NodeKind::Error(_) => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialEq<SyntaxError> for NodeKind {
|
||||||
|
fn eq(&self, other: &SyntaxError) -> bool {
|
||||||
|
match self {
|
||||||
|
NodeKind::Syntax(_) => false,
|
||||||
|
NodeKind::Error(e) => e == other,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Event {
|
||||||
|
pub(crate) fn tombstone() -> Self {
|
||||||
|
Self::Start {
|
||||||
|
kind: SyntaxKind::TOMBSTONE.into(),
|
||||||
|
forward_parent: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
38
crates/lang/src/lst_parser/grammar.rs
Normal file
38
crates/lang/src/lst_parser/grammar.rs
Normal file
|
@ -0,0 +1,38 @@
|
||||||
|
use std::fmt::Debug;
|
||||||
|
|
||||||
|
use crate::lst_parser::syntax_kind::SyntaxKind::*;
|
||||||
|
|
||||||
|
use self::module::{mod_body, top_level_item};
|
||||||
|
|
||||||
|
use super::{
|
||||||
|
input::Input,
|
||||||
|
output::Output,
|
||||||
|
syntax_kind::{self, lex},
|
||||||
|
Parser,
|
||||||
|
};
|
||||||
|
|
||||||
|
mod expression;
|
||||||
|
mod module;
|
||||||
|
|
||||||
|
pub fn source_file(p: &mut Parser) {
|
||||||
|
let root = p.start("root");
|
||||||
|
|
||||||
|
mod_body(p);
|
||||||
|
// expression::expression(p, false);
|
||||||
|
p.eat_succeeding_ws();
|
||||||
|
|
||||||
|
root.complete(p, ROOT);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn check_parser(input: &str, parser_fn: fn(&mut Parser), output: &str) {
|
||||||
|
let toks = lex(input);
|
||||||
|
let mut parser = Parser::new(Input::new(&toks));
|
||||||
|
|
||||||
|
parser_fn(&mut parser);
|
||||||
|
|
||||||
|
let p_out = dbg!(parser.finish());
|
||||||
|
let o = Output::from_parser_output(toks, p_out);
|
||||||
|
|
||||||
|
let s = strip_ansi_escapes::strip_str(format!("{o:?}"));
|
||||||
|
assert_eq!(&s, output);
|
||||||
|
}
|
44
crates/lang/src/lst_parser/grammar/expression.rs
Normal file
44
crates/lang/src/lst_parser/grammar/expression.rs
Normal file
|
@ -0,0 +1,44 @@
|
||||||
|
use crate::lst_parser::{error::SyntaxError, syntax_kind::SyntaxKind::*, CompletedMarker, Parser};
|
||||||
|
|
||||||
|
use self::{collection::collection, instruction::instr, lit::literal, pipeline::PIPES};
|
||||||
|
|
||||||
|
mod collection;
|
||||||
|
mod instruction;
|
||||||
|
mod lit;
|
||||||
|
mod pipeline;
|
||||||
|
|
||||||
|
pub fn expression(p: &mut Parser, in_pipe: bool) -> Option<CompletedMarker> {
|
||||||
|
let expr = p.start("expr");
|
||||||
|
|
||||||
|
if atom(p).or_else(|| instr(p)).is_none() {
|
||||||
|
expr.abandon(p);
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let r = expr.complete(p, EXPR);
|
||||||
|
|
||||||
|
if PIPES.contains(p.current()) && !in_pipe {
|
||||||
|
pipeline::pipeline(p, r)
|
||||||
|
} else {
|
||||||
|
Some(r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn atom(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
literal(p)
|
||||||
|
.or_else(|| collection(p))
|
||||||
|
.or_else(|| parenthesized_expr(p))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parenthesized_expr(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
if p.eat(L_PAREN) {
|
||||||
|
let par_expr = p.start("parenthesized");
|
||||||
|
expression(p, false);
|
||||||
|
if !p.eat(R_PAREN) {
|
||||||
|
return Some(par_expr.error(p, SyntaxError::Expected(vec![R_PAREN])));
|
||||||
|
}
|
||||||
|
|
||||||
|
return Some(par_expr.complete(p, PARENTHESIZED_EXPR));
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
25
crates/lang/src/lst_parser/grammar/expression/collection.rs
Normal file
25
crates/lang/src/lst_parser/grammar/expression/collection.rs
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
use enumset::enum_set;
|
||||||
|
|
||||||
|
use crate::lst_parser::{
|
||||||
|
syntax_kind::{SyntaxKind::*, TokenSet},
|
||||||
|
CompletedMarker, Parser,
|
||||||
|
};
|
||||||
|
|
||||||
|
use self::{attr_set::attr_set, vec::vec_matrix_list};
|
||||||
|
|
||||||
|
mod attr_set;
|
||||||
|
mod vec;
|
||||||
|
|
||||||
|
const COLLECTION_START: TokenSet = enum_set!(L_BRACK | L_BRACE);
|
||||||
|
|
||||||
|
pub fn collection(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
if !COLLECTION_START.contains(p.current()) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(match p.current() {
|
||||||
|
L_BRACK => vec_matrix_list(p),
|
||||||
|
L_BRACE => attr_set(p),
|
||||||
|
_ => unreachable!(),
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,45 @@
|
||||||
|
use crate::lst_parser::{
|
||||||
|
error::SyntaxError,
|
||||||
|
grammar::expression::{atom, expression},
|
||||||
|
CompletedMarker, Marker, Parser,
|
||||||
|
SyntaxKind::*,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn attr_set(p: &mut Parser) -> CompletedMarker {
|
||||||
|
let start = p.start("attr_set_start");
|
||||||
|
assert!(p.eat(L_BRACE));
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if attr(p).is_some() {
|
||||||
|
// TODO: handle others
|
||||||
|
if p.eat(COMMA) {
|
||||||
|
continue;
|
||||||
|
} else if p.eat(R_BRACE) {
|
||||||
|
return start.complete(p, ATTR_SET);
|
||||||
|
}
|
||||||
|
// TODO: check for newline and stuff following that for recov of others
|
||||||
|
} else if p.eat(R_BRACE) {
|
||||||
|
return start.complete(p, ATTR_SET);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn attr(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
if p.at(IDENT) {
|
||||||
|
let attr_start = p.start("attr");
|
||||||
|
let attr_name_start = p.start("attr_name");
|
||||||
|
p.do_bump();
|
||||||
|
attr_name_start.complete(p, ATTR_NAME);
|
||||||
|
|
||||||
|
// TODO: handle comma, expr/atom, other
|
||||||
|
p.eat(COLON);
|
||||||
|
|
||||||
|
// TODO: handle failed expr parser too
|
||||||
|
let attr_value = p.start("attr_value");
|
||||||
|
let _ = expression(p, false);
|
||||||
|
attr_value.complete(p, ATTR_VALUE);
|
||||||
|
Some(attr_start.complete(p, ATTR))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,97 @@
|
||||||
|
use crate::lst_parser::{
|
||||||
|
error::SyntaxError, grammar::expression::atom, CompletedMarker, Marker, Parser, SyntaxKind::*,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn vec_matrix_list(p: &mut Parser) -> CompletedMarker {
|
||||||
|
let start = p.start("vec_matrix_list_start");
|
||||||
|
assert!(p.eat(L_BRACK));
|
||||||
|
let row_start = p.start("matrix_row_start");
|
||||||
|
if let Some(item) = atom(p) {
|
||||||
|
item.precede(p, "coll_item_start")
|
||||||
|
.complete(p, COLLECTION_ITEM);
|
||||||
|
|
||||||
|
if p.at(COMMA) {
|
||||||
|
row_start.abandon(p);
|
||||||
|
return finish_list(p, start);
|
||||||
|
}
|
||||||
|
|
||||||
|
finish_mat_or_vec(p, start, row_start)
|
||||||
|
} else if p.eat(R_BRACK) {
|
||||||
|
row_start.abandon(p);
|
||||||
|
start.complete(p, LIST)
|
||||||
|
} else {
|
||||||
|
row_start.abandon(p);
|
||||||
|
start.error(p, SyntaxError::Expected(vec![EXPR, R_BRACK]))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn finish_list(p: &mut Parser, list_start: Marker) -> CompletedMarker {
|
||||||
|
loop {
|
||||||
|
if p.eat(COMMA) {
|
||||||
|
if let Some(item) = atom(p) {
|
||||||
|
item.precede(p, "coll_item_start")
|
||||||
|
.complete(p, COLLECTION_ITEM);
|
||||||
|
} else if p.eat(R_BRACK) {
|
||||||
|
return list_start.complete(p, LIST);
|
||||||
|
}
|
||||||
|
} else if p.eat(R_BRACK) {
|
||||||
|
return list_start.complete(p, LIST);
|
||||||
|
} else if let Some(item) = atom(p) {
|
||||||
|
item.precede(p, "next_item")
|
||||||
|
.complete(p, COLLECTION_ITEM)
|
||||||
|
.precede(p, "err_space_sep")
|
||||||
|
.error(p, SyntaxError::SpaceSepInList);
|
||||||
|
} else if p.at(SEMICOLON) {
|
||||||
|
let semi_err = p.start("semicolon_err");
|
||||||
|
p.eat(SEMICOLON);
|
||||||
|
semi_err.error(p, SyntaxError::SemicolonInList);
|
||||||
|
if let Some(item) = atom(p) {
|
||||||
|
item.precede(p, "coll_item_start")
|
||||||
|
.complete(p, COLLECTION_ITEM);
|
||||||
|
} else if p.eat(R_BRACK) {
|
||||||
|
return list_start.complete(p, LIST);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: handle commas, general other wrong toks
|
||||||
|
fn finish_mat_or_vec(p: &mut Parser, coll_start: Marker, mut row_start: Marker) -> CompletedMarker {
|
||||||
|
let mut is_matrix = false;
|
||||||
|
let mut row_item_count = 1;
|
||||||
|
loop {
|
||||||
|
if let Some(item) = atom(p) {
|
||||||
|
item.precede(p, "coll_item_start")
|
||||||
|
.complete(p, COLLECTION_ITEM);
|
||||||
|
row_item_count += 1;
|
||||||
|
} else if p.at(SEMICOLON) {
|
||||||
|
is_matrix = true;
|
||||||
|
row_start.complete(p, MAT_ROW);
|
||||||
|
p.eat(SEMICOLON);
|
||||||
|
row_start = p.start("matrix_row_start");
|
||||||
|
row_item_count = 0;
|
||||||
|
} else if p.at(R_BRACK) {
|
||||||
|
if is_matrix && row_item_count == 0 {
|
||||||
|
row_start.abandon(p);
|
||||||
|
p.eat(R_BRACK);
|
||||||
|
return coll_start.complete(p, MATRIX);
|
||||||
|
} else if is_matrix {
|
||||||
|
row_start.complete(p, MAT_ROW);
|
||||||
|
p.eat(R_BRACK);
|
||||||
|
return coll_start.complete(p, MATRIX);
|
||||||
|
} else {
|
||||||
|
row_start.abandon(p);
|
||||||
|
p.eat(R_BRACK);
|
||||||
|
return coll_start.complete(p, VEC);
|
||||||
|
}
|
||||||
|
} else if p.at(COMMA) {
|
||||||
|
let err_unexpected_comma = p.start("err_unexpected_comma");
|
||||||
|
p.do_bump();
|
||||||
|
err_unexpected_comma.error(p, SyntaxError::CommaInMatOrVec);
|
||||||
|
} else {
|
||||||
|
let err_unexpected = p.start("err_unexpected_tok");
|
||||||
|
p.do_bump();
|
||||||
|
err_unexpected.error(p, SyntaxError::Expected(vec![EXPR, SEMICOLON, R_BRACK]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
34
crates/lang/src/lst_parser/grammar/expression/instruction.rs
Normal file
34
crates/lang/src/lst_parser/grammar/expression/instruction.rs
Normal file
|
@ -0,0 +1,34 @@
|
||||||
|
use crate::lst_parser::{syntax_kind::SyntaxKind::*, CompletedMarker, Parser};
|
||||||
|
|
||||||
|
use super::{atom, lit::literal};
|
||||||
|
|
||||||
|
pub fn instr(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
if !p.at(IDENT) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let instr = p.start("instr");
|
||||||
|
|
||||||
|
instr_name(p);
|
||||||
|
instr_params(p);
|
||||||
|
|
||||||
|
Some(instr.complete(p, INSTR))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn instr_name(p: &mut Parser) {
|
||||||
|
let instr_name = p.start("instr_name");
|
||||||
|
|
||||||
|
while p.at(IDENT) {
|
||||||
|
p.do_bump();
|
||||||
|
}
|
||||||
|
|
||||||
|
instr_name.complete(p, INSTR_NAME);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn instr_params(p: &mut Parser) {
|
||||||
|
if let Some(start) = atom(p) {
|
||||||
|
while atom(p).is_some() {}
|
||||||
|
|
||||||
|
start.precede(p, "params_start").complete(p, INSTR_PARAMS);
|
||||||
|
}
|
||||||
|
}
|
59
crates/lang/src/lst_parser/grammar/expression/lit.rs
Normal file
59
crates/lang/src/lst_parser/grammar/expression/lit.rs
Normal file
|
@ -0,0 +1,59 @@
|
||||||
|
use enumset::enum_set;
|
||||||
|
use indoc::indoc;
|
||||||
|
|
||||||
|
use crate::lst_parser::{
|
||||||
|
grammar::check_parser,
|
||||||
|
syntax_kind::{SyntaxKind::*, TokenSet},
|
||||||
|
CompletedMarker, Parser,
|
||||||
|
};
|
||||||
|
|
||||||
|
const LIT_TOKENS: TokenSet = enum_set!(INT_NUM | FLOAT_NUM | STRING);
|
||||||
|
|
||||||
|
pub fn literal(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
if !LIT_TOKENS.contains(p.current()) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let lit = p.start("lit");
|
||||||
|
|
||||||
|
p.do_bump();
|
||||||
|
|
||||||
|
Some(lit.complete(p, LITERAL))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_lst_lit() {
|
||||||
|
check_parser(
|
||||||
|
"42",
|
||||||
|
|p| {
|
||||||
|
literal(p);
|
||||||
|
},
|
||||||
|
indoc! {r#"
|
||||||
|
LITERAL {
|
||||||
|
INT_NUM "42";
|
||||||
|
}
|
||||||
|
"#},
|
||||||
|
);
|
||||||
|
check_parser(
|
||||||
|
"3.14",
|
||||||
|
|p| {
|
||||||
|
literal(p);
|
||||||
|
},
|
||||||
|
indoc! {r#"
|
||||||
|
LITERAL {
|
||||||
|
FLOAT_NUM "3.14";
|
||||||
|
}
|
||||||
|
"#},
|
||||||
|
);
|
||||||
|
check_parser(
|
||||||
|
r#""Meow""#,
|
||||||
|
|p| {
|
||||||
|
literal(p);
|
||||||
|
},
|
||||||
|
indoc! {r#"
|
||||||
|
LITERAL {
|
||||||
|
STRING "\"Meow\"";
|
||||||
|
}
|
||||||
|
"#},
|
||||||
|
);
|
||||||
|
}
|
36
crates/lang/src/lst_parser/grammar/expression/pipeline.rs
Normal file
36
crates/lang/src/lst_parser/grammar/expression/pipeline.rs
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
use enumset::enum_set;
|
||||||
|
|
||||||
|
use crate::lst_parser::{
|
||||||
|
error::SyntaxError,
|
||||||
|
syntax_kind::{SyntaxKind::*, TokenSet},
|
||||||
|
CompletedMarker, Parser,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::expression;
|
||||||
|
|
||||||
|
pub fn pipeline(p: &mut Parser, start_expr: CompletedMarker) -> Option<CompletedMarker> {
|
||||||
|
if !pipe(p) {
|
||||||
|
return Some(start_expr);
|
||||||
|
}
|
||||||
|
let pipeline_marker = start_expr.precede(p, "pipeline_start");
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if expression(p, true).is_none() {
|
||||||
|
return Some(pipeline_marker.error(p, SyntaxError::PipelineNeedsSink));
|
||||||
|
}
|
||||||
|
if !pipe(p) {
|
||||||
|
return Some(pipeline_marker.complete(p, PIPELINE));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const PIPES: TokenSet = enum_set!(PIPE | MAPPING_PIPE | NULL_PIPE);
|
||||||
|
|
||||||
|
fn pipe(p: &mut Parser) -> bool {
|
||||||
|
if PIPES.contains(p.current()) {
|
||||||
|
p.do_bump();
|
||||||
|
true
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
191
crates/lang/src/lst_parser/grammar/module.rs
Normal file
191
crates/lang/src/lst_parser/grammar/module.rs
Normal file
|
@ -0,0 +1,191 @@
|
||||||
|
use enumset::enum_set;
|
||||||
|
|
||||||
|
use crate::lst_parser::{
|
||||||
|
error::SyntaxError,
|
||||||
|
grammar::expression::expression,
|
||||||
|
syntax_kind::{SyntaxKind::*, TokenSet},
|
||||||
|
CompletedMarker, Parser,
|
||||||
|
};
|
||||||
|
|
||||||
|
const TOP_LEVEL_ITEM_START: TokenSet = enum_set!(DEF_KW | MOD_KW | USE_KW);
|
||||||
|
|
||||||
|
pub fn mod_body(p: &mut Parser) {
|
||||||
|
loop {
|
||||||
|
if top_level_item(p).is_none() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn mod_decl(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
let mod_start = p.start("module");
|
||||||
|
if !p.eat(MOD_KW) {
|
||||||
|
mod_start.abandon(p);
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mod_name = p.start("module_name");
|
||||||
|
if p.eat(IDENT) {
|
||||||
|
mod_name.complete(p, MODULE_NAME);
|
||||||
|
} else {
|
||||||
|
mod_name.error(p, SyntaxError::Expected(vec![IDENT]));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mod_body_marker = p.start("mod_body");
|
||||||
|
if p.eat(SEMICOLON) {
|
||||||
|
mod_body_marker.abandon(p);
|
||||||
|
Some(mod_start.complete(p, MODULE))
|
||||||
|
} else if p.eat(L_BRACE) {
|
||||||
|
mod_body(p);
|
||||||
|
if !p.eat(R_BRACE) {
|
||||||
|
mod_body_marker
|
||||||
|
.complete(p, MODULE_BODY)
|
||||||
|
.precede(p, "unclosed_mod_body_err")
|
||||||
|
.error(p, SyntaxError::UnclosedModuleBody);
|
||||||
|
} else {
|
||||||
|
mod_body_marker.complete(p, MODULE_BODY);
|
||||||
|
}
|
||||||
|
Some(mod_start.complete(p, MODULE))
|
||||||
|
} else {
|
||||||
|
Some(mod_start.error(p, SyntaxError::Expected(vec![MODULE_BODY])))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn top_level_item(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
if !TOP_LEVEL_ITEM_START.contains(p.current()) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
def(p).or_else(|| mod_decl(p)).or_else(|| r#use(p))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn def(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
let def_start = p.start("top_level_def");
|
||||||
|
if !p.eat(DEF_KW) {
|
||||||
|
def_start.abandon(p);
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let def_name = p.start("def_name");
|
||||||
|
if p.eat(IDENT) {
|
||||||
|
def_name.complete(p, DEF_NAME);
|
||||||
|
} else {
|
||||||
|
def_name.error(p, SyntaxError::Expected(vec![IDENT]));
|
||||||
|
}
|
||||||
|
|
||||||
|
let maybe_expected_eq = p.start("maybe_expect_eq");
|
||||||
|
if !p.eat(EQ) {
|
||||||
|
maybe_expected_eq.error(p, SyntaxError::Expected(vec![EQ]));
|
||||||
|
} else {
|
||||||
|
maybe_expected_eq.abandon(p);
|
||||||
|
}
|
||||||
|
|
||||||
|
let body = p.start("def_body");
|
||||||
|
if expression(p, false).is_some() {
|
||||||
|
body.complete(p, DEF_BODY);
|
||||||
|
} else {
|
||||||
|
body.error(p, SyntaxError::Expected(vec![DEF_BODY]));
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(if p.eat(SEMICOLON) {
|
||||||
|
def_start.complete(p, DEF)
|
||||||
|
} else if TOP_LEVEL_ITEM_START.contains(p.current()) || p.at(EOF) {
|
||||||
|
def_start
|
||||||
|
.complete(p, DEF)
|
||||||
|
.precede(p, "unterminated_tl_item")
|
||||||
|
.error(p, SyntaxError::UnterminatedTopLevelItem)
|
||||||
|
} else {
|
||||||
|
def_start
|
||||||
|
.complete(p, DEF)
|
||||||
|
.precede(p, "err_unexpected")
|
||||||
|
.error(p, SyntaxError::Expected(vec![SEMICOLON]))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn r#use(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
let use_start = p.start("use_start");
|
||||||
|
if !p.eat(USE_KW) {
|
||||||
|
use_start.abandon(p);
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
if use_pat(p).is_none() {
|
||||||
|
p.start("expected_use_pat")
|
||||||
|
.error(p, SyntaxError::Expected(vec![USE_PAT]));
|
||||||
|
}
|
||||||
|
|
||||||
|
let use_item = use_start.complete(p, USE);
|
||||||
|
Some(if p.eat(SEMICOLON) {
|
||||||
|
use_item
|
||||||
|
} else if TOP_LEVEL_ITEM_START.contains(p.current()) || p.at(EOF) {
|
||||||
|
use_item
|
||||||
|
.precede(p, "unterminated_tl_item")
|
||||||
|
.error(p, SyntaxError::UnterminatedTopLevelItem)
|
||||||
|
} else {
|
||||||
|
use_item
|
||||||
|
.precede(p, "err_unexpected")
|
||||||
|
.error(p, SyntaxError::Expected(vec![SEMICOLON]))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn use_pat(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
let use_pat_marker = p.start("use_pat");
|
||||||
|
if !p.eat(IDENT) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if p.eat(PATH_SEP) {
|
||||||
|
if pat_item(p).is_none() {
|
||||||
|
break Some(use_pat_marker.error(p, SyntaxError::UnfinishedPath));
|
||||||
|
}
|
||||||
|
} else if p.at(SEMICOLON) && p.nth_at(1, COLON) {
|
||||||
|
let broken_sep = p.start("broken_path_sep");
|
||||||
|
let wrong_semi = p.start("semi_typo");
|
||||||
|
p.eat(SEMICOLON);
|
||||||
|
wrong_semi.error(p, SyntaxError::PathSepContainsSemicolon);
|
||||||
|
p.eat(COLON);
|
||||||
|
broken_sep.complete(p, PATH_SEP);
|
||||||
|
if pat_item(p).is_none() {
|
||||||
|
break Some(use_pat_marker.error(p, SyntaxError::UnfinishedPath));
|
||||||
|
}
|
||||||
|
} else if p.at(COLON) && p.nth_at(1, SEMICOLON) {
|
||||||
|
let broken_sep = p.start("broken_path_sep");
|
||||||
|
p.eat(COLON);
|
||||||
|
let wrong_semi = p.start("semi_typo");
|
||||||
|
p.eat(SEMICOLON);
|
||||||
|
wrong_semi.error(p, SyntaxError::PathSepContainsSemicolon);
|
||||||
|
broken_sep.complete(p, PATH_SEP);
|
||||||
|
if pat_item(p).is_none() {
|
||||||
|
break Some(use_pat_marker.error(p, SyntaxError::UnfinishedPath));
|
||||||
|
}
|
||||||
|
} else if p.at(SEMICOLON) && p.nth_at(1, SEMICOLON) {
|
||||||
|
let broken_sep = p.start("broken_path_sep");
|
||||||
|
p.eat(SEMICOLON);
|
||||||
|
p.eat(SEMICOLON);
|
||||||
|
broken_sep
|
||||||
|
.complete(p, PATH_SEP)
|
||||||
|
.precede(p, "semi_typo_err")
|
||||||
|
.error(p, SyntaxError::PathSepContainsSemicolon);
|
||||||
|
if pat_item(p).is_none() {
|
||||||
|
break Some(use_pat_marker.error(p, SyntaxError::UnfinishedPath));
|
||||||
|
}
|
||||||
|
} else if p.eat(SEMICOLON) {
|
||||||
|
break Some(use_pat_marker.complete(p, USE_PAT));
|
||||||
|
} else {
|
||||||
|
break Some(use_pat_marker.error(p, SyntaxError::Expected(vec![PATH_SEP, SEMICOLON])));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn pat_item(p: &mut Parser) -> Option<CompletedMarker> {
|
||||||
|
let item_start = p.start("pat_item_start");
|
||||||
|
if p.eat(IDENT) {
|
||||||
|
Some(item_start.complete(p, PAT_ITEM))
|
||||||
|
} else if p.eat(STAR) {
|
||||||
|
Some(item_start.complete(p, PAT_GLOB))
|
||||||
|
} else if p.eat(L_BRACE) {
|
||||||
|
todo!("write PAT_GROUPs")
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
70
crates/lang/src/lst_parser/input.rs
Normal file
70
crates/lang/src/lst_parser/input.rs
Normal file
|
@ -0,0 +1,70 @@
|
||||||
|
use enumset::enum_set;
|
||||||
|
|
||||||
|
use crate::lst_parser::syntax_kind::SyntaxKind;
|
||||||
|
|
||||||
|
use super::syntax_kind::TokenSet;
|
||||||
|
|
||||||
|
pub struct Input<'src, 'toks> {
|
||||||
|
raw: &'toks Vec<(SyntaxKind, &'src str)>,
|
||||||
|
/// indices of the "meaningful" tokens (not whitespace etc)
|
||||||
|
/// includes newlines because those might indeed help with finding errors
|
||||||
|
meaningful: Vec<usize>,
|
||||||
|
/// indices of newlines for the purpose of easily querying them
|
||||||
|
/// can be helpful with missing commas etc
|
||||||
|
newlines: Vec<usize>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const MEANINGLESS_TOKS: TokenSet = enum_set!(SyntaxKind::WHITESPACE | SyntaxKind::NEWLINE);
|
||||||
|
|
||||||
|
impl<'src, 'toks> Input<'src, 'toks> {
|
||||||
|
pub fn new(raw_toks: &'toks Vec<(SyntaxKind, &'src str)>) -> Self {
|
||||||
|
let meaningful = raw_toks
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.filter_map(|(i, tok)| {
|
||||||
|
if MEANINGLESS_TOKS.contains(tok.0) {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(i)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
let newlines = raw_toks
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.filter_map(|(i, tok)| match tok.0 {
|
||||||
|
SyntaxKind::NEWLINE => Some(i),
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Self {
|
||||||
|
raw: raw_toks,
|
||||||
|
meaningful,
|
||||||
|
newlines,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::unwrap_used, reason = "meaningful indices cannot be invalid")]
|
||||||
|
pub(crate) fn kind(&self, idx: usize) -> SyntaxKind {
|
||||||
|
let Some(meaningful_idx) = self.meaningful.get(idx) else {
|
||||||
|
return SyntaxKind::EOF;
|
||||||
|
};
|
||||||
|
|
||||||
|
self.raw.get(*meaningful_idx).unwrap().0
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn preceding_meaningless(&self, idx: usize) -> usize {
|
||||||
|
assert!(self.meaningful.len() > idx);
|
||||||
|
|
||||||
|
if idx == 0 {
|
||||||
|
1
|
||||||
|
} else {
|
||||||
|
self.meaningful[idx] - self.meaningful[idx - 1]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn meaningless_tail_len(&self) -> usize {
|
||||||
|
self.raw.len() - (self.meaningful.last().unwrap() + 1)
|
||||||
|
}
|
||||||
|
}
|
208
crates/lang/src/lst_parser/output.rs
Normal file
208
crates/lang/src/lst_parser/output.rs
Normal file
|
@ -0,0 +1,208 @@
|
||||||
|
use clap::builder;
|
||||||
|
use owo_colors::{unset_override, OwoColorize};
|
||||||
|
use rowan::{GreenNode, GreenNodeBuilder, GreenNodeData, GreenTokenData, Language, NodeOrToken};
|
||||||
|
use std::mem;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
lst_parser::{input::MEANINGLESS_TOKS, syntax_kind::SyntaxKind},
|
||||||
|
Lang, SyntaxNode,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::{
|
||||||
|
error::SyntaxError,
|
||||||
|
events::{Event, NodeKind},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub struct Output {
|
||||||
|
pub green_node: GreenNode,
|
||||||
|
pub errors: Vec<SyntaxError>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Debug for Output {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
let mut errs: Vec<&SyntaxError> = self.errors.iter().collect();
|
||||||
|
errs.reverse();
|
||||||
|
|
||||||
|
debug_print_green_node(NodeOrToken::Node(&self.green_node), f, 0, &mut errs, false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const INDENT_STR: &str = " ";
|
||||||
|
/// colored argument currently broken
|
||||||
|
fn debug_print_green_node(
|
||||||
|
node: NodeOrToken<&GreenNodeData, &GreenTokenData>,
|
||||||
|
f: &mut dyn std::fmt::Write,
|
||||||
|
lvl: i32,
|
||||||
|
errs: &mut Vec<&SyntaxError>,
|
||||||
|
colored: bool,
|
||||||
|
) -> std::fmt::Result {
|
||||||
|
for _ in 0..lvl {
|
||||||
|
f.write_str(INDENT_STR)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let r = match node {
|
||||||
|
NodeOrToken::Node(n) => {
|
||||||
|
let kind = Lang::kind_from_raw(node.kind());
|
||||||
|
if kind != SyntaxKind::PARSE_ERR {
|
||||||
|
writeln!(
|
||||||
|
f,
|
||||||
|
"{:?} {}",
|
||||||
|
Lang::kind_from_raw(node.kind()).bright_yellow().bold(),
|
||||||
|
"{".yellow()
|
||||||
|
)?;
|
||||||
|
} else {
|
||||||
|
let err = errs
|
||||||
|
.pop()
|
||||||
|
.expect("all error syntax nodes should correspond to an error")
|
||||||
|
.bright_red();
|
||||||
|
|
||||||
|
writeln!(
|
||||||
|
f,
|
||||||
|
"{:?}{} {err:?} {}",
|
||||||
|
kind.bright_red().bold(),
|
||||||
|
":".red(),
|
||||||
|
"{".bright_red().bold()
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
for c in n.children() {
|
||||||
|
debug_print_green_node(c, f, lvl + 1, errs, colored)?;
|
||||||
|
}
|
||||||
|
for _ in 0..lvl {
|
||||||
|
f.write_str(INDENT_STR)?;
|
||||||
|
}
|
||||||
|
if kind != SyntaxKind::PARSE_ERR {
|
||||||
|
write!(f, "{}", "}\n".yellow())
|
||||||
|
} else {
|
||||||
|
write!(f, "{}", "}\n".bright_red().bold())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
NodeOrToken::Token(t) => {
|
||||||
|
let tok = Lang::kind_from_raw(t.kind());
|
||||||
|
if MEANINGLESS_TOKS.contains(tok) {
|
||||||
|
writeln!(
|
||||||
|
f,
|
||||||
|
"{:?} {:?}{}",
|
||||||
|
Lang::kind_from_raw(t.kind()).white(),
|
||||||
|
t.text().white(),
|
||||||
|
";".white()
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
writeln!(
|
||||||
|
f,
|
||||||
|
"{:?} {:?}{}",
|
||||||
|
Lang::kind_from_raw(t.kind()).bright_cyan().bold(),
|
||||||
|
t.text().green(),
|
||||||
|
";".yellow()
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
r
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Output {
|
||||||
|
pub fn debug_colored(&self) -> String {
|
||||||
|
let mut out = String::new();
|
||||||
|
let mut errs: Vec<&SyntaxError> = self.errors.iter().collect();
|
||||||
|
errs.reverse();
|
||||||
|
|
||||||
|
let _ = debug_print_green_node(
|
||||||
|
NodeOrToken::Node(&self.green_node),
|
||||||
|
&mut out,
|
||||||
|
0,
|
||||||
|
&mut errs,
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
|
||||||
|
out
|
||||||
|
}
|
||||||
|
pub fn from_parser_output(
|
||||||
|
mut raw_toks: Vec<(SyntaxKind, &str)>,
|
||||||
|
mut events: Vec<Event>,
|
||||||
|
) -> Self {
|
||||||
|
let mut builder = GreenNodeBuilder::new();
|
||||||
|
let mut fw_parents = Vec::new();
|
||||||
|
let mut errors = Vec::new();
|
||||||
|
raw_toks.reverse();
|
||||||
|
|
||||||
|
for i in 0..events.len() {
|
||||||
|
match mem::replace(&mut events[i], Event::tombstone()) {
|
||||||
|
Event::Start {
|
||||||
|
kind,
|
||||||
|
forward_parent,
|
||||||
|
} => {
|
||||||
|
if kind == SyntaxKind::TOMBSTONE && forward_parent.is_none() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
fw_parents.push(kind);
|
||||||
|
let mut idx = i;
|
||||||
|
let mut fp = forward_parent;
|
||||||
|
while let Some(fwd) = fp {
|
||||||
|
idx += fwd as usize;
|
||||||
|
fp = match mem::replace(&mut events[idx], Event::tombstone()) {
|
||||||
|
Event::Start {
|
||||||
|
kind,
|
||||||
|
forward_parent,
|
||||||
|
} => {
|
||||||
|
fw_parents.push(kind);
|
||||||
|
forward_parent
|
||||||
|
}
|
||||||
|
_ => unreachable!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove whitespace bc it's ugly
|
||||||
|
while let Some((SyntaxKind::WHITESPACE | SyntaxKind::NEWLINE, _)) =
|
||||||
|
raw_toks.last()
|
||||||
|
{
|
||||||
|
match events.iter_mut().find(|ev| matches!(ev, Event::Eat { .. })) {
|
||||||
|
Some(Event::Eat { count }) => *count -= 1,
|
||||||
|
_ => unreachable!(),
|
||||||
|
}
|
||||||
|
|
||||||
|
let (tok, text): (SyntaxKind, &str) = raw_toks.pop().unwrap();
|
||||||
|
builder.token(tok.into(), text);
|
||||||
|
}
|
||||||
|
|
||||||
|
for kind in fw_parents.drain(..).rev() {
|
||||||
|
match kind {
|
||||||
|
NodeKind::Syntax(kind) if kind != SyntaxKind::TOMBSTONE => {
|
||||||
|
builder.start_node(kind.into())
|
||||||
|
}
|
||||||
|
NodeKind::Error(err) => {
|
||||||
|
errors.push(err);
|
||||||
|
builder.start_node(SyntaxKind::PARSE_ERR.into())
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Event::Finish => builder.finish_node(),
|
||||||
|
Event::Eat { count } => (0..count).for_each(|_| {
|
||||||
|
let (tok, text): (SyntaxKind, &str) = raw_toks.pop().unwrap();
|
||||||
|
builder.token(tok.into(), text);
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Self {
|
||||||
|
green_node: builder.finish(),
|
||||||
|
errors,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn syntax(&self) -> SyntaxNode {
|
||||||
|
SyntaxNode::new_root(self.green_node.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn errors(&self) -> Vec<SyntaxError> {
|
||||||
|
self.errors.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn dissolve(self) -> (GreenNode, Vec<SyntaxError>) {
|
||||||
|
let Self { green_node, errors } = self;
|
||||||
|
(green_node, errors)
|
||||||
|
}
|
||||||
|
}
|
140
crates/lang/src/lst_parser/syntax_kind.rs
Normal file
140
crates/lang/src/lst_parser/syntax_kind.rs
Normal file
|
@ -0,0 +1,140 @@
|
||||||
|
use enumset::EnumSet;
|
||||||
|
use logos::Logos;
|
||||||
|
|
||||||
|
pub fn lex(src: &str) -> Vec<(SyntaxKind, &str)> {
|
||||||
|
let mut lex = SyntaxKind::lexer(src);
|
||||||
|
let mut r = Vec::new();
|
||||||
|
|
||||||
|
while let Some(tok_res) = lex.next() {
|
||||||
|
r.push((tok_res.unwrap_or(SyntaxKind::LEX_ERR), lex.slice()))
|
||||||
|
}
|
||||||
|
|
||||||
|
r
|
||||||
|
}
|
||||||
|
|
||||||
|
/// All token and node kinds of the language.
///
/// Token variants carry a `#[token]`/`#[regex]` lexer rule; variants without
/// a rule are tree-node kinds produced only by the parser.
#[derive(enumset::EnumSetType, Logos, Debug, PartialEq, Eq, Clone, Copy, Hash, PartialOrd, Ord)]
#[repr(u16)]
#[enumset(no_super_impls)]
#[allow(non_camel_case_types)]
pub enum SyntaxKind {
    // -- definitions --
    #[token("def")]
    DEF_KW = 0,
    DEF,
    DEF_NAME,
    DEF_BODY,
    // -- let .. in --
    #[token("let")]
    LET_KW,
    #[token("in")]
    IN_KW,
    LET_IN,
    // -- modules & use --
    #[token("::")]
    PATH_SEP,
    #[token("mod")]
    MOD_KW,
    MODULE,
    MODULE_NAME,
    MODULE_BODY,
    USE,
    #[token("use")]
    USE_KW,
    USE_PAT,
    PAT_ITEM,
    PAT_GLOB,
    PAT_GROUP,
    // -- literals --
    #[regex("[\\d]+")]
    INT_NUM,
    // NOTE(review): FLOAT_NUM accepts an optional sign but INT_NUM does not —
    // confirm this asymmetry is intentional (sign may be parsed as PLUS/MINUS).
    #[regex("[+-]?([\\d]+\\.[\\d]*|[\\d]*\\.[\\d]+)")]
    FLOAT_NUM,
    #[regex(r#""([^"\\]|\\["\\bnfrt]|u[a-fA-F0-9]{4})*""#)]
    STRING,
    // -- collection nodes --
    MATRIX,
    MAT_ROW,
    VEC,
    LIST,
    // either of a vec, a matrix or a list
    COLLECTION_ITEM,
    // -- expression nodes --
    PARENTHESIZED_EXPR,
    EXPR,
    LITERAL,
    // -- brackets --
    #[token("(")]
    L_PAREN,
    #[token(")")]
    R_PAREN,
    #[token("{")]
    L_BRACE,
    #[token("}")]
    R_BRACE,
    #[token("[")]
    L_BRACK,
    #[token("]")]
    R_BRACK,
    #[token("<")]
    L_ANGLE,
    #[token(">")]
    R_ANGLE,
    // -- arithmetic operators --
    #[token("+")]
    PLUS,
    #[token("-")]
    MINUS,
    #[token("*")]
    STAR,
    #[token("/")]
    SLASH,
    #[token("%")]
    PERCENT,
    #[token("^")]
    CARET,
    // -- instruction & attribute nodes --
    INSTR,
    INSTR_NAME,
    INSTR_PARAMS,
    ATTR_SET,
    ATTR,
    ATTR_NAME,
    ATTR_VALUE,
    // -- identifiers & variables --
    #[regex("[a-zA-Z_]+[a-zA-Z_\\-\\d]*")]
    IDENT,
    #[regex("\\$[a-zA-Z0-9_\\-]+")]
    VAR,
    #[regex("\\@[a-zA-Z0-9_\\-]+")]
    INPUT_VAR,
    // -- punctuation --
    #[token("$")]
    DOLLAR,
    #[token("@")]
    AT,
    #[token(",")]
    COMMA,
    // -- pipes --
    #[token("|")]
    PIPE,
    #[token("@|")]
    MAPPING_PIPE,
    #[token("!|")]
    NULL_PIPE,
    PIPELINE,
    // -- misc punctuation --
    #[token("=")]
    EQ,
    #[token(":")]
    COLON,
    #[token(";")]
    SEMICOLON,
    #[token(".")]
    DOT,
    #[token("!")]
    BANG,
    // -- trivia (semantically meaningless, kept in the lossless tree) --
    #[regex("[ \\t\\f]+")]
    WHITESPACE,
    #[token("\n")]
    NEWLINE,
    // -- parser-internal kinds (never produced by the lexer rules) --
    PARSE_ERR,
    LEX_ERR,
    ROOT,
    EOF,
    TOMBSTONE,
}
|
||||||
|
|
||||||
|
/// A set of [`SyntaxKind`]s, e.g. for expected-token / recovery sets.
pub type TokenSet = EnumSet<SyntaxKind>;

impl From<SyntaxKind> for rowan::SyntaxKind {
    fn from(kind: SyntaxKind) -> Self {
        // Sound because `SyntaxKind` is `#[repr(u16)]` and rowan's kind is a
        // plain `u16` newtype.
        Self(kind as u16)
    }
}
|
1
crates/lang/src/lst_parser/tests.rs
Normal file
1
crates/lang/src/lst_parser/tests.rs
Normal file
|
@ -0,0 +1 @@
|
||||||
|
|
29
crates/lang/src/main.rs
Normal file
29
crates/lang/src/main.rs
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
use clap::Parser;
|
||||||
|
use std::{fs, path::PathBuf};
|
||||||
|
|
||||||
|
use lang::lst_parser::{self, grammar, input, output::Output, syntax_kind};
|
||||||
|
|
||||||
|
// CLI arguments for the parser debugging binary.
// (Plain `//` comments on purpose: `///` on clap items would change the
// generated --help text, i.e. runtime behavior.)
#[derive(Parser)]
struct Args {
    // Path of the source file to lex, parse and dump.
    file: PathBuf,
}
|
||||||
|
|
||||||
|
#[allow(clippy::unwrap_used)]
|
||||||
|
fn main() {
|
||||||
|
let args = Args::parse();
|
||||||
|
let n = args.file.clone();
|
||||||
|
let f = fs::read_to_string(n.clone()).expect("failed to read file");
|
||||||
|
|
||||||
|
let toks = dbg!(syntax_kind::lex(&f));
|
||||||
|
let input = input::Input::new(&toks);
|
||||||
|
let mut parser = lst_parser::Parser::new(input);
|
||||||
|
|
||||||
|
grammar::source_file(&mut parser);
|
||||||
|
|
||||||
|
let p_out = dbg!(parser.finish());
|
||||||
|
let o = Output::from_parser_output(toks, p_out);
|
||||||
|
|
||||||
|
println!("{}", o.debug_colored());
|
||||||
|
|
||||||
|
// World::new(n);
|
||||||
|
}
|
27
crates/lang/src/world.rs
Normal file
27
crates/lang/src/world.rs
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use self::files::{Files, OpenFileError};
|
||||||
|
|
||||||
|
mod error;
|
||||||
|
mod files;
|
||||||
|
|
||||||
|
/// The compiler's view of a whole project: all loaded files plus everything
/// derived from them.
struct World;

impl World {
    /// Loads `entry_point` into a fresh [`Files`] store.
    ///
    /// NOTE(review): still a stub — `entry_point_id` and `errors` are
    /// computed but unused until module resolution lands; always `todo!()`s.
    pub fn new(entry_point: &Path) -> Result<Self, WorldCreationError> {
        let mut files = Files::default();
        // `?` converts an `OpenFileError` via the `From` impl below.
        let (entry_point_id, errors) = files.add_file(entry_point)?;

        todo!()
    }
}
|
||||||
|
|
||||||
|
/// Errors that can occur while constructing a [`World`].
enum WorldCreationError {
    /// The entry-point file could not be opened/read.
    FailedToOpenEntryPoint(OpenFileError),
}

impl From<OpenFileError> for WorldCreationError {
    // Enables `?` on `Files::add_file` inside `World::new`.
    fn from(value: OpenFileError) -> Self {
        Self::FailedToOpenEntryPoint(value)
    }
}
|
10
crates/lang/src/world/error.rs
Normal file
10
crates/lang/src/world/error.rs
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use crate::{ast::ParseError, lst_parser::error::SyntaxError};
|
||||||
|
|
||||||
|
use super::files::{FileId, Loc, OpenFileError};
|
||||||
|
|
||||||
|
/// Any error the compiler can attach to a file.
pub enum Error {
    /// A parse error: its location in the file's tree plus the structured
    /// syntax error itself.
    Syntax(Loc<ParseError>, SyntaxError),
    /// The file could not be opened or read at all.
    OpenFileError(OpenFileError),
}
|
57
crates/lang/src/world/files.rs
Normal file
57
crates/lang/src/world/files.rs
Normal file
|
@ -0,0 +1,57 @@
|
||||||
|
use std::{
|
||||||
|
collections::HashMap,
|
||||||
|
io,
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
};
|
||||||
|
|
||||||
|
mod loc;
|
||||||
|
|
||||||
|
pub use loc::Loc;
|
||||||
|
use rowan::ast::AstNode;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
ast::ParseError,
|
||||||
|
lst_parser::{self, error::SyntaxError, input, output::Output},
|
||||||
|
world::{error::Error, files::source_file::SourceFile},
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Append-only store of all source files known to the compiler.
#[derive(Default)]
pub struct Files {
    // Indexed by `FileId` (an id is its file's position in this vec).
    inner: Vec<source_file::SourceFile>,
    // Reverse lookup so the same path maps to a stable `FileId`.
    path_to_id_map: HashMap<PathBuf, FileId>,
}
|
||||||
|
|
||||||
|
impl Files {
|
||||||
|
pub fn add_file(&mut self, path: &Path) -> Result<(FileId, Vec<Error>), OpenFileError> {
|
||||||
|
if !path.exists() {
|
||||||
|
return Err(OpenFileError::NotFound(path.to_owned()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let file_id = FileId(self.inner.len());
|
||||||
|
let (source_file, errs) = match SourceFile::open(path) {
|
||||||
|
Ok((source_file, errs)) => {
|
||||||
|
let errs = errs
|
||||||
|
.into_iter()
|
||||||
|
.map(|(ptr, err)| Error::Syntax(Loc::from_ptr(ptr, file_id), err))
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
(source_file, errs)
|
||||||
|
}
|
||||||
|
Err(e) => return Err(OpenFileError::IoError(path.to_path_buf(), e)),
|
||||||
|
};
|
||||||
|
|
||||||
|
self.inner.push(source_file);
|
||||||
|
self.path_to_id_map.insert(path.to_path_buf(), file_id);
|
||||||
|
|
||||||
|
Ok((file_id, errs))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Why a file could not be brought into the [`Files`] store.
pub enum OpenFileError {
    /// The path does not exist on disk.
    NotFound(PathBuf),
    /// The path exists but reading it failed (permissions, invalid UTF-8, ...).
    IoError(PathBuf, std::io::Error),
}

/// Stable handle for a file: its index into `Files::inner`.
#[derive(Copy, Clone, Debug)]
pub struct FileId(usize);
|
||||||
|
|
||||||
|
mod source_file;
|
29
crates/lang/src/world/files/loc.rs
Normal file
29
crates/lang/src/world/files/loc.rs
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
use rowan::ast::{AstNode, AstPtr};
|
||||||
|
|
||||||
|
use crate::Lang;
|
||||||
|
|
||||||
|
use super::FileId;
|
||||||
|
|
||||||
|
/// A location: a stable pointer to an AST node plus the file it lives in.
#[derive(Clone)]
pub struct Loc<N: AstNode<Language = Lang>> {
    // Which file the node belongs to.
    file: FileId,
    // Position-stable pointer to the node within that file's tree.
    syntax: AstPtr<N>,
}

impl<N: AstNode<Language = Lang>> Loc<N> {
    /// Builds a location for `node` inside `file`.
    pub fn new(node: N, file: FileId) -> Self {
        Self::from_ptr(AstPtr::new(&node), file)
    }

    /// Builds a location from an already-created node pointer.
    pub fn from_ptr(ptr: AstPtr<N>, file: FileId) -> Self {
        Self { file, syntax: ptr }
    }

    /// The file this location points into.
    pub fn file(&self) -> FileId {
        self.file
    }

    /// A (cloned) pointer to the node itself.
    pub fn syntax(&self) -> AstPtr<N> {
        self.syntax.clone()
    }
}
|
113
crates/lang/src/world/files/source_file.rs
Normal file
113
crates/lang/src/world/files/source_file.rs
Normal file
|
@ -0,0 +1,113 @@
|
||||||
|
use crate::lst_parser::{self, grammar, input, syntax_kind};
|
||||||
|
use crate::SyntaxNode;
|
||||||
|
|
||||||
|
use crate::lst_parser::output::Output;
|
||||||
|
|
||||||
|
use crate::lst_parser::error::SyntaxError;
|
||||||
|
|
||||||
|
use crate::ast::ParseError;
|
||||||
|
|
||||||
|
use rowan::ast::{AstNode, AstPtr};
|
||||||
|
|
||||||
|
use std::path::Path;
|
||||||
|
use std::{fs, io};
|
||||||
|
|
||||||
|
use rowan::GreenNode;
|
||||||
|
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
/// A single parsed source file: its on-disk path and its lossless syntax tree.
pub(crate) struct SourceFile {
    // Where the file came from on disk.
    pub(crate) path: PathBuf,
    // The green (immutable, position-independent) tree of the whole file.
    pub(crate) lst: rowan::GreenNode,
}
|
||||||
|
|
||||||
|
impl SourceFile {
|
||||||
|
pub(crate) fn open(p: &Path) -> io::Result<(Self, Vec<(AstPtr<ParseError>, SyntaxError)>)> {
|
||||||
|
assert!(p.exists());
|
||||||
|
|
||||||
|
let f = fs::read_to_string(p)?;
|
||||||
|
let (lst, errs) = Self::parse(f);
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
Self {
|
||||||
|
path: p.to_path_buf(),
|
||||||
|
lst,
|
||||||
|
},
|
||||||
|
errs,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn parse(f: String) -> (GreenNode, Vec<(AstPtr<ParseError>, SyntaxError)>) {
|
||||||
|
let toks = syntax_kind::lex(&f);
|
||||||
|
let input = input::Input::new(&toks);
|
||||||
|
let mut parser = lst_parser::Parser::new(input);
|
||||||
|
|
||||||
|
grammar::source_file(&mut parser);
|
||||||
|
|
||||||
|
let p_out = parser.finish();
|
||||||
|
let (lst, errs) = Output::from_parser_output(toks, p_out).dissolve();
|
||||||
|
|
||||||
|
(lst.clone(), Self::find_errs(lst, errs))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn find_errs(
|
||||||
|
lst: GreenNode,
|
||||||
|
mut errs: Vec<SyntaxError>,
|
||||||
|
) -> Vec<(AstPtr<ParseError>, SyntaxError)> {
|
||||||
|
let mut out = Vec::new();
|
||||||
|
errs.reverse();
|
||||||
|
|
||||||
|
let lst = SyntaxNode::new_root(lst);
|
||||||
|
Self::find_errs_recursive(&mut out, lst, &mut errs);
|
||||||
|
|
||||||
|
out
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn find_errs_recursive(
|
||||||
|
mut out: &mut Vec<(AstPtr<ParseError>, SyntaxError)>,
|
||||||
|
lst: SyntaxNode,
|
||||||
|
mut errs: &mut Vec<SyntaxError>,
|
||||||
|
) {
|
||||||
|
lst.children()
|
||||||
|
.filter_map(|c| ParseError::cast(c))
|
||||||
|
.for_each(|e| out.push((AstPtr::new(&e), errs.pop().unwrap())));
|
||||||
|
|
||||||
|
lst.children()
|
||||||
|
.for_each(|c| Self::find_errs_recursive(out, c, errs));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use crate::world::files::source_file::SourceFile;

    // Parses `input` and asserts the rendered "Err@range" strings match
    // `expected` exactly, order included.
    fn check_find_errs(input: &str, expected: &[&str]) {
        let (_, errs) = SourceFile::parse(input.to_string());

        let errs = errs
            .into_iter()
            .map(|(loc, err)| format!("{:?}@{:?}", err, loc.syntax_node_ptr().text_range()))
            .collect::<Vec<String>>();

        assert_eq!(
            errs,
            expected
                .into_iter()
                .map(|s| s.to_string())
                .collect::<Vec<_>>()
        )
    }

    #[test]
    fn test_find_errs() {
        // Missing def body and missing module name.
        check_find_errs(
            "def meow = ;\n mod ;",
            &["Expected([DEF_BODY])@11..11", "Expected([IDENT])@18..18"],
        );

        // Item never terminated; the trailing pipe has no sink.
        check_find_errs(
            "def awawa = a |",
            &["UnterminatedTopLevelItem@0..15", "PipelineNeedsSink@12..15"],
        )
    }
}
|
12
crates/pawarser/Cargo.toml
Normal file
12
crates/pawarser/Cargo.toml
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
[package]
|
||||||
|
name = "pawarser"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
rowan = "0.15.15"
|
||||||
|
drop_bomb = "0.1.5"
|
||||||
|
enumset = "1.1.3"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
8
crates/pawarser/src/lib.rs
Normal file
8
crates/pawarser/src/lib.rs
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
#![feature(iter_collect_into)]
|
||||||
|
pub mod parser;
|
||||||
|
|
||||||
|
pub use parser::{
|
||||||
|
error::SyntaxError,
|
||||||
|
marker::{CompletedMarker, Marker},
|
||||||
|
Parser, SyntaxElement,
|
||||||
|
};
|
253
crates/pawarser/src/parser.rs
Normal file
253
crates/pawarser/src/parser.rs
Normal file
|
@ -0,0 +1,253 @@
|
||||||
|
use std::{cell::Cell, fmt, marker::PhantomData, mem};
|
||||||
|
|
||||||
|
use enumset::{EnumSet, EnumSetType};
|
||||||
|
use rowan::{GreenNode, GreenNodeBuilder};
|
||||||
|
|
||||||
|
use crate::parser::event::NodeKind;
|
||||||
|
|
||||||
|
use self::{event::Event, input::Input, marker::Marker};
|
||||||
|
pub use {error::SyntaxError, output::ParserOutput};
|
||||||
|
|
||||||
|
pub mod error;
|
||||||
|
mod event;
|
||||||
|
mod input;
|
||||||
|
pub mod marker;
|
||||||
|
pub mod output;
|
||||||
|
|
||||||
|
/// this is used to define some required SyntaxKinds like an EOF token or an error token
pub trait SyntaxElement
where
    Self: EnumSetType
        + Into<rowan::SyntaxKind>
        + From<rowan::SyntaxKind>
        + fmt::Debug
        + Clone
        + PartialEq
        + Eq,
{
    /// EOF value. This will be used by the rest of the parser library to represent an EOF.
    const SYNTAX_EOF: Self;
    /// Error value. This will be used as a placeholder for associated respective errors.
    const SYNTAX_ERROR: Self;
    /// Root node kind: wraps every parse so the green tree always has
    /// exactly one root element.
    const SYNTAX_ROOT: Self;
}
|
||||||
|
|
||||||
|
/// An event-collecting parser over a token stream, generic over the concrete
/// syntax-kind and error types of the language being parsed.
pub struct Parser<'src, SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
    // Token stream (meaningless tokens are skipped for lookahead).
    input: Input<'src, SyntaxKind>,
    // Current position in the *meaningful* token sequence.
    pos: usize,
    // Recorded parse events; replayed into a green tree by `finish`.
    events: Vec<Event<SyntaxKind, SyntaxErr>>,
    // Max lookups before the parser is assumed stuck (see `step`).
    step_limit: u32,
    // Lookup counter; `Cell` because `current`/`nth` take `&self`.
    steps: Cell<u32>,
}
|
||||||
|
|
||||||
|
impl<'src, 'toks, SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>
|
||||||
|
Parser<'src, SyntaxKind, SyntaxErr>
|
||||||
|
{
|
||||||
|
/// eat all meaningless tokens at the end of the file.
|
||||||
|
pub fn eat_succeeding_meaningless(&mut self) {
|
||||||
|
self.push_ev(Event::Eat {
|
||||||
|
count: self.input.meaningless_tail_len(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get token from current position of the parser.
|
||||||
|
pub fn current(&self) -> SyntaxKind {
|
||||||
|
self.step();
|
||||||
|
self.input.kind(self.pos)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn start(&mut self, name: &str) -> Marker {
|
||||||
|
let pos = self.events.len();
|
||||||
|
self.push_ev(Event::tombstone());
|
||||||
|
Marker::new(pos, name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Eat next token if it's of kind `kind` and return `true`.
|
||||||
|
/// Otherwise, `false`.
|
||||||
|
pub fn eat(&mut self, kind: SyntaxKind) -> bool {
|
||||||
|
if !self.at(kind) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
self.do_bump();
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn do_bump(&mut self) {
|
||||||
|
self.push_ev(Event::Eat {
|
||||||
|
count: self.input.preceding_meaningless(self.pos),
|
||||||
|
});
|
||||||
|
self.pos += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if the token at the current parser position is of `kind`
|
||||||
|
pub fn at(&self, kind: SyntaxKind) -> bool {
|
||||||
|
self.nth_at(0, kind)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if the token that is `n` ahead is of `kind`
|
||||||
|
pub fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool {
|
||||||
|
self.nth(n) == kind
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn nth(&self, n: usize) -> SyntaxKind {
|
||||||
|
self.step();
|
||||||
|
self.input.kind(self.pos + n)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn push_ev(&mut self, event: Event<SyntaxKind, SyntaxErr>) {
|
||||||
|
self.events.push(event);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn step(&self) {
|
||||||
|
let steps = self.steps.get();
|
||||||
|
assert!(steps <= self.step_limit, "the parser seems stuck.");
|
||||||
|
self.steps.set(steps + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn finish(self) -> ParserOutput<SyntaxKind, SyntaxErr> {
|
||||||
|
let Self {
|
||||||
|
input,
|
||||||
|
pos,
|
||||||
|
mut events,
|
||||||
|
step_limit,
|
||||||
|
steps,
|
||||||
|
} = self;
|
||||||
|
let (mut raw_toks, meaningless_tokens) = input.dissolve();
|
||||||
|
let mut builder = GreenNodeBuilder::new();
|
||||||
|
// TODO: document what the hell a forward parent is
|
||||||
|
let mut fw_parents = Vec::new();
|
||||||
|
let mut errors: Vec<SyntaxErr> = Vec::new();
|
||||||
|
raw_toks.reverse();
|
||||||
|
|
||||||
|
// always have an implicit root node to avoid [`GreenNodeBuilder::finish()`] panicking due to multiple root elements.
|
||||||
|
builder.start_node(SyntaxKind::SYNTAX_ROOT.into());
|
||||||
|
|
||||||
|
for i in 0..events.len() {
|
||||||
|
match mem::replace(&mut events[i], Event::tombstone()) {
|
||||||
|
Event::Start {
|
||||||
|
kind,
|
||||||
|
forward_parent,
|
||||||
|
} => {
|
||||||
|
if kind == NodeKind::Tombstone && forward_parent.is_none() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolving forward parents
|
||||||
|
// temporarily jump around with the parser index and replace them with tombstones
|
||||||
|
fw_parents.push(kind);
|
||||||
|
let mut idx = i;
|
||||||
|
let mut fp = forward_parent;
|
||||||
|
while let Some(fwd) = fp {
|
||||||
|
idx += fwd as usize;
|
||||||
|
fp = match mem::replace(&mut events[idx], Event::tombstone()) {
|
||||||
|
Event::Start {
|
||||||
|
kind,
|
||||||
|
forward_parent,
|
||||||
|
} => {
|
||||||
|
fw_parents.push(kind);
|
||||||
|
forward_parent
|
||||||
|
}
|
||||||
|
_ => unreachable!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// clear semantically meaningless tokens before the new tree node for aesthetic reasons
|
||||||
|
while raw_toks
|
||||||
|
.last()
|
||||||
|
.is_some_and(|v| meaningless_tokens.contains(v.0))
|
||||||
|
{
|
||||||
|
// update first next Eat event
|
||||||
|
match events.iter_mut().find(|ev| matches!(ev, Event::Eat { .. })) {
|
||||||
|
Some(Event::Eat { count }) => *count -= 1,
|
||||||
|
_ => unreachable!(),
|
||||||
|
}
|
||||||
|
|
||||||
|
// put whitespace into lst
|
||||||
|
let (tok, text) = raw_toks.pop().unwrap();
|
||||||
|
builder.token(tok.into(), text);
|
||||||
|
}
|
||||||
|
|
||||||
|
// insert forward parents into the tree in correct order
|
||||||
|
for kind in fw_parents.drain(..).rev() {
|
||||||
|
match kind {
|
||||||
|
NodeKind::Syntax(kind) => builder.start_node(kind.into()),
|
||||||
|
NodeKind::Error(err) => {
|
||||||
|
errors.push(err);
|
||||||
|
builder.start_node(SyntaxKind::SYNTAX_ERROR.into())
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Event::Finish => builder.finish_node(),
|
||||||
|
Event::Eat { count } => (0..count).for_each(|_| {
|
||||||
|
let (tok, text) = raw_toks.pop().unwrap();
|
||||||
|
builder.token(tok.into(), text);
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// finish SYNTAX_ROOT
|
||||||
|
builder.finish_node();
|
||||||
|
|
||||||
|
ParserOutput {
|
||||||
|
green_node: builder.finish(),
|
||||||
|
errors,
|
||||||
|
_syntax_kind: PhantomData::<SyntaxKind>,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Configures and constructs a [`Parser`].
pub struct ParserBuilder<
    'src,
    SyntaxKind: SyntaxElement,
    // SyntaxErr: SyntaxError,
> {
    // The raw lexer output the parser will consume.
    raw_toks: Vec<(SyntaxKind, &'src str)>,
    // Token kinds lookahead should skip (whitespace and the like).
    meaningless_token_kinds: EnumSet<SyntaxKind>,
    // See `Parser::step_limit`.
    step_limit: u32,
}
||||||
|
|
||||||
|
impl<'src, SyntaxKind: SyntaxElement> ParserBuilder<'src, SyntaxKind> {
|
||||||
|
pub fn new(raw_toks: Vec<(SyntaxKind, &'src str)>) -> Self {
|
||||||
|
Self {
|
||||||
|
raw_toks,
|
||||||
|
meaningless_token_kinds: EnumSet::new(),
|
||||||
|
step_limit: 4096,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sets the parser step limit.
|
||||||
|
/// Defaults to 4096
|
||||||
|
pub fn step_limit(mut self, new: u32) -> Self {
|
||||||
|
self.step_limit = new;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_meaningless(mut self, kind: SyntaxKind) -> Self {
|
||||||
|
self.meaningless_token_kinds.insert(kind);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_meaningless_many(mut self, kind: Vec<SyntaxKind>) -> Self {
|
||||||
|
self.meaningless_token_kinds
|
||||||
|
.insert_all(kind.into_iter().collect());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn build<SyntaxErr: SyntaxError>(self) -> Parser<'src, SyntaxKind, SyntaxErr> {
|
||||||
|
let Self {
|
||||||
|
raw_toks,
|
||||||
|
meaningless_token_kinds,
|
||||||
|
step_limit,
|
||||||
|
} = self;
|
||||||
|
Parser {
|
||||||
|
input: Input::new(raw_toks, Some(meaningless_token_kinds)),
|
||||||
|
pos: 0,
|
||||||
|
events: Vec::new(),
|
||||||
|
step_limit,
|
||||||
|
steps: Cell::new(0),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
9
crates/pawarser/src/parser/error.rs
Normal file
9
crates/pawarser/src/parser/error.rs
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
|
/// A marker trait... for now!
///
/// Implemented by a language's error type so the parser can collect errors
/// alongside the tree and mark the corresponding error nodes.
// TODO: constrain that conversion to `NodeKind::Error` is enforced to be possible
pub trait SyntaxError
where
    Self: fmt::Debug + Clone + PartialEq + Eq,
{
}
|
42
crates/pawarser/src/parser/event.rs
Normal file
42
crates/pawarser/src/parser/event.rs
Normal file
|
@ -0,0 +1,42 @@
|
||||||
|
use enumset::EnumSetType;
|
||||||
|
|
||||||
|
use super::{error::SyntaxError, SyntaxElement};
|
||||||
|
|
||||||
|
/// A single recorded step of the parse; the event list is replayed into a
/// green tree by `Parser::finish`.
pub enum Event<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
    /// Opens a node of `kind`.
    Start {
        kind: NodeKind<SyntaxKind, SyntaxErr>,
        /// Relative offset (in events) of a later `Start` that should become
        /// this node's parent; set by `CompletedMarker::precede`.
        forward_parent: Option<usize>,
    },
    /// Closes the most recently opened node.
    Finish,
    /// Emits the next `count` raw tokens into the tree.
    Eat {
        count: usize,
    },
}

impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> Event<SyntaxKind, SyntaxErr> {
    /// Placeholder `Start` event; skipped entirely when the tree is built.
    pub fn tombstone() -> Self {
        Self::Start {
            kind: NodeKind::Tombstone,
            forward_parent: None,
        }
    }
}
|
||||||
|
|
||||||
|
/// What a started node will become once the tree is built.
#[derive(Clone, PartialEq, Eq)]
pub enum NodeKind<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
    /// Placeholder that produces no node at all.
    Tombstone,
    /// A regular syntax node of the given kind.
    Syntax(SyntaxKind),
    /// An error node; the payload ends up in the parser output's error list.
    Error(SyntaxErr),
}

impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> NodeKind<SyntaxKind, SyntaxErr> {
    /// `true` if this is the placeholder kind.
    pub fn is_tombstone(&self) -> bool {
        matches!(self, Self::Tombstone)
    }
    /// `true` if this is a regular syntax node.
    pub fn is_syntax(&self) -> bool {
        matches!(self, Self::Syntax(_))
    }
    /// `true` if this is an error node.
    pub fn is_error(&self) -> bool {
        matches!(self, Self::Error(_))
    }
}
|
67
crates/pawarser/src/parser/input.rs
Normal file
67
crates/pawarser/src/parser/input.rs
Normal file
|
@ -0,0 +1,67 @@
|
||||||
|
use enumset::{EnumSet, EnumSetType};
|
||||||
|
|
||||||
|
use super::SyntaxElement;
|
||||||
|
|
||||||
|
/// The parser's token stream: raw lexer output plus an index of the tokens
/// that are semantically meaningful (i.e. not whitespace-like).
pub struct Input<'src, SyntaxKind: SyntaxElement> {
    raw: Vec<(SyntaxKind, &'src str)>,
    // enumset of meaningless tokens
    semantically_meaningless: EnumSet<SyntaxKind>,
    // indices of non-meaningless tokens
    meaningful_toks: Vec<usize>,
}
|
||||||
|
|
||||||
|
impl<'src, SyntaxKind: SyntaxElement> Input<'src, SyntaxKind> {
|
||||||
|
pub fn new(
|
||||||
|
raw_toks: Vec<(SyntaxKind, &'src str)>,
|
||||||
|
meaningless: Option<EnumSet<SyntaxKind>>,
|
||||||
|
) -> Self {
|
||||||
|
let mut meaningful_toks = Vec::new();
|
||||||
|
|
||||||
|
if let Some(meaningless) = meaningless {
|
||||||
|
let meaningful_toks = raw_toks
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.filter_map(|(i, tok)| (!meaningless.contains(tok.0)).then_some(i))
|
||||||
|
.collect_into(&mut meaningful_toks);
|
||||||
|
}
|
||||||
|
|
||||||
|
Self {
|
||||||
|
raw: raw_toks,
|
||||||
|
semantically_meaningless: meaningless.unwrap_or_default(),
|
||||||
|
meaningful_toks,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn kind(&self, idx: usize) -> SyntaxKind {
|
||||||
|
let Some(meaningful_idx) = self.meaningful_toks.get(idx) else {
|
||||||
|
return SyntaxKind::SYNTAX_EOF;
|
||||||
|
};
|
||||||
|
|
||||||
|
self.raw.get(*meaningful_idx).unwrap().0
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn preceding_meaningless(&self, idx: usize) -> usize {
|
||||||
|
assert!(self.meaningful_toks.len() > idx);
|
||||||
|
|
||||||
|
if idx == 0 {
|
||||||
|
// maybe should be `self.meaningful_toks[idx]` instead??
|
||||||
|
1
|
||||||
|
} else {
|
||||||
|
self.meaningful_toks[idx] - self.meaningful_toks[idx - 1]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// get the count of meaningless tokens at the end of the file.
|
||||||
|
pub fn meaningless_tail_len(&self) -> usize {
|
||||||
|
self.raw.len() - (self.meaningful_toks.last().unwrap() + 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn dissolve(self) -> (Vec<(SyntaxKind, &'src str)>, EnumSet<SyntaxKind>) {
|
||||||
|
let Self {
|
||||||
|
raw,
|
||||||
|
semantically_meaningless,
|
||||||
|
..
|
||||||
|
} = self;
|
||||||
|
(raw, semantically_meaningless)
|
||||||
|
}
|
||||||
|
}
|
97
crates/pawarser/src/parser/marker.rs
Normal file
97
crates/pawarser/src/parser/marker.rs
Normal file
|
@ -0,0 +1,97 @@
|
||||||
|
use drop_bomb::DropBomb;
|
||||||
|
use rowan::SyntaxKind;
|
||||||
|
|
||||||
|
use super::{
|
||||||
|
error::SyntaxError,
|
||||||
|
event::{Event, NodeKind},
|
||||||
|
Parser, SyntaxElement,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Handle for an in-progress node. Must be resolved via `complete`, `error`
/// or `abandon` — dropping it unresolved panics (drop bomb).
pub struct Marker {
    // Index of this marker's `Start` event in `Parser::events`.
    pos: usize,
    // Panics on drop unless defused by one of the consuming methods.
    bomb: DropBomb,
}
|
||||||
|
|
||||||
|
impl Marker {
    /// Creates a marker for the `Start` event at `pos`; `name` only appears
    /// in the drop-bomb panic message.
    pub(super) fn new(pos: usize, name: &str) -> Self {
        Self {
            pos,
            bomb: DropBomb::new(format!("Marker {name} must be completed or abandoned.")),
        }
    }

    /// Shared implementation of `complete`/`error`: writes `kind` into this
    /// marker's `Start` event and records the matching `Finish`.
    fn close_node<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
        mut self,
        p: &mut Parser<SyntaxKind, SyntaxErr>,
        kind: NodeKind<SyntaxKind, SyntaxErr>,
    ) -> CompletedMarker<SyntaxKind, SyntaxErr> {
        self.bomb.defuse();

        // The event at `pos` is the tombstone `Start` pushed by `Parser::start`.
        match &mut p.events[self.pos] {
            Event::Start { kind: slot, .. } => *slot = kind.clone(),
            _ => unreachable!(),
        }

        p.push_ev(Event::Finish);
        CompletedMarker {
            pos: self.pos,
            kind,
        }
    }

    /// Finishes the node as a regular syntax node of `kind`.
    pub fn complete<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
        self,
        p: &mut Parser<SyntaxKind, SyntaxErr>,
        kind: SyntaxKind,
    ) -> CompletedMarker<SyntaxKind, SyntaxErr> {
        self.close_node(p, NodeKind::Syntax(kind))
    }

    /// Finishes the node as an error node carrying `kind`.
    pub fn error<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
        self,
        p: &mut Parser<SyntaxKind, SyntaxErr>,
        kind: SyntaxErr,
    ) -> CompletedMarker<SyntaxKind, SyntaxErr> {
        self.close_node(p, NodeKind::Error(kind))
    }

    /// Discards the node without producing anything.
    pub fn abandon<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
        mut self,
        p: &mut Parser<SyntaxKind, SyntaxErr>,
    ) {
        self.bomb.defuse();

        // clean up empty tombstone event from marker
        if self.pos == p.events.len() - 1 {
            match p.events.pop() {
                Some(Event::Start {
                    kind: NodeKind::Tombstone,
                    forward_parent: None,
                }) => (),
                _ => unreachable!(),
            }
        }
    }
}
|
||||||
|
|
||||||
|
/// Proof that a node has been closed; allows wrapping it after the fact.
pub struct CompletedMarker<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
    // Index of the node's `Start` event.
    pos: usize,
    // What the node was completed as.
    kind: NodeKind<SyntaxKind, SyntaxErr>,
}

impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> CompletedMarker<SyntaxKind, SyntaxErr> {
    /// Starts a new node that will become the *parent* of this completed one.
    pub fn precede(self, p: &mut Parser<SyntaxKind, SyntaxErr>, name: &str) -> Marker {
        let new_pos = p.start(name);

        match &mut p.events[self.pos] {
            Event::Start { forward_parent, .. } => {
                // point forward parent of the node this marker completed to the new node
                // will later be used to make the new node a parent of the current node.
                *forward_parent = Some(new_pos.pos - self.pos)
            }
            _ => unreachable!(),
        }

        new_pos
    }
}
|
73
crates/pawarser/src/parser/output.rs
Normal file
73
crates/pawarser/src/parser/output.rs
Normal file
|
@ -0,0 +1,73 @@
|
||||||
|
use std::{fmt, marker::PhantomData};
|
||||||
|
|
||||||
|
use rowan::{GreenNode, GreenNodeData, GreenTokenData, NodeOrToken};
|
||||||
|
|
||||||
|
use crate::{SyntaxElement, SyntaxError};
|
||||||
|
|
||||||
|
/// Result of a parse: the green tree plus all collected errors.
pub struct ParserOutput<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
    pub green_node: GreenNode,
    // In tree order; error nodes in the tree correspond 1:1 to entries here.
    pub errors: Vec<SyntaxErr>,
    // Green nodes store only raw u16 kinds; keep the typed kind around.
    pub(super) _syntax_kind: PhantomData<SyntaxKind>,
}
|
||||||
|
|
||||||
|
impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> std::fmt::Debug
    for ParserOutput<SyntaxKind, SyntaxErr>
{
    /// Renders the whole tree; `{:#?}` produces an indented multi-line dump.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Reversed so the recursion can `pop()` errors in tree order.
        let mut errs: Vec<&SyntaxErr> = self.errors.iter().collect();
        errs.reverse();
        debug_print_output::<SyntaxKind, SyntaxErr>(
            NodeOrToken::Node(&self.green_node),
            f,
            0,
            &mut errs,
        )
    }
}

/// Recursive pretty-printer for the green tree; pairs each error node with
/// the next entry popped from `errs`.
fn debug_print_output<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
    node: NodeOrToken<&GreenNodeData, &GreenTokenData>,
    f: &mut std::fmt::Formatter<'_>,
    lvl: i32,
    errs: &mut Vec<&SyntaxErr>,
) -> std::fmt::Result {
    // Indent only in alternate (`{:#?}`) mode.
    if f.alternate() {
        for _ in 0..lvl {
            f.write_str(" ")?;
        }
    }
    let maybe_newline = if f.alternate() { "\n" } else { " " };

    match node {
        NodeOrToken::Node(n) => {
            let kind: SyntaxKind = node.kind().into();
            if kind != SyntaxKind::SYNTAX_ERROR {
                write!(f, "{:?} {{{maybe_newline}", kind)?;
            } else {
                // Error nodes print their associated error inline.
                let err = errs
                    .pop()
                    .expect("all error syntax nodes should correspond to an error");

                write!(f, "{:?}: {err:?} {{{maybe_newline}", kind)?;
            }
            for c in n.children() {
                debug_print_output::<SyntaxKind, SyntaxErr>(c, f, lvl + 1, errs)?;
            }

            if f.alternate() {
                for _ in 0..lvl {
                    f.write_str(" ")?;
                }
            }
            write!(f, "}}{maybe_newline}")
        }
        NodeOrToken::Token(t) => {
            write!(
                f,
                "{:?} {:?};{maybe_newline}",
                Into::<SyntaxKind>::into(t.kind()),
                t.text()
            )
        }
    }
}
|
15
crates/svg-filters/Cargo.toml
Normal file
15
crates/svg-filters/Cargo.toml
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
[package]
|
||||||
|
name = "svg-filters"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
csscolorparser = "0.6.2"
|
||||||
|
indexmap = "2.2.5"
|
||||||
|
petgraph = { workspace = true }
|
||||||
|
quick-xml = { version = "0.31.0", features = ["serialize"] }
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
158
crates/svg-filters/src/codegen.rs
Normal file
158
crates/svg-filters/src/codegen.rs
Normal file
|
@ -0,0 +1,158 @@
|
||||||
|
use std::{
|
||||||
|
cmp,
|
||||||
|
collections::{BTreeSet, HashMap},
|
||||||
|
fmt::Display,
|
||||||
|
io::Read,
|
||||||
|
ops::Not,
|
||||||
|
};
|
||||||
|
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
use petgraph::{
|
||||||
|
algo::toposort,
|
||||||
|
graph::DiGraph,
|
||||||
|
prelude::{EdgeIndex, NodeIndex},
|
||||||
|
};
|
||||||
|
use quick_xml::ElementWriter;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
types::{
|
||||||
|
graph::{edge::Edge, FilterGraph, NodeInput},
|
||||||
|
nodes::{primitives::WriteElement, CommonAttrs},
|
||||||
|
},
|
||||||
|
Node,
|
||||||
|
};
|
||||||
|
|
||||||
|
use self::error::CodegenError;
|
||||||
|
|
||||||
|
/// A collection of SVG filters, keyed by the `id` attribute they will be
/// emitted with in the generated `<svg>` document.
pub struct SvgDocument {
    // filter id -> its primitive graph
    filters: HashMap<String, FilterGraph>,
}
|
||||||
|
|
||||||
|
impl SvgDocument {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
filters: HashMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::unwrap_used, reason = "we literally just did the insertion")]
|
||||||
|
pub fn create_filter(&mut self, id: impl ToString) -> &mut FilterGraph {
|
||||||
|
let filter = FilterGraph::new();
|
||||||
|
|
||||||
|
self.filters.insert(id.to_string(), filter);
|
||||||
|
self.filters.get_mut(&id.to_string()).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn generate_svg_pretty(&self) -> String {
|
||||||
|
let mut result = Vec::new();
|
||||||
|
let doc_writer = quick_xml::Writer::new_with_indent(&mut result, b' ', 2);
|
||||||
|
|
||||||
|
self.generate(doc_writer);
|
||||||
|
|
||||||
|
String::from_utf8_lossy(&result).to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn generate_svg(&self) -> String {
|
||||||
|
let mut result = Vec::new();
|
||||||
|
let doc_writer = quick_xml::Writer::new(&mut result);
|
||||||
|
|
||||||
|
self.generate(doc_writer);
|
||||||
|
|
||||||
|
String::from_utf8_lossy(&result).to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn generate(&self, mut doc_writer: quick_xml::Writer<&mut Vec<u8>>) {
|
||||||
|
doc_writer
|
||||||
|
.create_element("svg")
|
||||||
|
.write_inner_content(|writer| {
|
||||||
|
self.filters
|
||||||
|
.iter()
|
||||||
|
.try_fold(writer, Self::gen_filter)
|
||||||
|
.map(|_| {})
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn gen_filter<'w, 'r>(
|
||||||
|
writer: &'w mut quick_xml::Writer<&'r mut Vec<u8>>,
|
||||||
|
(id, graph): (&String, &FilterGraph),
|
||||||
|
) -> Result<&'w mut quick_xml::Writer<&'r mut Vec<u8>>, CodegenError> {
|
||||||
|
writer
|
||||||
|
.create_element("filter")
|
||||||
|
.with_attribute(("id", id.as_str()))
|
||||||
|
.write_inner_content(|writer| Self::graph_to_svg(writer, graph))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn graph_to_svg(
|
||||||
|
writer: &mut quick_xml::Writer<&mut Vec<u8>>,
|
||||||
|
graph: &FilterGraph,
|
||||||
|
) -> Result<(), CodegenError> {
|
||||||
|
let sorted = toposort(&graph.dag, None).expect("no cycles allowed in a DAG");
|
||||||
|
sorted
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|node_idx| {
|
||||||
|
graph
|
||||||
|
.dag
|
||||||
|
.node_weight(node_idx)
|
||||||
|
.and_then(|node| node.primitive())
|
||||||
|
.map(|(primitive, common_attrs)| (node_idx, primitive, common_attrs))
|
||||||
|
})
|
||||||
|
.try_fold(writer, |writer, (node_idx, primitive, common_attrs)| {
|
||||||
|
primitive.element_writer(
|
||||||
|
writer,
|
||||||
|
*common_attrs,
|
||||||
|
graph
|
||||||
|
.inputs(node_idx)
|
||||||
|
.into_iter()
|
||||||
|
.map(|v| v.to_string())
|
||||||
|
.collect(),
|
||||||
|
graph
|
||||||
|
.outputs(node_idx)
|
||||||
|
.is_empty()
|
||||||
|
.not()
|
||||||
|
.then_some(format!("r{}", node_idx.index())),
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// convenience method to avoid fuckups during future changes
|
||||||
|
fn format_edge_idx(idx: EdgeIndex) -> String {
|
||||||
|
format!("edge{}", idx.index())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_node_idx(node_idx: NodeIndex) -> String {
|
||||||
|
format!("r{}", node_idx.index())
|
||||||
|
}
|
||||||
|
|
||||||
|
mod error {
|
||||||
|
use std::{error::Error, fmt::Display};
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum CodegenError {
|
||||||
|
QuickXmlError(quick_xml::Error),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<quick_xml::Error> for CodegenError {
|
||||||
|
fn from(value: quick_xml::Error) -> Self {
|
||||||
|
Self::QuickXmlError(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for CodegenError {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
CodegenError::QuickXmlError(e) => e.fmt(f),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Error for CodegenError {}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A default document is simply an empty one, same as [`SvgDocument::new`].
impl Default for SvgDocument {
    fn default() -> Self {
        Self::new()
    }
}
|
40
crates/svg-filters/src/lib.rs
Normal file
40
crates/svg-filters/src/lib.rs
Normal file
|
@ -0,0 +1,40 @@
|
||||||
|
#![feature(lint_reasons)]
|
||||||
|
|
||||||
|
#[macro_use]
pub mod util {
    // Builds a single `quick_xml` attribute from a name literal (presumably a
    // byte-string literal, since `QName` wraps it directly — TODO confirm at
    // call sites) and any value with a `to_string()`.
    macro_rules! gen_attr {
        ($name:literal = $out:expr) => {
            quick_xml::events::attributes::Attribute {
                key: quick_xml::name::QName($name),
                value: std::borrow::Cow::from(($out).to_string().into_bytes()),
            }
        };
    }

    // Builds a Vec of attributes. Three forms:
    //   gen_attrs![$name: $val, ...]            -- unconditional list
    //   gen_attrs![$cond => $name: $val, ...]   -- include only entries whose
    //                                              condition holds
    //   gen_attrs![other; $cond => $name: $val, ...]
    //                                           -- append the conditional
    //                                              entries to `other`
    macro_rules! gen_attrs {
        ($($name:literal: $out:expr),+) => {
            vec![
                $(gen_attr!($name = $out)),+
            ]
        };
        ($($cond:expr => $name:literal: $out:expr),+) => {
            {
                let mut r = Vec::new();
                $(if $cond {
                    r.push(gen_attr!($name = $out));
                })+
                r
            }
        };
        ($other:ident; $($cond:expr => $name:literal: $out:expr),+) => {
            $other.append(&mut gen_attrs![$($cond => $name: $out),+]);
        };
    }
}
|
||||||
|
|
||||||
|
pub mod codegen;
|
||||||
|
pub mod types;
|
||||||
|
pub use types::nodes::Node;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
65
crates/svg-filters/src/main.rs
Normal file
65
crates/svg-filters/src/main.rs
Normal file
|
@ -0,0 +1,65 @@
|
||||||
|
use svg_filters::{
|
||||||
|
codegen::SvgDocument,
|
||||||
|
types::nodes::{
|
||||||
|
primitives::{
|
||||||
|
blend::BlendMode,
|
||||||
|
color_matrix::ColorMatrixType,
|
||||||
|
component_transfer::TransferFn,
|
||||||
|
displacement_map::Channel,
|
||||||
|
turbulence::{NoiseType, StitchTiles},
|
||||||
|
},
|
||||||
|
standard_input::StandardInput,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Demo binary: builds a chromatic-aberration-style filter from turbulence,
/// per-channel color matrices, offsets and displacement maps, then prints the
/// pretty-printed SVG to stdout.
fn main() {
    let mut doc = SvgDocument::new();

    let f = doc.create_filter("cmyk-chromabb");

    // fractal noise that drives the per-channel displacement
    let noise = f.turbulence(0., 0.1, 2, 0, StitchTiles::Stitch, NoiseType::FractalNoise);
    // posterize the noise color channels into 6 steps; pin alpha to 0.5
    let noise = f.component_transfer_rgba(
        noise,
        TransferFn::Discrete {
            table_values: vec![0., 0.2, 0.4, 0.6, 0.8, 1.],
        },
        TransferFn::Discrete {
            table_values: vec![0., 0.2, 0.4, 0.6, 0.8, 1.],
        },
        TransferFn::Discrete {
            table_values: vec![0., 0.2, 0.4, 0.6, 0.8, 1.],
        },
        TransferFn::Linear {
            slope: 0.,
            intercept: 0.5,
        },
    );

    // cyan-ish copy: keep only G and B, shift right, displace by the noise
    let cyan = f.color_matrix(
        StandardInput::SourceGraphic,
        ColorMatrixType::Matrix(Box::new([
            0., 0., 0., 0., 0., //
            0., 1., 0., 0., 0., //
            0., 0., 1., 0., 0., //
            0., 0., 0., 1., 0.,
        ])),
    );
    let cyan = f.offset(cyan, 25., 0.);
    let cyan = f.displacement_map(cyan, noise, 50., Channel::R, Channel::A);

    // magenta-ish copy: keep only R and B, displace, shift left
    let magenta = f.color_matrix(
        StandardInput::SourceGraphic,
        ColorMatrixType::Matrix(Box::new([
            1., 0., 0., 0., 0., //
            0., 0., 0., 0., 0., //
            0., 0., 1., 0., 0., //
            0., 0., 0., 1., 0.,
        ])),
    );
    let magenta = f.displacement_map(magenta, noise, 50., Channel::R, Channel::A);
    let magenta = f.offset(magenta, -25., 0.);

    // recombine the two displaced copies
    f.blend(cyan, magenta, BlendMode::Screen);

    println!("{}", doc.generate_svg_pretty());
}
|
17
crates/svg-filters/src/tests.rs
Normal file
17
crates/svg-filters/src/tests.rs
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
mod blend;
mod color_matrix;
mod complex;
mod component_transfer;
mod displacement_map;
mod flood;
mod gaussian_blur;
mod offset;
mod turbulence;

// Placeholders for primitives without tests yet.
mod composite {}
mod convolve_matrix {}
mod diffuse_lighting {}
mod image {}
mod merge {}
mod morphology {}
mod specular_lighting {}
mod tile {}
|
20
crates/svg-filters/src/tests/blend.rs
Normal file
20
crates/svg-filters/src/tests/blend.rs
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
use crate::{
    codegen::SvgDocument,
    types::nodes::{primitives::blend::BlendMode, standard_input::StandardInput},
};

// Two opposite horizontal offsets of the source, combined with a multiply
// feBlend; pins the exact emitted markup (element order, r<n> result names).
#[test]
fn test_offset_blend() {
    let mut doc = SvgDocument::new();

    let blend = doc.create_filter("blend");

    let offset0 = blend.offset(StandardInput::SourceGraphic, 100., 0.);
    let offset1 = blend.offset(StandardInput::SourceGraphic, -100., 0.);
    blend.blend(offset0, offset1, BlendMode::Multiply);

    assert_eq!(
        doc.generate_svg(),
        r#"<svg><filter id="blend"><feOffset dx="-100" dy="0" in="SourceGraphic" result="r7"/><feOffset dx="100" dy="0" in="SourceGraphic" result="r6"/><feBlend mode="multiply" in="r6" in2="r7"/></filter></svg>"#
    );
}
|
25
crates/svg-filters/src/tests/color_matrix.rs
Normal file
25
crates/svg-filters/src/tests/color_matrix.rs
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
use crate::{
    codegen::SvgDocument,
    types::nodes::{primitives::color_matrix::ColorMatrixType, standard_input::StandardInput},
};

// Copies the red channel into R, G and B via a feColorMatrix; pins the
// space-joined `values` serialization.
#[test]
fn test_greyscale_channel_extraction() {
    let mut doc = SvgDocument::new();
    let greyscale = doc.create_filter("greyscale");

    greyscale.color_matrix(
        StandardInput::SourceGraphic,
        ColorMatrixType::Matrix(Box::new([
            1., 0., 0., 0., 0., //
            1., 0., 0., 0., 0., //
            1., 0., 0., 0., 0., //
            0., 0., 0., 1., 0.,
        ])),
    );

    assert_eq!(
        doc.generate_svg(),
        r#"<svg><filter id="greyscale"><feColorMatrix values="1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 1 0" in="SourceGraphic"/></filter></svg>"#
    );
}
|
51
crates/svg-filters/src/tests/complex.rs
Normal file
51
crates/svg-filters/src/tests/complex.rs
Normal file
|
@ -0,0 +1,51 @@
|
||||||
|
use crate::{
    codegen::SvgDocument,
    types::nodes::{primitives::color_matrix::ColorMatrixType, standard_input::StandardInput},
};

// End-to-end filter: extract R/G/B channels, offset and blur R and B in
// opposite directions, then recombine with arithmetic feComposites. Pins the
// full generated markup including topological emission order.
#[test]
fn test_chrom_abb() {
    let mut doc = SvgDocument::new();
    let chromabb = doc.create_filter("chromabb_gen");

    // red channel, shifted right and blurred horizontally
    let chan_r = chromabb.color_matrix(
        StandardInput::SourceGraphic,
        ColorMatrixType::Matrix(Box::new([
            1., 0., 0., 0., 0., //
            0., 0., 0., 0., 0., //
            0., 0., 0., 0., 0., //
            0., 0., 0., 1., 0.,
        ])),
    );
    let offset_r = chromabb.offset(chan_r, 25., 0.);
    let blur_r = chromabb.gaussian_blur_xy(offset_r, 5, 0);

    // blue channel, shifted left and blurred horizontally
    let chan_b = chromabb.color_matrix(
        StandardInput::SourceGraphic,
        ColorMatrixType::Matrix(Box::new([
            0., 0., 0., 0., 0., //
            0., 0., 0., 0., 0., //
            0., 0., 1., 0., 0., //
            0., 0., 0., 1., 0.,
        ])),
    );
    let offset_b = chromabb.offset(chan_b, -25., 0.);
    let blur_b = chromabb.gaussian_blur_xy(offset_b, 5, 0);

    // additive combine of the shifted red and blue copies
    let composite_rb = chromabb.composite_arithmetic(blur_r, blur_b, 0., 1., 1., 0.);

    // untouched green channel added back in
    let chan_g = chromabb.color_matrix(
        StandardInput::SourceGraphic,
        ColorMatrixType::Matrix(Box::new([
            0., 0., 0., 0., 0., //
            0., 1., 0., 0., 0., //
            0., 0., 0., 0., 0., //
            0., 0., 0., 1., 0.,
        ])),
    );
    chromabb.composite_arithmetic(composite_rb, chan_g, 0., 1., 1., 0.);
    assert_eq!(
        doc.generate_svg(),
        r#"<svg><filter id="chromabb_gen"><feColorMatrix values="0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0" in="SourceGraphic" result="r13"/><feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0" in="SourceGraphic" result="r9"/><feOffset dx="-25" dy="0" in="r9" result="r10"/><feGaussianBlur stdDeviation="5 0" in="r10" result="r11"/><feColorMatrix values="1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0" in="SourceGraphic" result="r6"/><feOffset dx="25" dy="0" in="r6" result="r7"/><feGaussianBlur stdDeviation="5 0" in="r7" result="r8"/><feComposite operator="arithmetic" k1="0" k2="1" k3="1" k4="0" in="r8" in2="r11" result="r12"/><feComposite operator="arithmetic" k1="0" k2="1" k3="1" k4="0" in="r12" in2="r13"/></filter></svg>"#
    );
}
|
36
crates/svg-filters/src/tests/component_transfer.rs
Normal file
36
crates/svg-filters/src/tests/component_transfer.rs
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
use crate::{
    codegen::SvgDocument,
    types::nodes::primitives::{
        component_transfer::{ComponentTransfer, TransferFn},
        FePrimitive,
    },
    Node,
};

// Builds a feComponentTransfer node directly (via add_node, bypassing the
// builder methods) with a different transfer function per channel; pins the
// feFuncR/G/B/A child-element serialization.
#[test]
fn test_comp_trans_simple() {
    let mut doc = SvgDocument::new();

    let comptrans = doc.create_filter("comp_trans");

    comptrans.add_node(Node::simple(FePrimitive::ComponentTransfer(
        ComponentTransfer {
            func_r: TransferFn::Table {
                table_values: vec![0., 0.1, 0.4, 0.9],
            },
            func_g: TransferFn::Discrete {
                table_values: vec![0.1, 0.3, 0.5, 0.7, 0.9],
            },
            func_b: TransferFn::Linear {
                slope: 1.0,
                intercept: 0.75,
            },
            func_a: TransferFn::Identity,
        },
    )));

    assert_eq!(
        doc.generate_svg(),
        r#"<svg><filter id="comp_trans"><feComponentTransfer><feFuncR type="table" tableValues="0 0.1 0.4 0.9"/><feFuncG type="discrete" tableValues="0.1 0.3 0.5 0.7 0.9"/><feFuncB type="linear" slope="1" intercept="0.75"/><feFuncA type="identity"/></feComponentTransfer></filter></svg>"#
    );
}
|
32
crates/svg-filters/src/tests/displacement_map.rs
Normal file
32
crates/svg-filters/src/tests/displacement_map.rs
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
use crate::{
    codegen::SvgDocument,
    types::nodes::{
        primitives::{
            displacement_map::Channel,
            turbulence::{NoiseType, StitchTiles},
        },
        standard_input::StandardInput,
    },
};

// Turbulence noise feeding a feDisplacementMap of the source graphic; pins
// that the noise node gets a `result` name and the map references it as in2.
#[test]
fn test_displacement_map_simple() {
    let mut doc = SvgDocument::new();

    let displace = doc.create_filter("displace");

    let simple_noise =
        displace.turbulence(0.01, 0.01, 1, 0, StitchTiles::Stitch, NoiseType::Turbulence);
    displace.displacement_map(
        StandardInput::SourceGraphic,
        simple_noise,
        128.,
        Channel::R,
        Channel::R,
    );

    assert_eq!(
        doc.generate_svg(),
        r#"<svg><filter id="displace"><feTurbulence baseFrequency="0.01 0.01" stitchTiles="stitch" result="r6"/><feDisplacementMap scale="128" xChannelSelector="R" yChannelSelector="R" in="SourceGraphic" in2="r6"/></filter></svg>"#
    );
}
|
17
crates/svg-filters/src/tests/flood.rs
Normal file
17
crates/svg-filters/src/tests/flood.rs
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
use csscolorparser::Color;

use crate::codegen::SvgDocument;

// A lone feFlood; pins that the color is serialized as a hex string.
#[test]
fn test_flood_simple() {
    let mut doc = SvgDocument::new();

    let turbdispl = doc.create_filter("noiseDisplace");

    turbdispl.flood(Color::new(0.9, 0.7, 0.85, 1.), 1.);

    assert_eq!(
        doc.generate_svg(),
        r##"<svg><filter id="noiseDisplace"><feFlood flood-color="#e6b3d9" flood-opacity="1"/></filter></svg>"##
    );
}
|
13
crates/svg-filters/src/tests/gaussian_blur.rs
Normal file
13
crates/svg-filters/src/tests/gaussian_blur.rs
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
use crate::{codegen::SvgDocument, types::nodes::standard_input::StandardInput};

// A lone feGaussianBlur; pins the two-value stdDeviation serialization.
#[test]
fn test_simple_blur() {
    let mut doc = SvgDocument::new();
    let blur = doc.create_filter("blur");

    blur.gaussian_blur_xy(StandardInput::SourceGraphic, 30, 30);
    assert_eq!(
        doc.generate_svg(),
        r#"<svg><filter id="blur"><feGaussianBlur stdDeviation="30 30" in="SourceGraphic"/></filter></svg>"#
    );
}
|
14
crates/svg-filters/src/tests/offset.rs
Normal file
14
crates/svg-filters/src/tests/offset.rs
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
use crate::{codegen::SvgDocument, types::nodes::standard_input::StandardInput};

// A lone feOffset; pins dx/dy serialization including the negative value.
#[test]
fn test_offset_simple() {
    let mut doc = SvgDocument::new();
    let offset = doc.create_filter("offset");

    offset.offset(StandardInput::SourceGraphic, 25., -25.);

    assert_eq!(
        doc.generate_svg(),
        r#"<svg><filter id="offset"><feOffset dx="25" dy="-25" in="SourceGraphic"/></filter></svg>"#
    );
}
|
25
crates/svg-filters/src/tests/turbulence.rs
Normal file
25
crates/svg-filters/src/tests/turbulence.rs
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
use crate::{
    codegen::SvgDocument,
    types::nodes::primitives::turbulence::{NoiseType, StitchTiles},
};

// A lone feTurbulence; pins that default-valued attributes are omitted and
// fractalNoise is spelled out in the `type` attribute.
#[test]
fn test_simple_turbulence() {
    let mut doc = SvgDocument::new();

    let noise = doc.create_filter("noise");

    noise.turbulence(
        0.01,
        0.01,
        1,
        0,
        StitchTiles::Stitch,
        NoiseType::FractalNoise,
    );

    assert_eq!(
        doc.generate_svg(),
        r#"<svg><filter id="noise"><feTurbulence baseFrequency="0.01 0.01" stitchTiles="stitch" type="fractalNoise"/></filter></svg>"#
    );
}
|
6
crates/svg-filters/src/types.rs
Normal file
6
crates/svg-filters/src/types.rs
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
pub mod length;
|
||||||
|
pub mod nodes;
|
||||||
|
|
||||||
|
// pub mod old;
|
||||||
|
|
||||||
|
pub mod graph;
|
143
crates/svg-filters/src/types/graph.rs
Normal file
143
crates/svg-filters/src/types/graph.rs
Normal file
|
@ -0,0 +1,143 @@
|
||||||
|
use std::fmt::{Debug, Display};
|
||||||
|
|
||||||
|
use petgraph::{prelude::NodeIndex, prelude::*};
|
||||||
|
|
||||||
|
use crate::Node;
|
||||||
|
|
||||||
|
use super::nodes::standard_input::StandardInput;
|
||||||
|
|
||||||
|
/// A single filter's primitives as a directed acyclic graph: nodes are
/// [`Node`]s, an edge from A to B means B consumes A's output.
#[derive(Debug)]
pub struct FilterGraph {
    pub dag: DiGraph<Node, ()>,
    // Pre-inserted nodes for the SVG standard inputs so every StandardInput
    // reference resolves to a stable, existing index (see `resolve_input`).
    source_graphic_idx: NodeIndex,
    source_alpha_idx: NodeIndex,
    background_image_idx: NodeIndex,
    background_alpha_idx: NodeIndex,
    fill_paint_idx: NodeIndex,
    stroke_paint_idx: NodeIndex,
}
|
||||||
|
|
||||||
|
/// Same as [`FilterGraph::new`]: a graph pre-seeded with the standard inputs.
impl Default for FilterGraph {
    fn default() -> Self {
        Self::new()
    }
}
|
||||||
|
|
||||||
|
/// Something a primitive can consume: one of the SVG standard inputs, or the
/// output of another node in the graph.
#[derive(Debug, Clone, Copy)]
pub enum NodeInput {
    Standard(StandardInput),
    Idx(NodeIndex),
}
|
||||||
|
|
||||||
|
impl Display for NodeInput {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
NodeInput::Standard(s) => Debug::fmt(s, f),
|
||||||
|
NodeInput::Idx(idx) => write!(f, "r{}", idx.index()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lets builder methods take `impl Into<NodeInput>` and accept a
// `StandardInput` directly.
impl From<StandardInput> for NodeInput {
    fn from(value: StandardInput) -> Self {
        Self::Standard(value)
    }
}
|
||||||
|
|
||||||
|
// Lets builder methods take `impl Into<NodeInput>` and accept the index
// returned by a previous builder call directly.
impl From<NodeIndex> for NodeInput {
    fn from(value: NodeIndex) -> Self {
        Self::Idx(value)
    }
}
|
||||||
|
|
||||||
|
impl FilterGraph {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
let mut dag = DiGraph::new();
|
||||||
|
|
||||||
|
let source_graphic_idx = dag.add_node(Node::StdInput(StandardInput::SourceGraphic));
|
||||||
|
let source_alpha_idx = dag.add_node(Node::StdInput(StandardInput::SourceAlpha));
|
||||||
|
let background_image_idx = dag.add_node(Node::StdInput(StandardInput::BackgroundImage));
|
||||||
|
let background_alpha_idx = dag.add_node(Node::StdInput(StandardInput::BackgroundAlpha));
|
||||||
|
let fill_paint_idx = dag.add_node(Node::StdInput(StandardInput::FillPaint));
|
||||||
|
let stroke_paint_idx = dag.add_node(Node::StdInput(StandardInput::StrokePaint));
|
||||||
|
|
||||||
|
Self {
|
||||||
|
dag,
|
||||||
|
source_graphic_idx,
|
||||||
|
source_alpha_idx,
|
||||||
|
background_image_idx,
|
||||||
|
background_alpha_idx,
|
||||||
|
fill_paint_idx,
|
||||||
|
stroke_paint_idx,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_node(&mut self, node: Node) -> NodeIndex {
|
||||||
|
self.dag.add_node(node)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn resolve_input(&self, input: NodeInput) -> NodeIndex {
|
||||||
|
match input {
|
||||||
|
NodeInput::Standard(StandardInput::SourceGraphic) => self.source_graphic_idx,
|
||||||
|
NodeInput::Standard(StandardInput::SourceAlpha) => self.source_alpha_idx,
|
||||||
|
NodeInput::Standard(StandardInput::BackgroundImage) => self.background_image_idx,
|
||||||
|
NodeInput::Standard(StandardInput::BackgroundAlpha) => self.background_alpha_idx,
|
||||||
|
NodeInput::Standard(StandardInput::FillPaint) => self.fill_paint_idx,
|
||||||
|
NodeInput::Standard(StandardInput::StrokePaint) => self.stroke_paint_idx,
|
||||||
|
NodeInput::Idx(i) => i,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(
|
||||||
|
clippy::unwrap_used,
|
||||||
|
reason = "we only operate on values we know exist, so unwrapping is safe"
|
||||||
|
)]
|
||||||
|
pub fn inputs(&self, node_idx: NodeIndex) -> Vec<NodeInput> {
|
||||||
|
let mut inputs = self
|
||||||
|
.dag
|
||||||
|
.neighbors_directed(node_idx, Direction::Incoming)
|
||||||
|
.map(|input_idx| (self.dag.find_edge(input_idx, node_idx).unwrap(), input_idx))
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
inputs.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||||
|
|
||||||
|
inputs
|
||||||
|
.into_iter()
|
||||||
|
.map(
|
||||||
|
|(_, input_idx)| match self.dag.node_weight(input_idx).unwrap() {
|
||||||
|
Node::StdInput(s) => NodeInput::Standard(*s),
|
||||||
|
Node::Primitive { .. } => NodeInput::Idx(input_idx),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn outputs(&self, node_idx: NodeIndex) -> Vec<NodeIndex> {
|
||||||
|
self.dag
|
||||||
|
.neighbors_directed(node_idx, Direction::Outgoing)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn source_graphic(&self) -> NodeIndex {
|
||||||
|
self.source_graphic_idx
|
||||||
|
}
|
||||||
|
pub fn source_alpha(&self) -> NodeIndex {
|
||||||
|
self.source_alpha_idx
|
||||||
|
}
|
||||||
|
pub fn background_image(&self) -> NodeIndex {
|
||||||
|
self.background_image_idx
|
||||||
|
}
|
||||||
|
pub fn background_alpha(&self) -> NodeIndex {
|
||||||
|
self.background_alpha_idx
|
||||||
|
}
|
||||||
|
pub fn fill_paint(&self) -> NodeIndex {
|
||||||
|
self.fill_paint_idx
|
||||||
|
}
|
||||||
|
pub fn stroke_paint(&self) -> NodeIndex {
|
||||||
|
self.stroke_paint_idx
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub mod abstracted_inputs;
|
||||||
|
|
||||||
|
pub mod edge;
|
196
crates/svg-filters/src/types/graph/abstracted_inputs.rs
Normal file
196
crates/svg-filters/src/types/graph/abstracted_inputs.rs
Normal file
|
@ -0,0 +1,196 @@
|
||||||
|
use csscolorparser::Color;
|
||||||
|
use petgraph::{data::Build, prelude::NodeIndex};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
types::nodes::primitives::{
|
||||||
|
blend::BlendMode,
|
||||||
|
color_matrix::ColorMatrixType,
|
||||||
|
component_transfer::TransferFn,
|
||||||
|
displacement_map::Channel,
|
||||||
|
turbulence::{NoiseType, StitchTiles},
|
||||||
|
},
|
||||||
|
Node,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::{FilterGraph, NodeInput};
|
||||||
|
|
||||||
|
impl FilterGraph {
|
||||||
|
pub fn color_matrix(
|
||||||
|
&mut self,
|
||||||
|
r#in: impl Into<NodeInput>,
|
||||||
|
cm_type: ColorMatrixType,
|
||||||
|
) -> NodeIndex {
|
||||||
|
let node_idx = self.dag.add_node(Node::color_matrix(cm_type));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn offset(&mut self, r#in: impl Into<NodeInput>, dx: f32, dy: f32) -> NodeIndex {
|
||||||
|
let node_idx = self.dag.add_node(Node::offset(dx, dy));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn gaussian_blur_xy(&mut self, r#in: impl Into<NodeInput>, x: u16, y: u16) -> NodeIndex {
|
||||||
|
let node_idx = self.dag.add_node(Node::gaussian_blur_xy(x, y));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn blend(
|
||||||
|
&mut self,
|
||||||
|
r#in: impl Into<NodeInput>,
|
||||||
|
in2: impl Into<NodeInput>,
|
||||||
|
mode: BlendMode,
|
||||||
|
) -> NodeIndex {
|
||||||
|
let node_idx = self.dag.add_node(Node::blend(mode));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(in2.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn composite_arithmetic(
|
||||||
|
&mut self,
|
||||||
|
r#in: impl Into<NodeInput>,
|
||||||
|
in2: impl Into<NodeInput>,
|
||||||
|
k1: f32,
|
||||||
|
k2: f32,
|
||||||
|
k3: f32,
|
||||||
|
k4: f32,
|
||||||
|
) -> NodeIndex {
|
||||||
|
let node_idx = self
|
||||||
|
.dag
|
||||||
|
.add_node(Node::composite_arithmetic(k1, k2, k3, k4));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(in2.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn component_transfer_rgba(
|
||||||
|
&mut self,
|
||||||
|
r#in: impl Into<NodeInput>,
|
||||||
|
r: TransferFn,
|
||||||
|
g: TransferFn,
|
||||||
|
b: TransferFn,
|
||||||
|
a: TransferFn,
|
||||||
|
) -> NodeIndex {
|
||||||
|
let node_idx = self.dag.add_node(Node::component_transfer_rgba(r, g, b, a));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
pub fn component_transfer_rgb(
|
||||||
|
&mut self,
|
||||||
|
r#in: impl Into<NodeInput>,
|
||||||
|
r: TransferFn,
|
||||||
|
g: TransferFn,
|
||||||
|
b: TransferFn,
|
||||||
|
) -> NodeIndex {
|
||||||
|
let node_idx = self.dag.add_node(Node::component_transfer_rgb(r, g, b));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
pub fn component_transfer_r(
|
||||||
|
&mut self,
|
||||||
|
r#in: impl Into<NodeInput>,
|
||||||
|
func: TransferFn,
|
||||||
|
) -> NodeIndex {
|
||||||
|
let node_idx = self.dag.add_node(Node::component_transfer_r(func));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
pub fn component_transfer_g(
|
||||||
|
&mut self,
|
||||||
|
r#in: impl Into<NodeInput>,
|
||||||
|
func: TransferFn,
|
||||||
|
) -> NodeIndex {
|
||||||
|
let node_idx = self.dag.add_node(Node::component_transfer_g(func));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
pub fn component_transfer_b(
|
||||||
|
&mut self,
|
||||||
|
r#in: impl Into<NodeInput>,
|
||||||
|
func: TransferFn,
|
||||||
|
) -> NodeIndex {
|
||||||
|
let node_idx = self.dag.add_node(Node::component_transfer_b(func));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
pub fn component_transfer_a(
|
||||||
|
&mut self,
|
||||||
|
r#in: impl Into<NodeInput>,
|
||||||
|
func: TransferFn,
|
||||||
|
) -> NodeIndex {
|
||||||
|
let node_idx = self.dag.add_node(Node::component_transfer_a(func));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
pub fn component_transfer_single(
|
||||||
|
&mut self,
|
||||||
|
r#in: impl Into<NodeInput>,
|
||||||
|
func: TransferFn,
|
||||||
|
) -> NodeIndex {
|
||||||
|
let node_idx = self.dag.add_node(Node::component_transfer_single(func));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(r#in.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn flood(&mut self, flood_color: Color, flood_opacity: f32) -> NodeIndex {
|
||||||
|
self.dag.add_node(Node::flood(flood_color, flood_opacity))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn flood_opaque(&mut self, flood_color: Color) -> NodeIndex {
|
||||||
|
self.dag.add_node(Node::flood_opaque(flood_color))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn turbulence(
|
||||||
|
&mut self,
|
||||||
|
base_freq_x: f32,
|
||||||
|
base_freq_y: f32,
|
||||||
|
num_octaves: u16,
|
||||||
|
seed: u32,
|
||||||
|
stitch_tiles: StitchTiles,
|
||||||
|
noise_type: NoiseType,
|
||||||
|
) -> NodeIndex {
|
||||||
|
self.dag.add_node(Node::turbulence(
|
||||||
|
base_freq_x,
|
||||||
|
base_freq_y,
|
||||||
|
num_octaves,
|
||||||
|
seed,
|
||||||
|
stitch_tiles,
|
||||||
|
noise_type,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn displacement_map(
|
||||||
|
&mut self,
|
||||||
|
source_image: impl Into<NodeInput>,
|
||||||
|
displacement_map: impl Into<NodeInput>,
|
||||||
|
scale: f32,
|
||||||
|
x_channel: Channel,
|
||||||
|
y_channel: Channel,
|
||||||
|
) -> NodeIndex {
|
||||||
|
let node_idx = self
|
||||||
|
.dag
|
||||||
|
.add_node(Node::displacement_map(scale, x_channel, y_channel));
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(source_image.into()), node_idx, ());
|
||||||
|
self.dag
|
||||||
|
.add_edge(self.resolve_input(displacement_map.into()), node_idx, ());
|
||||||
|
node_idx
|
||||||
|
}
|
||||||
|
}
|
19
crates/svg-filters/src/types/graph/edge.rs
Normal file
19
crates/svg-filters/src/types/graph/edge.rs
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
/// Labels a graph edge with the filter-primitive input slot it feeds.
///
/// `input_idx` is zero-based: slot 0 is serialized as `in`, slot 1 as
/// `in2`, and so on.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub struct Edge {
    // zero-based input slot on the target primitive
    input_idx: u8,
}
|
||||||
|
|
||||||
|
impl Edge {
|
||||||
|
pub fn new(input_idx: u8) -> Self {
|
||||||
|
Self { input_idx }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToString for Edge {
|
||||||
|
fn to_string(&self) -> String {
|
||||||
|
match self.input_idx {
|
||||||
|
0 => "in".to_owned(),
|
||||||
|
n => format!("in{}", n + 1),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
48
crates/svg-filters/src/types/length.rs
Normal file
48
crates/svg-filters/src/types/length.rs
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
use std::fmt::Display;
|
||||||
|
|
||||||
|
/// A scalar magnitude paired with a unit, e.g. `1.5px`.
///
/// Defaults to `0` with no unit.
#[derive(Default, Debug, Clone, Copy)]
pub struct Length(f32, Unit);
|
||||||
|
|
||||||
|
impl Length {
    /// Whether the magnitude is exactly zero (the unit is ignored).
    ///
    /// Exact float comparison is deliberate: this is used to skip
    /// serializing attributes still at their `0.0` default.
    pub fn is_zero(&self) -> bool {
        self.0 == 0.
    }
}
|
||||||
|
|
||||||
|
impl Display for Length {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
write!(f, "{}{}", self.0, self.1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Coordinates share the representation of lengths; the alias only documents intent.
pub type Coordinate = Length;
|
||||||
|
|
||||||
|
/// Length units; serialized as their CSS suffix by the `Display` impl.
#[derive(Default, Debug, Clone, Copy)]
pub enum Unit {
    // unitless — serializes to the empty string
    #[default]
    None,
    Em,
    Ex,
    Px,
    In,
    Cm,
    Mm,
    Pt,
    Pc,
}
|
||||||
|
|
||||||
|
impl Display for Unit {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
Unit::None => f.write_str(""),
|
||||||
|
Unit::Em => f.write_str("em"),
|
||||||
|
Unit::Ex => f.write_str("ex"),
|
||||||
|
Unit::Px => f.write_str("px"),
|
||||||
|
Unit::In => f.write_str("in"),
|
||||||
|
Unit::Cm => f.write_str("cm"),
|
||||||
|
Unit::Mm => f.write_str("mm"),
|
||||||
|
Unit::Pt => f.write_str("pt"),
|
||||||
|
Unit::Pc => f.write_str("pc"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
237
crates/svg-filters/src/types/nodes.rs
Normal file
237
crates/svg-filters/src/types/nodes.rs
Normal file
|
@ -0,0 +1,237 @@
|
||||||
|
use std::borrow::Cow;
|
||||||
|
|
||||||
|
use csscolorparser::Color;
|
||||||
|
use quick_xml::{events::attributes::Attribute, name::QName};
|
||||||
|
|
||||||
|
use self::{
|
||||||
|
primitives::{
|
||||||
|
blend::{Blend, BlendMode},
|
||||||
|
color_matrix::{ColorMatrix, ColorMatrixType},
|
||||||
|
component_transfer::{ComponentTransfer, TransferFn},
|
||||||
|
composite::{Composite, CompositeOperator},
|
||||||
|
displacement_map::{Channel, DisplacementMap},
|
||||||
|
flood::Flood,
|
||||||
|
gaussian_blur::GaussianBlur,
|
||||||
|
offset::Offset,
|
||||||
|
turbulence::{NoiseType, StitchTiles, Turbulence},
|
||||||
|
FePrimitive,
|
||||||
|
},
|
||||||
|
standard_input::StandardInput,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::length::{Coordinate, Length};
|
||||||
|
|
||||||
|
pub mod primitives;
|
||||||
|
pub mod standard_input;
|
||||||
|
|
||||||
|
/// A vertex in the filter graph: either one of SVG's predefined inputs
/// or a filter primitive plus its common region attributes.
#[derive(Debug)]
pub enum Node {
    // e.g. SourceGraphic/SourceAlpha — consumed, never written as an element
    StdInput(StandardInput),
    Primitive {
        primitive: FePrimitive,
        // x/y/width/height subregion; omitted from output when zero
        common_attrs: CommonAttrs,
    },
}
|
||||||
|
|
||||||
|
impl Default for Node {
    /// Defaults to the unfiltered `SourceGraphic` standard input.
    fn default() -> Self {
        Self::StdInput(StandardInput::SourceGraphic)
    }
}
|
||||||
|
|
||||||
|
/// The filter-primitive subregion attributes shared by every primitive
/// (`x`, `y`, `width`, `height`). All default to zero, which the
/// serializer treats as "unset" and omits.
#[derive(Default, Debug, Clone, Copy)]
pub struct CommonAttrs {
    pub x: Coordinate,
    pub y: Coordinate,
    pub width: Length,
    pub height: Length,
}
|
||||||
|
|
||||||
|
impl From<CommonAttrs> for Vec<Attribute<'_>> {
    /// Serializes the subregion attributes, omitting any still at their
    /// zero default so the generated XML stays minimal.
    fn from(val: CommonAttrs) -> Self {
        // guarded form of gen_attrs!: each attribute is emitted only when
        // its condition holds
        gen_attrs![
            !val.x.is_zero() => b"x": val.x,
            !val.y.is_zero() => b"y": val.y,
            !val.width.is_zero() => b"width": val.width,
            !val.height.is_zero() => b"height": val.height
        ]
    }
}
|
||||||
|
|
||||||
|
impl Node {
    /// Wraps a primitive with default (all-zero, thus omitted) region attributes.
    pub fn simple(el: FePrimitive) -> Node {
        Node::Primitive {
            primitive: el,
            common_attrs: CommonAttrs::default(),
        }
    }

    /// The primitive and its region attributes, or `None` for standard inputs.
    pub fn primitive(&self) -> Option<(&FePrimitive, &CommonAttrs)> {
        if let Node::Primitive {
            primitive,
            common_attrs,
        } = self
        {
            Some((primitive, common_attrs))
        } else {
            None
        }
    }

    /// How many graph inputs (`in`, `in2`, …) this node consumes.
    ///
    /// Standard inputs and generator primitives (flood, image, turbulence)
    /// consume 0, most filters 1, compositing filters 2. feMerge is
    /// variadic and not handled yet (`todo!`).
    pub fn input_count(&self) -> u8 {
        match self {
            Node::Primitive {
                primitive:
                    FePrimitive::ColorMatrix(_)
                    | FePrimitive::ComponentTransfer(_)
                    | FePrimitive::ConvolveMatrix(_)
                    | FePrimitive::DiffuseLighting(_)
                    | FePrimitive::GaussianBlur(_)
                    | FePrimitive::Morphology(_)
                    | FePrimitive::Offset(_)
                    | FePrimitive::SpecularLighting(_)
                    | FePrimitive::Tile(_),
                ..
            } => 1,

            Node::Primitive {
                primitive:
                    FePrimitive::Composite(_) | FePrimitive::Blend(_) | FePrimitive::DisplacementMap(_),
                ..
            } => 2,

            Node::StdInput(_)
            | Node::Primitive {
                primitive:
                    FePrimitive::Flood(_) | FePrimitive::Image(_) | FePrimitive::Turbulence(_),
                ..
            } => 0,
            Node::Primitive {
                primitive: FePrimitive::Merge(_),
                ..
            } => todo!(),
        }
    }

    /// feBlend node with the given blending mode.
    pub fn blend(mode: BlendMode) -> Self {
        Self::simple(FePrimitive::Blend(Blend::new(mode)))
    }

    /// feColorMatrix node of the given kind.
    pub fn color_matrix(cm_type: ColorMatrixType) -> Self {
        Self::simple(FePrimitive::ColorMatrix(ColorMatrix::new(cm_type)))
    }

    /// feComposite node with the given Porter-Duff operator.
    pub fn composite(op: CompositeOperator) -> Self {
        Self::simple(FePrimitive::Composite(Composite::new(op)))
    }

    /// feComposite node using the `arithmetic` operator with coefficients k1..k4.
    pub fn composite_arithmetic(k1: f32, k2: f32, k3: f32, k4: f32) -> Self {
        Self::composite(CompositeOperator::Arithmetic { k1, k2, k3, k4 })
    }

    /// feGaussianBlur node with the same deviation on both axes.
    pub fn gaussian_blur(v: u16) -> Self {
        Self::simple(FePrimitive::GaussianBlur(GaussianBlur::single(v)))
    }

    /// feGaussianBlur node with separate x/y deviations.
    pub fn gaussian_blur_xy(x: u16, y: u16) -> Self {
        Self::simple(FePrimitive::GaussianBlur(GaussianBlur::with_xy(x, y)))
    }

    /// feOffset node translating its input by (dx, dy).
    pub fn offset(dx: f32, dy: f32) -> Self {
        Self::simple(FePrimitive::Offset(Offset::new(dx, dy)))
    }

    /// feComponentTransfer node with an explicit function per channel.
    pub fn component_transfer_rgba(
        r: TransferFn,
        g: TransferFn,
        b: TransferFn,
        a: TransferFn,
    ) -> Self {
        Self::simple(FePrimitive::ComponentTransfer(ComponentTransfer {
            func_r: r,
            func_g: g,
            func_b: b,
            func_a: a,
        }))
    }

    /// Like [`Self::component_transfer_rgba`] but leaving alpha untouched.
    pub fn component_transfer_rgb(r: TransferFn, g: TransferFn, b: TransferFn) -> Self {
        Self::component_transfer_rgba(r, g, b, TransferFn::Identity)
    }

    /// Component transfer affecting only the red channel.
    pub fn component_transfer_r(func: TransferFn) -> Self {
        Self::component_transfer_rgba(
            func,
            TransferFn::Identity,
            TransferFn::Identity,
            TransferFn::Identity,
        )
    }

    /// Component transfer affecting only the green channel.
    pub fn component_transfer_g(func: TransferFn) -> Self {
        Self::component_transfer_rgba(
            TransferFn::Identity,
            func,
            TransferFn::Identity,
            TransferFn::Identity,
        )
    }

    /// Component transfer affecting only the blue channel.
    pub fn component_transfer_b(func: TransferFn) -> Self {
        Self::component_transfer_rgba(
            TransferFn::Identity,
            TransferFn::Identity,
            func,
            TransferFn::Identity,
        )
    }

    /// Component transfer affecting only the alpha channel.
    pub fn component_transfer_a(func: TransferFn) -> Self {
        Self::component_transfer_rgba(
            TransferFn::Identity,
            TransferFn::Identity,
            TransferFn::Identity,
            func,
        )
    }

    /// Component transfer applying the same function to R, G and B (alpha untouched).
    pub fn component_transfer_single(func: TransferFn) -> Self {
        Self::component_transfer_rgb(func.clone(), func.clone(), func)
    }

    /// feFlood node filling with the given color and opacity.
    pub fn flood(flood_color: Color, flood_opacity: f32) -> Self {
        Self::simple(FePrimitive::Flood(Flood {
            flood_color,
            flood_opacity,
        }))
    }

    /// feFlood node with full opacity.
    pub fn flood_opaque(flood_color: Color) -> Self {
        Self::flood(flood_color, 1.)
    }

    /// feTurbulence noise-generator node; base frequency is given per axis.
    pub fn turbulence(
        base_freq_x: f32,
        base_freq_y: f32,
        num_octaves: u16,
        seed: u32,
        stitch_tiles: StitchTiles,
        noise_type: NoiseType,
    ) -> Self {
        Self::simple(FePrimitive::Turbulence(Turbulence {
            base_frequency: (base_freq_x, base_freq_y),
            num_octaves,
            seed,
            stitch_tiles,
            noise_type,
        }))
    }

    /// feDisplacementMap node; `scale` multiplies the displacement read
    /// from the selected channels of the second input.
    pub fn displacement_map(scale: f32, x_channel: Channel, y_channel: Channel) -> Self {
        Self::simple(FePrimitive::DisplacementMap(DisplacementMap {
            scale,
            x_channel_selector: x_channel,
            y_channel_selector: y_channel,
        }))
    }
}
|
150
crates/svg-filters/src/types/nodes/primitives.rs
Normal file
150
crates/svg-filters/src/types/nodes/primitives.rs
Normal file
|
@ -0,0 +1,150 @@
|
||||||
|
use quick_xml::{events::attributes::Attribute, ElementWriter, Writer};
|
||||||
|
use std::convert::Into;
|
||||||
|
|
||||||
|
use super::CommonAttrs;
|
||||||
|
|
||||||
|
pub mod blend;
|
||||||
|
pub mod color_matrix;
|
||||||
|
pub mod component_transfer;
|
||||||
|
pub mod composite;
|
||||||
|
pub mod convolve_matrix;
|
||||||
|
pub mod diffuse_lighting;
|
||||||
|
pub mod displacement_map;
|
||||||
|
pub mod flood;
|
||||||
|
pub mod gaussian_blur;
|
||||||
|
pub mod image;
|
||||||
|
pub mod merge;
|
||||||
|
pub mod morphology;
|
||||||
|
pub mod offset;
|
||||||
|
pub mod specular_lighting;
|
||||||
|
pub mod tile;
|
||||||
|
pub mod turbulence;
|
||||||
|
|
||||||
|
/// Serialization interface shared by every filter-primitive element.
pub trait WriteElement {
    /// Element-specific attributes (everything except the common region
    /// attributes, the `in`/`in2`… inputs and the `result` output).
    fn attrs(&self) -> Vec<Attribute>;
    /// The XML tag name, e.g. `"feGaussianBlur"`.
    fn tag_name(&self) -> &'static str;
    /// Writes the primitive as an empty element carrying, in order:
    /// common region attrs, element attrs, positional input attrs, and an
    /// optional `result`. Override for elements with child content.
    fn element_writer<'writer, 'result>(
        &self,
        writer: &'writer mut Writer<&'result mut Vec<u8>>,
        common: CommonAttrs,
        inputs: Vec<String>,
        output: Option<String>,
    ) -> quick_xml::Result<&'writer mut quick_xml::Writer<&'result mut Vec<u8>>> {
        // Map positional inputs to attribute names: slot 0 -> `in`,
        // slot n -> `in{n+1}`; stored as raw bytes for quick_xml.
        let attrs: Vec<_> = inputs
            .into_iter()
            .enumerate()
            .map(|(i, edge)| {
                (
                    match i {
                        0 => "in".to_owned(),
                        n => format!("in{}", n + 1),
                    }
                    .into_bytes(),
                    edge.into_bytes(),
                )
            })
            .collect();
        let mut el_writer = writer
            .create_element(self.tag_name())
            .with_attributes(Into::<Vec<Attribute<'_>>>::into(common))
            .with_attributes(self.attrs())
            .with_attributes(attrs.iter().map(|(k, v)| (&k[..], &v[..])));
        // `result` is only written when the node's output is actually named
        if let Some(output) = output {
            el_writer = el_writer.with_attribute(("result", output.as_str()));
        }

        // self-closing tag: the default assumes no child elements
        el_writer.write_empty()
    }
}
|
||||||
|
|
||||||
|
/// SVG filter effect primitives (the `fe*` elements).
///
/// Variants whose payload types are marked `// TODO` are placeholders and
/// hit `todo!()` in the `WriteElement` impl.
#[derive(Debug)]
pub enum FePrimitive {
    Blend(blend::Blend),
    ColorMatrix(color_matrix::ColorMatrix),
    ComponentTransfer(component_transfer::ComponentTransfer),
    Composite(composite::Composite),
    ConvolveMatrix(convolve_matrix::ConvolveMatrix),
    DiffuseLighting(diffuse_lighting::DiffuseLighting),
    DisplacementMap(displacement_map::DisplacementMap),
    Flood(flood::Flood),
    GaussianBlur(gaussian_blur::GaussianBlur),
    Image(image::Image),
    Merge(merge::Merge),
    Morphology(morphology::Morphology),
    Offset(offset::Offset),
    SpecularLighting(specular_lighting::SpecularLighting),
    Tile(tile::Tile),
    Turbulence(turbulence::Turbulence),
}
|
||||||
|
|
||||||
|
/// Dispatches every `WriteElement` method to the wrapped primitive.
///
/// Unimplemented primitives (`ConvolveMatrix`, `DiffuseLighting`, `Image`,
/// `Merge`, `SpecularLighting`, `Tile`) panic via `todo!()`.
impl WriteElement for FePrimitive {
    fn attrs(&self) -> std::vec::Vec<quick_xml::events::attributes::Attribute<'_>> {
        match self {
            FePrimitive::Blend(el) => el.attrs(),
            FePrimitive::ColorMatrix(el) => el.attrs(),
            FePrimitive::ComponentTransfer(el) => el.attrs(),
            FePrimitive::Composite(el) => el.attrs(),
            FePrimitive::GaussianBlur(el) => el.attrs(),
            FePrimitive::Offset(el) => el.attrs(),
            FePrimitive::Turbulence(el) => el.attrs(),
            FePrimitive::DisplacementMap(el) => el.attrs(),
            FePrimitive::Flood(el) => el.attrs(),
            FePrimitive::Morphology(el) => el.attrs(),
            FePrimitive::ConvolveMatrix(_) => todo!(),
            FePrimitive::DiffuseLighting(_) => todo!(),
            FePrimitive::Image(_) => todo!(),
            FePrimitive::Merge(_) => todo!(),
            FePrimitive::SpecularLighting(_) => todo!(),
            FePrimitive::Tile(_) => todo!(),
        }
    }

    fn tag_name(&self) -> &'static str {
        match self {
            FePrimitive::Blend(el) => el.tag_name(),
            FePrimitive::ColorMatrix(el) => el.tag_name(),
            FePrimitive::ComponentTransfer(el) => el.tag_name(),
            FePrimitive::Composite(el) => el.tag_name(),
            FePrimitive::GaussianBlur(el) => el.tag_name(),
            FePrimitive::Offset(el) => el.tag_name(),
            FePrimitive::Turbulence(el) => el.tag_name(),
            FePrimitive::DisplacementMap(el) => el.tag_name(),
            FePrimitive::Flood(el) => el.tag_name(),
            FePrimitive::Morphology(el) => el.tag_name(),
            FePrimitive::ConvolveMatrix(_) => todo!(),
            FePrimitive::DiffuseLighting(_) => todo!(),
            FePrimitive::Image(_) => todo!(),
            FePrimitive::Merge(_) => todo!(),
            FePrimitive::SpecularLighting(_) => todo!(),
            FePrimitive::Tile(_) => todo!(),
        }
    }

    // Forwarded (not left to the trait default) so primitives that
    // override it — e.g. ComponentTransfer's child elements — are honored.
    fn element_writer<'writer, 'result>(
        &self,
        writer: &'writer mut Writer<&'result mut Vec<u8>>,
        common: CommonAttrs,
        inputs: Vec<String>,
        output: Option<String>,
    ) -> quick_xml::Result<&'writer mut quick_xml::Writer<&'result mut Vec<u8>>> {
        match self {
            FePrimitive::Blend(el) => el.element_writer(writer, common, inputs, output),
            FePrimitive::ColorMatrix(el) => el.element_writer(writer, common, inputs, output),
            FePrimitive::ComponentTransfer(el) => el.element_writer(writer, common, inputs, output),
            FePrimitive::Composite(el) => el.element_writer(writer, common, inputs, output),
            FePrimitive::Turbulence(el) => el.element_writer(writer, common, inputs, output),
            FePrimitive::GaussianBlur(el) => el.element_writer(writer, common, inputs, output),
            FePrimitive::Offset(el) => el.element_writer(writer, common, inputs, output),
            FePrimitive::DisplacementMap(el) => el.element_writer(writer, common, inputs, output),
            FePrimitive::Flood(el) => el.element_writer(writer, common, inputs, output),
            FePrimitive::Morphology(el) => el.element_writer(writer, common, inputs, output),
            FePrimitive::ConvolveMatrix(_) => todo!(),
            FePrimitive::DiffuseLighting(_) => todo!(),
            FePrimitive::Image(_) => todo!(),
            FePrimitive::Merge(_) => todo!(),
            FePrimitive::SpecularLighting(_) => todo!(),
            FePrimitive::Tile(_) => todo!(),
        }
    }
}
|
82
crates/svg-filters/src/types/nodes/primitives/blend.rs
Normal file
82
crates/svg-filters/src/types/nodes/primitives/blend.rs
Normal file
|
@ -0,0 +1,82 @@
|
||||||
|
use std::fmt::Display;
|
||||||
|
|
||||||
|
use super::WriteElement;
|
||||||
|
|
||||||
|
/// [feBlend](https://www.w3.org/TR/SVG11/filters.html#feBlendElement)
///
/// Blends its two inputs with a CSS compositing blend mode.
#[derive(Debug)]
pub struct Blend {
    mode: BlendMode,
}
|
||||||
|
|
||||||
|
impl Blend {
|
||||||
|
pub fn new(mode: BlendMode) -> Self {
|
||||||
|
Self { mode }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for Blend {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
mode: BlendMode::Normal,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl WriteElement for Blend {
|
||||||
|
fn attrs(&self) -> Vec<quick_xml::events::attributes::Attribute> {
|
||||||
|
if let BlendMode::Normal = self.mode {
|
||||||
|
Vec::new()
|
||||||
|
} else {
|
||||||
|
gen_attrs![b"mode": self.mode]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tag_name(&self) -> &'static str {
|
||||||
|
"feBlend"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Blend modes as defined by <https://drafts.fxtf.org/compositing-1/#blending>.
///
/// Serialized to the spec's CSS keywords by the `Display` impl.
#[derive(Debug)]
pub enum BlendMode {
    // separable modes
    Normal,
    Multiply,
    Screen,
    Overlay,
    Darken,
    Lighten,
    ColorDodge,
    ColorBurn,
    HardLight,
    SoftLight,
    Difference,
    Exclusion,

    // non-separable modes
    Hue,
    Saturation,
    Color,
    Luminosity,
}
|
||||||
|
|
||||||
|
impl Display for BlendMode {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
f.write_str(match self {
|
||||||
|
BlendMode::Normal => "normal",
|
||||||
|
BlendMode::Multiply => "multiply",
|
||||||
|
BlendMode::Screen => "screen",
|
||||||
|
BlendMode::Overlay => "overlay",
|
||||||
|
BlendMode::Darken => "darken",
|
||||||
|
BlendMode::Lighten => "lighten",
|
||||||
|
BlendMode::ColorDodge => "color-dodge",
|
||||||
|
BlendMode::ColorBurn => "color-burn",
|
||||||
|
BlendMode::HardLight => "hard-light",
|
||||||
|
BlendMode::SoftLight => "soft-light",
|
||||||
|
BlendMode::Difference => "difference",
|
||||||
|
BlendMode::Exclusion => "exclusion",
|
||||||
|
BlendMode::Hue => "hue",
|
||||||
|
BlendMode::Saturation => "saturation",
|
||||||
|
BlendMode::Color => "color",
|
||||||
|
BlendMode::Luminosity => "luminosity",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,47 @@
|
||||||
|
use super::WriteElement;
|
||||||
|
|
||||||
|
/// [feColorMatrix](https://www.w3.org/TR/SVG11/filters.html#feColorMatrixElement)
///
/// Applies a matrix transformation to the RGBA values of its input.
#[derive(Debug)]
pub struct ColorMatrix {
    cm_type: ColorMatrixType,
}
|
||||||
|
|
||||||
|
impl ColorMatrix {
|
||||||
|
pub fn new(cm_type: ColorMatrixType) -> Self {
|
||||||
|
Self { cm_type }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl WriteElement for ColorMatrix {
|
||||||
|
fn attrs(&self) -> Vec<quick_xml::events::attributes::Attribute> {
|
||||||
|
match &self.cm_type {
|
||||||
|
ColorMatrixType::Matrix(v) => gen_attrs![
|
||||||
|
b"values": v
|
||||||
|
.iter()
|
||||||
|
.map(std::string::ToString::to_string)
|
||||||
|
.reduce(|mut acc, e| {
|
||||||
|
acc.push(' ');
|
||||||
|
acc.push_str(&e);
|
||||||
|
acc
|
||||||
|
})
|
||||||
|
.expect("fixed length arr should always work")
|
||||||
|
],
|
||||||
|
ColorMatrixType::Saturate(v) | ColorMatrixType::HueRotate(v) => {
|
||||||
|
gen_attrs![b"values": v]
|
||||||
|
}
|
||||||
|
ColorMatrixType::LuminanceToAlpha => Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tag_name(&self) -> &'static str {
|
||||||
|
"feColorMatrix"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The `type` of an feColorMatrix, with its associated values.
#[derive(Debug)]
pub enum ColorMatrixType {
    // full 4x5 matrix, row-major; boxed to keep the enum small
    Matrix(Box<[f32; 20]>),
    Saturate(f32),
    // rotation in degrees
    HueRotate(f32),
    LuminanceToAlpha,
}
|
|
@ -0,0 +1,134 @@
|
||||||
|
use quick_xml::{events::attributes::Attribute, Writer};
|
||||||
|
|
||||||
|
use super::WriteElement;
|
||||||
|
|
||||||
|
/// [feComponentTransfer](https://www.w3.org/TR/SVG11/filters.html#feComponentTransferElement)
///
/// Remaps each RGBA channel independently through its own transfer
/// function, written as `<feFuncR/G/B/A>` child elements.
#[derive(Debug)]
pub struct ComponentTransfer {
    pub func_r: TransferFn,
    pub func_g: TransferFn,
    pub func_b: TransferFn,
    pub func_a: TransferFn,
}
|
||||||
|
|
||||||
|
impl WriteElement for ComponentTransfer {
    /// No attributes on `<feComponentTransfer>` itself; the per-channel
    /// functions are serialized as child elements instead.
    fn attrs(&self) -> Vec<quick_xml::events::attributes::Attribute> {
        Vec::new()
    }

    fn tag_name(&self) -> &'static str {
        "feComponentTransfer"
    }

    /// Overrides the trait default because this element is not empty:
    /// it nests one `<feFuncX>` child per channel.
    fn element_writer<'writer, 'result>(
        &self,
        writer: &'writer mut quick_xml::Writer<&'result mut Vec<u8>>,
        common: crate::types::nodes::CommonAttrs,
        inputs: Vec<String>,
        output: Option<String>,
    ) -> quick_xml::Result<&'writer mut quick_xml::Writer<&'result mut Vec<u8>>> {
        // Map positional inputs to `in`, `in2`, … attribute names (raw bytes).
        let inputs: Vec<_> = inputs
            .into_iter()
            .enumerate()
            .map(|(i, edge)| {
                (
                    match i {
                        0 => "in".to_owned(),
                        n => format!("in{}", n + 1),
                    }
                    .into_bytes(),
                    edge.into_bytes(),
                )
            })
            .collect();
        // NOTE(review): input attrs are written before the common attrs here,
        // while the trait default writes common attrs first — confirm whether
        // the ordering difference is intentional.
        let mut el_writer = writer
            .create_element(self.tag_name())
            .with_attributes(inputs.iter().map(|(k, v)| (&k[..], &v[..])))
            .with_attributes(Into::<Vec<Attribute<'_>>>::into(common));
        if let Some(output) = output {
            el_writer = el_writer.with_attribute(("result", output.as_str()));
        }

        // children are written in fixed R, G, B, A order
        el_writer.write_inner_content(|writer| {
            self.func_r.write_self(writer, "feFuncR")?;
            self.func_g.write_self(writer, "feFuncG")?;
            self.func_b.write_self(writer, "feFuncB")?;
            self.func_a.write_self(writer, "feFuncA")?;
            Ok(())
        })
    }
}
|
||||||
|
|
||||||
|
/// [transfer functions](https://www.w3.org/TR/SVG11/filters.html#transferFuncElements)
///
/// One per channel element (`feFuncR` etc.); the variant maps to the
/// element's `type` attribute.
#[derive(Debug, Clone)]
pub enum TransferFn {
    Identity,
    Table {
        // sample points; must be non-empty (serialization expects it)
        table_values: Vec<f32>,
    },
    Discrete {
        // step values; must be non-empty (serialization expects it)
        table_values: Vec<f32>,
    },
    Linear {
        slope: f32,
        intercept: f32,
    },
    Gamma {
        amplitude: f32,
        exponent: f32,
        offset: f32,
    },
}
|
||||||
|
|
||||||
|
impl TransferFn {
    /// Writes this function as an empty `<feFuncR|G|B|A>` element;
    /// `name` selects which channel element to emit.
    ///
    /// Panics via `expect` if a `Table`/`Discrete` variant holds an empty
    /// value list — empty tables are considered a caller bug.
    #[allow(clippy::str_to_string, reason = "inside macro call")]
    fn write_self<'writer, 'result>(
        &self,
        target: &'writer mut Writer<&'result mut Vec<u8>>,
        name: &'static str,
    ) -> quick_xml::Result<&'writer mut Writer<&'result mut Vec<u8>>> {
        target
            .create_element(name)
            .with_attributes(match self {
                TransferFn::Identity => gen_attrs![b"type": "identity"],
                // table/discrete serialize their samples space-separated
                TransferFn::Table { table_values } => gen_attrs![
                    b"type": "table",
                    b"tableValues": table_values
                        .iter()
                        .map(std::string::ToString::to_string)
                        .reduce(|mut acc, e| {
                            acc.push(' ');
                            acc.push_str(&e);
                            acc
                        }).expect("empty tables disallowed")
                ],
                TransferFn::Discrete { table_values } => gen_attrs![
                    b"type": "discrete",
                    b"tableValues": table_values
                        .iter()
                        .map(std::string::ToString::to_string)
                        .reduce(|mut acc, e| {
                            acc.push(' ');
                            acc.push_str(&e);
                            acc
                        }).expect("empty tables disallowed")
                ],
                TransferFn::Linear { slope, intercept } => gen_attrs![
                    b"type": "linear",
                    b"slope": slope,
                    b"intercept": intercept
                ],
                TransferFn::Gamma {
                    amplitude,
                    exponent,
                    offset,
                } => gen_attrs![
                    b"type": "gamma",
                    b"amplitude": amplitude,
                    b"exponent": exponent,
                    b"offset": offset
                ],
            })
            .write_empty()
    }
}
|
86
crates/svg-filters/src/types/nodes/primitives/composite.rs
Normal file
86
crates/svg-filters/src/types/nodes/primitives/composite.rs
Normal file
|
@ -0,0 +1,86 @@
|
||||||
|
use std::borrow::Cow;
|
||||||
|
|
||||||
|
use quick_xml::{events::attributes::Attribute, name::QName};
|
||||||
|
|
||||||
|
use super::WriteElement;
|
||||||
|
|
||||||
|
/// [feComposite](https://www.w3.org/TR/SVG11/filters.html#feCompositeElement)
///
/// Combines its two inputs with a Porter-Duff compositing operator.
#[derive(Debug)]
pub struct Composite {
    operator: CompositeOperator,
}
|
||||||
|
|
||||||
|
impl Composite {
|
||||||
|
pub fn new(op: CompositeOperator) -> Self {
|
||||||
|
Self { operator: op }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn arithmetic(k1: f32, k2: f32, k3: f32, k4: f32) -> Self {
|
||||||
|
Self {
|
||||||
|
operator: CompositeOperator::Arithmetic { k1, k2, k3, k4 },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The `operator` of an feComposite element.
#[derive(Debug)]
pub enum CompositeOperator {
    Over,
    In,
    Out,
    Atop,
    Xor,
    // result = k1*i1*i2 + k2*i1 + k3*i2 + k4
    Arithmetic { k1: f32, k2: f32, k3: f32, k4: f32 },
}
|
||||||
|
|
||||||
|
impl WriteElement for Composite {
|
||||||
|
fn attrs(&self) -> Vec<quick_xml::events::attributes::Attribute> {
|
||||||
|
let (op_name, vals) = match self.operator {
|
||||||
|
CompositeOperator::Over => ("over", None),
|
||||||
|
CompositeOperator::In => ("in", None),
|
||||||
|
CompositeOperator::Out => ("out", None),
|
||||||
|
CompositeOperator::Atop => ("atop", None),
|
||||||
|
CompositeOperator::Xor => ("xor", None),
|
||||||
|
CompositeOperator::Arithmetic { k1, k2, k3, k4 } => {
|
||||||
|
("arithmetic", Some([k1, k2, k3, k4]))
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut r = vec![Attribute {
|
||||||
|
key: QName(b"operator"),
|
||||||
|
value: Cow::from(op_name.as_bytes()),
|
||||||
|
}];
|
||||||
|
|
||||||
|
if let Some([k1, k2, k3, k4]) = vals {
|
||||||
|
// r.append(&mut vec![
|
||||||
|
// Attribute {
|
||||||
|
// key: QName(b"k1"),
|
||||||
|
// value: Cow::from(k1.to_string().into_bytes()),
|
||||||
|
// },
|
||||||
|
// Attribute {
|
||||||
|
// key: QName(b"k2"),
|
||||||
|
// value: Cow::from(k2.to_string().into_bytes()),
|
||||||
|
// },
|
||||||
|
// Attribute {
|
||||||
|
// key: QName(b"k3"),
|
||||||
|
// value: Cow::from(k3.to_string().into_bytes()),
|
||||||
|
// },
|
||||||
|
// Attribute {
|
||||||
|
// key: QName(b"k4"),
|
||||||
|
// value: Cow::from(k4.to_string().into_bytes()),
|
||||||
|
// },
|
||||||
|
// ]);
|
||||||
|
r.append(&mut gen_attrs![
|
||||||
|
b"k1": k1,
|
||||||
|
b"k2": k2,
|
||||||
|
b"k3": k3,
|
||||||
|
b"k4": k4
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
r
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tag_name(&self) -> &'static str {
|
||||||
|
"feComposite"
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,20 @@
|
||||||
|
/// feConvolveMatrix primitive — data only; serialization is still `todo!()`
/// in the `FePrimitive` dispatch.
#[derive(Debug)]
pub struct ConvolveMatrix {
    // columns x rows of the kernel
    order: (u16, u16),
    // invariant (not yet enforced): length must equal `order.0 * order.1`
    kernel_matrix: Vec<f32>,
    divisor: f32,
    bias: f32,
    target_x: i32,
    target_y: i32,
    edge_mode: EdgeMode,
    kernel_unit_length: (f32, f32),
    preserve_alpha: bool,
}
|
||||||
|
|
||||||
|
/// The `edgeMode` of an feConvolveMatrix: how input pixels beyond the
/// edge are obtained.
#[derive(Debug)]
enum EdgeMode {
    None,
    Duplicate,
    Wrap,
}
|
|
@ -0,0 +1,3 @@
|
||||||
|
// TODO: placeholder — fields and serialization not implemented yet
#[derive(Debug)]
pub struct DiffuseLighting;
|
|
@ -0,0 +1,34 @@
|
||||||
|
use super::WriteElement;
|
||||||
|
|
||||||
|
/// [feDisplacementMap](https://www.w3.org/TR/SVG11/filters.html#feDisplacementMapElement)
///
/// Displaces the first input using pixel values from the second.
#[derive(Debug)]
pub struct DisplacementMap {
    // displacement multiplier; 0 means no displacement
    pub scale: f32,
    // which channel of the map drives x displacement
    pub x_channel_selector: Channel,
    // which channel of the map drives y displacement
    pub y_channel_selector: Channel,
}
|
||||||
|
|
||||||
|
impl WriteElement for DisplacementMap {
    /// Serializes attributes, omitting values treated here as defaults
    /// (`scale == 0`, channel selector `A`).
    fn attrs(&self) -> Vec<quick_xml::events::attributes::Attribute> {
        let mut r = Vec::new();
        // guarded form of gen_attrs!: pushes into `r` only when the
        // condition holds; channel names reuse the Debug representation
        gen_attrs![
            r;
            self.scale != 0. => b"scale": self.scale,
            self.x_channel_selector != Channel::A => b"xChannelSelector": format!("{:?}", self.x_channel_selector),
            self.y_channel_selector != Channel::A => b"yChannelSelector": format!("{:?}", self.y_channel_selector)
        ];
        r
    }

    fn tag_name(&self) -> &'static str {
        "feDisplacementMap"
    }
}
|
||||||
|
|
||||||
|
/// An RGBA channel selector.
///
/// The `Debug` names double as the serialized attribute values
/// (`A`, `R`, `G`, `B`).
#[derive(Debug, PartialEq, Eq)]
pub enum Channel {
    A,
    R,
    G,
    B,
}
|
23
crates/svg-filters/src/types/nodes/primitives/flood.rs
Normal file
23
crates/svg-filters/src/types/nodes/primitives/flood.rs
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
use csscolorparser::Color;
|
||||||
|
|
||||||
|
use super::WriteElement;
|
||||||
|
|
||||||
|
/// [feFlood](https://www.w3.org/TR/SVG11/filters.html#feFloodElement)
///
/// Fills the filter region with a single color at a given opacity.
#[derive(Debug)]
pub struct Flood {
    pub flood_color: Color,
    pub flood_opacity: f32,
}
|
||||||
|
|
||||||
|
impl WriteElement for Flood {
    /// Always writes both attributes; the color is normalized to its
    /// hex representation.
    fn attrs(&self) -> Vec<quick_xml::events::attributes::Attribute> {
        gen_attrs![
            b"flood-color": self.flood_color.to_hex_string(),
            b"flood-opacity": self.flood_opacity
        ]
    }

    fn tag_name(&self) -> &'static str {
        "feFlood"
    }
}
|
|
@ -0,0 +1,35 @@
|
||||||
|
use std::borrow::Cow;
|
||||||
|
|
||||||
|
use quick_xml::{events::attributes::Attribute, name::QName};
|
||||||
|
|
||||||
|
use super::WriteElement;
|
||||||
|
|
||||||
|
/// [feGaussianBlur](https://www.w3.org/TR/SVG11/filters.html#feGaussianBlurElement)
///
/// Gaussian blur with (x, y) standard deviations.
#[derive(Debug)]
pub struct GaussianBlur {
    std_deviation: (u16, u16),
}
|
||||||
|
|
||||||
|
impl GaussianBlur {
|
||||||
|
pub fn single(v: u16) -> Self {
|
||||||
|
Self {
|
||||||
|
std_deviation: (v, v),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_xy(x: u16, y: u16) -> Self {
|
||||||
|
Self {
|
||||||
|
std_deviation: (x, y),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl WriteElement for GaussianBlur {
|
||||||
|
fn attrs(&self) -> Vec<quick_xml::events::attributes::Attribute> {
|
||||||
|
gen_attrs![b"stdDeviation": format!("{} {}", self.std_deviation.0, self.std_deviation.1)]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tag_name(&self) -> &'static str {
|
||||||
|
"feGaussianBlur"
|
||||||
|
}
|
||||||
|
}
|
3
crates/svg-filters/src/types/nodes/primitives/image.rs
Normal file
3
crates/svg-filters/src/types/nodes/primitives/image.rs
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
// TODO: feImage is not implemented yet; this unit struct is a stub.
#[derive(Debug)]
pub struct Image;
|
3
crates/svg-filters/src/types/nodes/primitives/merge.rs
Normal file
3
crates/svg-filters/src/types/nodes/primitives/merge.rs
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
// TODO: feMerge is not implemented yet; this unit struct is a stub.
#[derive(Debug)]
pub struct Merge;
|
37
crates/svg-filters/src/types/nodes/primitives/morphology.rs
Normal file
37
crates/svg-filters/src/types/nodes/primitives/morphology.rs
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
use super::WriteElement;
use std::fmt::Display;

/// [feMorphology](https://www.w3.org/TR/SVG11/filters.html#feMorphologyElement)
///
/// Erodes or dilates the input image.
#[derive(Debug)]
pub struct Morphology {
    /// Whether to erode or dilate; rendered via `Operator`'s `Display` impl.
    operator: Operator,
    /// x/y radius; serialized as `"x y"`.
    radius: (f32, f32),
}

impl WriteElement for Morphology {
    /// Emits `operator` and `radius`, both unconditionally.
    // NOTE(review): gen_attrs! is a crate-local macro not visible in this
    // file; presumably it builds a Vec<Attribute> from key/value pairs —
    // confirm against its definition.
    fn attrs(&self) -> Vec<quick_xml::events::attributes::Attribute> {
        gen_attrs![
            b"operator": self.operator,
            b"radius": format!("{} {}", self.radius.0, self.radius.1)
        ]
    }

    fn tag_name(&self) -> &'static str {
        "feMorphology"
    }
}
|
||||||
|
|
||||||
|
/// Value of feMorphology's `operator` attribute.
#[derive(Debug)]
enum Operator {
    Erode,
    Dilate,
}

impl Display for Operator {
    /// Writes the operator keyword exactly as the SVG attribute expects it.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let keyword = match self {
            Self::Erode => "erode",
            Self::Dilate => "dilate",
        };
        f.write_str(keyword)
    }
}
|
24
crates/svg-filters/src/types/nodes/primitives/offset.rs
Normal file
24
crates/svg-filters/src/types/nodes/primitives/offset.rs
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
use super::WriteElement;

/// [feOffset](https://www.w3.org/TR/SVG11/filters.html#feOffsetElement)
///
/// Translates the input image by `(dx, dy)`.
#[derive(Debug)]
pub struct Offset {
    /// Horizontal shift.
    dx: f32,
    /// Vertical shift.
    dy: f32,
}

impl Offset {
    /// Creates an offset of `dx` units along x and `dy` units along y.
    pub fn new(dx: f32, dy: f32) -> Self {
        Self { dx, dy }
    }
}

impl WriteElement for Offset {
    /// Emits `dx` and `dy` unconditionally, even when zero.
    // NOTE(review): gen_attrs! is a crate-local macro not visible in this
    // file — confirm its key/value semantics against its definition.
    fn attrs(&self) -> Vec<quick_xml::events::attributes::Attribute> {
        gen_attrs![b"dx": self.dx, b"dy": self.dy]
    }

    fn tag_name(&self) -> &'static str {
        "feOffset"
    }
}
|
|
@ -0,0 +1,3 @@
|
||||||
|
// TODO: feSpecularLighting is not implemented yet; this unit struct is a stub.
#[derive(Debug)]
pub struct SpecularLighting;
|
15
crates/svg-filters/src/types/nodes/primitives/tile.rs
Normal file
15
crates/svg-filters/src/types/nodes/primitives/tile.rs
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
use super::WriteElement;
|
||||||
|
|
||||||
|
/// [feTile](https://www.w3.org/TR/SVG11/filters.html#feTileElement)
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct Tile;
|
||||||
|
|
||||||
|
impl WriteElement for Tile {
|
||||||
|
fn attrs(&self) -> Vec<quick_xml::events::attributes::Attribute> {
|
||||||
|
Vec::new()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tag_name(&self) -> &'static str {
|
||||||
|
"feTile"
|
||||||
|
}
|
||||||
|
}
|
42
crates/svg-filters/src/types/nodes/primitives/turbulence.rs
Normal file
42
crates/svg-filters/src/types/nodes/primitives/turbulence.rs
Normal file
|
@ -0,0 +1,42 @@
|
||||||
|
use super::WriteElement;

/// [feTurbulence](https://www.w3.org/TR/SVG11/filters.html#feTurbulenceElement)
///
/// Generates Perlin-noise turbulence or fractal noise.
#[derive(Debug)]
pub struct Turbulence {
    /// Base frequency on the x and y axes; serialized as `"x y"`.
    pub base_frequency: (f32, f32),
    /// Octave count; only serialized when it differs from 1.
    pub num_octaves: u16,
    /// Noise seed; only serialized when non-zero.
    pub seed: u32,
    /// Whether noise tiles are stitched at borders; only serialized when
    /// set to `Stitch`.
    pub stitch_tiles: StitchTiles,
    /// Maps to the `type` attribute; only serialized when set to
    /// `FractalNoise`.
    pub noise_type: NoiseType,
}

impl WriteElement for Turbulence {
    #[allow(clippy::str_to_string, reason = "in macro invocation")]
    /// `baseFrequency` is always emitted; the remaining attributes are
    /// appended only when they differ from the SVG defaults, keeping the
    /// serialized element minimal.
    // NOTE(review): gen_attrs! is a crate-local macro not visible here;
    // the `r; cond => key: value` form presumably pushes conditionally
    // into `r` — confirm against the macro definition.
    fn attrs(&self) -> Vec<quick_xml::events::attributes::Attribute> {
        let mut r = gen_attrs![b"baseFrequency": format!("{} {}", self.base_frequency.0, self.base_frequency.1)];
        gen_attrs![
            r;
            self.num_octaves != 1 => b"numOctaves": self.num_octaves,
            self.seed != 0 => b"seed": self.seed,
            self.stitch_tiles != StitchTiles::NoStitch => b"stitchTiles": "stitch",
            self.noise_type != NoiseType::Turbulence => b"type": "fractalNoise"
        ];
        r
    }

    fn tag_name(&self) -> &'static str {
        "feTurbulence"
    }
}
|
||||||
|
|
||||||
|
/// Value of feTurbulence's `stitchTiles` attribute; `NoStitch` is the
/// default and is therefore omitted when serializing.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum StitchTiles {
    Stitch,
    NoStitch,
}

/// Value of feTurbulence's `type` attribute; `Turbulence` is the default
/// and is therefore omitted when serializing.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum NoiseType {
    Turbulence,
    FractalNoise,
}
|
11
crates/svg-filters/src/types/nodes/standard_input.rs
Normal file
11
crates/svg-filters/src/types/nodes/standard_input.rs
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
/// [svg filter effect standard input](https://www.w3.org/TR/SVG11/filters.html#FilterPrimitiveInAttribute)
///
/// The predefined sources a filter primitive's `in`/`in2` attribute may
/// reference instead of another primitive's result.
/// Technically not a node, but modeled as one for implementation simplicity.
#[derive(Debug, Clone, Copy)]
pub enum StandardInput {
    SourceGraphic,
    SourceAlpha,
    BackgroundImage,
    BackgroundAlpha,
    FillPaint,
    StrokePaint,
}
|
528
flake.lock
528
flake.lock
|
@ -1,18 +1,85 @@
|
||||||
{
|
{
|
||||||
"nodes": {
|
"nodes": {
|
||||||
"devenv": {
|
"cachix": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"flake-compat": "flake-compat",
|
"devenv": "devenv_2",
|
||||||
"nix": "nix",
|
"flake-compat": [
|
||||||
"nixpkgs": "nixpkgs",
|
"devenv",
|
||||||
"pre-commit-hooks": "pre-commit-hooks"
|
"flake-compat"
|
||||||
|
],
|
||||||
|
"git-hooks": [
|
||||||
|
"devenv",
|
||||||
|
"pre-commit-hooks"
|
||||||
|
],
|
||||||
|
"nixpkgs": [
|
||||||
|
"devenv",
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1704835383,
|
"lastModified": 1726520618,
|
||||||
"narHash": "sha256-SoC0rYR9iHW0dVOEmxNEfa8vk9dTK86P5iXTgHafmwM=",
|
"narHash": "sha256-jOsaBmJ/EtX5t/vbylCdS7pWYcKGmWOKg4QKUzKr6dA=",
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "cachix",
|
||||||
|
"rev": "695525f9086542dfb09fde0871dbf4174abbf634",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "cachix",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"cachix_2": {
|
||||||
|
"inputs": {
|
||||||
|
"devenv": "devenv_3",
|
||||||
|
"flake-compat": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"flake-compat"
|
||||||
|
],
|
||||||
|
"nixpkgs": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"nixpkgs"
|
||||||
|
],
|
||||||
|
"pre-commit-hooks": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"pre-commit-hooks"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1712055811,
|
||||||
|
"narHash": "sha256-7FcfMm5A/f02yyzuavJe06zLa9hcMHsagE28ADcmQvk=",
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "cachix",
|
||||||
|
"rev": "02e38da89851ec7fec3356a5c04bc8349cae0e30",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "cachix",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"devenv": {
|
||||||
|
"inputs": {
|
||||||
|
"cachix": "cachix",
|
||||||
|
"flake-compat": "flake-compat_2",
|
||||||
|
"nix": "nix_3",
|
||||||
|
"nixpkgs": "nixpkgs_3",
|
||||||
|
"pre-commit-hooks": "pre-commit-hooks_2"
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1729445229,
|
||||||
|
"narHash": "sha256-3vhSEs2ufSvv2Oct8G9CWEPFI57c4NAZ2wR2accHELM=",
|
||||||
"owner": "cachix",
|
"owner": "cachix",
|
||||||
"repo": "devenv",
|
"repo": "devenv",
|
||||||
"rev": "18ef9849d1ecac7a9a7920eb4f2e4adcf67a8c3a",
|
"rev": "006016cf4191c34c17cfdb6669e0690e24302ac0",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -21,17 +88,86 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"devenv_2": {
|
||||||
|
"inputs": {
|
||||||
|
"cachix": "cachix_2",
|
||||||
|
"flake-compat": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"flake-compat"
|
||||||
|
],
|
||||||
|
"nix": "nix_2",
|
||||||
|
"nixpkgs": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"nixpkgs"
|
||||||
|
],
|
||||||
|
"pre-commit-hooks": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"git-hooks"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1723156315,
|
||||||
|
"narHash": "sha256-0JrfahRMJ37Rf1i0iOOn+8Z4CLvbcGNwa2ChOAVrp/8=",
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "devenv",
|
||||||
|
"rev": "ff5eb4f2accbcda963af67f1a1159e3f6c7f5f91",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "devenv",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"devenv_3": {
|
||||||
|
"inputs": {
|
||||||
|
"flake-compat": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"flake-compat"
|
||||||
|
],
|
||||||
|
"nix": "nix",
|
||||||
|
"nixpkgs": "nixpkgs",
|
||||||
|
"poetry2nix": "poetry2nix",
|
||||||
|
"pre-commit-hooks": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"pre-commit-hooks"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1708704632,
|
||||||
|
"narHash": "sha256-w+dOIW60FKMaHI1q5714CSibk99JfYxm0CzTinYWr+Q=",
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "devenv",
|
||||||
|
"rev": "2ee4450b0f4b95a1b90f2eb5ffea98b90e48c196",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "cachix",
|
||||||
|
"ref": "python-rewrite",
|
||||||
|
"repo": "devenv",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
"fenix": {
|
"fenix": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"nixpkgs": "nixpkgs_2",
|
"nixpkgs": "nixpkgs_4",
|
||||||
"rust-analyzer-src": "rust-analyzer-src"
|
"rust-analyzer-src": "rust-analyzer-src"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1704867811,
|
"lastModified": 1729492502,
|
||||||
"narHash": "sha256-pG4O1vPpNSMjz7p/5x+/OH4tXC0thzAPbJ55kI/W5dU=",
|
"narHash": "sha256-d6L4bBlUWr4sHC+eRXo+4acFPEFXKmqHpM/BfQ5gQQw=",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "fenix",
|
"repo": "fenix",
|
||||||
"rev": "93e89638c15512db65e931f26ce36edf8cfbb4a5",
|
"rev": "4002a1ec3486b855f341d2b864ba06b61e73af28",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -56,16 +192,69 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"flake-compat_2": {
|
||||||
|
"flake": false,
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1696426674,
|
||||||
|
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
|
||||||
|
"owner": "edolstra",
|
||||||
|
"repo": "flake-compat",
|
||||||
|
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "edolstra",
|
||||||
|
"repo": "flake-compat",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"flake-parts": {
|
||||||
|
"inputs": {
|
||||||
|
"nixpkgs-lib": [
|
||||||
|
"devenv",
|
||||||
|
"nix",
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1712014858,
|
||||||
|
"narHash": "sha256-sB4SWl2lX95bExY2gMFG5HIzvva5AVMJd4Igm+GpZNw=",
|
||||||
|
"owner": "hercules-ci",
|
||||||
|
"repo": "flake-parts",
|
||||||
|
"rev": "9126214d0a59633752a136528f5f3b9aa8565b7d",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "hercules-ci",
|
||||||
|
"repo": "flake-parts",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
"flake-utils": {
|
"flake-utils": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"systems": "systems"
|
"systems": "systems"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1685518550,
|
"lastModified": 1689068808,
|
||||||
"narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=",
|
"narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
|
||||||
"owner": "numtide",
|
"owner": "numtide",
|
||||||
"repo": "flake-utils",
|
"repo": "flake-utils",
|
||||||
"rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef",
|
"rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"flake-utils_2": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1667395993,
|
||||||
|
"narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -83,11 +272,11 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1660459072,
|
"lastModified": 1709087332,
|
||||||
"narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=",
|
"narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=",
|
||||||
"owner": "hercules-ci",
|
"owner": "hercules-ci",
|
||||||
"repo": "gitignore.nix",
|
"repo": "gitignore.nix",
|
||||||
"rev": "a20de23b925fd8264fd7fad6454652e142fd7f73",
|
"rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -96,53 +285,142 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"lowdown-src": {
|
"libgit2": {
|
||||||
"flake": false,
|
"flake": false,
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1633514407,
|
"lastModified": 1697646580,
|
||||||
"narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=",
|
"narHash": "sha256-oX4Z3S9WtJlwvj0uH9HlYcWv+x1hqp8mhXl7HsLu2f0=",
|
||||||
"owner": "kristapsdz",
|
"owner": "libgit2",
|
||||||
"repo": "lowdown",
|
"repo": "libgit2",
|
||||||
"rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8",
|
"rev": "45fd9ed7ae1a9b74b957ef4f337bc3c8b3df01b5",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "kristapsdz",
|
"owner": "libgit2",
|
||||||
"repo": "lowdown",
|
"repo": "libgit2",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"nix": {
|
"nix": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"lowdown-src": "lowdown-src",
|
"flake-compat": "flake-compat",
|
||||||
"nixpkgs": [
|
"nixpkgs": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
"devenv",
|
"devenv",
|
||||||
"nixpkgs"
|
"nixpkgs"
|
||||||
],
|
],
|
||||||
"nixpkgs-regression": "nixpkgs-regression"
|
"nixpkgs-regression": "nixpkgs-regression"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1676545802,
|
"lastModified": 1712911606,
|
||||||
"narHash": "sha256-EK4rZ+Hd5hsvXnzSzk2ikhStJnD63odF7SzsQ8CuSPU=",
|
"narHash": "sha256-BGvBhepCufsjcUkXnEEXhEVjwdJAwPglCC2+bInc794=",
|
||||||
"owner": "domenkozar",
|
"owner": "domenkozar",
|
||||||
"repo": "nix",
|
"repo": "nix",
|
||||||
"rev": "7c91803598ffbcfe4a55c44ac6d49b2cf07a527f",
|
"rev": "b24a9318ea3f3600c1e24b4a00691ee912d4de12",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "domenkozar",
|
"owner": "domenkozar",
|
||||||
"ref": "relaxed-flakes",
|
"ref": "devenv-2.21",
|
||||||
|
"repo": "nix",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nix-github-actions": {
|
||||||
|
"inputs": {
|
||||||
|
"nixpkgs": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"poetry2nix",
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1688870561,
|
||||||
|
"narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=",
|
||||||
|
"owner": "nix-community",
|
||||||
|
"repo": "nix-github-actions",
|
||||||
|
"rev": "165b1650b753316aa7f1787f3005a8d2da0f5301",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "nix-community",
|
||||||
|
"repo": "nix-github-actions",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nix_2": {
|
||||||
|
"inputs": {
|
||||||
|
"flake-compat": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"flake-compat"
|
||||||
|
],
|
||||||
|
"nixpkgs": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"nixpkgs"
|
||||||
|
],
|
||||||
|
"nixpkgs-regression": "nixpkgs-regression_2"
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1712911606,
|
||||||
|
"narHash": "sha256-BGvBhepCufsjcUkXnEEXhEVjwdJAwPglCC2+bInc794=",
|
||||||
|
"owner": "domenkozar",
|
||||||
|
"repo": "nix",
|
||||||
|
"rev": "b24a9318ea3f3600c1e24b4a00691ee912d4de12",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "domenkozar",
|
||||||
|
"ref": "devenv-2.21",
|
||||||
|
"repo": "nix",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nix_3": {
|
||||||
|
"inputs": {
|
||||||
|
"flake-compat": [
|
||||||
|
"devenv",
|
||||||
|
"flake-compat"
|
||||||
|
],
|
||||||
|
"flake-parts": "flake-parts",
|
||||||
|
"libgit2": "libgit2",
|
||||||
|
"nixpkgs": "nixpkgs_2",
|
||||||
|
"nixpkgs-23-11": "nixpkgs-23-11",
|
||||||
|
"nixpkgs-regression": "nixpkgs-regression_3",
|
||||||
|
"pre-commit-hooks": "pre-commit-hooks"
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1727438425,
|
||||||
|
"narHash": "sha256-X8ES7I1cfNhR9oKp06F6ir4Np70WGZU5sfCOuNBEwMg=",
|
||||||
|
"owner": "domenkozar",
|
||||||
|
"repo": "nix",
|
||||||
|
"rev": "f6c5ae4c1b2e411e6b1e6a8181cc84363d6a7546",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "domenkozar",
|
||||||
|
"ref": "devenv-2.24",
|
||||||
"repo": "nix",
|
"repo": "nix",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1678875422,
|
"lastModified": 1692808169,
|
||||||
"narHash": "sha256-T3o6NcQPwXjxJMn2shz86Chch4ljXgZn746c2caGxd8=",
|
"narHash": "sha256-x9Opq06rIiwdwGeK2Ykj69dNc2IvUH1fY55Wm7atwrE=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "126f49a01de5b7e35a43fd43f891ecf6d3a51459",
|
"rev": "9201b5ff357e781bf014d0330d18555695df7ba8",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -152,6 +430,22 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"nixpkgs-23-11": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1717159533,
|
||||||
|
"narHash": "sha256-oamiKNfr2MS6yH64rUn99mIZjc45nGJlj9eGth/3Xuw=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "a62e6edd6d5e1fa0329b8653c801147986f8d446",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "a62e6edd6d5e1fa0329b8653c801147986f8d446",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
"nixpkgs-regression": {
|
"nixpkgs-regression": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1643052045,
|
"lastModified": 1643052045,
|
||||||
|
@ -168,29 +462,93 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"nixpkgs-stable": {
|
"nixpkgs-regression_2": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1685801374,
|
"lastModified": 1643052045,
|
||||||
"narHash": "sha256-otaSUoFEMM+LjBI1XL/xGB5ao6IwnZOXc47qhIgJe8U=",
|
"narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "c37ca420157f4abc31e26f436c1145f8951ff373",
|
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"ref": "nixos-23.05",
|
"repo": "nixpkgs",
|
||||||
|
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nixpkgs-regression_3": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1643052045,
|
||||||
|
"narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nixpkgs-stable": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1720386169,
|
||||||
|
"narHash": "sha256-NGKVY4PjzwAa4upkGtAMz1npHGoRzWotlSnVlqI40mo=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "194846768975b7ad2c4988bdb82572c00222c0d7",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "NixOS",
|
||||||
|
"ref": "nixos-24.05",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"nixpkgs_2": {
|
"nixpkgs_2": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1704538339,
|
"lastModified": 1717432640,
|
||||||
"narHash": "sha256-1734d3mQuux9ySvwf6axRWZRBhtcZA9Q8eftD6EZg6U=",
|
"narHash": "sha256-+f9c4/ZX5MWDOuB1rKoWj+lBNm0z0rs4CK47HBLxy1o=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "88269ab3044128b7c2f4c7d68448b2fb50456870",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "NixOS",
|
||||||
|
"ref": "release-24.05",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nixpkgs_3": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1716977621,
|
||||||
|
"narHash": "sha256-Q1UQzYcMJH4RscmpTkjlgqQDX5yi1tZL0O345Ri6vXQ=",
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "devenv-nixpkgs",
|
||||||
|
"rev": "4267e705586473d3e5c8d50299e71503f16a6fb6",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "cachix",
|
||||||
|
"ref": "rolling",
|
||||||
|
"repo": "devenv-nixpkgs",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nixpkgs_4": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1729256560,
|
||||||
|
"narHash": "sha256-/uilDXvCIEs3C9l73JTACm4quuHUsIHcns1c+cHUJwA=",
|
||||||
"owner": "nixos",
|
"owner": "nixos",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "46ae0210ce163b3cba6c7da08840c1d63de9c701",
|
"rev": "4c2fcb090b1f3e5b47eaa7bd33913b574a11e0a0",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -200,13 +558,13 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"nixpkgs_3": {
|
"nixpkgs_5": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1704722960,
|
"lastModified": 1729256560,
|
||||||
"narHash": "sha256-mKGJ3sPsT6//s+Knglai5YflJUF2DGj7Ai6Ynopz0kI=",
|
"narHash": "sha256-/uilDXvCIEs3C9l73JTACm4quuHUsIHcns1c+cHUJwA=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "317484b1ead87b9c1b8ac5261a8d2dd748a0492d",
|
"rev": "4c2fcb090b1f3e5b47eaa7bd33913b574a11e0a0",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -216,13 +574,75 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"poetry2nix": {
|
||||||
|
"inputs": {
|
||||||
|
"flake-utils": "flake-utils",
|
||||||
|
"nix-github-actions": "nix-github-actions",
|
||||||
|
"nixpkgs": [
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"cachix",
|
||||||
|
"devenv",
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1692876271,
|
||||||
|
"narHash": "sha256-IXfZEkI0Mal5y1jr6IRWMqK8GW2/f28xJenZIPQqkY0=",
|
||||||
|
"owner": "nix-community",
|
||||||
|
"repo": "poetry2nix",
|
||||||
|
"rev": "d5006be9c2c2417dafb2e2e5034d83fabd207ee3",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "nix-community",
|
||||||
|
"repo": "poetry2nix",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
"pre-commit-hooks": {
|
"pre-commit-hooks": {
|
||||||
|
"inputs": {
|
||||||
|
"flake-compat": [
|
||||||
|
"devenv",
|
||||||
|
"nix"
|
||||||
|
],
|
||||||
|
"flake-utils": "flake-utils_2",
|
||||||
|
"gitignore": [
|
||||||
|
"devenv",
|
||||||
|
"nix"
|
||||||
|
],
|
||||||
|
"nixpkgs": [
|
||||||
|
"devenv",
|
||||||
|
"nix",
|
||||||
|
"nixpkgs"
|
||||||
|
],
|
||||||
|
"nixpkgs-stable": [
|
||||||
|
"devenv",
|
||||||
|
"nix",
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1712897695,
|
||||||
|
"narHash": "sha256-nMirxrGteNAl9sWiOhoN5tIHyjBbVi5e2tgZUgZlK3Y=",
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "pre-commit-hooks.nix",
|
||||||
|
"rev": "40e6053ecb65fcbf12863338a6dcefb3f55f1bf8",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "pre-commit-hooks.nix",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"pre-commit-hooks_2": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"flake-compat": [
|
"flake-compat": [
|
||||||
"devenv",
|
"devenv",
|
||||||
"flake-compat"
|
"flake-compat"
|
||||||
],
|
],
|
||||||
"flake-utils": "flake-utils",
|
|
||||||
"gitignore": "gitignore",
|
"gitignore": "gitignore",
|
||||||
"nixpkgs": [
|
"nixpkgs": [
|
||||||
"devenv",
|
"devenv",
|
||||||
|
@ -231,11 +651,11 @@
|
||||||
"nixpkgs-stable": "nixpkgs-stable"
|
"nixpkgs-stable": "nixpkgs-stable"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1704725188,
|
"lastModified": 1726745158,
|
||||||
"narHash": "sha256-qq8NbkhRZF1vVYQFt1s8Mbgo8knj+83+QlL5LBnYGpI=",
|
"narHash": "sha256-D5AegvGoEjt4rkKedmxlSEmC+nNLMBPWFxvmYnVLhjk=",
|
||||||
"owner": "cachix",
|
"owner": "cachix",
|
||||||
"repo": "pre-commit-hooks.nix",
|
"repo": "pre-commit-hooks.nix",
|
||||||
"rev": "ea96f0c05924341c551a797aaba8126334c505d2",
|
"rev": "4e743a6920eab45e8ba0fbe49dc459f1423a4b74",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -248,18 +668,18 @@
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"devenv": "devenv",
|
"devenv": "devenv",
|
||||||
"fenix": "fenix",
|
"fenix": "fenix",
|
||||||
"nixpkgs": "nixpkgs_3",
|
"nixpkgs": "nixpkgs_5",
|
||||||
"systems": "systems_2"
|
"systems": "systems_2"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"rust-analyzer-src": {
|
"rust-analyzer-src": {
|
||||||
"flake": false,
|
"flake": false,
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1704833483,
|
"lastModified": 1729454508,
|
||||||
"narHash": "sha256-Ox01mpYmjapNYaqOu4fMS/4Ma9NLd2rVNz6d4rJmcf4=",
|
"narHash": "sha256-1W5B/CnLgdC03iIFG0wtawO1+dGDWDpc84PeOHo2ecU=",
|
||||||
"owner": "rust-lang",
|
"owner": "rust-lang",
|
||||||
"repo": "rust-analyzer",
|
"repo": "rust-analyzer",
|
||||||
"rev": "ae6e73772432cfe35bb0ff6de6fdcfa908642b67",
|
"rev": "9323b5385863739d1c113f02e4cf3f2777c09977",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
|
84
flake.nix
84
flake.nix
|
@ -11,41 +11,55 @@
|
||||||
extra-substituters = "https://devenv.cachix.org";
|
extra-substituters = "https://devenv.cachix.org";
|
||||||
};
|
};
|
||||||
|
|
||||||
outputs = { self, nixpkgs, devenv, systems, ... } @ inputs:
|
outputs = {
|
||||||
let
|
self,
|
||||||
forEachSystem = nixpkgs.lib.genAttrs (import systems);
|
nixpkgs,
|
||||||
in
|
devenv,
|
||||||
{
|
fenix,
|
||||||
devShells = forEachSystem
|
systems,
|
||||||
(system:
|
...
|
||||||
let
|
} @ inputs: let
|
||||||
pkgs = nixpkgs.legacyPackages.${system};
|
forEachSystem = nixpkgs.lib.genAttrs (import systems);
|
||||||
in
|
in {
|
||||||
{
|
devShells =
|
||||||
default = devenv.lib.mkShell {
|
forEachSystem
|
||||||
inherit inputs pkgs;
|
(system: let
|
||||||
modules = [
|
pkgs = nixpkgs.legacyPackages.${system};
|
||||||
({pkgs, config, ...}: {
|
toolchain = with fenix.packages.${system};
|
||||||
languages.rust = {
|
combine [
|
||||||
enable = true;
|
complete.toolchain
|
||||||
channel = "nightly";
|
];
|
||||||
};
|
in {
|
||||||
|
default = devenv.lib.mkShell {
|
||||||
|
inherit inputs pkgs;
|
||||||
|
modules = [
|
||||||
|
({
|
||||||
|
pkgs,
|
||||||
|
config,
|
||||||
|
...
|
||||||
|
}: {
|
||||||
|
pre-commit.hooks = {
|
||||||
|
clippy.enable = false;
|
||||||
|
rustfmt.enable = true;
|
||||||
|
};
|
||||||
|
|
||||||
pre-commit.hooks = {
|
env = {
|
||||||
clippy.enable = true;
|
RUST_BACKTRACE = 1;
|
||||||
rustfmt.enable = true;
|
};
|
||||||
};
|
packages = with pkgs; [
|
||||||
|
just
|
||||||
packages = with pkgs; [
|
nushell
|
||||||
just nushell
|
ripgrep
|
||||||
ripgrep
|
typst
|
||||||
typst typst-lsp
|
typst-lsp
|
||||||
mold
|
mold
|
||||||
cargo-nextest cargo-watch
|
cargo-nextest
|
||||||
];
|
cargo-watch
|
||||||
})
|
toolchain
|
||||||
];
|
];
|
||||||
};
|
})
|
||||||
});
|
];
|
||||||
};
|
};
|
||||||
|
});
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
4
testfiles/config.json
Normal file
4
testfiles/config.json
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
{
|
||||||
|
"example_value": "42",
|
||||||
|
"no_startup_message": true
|
||||||
|
}
|
4
testfiles/config.ron
Normal file
4
testfiles/config.ron
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
(
|
||||||
|
example_value: 42
|
||||||
|
no_startup_message: false,
|
||||||
|
)
|
3
testfiles/testproj/hello.owo
Normal file
3
testfiles/testproj/hello.owo
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
mod meow;
|
||||||
|
|
||||||
|
def broken
|
4
testfiles/testproj/hello/meow.owo
Normal file
4
testfiles/testproj/hello/meow.owo
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
mod mrawr {}
|
||||||
|
mod mrow {
|
||||||
|
def gay = ;
|
||||||
|
}
|
8
testfiles/testproj/test.owo
Normal file
8
testfiles/testproj/test.owo
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
mod hello;
|
||||||
|
mod world {
|
||||||
|
mod meow {}
|
||||||
|
mod uwu {
|
||||||
|
mod test {}
|
||||||
|
mod meow {}
|
||||||
|
}
|
||||||
|
}
|
1
testfiles/testproj/uwu.owo
Normal file
1
testfiles/testproj/uwu.owo
Normal file
|
@ -0,0 +1 @@
|
||||||
|
mod meow {}
|
Loading…
Reference in a new issue