Compare commits

...

41 commits

SHA1        Message  Date
e581a8e7cf  json-pawarser: format the checks prettily  2024-11-20 10:28:41 +01:00
d809d3b52d  json-pawarser: test grammar::object  2024-10-30 12:27:27 +01:00
ef1a9f5029  json-pawarser: test grammar::member  2024-10-30 10:57:52 +01:00
662cb8ba0e  json-pawarser: make return of object grammar easier to understand  2024-10-29 19:40:50 +01:00
fcf91f25e3  json-pawarser: test stuffs uwu  2024-10-29 19:36:46 +01:00
958857cb58  handle debug pretty printing  2024-10-27 17:21:42 +01:00
883b0c804e  add implicit root node to avoid crash on multiple root nodes  2024-10-27 16:59:18 +01:00
f7d05ead2c  rename trait meta syntaxkinds  2024-10-27 16:56:39 +01:00
cee9b97dbf  extract modules to files  2024-10-23 13:27:36 +02:00
e5ccebe679  add arrays  2024-10-23 13:01:39 +02:00
3164328568  implement multiple members, member_values and trailing commata  2024-10-23 10:52:44 +02:00
c564d0f24c  implement Marker::abandon  2024-10-23 10:51:43 +02:00
b8720b2df9  pawarser, json-pawarser: get first debug print working!  2024-10-21 18:29:46 +02:00
af6886214b  flake.lock: Update  2024-10-21 15:31:08 +02:00
ac75978c01  pawarser: Implement Parser::finish  2024-10-21 15:16:36 +02:00
9b1f6a1dc1  pawarser: Implement CompletedMarker::precede  2024-10-21 15:15:40 +02:00
fed8cf2466  pawarser: require/derive PartialEq + Eq for NodeKind and its contents  2024-10-21 15:15:06 +02:00
91f766c18e  pawarser: make raw_tokens vec owned in input  2024-10-21 15:13:38 +02:00
becc4b4041  json-pawarser: init  2024-10-18 14:05:27 +02:00
21bcf62ea5  pawarser(setup): continue working on the bare basics  2024-10-17 09:54:09 +02:00
34ddaacb58  pawarser(chore): split up files  2024-10-13 16:47:53 +02:00
ec2ff5778b  pawarser(setup): basic parser stuff and types around it. also, a builder.  2024-10-13 16:44:59 +02:00
a3ab844ba7  pawarser(init): start extracting the parser lib  2024-10-13 15:32:26 +02:00
a693b57447  yet another attempt at building an evaluator  2024-10-10 10:23:54 +02:00
3412eb9395  executor (poc): init proof of concept executor crate  2024-07-18 19:12:58 +02:00
ccc6d4f532  update/fix cargo.lock  2024-07-08 20:51:51 +02:00
54401d2a21  app: apply review  2024-07-08 20:49:13 +02:00
18309ec919  app, prowocessing: move dev commands to tests  2024-07-08 20:49:13 +02:00
0705702d4a  experimentation: implement some basic traits for io and data types  2024-07-08 20:49:13 +02:00
31a044577a  experimentation: use dynamic type ids for signatures and add qol macro  2024-07-08 20:49:13 +02:00
911339fc2a  simplified by entirely removing DataRef  2024-07-08 20:49:12 +02:00
619b7acf94  prowocessing: let-else refatoring as according to review  2024-07-08 20:49:12 +02:00
b7bc0366c2  prowocessing: apply most basic reviews  2024-07-08 20:48:56 +02:00
734a734f09  prowocessing: add documentation of trait experiment  2024-07-08 20:48:10 +02:00
dddbcccf72  prowocessing: refactor trait based experiment to individual files  2024-07-08 20:48:10 +02:00
26996fbd00  prowocessing: add trait based experiment  2024-07-08 20:48:10 +02:00
d9a07c8898  prowocessing: extract experiment into its own file  2024-07-08 20:48:10 +02:00
db9228dec4  cli: add dev command for enums experiment  2024-07-08 20:48:10 +02:00
56ec11e143  cli: add subcommand support  2024-07-08 20:48:09 +02:00
1e9648966f  experimentation: write experiment for enum architecture  2024-07-08 20:48:09 +02:00
a2695a2a11  processing library: init  2024-07-08 20:46:39 +02:00
38 changed files with 2780 additions and 423 deletions

Cargo.lock (generated): 1115 changes. File diff suppressed because it is too large.

View file

@ -5,6 +5,10 @@ members = [
"crates/ir",
"crates/lang",
"crates/svg-filters",
"crates/prowocessing",
"crates/executor-poc",
"crates/pawarser",
"crates/json-pawarser",
]
resolver = "2"

View file

@ -12,6 +12,7 @@ clap = { workspace = true, features = [ "derive", "env" ] }
dirs = "5"
eval = { path = "../eval" }
ir = { path = "../ir" }
prowocessing = { path = "../prowocessing"}
owo-colors = "4"
ron = "0.8"
serde = { workspace = true, features = [ "derive" ] }

View file

@ -1,18 +1,11 @@
use std::path::PathBuf;
use clap::Parser;
use self::{
cli::Args,
config_file::{find_config_file, Configs},
};
use self::config_file::{find_config_file, Configs};
pub(crate) use cli::CliConfigs;
mod cli;
mod config_file;
/// this struct may hold all configuration
pub struct Config {
pub source: PathBuf,
pub evaluator: eval::Available,
pub startup_msg: bool,
@ -20,13 +13,17 @@ pub struct Config {
impl Config {
/// Get the configs from all possible places (args, file, env...)
pub fn read() -> Self {
let args = Args::parse();
let config = if let Some(config) = args.config_path {
Ok(config)
} else {
find_config_file()
};
pub fn read(args: &CliConfigs) -> Self {
// let config = if let Some(config) = &args.config_path {
// Ok(config.clone())
// } else {
// find_config_file()
// };
let config = args
.config_path
.clone()
.ok_or(())
.or_else(|()| find_config_file());
// try to read a maybe existing config file
let config = config.ok().and_then(|path| {
@ -42,7 +39,6 @@ impl Config {
if let Some(file) = config {
Self {
source: args.source,
evaluator: args.evaluator.and(file.evaluator).unwrap_or_default(),
// this is negated because to an outward api, the negative is more intuitive,
// while in the source the other way around is more intuitive
@ -50,7 +46,6 @@ impl Config {
}
} else {
Self {
source: args.source,
startup_msg: !args.no_startup_message,
evaluator: args.evaluator.unwrap_or_default(),
}
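
The rewritten `Config::read` replaces the commented-out `if let` with an `Option` → `Result` chain. A minimal, self-contained sketch of the same idiom, with a hypothetical stub standing in for `find_config_file()` and `()` as a placeholder error type:

use std::path::PathBuf;

// Hypothetical stub standing in for `find_config_file()`.
fn find_config_file() -> Result<PathBuf, ()> {
    Err(())
}

fn resolve(config_path: Option<PathBuf>) -> Result<PathBuf, ()> {
    // `ok_or(())` lifts the Option into a Result so that `or_else`
    // can supply the fallback, mirroring `Config::read` above.
    config_path.ok_or(()).or_else(|()| find_config_file())
}

fn main() {
    assert_eq!(resolve(Some(PathBuf::from("a.toml"))), Ok(PathBuf::from("a.toml")));
    assert!(resolve(None).is_err());
}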

View file

@ -1,12 +1,9 @@
use std::path::PathBuf;
use clap::{builder::BoolishValueParser, ArgAction, Parser};
#[derive(Parser)]
pub(crate) struct Args {
/// What file contains the pipeline to evaluate.
pub source: PathBuf,
use clap::{builder::BoolishValueParser, ArgAction, Args};
#[derive(Args)]
pub(crate) struct CliConfigs {
/// How to actually run the pipeline.
/// Overrides the config file. Defaults to the debug evaluator.
#[arg(short, long)]

View file

@ -1,6 +1,8 @@
use std::fs;
use std::{fs, path::PathBuf};
use config::Config;
use clap::{Parser, Subcommand};
use config::{CliConfigs, Config};
use dev::DevCommands;
use welcome_msg::print_startup_msg;
mod config;
@ -9,19 +11,60 @@ mod config;
mod error_reporting;
mod welcome_msg;
#[derive(Parser)]
struct Args {
#[command(flatten)]
configs: CliConfigs,
#[command(subcommand)]
command: Commands,
}
#[derive(Subcommand)]
enum Commands {
Run {
/// What file contains the pipeline to evaluate.
source: PathBuf,
},
Dev {
#[command(subcommand)]
command: DevCommands,
},
}
fn main() {
// TODO: proper error handling across the whole function
// don't forget to also look inside `Config`
let cfg = Config::read();
let args = Args::parse();
let cfg = Config::read(&args.configs);
if cfg.startup_msg {
print_startup_msg();
}
let source = fs::read_to_string(cfg.source).expect("can't find source file");
let ir = ir::from_ron(&source).expect("failed to parse source to graph ir");
match args.command {
Commands::Run { source } => {
let source = fs::read_to_string(source).expect("can't find source file");
let ir = ir::from_ron(&source).expect("failed to parse source to graph ir");
let mut machine = cfg.evaluator.pick();
machine.feed(ir);
machine.eval_full();
let mut machine = cfg.evaluator.pick();
machine.feed(ir);
machine.eval_full();
}
Commands::Dev {
command: dev_command,
} => dev_command.run(),
}
}
mod dev {
use clap::Subcommand;
#[derive(Subcommand)]
pub(crate) enum DevCommands {}
impl DevCommands {
pub fn run(self) {
println!("There are currently no dev commands.");
}
}
}
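
A minimal sketch of the resulting CLI shape (field and attribute details simplified from the diff above; `try_parse_from` is clap's test-friendly entry point). Flattened options sit on the top-level command, so they come before the subcommand on the command line:

use clap::{Args, Parser, Subcommand};
use std::path::PathBuf;

#[derive(Parser)]
struct Cli {
    #[command(flatten)]
    configs: Opts,
    #[command(subcommand)]
    command: Cmd,
}

#[derive(Args)]
struct Opts {
    /// Stands in for the flattened `CliConfigs` fields.
    #[arg(long)]
    no_startup_message: bool,
}

#[derive(Subcommand)]
enum Cmd {
    Run { source: PathBuf },
}

fn main() {
    let cli = Cli::try_parse_from(["app", "--no-startup-message", "run", "pipe.ron"]).unwrap();
    assert!(cli.configs.no_startup_message);
    let Cmd::Run { source } = cli.command;
    assert_eq!(source, PathBuf::from("pipe.ron"));
}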

View file

@ -37,7 +37,7 @@ impl Available {
#[must_use]
pub fn pick(&self) -> Box<dyn Evaluator> {
match self {
Self::Debug => Box::new(kind::debug::Evaluator::default()),
Self::Debug => Box::<kind::debug::Evaluator>::default(),
}
}
}

View file

@ -0,0 +1,13 @@
[package]
name = "executor-poc"
version = "0.1.0"
edition = "2021"
[dependencies]
image = "0.25.1"
indexmap = "2.2.6"
nalgebra = "0.33.0"
petgraph.workspace = true
[lints]
workspace = true

View file

@ -0,0 +1,128 @@
use indexmap::IndexMap;
use instructions::Instruction;
use petgraph::graph::DiGraph;
use types::Type;
trait Node {
fn inputs() -> IndexMap<String, Type>;
fn outputs() -> IndexMap<String, Type>;
}
struct NodeGraph {
graph: DiGraph<Instruction, TypedEdge>,
}
struct TypedEdge {
from: String,
to: String,
typ: Type,
}
mod instructions {
//! This is the lowest level of the IR, the one the executor will use.
use std::path::Path;
use indexmap::{indexmap, IndexMap};
use super::types::Type;
pub enum Instruction {
// File handling
LoadFile,
SaveFile,
ColorMatrix,
PosMatrix,
Blend,
SplitChannels,
}
impl Instruction {
fn inputs(&self) -> IndexMap<String, Type> {
match self {
Instruction::LoadFile => indexmap! {
"path" => Type::Path
},
Instruction::SaveFile => indexmap! {
"path" => Type::Path
},
Instruction::ColorMatrix => indexmap! {
"image" => Type::ImageData,
"matrix" => Type::Mat(4,5)
},
Instruction::PosMatrix => indexmap! {
"image" => Type::ImageData,
"matrix" => Type::Mat(2, 3),
},
Instruction::Blend => todo!(),
Instruction::SplitChannels => todo!(),
}
}
fn outputs(&self) -> IndexMap<String, Type> {
match self {
Instruction::LoadFile => indexmap! {
"image" => Type::ImageData
},
Instruction::SaveFile => indexmap! {},
Instruction::ColorMatrix => indexmap! {
"resut" => Type::ImageData
},
Instruction::PosMatrix => todo!(),
Instruction::Blend => todo!(),
Instruction::SplitChannels => todo!(),
}
}
}
}
mod types {
pub enum Type {
// TODO: later do lower level type system for this stuff?
// Image(Size, PixelType),
// // image data for processing.
// // always PixelType::Rgba32F
// ImageData(Size),
// // stuff that's still to be generated, not sized and no pixeltype
// ProceduralImage,
ImageData,
Text,
Integer,
Float,
Double,
Path,
Bool,
Vec(
// length,
u8,
),
Mat(
// Rows
u8,
// Columns
u8,
),
}
// pub struct Size {
// width: u16,
// height: u16,
// }
// Pixel types. Taken from the variants [here](https://docs.rs/image/latest/image/enum.DynamicImage.html).
// pub enum PixelType {
// Luma8,
// LumaA8,
// Rgb8,
// Rgba8,
// Luma16,
// LumaA16,
// Rgb16,
// Rgba16,
// Rgb32F,
// #[default]
// Rgba32F,
// }
}
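
A hypothetical construction sketch of how the pieces above might compose — wiring a LoadFile node into a ColorMatrix node over a typed edge. Nothing in this commit builds a graph yet, and `Instruction`, `TypedEdge` and `Type` live in this file's modules, so take this as illustration only:

use petgraph::graph::DiGraph;

fn build_demo_graph() -> DiGraph<Instruction, TypedEdge> {
    let mut graph = DiGraph::new();
    let load = graph.add_node(Instruction::LoadFile);
    let tint = graph.add_node(Instruction::ColorMatrix);
    // Feed LoadFile's "image" output into ColorMatrix's "image" input.
    graph.add_edge(load, tint, TypedEdge {
        from: "image".into(),
        to: "image".into(),
        typ: Type::ImageData,
    });
    graph
}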

View file

@ -0,0 +1,13 @@
[package]
name = "json-pawarser"
version = "0.1.0"
edition = "2021"
[dependencies]
logos = "0.14.2"
enumset = "1.1.3"
rowan = "0.15.15"
pawarser = { path = "../pawarser" }
[lints]
workspace = true

View file

@ -0,0 +1,78 @@
use array::array;
use enumset::{enum_set, EnumSet};
use pawarser::parser::ParserBuilder;
use crate::{
syntax_error::SyntaxError,
syntax_kind::{lex, SyntaxKind},
};
use self::object::object;
mod array;
mod object;
pub(crate) type Parser<'src> = pawarser::Parser<'src, SyntaxKind, SyntaxError>;
pub(crate) type CompletedMarker = pawarser::CompletedMarker<SyntaxKind, SyntaxError>;
const BASIC_VALUE_TOKENS: EnumSet<SyntaxKind> =
enum_set!(SyntaxKind::BOOL | SyntaxKind::NULL | SyntaxKind::NUMBER | SyntaxKind::STRING);
pub fn value(p: &mut Parser) -> bool {
if BASIC_VALUE_TOKENS.contains(p.current()) {
p.do_bump();
return true;
} else {
object(p).or_else(|| array(p)).is_some()
}
}
#[cfg(test)]
mod tests {
use super::{
test_utils::{check_parser, gen_checks},
value,
};
#[test]
fn value_lit() {
gen_checks! {value;
r#""helo world""# => r#"ROOT { STRING "\"helo world\""; }"#,
"42" => r#"ROOT { NUMBER "42"; }"#,
"null" => r#"ROOT { NULL "null"; }"#,
"true" => r#"ROOT { BOOL "true"; }"#,
"false" => r#"ROOT { BOOL "false"; }"#
};
}
}
#[cfg(test)]
mod test_utils {
use pawarser::parser::ParserBuilder;
use crate::syntax_kind::{lex, SyntaxKind};
use super::Parser;
macro_rules! gen_checks {
($fn_to_test:ident; $($in:literal => $out:literal),+) => {
$(crate::grammar::test_utils::check_parser($in, |p| { $fn_to_test(p); }, $out);)+
}
}
pub(super) use gen_checks;
pub(super) fn check_parser(input: &str, parser_fn: fn(&mut Parser), expected_output: &str) {
let toks = lex(input);
let mut p: Parser = ParserBuilder::new(toks)
.add_meaningless(SyntaxKind::WHITESPACE)
.add_meaningless(SyntaxKind::NEWLINE)
.build();
parser_fn(&mut p);
let out = p.finish();
assert_eq!(format!("{out:?}").trim_end(), expected_output);
}
}
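
Schematically, parsing a standalone value end to end looks just like `check_parser` above (a sketch; `finish` and the builder come from the pawarser diffs further down):

use pawarser::parser::ParserBuilder;

let toks = lex("true");
let mut p: Parser = ParserBuilder::new(toks)
    .add_meaningless(SyntaxKind::WHITESPACE)
    .add_meaningless(SyntaxKind::NEWLINE)
    .build();
assert!(value(&mut p));
// Debug-printing the output renders the tree, as in the tests:
// ROOT { BOOL "true"; }
println!("{:?}", p.finish());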

View file

@ -0,0 +1,36 @@
use crate::{syntax_error::SyntaxError, syntax_kind::SyntaxKind};
use super::{value, CompletedMarker, Parser};
pub(super) fn array(p: &mut Parser) -> Option<CompletedMarker> {
let array_start = p.start("array");
if !p.eat(SyntaxKind::BRACKET_OPEN) {
array_start.abandon(p);
return None;
}
let el = p.start("arr_el");
value(p);
el.complete(p, SyntaxKind::ELEMENT);
while p.at(SyntaxKind::COMMA) {
let potential_trailing_comma = p.start("potential_trailing_comma");
p.eat(SyntaxKind::COMMA);
let maybe_el = p.start("arr_el");
if !value(p) {
maybe_el.abandon(p);
potential_trailing_comma.complete(p, SyntaxKind::TRAILING_COMMA);
} else {
maybe_el.complete(p, SyntaxKind::ELEMENT);
potential_trailing_comma.abandon(p);
}
}
Some(if !p.eat(SyntaxKind::BRACKET_CLOSE) {
array_start.error(p, SyntaxError::UnclosedArray)
} else {
array_start.complete(p, SyntaxKind::ARRAY)
})
}

View file

@ -0,0 +1,92 @@
use crate::{grammar::value, syntax_error::SyntaxError, syntax_kind::SyntaxKind};
use super::{CompletedMarker, Parser, BASIC_VALUE_TOKENS};
pub(super) fn object(p: &mut Parser) -> Option<CompletedMarker> {
let obj_start = p.start("object");
if !p.eat(SyntaxKind::BRACE_OPEN) {
obj_start.abandon(p);
return None;
}
member(p);
while p.at(SyntaxKind::COMMA) {
// not always an error, later configurable
let potential_trailing_comma = p.start("potential_trailing_comma");
p.eat(SyntaxKind::COMMA);
if member(p).is_none() {
potential_trailing_comma.complete(p, SyntaxKind::TRAILING_COMMA);
} else {
potential_trailing_comma.abandon(p);
}
}
Some(if p.eat(SyntaxKind::BRACE_CLOSE) {
obj_start.complete(p, SyntaxKind::OBJECT)
} else {
obj_start.error(p, SyntaxError::UnclosedObject)
})
}
fn member(p: &mut Parser) -> Option<CompletedMarker> {
let member_start = p.start("member");
if p.at(SyntaxKind::BRACE_CLOSE) {
member_start.abandon(p);
return None;
} else if p.at(SyntaxKind::STRING) {
let member_name_start = p.start("member_name");
p.eat(SyntaxKind::STRING);
member_name_start.complete(p, SyntaxKind::MEMBER_NAME);
} else {
return todo!("handle other tokens: {:?}", p.current());
}
if !p.eat(SyntaxKind::COLON) {
todo!("handle wrong tokens")
}
let member_value_start = p.start("member_value_start");
if value(p) {
member_value_start.complete(p, SyntaxKind::MEMBER_VALUE);
Some(member_start.complete(p, SyntaxKind::MEMBER))
} else {
member_value_start.abandon(p);
let e = member_start.error(p, SyntaxError::MemberMissingValue);
Some(
e.precede(p, "member but failed already")
.complete(p, SyntaxKind::MEMBER),
)
}
}
#[cfg(test)]
mod tests {
use crate::grammar::{
object::{member, object},
test_utils::gen_checks,
};
#[test]
fn object_basic() {
gen_checks! {object;
r#"{"a": "b"}"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { STRING "\"b\""; } } BRACE_CLOSE "}"; } }"#,
r#"{"a": 42}"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { NUMBER "42"; } } BRACE_CLOSE "}"; } }"#,
r#"{"a": "b""# => r#"ROOT { PARSE_ERR: UnclosedObject { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { STRING "\"b\""; } } } }"#,
r#"{"a": }"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { PARSE_ERR: MemberMissingValue { MEMBER_NAME { STRING "\"a\""; } COLON ":"; } } WHITESPACE " "; BRACE_CLOSE "}"; } }"#,
r#"{"a":"# => r#"ROOT { PARSE_ERR: UnclosedObject { BRACE_OPEN "{"; MEMBER { PARSE_ERR: MemberMissingValue { MEMBER_NAME { STRING "\"a\""; } COLON ":"; } } } }"#,
r#"{"a":true,}"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; MEMBER_VALUE { BOOL "true"; } } TRAILING_COMMA { COMMA ","; } BRACE_CLOSE "}"; } }"#
}
}
#[test]
fn member_basic() {
gen_checks! {member;
r#""a": "b""# => r#"ROOT { MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { STRING "\"b\""; } } }"#,
r#""a": 42"# => r#"ROOT { MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { NUMBER "42"; } } }"#,
r#""a":"# => r#"ROOT { MEMBER { PARSE_ERR: MemberMissingValue { MEMBER_NAME { STRING "\"a\""; } COLON ":"; } } }"#
}
}
}

View file

@ -0,0 +1,3 @@
mod grammar;
mod syntax_error;
mod syntax_kind;

View file

@ -0,0 +1,11 @@
use crate::syntax_kind::SyntaxKind;
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SyntaxError {
UnclosedObject,
UnclosedArray,
DisallowedKeyType(SyntaxKind),
MemberMissingValue,
UnexpectedTrailingComma,
}
impl pawarser::parser::SyntaxError for SyntaxError {}

View file

@ -0,0 +1,117 @@
use logos::Logos;
pub fn lex(src: &str) -> Vec<(SyntaxKind, &str)> {
let mut lex = SyntaxKind::lexer(src);
let mut r = Vec::new();
while let Some(tok_res) = lex.next() {
r.push((tok_res.unwrap_or(SyntaxKind::LEX_ERR), lex.slice()))
}
r
}
#[derive(enumset::EnumSetType, Debug, Logos, PartialEq, Eq, Clone, Copy, Hash)]
#[repr(u16)]
#[enumset(no_super_impls)]
#[allow(non_camel_case_types)]
pub enum SyntaxKind {
OBJECT,
MEMBER,
MEMBER_NAME,
MEMBER_VALUE,
ARRAY,
ELEMENT,
// SyntaxKinds for future json5/etc support
TRAILING_COMMA,
// Tokens
// Regexes adapted from [the logos handbook](https://logos.maciej.codes/examples/json_borrowed.html)
#[token("true")]
#[token("false")]
BOOL,
#[token("{")]
BRACE_OPEN,
#[token("}")]
BRACE_CLOSE,
#[token("[")]
BRACKET_OPEN,
#[token("]")]
BRACKET_CLOSE,
#[token(":")]
COLON,
#[token(",")]
COMMA,
#[token("null")]
NULL,
#[regex(r"-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?")]
NUMBER,
#[regex(r#""([^"\\]|\\["\\bnfrt]|u[a-fA-F0-9]{4})*""#)]
STRING,
// Whitespace tokens
#[regex("[ \\t\\f]+")]
WHITESPACE,
#[token("\n")]
NEWLINE,
// Error SyntaxKinds
LEX_ERR,
PARSE_ERR,
// Meta SyntaxKinds
ROOT,
EOF,
}
impl pawarser::parser::SyntaxElement for SyntaxKind {
const SYNTAX_EOF: Self = Self::EOF;
const SYNTAX_ERROR: Self = Self::PARSE_ERR;
const SYNTAX_ROOT: Self = Self::ROOT;
}
impl From<SyntaxKind> for rowan::SyntaxKind {
fn from(kind: SyntaxKind) -> Self {
Self(kind as u16)
}
}
impl From<rowan::SyntaxKind> for SyntaxKind {
fn from(raw: rowan::SyntaxKind) -> Self {
assert!(raw.0 <= SyntaxKind::EOF as u16);
#[allow(unsafe_code, reason = "The transmute is necessary here")]
unsafe {
std::mem::transmute::<u16, SyntaxKind>(raw.0)
}
}
}
#[cfg(test)]
mod tests {
use crate::syntax_kind::{lex, SyntaxKind};
#[test]
fn simple_object() {
const TEST_DATA: &str = r#"{"hello_world": "meow", "some_num":7.42}"#;
assert_eq!(
dbg!(lex(TEST_DATA)),
vec![
(SyntaxKind::BRACE_OPEN, "{"),
(SyntaxKind::STRING, "\"hello_world\""),
(SyntaxKind::COLON, ":"),
(SyntaxKind::WHITESPACE, " "),
(SyntaxKind::STRING, "\"meow\""),
(SyntaxKind::COMMA, ","),
(SyntaxKind::WHITESPACE, " "),
(SyntaxKind::STRING, "\"some_num\""),
(SyntaxKind::COLON, ":"),
(SyntaxKind::NUMBER, "7.42"),
(SyntaxKind::BRACE_CLOSE, "}")
]
);
}
}
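
Note that `lex` never fails outright: any input logos cannot match is surfaced as a LEX_ERR token with its slice preserved, so the parser can keep going. A small sketch of the expected behaviour:

let toks = lex("{@}");
assert_eq!(
    toks,
    vec![
        (SyntaxKind::BRACE_OPEN, "{"),
        (SyntaxKind::LEX_ERR, "@"),
        (SyntaxKind::BRACE_CLOSE, "}"),
    ]
);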

View file

@ -0,0 +1,12 @@
[package]
name = "pawarser"
version = "0.1.0"
edition = "2021"
[dependencies]
rowan = "0.15.15"
drop_bomb = "0.1.5"
enumset = "1.1.3"
[lints]
workspace = true

View file

@ -0,0 +1,8 @@
#![feature(iter_collect_into)]
pub mod parser;
pub use parser::{
error::SyntaxError,
marker::{CompletedMarker, Marker},
Parser, SyntaxElement,
};

View file

@ -0,0 +1,253 @@
use std::{cell::Cell, fmt, marker::PhantomData, mem};
use enumset::{EnumSet, EnumSetType};
use rowan::{GreenNode, GreenNodeBuilder};
use crate::parser::event::NodeKind;
use self::{event::Event, input::Input, marker::Marker};
pub use {error::SyntaxError, output::ParserOutput};
pub mod error;
mod event;
mod input;
pub mod marker;
pub mod output;
/// this is used to define some required SyntaxKinds like an EOF token or an error token
pub trait SyntaxElement
where
Self: EnumSetType
+ Into<rowan::SyntaxKind>
+ From<rowan::SyntaxKind>
+ fmt::Debug
+ Clone
+ PartialEq
+ Eq,
{
/// EOF value. This will be used by the rest of the parser library to represent an EOF.
const SYNTAX_EOF: Self;
/// Error value. This will be used as a placeholder for associated respective errors.
const SYNTAX_ERROR: Self;
const SYNTAX_ROOT: Self;
}
pub struct Parser<'src, SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
input: Input<'src, SyntaxKind>,
pos: usize,
events: Vec<Event<SyntaxKind, SyntaxErr>>,
step_limit: u32,
steps: Cell<u32>,
}
impl<'src, 'toks, SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>
Parser<'src, SyntaxKind, SyntaxErr>
{
/// eat all meaningless tokens at the end of the file.
pub fn eat_succeeding_meaningless(&mut self) {
self.push_ev(Event::Eat {
count: self.input.meaningless_tail_len(),
});
}
/// Get token from current position of the parser.
pub fn current(&self) -> SyntaxKind {
self.step();
self.input.kind(self.pos)
}
pub fn start(&mut self, name: &str) -> Marker {
let pos = self.events.len();
self.push_ev(Event::tombstone());
Marker::new(pos, name)
}
/// Eat next token if it's of kind `kind` and return `true`.
/// Otherwise, `false`.
pub fn eat(&mut self, kind: SyntaxKind) -> bool {
if !self.at(kind) {
return false;
}
self.do_bump();
true
}
pub fn do_bump(&mut self) {
self.push_ev(Event::Eat {
count: self.input.preceding_meaningless(self.pos),
});
self.pos += 1;
}
/// Check if the token at the current parser position is of `kind`
pub fn at(&self, kind: SyntaxKind) -> bool {
self.nth_at(0, kind)
}
/// Check if the token that is `n` ahead is of `kind`
pub fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool {
self.nth(n) == kind
}
pub fn nth(&self, n: usize) -> SyntaxKind {
self.step();
self.input.kind(self.pos + n)
}
fn push_ev(&mut self, event: Event<SyntaxKind, SyntaxErr>) {
self.events.push(event);
}
fn step(&self) {
let steps = self.steps.get();
assert!(steps <= self.step_limit, "the parser seems stuck.");
self.steps.set(steps + 1);
}
pub fn finish(self) -> ParserOutput<SyntaxKind, SyntaxErr> {
let Self {
input,
pos,
mut events,
step_limit,
steps,
} = self;
let (mut raw_toks, meaningless_tokens) = input.dissolve();
let mut builder = GreenNodeBuilder::new();
// TODO: document what the hell a forward parent is
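// In short: `CompletedMarker::precede` lets an already-finished node be
// wrapped by a node that is only started later. The child's `Start` event
// stores the relative offset of that later `Start` event as its
// `forward_parent`; the loop below follows that chain, collects the kinds
// into `fw_parents`, and opens the parents before the child so the later
// node ends up above the earlier one in the tree.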
let mut fw_parents = Vec::new();
let mut errors: Vec<SyntaxErr> = Vec::new();
raw_toks.reverse();
// always have an implicit root node to avoid [`GreenNodeBuilder::finish()`] panicking due to multiple root elements.
builder.start_node(SyntaxKind::SYNTAX_ROOT.into());
for i in 0..events.len() {
match mem::replace(&mut events[i], Event::tombstone()) {
Event::Start {
kind,
forward_parent,
} => {
if kind == NodeKind::Tombstone && forward_parent.is_none() {
continue;
}
// resolving forward parents
// temporarily jump around with the parser index and replace them with tombstones
fw_parents.push(kind);
let mut idx = i;
let mut fp = forward_parent;
while let Some(fwd) = fp {
idx += fwd as usize;
fp = match mem::replace(&mut events[idx], Event::tombstone()) {
Event::Start {
kind,
forward_parent,
} => {
fw_parents.push(kind);
forward_parent
}
_ => unreachable!(),
}
}
// clear semantically meaningless tokens before the new tree node for aesthetic reasons
while raw_toks
.last()
.is_some_and(|v| meaningless_tokens.contains(v.0))
{
// update first next Eat event
match events.iter_mut().find(|ev| matches!(ev, Event::Eat { .. })) {
Some(Event::Eat { count }) => *count -= 1,
_ => unreachable!(),
}
// put the whitespace token into the tree
let (tok, text) = raw_toks.pop().unwrap();
builder.token(tok.into(), text);
}
// insert forward parents into the tree in correct order
for kind in fw_parents.drain(..).rev() {
match kind {
NodeKind::Syntax(kind) => builder.start_node(kind.into()),
NodeKind::Error(err) => {
errors.push(err);
builder.start_node(SyntaxKind::SYNTAX_ERROR.into())
}
_ => {}
}
}
}
Event::Finish => builder.finish_node(),
Event::Eat { count } => (0..count).for_each(|_| {
let (tok, text) = raw_toks.pop().unwrap();
builder.token(tok.into(), text);
}),
}
}
// finish SYNTAX_ROOT
builder.finish_node();
ParserOutput {
green_node: builder.finish(),
errors,
_syntax_kind: PhantomData::<SyntaxKind>,
}
}
}
pub struct ParserBuilder<
'src,
SyntaxKind: SyntaxElement,
// SyntaxErr: SyntaxError,
> {
raw_toks: Vec<(SyntaxKind, &'src str)>,
meaningless_token_kinds: EnumSet<SyntaxKind>,
step_limit: u32,
}
impl<'src, SyntaxKind: SyntaxElement> ParserBuilder<'src, SyntaxKind> {
pub fn new(raw_toks: Vec<(SyntaxKind, &'src str)>) -> Self {
Self {
raw_toks,
meaningless_token_kinds: EnumSet::new(),
step_limit: 4096,
}
}
/// Sets the parser step limit.
/// Defaults to 4096
pub fn step_limit(mut self, new: u32) -> Self {
self.step_limit = new;
self
}
pub fn add_meaningless(mut self, kind: SyntaxKind) -> Self {
self.meaningless_token_kinds.insert(kind);
self
}
pub fn add_meaningless_many(mut self, kind: Vec<SyntaxKind>) -> Self {
self.meaningless_token_kinds
.insert_all(kind.into_iter().collect());
self
}
pub fn build<SyntaxErr: SyntaxError>(self) -> Parser<'src, SyntaxKind, SyntaxErr> {
let Self {
raw_toks,
meaningless_token_kinds,
step_limit,
} = self;
Parser {
input: Input::new(raw_toks, Some(meaningless_token_kinds)),
pos: 0,
events: Vec::new(),
step_limit,
steps: Cell::new(0),
}
}
}

View file

@ -0,0 +1,9 @@
use std::fmt;
/// A marker trait... for now!
// TODO: constrain that conversion to `NodeKind::Error` is enforced to be possible
pub trait SyntaxError
where
Self: fmt::Debug + Clone + PartialEq + Eq,
{
}

View file

@ -0,0 +1,42 @@
use enumset::EnumSetType;
use super::{error::SyntaxError, SyntaxElement};
pub enum Event<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
Start {
kind: NodeKind<SyntaxKind, SyntaxErr>,
forward_parent: Option<usize>,
},
Finish,
Eat {
count: usize,
},
}
impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> Event<SyntaxKind, SyntaxErr> {
pub fn tombstone() -> Self {
Self::Start {
kind: NodeKind::Tombstone,
forward_parent: None,
}
}
}
#[derive(Clone, PartialEq, Eq)]
pub enum NodeKind<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
Tombstone,
Syntax(SyntaxKind),
Error(SyntaxErr),
}
impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> NodeKind<SyntaxKind, SyntaxErr> {
pub fn is_tombstone(&self) -> bool {
matches!(self, Self::Tombstone)
}
pub fn is_syntax(&self) -> bool {
matches!(self, Self::Syntax(_))
}
pub fn is_error(&self) -> bool {
matches!(self, Self::Error(_))
}
}

View file

@ -0,0 +1,67 @@
use enumset::{EnumSet, EnumSetType};
use super::SyntaxElement;
pub struct Input<'src, SyntaxKind: SyntaxElement> {
raw: Vec<(SyntaxKind, &'src str)>,
// enumset of meaningless tokens
semantically_meaningless: EnumSet<SyntaxKind>,
// indices of non-meaningless tokens
meaningful_toks: Vec<usize>,
}
impl<'src, SyntaxKind: SyntaxElement> Input<'src, SyntaxKind> {
pub fn new(
raw_toks: Vec<(SyntaxKind, &'src str)>,
meaningless: Option<EnumSet<SyntaxKind>>,
) -> Self {
let mut meaningful_toks = Vec::new();
if let Some(meaningless) = meaningless {
let meaningful_toks = raw_toks
.iter()
.enumerate()
.filter_map(|(i, tok)| (!meaningless.contains(tok.0)).then_some(i))
.collect_into(&mut meaningful_toks);
}
Self {
raw: raw_toks,
semantically_meaningless: meaningless.unwrap_or_default(),
meaningful_toks,
}
}
pub fn kind(&self, idx: usize) -> SyntaxKind {
let Some(meaningful_idx) = self.meaningful_toks.get(idx) else {
return SyntaxKind::SYNTAX_EOF;
};
self.raw.get(*meaningful_idx).unwrap().0
}
pub fn preceding_meaningless(&self, idx: usize) -> usize {
assert!(self.meaningful_toks.len() > idx);
if idx == 0 {
// maybe should be `self.meaningful_toks[idx]` instead??
1
} else {
self.meaningful_toks[idx] - self.meaningful_toks[idx - 1]
}
}
/// get the count of meaningless tokens at the end of the file.
pub fn meaningless_tail_len(&self) -> usize {
self.raw.len() - (self.meaningful_toks.last().unwrap() + 1)
}
pub fn dissolve(self) -> (Vec<(SyntaxKind, &'src str)>, EnumSet<SyntaxKind>) {
let Self {
raw,
semantically_meaningless,
..
} = self;
(raw, semantically_meaningless)
}
}

View file

@ -0,0 +1,97 @@
use drop_bomb::DropBomb;
use rowan::SyntaxKind;
use super::{
error::SyntaxError,
event::{Event, NodeKind},
Parser, SyntaxElement,
};
pub struct Marker {
pos: usize,
bomb: DropBomb,
}
impl Marker {
pub(super) fn new(pos: usize, name: &str) -> Self {
Self {
pos,
bomb: DropBomb::new(format!("Marker {name} must be completed or abandoned.")),
}
}
fn close_node<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
mut self,
p: &mut Parser<SyntaxKind, SyntaxErr>,
kind: NodeKind<SyntaxKind, SyntaxErr>,
) -> CompletedMarker<SyntaxKind, SyntaxErr> {
self.bomb.defuse();
match &mut p.events[self.pos] {
Event::Start { kind: slot, .. } => *slot = kind.clone(),
_ => unreachable!(),
}
p.push_ev(Event::Finish);
CompletedMarker {
pos: self.pos,
kind,
}
}
pub fn complete<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
self,
p: &mut Parser<SyntaxKind, SyntaxErr>,
kind: SyntaxKind,
) -> CompletedMarker<SyntaxKind, SyntaxErr> {
self.close_node(p, NodeKind::Syntax(kind))
}
pub fn error<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
self,
p: &mut Parser<SyntaxKind, SyntaxErr>,
kind: SyntaxErr,
) -> CompletedMarker<SyntaxKind, SyntaxErr> {
self.close_node(p, NodeKind::Error(kind))
}
pub fn abandon<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
mut self,
p: &mut Parser<SyntaxKind, SyntaxErr>,
) {
self.bomb.defuse();
// clean up empty tombstone event from marker
if self.pos == p.events.len() - 1 {
match p.events.pop() {
Some(Event::Start {
kind: NodeKind::Tombstone,
forward_parent: None,
}) => (),
_ => unreachable!(),
}
}
}
}
pub struct CompletedMarker<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
pos: usize,
kind: NodeKind<SyntaxKind, SyntaxErr>,
}
impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> CompletedMarker<SyntaxKind, SyntaxErr> {
pub fn precede(self, p: &mut Parser<SyntaxKind, SyntaxErr>, name: &str) -> Marker {
let new_pos = p.start(name);
match &mut p.events[self.pos] {
Event::Start { forward_parent, .. } => {
// point forward parent of the node this marker completed to the new node
// will later be used to make the new node a parent of the current node.
*forward_parent = Some(new_pos.pos - self.pos)
}
_ => unreachable!(),
}
new_pos
}
}
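
To see `precede` in action: the `member` grammar in json-pawarser above uses exactly this to wrap a MemberMissingValue error in a MEMBER node after the fact. Schematically (a sketch of the call pattern with `p: &mut Parser<...>` in scope; EXPR and BIN_EXPR are hypothetical SyntaxKind variants):

let m = p.start("lhs");
// ... bump the left-hand side ...
let lhs = m.complete(p, SyntaxKind::EXPR);
// Only now do we discover an operator, so wrap the finished node:
let outer = lhs.precede(p, "bin_expr");
// ... bump the operator and right-hand side ...
outer.complete(p, SyntaxKind::BIN_EXPR);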

View file

@ -0,0 +1,73 @@
use std::{fmt, marker::PhantomData};
use rowan::{GreenNode, GreenNodeData, GreenTokenData, NodeOrToken};
use crate::{SyntaxElement, SyntaxError};
pub struct ParserOutput<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
pub green_node: GreenNode,
pub errors: Vec<SyntaxErr>,
pub(super) _syntax_kind: PhantomData<SyntaxKind>,
}
impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> std::fmt::Debug
for ParserOutput<SyntaxKind, SyntaxErr>
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut errs: Vec<&SyntaxErr> = self.errors.iter().collect();
errs.reverse();
debug_print_output::<SyntaxKind, SyntaxErr>(
NodeOrToken::Node(&self.green_node),
f,
0,
&mut errs,
)
}
}
fn debug_print_output<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
node: NodeOrToken<&GreenNodeData, &GreenTokenData>,
f: &mut std::fmt::Formatter<'_>,
lvl: i32,
errs: &mut Vec<&SyntaxErr>,
) -> std::fmt::Result {
if f.alternate() {
for _ in 0..lvl {
f.write_str(" ")?;
}
}
let maybe_newline = if f.alternate() { "\n" } else { " " };
match node {
NodeOrToken::Node(n) => {
let kind: SyntaxKind = node.kind().into();
if kind != SyntaxKind::SYNTAX_ERROR {
write!(f, "{:?} {{{maybe_newline}", kind)?;
} else {
let err = errs
.pop()
.expect("all error syntax nodes should correspond to an error");
write!(f, "{:?}: {err:?} {{{maybe_newline}", kind)?;
}
for c in n.children() {
debug_print_output::<SyntaxKind, SyntaxErr>(c, f, lvl + 1, errs)?;
}
if f.alternate() {
for _ in 0..lvl {
f.write_str(" ")?;
}
}
write!(f, "}}{maybe_newline}")
}
NodeOrToken::Token(t) => {
write!(
f,
"{:?} {:?};{maybe_newline}",
Into::<SyntaxKind>::into(t.kind()),
t.text()
)
}
}
}
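
Because `maybe_newline` flips between a space and a newline, one Debug impl covers both the compact form the grammar tests assert on and an indented, human-readable form (sketch, output elided):

let out = p.finish();
println!("{out:?}");  // single line: ROOT { OBJECT { BRACE_OPEN "{"; … } }
println!("{out:#?}"); // same tree, one node/token per line, two spaces per level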

View file

@ -0,0 +1,13 @@
[package]
name = "prowocessing"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
image = "0.24.8"
palette = "0.7.4"
[lints]
workspace = true

View file

@ -0,0 +1,2 @@
pub mod enum_based;
pub mod trait_based;

View file

@ -0,0 +1,64 @@
pub enum Instruction {
Uppercase,
Lowercase,
}
pub struct Pipeline {
pipeline: Vec<fn(String) -> String>,
}
impl Pipeline {
pub fn run(&self, val: String) -> String {
let mut current = val;
for instr in &self.pipeline {
current = instr(current);
}
current
}
}
pub struct PipelineBuilder {
pipeline: Vec<Instruction>,
}
impl PipelineBuilder {
pub fn new() -> Self {
Self {
pipeline: Vec::new(),
}
}
#[must_use]
pub fn insert(mut self, instr: Instruction) -> Self {
self.pipeline.push(instr);
self
}
pub fn build(&self) -> Pipeline {
fn uppercase(v: String) -> String {
str::to_uppercase(&v)
}
fn lowercase(v: String) -> String {
str::to_lowercase(&v)
}
let mut res = Vec::new();
for item in &self.pipeline {
res.push(match item {
Instruction::Uppercase => uppercase,
Instruction::Lowercase => lowercase,
});
}
Pipeline { pipeline: res }
}
}
impl Default for PipelineBuilder {
fn default() -> Self {
Self::new()
}
}

View file

@ -0,0 +1,11 @@
//! An experiment for a hyper-modular trait-based architecture.
//!
//! Patterns defining this (or well, which I reference a lot while writing this):
//! - [Command pattern using trait objects](https://rust-unofficial.github.io/patterns/patterns/behavioural/command.html)
//! - [Builder pattern](https://rust-unofficial.github.io/patterns/patterns/creational/builder.html)
pub mod data;
#[macro_use]
pub mod element;
pub mod ops;
pub mod pipeline;

View file

@ -0,0 +1,5 @@
//! Definitions of the data transfer and storage types.
pub mod io;
pub mod raw;

View file

@ -0,0 +1,53 @@
//! Types for element and pipeline IO
use std::{borrow::ToOwned, convert::Into};
use super::raw::Data;
/// Newtype struct with borrowed types for pipeline/element inputs, so that it doesn't force a move or clone
#[derive(PartialEq, Eq, Debug)]
pub struct Inputs<'a>(pub Vec<&'a Data>);
impl<'a> From<Vec<&'a Data>> for Inputs<'a> {
fn from(value: Vec<&'a Data>) -> Self {
Self(value)
}
}
impl<'a, T: Into<&'a Data>> From<T> for Inputs<'a> {
fn from(value: T) -> Self {
Self(vec![value.into()])
}
}
impl<'a> From<&'a Outputs> for Inputs<'a> {
fn from(value: &'a Outputs) -> Self {
Self(value.0.iter().map(Into::into).collect())
}
}
/// Used for pipeline/element outputs
#[derive(PartialEq, Eq, Debug)]
pub struct Outputs(pub Vec<Data>);
impl Outputs {
/// consume self and return inner value(s)
pub fn into_inner(self) -> Vec<Data> {
self.0
}
}
impl From<Vec<Data>> for Outputs {
fn from(value: Vec<Data>) -> Self {
Self(value)
}
}
impl<T: Into<Data>> From<T> for Outputs {
fn from(value: T) -> Self {
Self(vec![value.into()])
}
}
impl From<Inputs<'_>> for Outputs {
fn from(value: Inputs) -> Self {
Self(value.0.into_iter().map(ToOwned::to_owned).collect())
}
}
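
Taken together, these conversions let pipeline stages hand data along without manual wrapping. A small sketch of the round trip, using only the impls above:

let out: Outputs = 42.into();            // any `Into<Data>` value wraps into Outputs
let next_in: Inputs = (&out).into();     // borrow the outputs as the next element's inputs
assert_eq!(Outputs::from(next_in), out); // converting back clones the borrowed data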

View file

@ -0,0 +1,20 @@
//! Dynamic data storage and transfer types for use in [`io`]
// Dynamic data type
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Data {
String(String),
Int(i32),
}
impl From<String> for Data {
fn from(value: String) -> Self {
Self::String(value)
}
}
impl From<i32> for Data {
fn from(value: i32) -> Self {
Self::Int(value)
}
}

View file

@ -0,0 +1,29 @@
//! The trait and type representations
use std::any::TypeId;
use crate::experimental::trait_based::data::io::Inputs;
use super::data::io::Outputs;
pub(crate) trait PipelineElement {
/// return a static runner function pointer to avoid dynamic dispatch during pipeline execution - Types MUST match the signature
fn runner(&self) -> fn(&Inputs) -> Outputs;
/// return the signature of the element
fn signature(&self) -> ElementSignature;
}
/// Type signature for an element used for static checking
pub(crate) struct ElementSignature {
pub inputs: Vec<TypeId>,
pub outputs: Vec<TypeId>,
}
macro_rules! signature {
($($inputs:ty),+ => $($outputs:ty),+) => (
ElementSignature {
inputs: vec![$(std::any::TypeId::of::<$inputs>(), )+],
outputs: vec![$(std::any::TypeId::of::<$outputs>(), )+]
}
)
}
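
For example, `signature!(i32, i32 => i32)` — the signature `Add` uses below — expands to:

ElementSignature {
    inputs: vec![std::any::TypeId::of::<i32>(), std::any::TypeId::of::<i32>()],
    outputs: vec![std::any::TypeId::of::<i32>()],
}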

View file

@ -0,0 +1,7 @@
mod num;
mod str;
pub mod prelude {
pub(crate) use super::num::*;
pub(crate) use super::str::*;
}

View file

@ -0,0 +1,62 @@
//! Operations on numeric data
use core::panic;
use std::any::TypeId;
use crate::experimental::trait_based::{
data::{
io::{Inputs, Outputs},
raw::Data,
},
element::{ElementSignature, PipelineElement},
};
/// Addition
pub struct Add(pub i32);
impl PipelineElement for Add {
fn runner(&self) -> fn(&Inputs) -> Outputs {
|input| {
let [Data::Int(i0), Data::Int(i1), ..] = input.0[..] else {
panic!("Invalid data passed")
};
(i0 + i1).into()
}
}
fn signature(&self) -> ElementSignature {
signature!(i32, i32 => i32)
}
}
/// Subtraction
pub struct Subtract(pub i32);
impl PipelineElement for Subtract {
fn runner(&self) -> fn(&Inputs) -> Outputs {
|input| {
let [Data::Int(i0), Data::Int(i1), ..] = input.0[..] else {
panic!("Invalid data passed")
};
(i0 - i1).into()
}
}
fn signature(&self) -> ElementSignature {
signature!(i32, i32 => i32)
}
}
/// Turn input to string
pub struct Stringify;
impl PipelineElement for Stringify {
fn runner(&self) -> fn(&Inputs) -> Outputs {
|input| {
let [Data::Int(int), ..] = input.0[..] else {
panic!("Invalid data passed")
};
int.to_string().into()
}
}
fn signature(&self) -> ElementSignature {
signature!(i32 => String)
}
}

View file

@ -0,0 +1,59 @@
//! Operation on String/text data
use crate::experimental::trait_based::{
data::{
io::{Inputs, Outputs},
raw::Data,
},
element::{ElementSignature, PipelineElement},
};
/// Concatenate the inputs
pub struct Concatenate(pub String);
impl PipelineElement for Concatenate {
fn runner(&self) -> fn(&Inputs) -> Outputs {
|input| {
let [Data::String(s0), Data::String(s1), ..] = input.0[..] else {
panic!("Invalid data passed")
};
format!("{s0}{s1}").into()
}
}
fn signature(&self) -> ElementSignature {
signature!(String, String => String)
}
}
/// Turn input text to uppercase
pub struct Upper;
impl PipelineElement for Upper {
fn runner(&self) -> fn(&Inputs) -> Outputs {
|input| {
let [Data::String(s), ..] = input.0[..] else {
panic!("Invalid data passed")
};
s.to_uppercase().into()
}
}
fn signature(&self) -> ElementSignature {
signature!(String => String)
}
}
/// Turn input text to lowercase
pub struct Lower;
impl PipelineElement for Lower {
fn runner(&self) -> fn(&Inputs) -> Outputs {
|input| {
let [Data::String(s), ..] = input.0[..] else {
panic!("Invalid data passed")
};
s.to_lowercase().into()
}
}
fn signature(&self) -> ElementSignature {
signature!(String => String)
}
}

View file

@ -0,0 +1,107 @@
use super::data::io::{Inputs, Outputs};
use super::element::PipelineElement;
use super::ops::prelude::*;
/// Builder for the pipelines that are actually run
///
/// TODO:
/// - Bind additional inputs if an instruction has more than one and is passed without any additional
/// - allow binding to pointers to other pipelines?
/// - allow referencing earlier data
pub struct PipelineBuilder {
elements: Vec<Box<dyn PipelineElement>>,
}
impl PipelineBuilder {
/// Create new, empty builder
pub fn new() -> Self {
Self {
elements: Vec::new(),
}
}
/// Insert element into pipeline
fn insert<T: PipelineElement + 'static>(mut self, el: T) -> Self {
if let Some(previous_item) = self.elements.last() {
assert_eq!(
previous_item.signature().outputs[0],
el.signature().inputs[0]
);
}
self.elements.push(Box::new(el));
self
}
/// insert string concatenation element
#[must_use]
pub fn concatenate(self, sec: String) -> Self {
self.insert(Concatenate(sec))
}
/// insert string uppercase element
#[must_use]
pub fn upper(self) -> Self {
self.insert(Upper)
}
/// insert string lowercase element
#[must_use]
pub fn lower(self) -> Self {
self.insert(Lower)
}
/// insert numeric addition element
#[must_use]
#[allow(
clippy::should_implement_trait,
reason = "is not equivalent to addition"
)]
pub fn add(self, sec: i32) -> Self {
self.insert(Add(sec))
}
/// insert numeric subtraction element
#[must_use]
pub fn subtract(self, sec: i32) -> Self {
self.insert(Subtract(sec))
}
/// insert stringify element
#[must_use]
pub fn stringify(self) -> Self {
self.insert(Stringify)
}
/// Build the pipeline. Doesn't check again - `insert` should verify correctness.
pub fn build(&self) -> Pipeline {
let mut r = Vec::new();
self.elements.iter().for_each(|el| r.push(el.runner()));
Pipeline { runners: r }
}
}
impl Default for PipelineBuilder {
fn default() -> Self {
Self::new()
}
}
/// Runnable pipeline - at the core of this library
pub struct Pipeline {
runners: Vec<fn(&Inputs) -> Outputs>,
}
impl Pipeline {
/// run the pipeline
pub fn run(&self, inputs: Inputs) -> Outputs {
let mut out: Outputs = inputs.into();
for runner in &self.runners {
out = runner(&(&out).into());
}
out
}
}
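
Since `insert` compares the previous element's first output type with the next element's first input type, mismatched chains fail while the builder is assembled rather than at run time. For example, this sketch panics as soon as `.add(1)` is inserted, because `upper` outputs a String while `add` expects an i32:

let _pipe = PipelineBuilder::new()
    .upper()   // outputs String
    .add(1)    // expects i32: the assert_eq! in `insert` panics here
    .build();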

View file

@ -0,0 +1,40 @@
//! # This is the image processing library for iOwO
//!
//! One of the design goals for this library, however, is to be a simple, generic image processing library.
//! For now, it's just indev... lets see what comes of it!
#![feature(lint_reasons)]
/// just some experiments, to test whether the architecture i want is even possible (or how to do it). probably temporary.
/// Gonna first try string processing...
pub mod experimental;
#[cfg(test)]
mod tests {
use crate::experimental::{
enum_based,
trait_based::{self, data::io::Outputs},
};
#[test]
fn test_enums() {
let builder = enum_based::PipelineBuilder::new().insert(enum_based::Instruction::Uppercase);
let upr = builder.build();
let upr_lowr = builder.insert(enum_based::Instruction::Lowercase).build();
assert_eq!(upr.run(String::from("Test")), String::from("TEST"));
assert_eq!(upr_lowr.run(String::from("Test")), String::from("test"));
}
#[test]
fn add() {
let pipe = trait_based::pipeline::PipelineBuilder::new()
.add(0)
.stringify()
.build();
assert_eq!(
pipe.run(vec![&2.into(), &3.into()].into()),
Outputs(vec![String::from("5").into()])
);
}
}

View file

@ -3,12 +3,54 @@
"cachix": {
"inputs": {
"devenv": "devenv_2",
"flake-compat": "flake-compat_2",
"flake-compat": [
"devenv",
"flake-compat"
],
"git-hooks": [
"devenv",
"pre-commit-hooks"
],
"nixpkgs": [
"devenv",
"nixpkgs"
]
},
"locked": {
"lastModified": 1726520618,
"narHash": "sha256-jOsaBmJ/EtX5t/vbylCdS7pWYcKGmWOKg4QKUzKr6dA=",
"owner": "cachix",
"repo": "cachix",
"rev": "695525f9086542dfb09fde0871dbf4174abbf634",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "cachix",
"type": "github"
}
},
"cachix_2": {
"inputs": {
"devenv": "devenv_3",
"flake-compat": [
"devenv",
"cachix",
"devenv",
"flake-compat"
],
"pre-commit-hooks": "pre-commit-hooks"
"nixpkgs": [
"devenv",
"cachix",
"devenv",
"nixpkgs"
],
"pre-commit-hooks": [
"devenv",
"cachix",
"devenv",
"pre-commit-hooks"
]
},
"locked": {
"lastModified": 1712055811,
@ -27,17 +69,17 @@
"devenv": {
"inputs": {
"cachix": "cachix",
"flake-compat": "flake-compat_4",
"nix": "nix_2",
"nixpkgs": "nixpkgs_2",
"flake-compat": "flake-compat_2",
"nix": "nix_3",
"nixpkgs": "nixpkgs_3",
"pre-commit-hooks": "pre-commit-hooks_2"
},
"locked": {
"lastModified": 1712925466,
"narHash": "sha256-MJ6VxGNu/ftbn8SErJjBz80FUNXkZfcObHg/JP7wwAc=",
"lastModified": 1729445229,
"narHash": "sha256-3vhSEs2ufSvv2Oct8G9CWEPFI57c4NAZ2wR2accHELM=",
"owner": "cachix",
"repo": "devenv",
"rev": "1af93652caf48bfeef6ba7d1cf59fc66e506e5c2",
"rev": "006016cf4191c34c17cfdb6669e0690e24302ac0",
"type": "github"
},
"original": {
@ -48,15 +90,53 @@
},
"devenv_2": {
"inputs": {
"cachix": "cachix_2",
"flake-compat": [
"devenv",
"cachix",
"flake-compat"
],
"nix": "nix_2",
"nixpkgs": [
"devenv",
"cachix",
"nixpkgs"
],
"pre-commit-hooks": [
"devenv",
"cachix",
"git-hooks"
]
},
"locked": {
"lastModified": 1723156315,
"narHash": "sha256-0JrfahRMJ37Rf1i0iOOn+8Z4CLvbcGNwa2ChOAVrp/8=",
"owner": "cachix",
"repo": "devenv",
"rev": "ff5eb4f2accbcda963af67f1a1159e3f6c7f5f91",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "devenv",
"type": "github"
}
},
"devenv_3": {
"inputs": {
"flake-compat": [
"devenv",
"cachix",
"devenv",
"cachix",
"flake-compat"
],
"nix": "nix",
"nixpkgs": "nixpkgs",
"poetry2nix": "poetry2nix",
"pre-commit-hooks": [
"devenv",
"cachix",
"devenv",
"cachix",
"pre-commit-hooks"
@ -79,15 +159,15 @@
},
"fenix": {
"inputs": {
"nixpkgs": "nixpkgs_3",
"nixpkgs": "nixpkgs_4",
"rust-analyzer-src": "rust-analyzer-src"
},
"locked": {
"lastModified": 1712903033,
"narHash": "sha256-KcvsEm0h1mIwBHFAzWFBjGihnbf2fxpAaXOdVbUfAI4=",
"lastModified": 1729492502,
"narHash": "sha256-d6L4bBlUWr4sHC+eRXo+4acFPEFXKmqHpM/BfQ5gQQw=",
"owner": "nix-community",
"repo": "fenix",
"rev": "c739f83545e625227f4d0af7fe2a71e69931fa4c",
"rev": "4002a1ec3486b855f341d2b864ba06b61e73af28",
"type": "github"
},
"original": {
@ -128,51 +208,25 @@
"type": "github"
}
},
"flake-compat_3": {
"flake": false,
"flake-parts": {
"inputs": {
"nixpkgs-lib": [
"devenv",
"nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"lastModified": 1712014858,
"narHash": "sha256-sB4SWl2lX95bExY2gMFG5HIzvva5AVMJd4Igm+GpZNw=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "9126214d0a59633752a136528f5f3b9aa8565b7d",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-compat_4": {
"flake": false,
"locked": {
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-compat_5": {
"flake": false,
"locked": {
"lastModified": 1673956053,
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
@ -195,33 +249,12 @@
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"lastModified": 1667395993,
"narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_3": {
"inputs": {
"systems": "systems_3"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
"type": "github"
},
"original": {
@ -231,29 +264,6 @@
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
"devenv",
"cachix",
"pre-commit-hooks",
"nixpkgs"
]
},
"locked": {
"lastModified": 1703887061,
"narHash": "sha256-gGPa9qWNc6eCXT/+Z5/zMkyYOuRZqeFZBDbopNZQkuY=",
"owner": "hercules-ci",
"repo": "gitignore.nix",
"rev": "43e1aa1308018f37118e34d3a9cb4f5e75dc11d5",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "gitignore.nix",
"type": "github"
}
},
"gitignore_2": {
"inputs": {
"nixpkgs": [
"devenv",
@ -275,10 +285,28 @@
"type": "github"
}
},
"libgit2": {
"flake": false,
"locked": {
"lastModified": 1697646580,
"narHash": "sha256-oX4Z3S9WtJlwvj0uH9HlYcWv+x1hqp8mhXl7HsLu2f0=",
"owner": "libgit2",
"repo": "libgit2",
"rev": "45fd9ed7ae1a9b74b957ef4f337bc3c8b3df01b5",
"type": "github"
},
"original": {
"owner": "libgit2",
"repo": "libgit2",
"type": "github"
}
},
"nix": {
"inputs": {
"flake-compat": "flake-compat",
"nixpkgs": [
"devenv",
"cachix",
"devenv",
"cachix",
"devenv",
@ -287,11 +315,11 @@
"nixpkgs-regression": "nixpkgs-regression"
},
"locked": {
"lastModified": 1708577783,
"narHash": "sha256-92xq7eXlxIT5zFNccLpjiP7sdQqQI30Gyui2p/PfKZM=",
"lastModified": 1712911606,
"narHash": "sha256-BGvBhepCufsjcUkXnEEXhEVjwdJAwPglCC2+bInc794=",
"owner": "domenkozar",
"repo": "nix",
"rev": "ecd0af0c1f56de32cbad14daa1d82a132bf298f8",
"rev": "b24a9318ea3f3600c1e24b4a00691ee912d4de12",
"type": "github"
},
"original": {
@ -304,6 +332,8 @@
"nix-github-actions": {
"inputs": {
"nixpkgs": [
"devenv",
"cachix",
"devenv",
"cachix",
"devenv",
@ -327,8 +357,15 @@
},
"nix_2": {
"inputs": {
"flake-compat": "flake-compat_5",
"flake-compat": [
"devenv",
"cachix",
"devenv",
"flake-compat"
],
"nixpkgs": [
"devenv",
"cachix",
"devenv",
"nixpkgs"
],
@ -349,6 +386,34 @@
"type": "github"
}
},
"nix_3": {
"inputs": {
"flake-compat": [
"devenv",
"flake-compat"
],
"flake-parts": "flake-parts",
"libgit2": "libgit2",
"nixpkgs": "nixpkgs_2",
"nixpkgs-23-11": "nixpkgs-23-11",
"nixpkgs-regression": "nixpkgs-regression_3",
"pre-commit-hooks": "pre-commit-hooks"
},
"locked": {
"lastModified": 1727438425,
"narHash": "sha256-X8ES7I1cfNhR9oKp06F6ir4Np70WGZU5sfCOuNBEwMg=",
"owner": "domenkozar",
"repo": "nix",
"rev": "f6c5ae4c1b2e411e6b1e6a8181cc84363d6a7546",
"type": "github"
},
"original": {
"owner": "domenkozar",
"ref": "devenv-2.24",
"repo": "nix",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1692808169,
@ -365,6 +430,22 @@
"type": "github"
}
},
"nixpkgs-23-11": {
"locked": {
"lastModified": 1717159533,
"narHash": "sha256-oamiKNfr2MS6yH64rUn99mIZjc45nGJlj9eGth/3Xuw=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "a62e6edd6d5e1fa0329b8653c801147986f8d446",
"type": "github"
},
"original": {
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "a62e6edd6d5e1fa0329b8653c801147986f8d446",
"type": "github"
}
},
"nixpkgs-regression": {
"locked": {
"lastModified": 1643052045,
@ -397,45 +478,61 @@
"type": "github"
}
},
"nixpkgs-stable": {
"nixpkgs-regression_3": {
"locked": {
"lastModified": 1704874635,
"narHash": "sha256-YWuCrtsty5vVZvu+7BchAxmcYzTMfolSPP5io8+WYCg=",
"lastModified": 1643052045,
"narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "3dc440faeee9e889fe2d1b4d25ad0f430d449356",
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.11",
"repo": "nixpkgs",
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
"type": "github"
}
},
"nixpkgs-stable_2": {
"nixpkgs-stable": {
"locked": {
"lastModified": 1710695816,
"narHash": "sha256-3Eh7fhEID17pv9ZxrPwCLfqXnYP006RKzSs0JptsN84=",
"lastModified": 1720386169,
"narHash": "sha256-NGKVY4PjzwAa4upkGtAMz1npHGoRzWotlSnVlqI40mo=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "614b4613980a522ba49f0d194531beddbb7220d3",
"rev": "194846768975b7ad2c4988bdb82572c00222c0d7",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.11",
"ref": "nixos-24.05",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1710796454,
"narHash": "sha256-lQlICw60RhH8sHTDD/tJiiJrlAfNn8FDI9c+7G2F0SE=",
"lastModified": 1717432640,
"narHash": "sha256-+f9c4/ZX5MWDOuB1rKoWj+lBNm0z0rs4CK47HBLxy1o=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "88269ab3044128b7c2f4c7d68448b2fb50456870",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "release-24.05",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_3": {
"locked": {
"lastModified": 1716977621,
"narHash": "sha256-Q1UQzYcMJH4RscmpTkjlgqQDX5yi1tZL0O345Ri6vXQ=",
"owner": "cachix",
"repo": "devenv-nixpkgs",
"rev": "06fb0f1c643aee3ae6838dda3b37ef0abc3c763b",
"rev": "4267e705586473d3e5c8d50299e71503f16a6fb6",
"type": "github"
},
"original": {
@ -445,13 +542,13 @@
"type": "github"
}
},
"nixpkgs_3": {
"nixpkgs_4": {
"locked": {
"lastModified": 1712791164,
"narHash": "sha256-3sbWO1mbpWsLepZGbWaMovSO7ndZeFqDSdX0hZ9nVyw=",
"lastModified": 1729256560,
"narHash": "sha256-/uilDXvCIEs3C9l73JTACm4quuHUsIHcns1c+cHUJwA=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "1042fd8b148a9105f3c0aca3a6177fd1d9360ba5",
"rev": "4c2fcb090b1f3e5b47eaa7bd33913b574a11e0a0",
"type": "github"
},
"original": {
@ -461,13 +558,13 @@
"type": "github"
}
},
"nixpkgs_4": {
"nixpkgs_5": {
"locked": {
"lastModified": 1712791164,
"narHash": "sha256-3sbWO1mbpWsLepZGbWaMovSO7ndZeFqDSdX0hZ9nVyw=",
"lastModified": 1729256560,
"narHash": "sha256-/uilDXvCIEs3C9l73JTACm4quuHUsIHcns1c+cHUJwA=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "1042fd8b148a9105f3c0aca3a6177fd1d9360ba5",
"rev": "4c2fcb090b1f3e5b47eaa7bd33913b574a11e0a0",
"type": "github"
},
"original": {
@ -482,6 +579,8 @@
"flake-utils": "flake-utils",
"nix-github-actions": "nix-github-actions",
"nixpkgs": [
"devenv",
"cachix",
"devenv",
"cachix",
"devenv",
@ -503,44 +602,26 @@
}
},
"pre-commit-hooks": {
"inputs": {
"flake-compat": "flake-compat_3",
"flake-utils": "flake-utils_2",
"gitignore": "gitignore",
"nixpkgs": [
"devenv",
"cachix",
"nixpkgs"
],
"nixpkgs-stable": "nixpkgs-stable"
},
"locked": {
"lastModified": 1708018599,
"narHash": "sha256-M+Ng6+SePmA8g06CmUZWi1AjG2tFBX9WCXElBHEKnyM=",
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"rev": "5df5a70ad7575f6601d91f0efec95dd9bc619431",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"type": "github"
}
},
"pre-commit-hooks_2": {
"inputs": {
"flake-compat": [
"devenv",
"flake-compat"
"nix"
],
"flake-utils": "flake-utils_2",
"gitignore": [
"devenv",
"nix"
],
"flake-utils": "flake-utils_3",
"gitignore": "gitignore_2",
"nixpkgs": [
"devenv",
"nix",
"nixpkgs"
],
"nixpkgs-stable": "nixpkgs-stable_2"
"nixpkgs-stable": [
"devenv",
"nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1712897695,
@ -556,22 +637,49 @@
"type": "github"
}
},
"pre-commit-hooks_2": {
"inputs": {
"flake-compat": [
"devenv",
"flake-compat"
],
"gitignore": "gitignore",
"nixpkgs": [
"devenv",
"nixpkgs"
],
"nixpkgs-stable": "nixpkgs-stable"
},
"locked": {
"lastModified": 1726745158,
"narHash": "sha256-D5AegvGoEjt4rkKedmxlSEmC+nNLMBPWFxvmYnVLhjk=",
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"rev": "4e743a6920eab45e8ba0fbe49dc459f1423a4b74",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"type": "github"
}
},
"root": {
"inputs": {
"devenv": "devenv",
"fenix": "fenix",
"nixpkgs": "nixpkgs_4",
"systems": "systems_4"
"nixpkgs": "nixpkgs_5",
"systems": "systems_2"
}
},
"rust-analyzer-src": {
"flake": false,
"locked": {
"lastModified": 1712818880,
"narHash": "sha256-VDxsvgj/bNypHq48tQWtc3VRbWvzlFjzKf9ZZIVO10Y=",
"lastModified": 1729454508,
"narHash": "sha256-1W5B/CnLgdC03iIFG0wtawO1+dGDWDpc84PeOHo2ecU=",
"owner": "rust-lang",
"repo": "rust-analyzer",
"rev": "657b33b0cb9bd49085202e91ad5b4676532c9140",
"rev": "9323b5385863739d1c113f02e4cf3f2777c09977",
"type": "github"
},
"original": {
@ -610,36 +718,6 @@
"repo": "default",
"type": "github"
}
},
"systems_3": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_4": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",