init i guess

parent c90532830e
commit 01de2f385a
21 changed files with 1315 additions and 0 deletions
12 crates/lopal_core/Cargo.toml Normal file
@@ -0,0 +1,12 @@
[package]
name = "lopal_core"
version = "0.1.0"
edition = "2021"

[dependencies]
rowan = "0.15.15"
drop_bomb = "0.1.5"
enumset = "1.1.3"

[lints]
workspace = true

8 crates/lopal_core/src/lib.rs Normal file
@@ -0,0 +1,8 @@
#![feature(iter_collect_into)]
pub mod parser;

pub use crate::parser::{
    error::SyntaxError,
    marker::{CompletedMarker, Marker},
    Parser, SyntaxElement,
};

253 crates/lopal_core/src/parser.rs Normal file
@@ -0,0 +1,253 @@
use std::{cell::Cell, fmt, marker::PhantomData, mem};

use enumset::{EnumSet, EnumSetType};
use rowan::{GreenNode, GreenNodeBuilder};

use crate::parser::event::NodeKind;

use self::{event::Event, input::Input, marker::Marker};
pub use self::{error::SyntaxError, output::ParserOutput};

pub mod error;
mod event;
mod input;
pub mod marker;
pub mod output;

/// This is used to define some required SyntaxKinds, like an EOF token or an error token.
pub trait SyntaxElement
where
    Self: EnumSetType
        + Into<rowan::SyntaxKind>
        + From<rowan::SyntaxKind>
        + fmt::Debug
        + Clone
        + PartialEq
        + Eq,
{
    /// EOF value. This will be used by the rest of the parser library to represent an EOF.
    const SYNTAX_EOF: Self;
    /// Error value. This will be used as a placeholder for associated respective errors.
    const SYNTAX_ERROR: Self;
    const SYNTAX_ROOT: Self;
}

pub struct Parser<'src, SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
    input: Input<'src, SyntaxKind>,
    pos: usize,
    events: Vec<Event<SyntaxKind, SyntaxErr>>,
    step_limit: u32,
    steps: Cell<u32>,
}

impl<'src, SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> Parser<'src, SyntaxKind, SyntaxErr> {
    /// Eat all meaningless tokens at the end of the file.
    pub fn eat_succeeding_meaningless(&mut self) {
        self.push_ev(Event::Eat {
            count: self.input.meaningless_tail_len(),
        });
    }

    /// Get the token at the current position of the parser.
    pub fn current(&self) -> SyntaxKind {
        self.step();
        self.input.kind(self.pos)
    }

    pub fn start(&mut self, name: &str) -> Marker {
        let pos = self.events.len();
        self.push_ev(Event::tombstone());
        Marker::new(pos, name)
    }

    /// Eat the next token if it's of kind `kind` and return `true`.
    /// Otherwise, return `false`.
    pub fn eat(&mut self, kind: SyntaxKind) -> bool {
        if !self.at(kind) {
            return false;
        }

        self.do_bump();
        true
    }

    pub fn do_bump(&mut self) {
        self.push_ev(Event::Eat {
            count: self.input.preceding_meaningless(self.pos),
        });
        self.pos += 1;
    }

    /// Check if the token at the current parser position is of `kind`.
    pub fn at(&self, kind: SyntaxKind) -> bool {
        self.nth_at(0, kind)
    }

    /// Check if the token that is `n` ahead is of `kind`.
    pub fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool {
        self.nth(n) == kind
    }

    pub fn nth(&self, n: usize) -> SyntaxKind {
        self.step();
        self.input.kind(self.pos + n)
    }

    fn push_ev(&mut self, event: Event<SyntaxKind, SyntaxErr>) {
        self.events.push(event);
    }

    fn step(&self) {
        let steps = self.steps.get();
        assert!(steps <= self.step_limit, "the parser seems stuck.");
        self.steps.set(steps + 1);
    }

    pub fn finish(self) -> ParserOutput<SyntaxKind, SyntaxErr> {
        let Self {
            input,
            pos,
            mut events,
            step_limit,
            steps,
        } = self;
        let (mut raw_toks, meaningless_tokens) = input.dissolve();
        let mut builder = GreenNodeBuilder::new();
        // TODO: document what the hell a forward parent is
        let mut fw_parents = Vec::new();
        let mut errors: Vec<SyntaxErr> = Vec::new();
        raw_toks.reverse();

        // always have an implicit root node to avoid [`GreenNodeBuilder::finish()`] panicking due to multiple root elements.
        builder.start_node(SyntaxKind::SYNTAX_ROOT.into());

        for i in 0..events.len() {
            match mem::replace(&mut events[i], Event::tombstone()) {
                Event::Start {
                    kind,
                    forward_parent,
                } => {
                    if kind == NodeKind::Tombstone && forward_parent.is_none() {
                        continue;
                    }

                    // resolving forward parents
                    // temporarily jump around with the parser index and replace them with tombstones
                    fw_parents.push(kind);
                    let mut idx = i;
                    let mut fp = forward_parent;
                    while let Some(fwd) = fp {
                        idx += fwd as usize;
                        fp = match mem::replace(&mut events[idx], Event::tombstone()) {
                            Event::Start {
                                kind,
                                forward_parent,
                            } => {
                                fw_parents.push(kind);
                                forward_parent
                            }
                            _ => unreachable!(),
                        }
                    }

                    // clear semantically meaningless tokens before the new tree node for aesthetic reasons
                    while raw_toks
                        .last()
                        .is_some_and(|v| meaningless_tokens.contains(v.0))
                    {
                        // update first next Eat event
                        match events.iter_mut().find(|ev| matches!(ev, Event::Eat { .. })) {
                            Some(Event::Eat { count }) => *count -= 1,
                            _ => unreachable!(),
                        }

                        // put the whitespace token into the tree
                        let (tok, text) = raw_toks.pop().unwrap();
                        builder.token(tok.into(), text);
                    }

                    // insert forward parents into the tree in correct order
                    for kind in fw_parents.drain(..).rev() {
                        match kind {
                            NodeKind::Syntax(kind) => builder.start_node(kind.into()),
                            NodeKind::Error(err) => {
                                errors.push(err);
                                builder.start_node(SyntaxKind::SYNTAX_ERROR.into())
                            }
                            _ => {}
                        }
                    }
                }
                Event::Finish => builder.finish_node(),
                Event::Eat { count } => (0..count).for_each(|_| {
                    let (tok, text) = raw_toks.pop().unwrap();
                    builder.token(tok.into(), text);
                }),
            }
        }

        // finish SYNTAX_ROOT
        builder.finish_node();

        ParserOutput {
            green_node: builder.finish(),
            errors,
            _syntax_kind: PhantomData::<SyntaxKind>,
        }
    }
}

pub struct ParserBuilder<
    'src,
    SyntaxKind: SyntaxElement,
    // SyntaxErr: SyntaxError,
> {
    raw_toks: Vec<(SyntaxKind, &'src str)>,
    meaningless_token_kinds: EnumSet<SyntaxKind>,
    step_limit: u32,
}

impl<'src, SyntaxKind: SyntaxElement> ParserBuilder<'src, SyntaxKind> {
    pub fn new(raw_toks: Vec<(SyntaxKind, &'src str)>) -> Self {
        Self {
            raw_toks,
            meaningless_token_kinds: EnumSet::new(),
            step_limit: 4096,
        }
    }

    /// Sets the parser step limit.
    /// Defaults to 4096.
    pub fn step_limit(mut self, new: u32) -> Self {
        self.step_limit = new;
        self
    }

    pub fn add_meaningless(mut self, kind: SyntaxKind) -> Self {
        self.meaningless_token_kinds.insert(kind);
        self
    }

    pub fn add_meaningless_many(mut self, kind: Vec<SyntaxKind>) -> Self {
        self.meaningless_token_kinds
            .insert_all(kind.into_iter().collect());
        self
    }

    pub fn build<SyntaxErr: SyntaxError>(self) -> Parser<'src, SyntaxKind, SyntaxErr> {
        let Self {
            raw_toks,
            meaningless_token_kinds,
            step_limit,
        } = self;
        Parser {
            input: Input::new(raw_toks, Some(meaningless_token_kinds)),
            pos: 0,
            events: Vec::new(),
            step_limit,
            steps: Cell::new(0),
        }
    }
}

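The `TODO: document what the hell a forward parent is` above amounts to this: `forward_parent` on a `Start` event is a relative offset to a later `Start` event whose node should become this node's parent, and `CompletedMarker::precede` (marker.rs below) is what writes it. A minimal sketch of the event buffer for a hypothetical `LITERAL` node wrapped in a hypothetical `EXPR` node (neither kind is part of this commit), with `Eat` events omitted:

// grammar code: let lit = m.complete(p, LITERAL); lit.precede(p, "expr").complete(p, EXPR);
//
// [0] Start { kind: Syntax(LITERAL), forward_parent: Some(2) } // offset 2 points at event [2]
// [1] Finish
// [2] Start { kind: Syntax(EXPR), forward_parent: None }
// [3] Finish
//
// finish() follows the offset from event [0], collects LITERAL and EXPR into `fw_parents`,
// and starts the nodes in reverse order (EXPR first), so the green tree ends up as
// EXPR { LITERAL { .. } } even though LITERAL was completed first.
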
9 crates/lopal_core/src/parser/error.rs Normal file
@@ -0,0 +1,9 @@
use std::fmt;

/// A marker trait... for now!
// TODO: constrain that conversion to `NodeKind::Error` is enforced to be possible
pub trait SyntaxError
where
    Self: fmt::Debug + Clone + PartialEq + Eq,
{
}

42 crates/lopal_core/src/parser/event.rs Normal file
@@ -0,0 +1,42 @@
use enumset::EnumSetType;

use super::{error::SyntaxError, SyntaxElement};

pub enum Event<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
    Start {
        kind: NodeKind<SyntaxKind, SyntaxErr>,
        forward_parent: Option<usize>,
    },
    Finish,
    Eat {
        count: usize,
    },
}

impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> Event<SyntaxKind, SyntaxErr> {
    pub fn tombstone() -> Self {
        Self::Start {
            kind: NodeKind::Tombstone,
            forward_parent: None,
        }
    }
}

#[derive(Clone, PartialEq, Eq)]
pub enum NodeKind<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
    Tombstone,
    Syntax(SyntaxKind),
    Error(SyntaxErr),
}

impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> NodeKind<SyntaxKind, SyntaxErr> {
    pub fn is_tombstone(&self) -> bool {
        matches!(self, Self::Tombstone)
    }
    pub fn is_syntax(&self) -> bool {
        matches!(self, Self::Syntax(_))
    }
    pub fn is_error(&self) -> bool {
        matches!(self, Self::Error(_))
    }
}

67 crates/lopal_core/src/parser/input.rs Normal file
@@ -0,0 +1,67 @@
use enumset::{EnumSet, EnumSetType};

use super::SyntaxElement;

pub struct Input<'src, SyntaxKind: SyntaxElement> {
    raw: Vec<(SyntaxKind, &'src str)>,
    // enumset of meaningless tokens
    semantically_meaningless: EnumSet<SyntaxKind>,
    // indices of non-meaningless tokens
    meaningful_toks: Vec<usize>,
}

impl<'src, SyntaxKind: SyntaxElement> Input<'src, SyntaxKind> {
    pub fn new(
        raw_toks: Vec<(SyntaxKind, &'src str)>,
        meaningless: Option<EnumSet<SyntaxKind>>,
    ) -> Self {
        let mut meaningful_toks = Vec::new();

        if let Some(meaningless) = meaningless {
            raw_toks
                .iter()
                .enumerate()
                .filter_map(|(i, tok)| (!meaningless.contains(tok.0)).then_some(i))
                .collect_into(&mut meaningful_toks);
        }

        Self {
            raw: raw_toks,
            semantically_meaningless: meaningless.unwrap_or_default(),
            meaningful_toks,
        }
    }

    pub fn kind(&self, idx: usize) -> SyntaxKind {
        let Some(meaningful_idx) = self.meaningful_toks.get(idx) else {
            return SyntaxKind::SYNTAX_EOF;
        };

        self.raw.get(*meaningful_idx).unwrap().0
    }

    pub fn preceding_meaningless(&self, idx: usize) -> usize {
        assert!(self.meaningful_toks.len() > idx);

        if idx == 0 {
            // maybe should be `self.meaningful_toks[idx]` instead??
            1
        } else {
            self.meaningful_toks[idx] - self.meaningful_toks[idx - 1]
        }
    }

    /// Get the count of meaningless tokens at the end of the file.
    pub fn meaningless_tail_len(&self) -> usize {
        self.raw.len() - (self.meaningful_toks.last().unwrap() + 1)
    }

    pub fn dissolve(self) -> (Vec<(SyntaxKind, &'src str)>, EnumSet<SyntaxKind>) {
        let Self {
            raw,
            semantically_meaningless,
            ..
        } = self;
        (raw, semantically_meaningless)
    }
}

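A small worked example of the index bookkeeping above, for a hypothetical token stream (not part of the commit) where `WHITESPACE` is registered as meaningless; `preceding_meaningless` counts the bumped token itself plus the meaningless tokens directly before it:

// raw:             [NUMBER, WHITESPACE, COMMA, WHITESPACE]
// meaningful_toks: [0, 2]                        (indices of NUMBER and COMMA)
//
// kind(0) == NUMBER, kind(1) == COMMA, kind(2) == SYNTAX_EOF
// preceding_meaningless(0) == 1                  (just the NUMBER)
// preceding_meaningless(1) == 2 - 0 == 2         (the COMMA plus the whitespace before it)
// meaningless_tail_len()   == 4 - (2 + 1) == 1   (the trailing whitespace)
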
97 crates/lopal_core/src/parser/marker.rs Normal file
@@ -0,0 +1,97 @@
use drop_bomb::DropBomb;
use rowan::SyntaxKind;

use super::{
    error::SyntaxError,
    event::{Event, NodeKind},
    Parser, SyntaxElement,
};

pub struct Marker {
    pos: usize,
    bomb: DropBomb,
}

impl Marker {
    pub(super) fn new(pos: usize, name: &str) -> Self {
        Self {
            pos,
            bomb: DropBomb::new(format!("Marker {name} must be completed or abandoned.")),
        }
    }

    fn close_node<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
        mut self,
        p: &mut Parser<SyntaxKind, SyntaxErr>,
        kind: NodeKind<SyntaxKind, SyntaxErr>,
    ) -> CompletedMarker<SyntaxKind, SyntaxErr> {
        self.bomb.defuse();

        match &mut p.events[self.pos] {
            Event::Start { kind: slot, .. } => *slot = kind.clone(),
            _ => unreachable!(),
        }

        p.push_ev(Event::Finish);
        CompletedMarker {
            pos: self.pos,
            kind,
        }
    }

    pub fn complete<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
        self,
        p: &mut Parser<SyntaxKind, SyntaxErr>,
        kind: SyntaxKind,
    ) -> CompletedMarker<SyntaxKind, SyntaxErr> {
        self.close_node(p, NodeKind::Syntax(kind))
    }

    pub fn error<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
        self,
        p: &mut Parser<SyntaxKind, SyntaxErr>,
        kind: SyntaxErr,
    ) -> CompletedMarker<SyntaxKind, SyntaxErr> {
        self.close_node(p, NodeKind::Error(kind))
    }

    pub fn abandon<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
        mut self,
        p: &mut Parser<SyntaxKind, SyntaxErr>,
    ) {
        self.bomb.defuse();

        // clean up empty tombstone event from marker
        if self.pos == p.events.len() - 1 {
            match p.events.pop() {
                Some(Event::Start {
                    kind: NodeKind::Tombstone,
                    forward_parent: None,
                }) => (),
                _ => unreachable!(),
            }
        }
    }
}

pub struct CompletedMarker<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
    pos: usize,
    kind: NodeKind<SyntaxKind, SyntaxErr>,
}

impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> CompletedMarker<SyntaxKind, SyntaxErr> {
    pub fn precede(self, p: &mut Parser<SyntaxKind, SyntaxErr>, name: &str) -> Marker {
        let new_pos = p.start(name);

        match &mut p.events[self.pos] {
            Event::Start { forward_parent, .. } => {
                // point forward parent of the node this marker completed to the new node
                // will later be used to make the new node a parent of the current node.
                *forward_parent = Some(new_pos.pos - self.pos)
            }
            _ => unreachable!(),
        }

        new_pos
    }
}

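Seen from a grammar function, the `Marker` lifecycle looks roughly like this; a minimal sketch using the `Parser`/`CompletedMarker` type aliases and `SyntaxKind`s that lopal_json defines below (the `number_element` helper itself is hypothetical):

fn number_element(p: &mut Parser) -> Option<CompletedMarker> {
    let m = p.start("number_element");       // pushes a tombstone Start event
    if !p.eat(SyntaxKind::NUMBER) {
        m.abandon(p);                        // defuses the drop bomb, discards the tombstone
        return None;
    }
    Some(m.complete(p, SyntaxKind::ELEMENT)) // fills in the Start event's kind, pushes Finish
}

The completed node can still be re-parented afterwards via `precede`, as sketched after parser.rs above.
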
73 crates/lopal_core/src/parser/output.rs Normal file
@@ -0,0 +1,73 @@
use std::{fmt, marker::PhantomData};

use rowan::{GreenNode, GreenNodeData, GreenTokenData, NodeOrToken};

use crate::{SyntaxElement, SyntaxError};

pub struct ParserOutput<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> {
    pub green_node: GreenNode,
    pub errors: Vec<SyntaxErr>,
    pub(super) _syntax_kind: PhantomData<SyntaxKind>,
}

impl<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError> std::fmt::Debug
    for ParserOutput<SyntaxKind, SyntaxErr>
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut errs: Vec<&SyntaxErr> = self.errors.iter().collect();
        errs.reverse();
        debug_print_output::<SyntaxKind, SyntaxErr>(
            NodeOrToken::Node(&self.green_node),
            f,
            0,
            &mut errs,
        )
    }
}

fn debug_print_output<SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>(
    node: NodeOrToken<&GreenNodeData, &GreenTokenData>,
    f: &mut std::fmt::Formatter<'_>,
    lvl: i32,
    errs: &mut Vec<&SyntaxErr>,
) -> std::fmt::Result {
    if f.alternate() {
        for _ in 0..lvl {
            f.write_str(" ")?;
        }
    }
    let maybe_newline = if f.alternate() { "\n" } else { " " };

    match node {
        NodeOrToken::Node(n) => {
            let kind: SyntaxKind = node.kind().into();
            if kind != SyntaxKind::SYNTAX_ERROR {
                write!(f, "{:?} {{{maybe_newline}", kind)?;
            } else {
                let err = errs
                    .pop()
                    .expect("all error syntax nodes should correspond to an error");

                write!(f, "{:?}: {err:?} {{{maybe_newline}", kind)?;
            }
            for c in n.children() {
                debug_print_output::<SyntaxKind, SyntaxErr>(c, f, lvl + 1, errs)?;
            }

            if f.alternate() {
                for _ in 0..lvl {
                    f.write_str(" ")?;
                }
            }
            write!(f, "}}{maybe_newline}")
        }
        NodeOrToken::Token(t) => {
            write!(
                f,
                "{:?} {:?};{maybe_newline}",
                Into::<SyntaxKind>::into(t.kind()),
                t.text()
            )
        }
    }
}

13 crates/lopal_json/Cargo.toml Normal file
@@ -0,0 +1,13 @@
[package]
name = "lopal_json"
version = "0.1.0"
edition = "2021"

[dependencies]
logos = "0.14.2"
enumset = "1.1.3"
rowan = "0.15.15"
lopal_core = { path = "../lopal_core" }

[lints]
workspace = true

78 crates/lopal_json/src/grammar.rs Normal file
@@ -0,0 +1,78 @@
use enumset::{enum_set, EnumSet};
use lopal_core::parser::ParserBuilder;

use crate::{
    syntax_error::SyntaxError,
    syntax_kind::{lex, SyntaxKind},
};

use self::array::array;
use self::object::object;

mod array;
mod object;

pub(crate) type Parser<'src> = lopal_core::Parser<'src, SyntaxKind, SyntaxError>;
pub(crate) type CompletedMarker = lopal_core::CompletedMarker<SyntaxKind, SyntaxError>;

const BASIC_VALUE_TOKENS: EnumSet<SyntaxKind> =
    enum_set!(SyntaxKind::BOOL | SyntaxKind::NULL | SyntaxKind::NUMBER | SyntaxKind::STRING);

pub fn value(p: &mut Parser) -> bool {
    if BASIC_VALUE_TOKENS.contains(p.current()) {
        p.do_bump();
        return true;
    } else {
        object(p).or_else(|| array(p)).is_some()
    }
}

#[cfg(test)]
mod tests {
    use super::{
        test_utils::{check_parser, gen_checks},
        value,
    };

    #[test]
    fn value_lit() {
        gen_checks! {value;
            r#""helo world""# => r#"ROOT { STRING "\"helo world\""; }"#,
            "42" => r#"ROOT { NUMBER "42"; }"#,
            "null" => r#"ROOT { NULL "null"; }"#,
            "true" => r#"ROOT { BOOL "true"; }"#,
            "false" => r#"ROOT { BOOL "false"; }"#
        };
    }
}

#[cfg(test)]
mod test_utils {
    use lopal_core::parser::ParserBuilder;

    use crate::syntax_kind::{lex, SyntaxKind};

    use super::Parser;

    macro_rules! gen_checks {
        ($fn_to_test:ident; $($in:literal => $out:literal),+) => {
            $(crate::grammar::test_utils::check_parser($in, |p| { $fn_to_test(p); }, $out);)+
        }
    }

    pub(super) use gen_checks;

    pub(super) fn check_parser(input: &str, parser_fn: fn(&mut Parser), expected_output: &str) {
        let toks = lex(input);
        let mut p: Parser = ParserBuilder::new(toks)
            .add_meaningless(SyntaxKind::WHITESPACE)
            .add_meaningless(SyntaxKind::NEWLINE)
            .build();

        parser_fn(&mut p);

        let out = p.finish();

        assert_eq!(format!("{out:?}").trim_end(), expected_output);
    }
}

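For reference, each arm of `gen_checks!` expands into a plain `check_parser` call; the first case of `value_lit` above is equivalent to:

crate::grammar::test_utils::check_parser(
    r#""helo world""#,
    |p| { value(p); },
    r#"ROOT { STRING "\"helo world\""; }"#,
);
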
52 crates/lopal_json/src/grammar/array.rs Normal file
@@ -0,0 +1,52 @@
use crate::{syntax_error::SyntaxError, syntax_kind::SyntaxKind};

use super::{value, CompletedMarker, Parser};

pub(super) fn array(p: &mut Parser) -> Option<CompletedMarker> {
    let array_start = p.start("array");

    if !p.eat(SyntaxKind::BRACKET_OPEN) {
        array_start.abandon(p);
        return None;
    }

    let el = p.start("arr_el");
    value(p);
    el.complete(p, SyntaxKind::ELEMENT);

    while p.at(SyntaxKind::COMMA) {
        let potential_trailing_comma = p.start("potential_trailing_comma");

        p.eat(SyntaxKind::COMMA);
        let maybe_el = p.start("arr_el");
        if !value(p) {
            maybe_el.abandon(p);
            potential_trailing_comma.complete(p, SyntaxKind::TRAILING_COMMA);
        } else {
            maybe_el.complete(p, SyntaxKind::ELEMENT);
            potential_trailing_comma.abandon(p);
        }
    }

    Some(if !p.eat(SyntaxKind::BRACKET_CLOSE) {
        array_start.error(p, SyntaxError::UnclosedArray)
    } else {
        array_start.complete(p, SyntaxKind::ARRAY)
    })
}

#[cfg(test)]
mod tests {
    use crate::grammar::{array::array, test_utils::gen_checks};

    #[test]
    fn array_basic() {
        gen_checks! {array;
            r#"[1,2,3]"# => r#"ROOT { ARRAY { BRACKET_OPEN "["; ELEMENT { NUMBER "1"; } COMMA ","; ELEMENT { NUMBER "2"; } COMMA ","; ELEMENT { NUMBER "3"; } BRACKET_CLOSE "]"; } }"#,
            r#"[1,2,]"# => r#"ROOT { ARRAY { BRACKET_OPEN "["; ELEMENT { NUMBER "1"; } COMMA ","; ELEMENT { NUMBER "2"; } TRAILING_COMMA { COMMA ","; } BRACKET_CLOSE "]"; } }"#,
            r#"[1,2"# => r#"ROOT { PARSE_ERR: UnclosedArray { BRACKET_OPEN "["; ELEMENT { NUMBER "1"; } COMMA ","; ELEMENT { NUMBER "2"; } } }"#,
            r#"[1,2,"# => r#"ROOT { PARSE_ERR: UnclosedArray { BRACKET_OPEN "["; ELEMENT { NUMBER "1"; } COMMA ","; ELEMENT { NUMBER "2"; } TRAILING_COMMA { COMMA ","; } } }"#,
            r#"[{"hello":"world""# => r#"ROOT { PARSE_ERR: UnclosedArray { BRACKET_OPEN "["; ELEMENT { PARSE_ERR: UnclosedObject { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"hello\""; } COLON ":"; MEMBER_VALUE { STRING "\"world\""; } } } } } }"#
        }
    }
}

92 crates/lopal_json/src/grammar/object.rs Normal file
@@ -0,0 +1,92 @@
use crate::{grammar::value, syntax_error::SyntaxError, syntax_kind::SyntaxKind};

use super::{CompletedMarker, Parser, BASIC_VALUE_TOKENS};

pub(super) fn object(p: &mut Parser) -> Option<CompletedMarker> {
    let obj_start = p.start("object");

    if !p.eat(SyntaxKind::BRACE_OPEN) {
        obj_start.abandon(p);
        return None;
    }

    member(p);
    while p.at(SyntaxKind::COMMA) {
        // not always an error, later configurable
        let potential_trailing_comma = p.start("potential_trailing_comma");
        p.eat(SyntaxKind::COMMA);

        if member(p).is_none() {
            potential_trailing_comma.complete(p, SyntaxKind::TRAILING_COMMA);
        } else {
            potential_trailing_comma.abandon(p);
        }
    }

    Some(if p.eat(SyntaxKind::BRACE_CLOSE) {
        obj_start.complete(p, SyntaxKind::OBJECT)
    } else {
        obj_start.error(p, SyntaxError::UnclosedObject)
    })
}

fn member(p: &mut Parser) -> Option<CompletedMarker> {
    let member_start = p.start("member");

    if p.at(SyntaxKind::BRACE_CLOSE) {
        member_start.abandon(p);
        return None;
    } else if p.at(SyntaxKind::STRING) {
        let member_name_start = p.start("member_name");
        p.eat(SyntaxKind::STRING);
        member_name_start.complete(p, SyntaxKind::MEMBER_NAME);
    } else {
        return todo!("handle other tokens: {:?}", p.current());
    }

    if !p.eat(SyntaxKind::COLON) {
        todo!("handle wrong tokens")
    }

    let member_value_start = p.start("member_value_start");
    if value(p) {
        member_value_start.complete(p, SyntaxKind::MEMBER_VALUE);
        Some(member_start.complete(p, SyntaxKind::MEMBER))
    } else {
        member_value_start.abandon(p);
        let e = member_start.error(p, SyntaxError::MemberMissingValue);
        Some(
            e.precede(p, "member but failed already")
                .complete(p, SyntaxKind::MEMBER),
        )
    }
}

#[cfg(test)]
mod tests {
    use crate::grammar::{
        object::{member, object},
        test_utils::gen_checks,
    };

    #[test]
    fn object_basic() {
        gen_checks! {object;
            r#"{"a": "b"}"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { STRING "\"b\""; } } BRACE_CLOSE "}"; } }"#,
            r#"{"a": 42}"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { NUMBER "42"; } } BRACE_CLOSE "}"; } }"#,
            r#"{"a": "b""# => r#"ROOT { PARSE_ERR: UnclosedObject { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { STRING "\"b\""; } } } }"#,
            r#"{"a": }"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { PARSE_ERR: MemberMissingValue { MEMBER_NAME { STRING "\"a\""; } COLON ":"; } } WHITESPACE " "; BRACE_CLOSE "}"; } }"#,
            r#"{"a":"# => r#"ROOT { PARSE_ERR: UnclosedObject { BRACE_OPEN "{"; MEMBER { PARSE_ERR: MemberMissingValue { MEMBER_NAME { STRING "\"a\""; } COLON ":"; } } } }"#,
            r#"{"a":true,}"# => r#"ROOT { OBJECT { BRACE_OPEN "{"; MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; MEMBER_VALUE { BOOL "true"; } } TRAILING_COMMA { COMMA ","; } BRACE_CLOSE "}"; } }"#
        }
    }

    #[test]
    fn member_basic() {
        gen_checks! {member;
            r#""a": "b""# => r#"ROOT { MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { STRING "\"b\""; } } }"#,
            r#""a": 42"# => r#"ROOT { MEMBER { MEMBER_NAME { STRING "\"a\""; } COLON ":"; WHITESPACE " "; MEMBER_VALUE { NUMBER "42"; } } }"#,
            r#""a":"# => r#"ROOT { MEMBER { PARSE_ERR: MemberMissingValue { MEMBER_NAME { STRING "\"a\""; } COLON ":"; } } }"#
        }
    }
}

3 crates/lopal_json/src/lib.rs Normal file
@@ -0,0 +1,3 @@
mod grammar;
mod syntax_error;
mod syntax_kind;

11 crates/lopal_json/src/syntax_error.rs Normal file
@@ -0,0 +1,11 @@
use crate::syntax_kind::SyntaxKind;

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SyntaxError {
    UnclosedObject,
    UnclosedArray,
    DisallowedKeyType(SyntaxKind),
    MemberMissingValue,
    UnexpectedTrailingComma,
}
impl lopal_core::parser::SyntaxError for SyntaxError {}

117 crates/lopal_json/src/syntax_kind.rs Normal file
@@ -0,0 +1,117 @@
use logos::Logos;

pub fn lex(src: &str) -> Vec<(SyntaxKind, &str)> {
    let mut lex = SyntaxKind::lexer(src);
    let mut r = Vec::new();

    while let Some(tok_res) = lex.next() {
        r.push((tok_res.unwrap_or(SyntaxKind::LEX_ERR), lex.slice()))
    }

    r
}

#[derive(enumset::EnumSetType, Debug, Logos, PartialEq, Eq, Clone, Copy, Hash)]
#[repr(u16)]
#[enumset(no_super_impls)]
#[allow(non_camel_case_types)]
pub enum SyntaxKind {
    OBJECT,
    MEMBER,
    MEMBER_NAME,
    MEMBER_VALUE,

    ARRAY,
    ELEMENT,

    // SyntaxKinds for future json5/etc support
    TRAILING_COMMA,

    // Tokens
    // Regexes adapted from [the logos handbook](https://logos.maciej.codes/examples/json_borrowed.html)
    #[token("true")]
    #[token("false")]
    BOOL,
    #[token("{")]
    BRACE_OPEN,
    #[token("}")]
    BRACE_CLOSE,
    #[token("[")]
    BRACKET_OPEN,
    #[token("]")]
    BRACKET_CLOSE,
    #[token(":")]
    COLON,
    #[token(",")]
    COMMA,
    #[token("null")]
    NULL,
    #[regex(r"-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?")]
    NUMBER,
    #[regex(r#""([^"\\]|\\["\\bnfrt]|u[a-fA-F0-9]{4})*""#)]
    STRING,

    // Whitespace tokens
    #[regex("[ \\t\\f]+")]
    WHITESPACE,
    #[token("\n")]
    NEWLINE,

    // Error SyntaxKinds
    LEX_ERR,
    PARSE_ERR,

    // Meta SyntaxKinds
    ROOT,
    EOF,
}

impl lopal_core::parser::SyntaxElement for SyntaxKind {
    const SYNTAX_EOF: Self = Self::EOF;

    const SYNTAX_ERROR: Self = Self::PARSE_ERR;
    const SYNTAX_ROOT: Self = Self::ROOT;
}

impl From<SyntaxKind> for rowan::SyntaxKind {
    fn from(kind: SyntaxKind) -> Self {
        Self(kind as u16)
    }
}

impl From<rowan::SyntaxKind> for SyntaxKind {
    fn from(raw: rowan::SyntaxKind) -> Self {
        assert!(raw.0 <= SyntaxKind::EOF as u16);
        #[allow(unsafe_code, reason = "The transmute is necessary here")]
        unsafe {
            std::mem::transmute::<u16, SyntaxKind>(raw.0)
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::syntax_kind::{lex, SyntaxKind};

    #[test]
    fn simple_object() {
        const TEST_DATA: &str = r#"{"hello_world": "meow", "some_num":7.42}"#;

        assert_eq!(
            dbg!(lex(TEST_DATA)),
            vec![
                (SyntaxKind::BRACE_OPEN, "{"),
                (SyntaxKind::STRING, "\"hello_world\""),
                (SyntaxKind::COLON, ":"),
                (SyntaxKind::WHITESPACE, " "),
                (SyntaxKind::STRING, "\"meow\""),
                (SyntaxKind::COMMA, ","),
                (SyntaxKind::WHITESPACE, " "),
                (SyntaxKind::STRING, "\"some_num\""),
                (SyntaxKind::COLON, ":"),
                (SyntaxKind::NUMBER, "7.42"),
                (SyntaxKind::BRACE_CLOSE, "}")
            ]
        );
    }
}

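Putting the two crates together: since `grammar` and `syntax_kind` are private modules, this only works from inside lopal_json, but a hypothetical helper mirroring `check_parser` could drive the whole pipeline like this:

use lopal_core::parser::ParserBuilder;

use crate::{
    grammar::{self, Parser},
    syntax_kind::{lex, SyntaxKind},
};

fn parse_value(src: &str) -> String {
    // lex with logos into (kind, text) pairs, then hand them to the parser
    let toks = lex(src);
    let mut p: Parser = ParserBuilder::new(toks)
        .add_meaningless(SyntaxKind::WHITESPACE)
        .add_meaningless(SyntaxKind::NEWLINE)
        .build();

    grammar::value(&mut p);         // parse a single JSON value
    p.eat_succeeding_meaningless(); // pull trailing whitespace into the tree

    // ParserOutput's Debug impl renders the green tree (and any errors) as text
    format!("{:#?}", p.finish())
}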