implement better checking and errors
commit 344afa22b5
parent 3f4846744b
4 changed files with 125 additions and 52 deletions
src/syntax/check.rs (new file, +34)
@@ -0,0 +1,34 @@
use super::{
    error::{FileId, SyntaxError},
    PipelineElement, PipelineElementKind,
};

pub fn check(
    syntax: Vec<PipelineElement>,
    raw_source: &str,
    file_id: FileId,
) -> Result<Vec<PipelineElement>, Vec<SyntaxError>> {
    let mut errs = Vec::new();

    if let Err(e_span) = check_missing_streamer(&syntax) {
        errs.push(SyntaxError::MissingStreamer(vec![(file_id, e_span)]));
    }

    if errs.is_empty() {
        Ok(syntax)
    } else {
        Err(errs)
    }
}

fn check_missing_streamer(syntax: &Vec<PipelineElement>) -> Result<(), logos::Span> {
    if let Some(&PipelineElement {
        kind: PipelineElementKind::Pipe,
        ref span,
    }) = syntax.first()
    {
        Err(span.clone())
    } else {
        Ok(())
    }
}
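Not part of this diff, but to show how the new checker is meant to slot in after parsing, here is a minimal hedged sketch of a wrapper. The name parse_and_check and its placement inside src/syntax/mod.rs (so the private error module is in scope) are assumptions; it simply chains parse_syntax with check and funnels both error shapes into one Vec<SyntaxError>.

// Hypothetical helper inside src/syntax/mod.rs.
pub fn parse_and_check(
    input: &str,
    file_id: FileId,
) -> Result<Vec<PipelineElement>, Vec<SyntaxError>> {
    // Lexing/parsing reports a single SyntaxError; wrap it so both stages
    // surface their errors as Vec<SyntaxError>.
    let syntax = parse_syntax(input, file_id).map_err(|e| vec![e])?;
    // `check` currently only detects a leading pipe (MissingStreamer); the raw
    // source is passed through for checks that may need it later.
    check::check(syntax, input, file_id)
}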
src/syntax/error.rs (new file, +48)
@@ -0,0 +1,48 @@
use codespan_reporting::diagnostic::{Diagnostic, Label};

pub type FileId = usize;

/// The enum representing a syntax error, used for error reporting
#[derive(Debug, Clone)]
pub enum SyntaxError {
    /// This variant indicates a token that the lexer didn't recognize
    InvalidToken(Vec<(FileId, logos::Span)>),
    /// `MissingStreamer` means that the pipeline starts with a Pipe (`|`), so there is no streamer providing input in front of it
    MissingStreamer(Vec<(FileId, logos::Span)>),
    /// `MissingSink` means that the pipeline ends with a Pipe (`|`), so the output has nowhere to go
    MissingSink,
    /// This indicates a missing filter somewhere in the pipeline, i.e. two pipes directly after one another
    MissingFilter,
    /// A literal cannot be a sink
    LiteralAsSink,
    /// A literal can't be a filter either
    LiteralAsFilter,
    /// A literal acting as streamer cannot take arguments
    LiteralWithArgs,
}

impl SyntaxError {
    pub fn to_diagnostic(&self) -> Diagnostic<usize> {
        match self {
            Self::InvalidToken(errs) => Diagnostic::error()
                .with_message("failed to parse invalid tokens")
                .with_labels(
                    errs.into_iter()
                        .map(|(file_id, span)| {
                            Label::primary(*file_id, span.clone()).with_message("invalid token")
                        })
                        .collect(),
                ),
            Self::MissingStreamer(locs) => Diagnostic::error()
                .with_message("pipelines must always start with a streamer")
                .with_labels(
                    locs.into_iter()
                        .map(|(file_id, span)| {
                            Label::primary(*file_id, span.clone()).with_message("missing streamer")
                        })
                        .collect(),
                ),
            _ => todo!(),
        }
    }
}
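The commit stops at building Diagnostic values; how they get printed is not shown. As a rough sketch (assuming codespan-reporting's files/term modules and its termcolor re-export are available, and that the FileId stored in the errors is the id returned by files.add), emitting them could look like this:

use codespan_reporting::files::SimpleFiles;
use codespan_reporting::term;
use codespan_reporting::term::termcolor::{ColorChoice, StandardStream};

// Hypothetical reporting helper: print every collected SyntaxError to stderr.
fn report(source_name: &str, source: &str, errors: &[SyntaxError]) {
    let mut files = SimpleFiles::new();
    // SimpleFiles hands out usize ids, matching `pub type FileId = usize;`.
    // The errors are assumed to carry this same id (0 for the first file added).
    let _file_id = files.add(source_name, source);

    let writer = StandardStream::stderr(ColorChoice::Auto);
    let config = term::Config::default();
    for err in errors {
        term::emit(&mut writer.lock(), &config, &files, &err.to_diagnostic())
            .expect("failed to emit diagnostic");
    }
}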
src/syntax/mod.rs (new file, +104)
@@ -0,0 +1,104 @@
use std::mem;

use logos::Logos;
use logos::Span;

use crate::lexer::Token;

use self::error::FileId;
use self::error::SyntaxError;

pub mod check;
mod error;

#[derive(Debug, Clone, PartialEq)]
pub struct PipelineElement {
    kind: PipelineElementKind,
    span: Span,
}

#[derive(Debug, Clone, PartialEq)]
pub enum PipelineElementKind {
    Pipe,
    Command(Vec<CommandPart>),
}

#[derive(Debug, Clone, PartialEq)]
pub struct CommandPart {
    kind: CommandPartKind,
    span: Span,
}

#[derive(Debug, Clone, PartialEq)]
pub enum CommandPartKind {
    Word(String),
    Integer(i64),
    Float(f64),
    String(String),
}

pub fn parse_syntax(input: &str, file_id: FileId) -> Result<Vec<PipelineElement>, SyntaxError> {
    let lexer = Token::lexer(input);
    let mut errs = Vec::new();

    let mut r = Vec::new();

    let mut partial_command: Vec<CommandPart> = Vec::new();
    for (tok, span) in lexer.spanned().into_iter() {
        if let Ok(tok) = tok {
            match tok {
                Token::Pipe => {
                    if !partial_command.is_empty() {
                        let span = partial_command.first().unwrap().span.start
                            ..partial_command.last().unwrap().span.end;
                        r.push(PipelineElement {
                            kind: PipelineElementKind::Command(mem::replace(
                                &mut partial_command,
                                Vec::new(),
                            )),
                            span,
                        });
                    }
                    r.push(PipelineElement {
                        kind: PipelineElementKind::Pipe,
                        span,
                    });
                }
                Token::Word(word) => partial_command.push(CommandPart {
                    kind: CommandPartKind::Word(word.to_owned()),
                    span,
                }),
                Token::IntLiteral(int) => partial_command.push(CommandPart {
                    kind: CommandPartKind::Integer(int),
                    span,
                }),
                Token::FloatLiteral(float) => partial_command.push(CommandPart {
                    kind: CommandPartKind::Float(float),
                    span,
                }),
                Token::StringLiteral(string) => partial_command.push(CommandPart {
                    kind: CommandPartKind::String(string),
                    span,
                }),
                _ => {}
            }
        } else {
            errs.push((file_id, span))
        }
    }

    if !partial_command.is_empty() {
        let span =
            partial_command.first().unwrap().span.start..partial_command.last().unwrap().span.end;
        r.push(PipelineElement {
            kind: PipelineElementKind::Command(mem::replace(&mut partial_command, Vec::new())),
            span,
        });
    }

    if errs.is_empty() {
        Ok(r)
    } else {
        Err(SyntaxError::InvalidToken(errs))
    }
}
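For a sense of the output shape, a hedged test sketch follows. It assumes the lexer tokenizes bare words as Token::Word and skips whitespace, which this diff does not show, so the exact token stream is an assumption.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn splits_pipeline_on_pipes() {
        // Assumed lexing: "ls | grep foo" -> Word("ls"), Pipe, Word("grep"), Word("foo").
        // parse_syntax should then yield Command, Pipe, Command.
        let elements = parse_syntax("ls | grep foo", 0).expect("input should lex cleanly");
        assert_eq!(elements.len(), 3);
        assert!(matches!(elements[0].kind, PipelineElementKind::Command(_)));
        assert!(matches!(elements[1].kind, PipelineElementKind::Pipe));
        assert!(matches!(elements[2].kind, PipelineElementKind::Command(_)));
    }

    #[test]
    fn leading_pipe_fails_the_streamer_check() {
        // A pipeline starting with `|` has nothing streaming into it, so
        // check() should report SyntaxError::MissingStreamer.
        let elements = parse_syntax("| grep foo", 0).expect("input should lex cleanly");
        assert!(check::check(elements, "| grep foo", 0).is_err());
    }
}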