Adds logging to parser
parent c00cdf53b7
commit 43644276bb

@@ -1,6 +1,7 @@
 #![feature(concat_idents)]
 #![feature(exclusive_range_pattern)]
 #![feature(assert_matches)]
 #![feature(fn_traits)]
 
 pub(crate) mod defines;
+pub mod logger;

@@ -1,7 +1,15 @@
+use crate::parsing::lexer::lex_tokens::TokenType;
+
 #[derive(Debug)]
 pub enum Message {
     UnexpectedCharacter(char),
-    InvalidCharacter { found: char, expected: char },
+    InvalidCharacter {
+        found: char,
+        expected: char,
+    },
     UnclosedStringLiteral,
-    UnexpectedToken(),
+    UnexpectedToken {
+        found: TokenType,
+        expected: Vec<TokenType>,
+    },
 }

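Note (not part of this commit): a minimal sketch of how a consumer of these messages might render them for output. The `describe` helper name is hypothetical; only the variants shown in the diff above are assumed.

    use crate::logger::messages::Message;

    // Turns a logged Message into a human-readable string.
    fn describe(message: &Message) -> String {
        match message {
            Message::UnexpectedCharacter(c) => format!("unexpected character '{}'", c),
            Message::InvalidCharacter { found, expected } => {
                format!("invalid character '{}', expected '{}'", found, expected)
            }
            Message::UnclosedStringLiteral => "unclosed string literal".to_string(),
            Message::UnexpectedToken { found, expected } => {
                format!("unexpected token {:?}, expected one of {:?}", found, expected)
            }
        }
    }
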
@@ -3,10 +3,10 @@ use crate::span::Span;
 
 pub struct LexToken {
     pub token_type: TokenType,
-    pub span: Span
+    pub span: Span,
 }
 
-#[derive(PartialEq, Debug)]
+#[derive(PartialEq, Debug, Clone)]
 pub enum TokenType {
     EndOfFile,
     WhiteSpace,

@@ -3,7 +3,9 @@ pub mod parsed_statement;
 mod parser_tests;
 
 use super::lexer::lex_tokens::{LexToken, TokenType};
+use crate::logger::messages::Message;
 use crate::parsing::parser::parsed_statement::ParsedStatement;
+use crate::span::Span;
 use itertools::{Itertools, MultiPeek};
 
 struct ParseReader<'a> {

@@ -39,11 +41,20 @@ impl<'a> ParseReader<'a> {
         }
     }
 
-    pub fn consume(&mut self, token: TokenType) -> &LexToken {
+    pub fn consume(
+        &mut self,
+        expected: TokenType,
+        log: &mut dyn FnMut(Message, Span),
+    ) -> &LexToken {
         let n = self.next();
-        if n.token_type != token {
-            // TODO: log error
-            unimplemented!()
+        if n.token_type != expected {
+            log(
+                Message::UnexpectedToken {
+                    expected: vec![expected],
+                    found: n.token_type.clone(),
+                },
+                n.span,
+            );
         }
         n
     }

@@ -54,20 +65,23 @@ impl<'a> ParseReader<'a> {
     }
 }
 
-pub fn parse(tokens: Vec<LexToken>) -> Box<ParsedStatement> {
+pub fn parse(tokens: Vec<LexToken>, log: &mut dyn FnMut(Message, Span)) -> Box<ParsedStatement> {
     let mut reader = ParseReader {
         tokens: tokens.iter().multipeek(),
     };
-    parse_script(&mut reader)
+    parse_script(&mut reader, log)
 }
 
-fn parse_script(reader: &mut ParseReader) -> Box<ParsedStatement> {
+fn parse_script(
+    reader: &mut ParseReader,
+    log: &mut dyn FnMut(Message, Span),
+) -> Box<ParsedStatement> {
     let mut vec: Vec<Box<ParsedStatement>> = Vec::new();
     loop {
         let n = reader.peek();
         match n.token_type {
             TokenType::NamespaceKeyword => {
-                vec.push(parse_namespace(reader));
+                vec.push(parse_namespace(reader, log));
             }
             TokenType::EndOfFile => break,
             _ => {

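Note (not part of this commit): with the new signature the caller decides what to do with each reported error. Below is a minimal sketch of a call site that collects every logged (Message, Span) pair instead of aborting; the helper name is hypothetical and the module paths are inferred from the imports shown in the diff above.

    use crate::logger::messages::Message;
    use crate::parsing::lexer::lex_tokens::LexToken;
    use crate::parsing::parser::{parse, parsed_statement::ParsedStatement};
    use crate::span::Span;

    // Runs the parser, gathering reported errors alongside the parse tree.
    fn parse_collecting(tokens: Vec<LexToken>) -> (Box<ParsedStatement>, Vec<(Message, Span)>) {
        let mut errors: Vec<(Message, Span)> = Vec::new();
        // A &mut closure coerces to &mut dyn FnMut(Message, Span).
        let script = parse(tokens, &mut |message, span| errors.push((message, span)));
        (script, errors)
    }
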
@@ -79,7 +93,10 @@ fn parse_script(reader: &mut ParseReader) -> Box<ParsedStatement> {
     Box::new(ParsedStatement::Script(vec))
 }
 
-fn parse_namespace(reader: &mut ParseReader) -> Box<ParsedStatement> {
+fn parse_namespace(
+    reader: &mut ParseReader,
+    log: &mut dyn FnMut(Message, Span),
+) -> Box<ParsedStatement> {
     reader.next(); // Consume namespace
     let identifier_token = reader.next();
     let s: String;

@@ -88,12 +105,17 @@ fn parse_namespace(reader: &mut ParseReader) -> Box<ParsedStatement> {
             s = i.to_string();
         }
         _ => {
-            // Log error
-            unimplemented!();
+            log(
+                Message::UnexpectedToken {
+                    found: identifier_token.token_type.clone(),
+                    expected: vec![TokenType::Identifier(String::new())],
+                },
+                identifier_token.span,
+            );
         }
     }
-    reader.consume(TokenType::OpenCurlyBracket);
-    let script = parse_script(reader);
-    reader.consume(TokenType::CloseCurlyBracket);
+    reader.consume(TokenType::OpenCurlyBracket, log);
+    let script = parse_script(reader, log);
+    reader.consume(TokenType::CloseCurlyBracket, log);
     Box::new(ParsedStatement::Namespace(s, script))
 }

@@ -16,7 +16,8 @@ fn create_tokens(types: Vec<TokenType>) -> Vec<LexToken> {
 
 #[test]
 fn test_empty_namespace() {
-    let script = parse(create_tokens(vec![
+    let script = parse(
+        create_tokens(vec![
         TokenType::NamespaceKeyword,
         TokenType::WhiteSpace,
         TokenType::Identifier("foo".to_string()),

@@ -24,7 +25,11 @@ fn test_empty_namespace() {
         TokenType::OpenCurlyBracket,
         TokenType::CloseCurlyBracket,
         TokenType::EndOfFile,
-    ]));
+        ]),
+        &mut |_message, _span| {
+            unimplemented!();
+        },
+    );
     if let ParsedStatement::Script(inner) = script.as_ref() {
         assert_eq!(1, inner.len());
         if let ParsedStatement::Namespace(identifier, inner_script) = inner[0].as_ref() {

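Note (not part of this commit): the closure in the test above treats any logged message as a failure by panicking via unimplemented!(). A hedged alternative sketch, assuming one prefers an explicit assertion, would record the messages and check that none were reported; the token list is elided here and stays exactly as in test_empty_namespace.

    let mut logged: Vec<(Message, Span)> = Vec::new();
    let script = parse(
        create_tokens(vec![
            // ... same tokens as in test_empty_namespace ...
        ]),
        &mut |message, span| logged.push((message, span)),
    );
    assert!(logged.is_empty(), "parser reported errors: {:?}", logged);
    // `script` would then be inspected exactly as in the existing test.
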
@@ -1,3 +1,4 @@
+#[derive(Copy, Clone, Debug)]
 pub struct Span {
     pub start: usize,
     pub end: usize,