Adds logging to parser

Deukhoofd 2021-06-05 21:03:14 +02:00
parent c00cdf53b7
commit 43644276bb
Signed by: Deukhoofd
GPG Key ID: F63E044490819F6F
6 changed files with 64 additions and 27 deletions

View File

@@ -1,6 +1,7 @@
 #![feature(concat_idents)]
 #![feature(exclusive_range_pattern)]
 #![feature(assert_matches)]
+#![feature(fn_traits)]
 pub(crate) mod defines;
 pub mod logger;
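For reference, a nightly-only sketch of what the newly enabled fn_traits gate allows: explicit use of the unstable call methods on the Fn* traits. This snippet is illustrative only and is not code from this commit, nor a claim about why the gate was enabled here.

#![feature(fn_traits)]

fn main() {
    let mut log = |msg: &str| println!("log: {}", msg);
    // With `fn_traits`, the unstable trait method can be invoked explicitly,
    // passing the arguments as a tuple.
    log.call_mut(("parser started",));
}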

View File

@@ -1,7 +1,15 @@
+use crate::parsing::lexer::lex_tokens::TokenType;
+
 #[derive(Debug)]
 pub enum Message {
     UnexpectedCharacter(char),
-    InvalidCharacter { found: char, expected: char },
+    InvalidCharacter {
+        found: char,
+        expected: char,
+    },
     UnclosedStringLiteral,
-    UnexpectedToken(),
+    UnexpectedToken {
+        found: TokenType,
+        expected: Vec<TokenType>,
+    },
 }
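One way the richer UnexpectedToken variant could be surfaced to users is a small display helper. This is a hedged sketch only; the describe function and its wording are assumptions, not part of this commit.

// Hypothetical helper next to Message; name and wording are assumptions.
impl Message {
    pub fn describe(&self) -> String {
        match self {
            Message::UnexpectedCharacter(c) => format!("unexpected character '{}'", c),
            Message::InvalidCharacter { found, expected } => {
                format!("invalid character '{}', expected '{}'", found, expected)
            }
            Message::UnclosedStringLiteral => "unclosed string literal".to_string(),
            Message::UnexpectedToken { found, expected } => {
                format!("unexpected token {:?}, expected one of {:?}", found, expected)
            }
        }
    }
}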

View File

@@ -3,10 +3,10 @@ use crate::span::Span;
 pub struct LexToken {
     pub token_type: TokenType,
-    pub span: Span
+    pub span: Span,
 }

-#[derive(PartialEq, Debug)]
+#[derive(PartialEq, Debug, Clone)]
 pub enum TokenType {
     EndOfFile,
     WhiteSpace,

View File

@@ -3,7 +3,9 @@ pub mod parsed_statement;
 mod parser_tests;

 use super::lexer::lex_tokens::{LexToken, TokenType};
+use crate::logger::messages::Message;
 use crate::parsing::parser::parsed_statement::ParsedStatement;
+use crate::span::Span;
 use itertools::{Itertools, MultiPeek};

 struct ParseReader<'a> {
@@ -39,11 +41,20 @@ impl<'a> ParseReader<'a> {
         }
     }

-    pub fn consume(&mut self, token: TokenType) -> &LexToken {
+    pub fn consume(
+        &mut self,
+        expected: TokenType,
+        log: &mut dyn FnMut(Message, Span),
+    ) -> &LexToken {
         let n = self.next();
-        if n.token_type != token {
-            // TODO: log error
-            unimplemented!()
+        if n.token_type != expected {
+            log(
+                Message::UnexpectedToken {
+                    expected: vec![expected],
+                    found: n.token_type.clone(),
+                },
+                n.span,
+            );
         }
         n
     }
@@ -54,20 +65,23 @@ impl<'a> ParseReader<'a> {
     }
 }

-pub fn parse(tokens: Vec<LexToken>) -> Box<ParsedStatement> {
+pub fn parse(tokens: Vec<LexToken>, log: &mut dyn FnMut(Message, Span)) -> Box<ParsedStatement> {
     let mut reader = ParseReader {
         tokens: tokens.iter().multipeek(),
     };
-    parse_script(&mut reader)
+    parse_script(&mut reader, log)
 }

-fn parse_script(reader: &mut ParseReader) -> Box<ParsedStatement> {
+fn parse_script(
+    reader: &mut ParseReader,
+    log: &mut dyn FnMut(Message, Span),
+) -> Box<ParsedStatement> {
     let mut vec: Vec<Box<ParsedStatement>> = Vec::new();
     loop {
         let n = reader.peek();
         match n.token_type {
             TokenType::NamespaceKeyword => {
-                vec.push(parse_namespace(reader));
+                vec.push(parse_namespace(reader, log));
             }
             TokenType::EndOfFile => break,
             _ => {
@@ -79,7 +93,10 @@ fn parse_script(reader: &mut ParseReader) -> Box<ParsedStatement> {
     Box::new(ParsedStatement::Script(vec))
 }

-fn parse_namespace(reader: &mut ParseReader) -> Box<ParsedStatement> {
+fn parse_namespace(
+    reader: &mut ParseReader,
+    log: &mut dyn FnMut(Message, Span),
+) -> Box<ParsedStatement> {
     reader.next(); // Consume namespace
     let identifier_token = reader.next();
     let s: String;
@@ -88,12 +105,17 @@ fn parse_namespace(reader: &mut ParseReader) -> Box<ParsedStatement> {
             s = i.to_string();
         }
         _ => {
-            // Log error
-            unimplemented!();
+            log(
+                Message::UnexpectedToken {
+                    found: identifier_token.token_type.clone(),
+                    expected: vec![TokenType::Identifier(String::new())],
+                },
+                identifier_token.span,
+            );
         }
     }
-    reader.consume(TokenType::OpenCurlyBracket);
-    let script = parse_script(reader);
-    reader.consume(TokenType::CloseCurlyBracket);
+    reader.consume(TokenType::OpenCurlyBracket, log);
+    let script = parse_script(reader, log);
+    reader.consume(TokenType::CloseCurlyBracket, log);
     Box::new(ParsedStatement::Namespace(s, script))
 }
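For context, a minimal caller-side sketch of the new signature: diagnostics are reported through the FnMut callback instead of aborting on the first error. Only the callback shape is taken from this commit; the parse_and_report helper, the token source, and the printing are assumptions for illustration.

// Hedged sketch: collect (Message, Span) pairs produced during parsing.
fn parse_and_report(tokens: Vec<LexToken>) -> Box<ParsedStatement> {
    let mut messages: Vec<(Message, Span)> = Vec::new();
    let script = parse(tokens, &mut |message, span| {
        messages.push((message, span));
    });
    // Report collected diagnostics after parsing finishes.
    for (message, span) in &messages {
        eprintln!("{:?} at {}..{}", message, span.start, span.end);
    }
    script
}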

View File

@@ -16,15 +16,20 @@ fn create_tokens(types: Vec<TokenType>) -> Vec<LexToken> {
 #[test]
 fn test_empty_namespace() {
-    let script = parse(create_tokens(vec![
-        TokenType::NamespaceKeyword,
-        TokenType::WhiteSpace,
-        TokenType::Identifier("foo".to_string()),
-        TokenType::WhiteSpace,
-        TokenType::OpenCurlyBracket,
-        TokenType::CloseCurlyBracket,
-        TokenType::EndOfFile,
-    ]));
+    let script = parse(
+        create_tokens(vec![
+            TokenType::NamespaceKeyword,
+            TokenType::WhiteSpace,
+            TokenType::Identifier("foo".to_string()),
+            TokenType::WhiteSpace,
+            TokenType::OpenCurlyBracket,
+            TokenType::CloseCurlyBracket,
+            TokenType::EndOfFile,
+        ]),
+        &mut |_message, _span| {
+            unimplemented!();
+        },
+    );
     if let ParsedStatement::Script(inner) = script.as_ref() {
         assert_eq!(1, inner.len());
         if let ParsedStatement::Namespace(identifier, inner_script) = inner[0].as_ref() {

View File

@@ -1,3 +1,4 @@
+#[derive(Copy, Clone, Debug)]
 pub struct Span {
     pub start: usize,
     pub end: usize,
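The new derives matter for the logging path above: consume hands n.span to the callback by value while only holding a borrow of the token, which works because Span is now Copy, and Debug lets spans be printed alongside messages. A hedged illustration follows; the report helper is hypothetical, not part of the commit.

// Sketch only; `report` is a hypothetical helper.
fn report(log: &mut dyn FnMut(Message, Span), token: &LexToken) {
    log(
        Message::UnexpectedToken {
            found: token.token_type.clone(),
            expected: vec![TokenType::EndOfFile],
        },
        token.span, // copied out of the borrowed token because Span is Copy
    );
}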