diff --git a/src/lib.rs b/src/lib.rs
index 0b0c0ec..a804f76 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,6 +1,7 @@
 #![feature(concat_idents)]
 #![feature(exclusive_range_pattern)]
 #![feature(assert_matches)]
+#![feature(fn_traits)]
 
 pub(crate) mod defines;
 pub mod logger;
diff --git a/src/logger/messages.rs b/src/logger/messages.rs
index c3e9537..d951a75 100644
--- a/src/logger/messages.rs
+++ b/src/logger/messages.rs
@@ -1,7 +1,15 @@
+use crate::parsing::lexer::lex_tokens::TokenType;
+
 #[derive(Debug)]
 pub enum Message {
     UnexpectedCharacter(char),
-    InvalidCharacter { found: char, expected: char },
+    InvalidCharacter {
+        found: char,
+        expected: char,
+    },
     UnclosedStringLiteral,
-    UnexpectedToken(),
+    UnexpectedToken {
+        found: TokenType,
+        expected: Vec<TokenType>,
+    },
 }
diff --git a/src/parsing/lexer/lex_tokens.rs b/src/parsing/lexer/lex_tokens.rs
index 9320c1b..40c0f6c 100644
--- a/src/parsing/lexer/lex_tokens.rs
+++ b/src/parsing/lexer/lex_tokens.rs
@@ -3,10 +3,10 @@ use crate::span::Span;
 
 pub struct LexToken {
     pub token_type: TokenType,
-    pub span: Span
+    pub span: Span,
 }
 
-#[derive(PartialEq, Debug)]
+#[derive(PartialEq, Debug, Clone)]
 pub enum TokenType {
     EndOfFile,
     WhiteSpace,
diff --git a/src/parsing/parser/mod.rs b/src/parsing/parser/mod.rs
index ff402f5..fa81e14 100644
--- a/src/parsing/parser/mod.rs
+++ b/src/parsing/parser/mod.rs
@@ -3,7 +3,9 @@ pub mod parsed_statement;
 mod parser_tests;
 
 use super::lexer::lex_tokens::{LexToken, TokenType};
+use crate::logger::messages::Message;
 use crate::parsing::parser::parsed_statement::ParsedStatement;
+use crate::span::Span;
 use itertools::{Itertools, MultiPeek};
 
 struct ParseReader<'a> {
@@ -39,11 +41,20 @@ impl<'a> ParseReader<'a> {
         }
     }
 
-    pub fn consume(&mut self, token: TokenType) -> &LexToken {
+    pub fn consume(
+        &mut self,
+        expected: TokenType,
+        log: &mut dyn FnMut(Message, Span),
+    ) -> &LexToken {
         let n = self.next();
-        if n.token_type != token {
-            // TODO: log error
-            unimplemented!()
+        if n.token_type != expected {
+            log(
+                Message::UnexpectedToken {
+                    expected: vec![expected],
+                    found: n.token_type.clone(),
+                },
+                n.span,
+            );
         }
         n
     }
@@ -54,20 +65,23 @@ impl<'a> ParseReader<'a> {
     }
 }
 
-pub fn parse(tokens: Vec<LexToken>) -> Box<ParsedStatement> {
+pub fn parse(tokens: Vec<LexToken>, log: &mut dyn FnMut(Message, Span)) -> Box<ParsedStatement> {
     let mut reader = ParseReader {
         tokens: tokens.iter().multipeek(),
     };
-    parse_script(&mut reader)
+    parse_script(&mut reader, log)
 }
 
-fn parse_script(reader: &mut ParseReader) -> Box<ParsedStatement> {
+fn parse_script(
+    reader: &mut ParseReader,
+    log: &mut dyn FnMut(Message, Span),
+) -> Box<ParsedStatement> {
     let mut vec: Vec<Box<ParsedStatement>> = Vec::new();
     loop {
         let n = reader.peek();
         match n.token_type {
             TokenType::NamespaceKeyword => {
-                vec.push(parse_namespace(reader));
+                vec.push(parse_namespace(reader, log));
             }
             TokenType::EndOfFile => break,
             _ => {
@@ -79,7 +93,10 @@ fn parse_script(reader: &mut ParseReader) -> Box<ParsedStatement> {
     Box::new(ParsedStatement::Script(vec))
 }
 
-fn parse_namespace(reader: &mut ParseReader) -> Box<ParsedStatement> {
+fn parse_namespace(
+    reader: &mut ParseReader,
+    log: &mut dyn FnMut(Message, Span),
+) -> Box<ParsedStatement> {
     reader.next(); // Consume namespace
     let identifier_token = reader.next();
     let s: String;
@@ -88,12 +105,17 @@ fn parse_namespace(reader: &mut ParseReader) -> Box<ParsedStatement> {
             s = i.to_string();
         }
         _ => {
-            // Log error
-            unimplemented!();
+            log(
+                Message::UnexpectedToken {
+                    found: identifier_token.token_type.clone(),
+                    expected: vec![TokenType::Identifier(String::new())],
+                },
+                identifier_token.span,
+            );
         }
     }
-    reader.consume(TokenType::OpenCurlyBracket);
-    let script = parse_script(reader);
-    reader.consume(TokenType::CloseCurlyBracket);
+    reader.consume(TokenType::OpenCurlyBracket, log);
+    let script = parse_script(reader, log);
+    reader.consume(TokenType::CloseCurlyBracket, log);
     Box::new(ParsedStatement::Namespace(s, script))
 }
diff --git a/src/parsing/parser/parser_tests.rs b/src/parsing/parser/parser_tests.rs
index f4d0a3c..35ce779 100644
--- a/src/parsing/parser/parser_tests.rs
+++ b/src/parsing/parser/parser_tests.rs
@@ -16,15 +16,20 @@ fn create_tokens(types: Vec<TokenType>) -> Vec<LexToken> {
 
 #[test]
 fn test_empty_namespace() {
-    let script = parse(create_tokens(vec![
-        TokenType::NamespaceKeyword,
-        TokenType::WhiteSpace,
-        TokenType::Identifier("foo".to_string()),
-        TokenType::WhiteSpace,
-        TokenType::OpenCurlyBracket,
-        TokenType::CloseCurlyBracket,
-        TokenType::EndOfFile,
-    ]));
+    let script = parse(
+        create_tokens(vec![
+            TokenType::NamespaceKeyword,
+            TokenType::WhiteSpace,
+            TokenType::Identifier("foo".to_string()),
+            TokenType::WhiteSpace,
+            TokenType::OpenCurlyBracket,
+            TokenType::CloseCurlyBracket,
+            TokenType::EndOfFile,
+        ]),
+        &mut |_message, _span| {
+            unimplemented!();
+        },
+    );
     if let ParsedStatement::Script(inner) = script.as_ref() {
         assert_eq!(1, inner.len());
         if let ParsedStatement::Namespace(identifier, inner_script) = inner[0].as_ref() {
diff --git a/src/span.rs b/src/span.rs
index 7b46ba4..2adedd7 100644
--- a/src/span.rs
+++ b/src/span.rs
@@ -1,3 +1,4 @@
+#[derive(Copy, Clone, Debug)]
 pub struct Span {
     pub start: usize,
     pub end: usize,
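
A minimal usage sketch of the callback-based API introduced above (not part of the diff): parse now takes a &mut dyn FnMut(Message, Span) callback, so a caller can collect diagnostics instead of the parser aborting with unimplemented!(). The tokens variable below is an assumption standing in for the lexer's output.

    use crate::logger::messages::Message;
    use crate::parsing::parser::parse;
    use crate::span::Span;

    // Sketch only: `tokens` is assumed to be the Vec<LexToken> produced by the lexer stage.
    let mut errors: Vec<(Message, Span)> = Vec::new();
    let script = parse(tokens, &mut |message, span| {
        // Collect every reported message together with its source span.
        errors.push((message, span));
    });
    assert!(errors.is_empty(), "unexpected parse errors: {:?}", errors);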