Updates parser to take LexTokens instead of TokenType

Deukhoofd 2021-06-05 20:50:15 +02:00
parent 301ffd7496
commit c00cdf53b7
Signed by: Deukhoofd
GPG Key ID: F63E044490819F6F
3 changed files with 41 additions and 18 deletions


@@ -3,4 +3,5 @@ pub enum Message {
     UnexpectedCharacter(char),
     InvalidCharacter { found: char, expected: char },
     UnclosedStringLiteral,
+    UnexpectedToken(),
 }


@@ -2,36 +2,46 @@ pub mod parsed_statement;
 #[cfg(test)]
 mod parser_tests;
 
-use super::lexer::lex_tokens::TokenType;
+use super::lexer::lex_tokens::{LexToken, TokenType};
 use crate::parsing::parser::parsed_statement::ParsedStatement;
 use itertools::{Itertools, MultiPeek};
 
 struct ParseReader<'a> {
-    tokens: MultiPeek<core::slice::Iter<'a, TokenType>>,
+    tokens: MultiPeek<core::slice::Iter<'a, LexToken>>,
 }
 
 impl<'a> ParseReader<'a> {
-    pub fn peek(&mut self) -> &TokenType {
+    pub fn peek(&mut self) -> &LexToken {
         let t = self.tokens.peek();
         match t {
-            None => &TokenType::EndOfFile,
-            Some(TokenType::WhiteSpace) => self.peek(),
-            Some(v) => v,
+            None => self.tokens.by_ref().last().unwrap(),
+            Some(v) => {
+                if v.token_type == TokenType::WhiteSpace {
+                    self.peek()
+                } else {
+                    v
+                }
+            }
         }
     }
 
-    pub fn next(&mut self) -> &TokenType {
+    pub fn next(&mut self) -> &LexToken {
         let t = self.tokens.next();
         match t {
-            None => &TokenType::EndOfFile,
-            Some(TokenType::WhiteSpace) => self.next(),
-            Some(v) => v,
+            None => self.tokens.by_ref().last().unwrap(),
+            Some(v) => {
+                if v.token_type == TokenType::WhiteSpace {
+                    self.next()
+                } else {
+                    v
+                }
+            }
         }
     }
 
-    pub fn consume(&mut self, token: TokenType) -> &TokenType {
+    pub fn consume(&mut self, token: TokenType) -> &LexToken {
         let n = self.next();
-        if n != &token {
+        if n.token_type != token {
             // TODO: log error
             unimplemented!()
         }
@@ -44,7 +54,7 @@ impl<'a> ParseReader<'a> {
     }
 }
 
-pub fn parse(tokens: Vec<TokenType>) -> Box<ParsedStatement> {
+pub fn parse(tokens: Vec<LexToken>) -> Box<ParsedStatement> {
     let mut reader = ParseReader {
         tokens: tokens.iter().multipeek(),
     };
@@ -55,7 +65,7 @@ fn parse_script(reader: &mut ParseReader) -> Box<ParsedStatement> {
     let mut vec: Vec<Box<ParsedStatement>> = Vec::new();
     loop {
         let n = reader.peek();
-        match n {
+        match n.token_type {
             TokenType::NamespaceKeyword => {
                 vec.push(parse_namespace(reader));
             }
@@ -73,7 +83,7 @@ fn parse_namespace(reader: &mut ParseReader) -> Box<ParsedStatement> {
     reader.next(); // Consume namespace
     let identifier_token = reader.next();
     let s: String;
-    match identifier_token {
+    match &identifier_token.token_type {
         TokenType::Identifier(i) => {
             s = i.to_string();
         }
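
With this change the parser reaches a token's kind through its token_type field and has a span available on every token it reads. The LexToken and Span definitions themselves are not part of this diff; the following is a minimal sketch of what they presumably look like, reconstructed only from how this commit uses them (the field names token_type and span and the Span::new(0, 0) constructor appear in the diff; everything else is an assumption):

// Sketch only: inferred from usage in this commit, not the actual
// contents of lex_tokens.rs or span.rs.
#[derive(Debug, Clone, PartialEq)]
pub struct Span {
    pub start: usize, // assumed fields; the diff only shows Span::new(0, 0)
    pub end: usize,
}

impl Span {
    pub fn new(start: usize, end: usize) -> Self {
        Span { start, end }
    }
}

#[derive(Debug, Clone, PartialEq)]
pub struct LexToken {
    pub token_type: TokenType, // matched on by the parser via v.token_type
    pub span: Span,            // source location the parser can now report
}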


@@ -1,10 +1,22 @@
 use super::parse;
 use super::parsed_statement::ParsedStatement;
-use crate::parsing::lexer::lex_tokens::TokenType;
+use crate::parsing::lexer::lex_tokens::{LexToken, TokenType};
+use crate::span::Span;
 
+fn create_tokens(types: Vec<TokenType>) -> Vec<LexToken> {
+    let mut v = Vec::with_capacity(types.len());
+    for t in types {
+        v.push(LexToken {
+            token_type: t,
+            span: Span::new(0, 0),
+        });
+    }
+    v
+}
+
 #[test]
 fn test_empty_namespace() {
-    let script = parse(vec![
+    let script = parse(create_tokens(vec![
         TokenType::NamespaceKeyword,
         TokenType::WhiteSpace,
         TokenType::Identifier("foo".to_string()),
@@ -12,7 +24,7 @@ fn test_empty_namespace() {
         TokenType::OpenCurlyBracket,
         TokenType::CloseCurlyBracket,
         TokenType::EndOfFile,
-    ]);
+    ]));
     if let ParsedStatement::Script(inner) = script.as_ref() {
         assert_eq!(1, inner.len());
         if let ParsedStatement::Namespace(identifier, inner_script) = inner[0].as_ref() {
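
Worth noting: consume() still ends in unimplemented!() when the token type does not match, and the Message::UnexpectedToken() variant added in the first file is not raised anywhere in this commit. Purely as an illustration of how the two pieces could meet later (a hypothetical sketch, not code from this repository; the log_error helper and its signature are assumptions):

// Hypothetical follow-up, not part of this commit.
// Assumed error sink; the project's real diagnostics channel is not shown here.
fn log_error(_message: Message, _span: &Span) {
    // e.g. collect the message and span for reporting instead of panicking
}

impl<'a> ParseReader<'a> {
    pub fn consume(&mut self, token: TokenType) -> &LexToken {
        let n = self.next();
        if n.token_type != token {
            // Report the unexpected token together with its source span,
            // rather than hitting unimplemented!().
            log_error(Message::UnexpectedToken(), &n.span);
        }
        n
    }
}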