//! SeraphScript parser (`src/parsing/parser/mod.rs`).
pub mod parsed_statement;
#[cfg(test)]
mod parser_tests;
use super::lexer::lex_tokens::{LexToken, TokenType};
use crate::parsing::parser::parsed_statement::ParsedStatement;
use itertools::{Itertools, MultiPeek};
/// Cursor over the lexer's token stream with arbitrary look-ahead.
///
/// Wraps a `MultiPeek` iterator: each `peek()` call advances a separate
/// look-ahead cursor, which is reset whenever a token is actually consumed.
struct ParseReader<'a> {
    // Borrowed slice iterator over the lexed tokens; `MultiPeek` keeps the
    // look-ahead cursor independent of the consume position.
    tokens: MultiPeek<core::slice::Iter<'a, LexToken>>,
}
impl<'a> ParseReader<'a> {
    /// Returns the next non-whitespace token without consuming it.
    ///
    /// Every call advances the `MultiPeek` look-ahead cursor, so successive
    /// calls inspect progressively later tokens until `next`/`reset_peek`
    /// rewinds the cursor. Whitespace tokens are skipped by recursing.
    ///
    /// NOTE(review): when the look-ahead cursor runs past the end, the
    /// `None` arm falls back to `last()`, which drains the remaining
    /// underlying tokens and `unwrap`s — this panics on a completely empty
    /// stream. Presumably the lexer always emits a trailing `EndOfFile`
    /// token so the fallback returns it — confirm against the lexer.
    pub fn peek(&mut self) -> &LexToken {
        let t = self.tokens.peek();
        match t {
            None => self.tokens.by_ref().last().unwrap(),
            Some(v) => {
                if v.token_type == TokenType::WhiteSpace {
                    // Skip whitespace: the recursive call peeks one further.
                    self.peek()
                } else {
                    v
                }
            }
        }
    }

    /// Consumes and returns the next non-whitespace token.
    ///
    /// Consuming also resets the `MultiPeek` look-ahead cursor. Same
    /// end-of-stream fallback as [`peek`]: returns the stream's last token
    /// and panics if the stream is completely empty.
    pub fn next(&mut self) -> &LexToken {
        let t = self.tokens.next();
        match t {
            None => self.tokens.by_ref().last().unwrap(),
            Some(v) => {
                if v.token_type == TokenType::WhiteSpace {
                    // Skip whitespace by consuming and trying again.
                    self.next()
                } else {
                    v
                }
            }
        }
    }

    /// Consumes the next non-whitespace token and asserts it is of the
    /// expected `token` type. Currently panics (`unimplemented!`) on a
    /// mismatch instead of reporting a parse error.
    pub fn consume(&mut self, token: TokenType) -> &LexToken {
        let n = self.next();
        if n.token_type != token {
            // TODO: log error
            unimplemented!()
        }
        n
    }

    /// Rewinds the `MultiPeek` look-ahead cursor back to the first
    /// unconsumed token.
    #[inline(always)]
    pub fn reset_peek(&mut self) {
        self.tokens.reset_peek();
    }
}
/// Entry point of the parser: turns the lexer's token stream into a
/// `ParsedStatement::Script` tree.
pub fn parse(tokens: Vec<LexToken>) -> Box<ParsedStatement> {
    // Wrap the token slice in a multi-peek cursor so the grammar rules can
    // look ahead before committing to consume.
    let stream = tokens.iter().multipeek();
    let mut reader = ParseReader { tokens: stream };
    parse_script(&mut reader)
}
/// Parses a sequence of top-level statements until the enclosing block (or
/// the file) ends, returning them wrapped in `ParsedStatement::Script`.
///
/// Fix over the previous version: the catch-all arm neither consumed the
/// offending token nor reset the peek cursor, so every iteration silently
/// advanced `MultiPeek`'s look-ahead. A closing `}` of the enclosing
/// namespace also fell into that arm, letting the look-ahead run past it
/// into a sibling `namespace` keyword and desyncing `parse_namespace`'s
/// subsequent `next()` calls. Now `CloseCurlyBracket` terminates the loop
/// (the caller consumes the bracket itself), and unexpected tokens are
/// consumed so the reader stays in sync.
fn parse_script(reader: &mut ParseReader) -> Box<ParsedStatement> {
    let mut statements: Vec<Box<ParsedStatement>> = Vec::new();
    loop {
        let n = reader.peek();
        match n.token_type {
            TokenType::NamespaceKeyword => {
                statements.push(parse_namespace(reader));
            }
            // End of the enclosing `{ ... }` block or of the file; the
            // caller (if any) consumes the closing bracket.
            TokenType::CloseCurlyBracket | TokenType::EndOfFile => break,
            _ => {
                // TODO: log error. Consume the unexpected token so parsing
                // can resume at the next statement instead of drifting the
                // peek cursor out of sync with the consume position.
                reader.next();
            }
        }
    }
    Box::new(ParsedStatement::Script(statements))
}
/// Parses `namespace <identifier> { <script> }` into
/// `ParsedStatement::Namespace(name, body)`.
///
/// Panics (`unimplemented!`) when the token after `namespace` is not an
/// identifier, and via `consume` when either curly bracket is missing.
fn parse_namespace(reader: &mut ParseReader) -> Box<ParsedStatement> {
    reader.next(); // Consume namespace
    let name_token = reader.next();
    let name = match &name_token.token_type {
        TokenType::Identifier(ident) => ident.to_string(),
        _ => {
            // Log error
            unimplemented!();
        }
    };
    reader.consume(TokenType::OpenCurlyBracket);
    let body = parse_script(reader);
    reader.consume(TokenType::CloseCurlyBracket);
    Box::new(ParsedStatement::Namespace(name, body))
}