Support for virtprops
This commit is contained in:
parent
cf13fa9a90
commit
a1663ff032
@@ -5,4 +5,6 @@ authors = ["Deukhoofd <Deukhoofd@gmail.com>"]
edition = "2018"

[dependencies]
itertools = "0.10.0"
enumflags2 = "0.7.1"
backtrace = "0.3.63"
@@ -2,8 +2,12 @@
#![feature(exclusive_range_pattern)]
#![feature(assert_matches)]
#![feature(fn_traits)]
#![feature(macro_attributes_in_derive_output)]
#![feature(iter_advance_by)]
#![feature(backtrace)]

pub(crate) mod defines;
pub mod logger;
pub mod modifiers;
pub mod parsing;
pub mod span;
@@ -12,4 +12,29 @@ pub enum Message {
        found: TokenType,
        expected: Vec<TokenType>,
    },
    EmptyProperty,
}

impl Message {
    pub fn stringify(&self) -> String {
        match self {
            Message::UnexpectedCharacter(c) => {
                format!("Encountered unexpected character '{}'", c)
            }
            Message::InvalidCharacter { found, expected } => {
                format!(
                    "Encountered invalid character '{}', expected '{}'",
                    found, expected
                )
            }
            Message::UnclosedStringLiteral => "Encountered unclosed string literal".to_string(),
            Message::UnexpectedToken { found, expected } => {
                format!(
                    "Encountered unexpected token '{:?}', expected any of {:?}",
                    found, expected
                )
            }
            Message::EmptyProperty => "Property encountered without a getter or setter".to_string(),
        }
    }
}
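(Aside, not part of the diff: both the lexer and the parser below report diagnostics through a &mut dyn FnMut(Message, Span) callback, so stringify() is normally called inside such a callback. A minimal sketch, assuming the crate-internal lex signature shown further down in this diff:)

    // Hypothetical caller; the source string is illustrative only.
    let mut errors: Vec<String> = Vec::new();
    let tokens = lex("interface foo {}", &mut |message, span| {
        errors.push(format!("{}..{}: {}", span.start, span.end, message.stringify()));
    });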
@@ -0,0 +1,19 @@
use enumflags2::bitflags;

#[bitflags]
#[repr(u8)]
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum TypeModifier {
    External = 0x01,
    Shared = 0x02,
    Abstract = 0x04,
    Final = 0x08,
}

#[bitflags]
#[repr(u8)]
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum FieldModifier {
    Private = 0x01,
    Protected = 0x02,
}
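(Aside, not part of the diff: with enumflags2 these enums act as flag sets through BitFlags. A minimal sketch of the operations the parser below relies on, using only the APIs already exercised in this commit:)

    use enumflags2::BitFlags;

    let mut type_mod: BitFlags<TypeModifier> = BitFlags::empty();
    type_mod |= TypeModifier::External;
    type_mod |= TypeModifier::Shared;
    assert!(type_mod.contains(TypeModifier::External));
    assert!(!type_mod.contains(TypeModifier::Abstract));
    assert!(!type_mod.is_empty());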
@@ -17,6 +17,8 @@ pub enum TokenType {

    Semicolon,
    Colon,
    ColonColon,
    Comma,
    OpenBracket,
    CloseBracket,
    OpenCurlyBracket,
@@ -1,13 +1,14 @@
use super::lex;
use crate::logger::messages::Message;
use crate::parsing::lexer::lex_tokens::TokenType;
use std::assert_matches::assert_matches;

macro_rules! lex_token_test {
    ( $a: ident, $b: expr, $c: expr) => {
        #[test]
        fn $a() {
            let tokens = lex($b, &mut |_message, _span| {
                unreachable!();
                std::panic::panic_any(_message.stringify());
            });
            assert_eq!(tokens.len(), 2);
            assert_eq!(tokens[0].token_type, $c);
@@ -23,7 +24,7 @@ macro_rules! lex_identifier_test {
        #[test]
        fn $a() {
            let tokens = lex($b, &mut |_message, _span| {
                unreachable!();
                std::panic::panic_any(_message.stringify());
            });
            assert_eq!(tokens.len(), 2);
            assert_eq!(tokens[0].token_type, TokenType::Identifier($b.to_string()));
@@ -39,7 +40,7 @@ macro_rules! lex_integer_test {
        #[test]
        fn $a() {
            let tokens = lex($b, &mut |_message, _span| {
                unreachable!();
                std::panic::panic_any(_message.stringify());
            });
            assert_eq!(tokens.len(), 2);
            assert_eq!(tokens[0].token_type, TokenType::IntegerLiteral($c));
@@ -55,7 +56,7 @@ macro_rules! lex_float_test {
        #[test]
        fn $a() {
            let tokens = lex($b, &mut |_message, _span| {
                unreachable!();
                std::panic::panic_any(_message.stringify());
            });
            assert_eq!(tokens.len(), 2);
            assert_eq!(tokens[0].token_type, TokenType::FloatLiteral($c));
@@ -71,7 +72,7 @@ macro_rules! lex_string_test {
        #[test]
        fn $a() {
            let tokens = lex($b, &mut |_message, _span| {
                unreachable!();
                std::panic::panic_any(_message.stringify());
            });
            assert_eq!(tokens.len(), 2);
            assert_eq!(
@@ -137,6 +138,9 @@ lex_token_test!(
lex_token_test!(lex_tilde, "~", TokenType::Tilde);
lex_token_test!(lex_at_symbol, "@", TokenType::AtSymbol);

lex_token_test!(lex_colon, ":", TokenType::Colon);
lex_token_test!(lex_coloncolon, "::", TokenType::ColonColon);

lex_token_test!(lex_and_keyword, "and", TokenType::AndKeyword);
lex_token_test!(lex_abstract_keyword, "abstract", TokenType::AbstractKeyword);
lex_token_test!(lex_auto_keyword, "auto", TokenType::AutoKeyword);
@@ -244,7 +248,9 @@ lex_string_test!(lex_heredoc_string_with_quote, "\"\"\"fo\"o\"\"\"", "fo\"o");

#[test]
fn lex_two_identifier() {
    let tokens = lex("foo bar", &mut |_message, _span| {});
    let tokens = lex("foo bar", &mut |_message, _span| {
        std::panic::panic_any(_message.stringify());
    });
    assert_eq!(tokens.len(), 4);
    assert_eq!(
        tokens[0].token_type,
@@ -265,7 +271,9 @@ fn lex_two_identifier() {

#[test]
fn lex_multiple_tokens_with_not_is() {
    let tokens = lex("a !is b", &mut |_message, _span| {});
    let tokens = lex("a !is b", &mut |_message, _span| {
        std::panic::panic_any(_message.stringify());
    });
    assert_eq!(tokens.len(), 6);
    assert_eq!(tokens[0].token_type, TokenType::Identifier("a".to_string()));
    assert_eq!(tokens[0].span.start, 0);
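(Aside, not part of the diff: a single invocation such as lex_token_test!(lex_colon, ":", TokenType::Colon) from the hunk above expands to roughly the test below; the remainder of the macro body lies outside the hunks shown here.)

    #[test]
    fn lex_colon() {
        let tokens = lex(":", &mut |_message, _span| {
            std::panic::panic_any(_message.stringify());
        });
        assert_eq!(tokens.len(), 2);
        assert_eq!(tokens[0].token_type, TokenType::Colon);
        // ...further assertions from the rest of the macro body follow here.
    }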
@@ -430,7 +430,17 @@ pub fn lex(s: &str, log: &mut dyn FnMut(Message, Span)) -> Vec<LexToken> {
        '~' => lex_and_consume(&mut chars, TT::Tilde, &mut add_token),
        '@' => lex_and_consume(&mut chars, TT::AtSymbol, &mut add_token),
        ';' => lex_and_consume(&mut chars, TT::Semicolon, &mut add_token),
        ':' => lex_and_consume(&mut chars, TT::Colon, &mut add_token),
        ':' => {
            let start_pos = chars.real_position;
            if chars.peek() == Some(&':') {
                chars.next();
                chars.next();
                add_token(TT::ColonColon, start_pos, chars.real_position);
            } else {
                chars.next();
                add_token(TT::Colon, start_pos, chars.real_position);
            }
        }

        '(' => lex_and_consume(&mut chars, TT::OpenBracket, &mut add_token),
        ')' => lex_and_consume(&mut chars, TT::CloseBracket, &mut add_token),
@@ -438,6 +448,7 @@ pub fn lex(s: &str, log: &mut dyn FnMut(Message, Span)) -> Vec<LexToken> {
        '}' => lex_and_consume(&mut chars, TT::CloseCurlyBracket, &mut add_token),
        '[' => lex_and_consume(&mut chars, TT::OpenBlockBracket, &mut add_token),
        ']' => lex_and_consume(&mut chars, TT::CloseBlockBracket, &mut add_token),
        ',' => lex_and_consume(&mut chars, TT::Comma, &mut add_token),

        '0'..'9' => lex_numeric(&mut chars, &mut add_token),
        'a'..'z' | 'A'..'Z' | '_' => lex_keyword_or_identifier(&mut chars, &mut add_token),
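(Aside, not part of the diff: the ':' arm above is the usual one-character-lookahead trick for two-character tokens. The same decision in isolation, using std's Peekable instead of the crate's reader:)

    // Stand-alone sketch of the ':' vs "::" decision.
    let mut chars = "::".chars().peekable();
    assert_eq!(chars.next(), Some(':'));
    let token = if chars.peek() == Some(&':') {
        chars.next(); // consume the second ':'
        "ColonColon"
    } else {
        "Colon"
    };
    assert_eq!(token, "ColonColon");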
@@ -1,22 +1,29 @@
pub mod parsed_statement;
pub mod parsed_type_modifier;
#[cfg(test)]
mod parser_tests;

use super::lexer::lex_tokens::{LexToken, TokenType};
use crate::logger::messages::Message;
use crate::parsing::parser::parsed_statement::ParsedStatement;
use crate::logger::messages::Message::UnexpectedToken;
use crate::modifiers::{FieldModifier, TypeModifier};
use crate::parsing::parser::parsed_type_modifier::ParsedTypeModifier;
use crate::span::Span;
use itertools::{Itertools, MultiPeek};
use enumflags2::BitFlags;
use parsed_statement::ParsedStatement;

struct ParseReader<'a> {
    tokens: MultiPeek<core::slice::Iter<'a, LexToken>>,
    tokens: &'a Vec<LexToken>,
    position: usize,
    peek_distance: usize,
}

impl<'a> ParseReader<'a> {
    pub fn peek(&mut self) -> &LexToken {
        let t = self.tokens.peek();
        let t = self.tokens.get(self.position + self.peek_distance);
        self.peek_distance += 1;
        match t {
            None => self.tokens.by_ref().last().unwrap(),
            None => self.tokens.last().unwrap(),
            Some(v) => {
                if v.token_type == TokenType::WhiteSpace {
                    self.peek()
@@ -28,9 +35,11 @@ impl<'a> ParseReader<'a> {
    }

    pub fn next(&mut self) -> &LexToken {
        let t = self.tokens.next();
        let t = self.tokens.get(self.position);
        self.position += 1;
        self.peek_distance = 0;
        match t {
            None => self.tokens.by_ref().last().unwrap(),
            None => self.tokens.last().unwrap(),
            Some(v) => {
                if v.token_type == TokenType::WhiteSpace {
                    self.next()
@@ -41,6 +50,11 @@ impl<'a> ParseReader<'a> {
        }
    }

    pub fn move_to_peek(&mut self) {
        self.position += self.peek_distance;
        self.peek_distance = 0;
    }

    pub fn consume(
        &mut self,
        expected: TokenType,
@@ -61,13 +75,29 @@ impl<'a> ParseReader<'a> {

    #[inline(always)]
    pub fn reset_peek(&mut self) {
        self.tokens.reset_peek();
        self.peek_distance = 0;
    }

    pub fn create_inner(&self) -> ParseReader<'a> {
        ParseReader {
            tokens: self.tokens,
            position: self.position,
            peek_distance: self.peek_distance,
        }
    }

    pub fn set_from_inner(&mut self, inner: &ParseReader) {
        self.position = inner.position;
        self.peek_distance = inner.peek_distance;
    }
}

pub fn parse(tokens: Vec<LexToken>, log: &mut dyn FnMut(Message, Span)) -> Box<ParsedStatement> {
    assert_ne!(tokens.len(), 0);
    let mut reader = ParseReader {
        tokens: tokens.iter().multipeek(),
        tokens: &tokens,
        position: 0,
        peek_distance: 0,
    };
    parse_script(&mut reader, log)
}
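(Aside, not part of the diff: replacing MultiPeek with an index-based reader makes speculative parsing cheap, since create_inner only copies position and peek_distance. A hypothetical caller following the same try-then-commit pattern that parse_virtprop uses further down:)

    fn try_parse(
        outer_reader: &mut ParseReader,
        log: &mut dyn FnMut(Message, Span),
    ) -> Option<Box<ParsedStatement>> {
        // Work on a throwaway copy of the cursor.
        let mut reader = outer_reader.create_inner();
        let parsed = parse_type(&mut reader, log)?; // any fallible sub-parser
        // Commit the consumed tokens only on success.
        outer_reader.set_from_inner(&reader);
        Some(parsed)
    }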
@@ -76,28 +106,42 @@ fn parse_script(
    reader: &mut ParseReader,
    log: &mut dyn FnMut(Message, Span),
) -> Box<ParsedStatement> {
    let mut vec: Vec<Box<ParsedStatement>> = Vec::new();
    let mut vec: Vec<ParsedStatement> = Vec::new();
    loop {
        let n = reader.peek();
        match n.token_type {
        let token_type = n.token_type.clone();
        let span = n.span;
        match token_type {
            TokenType::NamespaceKeyword => {
                vec.push(parse_namespace(reader, log));
                vec.push(*parse_namespace(reader, log));
            }
            TokenType::InterfaceKeyword => vec.push(*parse_interface(reader, log).unwrap()),
            TokenType::EndOfFile => break,
            TokenType::CloseCurlyBracket => break,
            _ => {
                // Log error?
                if let Some(s) = parse_interface(reader, log) {
                    vec.push(*s);
                } else {
                    log(
                        UnexpectedToken {
                            expected: vec![TokenType::NamespaceKeyword],
                            found: token_type,
                        },
                        span,
                    );
                    reader.next();
                }
            }
        }
    }

    Box::new(ParsedStatement::Script(vec))
    Box::new(ParsedStatement::Script { statements: vec })
}

fn parse_namespace(
fn parse_identifier(
    reader: &mut ParseReader,
    log: &mut dyn FnMut(Message, Span),
) -> Box<ParsedStatement> {
    reader.next(); // Consume namespace
) -> Option<String> {
    let identifier_token = reader.next();
    let s: String;
    match &identifier_token.token_type {
@@ -112,11 +156,347 @@ fn parse_namespace(
                },
                identifier_token.span,
            );
            return Box::new(ParsedStatement::Invalid);
            return None;
        }
    }
    Some(s)
}

fn parse_namespace(
    reader: &mut ParseReader,
    log: &mut dyn FnMut(Message, Span),
) -> Box<ParsedStatement> {
    reader.next(); // Consume namespace
    let identifier = parse_identifier(reader, log);
    if identifier.is_none() {
        return Box::new(ParsedStatement::Invalid);
    }
    reader.consume(TokenType::OpenCurlyBracket, log);
    let script = parse_script(reader, log);
    reader.consume(TokenType::CloseCurlyBracket, log);
    Box::new(ParsedStatement::Namespace(s, script))
    Box::new(ParsedStatement::Namespace {
        identifier: identifier.unwrap(),
        script,
    })
}

fn parse_interface(
    reader: &mut ParseReader,
    log: &mut dyn FnMut(Message, Span),
) -> Option<Box<ParsedStatement>> {
    let mut type_mod: BitFlags<TypeModifier> = BitFlags::empty();
    let identifier: Option<String>;
    let mut has_interface_keyword = false;
    reader.reset_peek();
    loop {
        match &reader.peek().token_type {
            TokenType::ExternalKeyword => type_mod |= TypeModifier::External,
            TokenType::SharedKeyword => type_mod |= TypeModifier::Shared,
            TokenType::Identifier(s) => {
                identifier = Some(s.clone());
                break;
            }
            TokenType::InterfaceKeyword => {
                has_interface_keyword = true;
            }
            _ => {
                return None;
            }
        };
    }
    if !has_interface_keyword {
        return None;
    }
    reader.move_to_peek();
    let mut statements: Vec<ParsedStatement> = Vec::new();
    let mut inherits: Vec<String> = Vec::new();
    loop {
        let t = reader.peek();
        match t.token_type {
            TokenType::Semicolon => {
                reader.next();
                if identifier.is_none() {
                    return Some(Box::new(ParsedStatement::Invalid));
                }
                return Some(Box::new(ParsedStatement::Interface {
                    type_mod,
                    identifier: identifier.unwrap(),
                    inherits,
                    statements,
                }));
            }
            TokenType::Colon | TokenType::OpenCurlyBracket => break,
            TokenType::EndOfFile => {
                log(
                    Message::UnexpectedToken {
                        found: t.token_type.clone(),
                        expected: vec![
                            TokenType::Semicolon,
                            TokenType::Colon,
                            TokenType::OpenCurlyBracket,
                        ],
                    },
                    t.span,
                );
                return Some(Box::new(ParsedStatement::Invalid));
            }
            _ => {
                log(
                    Message::UnexpectedToken {
                        found: t.token_type.clone(),
                        expected: vec![
                            TokenType::Semicolon,
                            TokenType::Colon,
                            TokenType::OpenCurlyBracket,
                        ],
                    },
                    t.span,
                );
            }
        }
    }
    reader.reset_peek();
    if reader.peek().token_type == TokenType::Colon {
        reader.next();
        loop {
            let inherit_identifier = parse_identifier(reader, log);
            if inherit_identifier.is_none() {
                continue;
            }
            inherits.push(inherit_identifier.unwrap());
            if reader.peek().token_type != TokenType::Comma {
                break;
            }
            reader.next();
        }
    }
    reader.consume(TokenType::OpenCurlyBracket, log);
    // TODO: parse interfacemethod
    loop {
        if reader.peek().token_type == TokenType::CloseCurlyBracket {
            break;
        }
        let prop = parse_virtprop(reader, log);
        if prop.is_none() {
            break;
        }
        statements.push(*prop.unwrap());
    }

    reader.consume(TokenType::CloseCurlyBracket, log);
    reader.move_to_peek();
    if identifier.is_none() {
        return Some(Box::new(ParsedStatement::Invalid));
    }

    Some(Box::new(ParsedStatement::Interface {
        type_mod,
        identifier: identifier.unwrap(),
        inherits,
        statements,
    }))
}

fn parse_statblock(
    _outer_reader: &mut ParseReader,
    _log: &mut dyn FnMut(Message, Span),
) -> Option<Box<ParsedStatement>> {
    unimplemented!();
}

fn parse_virtprop(
    outer_reader: &mut ParseReader,
    log: &mut dyn FnMut(Message, Span),
) -> Option<Box<ParsedStatement>> {
    let mut reader = outer_reader.create_inner();
    let mut field_mod: BitFlags<FieldModifier> = BitFlags::empty();
    let property_type: Option<Box<ParsedStatement>>;
    loop {
        let t = reader.peek();
        match t.token_type {
            TokenType::PrivateKeyword => {
                field_mod |= FieldModifier::Private;
                reader.next();
            }
            TokenType::ProtectedKeyword => {
                field_mod |= FieldModifier::Protected;
                reader.next();
            }
            _ => {
                property_type = parse_type(&mut reader, log);
                property_type.as_ref()?;
                break;
            }
        }
    }
    let mut is_handle = false;
    if reader.peek().token_type == TokenType::Ampersand {
        reader.next();
        is_handle = true;
    }
    let identifier = parse_identifier(&mut reader, log);
    identifier.as_ref()?;
    let next = reader.next();
    if next.token_type != TokenType::OpenCurlyBracket {
        return None;
    }

    let mut has_get = false;
    let mut is_get_const = false;
    let mut get_statement: Option<Box<ParsedStatement>> = None;

    let mut has_set = false;
    let mut is_set_const = false;
    let mut set_statement: Option<Box<ParsedStatement>> = None;
    let start_span = next.span;

    loop {
        let next = reader.peek();
        match next.token_type {
            TokenType::GetKeyword => {
                reader.next();
                has_get = true;
                let mut peek = reader.peek();
                if peek.token_type == TokenType::ConstKeyword {
                    reader.next();
                    is_get_const = true;
                    peek = reader.peek();
                }
                if peek.token_type != TokenType::Semicolon {
                    get_statement = parse_statblock(&mut reader, log);
                } else {
                    reader.next();
                }
            }
            TokenType::SetKeyword => {
                reader.next();
                has_set = true;
                let mut peek = reader.peek();
                if peek.token_type == TokenType::ConstKeyword {
                    reader.next();
                    is_set_const = true;
                    peek = reader.peek();
                }
                if peek.token_type != TokenType::Semicolon {
                    set_statement = parse_statblock(&mut reader, log);
                } else {
                    reader.next();
                }
            }
            _ => break,
        }
    }

    let next = reader.next();
    if next.token_type != TokenType::CloseCurlyBracket {
        return None;
    }
    if !has_get && !has_set {
        log(
            Message::EmptyProperty,
            Span {
                start: start_span.start,
                end: next.span.end,
            },
        )
    }

    outer_reader.set_from_inner(&reader);
    Some(Box::new(ParsedStatement::VirtProp {
        field_mod,
        property_type: property_type.unwrap(),
        identifier: identifier.unwrap(),
        is_handle,
        has_get,
        is_get_const,
        get_statement,
        has_set,
        is_set_const,
        set_statement,
    }))
}

fn parse_type(
    reader: &mut ParseReader,
    log: &mut dyn FnMut(Message, Span),
) -> Option<Box<ParsedStatement>> {
    let f = reader.peek();
    let mut is_const = false;
    if f.token_type == TokenType::ConstKeyword {
        reader.next();
        is_const = true;
    }
    let scope = parse_scope(reader, log);
    let identifier = parse_identifier(reader, log);
    // if none, we already logged an error. Return None.
    identifier.as_ref()?;

    // TODO: Generics

    let mut modifiers: Vec<ParsedTypeModifier> = Vec::new();
    loop {
        let n = reader.peek();
        match n.token_type {
            TokenType::OpenBlockBracket => {
                reader.next();
                reader.consume(TokenType::CloseBlockBracket, log);
                modifiers.push(ParsedTypeModifier::Array);
            }
            TokenType::Ampersand => {
                reader.next();
                if reader.peek().token_type == TokenType::ConstKeyword {
                    reader.next();
                    modifiers.push(ParsedTypeModifier::ConstHandle);
                } else {
                    modifiers.push(ParsedTypeModifier::Handle);
                }
            }
            _ => break,
        }
    }

    Some(Box::new(ParsedStatement::Type {
        is_const,
        scope,
        identifier: identifier.unwrap(),
        modifiers,
    }))
}

fn parse_scope(
    outer_reader: &mut ParseReader,
    _log: &mut dyn FnMut(Message, Span),
) -> Option<Box<ParsedStatement>> {
    let mut reader = outer_reader.create_inner();
    let is_global = reader.peek().token_type == TokenType::ColonColon;
    if is_global {
        reader.next();
    }
    let mut scope: Vec<String> = Vec::new();
    loop {
        let n = reader.peek();
        if let TokenType::Identifier(s) = &n.token_type {
            let identifier = s.to_string();
            if reader.peek().token_type == TokenType::ColonColon {
                reader.next();
                reader.next();
                scope.push(identifier);
            } else {
                break;
            }
        } else {
            break;
        }
    }
    // TODO: generics
    if !is_global && scope.is_empty() {
        None
    } else {
        outer_reader.set_from_inner(&reader);
        Some(Box::new(ParsedStatement::Scope {
            is_global,
            scope,
            generic_types: None,
        }))
    }
}
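(Aside, not part of the diff: end to end, the new virtprop support should let a snippet like the one below lex and parse into an Interface containing a VirtProp, mirroring the token stream that test_interface_with_virtprop builds by hand further down; the exact crate paths for lex and parse are assumed here.)

    let source = "interface foo { bar get_bar { get; set; } }";
    let mut on_error = |message: Message, _span: Span| {
        std::panic::panic_any(message.stringify());
    };
    let tokens = lex(source, &mut on_error);
    let script = parse(tokens, &mut on_error);
    // Expected shape: ParsedStatement::Script { statements: [Interface { .. }] }.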
@@ -1,5 +1,45 @@
use super::parsed_type_modifier::ParsedTypeModifier;
use crate::modifiers::{FieldModifier, TypeModifier};
use enumflags2::BitFlags;

pub enum ParsedStatement {
    Invalid,
    Script(Vec<Box<ParsedStatement>>),
    Namespace(String, Box<ParsedStatement>),
    Script {
        statements: Vec<ParsedStatement>,
    },
    Namespace {
        identifier: String,
        script: Box<ParsedStatement>,
    },
    Interface {
        type_mod: BitFlags<TypeModifier>,
        identifier: String,
        inherits: Vec<String>,
        statements: Vec<ParsedStatement>,
    },
    Scope {
        is_global: bool,
        scope: Vec<String>,
        generic_types: Option<Vec<ParsedStatement>>,
    },
    VirtProp {
        field_mod: BitFlags<FieldModifier>,
        property_type: Box<ParsedStatement>,
        identifier: String,
        is_handle: bool,

        has_get: bool,
        is_get_const: bool,
        get_statement: Option<Box<ParsedStatement>>,

        has_set: bool,
        is_set_const: bool,
        set_statement: Option<Box<ParsedStatement>>,
    },
    Type {
        is_const: bool,
        scope: Option<Box<ParsedStatement>>,
        identifier: String,
        modifiers: Vec<ParsedTypeModifier>,
    },
}
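(Aside, not part of the diff: with the move from tuple to struct variants, construction and matching now name every field. A minimal sketch; the "foo" identifier is illustrative only:)

    let ns = ParsedStatement::Namespace {
        identifier: "foo".to_string(),
        script: Box::new(ParsedStatement::Script { statements: Vec::new() }),
    };
    if let ParsedStatement::Namespace { identifier, .. } = &ns {
        assert_eq!(identifier, "foo");
    }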
@@ -0,0 +1,5 @@
pub enum ParsedTypeModifier {
    Array,
    Handle,
    ConstHandle,
}
@@ -1,7 +1,9 @@
use super::parse;
use super::parsed_statement::ParsedStatement;
use crate::modifiers::TypeModifier;
use crate::parsing::lexer::lex_tokens::{LexToken, TokenType};
use crate::span::Span;
use enumflags2::BitFlags;

fn create_tokens(types: Vec<TokenType>) -> Vec<LexToken> {
    let mut v = Vec::with_capacity(types.len());
@@ -27,15 +29,15 @@ fn test_empty_namespace() {
            TokenType::EndOfFile,
        ]),
        &mut |_message, _span| {
            unimplemented!();
            std::panic::panic_any(_message.stringify());
        },
    );
    if let ParsedStatement::Script(inner) = script.as_ref() {
        assert_eq!(1, inner.len());
        if let ParsedStatement::Namespace(identifier, inner_script) = inner[0].as_ref() {
    if let ParsedStatement::Script { statements } = script.as_ref() {
        assert_eq!(1, statements.len());
        if let ParsedStatement::Namespace { identifier, script } = &statements[0] {
            assert_eq!(identifier, "foo");
            if let ParsedStatement::Script(inner) = inner_script.as_ref() {
                assert_eq!(0, inner.len());
            if let ParsedStatement::Script { statements } = script.as_ref() {
                assert_eq!(0, statements.len());
            } else {
                unreachable!();
            }
@@ -46,3 +48,162 @@ fn test_empty_namespace() {
        unreachable!();
    }
}

#[test]
fn test_empty_interface() {
    let script = parse(
        create_tokens(vec![
            TokenType::InterfaceKeyword,
            TokenType::WhiteSpace,
            TokenType::Identifier("foo".to_string()),
            TokenType::WhiteSpace,
            TokenType::OpenCurlyBracket,
            TokenType::CloseCurlyBracket,
            TokenType::EndOfFile,
        ]),
        &mut |_message, _span| {
            std::panic::panic_any(_message.stringify());
        },
    );
    if let ParsedStatement::Script { statements } = script.as_ref() {
        assert_eq!(1, statements.len());
        if let ParsedStatement::Interface {
            type_mod,
            identifier,
            inherits,
            statements,
        } = &statements[0]
        {
            assert!(type_mod.is_empty());
            assert_eq!(identifier, "foo");
            assert_eq!(inherits.len(), 0);
            assert_eq!(statements.len(), 0);
        } else {
            unreachable!()
        }
    } else {
        unreachable!();
    }
}

#[test]
fn test_empty_external_shared_interface() {
    let script = parse(
        create_tokens(vec![
            TokenType::ExternalKeyword,
            TokenType::SharedKeyword,
            TokenType::InterfaceKeyword,
            TokenType::WhiteSpace,
            TokenType::Identifier("foo".to_string()),
            TokenType::WhiteSpace,
            TokenType::OpenCurlyBracket,
            TokenType::CloseCurlyBracket,
            TokenType::EndOfFile,
        ]),
        &mut |_message, _span| {
            std::panic::panic_any(_message.stringify());
        },
    );
    if let ParsedStatement::Script { statements } = script.as_ref() {
        assert_eq!(1, statements.len());
        if let ParsedStatement::Interface {
            type_mod,
            identifier,
            inherits,
            statements,
        } = &statements[0]
        {
            assert!(!type_mod.is_empty());
            assert!(type_mod.contains(TypeModifier::External));
            assert!(type_mod.contains(TypeModifier::Shared));
            assert_eq!(identifier, "foo");
            assert_eq!(inherits.len(), 0);
            assert_eq!(statements.len(), 0);
        } else {
            unreachable!()
        }
    } else {
        unreachable!();
    }
}

#[test]
fn test_interface_with_virtprop() {
    let script = parse(
        create_tokens(vec![
            TokenType::InterfaceKeyword,
            TokenType::WhiteSpace,
            TokenType::Identifier("foo".to_string()),
            TokenType::WhiteSpace,
            TokenType::OpenCurlyBracket,
            TokenType::Identifier("bar".to_string()),
            TokenType::Identifier("get_bar".to_string()),
            TokenType::OpenCurlyBracket,
            TokenType::GetKeyword,
            TokenType::Semicolon,
            TokenType::SetKeyword,
            TokenType::Semicolon,
            TokenType::CloseCurlyBracket,
            TokenType::CloseCurlyBracket,
            TokenType::EndOfFile,
        ]),
        &mut |_message, _span| {
            std::panic::panic_any(_message.stringify());
        },
    );
    if let ParsedStatement::Script { statements } = script.as_ref() {
        assert_eq!(1, statements.len());
        if let ParsedStatement::Interface {
            type_mod,
            identifier,
            inherits,
            statements,
        } = &statements[0]
        {
            assert!(type_mod.is_empty());
            assert_eq!(identifier, "foo");
            assert_eq!(inherits.len(), 0);
            assert_eq!(statements.len(), 1);
            if let ParsedStatement::VirtProp {
                field_mod,
                property_type,
                identifier,
                is_handle,
                has_get,
                is_get_const,
                get_statement,
                has_set,
                is_set_const,
                set_statement,
            } = &statements[0]
            {
                assert_eq!(*field_mod, BitFlags::empty());
                if let ParsedStatement::Type {
                    is_const,
                    identifier,
                    ..
                } = property_type.as_ref()
                {
                    assert!(!is_const);
                    assert_eq!(identifier, "bar");
                } else {
                    unreachable!()
                }
                assert_eq!(identifier, "get_bar");
                assert!(has_get);
                assert!(!is_get_const);
                assert!(has_set);
                assert!(!is_set_const);
                assert!(get_statement.is_none());
                assert!(set_statement.is_none());
                assert!(!is_handle);
            } else {
                unreachable!()
            }
        } else {
            unreachable!()
        }
    } else {
        unreachable!();
    }
}