Adds parsing for class and funcdef

master
Deukhoofd 10 months ago
parent 2e87674720
commit b1d516b268
Signed by: Deukhoofd
GPG Key ID: F63E044490819F6F
  1. 93
      src/integration_tests/empty_class_declaration.rs
  2. 2
      src/integration_tests/mod.rs
  3. 173
      src/integration_tests/multiple_inheritance_class.rs
  4. 53
      src/integration_tests/test_cases/empty_class_declaration/lex_tokens.json
  5. 14
      src/integration_tests/test_cases/empty_class_declaration/parsed_tree.json
  6. 1
      src/integration_tests/test_cases/empty_class_declaration/script.ses
  7. 129
      src/integration_tests/test_cases/multiple_inheritance_class/lex_tokens.json
  8. 18
      src/integration_tests/test_cases/multiple_inheritance_class/parsed_tree.json
  9. 1
      src/integration_tests/test_cases/multiple_inheritance_class/script.ses
  10. 1
      src/modifiers.rs
  11. 196
      src/parsing/parser/mod.rs
  12. 13
      src/parsing/parser/parsed_statement.rs

@ -0,0 +1,93 @@
////////////////////////////
// Automatically Generated//
////////////////////////////
use crate::logger::messages::Message;
use crate::parsing::lexer::lex;
use crate::parsing::lexer::lex_tokens::LexToken;
use crate::parsing::parser::parse;
use crate::parsing::parser::parsed_statement::ParsedStatement;
use crate::span::Span;
/// Error sink for the lexer/parser callbacks used by this integration test:
/// any reported message aborts the test immediately by panicking with the
/// message's rendered text. The span is ignored.
fn panic_on_error(msg: Message, _: Span) {
    let rendered = msg.stringify();
    std::panic::panic_any(rendered);
}
// Integration test: lex and parse an empty class declaration
// ("class Foo {}"). Expected lexer tokens and the expected parse tree are
// kept inline as JSON and compared after serde deserialization.
// NOTE: this file is auto-generated (see header); keep code in sync with
// the generator rather than editing logic by hand.
#[test]
fn integration_empty_class_declaration() {
let script = "class Foo {}";
// Lex the script; any lexer diagnostic panics via panic_on_error.
let lexed_tokens = lex(script, &mut panic_on_error);
// Dump the actual tokens so a failing run shows what was produced.
println!("{}", serde_json::to_string(&lexed_tokens).unwrap());
// Expected stream: 'class', whitespace, identifier "Foo", whitespace,
// '{', '}', end-of-file. Spans are byte offsets into `script`.
let expected_tokens: Vec<LexToken> =
serde_json::from_str(r#"[
{
"token_type": "ClassKeyword",
"span": {
"start": 0,
"end": 5
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 5,
"end": 6
}
},
{
"token_type": {
"Identifier": "Foo"
},
"span": {
"start": 6,
"end": 9
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 9,
"end": 10
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 10,
"end": 11
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 11,
"end": 12
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 12,
"end": 12
}
}
]"#).unwrap();
assert_eq!(lexed_tokens, expected_tokens);
// Parse the token stream; parser diagnostics also panic via panic_on_error.
let parsed_tree = parse(lexed_tokens, &mut panic_on_error);
println!("{}", serde_json::to_string(&parsed_tree).unwrap());
// Expected tree: a Script holding one ClassDeclaration named "Foo" with
// no modifiers (bitflags == 0), no base types, and an empty body.
let expected_tree: Box<ParsedStatement> =
serde_json::from_str(r#"{
"Script": {
"statements": [
{
"ClassDeclaration": {
"modifiers": 0,
"name": "Foo",
"inherits": [],
"statements": []
}
}
]
}
}"#).unwrap();
assert_eq!(parsed_tree, expected_tree);
}

@ -1,2 +1,4 @@
mod enum_definition;
mod multiple_inheritance_class;
mod empty_class_declaration;
mod add_function;

@ -0,0 +1,173 @@
////////////////////////////
// Automatically Generated//
////////////////////////////
use crate::logger::messages::Message;
use crate::parsing::lexer::lex;
use crate::parsing::lexer::lex_tokens::LexToken;
use crate::parsing::parser::parse;
use crate::parsing::parser::parsed_statement::ParsedStatement;
use crate::span::Span;
/// Turns any lexer/parser diagnostic into an immediate test failure by
/// panicking with the stringified message; the reported span is discarded.
fn panic_on_error(msg: Message, _: Span) {
    std::panic::panic_any(msg.stringify())
}
// Integration test: lex and parse a class declaration with multiple base
// types ("class Foo : Zom, Aar, Bar {}"). Expected lexer tokens and the
// expected parse tree are inline JSON compared after serde deserialization.
// NOTE: this file is auto-generated (see header); keep code in sync with
// the generator rather than editing logic by hand.
#[test]
fn integration_multiple_inheritance_class() {
let script = "class Foo : Zom, Aar, Bar {}";
// Lex the script; any lexer diagnostic panics via panic_on_error.
let lexed_tokens = lex(script, &mut panic_on_error);
// Dump the actual tokens so a failing run shows what was produced.
println!("{}", serde_json::to_string(&lexed_tokens).unwrap());
// Expected stream: 'class' Foo ':' Zom ',' Aar ',' Bar '{' '}' EOF, with
// interleaved whitespace tokens. Spans are byte offsets into `script`.
let expected_tokens: Vec<LexToken> =
serde_json::from_str(r#"[
{
"token_type": "ClassKeyword",
"span": {
"start": 0,
"end": 5
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 5,
"end": 6
}
},
{
"token_type": {
"Identifier": "Foo"
},
"span": {
"start": 6,
"end": 9
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 9,
"end": 10
}
},
{
"token_type": "Colon",
"span": {
"start": 10,
"end": 11
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 11,
"end": 12
}
},
{
"token_type": {
"Identifier": "Zom"
},
"span": {
"start": 12,
"end": 15
}
},
{
"token_type": "Comma",
"span": {
"start": 15,
"end": 16
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 16,
"end": 17
}
},
{
"token_type": {
"Identifier": "Aar"
},
"span": {
"start": 17,
"end": 20
}
},
{
"token_type": "Comma",
"span": {
"start": 20,
"end": 21
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 21,
"end": 22
}
},
{
"token_type": {
"Identifier": "Bar"
},
"span": {
"start": 22,
"end": 25
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 25,
"end": 26
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 26,
"end": 27
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 27,
"end": 28
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 28,
"end": 28
}
}
]"#).unwrap();
assert_eq!(lexed_tokens, expected_tokens);
// Parse the token stream; parser diagnostics also panic via panic_on_error.
let parsed_tree = parse(lexed_tokens, &mut panic_on_error);
println!("{}", serde_json::to_string(&parsed_tree).unwrap());
// Expected tree: one ClassDeclaration named "Foo" inheriting, in source
// order, from Zom, Aar and Bar, with no modifiers and an empty body.
let expected_tree: Box<ParsedStatement> =
serde_json::from_str(r#"{
"Script": {
"statements": [
{
"ClassDeclaration": {
"modifiers": 0,
"name": "Foo",
"inherits": [
"Zom",
"Aar",
"Bar"
],
"statements": []
}
}
]
}
}"#).unwrap();
assert_eq!(parsed_tree, expected_tree);
}

@ -0,0 +1,53 @@
[
{
"token_type": "ClassKeyword",
"span": {
"start": 0,
"end": 5
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 5,
"end": 6
}
},
{
"token_type": {
"Identifier": "Foo"
},
"span": {
"start": 6,
"end": 9
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 9,
"end": 10
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 10,
"end": 11
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 11,
"end": 12
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 12,
"end": 12
}
}
]

@ -0,0 +1,14 @@
{
"Script": {
"statements": [
{
"ClassDeclaration": {
"modifiers": 0,
"name": "Foo",
"inherits": [],
"statements": []
}
}
]
}
}

@ -0,0 +1,129 @@
[
{
"token_type": "ClassKeyword",
"span": {
"start": 0,
"end": 5
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 5,
"end": 6
}
},
{
"token_type": {
"Identifier": "Foo"
},
"span": {
"start": 6,
"end": 9
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 9,
"end": 10
}
},
{
"token_type": "Colon",
"span": {
"start": 10,
"end": 11
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 11,
"end": 12
}
},
{
"token_type": {
"Identifier": "Zom"
},
"span": {
"start": 12,
"end": 15
}
},
{
"token_type": "Comma",
"span": {
"start": 15,
"end": 16
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 16,
"end": 17
}
},
{
"token_type": {
"Identifier": "Aar"
},
"span": {
"start": 17,
"end": 20
}
},
{
"token_type": "Comma",
"span": {
"start": 20,
"end": 21
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 21,
"end": 22
}
},
{
"token_type": {
"Identifier": "Bar"
},
"span": {
"start": 22,
"end": 25
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 25,
"end": 26
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 26,
"end": 27
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 27,
"end": 28
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 28,
"end": 28
}
}
]

@ -0,0 +1,18 @@
{
"Script": {
"statements": [
{
"ClassDeclaration": {
"modifiers": 0,
"name": "Foo",
"inherits": [
"Zom",
"Aar",
"Bar"
],
"statements": []
}
}
]
}
}

@ -11,6 +11,7 @@ pub enum TypeModifier {
Shared = 0x02,
Abstract = 0x04,
Final = 0x08,
Mixin = 0x10,
}
#[bitflags]

@ -163,16 +163,15 @@ fn parse_script(
vec.push(*parse_namespace(reader, log));
}
TokenType::InterfaceKeyword => vec.push(*parse_interface(reader, log).unwrap()),
TokenType::EnumKeyword => vec.push(*parse_enum(reader, log).unwrap()),
TokenType::EndOfFile => break,
TokenType::CloseCurlyBracket => break,
_ => {
if let Some(s) = parse_enum(reader, log) {
vec.push(*s);
}
// else if let Some(s) = parse_class(reader, log) {
// vec.push(*s);
// }
else if let Some(s) = parse_interface(reader, log) {
} else if let Some(s) = parse_class(reader, log) {
vec.push(*s);
} else if let Some(s) = parse_interface(reader, log) {
vec.push(*s);
} else if let Some(s) = parse_virtprop(reader, log) {
vec.push(*s);
@ -219,6 +218,8 @@ fn parse_interface(
outer_reader: &mut ParseReader,
log: &mut dyn FnMut(Message, Span),
) -> Option<Box<ParsedStatement>> {
// interface ::= {'external' | 'shared'} 'interface' identifier (';' | ([':' identifier {',' identifier}] '{' {virtprop | interfacemethod} '}'));
let mut type_mod: BitFlags<TypeModifier> = BitFlags::empty();
let identifier: Option<String>;
let mut has_interface_keyword = false;
@ -304,14 +305,16 @@ fn parse_interface(
}
}
reader.consume(TokenType::OpenCurlyBracket, log);
// TODO: parse interfacemethod
loop {
if reader.peek().token_type == TokenType::CloseCurlyBracket {
break;
}
let prop = parse_virtprop(&mut reader, log);
let mut prop = parse_virtprop(&mut reader, log);
if prop.is_none() {
break;
prop = parse_interface_method(&mut reader, log);
if prop.is_none() {
break;
}
}
statements.push(*prop.unwrap());
}
@ -337,27 +340,22 @@ fn parse_interface_method(
// interfacemethod ::= type ['&'] identifier paramlist ['const'] ';';
let mut reader = outer_reader.create_inner();
let return_type = parse_type(&mut reader, log);
if return_type.is_none() {
return None;
}
return_type.as_ref()?;
let returns_reference = reader.peek().token_type == TokenType::Ampersand;
if returns_reference {
reader.next();
}
let identifier = parse_identifier(&mut reader, log, true);
if identifier.is_none() {
return None;
}
identifier.as_ref()?;
let param_list = parse_paramlist(&mut reader, log);
if param_list.is_none() {
return None;
}
param_list.as_ref()?;
let is_const = reader.peek().token_type == TokenType::ConstKeyword;
if is_const {
reader.next();
}
reader.consume(TokenType::Semicolon, log);
outer_reader.set_from_inner(&reader);
Some(Box::new(ParsedStatement::InterfaceMethod {
return_type: return_type.unwrap(),
returns_reference,
@ -379,16 +377,14 @@ fn parse_typedef(
let mut left_type = parse_primtype(reader, log);
if left_type.is_none() {
let left_type_identifier = parse_identifier(reader, log, false);
if left_type_identifier.is_some() {
if let Some(s) = left_type_identifier {
left_type = Some(Box::new(ParsedStatement::DataTypeIdentifier {
identifier: left_type_identifier.unwrap(),
identifier: s,
}));
}
}
let right_type_identifier = parse_identifier(reader, log, false);
if right_type_identifier.is_none() {
return None;
}
right_type_identifier.as_ref()?;
let right_type = Box::new(ParsedStatement::DataTypeIdentifier {
identifier: right_type_identifier.unwrap(),
});
@ -531,20 +527,156 @@ fn parse_enum(
}
fn parse_class(
_outer_reader: &mut ParseReader,
_log: &mut dyn FnMut(Message, Span),
reader: &mut ParseReader,
log: &mut dyn FnMut(Message, Span),
) -> Option<Box<ParsedStatement>> {
// class ::= {'shared' | 'abstract' | 'final' | 'external' | 'mixin'} 'class' identifier
// (';' | ([':' identifier {',' identifier}] '{' {virtprop | func | var | funcdef | class} '}'));
unimplemented!();
let mut inner_reader = reader.create_inner();
let mut modifiers: BitFlags<TypeModifier> = BitFlags::empty();
loop {
match inner_reader.peek().token_type {
TokenType::SharedKeyword => {
inner_reader.next();
modifiers |= TypeModifier::Shared;
}
TokenType::AbstractKeyword => {
inner_reader.next();
modifiers |= TypeModifier::Abstract;
}
TokenType::FinalKeyword => {
inner_reader.next();
modifiers |= TypeModifier::Final;
}
TokenType::ExternalKeyword => {
inner_reader.next();
modifiers |= TypeModifier::External;
}
TokenType::MixinKeyword => {
inner_reader.next();
modifiers |= TypeModifier::Mixin;
}
_ => {
break;
}
}
}
if inner_reader.peek().token_type != TokenType::ClassKeyword {
return None;
}
inner_reader.next();
reader.set_from_inner(&inner_reader);
let name = parse_identifier(reader, log, false);
// (';' |
// (
// [':' identifier {',' identifier}]
// '{' {virtprop | func | var | funcdef | class} '}'
// )
// )
// FIXME: Do we need predeclaring class (class foo;)?
let mut inherits = Vec::new();
if reader.peek().token_type == TokenType::Colon {
reader.next();
loop {
let inherit = parse_identifier(reader, log, false);
if let Some(s) = inherit {
inherits.push(s);
}
if reader.peek().token_type != TokenType::Comma {
break;
}
reader.next();
}
}
reader.consume(TokenType::OpenCurlyBracket, log);
let mut statements = Vec::new();
loop {
let mut statement = parse_virtprop(reader, log);
if statement.is_none() {
statement = parse_func(reader, log);
if statement.is_none() {
statement = parse_var(reader, log);
if statement.is_none() {
statement = parse_funcdef(reader, log);
if statement.is_none() {
statement = parse_class(reader, log);
}
}
}
}
if let Some(s) = statement {
statements.push(s);
} else {
break;
}
}
reader.consume(TokenType::CloseCurlyBracket, log);
Some(Box::new(ParsedStatement::ClassDeclaration {
modifiers,
name: name.unwrap(),
inherits,
statements,
}))
}
fn parse_funcdef(
_outer_reader: &mut ParseReader,
_log: &mut dyn FnMut(Message, Span),
outer_reader: &mut ParseReader,
log: &mut dyn FnMut(Message, Span),
) -> Option<Box<ParsedStatement>> {
// funcdef ::= {'external' | 'shared'} 'funcdef' type ['&'] identifier paramlist ';'
unimplemented!();
let mut reader = outer_reader.create_inner();
let mut modifiers: BitFlags<TypeModifier> = BitFlags::empty();
loop {
match reader.peek().token_type {
TokenType::SharedKeyword => {
reader.next();
modifiers |= TypeModifier::Shared;
}
TokenType::ExternalKeyword => {
reader.next();
modifiers |= TypeModifier::External;
}
_ => {
break;
}
}
}
if reader.peek().token_type != TokenType::FuncDefKeyword {
return None;
}
reader.next();
let return_type = parse_type(&mut reader, log);
return_type.as_ref()?;
let returns_reference = reader.peek().token_type == TokenType::Ampersand;
if returns_reference {
reader.next();
}
let identifier = parse_identifier(&mut reader, log, true);
identifier.as_ref()?;
let param_list = parse_paramlist(&mut reader, log);
param_list.as_ref()?;
if reader.peek().token_type != TokenType::Semicolon {
return None;
}
reader.next();
outer_reader.set_from_inner(&reader);
Some(Box::new(ParsedStatement::FuncDefDeclaration {
modifiers,
returns_reference,
return_type,
name: identifier.unwrap(),
param_list: param_list.unwrap(),
}))
}
fn parse_func(
@ -788,7 +920,7 @@ fn parse_paramlist(
fn parse_funcattr(
reader: &mut ParseReader,
log: &mut dyn FnMut(Message, Span),
_log: &mut dyn FnMut(Message, Span),
) -> BitFlags<FuncAttr> {
// funcattr ::= {'override' | 'final' | 'explicit' | 'property'};
let mut func_attr: BitFlags<FuncAttr> = BitFlags::empty();
@ -1067,8 +1199,8 @@ fn parse_for(
break;
}
let assign = parse_assign(reader, log);
if assign.is_some() {
increment_expressions.push(assign.unwrap());
if let Some(s) = assign {
increment_expressions.push(s);
}
if reader.peek().token_type != TokenType::Comma {
break;
@ -1433,7 +1565,7 @@ fn parse_lambda(
fn parse_typemod(
reader: &mut ParseReader,
log: &mut dyn FnMut(Message, Span),
_log: &mut dyn FnMut(Message, Span),
) -> Option<BitFlags<ReferenceModifier>> {
// typemod ::= ['&' ['in' | 'out' | 'inout']];
if reader.peek().token_type == TokenType::Ampersand {

@ -224,4 +224,17 @@ pub enum ParsedStatement {
func_attr: BitFlags<FuncAttr>,
block: Option<Box<ParsedStatement>>,
},
FuncDefDeclaration {
modifiers: BitFlags<TypeModifier>,
returns_reference: bool,
return_type: Option<Box<ParsedStatement>>,
name: String,
param_list: Box<ParsedStatement>,
},
ClassDeclaration {
modifiers: BitFlags<TypeModifier>,
name: String,
inherits: Vec<String>,
statements: Vec<Box<ParsedStatement>>,
},
}

Loading…
Cancel
Save