Adds func parsing, integration tests, and some general fixes related to the integration tests

This commit is contained in:
Deukhoofd 2022-04-06 22:39:25 +02:00
parent f277ecb72f
commit 373794a830
Signed by: Deukhoofd
GPG Key ID: F63E044490819F6F
23 changed files with 1873 additions and 42 deletions
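All of the new integration tests follow the same lex-then-parse shape. Below is a minimal hand-written sketch of that pipeline, distilled from the generated tests in this commit (the test name is hypothetical, and the JSON fixture comparisons are elided):

use crate::logger::messages::Message;
use crate::parsing::lexer::lex;
use crate::parsing::parser::parse;
use crate::span::Span;

// Any lexer or parser diagnostic fails the test by panicking with the stringified message.
fn panic_on_error(msg: Message, _: Span) {
    std::panic::panic_any(msg.stringify());
}

#[test]
fn integration_sketch() {
    let script = "int add(int a, int b) { return a + b; }";
    // Lex the script; the generated tests then assert_eq! the tokens against lex_tokens.json.
    let lexed_tokens = lex(script, &mut panic_on_error);
    // Parse the tokens; likewise compared against parsed_tree.json via serde_json.
    let _parsed_tree = parse(lexed_tokens, &mut panic_on_error);
}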

View File

@@ -3,12 +3,20 @@ name = "seraph_script"
version = "0.1.0"
authors = ["Deukhoofd <Deukhoofd@gmail.com>"]
edition = "2021"
build = "src/integration_tests/build.rs"
[dependencies]
itertools = "0.10.0"
enumflags2 = "0.7.1"
enumflags2 = { version = "0.7.1", features = ["serde"] }
backtrace = "0.3.63"
[dev-dependencies]
glob = "0.3.0"
serde = "1.0.136"
serde_derive = "1.0.136"
serde_json = "1.0.79"
pretty_assertions = "1.2.1"
[profile.release]
lto = true
codegen-units = 1

View File

@@ -0,0 +1,353 @@
////////////////////////////
// Automatically Generated//
////////////////////////////
use crate::logger::messages::Message;
use crate::parsing::lexer::lex;
use crate::parsing::lexer::lex_tokens::LexToken;
use crate::parsing::parser::parse;
use crate::parsing::parser::parsed_statement::ParsedStatement;
use crate::span::Span;
fn panic_on_error(msg: Message, _: Span) {
std::panic::panic_any(msg.stringify());
}
#[test]
fn integration_add_function() {
let script = "int add(int a, int b) {
return a + b;
}";
let lexed_tokens = lex(script, &mut panic_on_error);
println!("{}", serde_json::to_string(&lexed_tokens).unwrap());
let expected_tokens: Vec<LexToken> =
serde_json::from_str(r#"[
{
"token_type": "IntKeyword",
"span": {
"start": 0,
"end": 3
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 3,
"end": 4
}
},
{
"token_type": {
"Identifier": "add"
},
"span": {
"start": 4,
"end": 7
}
},
{
"token_type": "OpenBracket",
"span": {
"start": 7,
"end": 8
}
},
{
"token_type": "IntKeyword",
"span": {
"start": 8,
"end": 11
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 11,
"end": 12
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 12,
"end": 13
}
},
{
"token_type": "Comma",
"span": {
"start": 13,
"end": 14
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 14,
"end": 15
}
},
{
"token_type": "IntKeyword",
"span": {
"start": 15,
"end": 18
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 18,
"end": 19
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 19,
"end": 20
}
},
{
"token_type": "CloseBracket",
"span": {
"start": 20,
"end": 21
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 21,
"end": 22
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 22,
"end": 23
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 23,
"end": 24
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 24,
"end": 25
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 25,
"end": 26
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 26,
"end": 27
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 27,
"end": 28
}
},
{
"token_type": "ReturnKeyword",
"span": {
"start": 28,
"end": 34
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 34,
"end": 35
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 35,
"end": 36
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 36,
"end": 37
}
},
{
"token_type": "Plus",
"span": {
"start": 37,
"end": 38
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 38,
"end": 39
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 39,
"end": 40
}
},
{
"token_type": "Semicolon",
"span": {
"start": 40,
"end": 41
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 41,
"end": 42
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 42,
"end": 43
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 43,
"end": 43
}
}
]"#).unwrap();
assert_eq!(lexed_tokens, expected_tokens);
let parsed_tree = parse(lexed_tokens, &mut panic_on_error);
println!("{}", serde_json::to_string(&parsed_tree).unwrap());
let expected_tree: Box<ParsedStatement> =
serde_json::from_str(r#"{
"Script": {
"statements": [
{
"FuncDeclaration": {
"modifiers": 0,
"field_mod": null,
"is_destructor": false,
"returns_reference": false,
"return_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"name": "add",
"param_list": {
"ParamList": {
"parameters": [
{
"parameter_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"type_mod": null,
"identifier": "a",
"default": null
},
{
"parameter_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"type_mod": null,
"identifier": "b",
"default": null
}
]
}
},
"is_const": false,
"func_attr": 0,
"block": {
"StatBlock": {
"statements": [
{
"ReturnStatement": {
"expression": {
"BinaryExpr": {
"left": {
"VarAccess": {
"scope": null,
"identifier": "a"
}
},
"operator": "Addition",
"right": {
"VarAccess": {
"scope": null,
"identifier": "b"
}
}
}
}
}
}
]
}
}
}
}
]
}
}"#).unwrap();
assert_eq!(parsed_tree, expected_tree);
}

View File

@@ -0,0 +1,108 @@
use std::fs;
use std::fs::File;
use std::io::Write;
use std::path::Path;
fn main() {
let paths = fs::read_dir("src/integration_tests/test_cases/").unwrap();
let mod_file_path = Path::new("src/integration_tests/mod.rs");
let mut mod_file = File::create(mod_file_path).unwrap();
for path_opt in paths {
// Skip directory entries that failed to read, rather than unwrapping them.
let path = match path_opt {
Ok(p) => p,
Err(..) => continue,
};
if !path.file_type().unwrap().is_dir() {
continue;
}
let p = path.path();
let mut script_path = p.clone();
script_path.push("script.ses");
if !script_path.exists() {
continue;
}
let script = fs::read_to_string(script_path).unwrap();
let test_name = p.file_stem().unwrap().to_str().unwrap();
writeln!(mod_file, "mod {name};", name = test_name).unwrap();
let testfile_path = format!("src/integration_tests/{}.rs", test_name);
let mut testfile = File::create(Path::new(&testfile_path)).unwrap();
write!(
testfile,
r#"////////////////////////////
// Automatically Generated//
////////////////////////////
use crate::logger::messages::Message;
use crate::parsing::lexer::lex;
use crate::parsing::lexer::lex_tokens::LexToken;
use crate::parsing::parser::parse;
use crate::parsing::parser::parsed_statement::ParsedStatement;
use crate::span::Span;
fn panic_on_error(msg: Message, _: Span) {{
std::panic::panic_any(msg.stringify());
}}"#
)
.unwrap();
write!(
testfile,
r#"
#[test]
fn integration_{name}() {{
let script = "{script}";
let lexed_tokens = lex(script, &mut panic_on_error);
println!("{{}}", serde_json::to_string(&lexed_tokens).unwrap());
"#,
name = test_name,
script = script.as_str()
)
.unwrap();
let mut tokens_file_path = p.clone();
tokens_file_path.push("lex_tokens.json");
if tokens_file_path.exists() {
let tokens_json = fs::read_to_string(tokens_file_path).unwrap();
write!(
testfile,
r##" let expected_tokens: Vec<LexToken> =
serde_json::from_str(r#"{tokens}"#).unwrap();
assert_eq!(lexed_tokens, expected_tokens);
"##,
tokens = tokens_json.as_str()
)
.unwrap();
}
write!(
testfile,
r##"
let parsed_tree = parse(lexed_tokens, &mut panic_on_error);
println!("{{}}", serde_json::to_string(&parsed_tree).unwrap());
"##
)
.unwrap();
let mut parsed_tree_path = p.clone();
parsed_tree_path.push("parsed_tree.json");
if parsed_tree_path.exists() {
let parsed_tree_json = fs::read_to_string(parsed_tree_path).unwrap();
write!(
testfile,
r##" let expected_tree: Box<ParsedStatement> =
serde_json::from_str(r#"{expected_tree}"#).unwrap();
assert_eq!(parsed_tree, expected_tree);
"##,
expected_tree = parsed_tree_json.as_str()
)
.unwrap();
}
write!(testfile, "}}").unwrap();
}
}
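Adding a new test case under this scheme should only require dropping files into a fresh directory; the case name below is hypothetical:

// src/integration_tests/test_cases/my_case/script.ses       (required: the script to lex and parse)
// src/integration_tests/test_cases/my_case/lex_tokens.json   (optional: expected lexer output)
// src/integration_tests/test_cases/my_case/parsed_tree.json  (optional: expected parse tree)
// On the next build, build.rs emits src/integration_tests/my_case.rs and rewrites mod.rs with `mod my_case;`.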

View File

@@ -0,0 +1,439 @@
////////////////////////////
// Automatically Generated//
////////////////////////////
use crate::logger::messages::Message;
use crate::parsing::lexer::lex;
use crate::parsing::lexer::lex_tokens::LexToken;
use crate::parsing::parser::parse;
use crate::parsing::parser::parsed_statement::ParsedStatement;
use crate::span::Span;
fn panic_on_error(msg: Message, _: Span) {
std::panic::panic_any(msg.stringify());
}
#[test]
fn integration_enum_definition() {
let script = "enum TestEnum : uint8 {
a,
b,
c,
d = 128,
e
}";
let lexed_tokens = lex(script, &mut panic_on_error);
println!("{}", serde_json::to_string(&lexed_tokens).unwrap());
let expected_tokens: Vec<LexToken> =
serde_json::from_str(r#"[
{
"token_type": "EnumKeyword",
"span": {
"start": 0,
"end": 4
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 4,
"end": 5
}
},
{
"token_type": {
"Identifier": "TestEnum"
},
"span": {
"start": 5,
"end": 13
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 13,
"end": 14
}
},
{
"token_type": "Colon",
"span": {
"start": 14,
"end": 15
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 15,
"end": 16
}
},
{
"token_type": "Uint8Keyword",
"span": {
"start": 16,
"end": 21
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 21,
"end": 22
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 22,
"end": 23
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 23,
"end": 24
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 24,
"end": 25
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 25,
"end": 26
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 26,
"end": 27
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 27,
"end": 28
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 28,
"end": 29
}
},
{
"token_type": "Comma",
"span": {
"start": 29,
"end": 30
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 30,
"end": 31
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 31,
"end": 32
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 32,
"end": 33
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 33,
"end": 34
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 34,
"end": 35
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 35,
"end": 36
}
},
{
"token_type": "Comma",
"span": {
"start": 36,
"end": 37
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 37,
"end": 38
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 38,
"end": 39
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 39,
"end": 40
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 40,
"end": 41
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 41,
"end": 42
}
},
{
"token_type": {
"Identifier": "c"
},
"span": {
"start": 42,
"end": 43
}
},
{
"token_type": "Comma",
"span": {
"start": 43,
"end": 44
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 44,
"end": 45
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 45,
"end": 46
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 46,
"end": 47
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 47,
"end": 48
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 48,
"end": 49
}
},
{
"token_type": {
"Identifier": "d"
},
"span": {
"start": 49,
"end": 50
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 50,
"end": 51
}
},
{
"token_type": "Equals",
"span": {
"start": 51,
"end": 52
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 52,
"end": 53
}
},
{
"token_type": {
"IntegerLiteral": 128
},
"span": {
"start": 53,
"end": 56
}
},
{
"token_type": "Comma",
"span": {
"start": 56,
"end": 57
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 57,
"end": 58
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 58,
"end": 59
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 59,
"end": 60
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 60,
"end": 61
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 61,
"end": 62
}
},
{
"token_type": {
"Identifier": "e"
},
"span": {
"start": 62,
"end": 63
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 63,
"end": 64
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 64,
"end": 65
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 65,
"end": 65
}
}
]"#).unwrap();
assert_eq!(lexed_tokens, expected_tokens);
let parsed_tree = parse(lexed_tokens, &mut panic_on_error);
println!("{}", serde_json::to_string(&parsed_tree).unwrap());
let expected_tree: Box<ParsedStatement> =
serde_json::from_str(r#"{
"Script": {
"statements": [
{
"EnumDeclaration": {
"modifiers": 0,
"identifier": "TestEnum",
"base_type": {
"DataTypePrimType": {
"prim_type": "UInt8"
}
},
"values": [
[
"a",
null
],
[
"b",
null
],
[
"c",
null
],
[
"d",
{
"IntegerLiteral": 128
}
],
[
"e",
null
]
]
}
}
]
}
}"#).unwrap();
assert_eq!(parsed_tree, expected_tree);
}

View File

@@ -0,0 +1,2 @@
mod enum_definition;
mod add_function;

View File

@@ -0,0 +1,229 @@
[
{
"token_type": "IntKeyword",
"span": {
"start": 0,
"end": 3
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 3,
"end": 4
}
},
{
"token_type": {
"Identifier": "add"
},
"span": {
"start": 4,
"end": 7
}
},
{
"token_type": "OpenBracket",
"span": {
"start": 7,
"end": 8
}
},
{
"token_type": "IntKeyword",
"span": {
"start": 8,
"end": 11
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 11,
"end": 12
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 12,
"end": 13
}
},
{
"token_type": "Comma",
"span": {
"start": 13,
"end": 14
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 14,
"end": 15
}
},
{
"token_type": "IntKeyword",
"span": {
"start": 15,
"end": 18
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 18,
"end": 19
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 19,
"end": 20
}
},
{
"token_type": "CloseBracket",
"span": {
"start": 20,
"end": 21
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 21,
"end": 22
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 22,
"end": 23
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 23,
"end": 24
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 24,
"end": 25
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 25,
"end": 26
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 26,
"end": 27
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 27,
"end": 28
}
},
{
"token_type": "ReturnKeyword",
"span": {
"start": 28,
"end": 34
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 34,
"end": 35
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 35,
"end": 36
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 36,
"end": 37
}
},
{
"token_type": "Plus",
"span": {
"start": 37,
"end": 38
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 38,
"end": 39
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 39,
"end": 40
}
},
{
"token_type": "Semicolon",
"span": {
"start": 40,
"end": 41
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 41,
"end": 42
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 42,
"end": 43
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 43,
"end": 43
}
}
]

View File

@@ -0,0 +1,96 @@
{
"Script": {
"statements": [
{
"FuncDeclaration": {
"modifiers": 0,
"field_mod": null,
"is_destructor": false,
"returns_reference": false,
"return_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"name": "add",
"param_list": {
"ParamList": {
"parameters": [
{
"parameter_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"type_mod": null,
"identifier": "a",
"default": null
},
{
"parameter_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"type_mod": null,
"identifier": "b",
"default": null
}
]
}
},
"is_const": false,
"func_attr": 0,
"block": {
"StatBlock": {
"statements": [
{
"ReturnStatement": {
"expression": {
"BinaryExpr": {
"left": {
"VarAccess": {
"scope": null,
"identifier": "a"
}
},
"operator": "Addition",
"right": {
"VarAccess": {
"scope": null,
"identifier": "b"
}
}
}
}
}
}
]
}
}
}
}
]
}
}

View File

@@ -0,0 +1,3 @@
int add(int a, int b) {
return a + b;
}

View File

@@ -0,0 +1,366 @@
[
{
"token_type": "EnumKeyword",
"span": {
"start": 0,
"end": 4
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 4,
"end": 5
}
},
{
"token_type": {
"Identifier": "TestEnum"
},
"span": {
"start": 5,
"end": 13
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 13,
"end": 14
}
},
{
"token_type": "Colon",
"span": {
"start": 14,
"end": 15
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 15,
"end": 16
}
},
{
"token_type": "Uint8Keyword",
"span": {
"start": 16,
"end": 21
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 21,
"end": 22
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 22,
"end": 23
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 23,
"end": 24
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 24,
"end": 25
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 25,
"end": 26
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 26,
"end": 27
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 27,
"end": 28
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 28,
"end": 29
}
},
{
"token_type": "Comma",
"span": {
"start": 29,
"end": 30
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 30,
"end": 31
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 31,
"end": 32
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 32,
"end": 33
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 33,
"end": 34
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 34,
"end": 35
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 35,
"end": 36
}
},
{
"token_type": "Comma",
"span": {
"start": 36,
"end": 37
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 37,
"end": 38
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 38,
"end": 39
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 39,
"end": 40
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 40,
"end": 41
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 41,
"end": 42
}
},
{
"token_type": {
"Identifier": "c"
},
"span": {
"start": 42,
"end": 43
}
},
{
"token_type": "Comma",
"span": {
"start": 43,
"end": 44
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 44,
"end": 45
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 45,
"end": 46
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 46,
"end": 47
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 47,
"end": 48
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 48,
"end": 49
}
},
{
"token_type": {
"Identifier": "d"
},
"span": {
"start": 49,
"end": 50
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 50,
"end": 51
}
},
{
"token_type": "Equals",
"span": {
"start": 51,
"end": 52
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 52,
"end": 53
}
},
{
"token_type": {
"IntegerLiteral": 128
},
"span": {
"start": 53,
"end": 56
}
},
{
"token_type": "Comma",
"span": {
"start": 56,
"end": 57
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 57,
"end": 58
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 58,
"end": 59
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 59,
"end": 60
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 60,
"end": 61
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 61,
"end": 62
}
},
{
"token_type": {
"Identifier": "e"
},
"span": {
"start": 62,
"end": 63
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 63,
"end": 64
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 64,
"end": 65
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 65,
"end": 65
}
}
]

View File

@@ -0,0 +1,41 @@
{
"Script": {
"statements": [
{
"EnumDeclaration": {
"modifiers": 0,
"identifier": "TestEnum",
"base_type": {
"DataTypePrimType": {
"prim_type": "UInt8"
}
},
"values": [
[
"a",
null
],
[
"b",
null
],
[
"c",
null
],
[
"d",
{
"IntegerLiteral": 128
}
],
[
"e",
null
]
]
}
}
]
}
}

View File

@@ -0,0 +1,7 @@
enum TestEnum : uint8 {
a,
b,
c,
d = 128,
e
}

View File

@@ -13,3 +13,6 @@ pub mod modifiers;
pub mod parsing;
pub(crate) mod prim_type;
pub mod span;
#[cfg(test)]
mod integration_tests;

View File

@@ -1,8 +1,11 @@
use enumflags2::bitflags;
#[cfg(test)]
use serde_derive::{Deserialize, Serialize};
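// Serialize/Deserialize are only derived in test builds (see cfg_attr below), so serde can stay a dev-dependency.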
#[bitflags]
#[repr(u8)]
#[derive(Debug, Copy, Clone, PartialEq)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum TypeModifier {
External = 0x01,
Shared = 0x02,
@@ -12,7 +15,8 @@ pub enum TypeModifier {
#[bitflags]
#[repr(u8)]
#[derive(Debug, Copy, Clone, PartialEq)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum FieldModifier {
Private = 0x01,
Protected = 0x02,

View File

@@ -1,12 +1,17 @@
use crate::defines::{LiteralFloat, LiteralInt};
use crate::span::Span;
#[cfg(test)]
use serde_derive::{Deserialize, Serialize};
#[derive(PartialEq, Debug)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub struct LexToken {
pub token_type: TokenType,
pub span: Span,
}
#[derive(PartialEq, Debug, Clone)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum TokenType {
EndOfFile,
WhiteSpace,

View File

@@ -34,6 +34,7 @@ fn lex_eq_or(
chars.next();
f(eq, start_pos, chars.real_position);
} else {
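// Peeked character did not match: rewind the peek cursor so the next lex step re-reads it.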
chars.reset_peek();
f(or, start_pos, chars.real_position);
}
}
@@ -58,10 +59,14 @@ fn lex_eq_rep_or(
chars.next();
f(eq, start_pos, chars.real_position);
} else {
chars.reset_peek();
f(or, start_pos, chars.real_position);
}
}
None => f(or, start_pos, chars.real_position),
None => {
chars.reset_peek();
f(or, start_pos, chars.real_position)
}
};
}

View File

@@ -9,8 +9,8 @@ use super::lexer::lex_tokens::{LexToken, TokenType};
use crate::logger::messages::Message;
use crate::logger::messages::Message::UnexpectedToken;
use crate::modifiers::{FieldModifier, TypeModifier};
use crate::parsing::lexer::lex_tokens::TokenType::CloseBracket;
use crate::parsing::parser::parsed_funcattr::FuncAttr;
use crate::parsing::parser::parsed_statement::ParsedParameter;
use crate::parsing::parser::parsed_statement::ParsedStatement::{
AnonymousCall, ExprPostOp, IndexingOperator,
};
@@ -143,12 +143,22 @@ fn parse_script(
reader: &mut ParseReader,
log: &mut dyn FnMut(Message, Span),
) -> Box<ParsedStatement> {
// script ::= {import | enum | typedef | class | interface | funcdef | virtprop | var | func | namespace | ';'};
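// e.g. a top-level enum or a free function on its own is a valid script, as in this commit's two test cases.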
let mut vec: Vec<ParsedStatement> = Vec::new();
loop {
let n = reader.peek();
let token_type = n.token_type.clone();
let span = n.span;
match token_type {
TokenType::ImportKeyword => {
vec.push(*parse_import(reader, log).unwrap());
}
TokenType::TypeDefKeyword => {
vec.push(*parse_typedef(reader, log).unwrap());
}
TokenType::FuncDefKeyword => {
vec.push(*parse_funcdef(reader, log).unwrap());
}
TokenType::NamespaceKeyword => {
vec.push(*parse_namespace(reader, log));
}
@@ -156,10 +166,20 @@
TokenType::EndOfFile => break,
TokenType::CloseCurlyBracket => break,
_ => {
if let Some(s) = parse_interface(reader, log) {
if let Some(s) = parse_enum(reader, log) {
vec.push(*s);
}
// else if let Some(s) = parse_class(reader, log) {
// vec.push(*s);
// }
else if let Some(s) = parse_interface(reader, log) {
vec.push(*s);
} else if let Some(s) = parse_virtprop(reader, log) {
vec.push(*s);
} else if let Some(s) = parse_var(reader, log) {
vec.push(*s);
} else if let Some(s) = parse_func(reader, log) {
vec.push(*s);
} else {
log(
UnexpectedToken {
@@ -494,12 +514,16 @@ fn parse_enum(
// FIXME: Add error log if value is None
}
values.push((identifier.unwrap(), value));
if reader.peek().token_type != TokenType::Comma {
break;
}
reader.next();
}
reader.consume(TokenType::CloseCurlyBracket, log);
}
Some(Box::new(ParsedStatement::EnumDeclaration {
modifiers: modifiers,
modifiers,
identifier: name.unwrap(),
base_type,
values,
@@ -524,11 +548,85 @@
}
fn parse_func(
_outer_reader: &mut ParseReader,
_log: &mut dyn FnMut(Message, Span),
outer_reader: &mut ParseReader,
log: &mut dyn FnMut(Message, Span),
) -> Option<Box<ParsedStatement>> {
// func ::= {'shared' | 'external'} ['private' | 'protected'] [((type ['&']) | '~')] identifier paramlist ['const'] funcattr (';' | statblock);
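// e.g. `int add(int a, int b) { return a + b; }` from the add_function test case; a destructor form starts with '~' instead of a type.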
unimplemented!();
let mut reader = outer_reader.create_inner();
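// Parse speculatively on an inner reader; outer_reader only advances (via set_from_inner) once a func is recognized.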
let mut modifiers: BitFlags<TypeModifier> = BitFlags::empty();
loop {
match reader.peek().token_type {
TokenType::SharedKeyword => {
reader.next();
modifiers |= TypeModifier::Shared;
}
TokenType::ExternalKeyword => {
reader.next();
modifiers |= TypeModifier::External;
}
_ => {
break;
}
}
}
let mut field_mod: Option<FieldModifier> = None;
match reader.peek().token_type {
TokenType::PrivateKeyword => {
field_mod = Some(FieldModifier::Private);
reader.next();
}
TokenType::ProtectedKeyword => {
field_mod = Some(FieldModifier::Protected);
reader.next();
}
_ => {}
}
// [((type ['&']) | '~')]
let mut is_destructor = false;
let mut returns_reference = false;
let mut return_type = None;
if reader.peek().token_type == TokenType::Tilde {
is_destructor = true;
reader.next();
} else {
return_type = parse_type(&mut reader, log);
if return_type.is_some() && reader.peek().token_type == TokenType::Ampersand {
returns_reference = true;
reader.next();
}
}
let name = parse_identifier(&mut reader, log, true);
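// No identifier here means this is not a func declaration: bail out without committing the inner reader.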
name.as_ref()?;
let param_list = parse_paramlist(&mut reader, log);
param_list.as_ref()?;
let is_const = reader.peek().token_type == TokenType::ConstKeyword;
if is_const {
reader.next();
}
let func_attr = parse_funcattr(&mut reader, log);
let mut block = None;
if reader.peek().token_type == TokenType::Semicolon {
reader.next();
} else {
block = parse_statblock(&mut reader, log);
}
outer_reader.set_from_inner(&reader);
Some(Box::new(ParsedStatement::FuncDeclaration {
modifiers,
field_mod,
is_destructor,
returns_reference,
return_type,
name: name.unwrap(),
param_list: param_list.unwrap(),
is_const,
func_attr,
block,
}))
}
fn parse_virtprop(
@@ -536,26 +634,21 @@
log: &mut dyn FnMut(Message, Span),
) -> Option<Box<ParsedStatement>> {
let mut reader = outer_reader.create_inner();
let mut field_mod: BitFlags<FieldModifier> = BitFlags::empty();
let property_type: Option<Box<ParsedStatement>>;
loop {
let t = reader.peek();
match t.token_type {
let mut field_mod: Option<FieldModifier> = None;
match reader.peek().token_type {
TokenType::PrivateKeyword => {
field_mod |= FieldModifier::Private;
field_mod = Some(FieldModifier::Private);
reader.next();
}
TokenType::ProtectedKeyword => {
field_mod |= FieldModifier::Protected;
field_mod = Some(FieldModifier::Protected);
reader.next();
}
_ => {
property_type = parse_type(&mut reader, log);
_ => {}
}
let property_type = parse_type(&mut reader, log);
property_type.as_ref()?;
break;
}
}
}
let mut is_handle = false;
if reader.peek().token_type == TokenType::AtSymbol {
reader.next();
@@ -678,7 +771,16 @@ fn parse_paramlist(
default = parse_expr(reader, log);
// FIXME: log if default is empty
}
params.push((param_type.unwrap(), type_mod, identifier, default));
params.push(ParsedParameter {
parameter_type: param_type.unwrap(),
type_mod,
identifier,
default,
});
if reader.peek().token_type != TokenType::Comma {
break;
}
reader.next();
}
reader.consume(TokenType::CloseBracket, log);
Some(Box::new(ParsedStatement::ParamList { parameters: params }))
@@ -743,6 +845,8 @@ fn parse_var(
outer_reader: &mut ParseReader,
log: &mut dyn FnMut(Message, Span),
) -> Option<Box<ParsedStatement>> {
// var ::= ['private'|'protected'] type identifier [( '=' (initlist | expr)) | arglist] {',' identifier [( '=' (initlist | expr)) | arglist]} ';';
// var ::= ['private'|'protected'] type identifier
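// e.g. a declaration like `int value = 100;` (identifier hypothetical), as exercised by the global-variable parser tests.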
let mut reader = outer_reader.create_inner();
@@ -793,6 +897,11 @@ fn parse_var(
// {',' identifier [( '=' (initlist | expr)) | arglist]} ';';
if reader.peek().token_type != TokenType::Semicolon {
return None;
}
reader.next();
outer_reader.set_from_inner(&reader);
Some(Box::new(ParsedStatement::Var {
modifier: field_mod,
@@ -1087,6 +1196,9 @@ fn parse_expr(
if let Some(..) = binary_operand {
let expr_term2 = parse_exprterm(reader, log);
if expr_term2.is_none() {
unimplemented!()
}
// FIXME: deal with empty expr_term2
return Some(Box::new(ParsedStatement::BinaryExpr {
left: expr_term.unwrap(),
@@ -1105,8 +1217,8 @@ fn parse_exprterm(
let mut reader = outer_reader.create_inner();
// exprterm ::= ([type '='] initlist) | ({exprpreop} exprvalue {exprpostop});
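// e.g. `a` in `return a + b;` takes the second branch: no preops, exprvalue `a`, no postops.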
let expr_type = parse_type(&mut reader, log);
if expr_type.is_some() {
outer_reader.consume(TokenType::Equals, log);
if expr_type.is_some() && reader.peek().token_type == TokenType::Equals {
reader.consume(TokenType::Equals, log);
}
let mut init_list = None;
@@ -1148,6 +1260,7 @@ fn parse_exprterm(
})
}
println!("{:?}", real_value);
outer_reader.set_from_inner(&reader);
Some(real_value)
}
@@ -1420,7 +1533,7 @@ fn parse_constructcall(
let construct_type = parse_type(&mut reader, log);
construct_type.as_ref()?;
let arg_list = parse_arglist(&mut reader, log);
// FIXME: deal with None value for arg list
arg_list.as_ref()?;
outer_reader.set_from_inner(&reader);
Some(Box::new(ParsedStatement::ConstructCall {
@@ -1654,6 +1767,9 @@ fn parse_primtype(
TokenType::Int32Keyword => Some(Box::new(ParsedStatement::DataTypePrimType {
prim_type: PrimitiveType::Int32,
})),
TokenType::IntKeyword => Some(Box::new(ParsedStatement::DataTypePrimType {
prim_type: PrimitiveType::Int32,
})),
TokenType::Int64Keyword => Some(Box::new(ParsedStatement::DataTypePrimType {
prim_type: PrimitiveType::Int64,
})),
@@ -1666,6 +1782,9 @@
TokenType::Uint32Keyword => Some(Box::new(ParsedStatement::DataTypePrimType {
prim_type: PrimitiveType::UInt32,
})),
TokenType::UintKeyword => Some(Box::new(ParsedStatement::DataTypePrimType {
prim_type: PrimitiveType::UInt32,
})),
TokenType::Uint64Keyword => Some(Box::new(ParsedStatement::DataTypePrimType {
prim_type: PrimitiveType::UInt64,
})),

View File

@@ -1,8 +1,11 @@
use enumflags2::bitflags;
#[cfg(test)]
use serde_derive::{Deserialize, Serialize};
#[bitflags]
#[repr(u8)]
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum FuncAttr {
Override,
Final,

View File

@@ -9,7 +9,20 @@ use crate::parsing::parser::parser_operators::{
use crate::prim_type::PrimitiveType;
use enumflags2::BitFlags;
#[cfg(test)]
use serde_derive::{Deserialize, Serialize};
#[derive(PartialEq, Debug)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub struct ParsedParameter {
pub parameter_type: Box<ParsedStatement>,
pub type_mod: Option<BitFlags<ReferenceModifier>>,
pub identifier: Option<String>,
pub default: Option<Box<ParsedStatement>>,
}
#[derive(PartialEq, Debug)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum ParsedStatement {
Invalid,
Script {
@@ -31,7 +44,7 @@
generic_types: Option<Vec<ParsedStatement>>,
},
VirtProp {
field_mod: BitFlags<FieldModifier>,
field_mod: Option<FieldModifier>,
property_type: Box<ParsedStatement>,
identifier: String,
is_handle: bool,
@@ -169,12 +182,7 @@
expression: Option<Box<ParsedStatement>>,
},
ParamList {
parameters: Vec<(
Box<ParsedStatement>, // type
Option<BitFlags<ReferenceModifier>>, // typemod
Option<String>, // identifier
Option<Box<ParsedStatement>>, // default expression
)>,
parameters: Vec<ParsedParameter>,
},
InterfaceMethod {
return_type: Box<ParsedStatement>,
@@ -204,4 +212,16 @@
base_type: Option<Box<ParsedStatement>>,
values: Vec<(String, Option<Box<ParsedStatement>>)>,
},
FuncDeclaration {
modifiers: BitFlags<TypeModifier>,
field_mod: Option<FieldModifier>,
is_destructor: bool,
returns_reference: bool,
return_type: Option<Box<ParsedStatement>>,
name: String,
param_list: Box<ParsedStatement>,
is_const: bool,
func_attr: BitFlags<FuncAttr>,
block: Option<Box<ParsedStatement>>,
},
}

View File

@@ -1,6 +1,9 @@
use enumflags2::bitflags;
#[cfg(test)]
use serde_derive::{Deserialize, Serialize};
#[derive(Eq, PartialEq, Debug)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum ParsedTypeModifier {
Array,
Handle,
@@ -10,6 +13,7 @@
#[bitflags]
#[repr(u8)]
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum ReferenceModifier {
In,
Out,

View File

@@ -1,4 +1,8 @@
#[cfg(test)]
use serde_derive::{Deserialize, Serialize};
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum PreOperator {
Negative,
Identity,
@@ -10,12 +14,14 @@
}
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum PostOperator {
Increment,
Decrement,
}
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum BinaryOperator {
// math op
Addition,
@@ -61,6 +67,7 @@
}
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum TernaryOperator {
Conditional,
}

View File

@@ -5,7 +5,6 @@ use crate::parsing::lexer::lex_tokens::{LexToken, TokenType};
use crate::parsing::parser::parsed_statement::ParsedStatement::DataTypeAuto;
use crate::parsing::parser::parser_operators::{BinaryOperator, PreOperator};
use crate::span::Span;
use enumflags2::BitFlags;
fn create_tokens(types: Vec<TokenType>) -> Vec<LexToken> {
let mut v = Vec::with_capacity(types.len());
@@ -179,7 +178,7 @@ fn test_interface_with_virtprop() {
set_statement,
} = &statements[0]
{
assert_eq!(*field_mod, BitFlags::empty());
assert_eq!(*field_mod, None);
if let ParsedStatement::Type {
is_const, datatype, ..
} = property_type.as_ref()
@@ -225,6 +224,7 @@ fn test_assign_to_global_variable() {
TokenType::Equals,
TokenType::WhiteSpace,
TokenType::IntegerLiteral(100),
TokenType::Semicolon,
TokenType::EndOfFile,
]),
&mut |_message, _span| {
@@ -276,6 +276,7 @@ fn test_assign_negative_to_global_variable() {
TokenType::WhiteSpace,
TokenType::Minus,
TokenType::IntegerLiteral(100),
TokenType::Semicolon,
TokenType::EndOfFile,
]),
&mut |_message, _span| {
@@ -332,6 +333,7 @@ fn test_assign_addition_to_global_variable() {
TokenType::Plus,
TokenType::Minus,
TokenType::IntegerLiteral(20),
TokenType::Semicolon,
TokenType::EndOfFile,
]),
&mut |_message, _span| {

View File

@@ -1,6 +1,9 @@
use crate::defines::PointerSize;
#[cfg(test)]
use serde_derive::{Deserialize, Serialize};
#[derive(Eq, PartialEq, Debug)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub enum PrimitiveType {
Void,
Int8,

View File

@@ -1,4 +1,8 @@
#[derive(Copy, Clone, Debug)]
#[cfg(test)]
use serde_derive::{Deserialize, Serialize};
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
pub struct Span {
pub start: usize,
pub end: usize,