Add function parsing and integration tests, plus general fixes related to the integration tests

This commit is contained in:
2022-04-06 22:39:25 +02:00
parent f277ecb72f
commit 373794a830
23 changed files with 1873 additions and 42 deletions

View File

@@ -0,0 +1,353 @@
////////////////////////////
// Automatically Generated//
////////////////////////////
use crate::logger::messages::Message;
use crate::parsing::lexer::lex;
use crate::parsing::lexer::lex_tokens::LexToken;
use crate::parsing::parser::parse;
use crate::parsing::parser::parsed_statement::ParsedStatement;
use crate::span::Span;
/// Error callback handed to the lexer/parser: any reported diagnostic
/// aborts the test immediately by panicking with the stringified message.
fn panic_on_error(msg: Message, _: Span) {
    let rendered = msg.stringify();
    std::panic::panic_any(rendered);
}
#[test]
fn integration_add_function() {
// Script under test (mirrors test_cases/add_function/script.ses).
let script = "int add(int a, int b) {
return a + b;
}";
// Lex the script; any lexer diagnostic panics via panic_on_error and fails the test.
let lexed_tokens = lex(script, &mut panic_on_error);
// Print the actual token stream so a failing run can be inspected (and the fixture regenerated).
println!("{}", serde_json::to_string(&lexed_tokens).unwrap());
// Expected token stream, embedded from test_cases/add_function/lex_tokens.json;
// each span holds start/end offsets into `script`.
let expected_tokens: Vec<LexToken> =
serde_json::from_str(r#"[
{
"token_type": "IntKeyword",
"span": {
"start": 0,
"end": 3
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 3,
"end": 4
}
},
{
"token_type": {
"Identifier": "add"
},
"span": {
"start": 4,
"end": 7
}
},
{
"token_type": "OpenBracket",
"span": {
"start": 7,
"end": 8
}
},
{
"token_type": "IntKeyword",
"span": {
"start": 8,
"end": 11
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 11,
"end": 12
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 12,
"end": 13
}
},
{
"token_type": "Comma",
"span": {
"start": 13,
"end": 14
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 14,
"end": 15
}
},
{
"token_type": "IntKeyword",
"span": {
"start": 15,
"end": 18
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 18,
"end": 19
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 19,
"end": 20
}
},
{
"token_type": "CloseBracket",
"span": {
"start": 20,
"end": 21
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 21,
"end": 22
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 22,
"end": 23
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 23,
"end": 24
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 24,
"end": 25
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 25,
"end": 26
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 26,
"end": 27
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 27,
"end": 28
}
},
{
"token_type": "ReturnKeyword",
"span": {
"start": 28,
"end": 34
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 34,
"end": 35
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 35,
"end": 36
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 36,
"end": 37
}
},
{
"token_type": "Plus",
"span": {
"start": 37,
"end": 38
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 38,
"end": 39
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 39,
"end": 40
}
},
{
"token_type": "Semicolon",
"span": {
"start": 40,
"end": 41
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 41,
"end": 42
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 42,
"end": 43
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 43,
"end": 43
}
}
]"#).unwrap();
assert_eq!(lexed_tokens, expected_tokens);
// Parse the lexed tokens; parser diagnostics likewise panic and fail the test.
let parsed_tree = parse(lexed_tokens, &mut panic_on_error);
println!("{}", serde_json::to_string(&parsed_tree).unwrap());
// Expected AST, embedded from test_cases/add_function/parsed_tree.json.
let expected_tree: Box<ParsedStatement> =
serde_json::from_str(r#"{
"Script": {
"statements": [
{
"FuncDeclaration": {
"modifiers": 0,
"field_mod": null,
"is_destructor": false,
"returns_reference": false,
"return_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"name": "add",
"param_list": {
"ParamList": {
"parameters": [
{
"parameter_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"type_mod": null,
"identifier": "a",
"default": null
},
{
"parameter_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"type_mod": null,
"identifier": "b",
"default": null
}
]
}
},
"is_const": false,
"func_attr": 0,
"block": {
"StatBlock": {
"statements": [
{
"ReturnStatement": {
"expression": {
"BinaryExpr": {
"left": {
"VarAccess": {
"scope": null,
"identifier": "a"
}
},
"operator": "Addition",
"right": {
"VarAccess": {
"scope": null,
"identifier": "b"
}
}
}
}
}
}
]
}
}
}
}
]
}
}"#).unwrap();
assert_eq!(parsed_tree, expected_tree);
}

View File

@@ -0,0 +1,108 @@
use std::fs;
use std::fs::File;
use std::io::Write;
use std::path::Path;
/// Escapes `s` for embedding inside a normal (non-raw) Rust string literal.
/// Backslashes are doubled first so the quote-escaping pass does not
/// re-escape its own output. The runtime value of the generated literal is
/// identical to `s`, so lexer spans in the fixtures are unaffected.
fn escape_script(s: &str) -> String {
    s.replace('\\', "\\\\").replace('"', "\\\"")
}

/// Test generator: scans `src/integration_tests/test_cases/` and emits one
/// Rust test module per case directory, plus the `mod.rs` listing them.
///
/// Each case directory may contain:
/// * `script.ses`       — the script under test (required; case skipped otherwise)
/// * `lex_tokens.json`  — expected lexer output (optional)
/// * `parsed_tree.json` — expected parser output (optional)
fn main() {
    let paths = fs::read_dir("src/integration_tests/test_cases/").unwrap();
    let mod_file_path = Path::new("src/integration_tests/mod.rs");
    let mut mod_file = File::create(mod_file_path).unwrap();
    for path_opt in paths {
        // Skip unreadable directory entries rather than aborting generation.
        let path = match path_opt {
            Ok(p) => p,
            Err(_) => continue,
        };
        if !path.file_type().unwrap().is_dir() {
            continue;
        }
        let p = path.path();
        let mut script_path = p.clone();
        script_path.push("script.ses");
        if !script_path.exists() {
            continue;
        }
        let script = fs::read_to_string(script_path).unwrap();
        let test_name = p.file_stem().unwrap().to_str().unwrap();
        writeln!(mod_file, "mod {name};", name = test_name).unwrap();
        let testfile_path = format!("src/integration_tests/{}.rs", test_name);
        let mut testfile = File::create(Path::new(&testfile_path)).unwrap();
        // Fixed header: imports plus an error callback that panics on any
        // lexer/parser diagnostic, failing the generated test.
        write!(
            testfile,
            r#"////////////////////////////
// Automatically Generated//
////////////////////////////
use crate::logger::messages::Message;
use crate::parsing::lexer::lex;
use crate::parsing::lexer::lex_tokens::LexToken;
use crate::parsing::parser::parse;
use crate::parsing::parser::parsed_statement::ParsedStatement;
use crate::span::Span;
fn panic_on_error(msg: Message, _: Span) {{
std::panic::panic_any(msg.stringify());
}}"#
        )
        .unwrap();
        write!(
            testfile,
            r#"
#[test]
fn integration_{name}() {{
let script = "{script}";
let lexed_tokens = lex(script, &mut panic_on_error);
println!("{{}}", serde_json::to_string(&lexed_tokens).unwrap());
"#,
            name = test_name,
            // Bug fix: escape quotes/backslashes so a script containing `"`
            // or `\` still generates a compilable string literal.
            script = escape_script(script.as_str())
        )
        .unwrap();
        let mut tokens_file_path = p.clone();
        tokens_file_path.push("lex_tokens.json");
        if tokens_file_path.exists() {
            let tokens_json = fs::read_to_string(tokens_file_path).unwrap();
            // NOTE(review): the JSON is spliced into an r#"…"# literal, so a
            // fixture containing the sequence `"#` would still break the
            // generated file — none of the current fixtures do.
            write!(
                testfile,
                r##" let expected_tokens: Vec<LexToken> =
serde_json::from_str(r#"{tokens}"#).unwrap();
assert_eq!(lexed_tokens, expected_tokens);
"##,
                tokens = tokens_json.as_str()
            )
            .unwrap();
        }
        write!(
            testfile,
            r##"
let parsed_tree = parse(lexed_tokens, &mut panic_on_error);
println!("{{}}", serde_json::to_string(&parsed_tree).unwrap());
"##
        )
        .unwrap();
        let mut parsed_tree_path = p.clone();
        parsed_tree_path.push("parsed_tree.json");
        if parsed_tree_path.exists() {
            let parsed_tree_json = fs::read_to_string(parsed_tree_path).unwrap();
            // Same r#"…"# splice caveat as for the token fixture above.
            write!(
                testfile,
                r##" let expected_tree: Box<ParsedStatement> =
serde_json::from_str(r#"{expected_tree}"#).unwrap();
assert_eq!(parsed_tree, expected_tree);
"##,
                expected_tree = parsed_tree_json.as_str()
            )
            .unwrap();
        }
        // Close the generated test function.
        write!(testfile, "}}").unwrap();
    }
}

View File

@@ -0,0 +1,439 @@
////////////////////////////
// Automatically Generated//
////////////////////////////
use crate::logger::messages::Message;
use crate::parsing::lexer::lex;
use crate::parsing::lexer::lex_tokens::LexToken;
use crate::parsing::parser::parse;
use crate::parsing::parser::parsed_statement::ParsedStatement;
use crate::span::Span;
/// Error callback for the generated test: turns any lexer/parser
/// diagnostic into an immediate panic carrying the stringified message.
fn panic_on_error(msg: Message, _: Span) {
    let rendered = msg.stringify();
    std::panic::panic_any(rendered);
}
#[test]
fn integration_enum_definition() {
// Script under test (mirrors test_cases/enum_definition/script.ses).
let script = "enum TestEnum : uint8 {
a,
b,
c,
d = 128,
e
}";
// Lex the script; any lexer diagnostic panics via panic_on_error and fails the test.
let lexed_tokens = lex(script, &mut panic_on_error);
// Print the actual token stream so a failing run can be inspected (and the fixture regenerated).
println!("{}", serde_json::to_string(&lexed_tokens).unwrap());
// Expected token stream, embedded from test_cases/enum_definition/lex_tokens.json;
// each span holds start/end offsets into `script`.
let expected_tokens: Vec<LexToken> =
serde_json::from_str(r#"[
{
"token_type": "EnumKeyword",
"span": {
"start": 0,
"end": 4
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 4,
"end": 5
}
},
{
"token_type": {
"Identifier": "TestEnum"
},
"span": {
"start": 5,
"end": 13
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 13,
"end": 14
}
},
{
"token_type": "Colon",
"span": {
"start": 14,
"end": 15
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 15,
"end": 16
}
},
{
"token_type": "Uint8Keyword",
"span": {
"start": 16,
"end": 21
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 21,
"end": 22
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 22,
"end": 23
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 23,
"end": 24
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 24,
"end": 25
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 25,
"end": 26
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 26,
"end": 27
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 27,
"end": 28
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 28,
"end": 29
}
},
{
"token_type": "Comma",
"span": {
"start": 29,
"end": 30
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 30,
"end": 31
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 31,
"end": 32
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 32,
"end": 33
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 33,
"end": 34
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 34,
"end": 35
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 35,
"end": 36
}
},
{
"token_type": "Comma",
"span": {
"start": 36,
"end": 37
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 37,
"end": 38
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 38,
"end": 39
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 39,
"end": 40
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 40,
"end": 41
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 41,
"end": 42
}
},
{
"token_type": {
"Identifier": "c"
},
"span": {
"start": 42,
"end": 43
}
},
{
"token_type": "Comma",
"span": {
"start": 43,
"end": 44
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 44,
"end": 45
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 45,
"end": 46
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 46,
"end": 47
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 47,
"end": 48
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 48,
"end": 49
}
},
{
"token_type": {
"Identifier": "d"
},
"span": {
"start": 49,
"end": 50
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 50,
"end": 51
}
},
{
"token_type": "Equals",
"span": {
"start": 51,
"end": 52
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 52,
"end": 53
}
},
{
"token_type": {
"IntegerLiteral": 128
},
"span": {
"start": 53,
"end": 56
}
},
{
"token_type": "Comma",
"span": {
"start": 56,
"end": 57
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 57,
"end": 58
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 58,
"end": 59
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 59,
"end": 60
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 60,
"end": 61
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 61,
"end": 62
}
},
{
"token_type": {
"Identifier": "e"
},
"span": {
"start": 62,
"end": 63
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 63,
"end": 64
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 64,
"end": 65
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 65,
"end": 65
}
}
]"#).unwrap();
assert_eq!(lexed_tokens, expected_tokens);
// Parse the lexed tokens; parser diagnostics likewise panic and fail the test.
let parsed_tree = parse(lexed_tokens, &mut panic_on_error);
println!("{}", serde_json::to_string(&parsed_tree).unwrap());
// Expected AST, embedded from test_cases/enum_definition/parsed_tree.json;
// values are (name, optional initializer) pairs — only "d" has one (128).
let expected_tree: Box<ParsedStatement> =
serde_json::from_str(r#"{
"Script": {
"statements": [
{
"EnumDeclaration": {
"modifiers": 0,
"identifier": "TestEnum",
"base_type": {
"DataTypePrimType": {
"prim_type": "UInt8"
}
},
"values": [
[
"a",
null
],
[
"b",
null
],
[
"c",
null
],
[
"d",
{
"IntegerLiteral": 128
}
],
[
"e",
null
]
]
}
}
]
}
}"#).unwrap();
assert_eq!(parsed_tree, expected_tree);
}

View File

@@ -0,0 +1,2 @@
// Auto-generated module list: one `mod` per test-case directory,
// rewritten by the integration-test generator — do not edit by hand.
mod enum_definition;
mod add_function;

View File

@@ -0,0 +1,229 @@
[
{
"token_type": "IntKeyword",
"span": {
"start": 0,
"end": 3
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 3,
"end": 4
}
},
{
"token_type": {
"Identifier": "add"
},
"span": {
"start": 4,
"end": 7
}
},
{
"token_type": "OpenBracket",
"span": {
"start": 7,
"end": 8
}
},
{
"token_type": "IntKeyword",
"span": {
"start": 8,
"end": 11
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 11,
"end": 12
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 12,
"end": 13
}
},
{
"token_type": "Comma",
"span": {
"start": 13,
"end": 14
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 14,
"end": 15
}
},
{
"token_type": "IntKeyword",
"span": {
"start": 15,
"end": 18
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 18,
"end": 19
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 19,
"end": 20
}
},
{
"token_type": "CloseBracket",
"span": {
"start": 20,
"end": 21
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 21,
"end": 22
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 22,
"end": 23
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 23,
"end": 24
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 24,
"end": 25
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 25,
"end": 26
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 26,
"end": 27
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 27,
"end": 28
}
},
{
"token_type": "ReturnKeyword",
"span": {
"start": 28,
"end": 34
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 34,
"end": 35
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 35,
"end": 36
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 36,
"end": 37
}
},
{
"token_type": "Plus",
"span": {
"start": 37,
"end": 38
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 38,
"end": 39
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 39,
"end": 40
}
},
{
"token_type": "Semicolon",
"span": {
"start": 40,
"end": 41
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 41,
"end": 42
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 42,
"end": 43
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 43,
"end": 43
}
}
]

View File

@@ -0,0 +1,96 @@
{
"Script": {
"statements": [
{
"FuncDeclaration": {
"modifiers": 0,
"field_mod": null,
"is_destructor": false,
"returns_reference": false,
"return_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"name": "add",
"param_list": {
"ParamList": {
"parameters": [
{
"parameter_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"type_mod": null,
"identifier": "a",
"default": null
},
{
"parameter_type": {
"Type": {
"is_const": false,
"scope": null,
"datatype": {
"DataTypePrimType": {
"prim_type": "Int32"
}
},
"modifiers": []
}
},
"type_mod": null,
"identifier": "b",
"default": null
}
]
}
},
"is_const": false,
"func_attr": 0,
"block": {
"StatBlock": {
"statements": [
{
"ReturnStatement": {
"expression": {
"BinaryExpr": {
"left": {
"VarAccess": {
"scope": null,
"identifier": "a"
}
},
"operator": "Addition",
"right": {
"VarAccess": {
"scope": null,
"identifier": "b"
}
}
}
}
}
}
]
}
}
}
}
]
}
}

View File

@@ -0,0 +1,3 @@
int add(int a, int b) {
return a + b;
}

View File

@@ -0,0 +1,366 @@
[
{
"token_type": "EnumKeyword",
"span": {
"start": 0,
"end": 4
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 4,
"end": 5
}
},
{
"token_type": {
"Identifier": "TestEnum"
},
"span": {
"start": 5,
"end": 13
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 13,
"end": 14
}
},
{
"token_type": "Colon",
"span": {
"start": 14,
"end": 15
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 15,
"end": 16
}
},
{
"token_type": "Uint8Keyword",
"span": {
"start": 16,
"end": 21
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 21,
"end": 22
}
},
{
"token_type": "OpenCurlyBracket",
"span": {
"start": 22,
"end": 23
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 23,
"end": 24
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 24,
"end": 25
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 25,
"end": 26
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 26,
"end": 27
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 27,
"end": 28
}
},
{
"token_type": {
"Identifier": "a"
},
"span": {
"start": 28,
"end": 29
}
},
{
"token_type": "Comma",
"span": {
"start": 29,
"end": 30
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 30,
"end": 31
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 31,
"end": 32
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 32,
"end": 33
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 33,
"end": 34
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 34,
"end": 35
}
},
{
"token_type": {
"Identifier": "b"
},
"span": {
"start": 35,
"end": 36
}
},
{
"token_type": "Comma",
"span": {
"start": 36,
"end": 37
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 37,
"end": 38
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 38,
"end": 39
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 39,
"end": 40
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 40,
"end": 41
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 41,
"end": 42
}
},
{
"token_type": {
"Identifier": "c"
},
"span": {
"start": 42,
"end": 43
}
},
{
"token_type": "Comma",
"span": {
"start": 43,
"end": 44
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 44,
"end": 45
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 45,
"end": 46
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 46,
"end": 47
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 47,
"end": 48
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 48,
"end": 49
}
},
{
"token_type": {
"Identifier": "d"
},
"span": {
"start": 49,
"end": 50
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 50,
"end": 51
}
},
{
"token_type": "Equals",
"span": {
"start": 51,
"end": 52
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 52,
"end": 53
}
},
{
"token_type": {
"IntegerLiteral": 128
},
"span": {
"start": 53,
"end": 56
}
},
{
"token_type": "Comma",
"span": {
"start": 56,
"end": 57
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 57,
"end": 58
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 58,
"end": 59
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 59,
"end": 60
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 60,
"end": 61
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 61,
"end": 62
}
},
{
"token_type": {
"Identifier": "e"
},
"span": {
"start": 62,
"end": 63
}
},
{
"token_type": "WhiteSpace",
"span": {
"start": 63,
"end": 64
}
},
{
"token_type": "CloseCurlyBracket",
"span": {
"start": 64,
"end": 65
}
},
{
"token_type": "EndOfFile",
"span": {
"start": 65,
"end": 65
}
}
]

View File

@@ -0,0 +1,41 @@
{
"Script": {
"statements": [
{
"EnumDeclaration": {
"modifiers": 0,
"identifier": "TestEnum",
"base_type": {
"DataTypePrimType": {
"prim_type": "UInt8"
}
},
"values": [
[
"a",
null
],
[
"b",
null
],
[
"c",
null
],
[
"d",
{
"IntegerLiteral": 128
}
],
[
"e",
null
]
]
}
}
]
}
}

View File

@@ -0,0 +1,7 @@
enum TestEnum : uint8 {
a,
b,
c,
d = 128,
e
}