Make a lot of one-liner functions inline
Some checks failed
continuous-integration/drone/push: Build is failing
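The change behind this diff lives in the token classes: trivial one-line getters move into the headers so they can be inlined, and call sites switch from the old IToken interface to the concrete Token base class. The headers themselves are not part of this view, so the following is only a minimal sketch of what the inlined form presumably looks like; only the names Token, TokenKind, and GetKind() appear in the diff, while the field name _kind and the exact class layout are assumptions.

    enum class TokenKind { EqualityToken, Integer, Float, Identifier, String, EndOfFile /* ... */ };

    // Sketch of the concrete base class the tests now use.
    class Token {
        const TokenKind _kind;                 // assumed field name
    public:
        explicit Token(TokenKind kind) : _kind(kind) {}
        virtual ~Token() = default;            // the tests delete tokens through Token*
        // Defining the one-liner in the class body makes it implicitly
        // inline, so callers avoid an out-of-line (or virtual) call.
        TokenKind GetKind() const { return _kind; }
    };

The test diffs below are mechanical: every `const IToken*` becomes `const Token*`, first in the lexer tests, then in the parser tests.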
@@ -77,7 +77,7 @@ TEST_CASE( "Lex Equality Token", "[lexer]" ) {
     Lexer lexer = Lexer(u"==", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::EqualityToken);
     for (auto t: tokens){
         delete t;
@@ -117,7 +117,7 @@ TEST_CASE( "Lex Longer Integers", "[lexer]" ) {
     Lexer lexer = Lexer(s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::Integer);
     auto* integerToken = (IntegerToken *)firstToken;
     CHECK(integerToken -> GetValue() == integer);
@@ -139,7 +139,7 @@ TEST_CASE( "Lex Floats", "[lexer]" ) {
     Lexer lexer = Lexer(s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::Float);
     auto* floatToken = (FloatToken *)firstToken;
     CHECK(floatToken -> GetValue() == Approx(f));
@@ -154,7 +154,7 @@ TEST_CASE( "Lex And Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"and", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::AndKeyword);
     for (auto t: tokens){
         delete t;
@@ -164,7 +164,7 @@ TEST_CASE( "Lex Break Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"break", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::BreakKeyword);
     for (auto t: tokens){
         delete t;
@@ -174,7 +174,7 @@ TEST_CASE( "Lex Do Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"do", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::DoKeyword);
     for (auto t: tokens){
         delete t;
@@ -184,7 +184,7 @@ TEST_CASE( "Lex else Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"else", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::ElseKeyword);
     for (auto t: tokens){
         delete t;
@@ -194,7 +194,7 @@ TEST_CASE( "Lex else if Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"elseif", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::ElseIfKeyword);
     for (auto t: tokens){
         delete t;
@@ -204,7 +204,7 @@ TEST_CASE( "Lex end Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"end", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::EndKeyword);
     for (auto t: tokens){
         delete t;
@@ -214,7 +214,7 @@ TEST_CASE( "Lex false Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"false", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::FalseKeyword);
     for (auto t: tokens){
         delete t;
@@ -224,7 +224,7 @@ TEST_CASE( "Lex for Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"for", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::ForKeyword);
     for (auto t: tokens){
         delete t;
@@ -235,7 +235,7 @@ TEST_CASE( "Lex function Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::FunctionKeyword);
     for (auto t: tokens){
         delete t;
@@ -246,7 +246,7 @@ TEST_CASE( "Lex if Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"if", nullptr);
    auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::IfKeyword);
     for (auto t: tokens){
         delete t;
@@ -256,7 +256,7 @@ TEST_CASE( "Lex in Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"in", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::InKeyword);
     for (auto t: tokens){
         delete t;
@@ -266,7 +266,7 @@ TEST_CASE( "Lex local Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"local", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::LocalKeyword);
     for (auto t: tokens){
         delete t;
@@ -276,7 +276,7 @@ TEST_CASE( "Lex nil Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"nil", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::NilKeyword);
     for (auto t: tokens){
         delete t;
@@ -286,7 +286,7 @@ TEST_CASE( "Lex not Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"not", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::NotKeyword);
     for (auto t: tokens){
         delete t;
@@ -296,7 +296,7 @@ TEST_CASE( "Lex or Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"or", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::OrKeyword);
     for (auto t: tokens){
         delete t;
@@ -306,7 +306,7 @@ TEST_CASE( "Lex return Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"return", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::ReturnKeyword);
     for (auto t: tokens){
         delete t;
@@ -316,7 +316,7 @@ TEST_CASE( "Lex then Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"then", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::ThenKeyword);
     for (auto t: tokens){
         delete t;
@@ -326,7 +326,7 @@ TEST_CASE( "Lex true Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"true", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::TrueKeyword);
     for (auto t: tokens){
         delete t;
@@ -336,7 +336,7 @@ TEST_CASE( "Lex while Keyword", "[lexer]" ) {
     Lexer lexer = Lexer(u"while", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::WhileKeyword);
     for (auto t: tokens){
         delete t;
@@ -347,7 +347,7 @@ TEST_CASE( "Lex identifier", "[lexer]" ) {
     Lexer lexer = Lexer(u"foo", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::Identifier);
     REQUIRE(((IdentifierToken*)firstToken) -> GetValue() == HashedString::CreateLookup(u"foo"));
     for (auto t: tokens){
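Worth noting in the identifier assertion above: the tests build HashedString values two different ways, HashedString::CreateLookup(u"foo") here and HashedString(new u16string(u"foo")) in the parser tests further down. The class itself is not part of this diff, so the interpretation is an assumption based on the names alone, but the first form reads as a non-owning lookup key and the second as a constructor that takes ownership of the heap-allocated string:

    // Non-owning lookup key, as used in the assertion above (assumed semantics):
    auto key = HashedString::CreateLookup(u"foo");
    // Owning form, as the parser tests below construct IdentifierTokens:
    auto owned = HashedString(new u16string(u"foo"));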
@@ -392,7 +392,7 @@ TEST_CASE("Lex Double Quote String", "[lexer]") {
     Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::String);
     REQUIRE(((StringToken*)firstToken) -> GetValue() == u"foo bar");
     for (auto t: tokens){
@@ -406,7 +406,7 @@ TEST_CASE("Lex Single Quote String", "[lexer]") {
     Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::String);
     REQUIRE(((StringToken*)firstToken) -> GetValue() == u"foo bar");
     for (auto t: tokens){
@@ -420,7 +420,7 @@ TEST_CASE("Lex Double Quote String, Escape Quote", "[lexer]") {
     Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::String);
     REQUIRE(((StringToken*)firstToken) -> GetValue() == u"foo\"bar");
     for (auto t: tokens){
@@ -434,7 +434,7 @@ TEST_CASE("Lex String with newline", "[lexer]") {
     Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
-    const IToken* firstToken = tokens[0];
+    const Token* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::String);
     REQUIRE(((StringToken*)firstToken) -> GetValue() == u"foo\nbar");
     for (auto t: tokens){
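Every lexer test above is the same five steps with a different input and expected kind: construct a Lexer over a UTF-16 literal, Lex(), require exactly two tokens (the lexed token plus EndOfFile), check the first token's kind, and delete the tokens the caller now owns. A condensed sketch of that shared skeleton; the helper name and signature are hypothetical, while the calls inside it are verbatim from the hunks above:

    // Hypothetical helper condensing the repeated keyword-test pattern.
    static void RequireSingleToken(const std::u16string& script, TokenKind expectedKind) {
        Lexer lexer = Lexer(script, nullptr);
        auto tokens = lexer.Lex();
        REQUIRE(tokens.size() == 2);                 // keyword + EndOfFile
        const Token* firstToken = tokens[0];
        REQUIRE(firstToken -> GetKind() == expectedKind);
        for (auto t: tokens){
            delete t;                                // the tests own the token pointers
        }
    }

    // Usage: RequireSingleToken(u"while", TokenKind::WhileKeyword);

The remaining hunks come from the second file in this commit, the parser tests, where the same rename applies to hand-built token vectors.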
@@ -6,7 +6,7 @@
 using namespace Porygon::Parser;

 TEST_CASE( "Parse single true keyword", "[parser]" ) {
-    vector<const IToken*> v {new SimpleToken(TokenKind::TrueKeyword,0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
+    vector<const Token*> v {new SimpleToken(TokenKind::TrueKeyword,0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
     Parser parser = Parser(v, nullptr);
     auto parsedScript = parser.Parse();
     auto parsedStatements = parsedScript -> GetStatements();
@@ -25,7 +25,7 @@ TEST_CASE( "Parse single true keyword", "[parser]" ) {
 }

 TEST_CASE( "Parse single false keyword", "[parser]" ) {
-    vector<const IToken*> v {new SimpleToken(TokenKind::FalseKeyword,0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
+    vector<const Token*> v {new SimpleToken(TokenKind::FalseKeyword,0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
     Parser parser = Parser(v, nullptr);
     auto parsedScript = parser.Parse();
     auto parsedStatements = parsedScript -> GetStatements();
@@ -44,7 +44,7 @@ TEST_CASE( "Parse single false keyword", "[parser]" ) {
 }

 TEST_CASE( "Parse simple addition", "[parser]" ) {
-    vector<const IToken*> v {
+    vector<const Token*> v {
         new IntegerToken(5, 0, 0),
         new SimpleToken(TokenKind::PlusToken,0,0),
         new IntegerToken(10, 0, 0),
@@ -74,7 +74,7 @@ TEST_CASE( "Parse simple addition", "[parser]" ) {
 }

 TEST_CASE( "Parse simple negation", "[parser]" ) {
-    vector<const IToken*> v {
+    vector<const Token*> v {
         new SimpleToken(TokenKind::MinusToken,0,0),
         new IntegerToken(10, 0, 0),
         new SimpleToken(TokenKind::EndOfFile,0,0)
@@ -99,7 +99,7 @@ TEST_CASE( "Parse simple negation", "[parser]" ) {
 }

 TEST_CASE( "Parse logical negation", "[parser]" ) {
-    vector<const IToken*> v {
+    vector<const Token*> v {
         new SimpleToken(TokenKind::NotKeyword,0,0),
         new SimpleToken(TokenKind::FalseKeyword,0,0),
         new SimpleToken(TokenKind::EndOfFile,0,0)
@@ -124,7 +124,7 @@ TEST_CASE( "Parse logical negation", "[parser]" ) {
 }

 TEST_CASE( "Are parenthesized expressions valid", "[parser]" ) {
-    vector<const IToken*> v {
+    vector<const Token*> v {
         new IntegerToken(5, 0, 0),
         new SimpleToken(TokenKind::PlusToken,0,0),
         new IntegerToken(10, 0, 0),
@@ -159,7 +159,7 @@ TEST_CASE( "Are parenthesized expressions valid", "[parser]" ) {
 }

 TEST_CASE( "Assert binary precedence", "[parser]" ) {
-    vector<const IToken*> v {
+    vector<const Token*> v {
         new SimpleToken(TokenKind::OpenParenthesis,0,0),
         new IntegerToken(10, 0, 0),
         new SimpleToken(TokenKind::CloseParenthesis,0,0),
@@ -183,7 +183,7 @@ TEST_CASE( "Assert binary precedence", "[parser]" ) {
 }

 TEST_CASE( "Parse String Tokens", "[parser]" ) {
-    vector<const IToken*> v {new StringToken(u"foo bar", 0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
+    vector<const Token*> v {new StringToken(u"foo bar", 0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
     Parser parser = Parser(v, nullptr);
     auto parsedScript = parser.Parse();
     auto parsedStatements = parsedScript -> GetStatements();
@@ -202,7 +202,7 @@ TEST_CASE( "Parse String Tokens", "[parser]" ) {
 }

 TEST_CASE( "Parse Global Assignment", "[parser]" ) {
-    vector<const IToken*> v {
+    vector<const Token*> v {
         new IdentifierToken(HashedString::CreateLookup(u"foo"),0,0),
         new SimpleToken(TokenKind::AssignmentToken,0,0),
         new SimpleToken(TokenKind::TrueKeyword,0,0),
@@ -226,7 +226,7 @@ TEST_CASE( "Parse Global Assignment", "[parser]" ) {
 }

 TEST_CASE( "Parse local Assignment", "[parser]" ) {
-    vector<const IToken*> v {
+    vector<const Token*> v {
         new SimpleToken(TokenKind::LocalKeyword,0,0),
         new IdentifierToken(HashedString(new u16string(u"foo")),0,0),
         new SimpleToken(TokenKind::AssignmentToken,0,0),
@@ -252,7 +252,7 @@ TEST_CASE( "Parse local Assignment", "[parser]" ) {
 }

 TEST_CASE( "Parse function declaration", "[parser]" ){
-    vector<const IToken*> v {
+    vector<const Token*> v {
         new SimpleToken(TokenKind::FunctionKeyword,0,0),
         new IdentifierToken(HashedString(new u16string(u"foo")),0,0),
         new SimpleToken(TokenKind::OpenParenthesis,0,0),
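The element type of these vectors has to change along with the Parser's signature: vector<const IToken*> and vector<const Token*> are unrelated types in C++ even when Token derives from IToken, so every hand-rolled token stream in the test file must be rewritten. Assembled from the hunks above, one complete parser test now reads roughly like this; everything past GetStatements() falls outside the seven-line diff context, so only the shown lines are confirmed by the diff:

    TEST_CASE( "Parse single true keyword", "[parser]" ) {
        vector<const Token*> v {new SimpleToken(TokenKind::TrueKeyword,0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
        Parser parser = Parser(v, nullptr);
        auto parsedScript = parser.Parse();
        auto parsedStatements = parsedScript -> GetStatements();
        // The assertions on parsedStatements and the token cleanup sit
        // outside the diff context and are elided here.
    }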