#ifdef TESTS_BUILD

// Catch2 test framework header (the original include target was lost; exact path is assumed,
// adjust to match the project's setup, e.g. a vendored catch.hpp).
#include <catch2/catch.hpp>

#include "../../src/Parser/Lexer.hpp"

TEST_CASE( "When at end of script return terminator", "[lexer]" ) {
    Lexer lexer = Lexer("", nullptr);
    REQUIRE(lexer.Peek() == '\0');
}

TEST_CASE( "Peek doesn't advance", "[lexer]" ) {
    auto script = "5 + 5";
    Lexer lexer = Lexer(script, nullptr);
    REQUIRE(lexer.Peek() == '5');
    REQUIRE(lexer.Peek() == '5');
    REQUIRE(lexer.Peek() == '5');
}

TEST_CASE( "Next does advance", "[lexer]" ) {
    auto script = "5 + 5";
    Lexer lexer = Lexer(script, nullptr);
    REQUIRE(lexer.Next() == '5');
    REQUIRE(lexer.Next() == ' ');
    REQUIRE(lexer.Next() == '+');
    REQUIRE(lexer.Next() == ' ');
    REQUIRE(lexer.Next() == '5');
    REQUIRE(lexer.Next() == '\0');
}

TEST_CASE( "Lex Null Terminator as EOF", "[lexer]" ) {
    Lexer lexer = Lexer("", nullptr);
    REQUIRE(lexer.LexNext('\0')->GetKind() == TokenKind::EndOfFile);
}

TEST_CASE( "Lex Plus Token", "[lexer]" ) {
    Lexer lexer = Lexer("", nullptr);
    REQUIRE(lexer.LexNext('+')->GetKind() == TokenKind::PlusToken);
}

TEST_CASE( "Lex Minus Token", "[lexer]" ) {
    Lexer lexer = Lexer("", nullptr);
    REQUIRE(lexer.LexNext('-')->GetKind() == TokenKind::MinusToken);
}

TEST_CASE( "Lex Slash Token", "[lexer]" ) {
    Lexer lexer = Lexer("", nullptr);
    REQUIRE(lexer.LexNext('/')->GetKind() == TokenKind::SlashToken);
}

TEST_CASE( "Lex Star Token", "[lexer]" ) {
    Lexer lexer = Lexer("", nullptr);
    REQUIRE(lexer.LexNext('*')->GetKind() == TokenKind::StarToken);
}

TEST_CASE( "Lex Assignment Token", "[lexer]" ) {
    Lexer lexer = Lexer("", nullptr);
    REQUIRE(lexer.LexNext('=')->GetKind() == TokenKind::AssignmentToken);
}

TEST_CASE( "Lex Equality Token", "[lexer]" ) {
    Lexer lexer = Lexer("==", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::EqualityToken);
}

TEST_CASE( "Lex Whitespace", "[lexer]" ) {
    Lexer lexer = Lexer("", nullptr);
    CHECK(lexer.LexNext(' ')->GetKind() == TokenKind::WhiteSpace);
    CHECK(lexer.LexNext('\t')->GetKind() == TokenKind::WhiteSpace);
    CHECK(lexer.LexNext('\n')->GetKind() == TokenKind::WhiteSpace);
    CHECK(lexer.LexNext('\r')->GetKind() == TokenKind::WhiteSpace);
    CHECK(lexer.LexNext('\v')->GetKind() == TokenKind::WhiteSpace);
    CHECK(lexer.LexNext('\f')->GetKind() == TokenKind::WhiteSpace);
}

TEST_CASE( "Lex Basic Digits", "[lexer]" ) {
    Lexer lexer = Lexer("", nullptr);
    CHECK(lexer.LexNext('0')->GetKind() == TokenKind::Integer);
    CHECK(lexer.LexNext('1')->GetKind() == TokenKind::Integer);
    CHECK(lexer.LexNext('2')->GetKind() == TokenKind::Integer);
    CHECK(lexer.LexNext('3')->GetKind() == TokenKind::Integer);
    CHECK(lexer.LexNext('4')->GetKind() == TokenKind::Integer);
    CHECK(lexer.LexNext('5')->GetKind() == TokenKind::Integer);
    CHECK(lexer.LexNext('6')->GetKind() == TokenKind::Integer);
    CHECK(lexer.LexNext('7')->GetKind() == TokenKind::Integer);
    CHECK(lexer.LexNext('8')->GetKind() == TokenKind::Integer);
    CHECK(lexer.LexNext('9')->GetKind() == TokenKind::Integer);
}

TEST_CASE( "Lex Longer Integers", "[lexer]" ) {
    long integers[] {0, 1, 5, 9, 10, 50, 100, 1000, 99999, 6484, 62163, 48862};
    for (long integer : integers) {
        Lexer lexer = Lexer(std::to_string(integer), nullptr);
        auto tokens = lexer.Lex();
        REQUIRE(tokens.size() == 2);
        const IToken* firstToken = tokens[0];
        REQUIRE(firstToken->GetKind() == TokenKind::Integer);
        auto* integerToken = (IntegerToken *)firstToken;
        CHECK(integerToken->GetValue() == integer);
    }
}

TEST_CASE( "Lex Floats", "[lexer]" ) {
    double floats[] {0.5, 0.8, 100.7, 52.3548,
                     8461354.1324886};
    for (double f : floats) {
        Lexer lexer = Lexer(std::to_string(f), nullptr);
        auto tokens = lexer.Lex();
        REQUIRE(tokens.size() == 2);
        const IToken* firstToken = tokens[0];
        REQUIRE(firstToken->GetKind() == TokenKind::Float);
        auto* floatToken = (FloatToken *)firstToken;
        CHECK(floatToken->GetValue() == Approx(f));
    }
}

TEST_CASE( "Lex And Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("and", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::AndKeyword);
}

TEST_CASE( "Lex Break Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("break", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::BreakKeyword);
}

TEST_CASE( "Lex Do Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("do", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::DoKeyword);
}

TEST_CASE( "Lex else Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("else", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::ElseKeyword);
}

TEST_CASE( "Lex else if Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("elseif", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::ElseIfKeyword);
}

TEST_CASE( "Lex end Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("end", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::EndKeyword);
}

TEST_CASE( "Lex false Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("false", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::FalseKeyword);
}

TEST_CASE( "Lex for Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("for", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::ForKeyword);
}

TEST_CASE( "Lex function Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("function", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::FunctionKeyword);
}

TEST_CASE( "Lex if Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("if", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::IfKeyword);
}

TEST_CASE( "Lex in Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("in", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::InKeyword);
}

TEST_CASE( "Lex local Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("local", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::LocalKeyword);
}

TEST_CASE( "Lex nil Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("nil", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::NilKeyword);
}

TEST_CASE( "Lex not Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("not", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::NotKeyword);
}

TEST_CASE( "Lex or Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("or", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::OrKeyword);
}

TEST_CASE( "Lex return Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("return", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::ReturnKeyword);
}

TEST_CASE( "Lex then Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("then", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::ThenKeyword);
}

TEST_CASE( "Lex true Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("true", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::TrueKeyword);
}

TEST_CASE( "Lex while Keyword", "[lexer]" ) {
    Lexer lexer = Lexer("while", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::WhileKeyword);
}

TEST_CASE( "Lex identifier", "[lexer]" ) {
    Lexer lexer = Lexer("foo", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::Identifier);
    REQUIRE(((IdentifierToken*)firstToken)->GetValue() == HashedString("foo"));
}

TEST_CASE( "Lex Start Position", "[lexer]" ) {
    Lexer lexer = Lexer("+ - bar 1234", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 5);
    CHECK(((IdentifierToken*)tokens[0])->GetStartPosition() == 0);
    CHECK(((IdentifierToken*)tokens[1])->GetStartPosition() == 2);
    CHECK(((IdentifierToken*)tokens[2])->GetStartPosition() == 4);
    CHECK(((IdentifierToken*)tokens[3])->GetStartPosition() == 8);
    CHECK(((IdentifierToken*)tokens[4])->GetStartPosition() == 12);
}

TEST_CASE( "Lex End Position", "[lexer]" ) {
    Lexer lexer = Lexer("+ - bar 1234", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 5);
    CHECK(((IdentifierToken*)tokens[0])->GetEndPosition() == 0);
    CHECK(((IdentifierToken*)tokens[1])->GetEndPosition() == 2);
    CHECK(((IdentifierToken*)tokens[2])->GetEndPosition() == 6);
    CHECK(((IdentifierToken*)tokens[3])->GetEndPosition() == 11);
    CHECK(((IdentifierToken*)tokens[4])->GetEndPosition() == 12);
}

TEST_CASE("Lex Double Quote String", "[lexer]") {
    Lexer lexer = Lexer("\"foo bar\"", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::String);
    REQUIRE(((StringToken*)firstToken)->GetValue() == "foo bar");
}

TEST_CASE("Lex Single Quote String", "[lexer]") {
    Lexer lexer = Lexer("'foo bar'", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::String);
    REQUIRE(((StringToken*)firstToken)->GetValue() == "foo bar");
}

TEST_CASE("Lex Double Quote String, Escape Quote", "[lexer]") {
    Lexer lexer = Lexer("'foo\\\"bar'", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::String);
    REQUIRE(((StringToken*)firstToken)->GetValue() == "foo\"bar");
}

TEST_CASE("Lex String with newline", "[lexer]") {
    Lexer lexer = Lexer("'foo\\nbar'", nullptr);
    auto tokens = lexer.Lex();
    REQUIRE(tokens.size() == 2);
    const IToken* firstToken = tokens[0];
    REQUIRE(firstToken->GetKind() == TokenKind::String);
    REQUIRE(((StringToken*)firstToken)->GetValue() == "foo\nbar");
}

#endif