Move Lexer to u16string handling, for unicode support
All checks were successful
continuous-integration/drone/push Build is passing
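This commit switches the Lexer's input from std::string to std::u16string so scripts can carry Unicode text. The tests convert numeric literals to UTF-16 through small codecvt-based helpers before handing them to the Lexer. As a rough, self-contained sketch of that conversion (the helper name to_u16 and the main() check are illustrative only, not part of the commit; std::wstring_convert is deprecated since C++17 but still usable):

#include <cassert>
#include <codecvt>
#include <locale>
#include <string>

// Widen a UTF-8 encoded std::string into a UTF-16 std::u16string,
// mirroring the to_u16string helpers the tests below introduce.
std::u16string to_u16(const std::string &utf8) {
    std::wstring_convert<std::codecvt_utf8_utf16<char16_t, 0x10ffff, std::little_endian>, char16_t> conv;
    return conv.from_bytes(utf8);
}

int main() {
    // std::to_string produces plain ASCII digits, so the widened
    // result compares equal to the corresponding u"" literal.
    assert(to_u16(std::to_string(1234)) == u"1234");
    return 0;
}

The diff below applies the same pattern throughout the tests: narrow string literals become u"..." literals, helpers named to_u16string handle numbers, and StringToken values are compared against char16_t literals.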
@@ -1,14 +1,16 @@
 #ifdef TESTS_BUILD
 #include <catch.hpp>
+#include <codecvt>
+#include <locale>
 #include "../../src/Parser/Lexer.hpp"
 
 TEST_CASE( "When at end of script return terminator", "[lexer]" ) {
-    Lexer lexer = Lexer("", nullptr);
+    Lexer lexer = Lexer(u"", nullptr);
     REQUIRE(lexer.Peek() == '\0');
 }
 
 TEST_CASE( "Peek doesn't advance", "[lexer]" ) {
-    auto script = new string("5 + 5"); // Create as reference to ensure the compiler plays nice with it in release builds
+    auto script = new u16string(u"5 + 5"); // Create as reference to ensure the compiler plays nice with it in release builds
     Lexer lexer = Lexer(*script, nullptr);
     REQUIRE(lexer.Peek() == '5');
     REQUIRE(lexer.Peek() == '5');
@@ -16,7 +18,7 @@ TEST_CASE( "Peek doesn't advance", "[lexer]" ) {
 }
 
 TEST_CASE( "Next does advance", "[lexer]" ) {
-    auto script = new string("5 + 5"); // Create as reference to ensure the compiler plays nice with it in release builds
+    auto script = new u16string(u"5 + 5"); // Create as reference to ensure the compiler plays nice with it in release builds
     Lexer lexer = Lexer(*script, nullptr);
     REQUIRE(lexer.Next() == '5');
     REQUIRE(lexer.Next() == ' ');
@@ -27,37 +29,37 @@ TEST_CASE( "Next does advance", "[lexer]" ) {
 }
 
 TEST_CASE( "Lex Null Terminator as EOF", "[lexer]" ) {
-    Lexer lexer = Lexer("", nullptr);
+    Lexer lexer = Lexer(u"", nullptr);
     REQUIRE(lexer.LexNext('\0') -> GetKind() == TokenKind::EndOfFile);
 }
 
 TEST_CASE( "Lex Plus Token", "[lexer]" ) {
-    Lexer lexer = Lexer("", nullptr);
+    Lexer lexer = Lexer(u"", nullptr);
     REQUIRE(lexer.LexNext('+') -> GetKind() == TokenKind::PlusToken);
 }
 
 TEST_CASE( "Lex Minus Token", "[lexer]" ) {
-    Lexer lexer = Lexer("", nullptr);
+    Lexer lexer = Lexer(u"", nullptr);
     REQUIRE(lexer.LexNext('-') -> GetKind() == TokenKind::MinusToken);
 }
 
 TEST_CASE( "Lex Slash Token", "[lexer]" ) {
-    Lexer lexer = Lexer("", nullptr);
+    Lexer lexer = Lexer(u"", nullptr);
     REQUIRE(lexer.LexNext('/') -> GetKind() == TokenKind::SlashToken);
 }
 
 TEST_CASE( "Lex Star Token", "[lexer]" ) {
-    Lexer lexer = Lexer("", nullptr);
+    Lexer lexer = Lexer(u"", nullptr);
     REQUIRE(lexer.LexNext('*') -> GetKind() == TokenKind::StarToken);
 }
 
 TEST_CASE( "Lex Assignment Token", "[lexer]" ) {
-    Lexer lexer = Lexer("", nullptr);
+    Lexer lexer = Lexer(u"", nullptr);
     REQUIRE(lexer.LexNext('=') -> GetKind() == TokenKind::AssignmentToken);
 }
 
 TEST_CASE( "Lex Equality Token", "[lexer]" ) {
-    Lexer lexer = Lexer("==", nullptr);
+    Lexer lexer = Lexer(u"==", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
@@ -65,7 +67,7 @@ TEST_CASE( "Lex Equality Token", "[lexer]" ) {
 }
 
 TEST_CASE( "Lex Whitespace", "[lexer]" ) {
-    Lexer lexer = Lexer("", nullptr);
+    Lexer lexer = Lexer(u"", nullptr);
     CHECK(lexer.LexNext(' ') -> GetKind() == TokenKind::WhiteSpace);
     CHECK(lexer.LexNext('\t') -> GetKind() == TokenKind::WhiteSpace);
     CHECK(lexer.LexNext('\n') -> GetKind() == TokenKind::WhiteSpace);
@@ -75,7 +77,7 @@ TEST_CASE( "Lex Whitespace", "[lexer]" ) {
 }
 
 TEST_CASE( "Lex Basic Digits", "[lexer]" ) {
-    Lexer lexer = Lexer("", nullptr);
+    Lexer lexer = Lexer(u"", nullptr);
     CHECK(lexer.LexNext('0') -> GetKind() == TokenKind::Integer);
     CHECK(lexer.LexNext('1') -> GetKind() == TokenKind::Integer);
     CHECK(lexer.LexNext('2') -> GetKind() == TokenKind::Integer);
@@ -88,10 +90,16 @@ TEST_CASE( "Lex Basic Digits", "[lexer]" ) {
     CHECK(lexer.LexNext('9') -> GetKind() == TokenKind::Integer);
 }
 
+std::u16string to_u16string(long const &i) {
+    std::wstring_convert<std::codecvt_utf8_utf16<char16_t, 0x10ffff, std::little_endian>, char16_t> conv;
+    return conv.from_bytes(std::to_string(i));
+}
+
 TEST_CASE( "Lex Longer Integers", "[lexer]" ) {
     long integers[] {0,1,5,9,10,50,100,1000,99999,6484,62163,48862};
     for (long integer : integers){
-        Lexer lexer = Lexer(std::to_string(integer), nullptr);
+        auto s = to_u16string(integer);
+        Lexer lexer = Lexer(s, nullptr);
         auto tokens = lexer.Lex();
         REQUIRE(tokens.size() == 2);
         const IToken* firstToken = tokens[0];
@@ -101,10 +109,15 @@ TEST_CASE( "Lex Longer Integers", "[lexer]" ) {
     }
 }
 
+std::u16string to_u16string(double const &i) {
+    std::wstring_convert<std::codecvt_utf8_utf16<char16_t, 0x10ffff, std::little_endian>, char16_t> conv;
+    return conv.from_bytes(std::to_string(i));}
+
 TEST_CASE( "Lex Floats", "[lexer]" ) {
     double floats[] {0.5, 0.8, 100.7, 52.3548, 8461354.1324886};
     for (double f : floats){
-        Lexer lexer = Lexer(std::to_string(f), nullptr);
+        auto s = to_u16string(f);
+        Lexer lexer = Lexer(s, nullptr);
         auto tokens = lexer.Lex();
         REQUIRE(tokens.size() == 2);
         const IToken* firstToken = tokens[0];
@@ -115,133 +128,134 @@ TEST_CASE( "Lex Floats", "[lexer]" ) {
 }
 
 TEST_CASE( "Lex And Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("and", nullptr);
+    Lexer lexer = Lexer(u"and", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::AndKeyword);
 }
 TEST_CASE( "Lex Break Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("break", nullptr);
+    Lexer lexer = Lexer(u"break", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::BreakKeyword);
 }
 TEST_CASE( "Lex Do Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("do", nullptr);
+    Lexer lexer = Lexer(u"do", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::DoKeyword);
 }
 TEST_CASE( "Lex else Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("else", nullptr);
+    Lexer lexer = Lexer(u"else", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::ElseKeyword);
 }
 TEST_CASE( "Lex else if Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("elseif", nullptr);
+    Lexer lexer = Lexer(u"elseif", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::ElseIfKeyword);
 }
 TEST_CASE( "Lex end Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("end", nullptr);
+    Lexer lexer = Lexer(u"end", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::EndKeyword);
 }
 TEST_CASE( "Lex false Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("false", nullptr);
+    Lexer lexer = Lexer(u"false", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::FalseKeyword);
 }
 TEST_CASE( "Lex for Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("for", nullptr);
+    Lexer lexer = Lexer(u"for", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::ForKeyword);
 }
 TEST_CASE( "Lex function Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("function", nullptr);
+    auto s = new u16string(u"function");
+    Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::FunctionKeyword);
 }
 TEST_CASE( "Lex if Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("if", nullptr);
+    Lexer lexer = Lexer(u"if", nullptr);
    auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::IfKeyword);
 }
 TEST_CASE( "Lex in Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("in", nullptr);
+    Lexer lexer = Lexer(u"in", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::InKeyword);
 }
 TEST_CASE( "Lex local Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("local", nullptr);
+    Lexer lexer = Lexer(u"local", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::LocalKeyword);
 }
 TEST_CASE( "Lex nil Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("nil", nullptr);
+    Lexer lexer = Lexer(u"nil", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::NilKeyword);
 }
 TEST_CASE( "Lex not Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("not", nullptr);
+    Lexer lexer = Lexer(u"not", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::NotKeyword);
 }
 TEST_CASE( "Lex or Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("or", nullptr);
+    Lexer lexer = Lexer(u"or", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::OrKeyword);
 }
 TEST_CASE( "Lex return Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("return", nullptr);
+    Lexer lexer = Lexer(u"return", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::ReturnKeyword);
 }
 TEST_CASE( "Lex then Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("then", nullptr);
+    Lexer lexer = Lexer(u"then", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::ThenKeyword);
 }
 TEST_CASE( "Lex true Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("true", nullptr);
+    Lexer lexer = Lexer(u"true", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::TrueKeyword);
 }
 TEST_CASE( "Lex while Keyword", "[lexer]" ) {
-    Lexer lexer = Lexer("while", nullptr);
+    Lexer lexer = Lexer(u"while", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
@@ -249,7 +263,7 @@ TEST_CASE( "Lex while Keyword", "[lexer]" ) {
 }
 
 TEST_CASE( "Lex identifier", "[lexer]" ) {
-    Lexer lexer = Lexer("foo", nullptr);
+    Lexer lexer = Lexer(u"foo", nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
@@ -258,7 +272,8 @@ TEST_CASE( "Lex identifier", "[lexer]" ) {
 }
 
 TEST_CASE( "Lex Start Position", "[lexer]" ) {
-    Lexer lexer = Lexer("+ - bar 1234", nullptr);
+    auto s = new u16string(u"+ - bar 1234");
+    Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 5);
     CHECK(((IdentifierToken*)tokens[0]) -> GetStartPosition() == 0);
@@ -269,7 +284,8 @@ TEST_CASE( "Lex Start Position", "[lexer]" ) {
 }
 
 TEST_CASE( "Lex End Position", "[lexer]" ) {
-    Lexer lexer = Lexer("+ - bar 1234", nullptr);
+    auto s = new u16string(u"+ - bar 1234");
+    Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 5);
     CHECK(((IdentifierToken*)tokens[0]) -> GetEndPosition() == 0);
@@ -280,39 +296,43 @@ TEST_CASE( "Lex End Position", "[lexer]" ) {
 }
 
 TEST_CASE("Lex Double Quote String", "[lexer]") {
-    Lexer lexer = Lexer("\"foo bar\"", nullptr);
+    auto s = new u16string(u"\"foo bar\"");
+    Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::String);
-    REQUIRE(((StringToken*)firstToken) -> GetValue() == "foo bar");
+    REQUIRE(((StringToken*)firstToken) -> GetValue() == u"foo bar");
 }
 
 TEST_CASE("Lex Single Quote String", "[lexer]") {
-    Lexer lexer = Lexer("'foo bar'", nullptr);
+    auto s = new u16string(u"'foo bar'");
+    Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::String);
-    REQUIRE(((StringToken*)firstToken) -> GetValue() == "foo bar");
+    REQUIRE(((StringToken*)firstToken) -> GetValue() == u"foo bar");
 }
 
 TEST_CASE("Lex Double Quote String, Escape Quote", "[lexer]") {
-    Lexer lexer = Lexer("'foo\\\"bar'", nullptr);
+    auto s = new u16string(u"'foo\\\"bar'");
+    Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::String);
-    REQUIRE(((StringToken*)firstToken) -> GetValue() == "foo\"bar");
+    REQUIRE(((StringToken*)firstToken) -> GetValue() == u"foo\"bar");
 }
 
 TEST_CASE("Lex String with newline", "[lexer]") {
-    Lexer lexer = Lexer("'foo\\nbar'", nullptr);
+    auto s = new u16string(u"'foo\\nbar'");
+    Lexer lexer = Lexer(*s, nullptr);
     auto tokens = lexer.Lex();
     REQUIRE(tokens.size() == 2);
     const IToken* firstToken = tokens[0];
     REQUIRE(firstToken -> GetKind() == TokenKind::String);
-    REQUIRE(((StringToken*)firstToken) -> GetValue() == "foo\nbar");
+    REQUIRE(((StringToken*)firstToken) -> GetValue() == u"foo\nbar");
 }
 
 
@@ -142,7 +142,7 @@ TEST_CASE( "Assert binary precedence", "[parser]" ) {
 }
 
 TEST_CASE( "Parse String Tokens", "[parser]" ) {
-    vector<const IToken*> v {new StringToken("foo bar", 0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
+    vector<const IToken*> v {new StringToken(u"foo bar", 0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
     Parser parser = Parser(v, nullptr);
     auto parsedStatements = parser.Parse() -> GetStatements();
     REQUIRE(parsedStatements->size() == 1);
@@ -151,7 +151,7 @@ TEST_CASE( "Parse String Tokens", "[parser]" ) {
     auto expression = ((ParsedExpressionStatement*)firstStatement)->GetExpression();
     REQUIRE(expression -> GetKind() == ParsedExpressionKind::LiteralString);
     auto boolean = ((LiteralStringExpression*)expression);
-    REQUIRE(boolean->GetValue() == "foo bar");
+    REQUIRE(boolean->GetValue() == u"foo bar");
 }
 
 TEST_CASE( "Parse Global Assignment", "[parser]" ) {