Make Lexer use constant Tokens
continuous-integration/drone/push Build is failing Details

This commit is contained in:
Deukhoofd 2019-06-13 18:49:38 +02:00
parent 5910cbbfa9
commit 3e00f750ef
No known key found for this signature in database
GPG Key ID: B4C087AC81641654
10 changed files with 162 additions and 137 deletions

View File

@ -14,8 +14,8 @@ Lexer::Lexer(const string& scriptString, class Script* script)
}
vector<IToken*> Lexer::Lex() {
vector<IToken*> tokens;
vector<const IToken*> Lexer::Lex() {
vector<const IToken*> tokens;
while (true){
IToken* next = this -> LexNext(this -> Next());
auto nextKind = next -> GetKind();
@ -215,11 +215,11 @@ IToken * Lexer::LexIdentifierOrKeyword() {
case HashedString::ConstHash("then"): return new SimpleToken(TokenKind::ThenKeyword, start, 4);
case HashedString::ConstHash("true"): return new SimpleToken(TokenKind::TrueKeyword, start, 4);
case HashedString::ConstHash("while"): return new SimpleToken(TokenKind::WhileKeyword, start, 5);
default: return new IdentifierToken(s, start, s.length());
default: return new IdentifierToken(HashedString(s), start, s.length());
}
}
const unordered_map<char, char> ControlCharacters{
const unordered_map<char, char> ControlCharacters{ // NOLINT(cert-err58-cpp)
{'0', '\0'},
{'a', '\a'},
{'b', '\b'},

View File

@ -23,7 +23,7 @@ public:
public:
Script* ScriptData;
vector<IToken*> Lex();
vector<const IToken*> Lex();
explicit Lexer(const string& scriptString, class Script* script);
};

View File

@ -73,7 +73,7 @@ public:
}
explicit LiteralIntegerExpression(IntegerToken* token)
: ParsedExpression(token -> GetStartPosition(), token -> GetLength()),
_value(token -> Value)
_value(token -> GetValue())
{
}
@ -90,7 +90,7 @@ public:
}
explicit LiteralFloatExpression(FloatToken* token)
: ParsedExpression(token -> GetStartPosition(), token -> GetLength()),
_value(token -> Value)
_value(token -> GetValue())
{
}
@ -107,7 +107,7 @@ public:
}
explicit LiteralStringExpression(StringToken* token)
: ParsedExpression(token -> GetStartPosition(), token -> GetLength()),
_value(std::move(token -> Value))
_value(token -> GetValue())
{
}
@ -122,7 +122,7 @@ public:
const ParsedExpressionKind GetKind() const final{
return ParsedExpressionKind::LiteralBool;
}
explicit LiteralBoolExpression(IToken* token)
explicit LiteralBoolExpression(const IToken* token)
: ParsedExpression(token -> GetStartPosition(), token -> GetLength()),
_value(token -> GetKind() == TokenKind::TrueKeyword)
{
@ -140,7 +140,7 @@ public:
return ParsedExpressionKind::Variable;
}
explicit VariableExpression(IdentifierToken* token) : ParsedExpression(token -> GetStartPosition(), token -> GetLength())
, _value(HashedString(token -> Value))
, _value(HashedString(token -> GetValue()))
{
}

View File

@ -154,8 +154,8 @@ class ParsedAssignmentStatement : public ParsedStatement{
const HashedString _identifier;
const ParsedExpression* _expression;
public:
ParsedAssignmentStatement(bool local, const std::string& identifier, ParsedExpression* expression, unsigned int start, unsigned int length)
: ParsedStatement(start, length), _local(local), _identifier(HashedString(identifier)), _expression(expression)
ParsedAssignmentStatement(bool local, const HashedString identifier, ParsedExpression* expression, unsigned int start, unsigned int length)
: ParsedStatement(start, length), _local(local), _identifier(identifier), _expression(expression)
{
}

View File

@ -22,20 +22,20 @@ ParsedScriptStatement* Parser::Parse() {
return new ParsedScriptStatement(statements);
}
IToken *Parser::Peek() {
const IToken *Parser::Peek() {
return this -> _tokens[_position];
}
IToken *Parser::PeekAt(int offset) {
const IToken *Parser::PeekAt(int offset) {
return this -> _tokens[_position + offset];
}
IToken *Parser::Next() {
const IToken *Parser::Next() {
this -> _position++;
return this -> _tokens[_position - 1];
}
ParsedStatement* Parser::ParseStatement(IToken* current){
ParsedStatement* Parser::ParseStatement(const IToken* current){
auto currentKind = current->GetKind();
switch (currentKind){
case TokenKind ::LocalKeyword: return this -> ParseAssignment(current);
@ -50,9 +50,9 @@ ParsedStatement* Parser::ParseStatement(IToken* current){
return new ParsedExpressionStatement(this -> ParseExpression(current));
}
ParsedStatement *Parser::ParseAssignment(IToken *current) {
ParsedStatement *Parser::ParseAssignment(const IToken *current) {
bool isLocal = false;
IToken* identifier;
const IToken* identifier;
if (current -> GetKind() == TokenKind::LocalKeyword){
isLocal = true;
identifier = this -> Next();
@ -72,7 +72,7 @@ ParsedStatement *Parser::ParseAssignment(IToken *current) {
}
auto start = current -> GetStartPosition();
return new ParsedAssignmentStatement(isLocal, ((IdentifierToken*)identifier) -> Value, expression, start, expression->GetEndPosition() - start);
return new ParsedAssignmentStatement(isLocal, ((IdentifierToken*)identifier) -> GetValue(), expression, start, expression->GetEndPosition() - start);
}
ParsedStatement *Parser::ParseBlock(const vector<TokenKind>& endTokens, const vector<const ParsedStatement*>& openStatements) {
@ -96,7 +96,7 @@ ParsedStatement *Parser::ParseBlock(const vector<TokenKind>& endTokens, const ve
return new ParsedBlockStatement(statements);
}
ParsedStatement *Parser::ParseFunctionDeclaration(IToken *current) {
ParsedStatement *Parser::ParseFunctionDeclaration(const IToken *current) {
auto functionIdentifierToken = this->Next();
auto openParenthesis = this->Next();
vector<TypedVariableIdentifier*> parameters;
@ -135,7 +135,7 @@ ParsedStatement *Parser::ParseFunctionDeclaration(IToken *current) {
}
auto typeToken = (IdentifierToken*)type;
auto identifierToken = (IdentifierToken*)identifier;
parameters.push_back(new TypedVariableIdentifier(HashedString(typeToken->Value), HashedString(identifierToken->Value)));
parameters.push_back(new TypedVariableIdentifier(typeToken->GetValue(), identifierToken->GetValue()));
auto nextKind = next->GetKind();
if (nextKind == TokenKind::CloseParenthesis || nextKind == TokenKind::EndOfFile){
@ -154,18 +154,18 @@ ParsedStatement *Parser::ParseFunctionDeclaration(IToken *current) {
if (block->GetKind() == ParsedStatementKind::Bad){
return new ParsedBadStatement(start, block->GetEndPosition() - start);
}
auto functionIdentifier = ((IdentifierToken*) functionIdentifierToken)->Value;
auto functionIdentifier = ((IdentifierToken*) functionIdentifierToken)->GetValue();
return new ParsedFunctionDeclarationStatement(HashedString(functionIdentifier), parameters, (ParsedBlockStatement*)block, start, block->GetEndPosition() - start);
}
ParsedStatement* Parser::ParseReturnStatement(IToken* current){
ParsedStatement* Parser::ParseReturnStatement(const IToken* current){
//TODO: if next token is on a different line, don't parse it as return expression.
auto expression = this->ParseExpression(this->Next());
auto start = current->GetStartPosition();
return new ParsedReturnStatement(expression, start, expression->GetEndPosition() - start);
}
ParsedStatement* Parser::ParseIfStatement(IToken* current){
ParsedStatement* Parser::ParseIfStatement(const IToken* current){
auto condition = this->ParseExpression(this->Next());
auto next = this -> Next();
if (next->GetKind() != TokenKind::ThenKeyword){
@ -185,7 +185,7 @@ ParsedStatement* Parser::ParseIfStatement(IToken* current){
return new ParsedConditionalStatement(condition, block, start, block->GetEndPosition() - start);
}
ParsedExpression* Parser::ParseExpression(IToken* current){
ParsedExpression* Parser::ParseExpression(const IToken* current){
auto expression = this -> ParseBinaryExpression(current, OperatorPrecedence::No);
auto peekKind = this->Peek()->GetKind();
while (peekKind == TokenKind::OpenParenthesis ||
@ -274,7 +274,7 @@ OperatorPrecedence GetBinaryPrecedence(TokenKind kind){
}
}
ParsedExpression* Parser::ParseBinaryExpression(IToken* current, OperatorPrecedence parentPrecedence){
ParsedExpression* Parser::ParseBinaryExpression(const IToken* current, OperatorPrecedence parentPrecedence){
OperatorPrecedence unaryPrecedence = GetUnaryPrecedence(current -> GetKind());
ParsedExpression* left;
if (unaryPrecedence != OperatorPrecedence::No && unaryPrecedence >= parentPrecedence){
@ -301,7 +301,7 @@ ParsedExpression* Parser::ParseBinaryExpression(IToken* current, OperatorPrecede
return left;
}
ParsedExpression *Parser::ParsePrimaryExpression(IToken *current) {
ParsedExpression *Parser::ParsePrimaryExpression(const IToken *current) {
switch (current -> GetKind()){
case TokenKind ::Integer: return new LiteralIntegerExpression((IntegerToken*)current);
case TokenKind ::Float: return new LiteralFloatExpression((FloatToken*)current);
@ -319,7 +319,7 @@ ParsedExpression *Parser::ParsePrimaryExpression(IToken *current) {
}
}
ParsedExpression *Parser::ParseParenthesizedExpression(IToken *current) {
ParsedExpression *Parser::ParseParenthesizedExpression(const IToken *current) {
auto next = this -> Next();
auto expression = this -> ParseExpression(next);
auto closeToken = this -> Next();
@ -369,7 +369,7 @@ ParsedExpression* Parser::ParseIndexExpression(ParsedExpression* indexingExpress
return new IndexExpression(indexingExpression, indexExpression, start, closeBracket->GetEndPosition() - start);
}
ParsedExpression* Parser::ParseTableExpression(IToken* current){
ParsedExpression* Parser::ParseTableExpression(const IToken* current){
if (this -> Peek() -> GetKind() == TokenKind::CloseCurlyBracket){
this -> Next();
auto start = current->GetStartPosition();

View File

@ -18,39 +18,36 @@ enum class OperatorPrecedence {
};
class Parser {
vector<IToken*> _tokens;
vector<const IToken*> _tokens;
unsigned int _position;
Script* ScriptData;
IToken* Peek();
IToken* Next();
const IToken* Peek();
const IToken *PeekAt(int offset);
const IToken* Next();
ParsedStatement* ParseStatement(IToken* current);
ParsedStatement* ParseAssignment(IToken* current);
ParsedStatement* ParseStatement(const IToken* current);
ParsedStatement* ParseAssignment(const IToken* current);
ParsedStatement *ParseBlock(const vector<TokenKind>& endTokens, const vector<const ParsedStatement*>& openStatements = {});
ParsedStatement* ParseFunctionDeclaration(IToken* current);
ParsedStatement *ParseReturnStatement(IToken *current);
ParsedStatement *ParseIfStatement(IToken *current);
ParsedStatement* ParseFunctionDeclaration(const IToken* current);
ParsedStatement *ParseReturnStatement(const IToken *current);
ParsedStatement *ParseIfStatement(const IToken *current);
ParsedExpression* ParseExpression(IToken* current);
ParsedExpression* ParseBinaryExpression(IToken* current, OperatorPrecedence parentPrecedence);
ParsedExpression* ParsePrimaryExpression(IToken* current);
ParsedExpression* ParseParenthesizedExpression(IToken *current);
ParsedExpression* ParseExpression(const IToken* current);
ParsedExpression* ParseBinaryExpression(const IToken* current, OperatorPrecedence parentPrecedence);
ParsedExpression* ParsePrimaryExpression(const IToken* current);
ParsedExpression* ParseParenthesizedExpression(const IToken *current);
ParsedExpression* ParseFunctionCallExpression(ParsedExpression* functionExpression);
ParsedExpression *ParseIndexExpression(ParsedExpression *indexingExpression);
ParsedExpression *ParseTableExpression(const IToken *current);
public:
ParsedScriptStatement* Parse();
explicit Parser(const vector<IToken*>& tokens, Script* scriptData){
explicit Parser(const vector<const IToken*>& tokens, Script* scriptData){
_tokens = tokens;
_position = 0;
ScriptData = scriptData;
}
IToken *PeekAt(int offset);
ParsedExpression *ParseTableExpression(IToken *current);
};

View File

@ -1,104 +1,131 @@
#include <utility>
#ifndef PORYGONLANG_TOKEN_HPP
#define PORYGONLANG_TOKEN_HPP
#include <utility>
#include <string>
#include "TokenKind.hpp"
#include "../Utilities/HashedString.hpp"
using namespace std;
class IToken{
unsigned int Position;
unsigned int Length;
const unsigned int _position;
const unsigned int _length;
public:
virtual TokenKind GetKind() = 0;
virtual const TokenKind GetKind() const = 0;
IToken(unsigned int position, unsigned int length){
Position = position;
Length = length;
IToken(unsigned int position, unsigned int length)
: _position(position), _length(length)
{
}
unsigned int GetStartPosition(){
return Position;
const unsigned int GetStartPosition() const{
return _position;
}
unsigned int GetEndPosition(){
return Position + Length - 1;
const unsigned int GetEndPosition() const{
return _position + _length - 1;
}
unsigned int GetLength(){
return Length;
const unsigned int GetLength() const{
return _length;
}
virtual ~IToken(){
}
virtual ~IToken() = default;
};
class SimpleToken : public IToken{
const TokenKind _kind;
public:
TokenKind Kind;
explicit SimpleToken(TokenKind type, unsigned int position, unsigned int length) : IToken(position, length){
Kind = type;
explicit SimpleToken(TokenKind kind, unsigned int position, unsigned int length)
: IToken(position, length),
_kind(kind)
{
}
TokenKind GetKind() override{
return Kind;
const TokenKind GetKind() const final{
return _kind;
}
};
class IntegerToken : public IToken{
const long _value;
public:
long Value;
explicit IntegerToken(long value, unsigned int position, unsigned int length) : IToken(position, length){
Value = value;
explicit IntegerToken(long value, unsigned int position, unsigned int length)
: IToken(position, length),
_value(value)
{
}
TokenKind GetKind() override{
const TokenKind GetKind() const final{
return TokenKind::Integer;
}
const long GetValue() const{
return _value;
}
};
class FloatToken : public IToken{
const double _value;
public:
double Value;
explicit FloatToken(double value, unsigned int position, unsigned int length) : IToken(position, length){
Value = value;
explicit FloatToken(double value, unsigned int position, unsigned int length)
: IToken(position, length),
_value(value)
{
}
TokenKind GetKind() override{
const TokenKind GetKind() const final{
return TokenKind::Float;
}
const double GetValue() const{
return _value;
}
};
class StringToken : public IToken{
const string _value;
public:
string Value;
explicit StringToken(string value, unsigned int position, unsigned int length) : IToken(position, length){
Value = std::move(value);
explicit StringToken(string value, unsigned int position, unsigned int length)
: IToken(position, length),
_value(std::move(value))
{
}
TokenKind GetKind() override{
const TokenKind GetKind() const final{
return TokenKind::String;
}
const string& GetValue() const{
return _value;
}
};
class IdentifierToken : public IToken{
const HashedString _value;
public:
string Value;
explicit IdentifierToken(string value, unsigned int position, unsigned int length) : IToken(position, length){
Value = std::move(value);
explicit IdentifierToken(const HashedString value, unsigned int position, unsigned int length)
: IToken(position, length),
_value(value)
{
}
TokenKind GetKind() override{
const TokenKind GetKind() const final{
return TokenKind::Identifier;
}
const HashedString GetValue() const{
return _value;
}
};
#endif //PORYGONLANG_TOKEN_HPP

View File

@ -11,6 +11,7 @@ public:
}
explicit HashedString(char const *input) : _hash(ConstHash(input)){
}
HashedString(const HashedString& b) = default;
static unsigned constexpr ConstHash(char const *input) {
return *input ?

View File

@ -58,7 +58,7 @@ TEST_CASE( "Lex Equality Token", "[lexer]" ) {
Lexer lexer = Lexer("==", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::EqualityToken);
}
@ -92,10 +92,10 @@ TEST_CASE( "Lex Longer Integers", "[lexer]" ) {
Lexer lexer = Lexer(std::to_string(integer), nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::Integer);
auto* integerToken = (IntegerToken *)firstToken;
CHECK(integerToken -> Value == integer);
CHECK(integerToken -> GetValue() == integer);
}
}
@ -105,10 +105,10 @@ TEST_CASE( "Lex Floats", "[lexer]" ) {
Lexer lexer = Lexer(std::to_string(f), nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::Float);
auto* floatToken = (FloatToken *)firstToken;
CHECK(floatToken -> Value == Approx(f));
CHECK(floatToken -> GetValue() == Approx(f));
}
}
@ -116,133 +116,133 @@ TEST_CASE( "Lex And Keyword", "[lexer]" ) {
Lexer lexer = Lexer("and", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::AndKeyword);
}
TEST_CASE( "Lex Break Keyword", "[lexer]" ) {
Lexer lexer = Lexer("break", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::BreakKeyword);
}
TEST_CASE( "Lex Do Keyword", "[lexer]" ) {
Lexer lexer = Lexer("do", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::DoKeyword);
}
TEST_CASE( "Lex else Keyword", "[lexer]" ) {
Lexer lexer = Lexer("else", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::ElseKeyword);
}
TEST_CASE( "Lex else if Keyword", "[lexer]" ) {
Lexer lexer = Lexer("elseif", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::ElseIfKeyword);
}
TEST_CASE( "Lex end Keyword", "[lexer]" ) {
Lexer lexer = Lexer("end", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::EndKeyword);
}
TEST_CASE( "Lex false Keyword", "[lexer]" ) {
Lexer lexer = Lexer("false", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::FalseKeyword);
}
TEST_CASE( "Lex for Keyword", "[lexer]" ) {
Lexer lexer = Lexer("for", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::ForKeyword);
}
TEST_CASE( "Lex function Keyword", "[lexer]" ) {
Lexer lexer = Lexer("function", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::FunctionKeyword);
}
TEST_CASE( "Lex if Keyword", "[lexer]" ) {
Lexer lexer = Lexer("if", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::IfKeyword);
}
TEST_CASE( "Lex in Keyword", "[lexer]" ) {
Lexer lexer = Lexer("in", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::InKeyword);
}
TEST_CASE( "Lex local Keyword", "[lexer]" ) {
Lexer lexer = Lexer("local", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::LocalKeyword);
}
TEST_CASE( "Lex nil Keyword", "[lexer]" ) {
Lexer lexer = Lexer("nil", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::NilKeyword);
}
TEST_CASE( "Lex not Keyword", "[lexer]" ) {
Lexer lexer = Lexer("not", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::NotKeyword);
}
TEST_CASE( "Lex or Keyword", "[lexer]" ) {
Lexer lexer = Lexer("or", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::OrKeyword);
}
TEST_CASE( "Lex return Keyword", "[lexer]" ) {
Lexer lexer = Lexer("return", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::ReturnKeyword);
}
TEST_CASE( "Lex then Keyword", "[lexer]" ) {
Lexer lexer = Lexer("then", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::ThenKeyword);
}
TEST_CASE( "Lex true Keyword", "[lexer]" ) {
Lexer lexer = Lexer("true", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::TrueKeyword);
}
TEST_CASE( "Lex while Keyword", "[lexer]" ) {
Lexer lexer = Lexer("while", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::WhileKeyword);
}
@ -250,9 +250,9 @@ TEST_CASE( "Lex identifier", "[lexer]" ) {
Lexer lexer = Lexer("foo", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::Identifier);
REQUIRE(((IdentifierToken*)firstToken) -> Value == "foo");
REQUIRE(((IdentifierToken*)firstToken) -> GetValue() == HashedString("foo"));
}
TEST_CASE( "Lex Start Position", "[lexer]" ) {
@ -281,36 +281,36 @@ TEST_CASE("Lex Double Quote String", "[lexer]") {
Lexer lexer = Lexer("\"foo bar\"", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::String);
REQUIRE(((StringToken*)firstToken) -> Value == "foo bar");
REQUIRE(((StringToken*)firstToken) -> GetValue() == "foo bar");
}
TEST_CASE("Lex Single Quote String", "[lexer]") {
Lexer lexer = Lexer("'foo bar'", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::String);
REQUIRE(((StringToken*)firstToken) -> Value == "foo bar");
REQUIRE(((StringToken*)firstToken) -> GetValue() == "foo bar");
}
TEST_CASE("Lex Double Quote String, Escape Quote", "[lexer]") {
Lexer lexer = Lexer("'foo\\\"bar'", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::String);
REQUIRE(((StringToken*)firstToken) -> Value == "foo\"bar");
REQUIRE(((StringToken*)firstToken) -> GetValue() == "foo\"bar");
}
TEST_CASE("Lex String with newline", "[lexer]") {
Lexer lexer = Lexer("'foo\\nbar'", nullptr);
auto tokens = lexer.Lex();
REQUIRE(tokens.size() == 2);
IToken* firstToken = tokens[0];
const IToken* firstToken = tokens[0];
REQUIRE(firstToken -> GetKind() == TokenKind::String);
REQUIRE(((StringToken*)firstToken) -> Value == "foo\nbar");
REQUIRE(((StringToken*)firstToken) -> GetValue() == "foo\nbar");
}

View File

@ -5,7 +5,7 @@
#include "../../src/Parser/Parser.hpp"
TEST_CASE( "Parse single true keyword", "[parser]" ) {
vector<IToken*> v {new SimpleToken(TokenKind::TrueKeyword,0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
vector<const IToken*> v {new SimpleToken(TokenKind::TrueKeyword,0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
Parser parser = Parser(v, nullptr);
auto parsedStatements = parser.Parse() -> GetStatements();
REQUIRE(parsedStatements->size() == 1);
@ -18,7 +18,7 @@ TEST_CASE( "Parse single true keyword", "[parser]" ) {
}
TEST_CASE( "Parse single false keyword", "[parser]" ) {
vector<IToken*> v {new SimpleToken(TokenKind::FalseKeyword,0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
vector<const IToken*> v {new SimpleToken(TokenKind::FalseKeyword,0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
Parser parser = Parser(v, nullptr);
auto parsedStatements = parser.Parse() -> GetStatements();
REQUIRE(parsedStatements->size() == 1);
@ -31,7 +31,7 @@ TEST_CASE( "Parse single false keyword", "[parser]" ) {
}
TEST_CASE( "Parse simple addition", "[parser]" ) {
vector<IToken*> v {
vector<const IToken*> v {
new IntegerToken(5, 0, 0),
new SimpleToken(TokenKind::PlusToken,0,0),
new IntegerToken(10, 0, 0),
@ -55,7 +55,7 @@ TEST_CASE( "Parse simple addition", "[parser]" ) {
}
TEST_CASE( "Parse simple negation", "[parser]" ) {
vector<IToken*> v {
vector<const IToken*> v {
new SimpleToken(TokenKind::MinusToken,0,0),
new IntegerToken(10, 0, 0),
new SimpleToken(TokenKind::EndOfFile,0,0)
@ -75,7 +75,7 @@ TEST_CASE( "Parse simple negation", "[parser]" ) {
}
TEST_CASE( "Parse logical negation", "[parser]" ) {
vector<IToken*> v {
vector<const IToken*> v {
new SimpleToken(TokenKind::NotKeyword,0,0),
new SimpleToken(TokenKind::FalseKeyword,0,0),
new SimpleToken(TokenKind::EndOfFile,0,0)
@ -95,7 +95,7 @@ TEST_CASE( "Parse logical negation", "[parser]" ) {
}
TEST_CASE( "Are parenthesized expressions valid", "[parser]" ) {
vector<IToken*> v {
vector<const IToken*> v {
new IntegerToken(5, 0, 0),
new SimpleToken(TokenKind::PlusToken,0,0),
new IntegerToken(10, 0, 0),
@ -124,7 +124,7 @@ TEST_CASE( "Are parenthesized expressions valid", "[parser]" ) {
}
TEST_CASE( "Assert binary precedence", "[parser]" ) {
vector<IToken*> v {
vector<const IToken*> v {
new SimpleToken(TokenKind::OpenParenthesis,0,0),
new IntegerToken(10, 0, 0),
new SimpleToken(TokenKind::CloseParenthesis,0,0),
@ -142,7 +142,7 @@ TEST_CASE( "Assert binary precedence", "[parser]" ) {
}
TEST_CASE( "Parse String Tokens", "[parser]" ) {
vector<IToken*> v {new StringToken("foo bar", 0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
vector<const IToken*> v {new StringToken("foo bar", 0,0), new SimpleToken(TokenKind::EndOfFile,0,0)};
Parser parser = Parser(v, nullptr);
auto parsedStatements = parser.Parse() -> GetStatements();
REQUIRE(parsedStatements->size() == 1);
@ -155,8 +155,8 @@ TEST_CASE( "Parse String Tokens", "[parser]" ) {
}
TEST_CASE( "Parse Global Assignment", "[parser]" ) {
vector<IToken*> v {
new IdentifierToken("foo",0,0),
vector<const IToken*> v {
new IdentifierToken(HashedString("foo"),0,0),
new SimpleToken(TokenKind::AssignmentToken,0,0),
new SimpleToken(TokenKind::TrueKeyword,0,0),
new SimpleToken(TokenKind::EndOfFile,0,0)
@ -173,9 +173,9 @@ TEST_CASE( "Parse Global Assignment", "[parser]" ) {
}
TEST_CASE( "Parse local Assignment", "[parser]" ) {
vector<IToken*> v {
vector<const IToken*> v {
new SimpleToken(TokenKind::LocalKeyword,0,0),
new IdentifierToken("foo",0,0),
new IdentifierToken(HashedString("foo"),0,0),
new SimpleToken(TokenKind::AssignmentToken,0,0),
new SimpleToken(TokenKind::TrueKeyword,0,0),
new SimpleToken(TokenKind::EndOfFile,0,0)
@ -192,19 +192,19 @@ TEST_CASE( "Parse local Assignment", "[parser]" ) {
}
TEST_CASE( "Parse function declaration", "[parser]" ){
vector<IToken*> v {
vector<const IToken*> v {
new SimpleToken(TokenKind::FunctionKeyword,0,0),
new IdentifierToken("foo",0,0),
new IdentifierToken(HashedString("foo"),0,0),
new SimpleToken(TokenKind::OpenParenthesis,0,0),
new IdentifierToken("number",0,0),
new IdentifierToken("bar",0,0),
new IdentifierToken(HashedString("number"),0,0),
new IdentifierToken(HashedString("bar"),0,0),
new SimpleToken(TokenKind::CommaToken,0,0),
new IdentifierToken("number",0,0),
new IdentifierToken("par",0,0),
new IdentifierToken(HashedString("number"),0,0),
new IdentifierToken(HashedString("par"),0,0),
new SimpleToken(TokenKind::CloseParenthesis,0,0),
new IdentifierToken("bar",0,0),
new IdentifierToken(HashedString("bar"),0,0),
new SimpleToken(TokenKind::PlusToken,0,0),
new IdentifierToken("par",0,0),
new IdentifierToken(HashedString("par"),0,0),
new SimpleToken(TokenKind::EndKeyword,0,0),
new SimpleToken(TokenKind::EndOfFile,0,0),
};