Expanded on diagnostics, make whitespace completely ignored

Deukhoofd 2019-05-21 15:11:00 +02:00
parent 8f2f122215
commit ae25598864
GPG Key ID: B4C087AC81641654
7 changed files with 67 additions and 20 deletions
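
Taken together, the accessors added in the diffs below (Diagnostic::GetSeverity/GetCode/GetStartPosition/GetLength and Diagnostics::GetDiagnostics) let callers inspect what went wrong rather than only asking HasErrors(). A minimal consumer-side sketch, assuming the same headers the new integration tests use; the ReportDiagnostics helper and the iostream output are illustrative and not part of this commit:

#include <iostream>
#include "../src/Script.hpp"   // include path as used by the integration tests

// Hypothetical helper: print every diagnostic logged while creating a script.
void ReportDiagnostics(){
    Script script = Script::Create("1 + 1 @");
    if (!script.Diagnostics -> HasErrors())
        return;
    // GetDiagnostics() returns the full vector<Diagnostic> that was logged.
    for (auto diag : script.Diagnostics -> GetDiagnostics()){
        std::cout << "severity " << (int)diag.GetSeverity()
                  << ", code "   << (int)diag.GetCode()
                  << ", offset " << diag.GetStartPosition()
                  << ", length " << diag.GetLength() << std::endl;
    }
}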

View File

@@ -23,7 +23,7 @@ add_library(PorygonLang ${SRC_FILES})
 
 add_executable(PorygonLangTests
         ${SRC_FILES}
         src/Parser/LexerTests.cpp
-)
+        integration_tests/integration_tests.cpp)
 
 target_compile_definitions(PorygonLangTests PRIVATE TESTS_BUILD)

View File

@@ -0,0 +1,27 @@
+#ifdef TESTS_BUILD
+#define CATCH_CONFIG_MAIN
+#include <catch.hpp>
+
+#include "../src/Script.hpp"
+
+TEST_CASE( "Diagnostic invalid character", "[integration]" ) {
+    Script script = Script::Create("1 + 1 @");
+    REQUIRE(script.Diagnostics -> HasErrors());
+    auto diags = script.Diagnostics -> GetDiagnostics();
+    REQUIRE(diags.size() == 1);
+    CHECK(diags[0].GetCode() == DiagnosticCode::UnexpectedCharacter);
+    CHECK(diags[0].GetStartPosition() == 6);
+    CHECK(diags[0].GetLength() == 1);
+}
+
+TEST_CASE( "Diagnostic invalid token", "[integration]" ) {
+    Script script = Script::Create("1 +/ 1");
+    REQUIRE(script.Diagnostics -> HasErrors());
+    auto diags = script.Diagnostics -> GetDiagnostics();
+    REQUIRE(diags.size() == 1);
+    CHECK(diags[0].GetCode() == DiagnosticCode::UnexpectedToken);
+    CHECK(diags[0].GetStartPosition() == 3);
+    CHECK(diags[0].GetLength() == 1);
+}
+
+#endif

View File

@@ -17,6 +17,21 @@ public:
         _start = start;
         _length = length;
     }
 
+    DiagnosticSeverity GetSeverity(){
+        return _severity;
+    }
+
+    DiagnosticCode GetCode(){
+        return _code;
+    }
+
+    unsigned int GetStartPosition(){
+        return _start;
+    }
+
+    unsigned int GetLength(){
+        return _length;
+    }
 };
 #endif //PORYGONLANG_DIAGNOSTIC_HPP
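
For context, the new accessors presumably sit in the Diagnostic class roughly as sketched below; the private field declarations and the constructor signature are assumptions reconstructed from the hunk's context lines and from the emplace_back call in Diagnostics::Log, not part of the diff itself:

class Diagnostic {
    // Assumed private members, matching the getters and the assignments visible above.
    DiagnosticSeverity _severity;
    DiagnosticCode _code;
    unsigned int _start;
    unsigned int _length;
public:
    // Assumed constructor; the argument order matches _diagnostics.emplace_back(severity, code, start, length).
    Diagnostic(DiagnosticSeverity severity, DiagnosticCode code, unsigned int start, unsigned int length){
        _severity = severity;
        _code = code;
        _start = start;
        _length = length;
    }

    DiagnosticSeverity GetSeverity(){ return _severity; }
    DiagnosticCode GetCode(){ return _code; }
    unsigned int GetStartPosition(){ return _start; }
    unsigned int GetLength(){ return _length; }
};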

View File

@@ -17,6 +17,10 @@ public:
         _hasErrors = false;
     }
 
+    ~Diagnostics(){
+        _diagnostics.clear();
+    }
+
     void Log(DiagnosticSeverity severity, DiagnosticCode code, unsigned int start, unsigned int length){
         _diagnostics.emplace_back(severity, code, start, length);
         if (severity >= DiagnosticSeverity::Error){
@@ -38,6 +42,10 @@ public:
 
     bool HasErrors(){
        return _hasErrors;
    }
 
+    vector<Diagnostic> GetDiagnostics(){
+        return _diagnostics;
+    }
+
 };
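
A small usage sketch of the container itself: Log() stores a Diagnostic and trips the error flag only for severities at or above Error, while the new GetDiagnostics() hands back everything that was logged. DiagnosticSeverity::Warning, the default constructor, and the assert-based form are assumptions for illustration; only Error and the severity comparison are visible in the diff:

#include <cassert>
// Assumes the Diagnostics header (path not shown in this diff) is included.

void DiagnosticsUsageSketch(){
    Diagnostics diagnostics;                      // assumed default constructor
    diagnostics.Log(DiagnosticSeverity::Warning,  // assumed lower-than-Error severity
                    DiagnosticCode::UnexpectedToken, 0, 1);
    assert(!diagnostics.HasErrors());             // a warning alone does not set the flag
    diagnostics.Log(DiagnosticSeverity::Error,
                    DiagnosticCode::UnexpectedCharacter, 6, 1);
    assert(diagnostics.HasErrors());              // severity >= Error sets _hasErrors
    assert(diagnostics.GetDiagnostics().size() == 2);
}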

View File

@@ -13,8 +13,10 @@ vector<IToken*> Lexer::Lex() {
     vector<IToken*> tokens;
     while (true){
         IToken* next = this -> LexNext(this -> Next());
-        tokens.push_back(next);
-        if (next->GetKind() == TokenKind::EndOfFile)
+        auto nextKind = next -> GetKind();
+        if (nextKind != TokenKind::WhiteSpace)
+            tokens.push_back(next);
+        if (nextKind == TokenKind::EndOfFile)
             break;
     }
     return tokens;
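
The loop now drops whitespace tokens before they ever reach the parser, which is why the position tests below shrink from 8 tokens to 5. A sketch of an extra regression test in the style of the existing lexer tests (not part of this commit):

TEST_CASE( "Lex skips whitespace", "[lexer]" ) {
    Lexer lexer = Lexer("+ - bar 1234", nullptr);
    auto tokens = lexer.Lex();
    // Only +, -, bar, 1234 and EndOfFile remain; the spaces are never pushed.
    REQUIRE(tokens.size() == 5);
    for (auto token : tokens){
        CHECK(token -> GetKind() != TokenKind::WhiteSpace);
    }
    CHECK(tokens[4] -> GetKind() == TokenKind::EndOfFile);
}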

View File

@@ -1,5 +1,4 @@
 #ifdef TESTS_BUILD
-#define CATCH_CONFIG_MAIN
 #include <catch.hpp>
 #include "Lexer.hpp"
 
@@ -259,28 +258,22 @@ TEST_CASE( "Lex identifier", "[lexer]" ) {
 TEST_CASE( "Lex Start Position", "[lexer]" ) {
     Lexer lexer = Lexer("+ - bar 1234", nullptr);
     auto tokens = lexer.Lex();
-    REQUIRE(tokens.size() == 8);
+    REQUIRE(tokens.size() == 5);
     CHECK(((IdentifierToken*)tokens[0]) -> GetStartPosition() == 0);
-    CHECK(((IdentifierToken*)tokens[1]) -> GetStartPosition() == 1);
-    CHECK(((IdentifierToken*)tokens[2]) -> GetStartPosition() == 2);
-    CHECK(((IdentifierToken*)tokens[3]) -> GetStartPosition() == 3);
-    CHECK(((IdentifierToken*)tokens[4]) -> GetStartPosition() == 4);
-    CHECK(((IdentifierToken*)tokens[5]) -> GetStartPosition() == 7);
-    CHECK(((IdentifierToken*)tokens[6]) -> GetStartPosition() == 8);
-    CHECK(((IdentifierToken*)tokens[7]) -> GetStartPosition() == 12);
+    CHECK(((IdentifierToken*)tokens[1]) -> GetStartPosition() == 2);
+    CHECK(((IdentifierToken*)tokens[2]) -> GetStartPosition() == 4);
+    CHECK(((IdentifierToken*)tokens[3]) -> GetStartPosition() == 8);
+    CHECK(((IdentifierToken*)tokens[4]) -> GetStartPosition() == 12);
 }
 
 TEST_CASE( "Lex End Position", "[lexer]" ) {
     Lexer lexer = Lexer("+ - bar 1234", nullptr);
     auto tokens = lexer.Lex();
-    REQUIRE(tokens.size() == 8);
+    REQUIRE(tokens.size() == 5);
     CHECK(((IdentifierToken*)tokens[0]) -> GetEndPosition() == 0);
-    CHECK(((IdentifierToken*)tokens[1]) -> GetEndPosition() == 1);
-    CHECK(((IdentifierToken*)tokens[2]) -> GetEndPosition() == 2);
-    CHECK(((IdentifierToken*)tokens[3]) -> GetEndPosition() == 3);
-    CHECK(((IdentifierToken*)tokens[4]) -> GetEndPosition() == 6);
-    CHECK(((IdentifierToken*)tokens[5]) -> GetEndPosition() == 7);
-    CHECK(((IdentifierToken*)tokens[6]) -> GetEndPosition() == 11);
-    CHECK(((IdentifierToken*)tokens[7]) -> GetEndPosition() == 12);
+    CHECK(((IdentifierToken*)tokens[1]) -> GetEndPosition() == 2);
+    CHECK(((IdentifierToken*)tokens[2]) -> GetEndPosition() == 6);
+    CHECK(((IdentifierToken*)tokens[3]) -> GetEndPosition() == 11);
+    CHECK(((IdentifierToken*)tokens[4]) -> GetEndPosition() == 12);
 }
 #endif

View File

@@ -110,6 +110,8 @@ ParsedExpression *Parser::ParsePrimaryExpression(IToken *current) {
         case TokenKind ::Float: return new LiteralFloatExpression((FloatToken*)current);
         case TokenKind ::TrueKeyword: return new LiteralBoolExpression(current);
         case TokenKind ::FalseKeyword: return new LiteralBoolExpression(current);
+        // If we find a bad token here, we should have already logged it in the lexer, so don't log another error.
+        case TokenKind ::BadToken: return new BadExpression(current->GetStartPosition(), current->GetLength());
         default:
             this -> ScriptData -> Diagnostics -> LogError(DiagnosticCode::UnexpectedToken, current->GetStartPosition(), current->GetLength());
             return new BadExpression(current->GetStartPosition(), current->GetLength());
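
This BadToken case is what keeps the first integration test above at exactly one diagnostic: the lexer has already logged UnexpectedCharacter for the stray '@', so the parser only produces a BadExpression. The default branch still logs UnexpectedToken for tokens the lexer accepted but the parser cannot place, as in the "1 +/ 1" test. A compact sketch of the two paths, reusing the integration-test inputs; the assert-based form is illustrative only:

#include <cassert>
#include "../src/Script.hpp"   // include path as used by the integration tests

void BadTokenIsReportedOnce(){
    // '@' is rejected by the lexer: one UnexpectedCharacter, no second parser error.
    Script lexerError = Script::Create("1 + 1 @");
    assert(lexerError.Diagnostics -> GetDiagnostics().size() == 1);

    // '/' presumably lexes to a valid token but cannot start an expression,
    // so the parser's default branch logs a single UnexpectedToken instead.
    Script parserError = Script::Create("1 +/ 1");
    assert(parserError.Diagnostics -> GetDiagnostics().size() == 1);
}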