Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 60 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
name: CI

on:
  push:
    branches: [ master, main, develop ]
  pull_request:
    branches: [ master, main, develop ]

jobs:
  test:
    name: Run Tests
    runs-on: ubuntu-latest

    strategy:
      # Let the sibling compiler job finish even if one fails, so a
      # dmd-only or ldc-only breakage is visible in a single run.
      fail-fast: false
      matrix:
        dc: [dmd-latest, ldc-latest]

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install D compiler
        uses: dlang-community/setup-dlang@v1
        with:
          compiler: ${{ matrix.dc }}

      - name: Print D compiler version
        run: |
          # Only one of dmd/ldc2 exists per matrix entry; tolerate the other.
          dmd --version || true
          ldc2 --version || true
          dub --version

      - name: Cache DUB dependencies
        uses: actions/cache@v3
        with:
          path: ~/.dub
          # Include the compiler in the key: dmd and ldc2 build artifacts
          # under ~/.dub are not interchangeable.
          key: ${{ runner.os }}-${{ matrix.dc }}-dub-${{ hashFiles('**/dub.json', '**/dub.selections.json') }}
          restore-keys: |
            ${{ runner.os }}-${{ matrix.dc }}-dub-
            ${{ runner.os }}-dub-

      - name: Install dependencies
        run: dub upgrade

      - name: Build project (debug)
        run: dub build --build=debug

      - name: Build project (release)
        run: dub build --build=release-fast

      - name: Run unit tests
        # `dub test` compiles with -unittest and runs the test harness in one
        # step; building and then `dub run` would execute main() instead of
        # only the tests.
        run: |
          echo "=== Executando Unit Tests ==="
          dub test

      - name: Build with matrix compiler
        # setup-dlang exports $DC with the real compiler binary name
        # (dmd/ldc2); the matrix label "dmd-latest"/"ldc-latest" is NOT a
        # valid value for --compiler, which is why the old step always
        # failed into its `|| dub build` fallback.
        run: |
          echo "=== Testing with ${{ matrix.dc }} ==="
          dub --version
          dub build --compiler=$DC
6 changes: 5 additions & 1 deletion dub.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
{
"authors": [
"Fernando Dev"
"Fernando Dev",
"Italo Brito Brandão"
],
"copyright": "Copyright © 2025, Fernando Dev",
"description": "Compilador Geral Delégua",
Expand Down Expand Up @@ -32,6 +33,9 @@
"-boundscheck=off"
],
"dflags-gdc": ["-O3", "-frelease", "-fbounds-check=off"]
},
"unittest": {
"dflags-ldc": ["-g", "-O0", "-unittest"]
}
}
}
119 changes: 119 additions & 0 deletions src/frontend/lexer/lexer.d
Original file line number Diff line number Diff line change
Expand Up @@ -687,3 +687,122 @@ public:
}
}
}

unittest
{
    // Smoke test: a Lexer can be constructed over an empty source string.
    writeln("Testando Lexer básico...");

    auto diagnostics = new DiagnosticError();
    auto emptySourceLexer = new Lexer("test.delegua", "", ".", diagnostics);

    assert(emptySourceLexer !is null);

    writeln("✓ Teste de criação do Lexer passou!");
}

unittest
{
    // `var x = 42;` should lex to: VAR, IDENTIFIER(x), EQUALS, INT(42),
    // SEMICOLON, with a trailing EOF token closing the stream.
    writeln("Testando tokenização básica...");

    auto diagnostics = new DiagnosticError();
    auto stream = new Lexer("test.delegua", "var x = 42;", ".", diagnostics).tokenize();

    assert(stream.length >= 6);

    assert(stream[0].kind == TokenType.VAR);
    assert(stream[1].kind == TokenType.IDENTIFIER);
    assert(stream[1].value.get!string == "x");
    assert(stream[2].kind == TokenType.EQUALS);
    assert(stream[3].kind == TokenType.INT);
    assert(stream[3].value.get!long == 42);
    assert(stream[4].kind == TokenType.SEMICOLON);
    assert(stream[$ - 1].kind == TokenType.EOF);

    writeln("✓ Teste de tokenização básica passou!");
}

unittest
{
    writeln("Testando tokenização de strings...");

    auto diagnostics = new DiagnosticError();
    auto stream = new Lexer("test.delegua", `"hello world"`, ".", diagnostics).tokenize();

    // A single string literal plus the terminating EOF — quotes stripped
    // from the stored value.
    assert(stream.length == 2);
    assert(stream[0].kind == TokenType.STRING);
    assert(stream[0].value.get!string == "hello world");
    assert(stream[1].kind == TokenType.EOF);

    writeln("✓ Teste de tokenização de strings passou!");
}

unittest
{
    writeln("Testando tokenização de números...");

    import std.math : isClose;

    auto error = new DiagnosticError();

    // Integer literal: one INT token followed by EOF.
    auto lexer1 = new Lexer("test.delegua", "123", ".", error);
    auto tokens1 = lexer1.tokenize();
    assert(tokens1.length == 2);
    assert(tokens1[0].kind == TokenType.INT);
    assert(tokens1[0].value.get!long == 123);

    // Float literal. The lexer may store the payload either as the raw
    // lexeme string or as a double (TODO: confirm which representation is
    // canonical), so normalize both into a single double before checking.
    auto lexer2 = new Lexer("test.delegua", "12.34", ".", error);
    auto tokens2 = lexer2.tokenize();
    assert(tokens2.length == 2);
    assert(tokens2[0].kind == TokenType.FLOAT);

    double floatValue;
    if (tokens2[0].value.type == typeid(string))
    {
        import std.conv : to;
        floatValue = tokens2[0].value.get!string.to!double;
    }
    else
    {
        floatValue = tokens2[0].value.get!double;
    }

    // Compare with a tolerance: 12.34 has no exact binary representation,
    // so an exact `== 12.34` would depend on both sides rounding the same
    // way through independent parse paths.
    assert(isClose(floatValue, 12.34));

    writeln("✓ Teste de tokenização de números passou!");
}

unittest
{
    writeln("Testando keywords em português...");

    auto diagnostics = new DiagnosticError();
    auto stream = new Lexer("test.delegua", "se verdadeiro então", ".", diagnostics).tokenize();

    // NOTE(review): "então" currently lexes as a plain IDENTIFIER rather
    // than a keyword — confirm this is intentional and not a missing entry
    // in the keywords table.
    assert(stream.length == 4);
    assert(stream[0].kind == TokenType.SE);
    assert(stream[1].kind == TokenType.TRUE);
    assert(stream[2].kind == TokenType.IDENTIFIER);
    assert(stream[3].kind == TokenType.EOF);

    writeln("✓ Teste de keywords em português passou!");
}

unittest
{
    writeln("Testando operadores...");

    auto diagnostics = new DiagnosticError();
    auto stream = new Lexer("test.delegua", "+ - * / == != >= <=", ".", diagnostics).tokenize();

    // One token per operator, in source order, plus the closing EOF.
    auto expectedKinds = [
        TokenType.PLUS,
        TokenType.MINUS,
        TokenType.ASTERISK,
        TokenType.SLASH,
        TokenType.EQUALS_EQUALS,
        TokenType.NOT_EQUALS,
        TokenType.GREATER_THAN_OR_EQUALS,
        TokenType.LESS_THAN_OR_EQUALS,
        TokenType.EOF,
    ];

    assert(stream.length == expectedKinds.length);
    foreach (i, kind; expectedKinds)
        assert(stream[i].kind == kind);

    writeln("✓ Teste de operadores passou!");
}
120 changes: 111 additions & 9 deletions src/frontend/lexer/token.d
Original file line number Diff line number Diff line change
@@ -1,13 +1,14 @@
module frontend.lexer.token;

import std.variant;
import std.stdio;

// Token type
enum TokenType
{
// Keywords
VAR, // var
FALSE, // false
FALSE, // false
TRUE, // true
SUSTAR, // sustar
DO, // faça/faca
Expand Down Expand Up @@ -78,8 +79,8 @@ enum TokenType
NOT_EQUALS, // !=
GREATER_THAN, // >
LESS_THAN, // <
GREATER_THAN_OR_EQUALS, // >=
LESS_THAN_OR_EQUALS, // <=
GREATER_THAN_OR_EQUALS, // >=
LESS_THAN_OR_EQUALS, // <=
AND, // &&
OR, // ||
COMMA, // ,
Expand All @@ -90,15 +91,15 @@ enum TokenType
RPAREN, // )
LBRACE, // {
RBRACE, // }
LBRACKET, // [
RBRACKET, // ]
NOT, // ]
LBRACKET, // [
RBRACKET, // ]
NOT, // ]
RANGE, // ..
BANG, // !
QUESTION, // ?
BANG, // !
QUESTION, // ?

// Operadores bitwise básicos
BIT_AND, // &
BIT_AND, // &
BIT_OR, // |
BIT_XOR, // ^
BIT_NOT, // ~
Expand Down Expand Up @@ -253,3 +254,104 @@ bool isComplexTypeToken(Token token)
return false;
}
}

unittest
{
    writeln("Testando Token...");

    // A Token carries its kind, a Variant payload and a source location.
    auto location = Loc("test.d", 1, 1, 5, ".");

    auto identifier = Token(TokenType.IDENTIFIER, Variant("teste"), location);
    assert(identifier.kind == TokenType.IDENTIFIER);
    assert(identifier.value.get!string == "teste");
    assert(identifier.loc.line == 1);
    assert(identifier.loc.file == "test.d");

    auto varKeyword = Token(TokenType.VAR, Variant("var"), location);
    assert(varKeyword.kind == TokenType.VAR);

    writeln("✓ Testes de Token passaram!");
}

unittest
{
    writeln("Testando isTypeToken...");

    auto location = Loc("test.d", 1, 1, 5, ".");

    // Each primitive type name, spelled as an identifier, is a type token.
    foreach (typeName; ["int", "float", "string", "bool", "void", "vazio"])
    {
        auto candidate = Token(TokenType.IDENTIFIER, Variant(typeName), location);
        assert(isTypeToken(candidate) == true);
    }

    // An identifier that is not a type name is rejected...
    auto notAType = Token(TokenType.IDENTIFIER, Variant("qualquercoisa"), location);
    assert(isTypeToken(notAType) == false);

    // ...and so is any non-identifier token.
    auto operatorToken = Token(TokenType.PLUS, Variant("+"), location);
    assert(isTypeToken(operatorToken) == false);

    writeln("✓ Testes de isTypeToken passaram!");
}

unittest
{
    writeln("Testando isComplexTypeToken...");

    auto location = Loc("test.d", 1, 1, 5, ".");

    // '*', '[' and ']' participate in pointer/array type syntax.
    assert(isComplexTypeToken(Token(TokenType.ASTERISK, Variant("*"), location)) == true);
    assert(isComplexTypeToken(Token(TokenType.LBRACKET, Variant("["), location)) == true);
    assert(isComplexTypeToken(Token(TokenType.RBRACKET, Variant("]"), location)) == true);

    // Other operators and plain identifiers are not complex-type syntax.
    assert(isComplexTypeToken(Token(TokenType.PLUS, Variant("+"), location)) == false);
    assert(isComplexTypeToken(Token(TokenType.IDENTIFIER, Variant("teste"), location)) == false);

    writeln("✓ Testes de isComplexTypeToken passaram!");
}

unittest
{
    writeln("Testando keywords...");

    // Every reserved word must be present in the table AND map to its
    // expected token type. "funcao"/"função" are spelling variants of the
    // same keyword.
    TokenType[string] expectedMappings = [
        "var": TokenType.VAR,
        "se": TokenType.SE,
        "enquanto": TokenType.ENQUANTO,
        "para": TokenType.PARA,
        "funcao": TokenType.FUNCAO,
        "função": TokenType.FUNCAO,
        "classe": TokenType.CLASSE,
        "verdadeiro": TokenType.TRUE,
        "falso": TokenType.FALSE,
    ];

    foreach (word, kind; expectedMappings)
    {
        assert(word in keywords);
        assert(keywords[word] == kind);
    }

    writeln("✓ Testes de keywords passaram!");
}
Loading
Loading