From 955869a3cf9bc96eb6755700bc2f885cf32eb04d Mon Sep 17 00:00:00 2001
From: nub31
Date: Mon, 26 May 2025 19:16:56 +0200
Subject: [PATCH] ...

---
 build.sh                                      |  2 +-
 example/{c => interop}/bindings.nub           |  2 +
 example/program.nub                           |  7 +-
 .../Frontend/Diagnostics/Diagnostic.cs        | 18 ++--
 .../{SourceFile.cs => SourceText.cs}          | 11 +--
 .../Nub.Lang/Frontend/DiagnosticsResult.cs    |  2 +-
 .../Frontend/Lexing/DocumentationToken.cs     |  2 +-
 .../Frontend/Lexing/IdentifierToken.cs        |  2 +-
 .../Nub.Lang/Frontend/Lexing/Lexer.cs         | 33 +++----
 .../Nub.Lang/Frontend/Lexing/LiteralToken.cs  |  2 +-
 .../Nub.Lang/Frontend/Lexing/ModifierToken.cs |  2 +-
 .../Nub.Lang/Frontend/Lexing/SymbolToken.cs   |  5 +-
 .../Nub.Lang/Frontend/Lexing/Token.cs         |  4 +-
 .../Nub.Lang/Frontend/Parsing/ModuleNode.cs   | 10 --
 .../Nub.Lang/Frontend/Parsing/Parser.cs       | 51 +++++-----
 .../Nub.Lang/Frontend/Parsing/SourceFile.cs   |  8 ++
 .../Nub.Lang/Frontend/Typing/TypeChecker.cs   | 22 ++---
 src/compiler/Nub.Lang/Program.cs              | 97 ++++++-------
 18 files changed, 124 insertions(+), 156 deletions(-)
 rename example/{c => interop}/bindings.nub (98%)
 rename src/compiler/Nub.Lang/Frontend/Diagnostics/{SourceFile.cs => SourceText.cs} (87%)
 delete mode 100644 src/compiler/Nub.Lang/Frontend/Parsing/ModuleNode.cs
 create mode 100644 src/compiler/Nub.Lang/Frontend/Parsing/SourceFile.cs

diff --git a/build.sh b/build.sh
index c4c6a61..9f2f60b 100755
--- a/build.sh
+++ b/build.sh
@@ -7,7 +7,7 @@ dotnet publish -c Release src/compiler/Nub.Lang
 clear
-nub example out/out.qbe
+nub example out/out.qbe > out/out.qbe
 nasm -g -felf64 src/runtime/runtime.asm -o out/runtime.o
diff --git a/example/c/bindings.nub b/example/interop/bindings.nub
similarity index 98%
rename from example/c/bindings.nub
rename to example/interop/bindings.nub
index e3fc6c4..e425923 100644
--- a/example/c/bindings.nub
+++ b/example/interop/bindings.nub
@@ -1,3 +1,5 @@
+module c_interop
+
 extern func printf(fmt: string, ...args: any)
 extern func getchar(): i32
 extern func puts(str: string)
diff --git a/example/program.nub b/example/program.nub
index 3ad47ba..3421d7f 100644
--- a/example/program.nub
+++ b/example/program.nub
@@ -1,12 +1,17 @@
-import c
+import c_interop
+
+module main
 /// # Documentation
 /// ## Documentation subtitle
 global func main(args: []string) {
     i = 0
+    printf("%d\n", args.count)
+
     while i < args.count {
         printf("%s\n", args[i])
+        i = i + 1
     }
 }
diff --git a/src/compiler/Nub.Lang/Frontend/Diagnostics/Diagnostic.cs b/src/compiler/Nub.Lang/Frontend/Diagnostics/Diagnostic.cs
index 0f547ac..1606084 100644
--- a/src/compiler/Nub.Lang/Frontend/Diagnostics/Diagnostic.cs
+++ b/src/compiler/Nub.Lang/Frontend/Diagnostics/Diagnostic.cs
@@ -17,7 +17,7 @@ public class Diagnostic
     {
         private readonly DiagnosticSeverity _severity;
         private readonly string _message;
-        private SourceFile? _sourceFile;
+        private SourceText? _sourceFile;
        private SourceSpan? _span;
        private string? _help;
@@ -29,7 +29,7 @@ public class Diagnostic
        public DiagnosticBuilder At(Token token)
        {
-            _sourceFile = token.SourceFile;
+            _sourceFile = token.SourceText;
            _span = SourceLocationCalculator.GetSpan(token);
            return this;
        }
@@ -41,14 +41,14 @@ public class Diagnostic
                throw new ArgumentException("Node has no tokens", nameof(node));
            }
-            _sourceFile = node.Tokens[0].SourceFile;
+            _sourceFile = node.Tokens[0].SourceText;
            _span = SourceLocationCalculator.GetSpan(node);
            return this;
        }
-        public DiagnosticBuilder At(SourceFile sourceFile, SourceSpan span)
+        public DiagnosticBuilder At(SourceText sourceText, SourceSpan span)
        {
-            _sourceFile = sourceFile;
+            _sourceFile = sourceText;
            _span = span;
            return this;
        }
@@ -68,11 +68,11 @@ public class Diagnostic
    public DiagnosticSeverity Severity { get; }
    public string Message { get; }
-    public SourceFile? SourceFile { get; }
+    public SourceText? SourceFile { get; }
    public SourceSpan? Span { get; }
    public string? Help { get; }
-    private Diagnostic(DiagnosticSeverity severity, string message, SourceFile? sourceFile, SourceSpan? span, string? help)
+    private Diagnostic(DiagnosticSeverity severity, string message, SourceText? sourceFile, SourceSpan? span, string? help)
    {
        Severity = severity;
        Message = message;
@@ -123,9 +123,9 @@ public class Diagnostic
        };
    }
-    private static void AppendSourceContext(StringBuilder sb, SourceFile sourceFile, SourceSpan span)
+    private static void AppendSourceContext(StringBuilder sb, SourceText sourceText, SourceSpan span)
    {
-        var lines = sourceFile.Content.Split('\n');
+        var lines = sourceText.Content.Split('\n');
        var startLine = span.Start.Line;
        var endLine = span.End.Line;
diff --git a/src/compiler/Nub.Lang/Frontend/Diagnostics/SourceFile.cs b/src/compiler/Nub.Lang/Frontend/Diagnostics/SourceText.cs
similarity index 87%
rename from src/compiler/Nub.Lang/Frontend/Diagnostics/SourceFile.cs
rename to src/compiler/Nub.Lang/Frontend/Diagnostics/SourceText.cs
index 8a7ce89..c41b2f9 100644
--- a/src/compiler/Nub.Lang/Frontend/Diagnostics/SourceFile.cs
+++ b/src/compiler/Nub.Lang/Frontend/Diagnostics/SourceText.cs
@@ -1,10 +1,9 @@
-using System.Diagnostics.CodeAnalysis;
 using Nub.Lang.Frontend.Lexing;
 using Nub.Lang.Frontend.Parsing;
 namespace Nub.Lang.Frontend.Diagnostics;
-public readonly struct SourceFile(string path, string content)
+public readonly struct SourceText(string path, string content)
 {
    public string Path { get; } = path;
    public string Content { get; } = content;
@@ -80,8 +79,8 @@ public static class SourceLocationCalculator
    public static SourceSpan GetSpan(Token token)
    {
-        var start = IndexToLocation(token.SourceFile.Content, token.StartIndex);
-        var end = IndexToLocation(token.SourceFile.Content, token.EndIndex);
+        var start = IndexToLocation(token.SourceText.Content, token.StartIndex);
+        var end = IndexToLocation(token.SourceText.Content, token.EndIndex);
        return new SourceSpan(start, end);
    }
@@ -95,8 +94,8 @@ public static class SourceLocationCalculator
        var firstToken = node.Tokens[0];
        var lastToken = node.Tokens[^1];
-        var start = IndexToLocation(firstToken.SourceFile.Content, firstToken.StartIndex);
-        var end = IndexToLocation(lastToken.SourceFile.Content, lastToken.EndIndex);
+        var start = IndexToLocation(firstToken.SourceText.Content, firstToken.StartIndex);
+        var end = IndexToLocation(lastToken.SourceText.Content, lastToken.EndIndex);
        return new SourceSpan(start, end);
    }
diff --git a/src/compiler/Nub.Lang/Frontend/DiagnosticsResult.cs b/src/compiler/Nub.Lang/Frontend/DiagnosticsResult.cs
index d657d93..8805626 100644
--- a/src/compiler/Nub.Lang/Frontend/DiagnosticsResult.cs
+++ b/src/compiler/Nub.Lang/Frontend/DiagnosticsResult.cs
@@ -10,7 +10,7 @@ public class DiagnosticsResult(List<Diagnostic> diagnostics)
    {
        foreach (var diagnostic in diagnostics)
        {
-            Console.WriteLine(diagnostic.Format());
+            Console.Error.WriteLine(diagnostic.Format());
        }
    }
 }
diff --git a/src/compiler/Nub.Lang/Frontend/Lexing/DocumentationToken.cs b/src/compiler/Nub.Lang/Frontend/Lexing/DocumentationToken.cs
index d1f60f8..1dcbe3f 100644
--- a/src/compiler/Nub.Lang/Frontend/Lexing/DocumentationToken.cs
+++ b/src/compiler/Nub.Lang/Frontend/Lexing/DocumentationToken.cs
@@ -2,7 +2,7 @@ using Nub.Lang.Frontend.Diagnostics;
 namespace Nub.Lang.Frontend.Lexing;
-public class DocumentationToken(SourceFile sourceFile, int startIndex, int endIndex, string documentation) : Token(sourceFile, startIndex, endIndex)
+public class DocumentationToken(SourceText sourceText, int startIndex, int endIndex, string documentation) : Token(sourceText, startIndex, endIndex)
 {
    public string Documentation { get; } = documentation;
 }
\ No newline at end of file
diff --git a/src/compiler/Nub.Lang/Frontend/Lexing/IdentifierToken.cs b/src/compiler/Nub.Lang/Frontend/Lexing/IdentifierToken.cs
index b84520d..5494f72 100644
--- a/src/compiler/Nub.Lang/Frontend/Lexing/IdentifierToken.cs
+++ b/src/compiler/Nub.Lang/Frontend/Lexing/IdentifierToken.cs
@@ -2,7 +2,7 @@ namespace Nub.Lang.Frontend.Lexing;
-public class IdentifierToken(SourceFile sourceFile, int startIndex, int endIndex, string value) : Token(sourceFile, startIndex, endIndex)
+public class IdentifierToken(SourceText sourceText, int startIndex, int endIndex, string value) : Token(sourceText, startIndex, endIndex)
 {
    public string Value { get; } = value;
 }
\ No newline at end of file
diff --git a/src/compiler/Nub.Lang/Frontend/Lexing/Lexer.cs b/src/compiler/Nub.Lang/Frontend/Lexing/Lexer.cs
index af772d8..d080409 100644
--- a/src/compiler/Nub.Lang/Frontend/Lexing/Lexer.cs
+++ b/src/compiler/Nub.Lang/Frontend/Lexing/Lexer.cs
@@ -8,6 +8,7 @@ public class Lexer
    {
        ["func"] = Symbol.Func,
        ["import"] = Symbol.Import,
+        ["module"] = Symbol.Module,
        ["if"] = Symbol.If,
        ["else"] = Symbol.Else,
        ["while"] = Symbol.While,
@@ -56,14 +57,12 @@ public class Lexer
        ['&'] = Symbol.Ampersand,
    };
-    private string _src = null!;
-    private SourceFile _sourceFile;
+    private SourceText _sourceText;
    private int _index;
-    public List<Token> Lex(string src, SourceFile sourceFile)
+    public DiagnosticsResult<List<Token>> Tokenize(SourceText sourceText)
    {
-        _src = src;
-        _sourceFile = sourceFile;
+        _sourceText = sourceText;
        _index = 0;
        List<Token> tokens = [];
@@ -72,7 +71,7 @@ public class Lexer
            tokens.Add(token);
        }
-        return tokens;
+        return new DiagnosticsResult<List<Token>>([], tokens);
    }
    private void ConsumeWhitespace()
@@ -123,7 +122,7 @@ public class Lexer
        if (documentation != null)
        {
-            return new DocumentationToken(_sourceFile, startIndex, _index, documentation);
+            return new DocumentationToken(_sourceText, startIndex, _index, documentation);
        }
        ConsumeWhitespace();
@@ -146,20 +145,20 @@ public class Lexer
            if (Keywords.TryGetValue(buffer, out var keywordSymbol))
            {
-                return new SymbolToken(_sourceFile, startIndex, _index, keywordSymbol);
+                return new SymbolToken(_sourceText, startIndex, _index, keywordSymbol);
            }
            if (Modifiers.TryGetValue(buffer, out var modifer))
            {
-                return new ModifierToken(_sourceFile, startIndex, _index, modifer);
+                return new ModifierToken(_sourceText, startIndex, _index, modifer);
            }
            if (buffer is "true" or "false")
            {
-                return new LiteralToken(_sourceFile, startIndex, _index, NubPrimitiveType.Bool, buffer);
+                return new LiteralToken(_sourceText, startIndex, _index, NubPrimitiveType.Bool, buffer);
            }
-            return new IdentifierToken(_sourceFile, startIndex, _index, buffer);
+            return new IdentifierToken(_sourceText, startIndex, _index, buffer);
        }
        if (char.IsDigit(current))
@@ -197,7 +196,7 @@ public class Lexer
            }
        }
-        return new LiteralToken(_sourceFile, startIndex, _index, isFloat ? NubPrimitiveType.F64 : NubPrimitiveType.I64, buffer);
+        return new LiteralToken(_sourceText, startIndex, _index, isFloat ? NubPrimitiveType.F64 : NubPrimitiveType.I64, buffer);
    }
    // TODO: Revisit this
@@ -217,7 +216,7 @@ public class Lexer
                    Next();
                }
-                return new SymbolToken(_sourceFile, startIndex, _index, chain.Value);
+                return new SymbolToken(_sourceText, startIndex, _index, chain.Value);
            }
        }
    }
@@ -225,7 +224,7 @@ public class Lexer
        if (Chars.TryGetValue(current, out var charSymbol))
        {
            Next();
-            return new SymbolToken(_sourceFile, startIndex, _index, charSymbol);
+            return new SymbolToken(_sourceText, startIndex, _index, charSymbol);
        }
        if (current == '"')
@@ -250,7 +249,7 @@ public class Lexer
            Next();
        }
-        return new LiteralToken(_sourceFile, startIndex, _index, NubPrimitiveType.String, buffer);
+        return new LiteralToken(_sourceText, startIndex, _index, NubPrimitiveType.String, buffer);
    }
    throw new Exception($"Unknown character {current}");
@@ -258,9 +257,9 @@ public class Lexer
    private Optional<char> Peek(int offset = 0)
    {
-        if (_index + offset < _src.Length)
+        if (_index + offset < _sourceText.Content.Length)
        {
-            return _src[_index + offset];
+            return _sourceText.Content[_index + offset];
        }
        return Optional.Empty();
diff --git a/src/compiler/Nub.Lang/Frontend/Lexing/LiteralToken.cs b/src/compiler/Nub.Lang/Frontend/Lexing/LiteralToken.cs
index 976a53b..07cad59 100644
--- a/src/compiler/Nub.Lang/Frontend/Lexing/LiteralToken.cs
+++ b/src/compiler/Nub.Lang/Frontend/Lexing/LiteralToken.cs
@@ -2,7 +2,7 @@ namespace Nub.Lang.Frontend.Lexing;
-public class LiteralToken(SourceFile sourceFile, int startIndex, int endIndex, NubType type, string value) : Token(sourceFile, startIndex, endIndex)
+public class LiteralToken(SourceText sourceText, int startIndex, int endIndex, NubType type, string value) : Token(sourceText, startIndex, endIndex)
 {
    public NubType Type { get; } = type;
    public string Value { get; } = value;
 }
diff --git a/src/compiler/Nub.Lang/Frontend/Lexing/ModifierToken.cs b/src/compiler/Nub.Lang/Frontend/Lexing/ModifierToken.cs
index d4b2ca6..a6651a0 100644
--- a/src/compiler/Nub.Lang/Frontend/Lexing/ModifierToken.cs
+++ b/src/compiler/Nub.Lang/Frontend/Lexing/ModifierToken.cs
@@ -2,7 +2,7 @@ using Nub.Lang.Frontend.Diagnostics;
 namespace Nub.Lang.Frontend.Lexing;
-public class ModifierToken(SourceFile sourceFile, int startIndex, int endIndex, Modifier modifier) : Token(sourceFile, startIndex, endIndex)
+public class ModifierToken(SourceText sourceText, int startIndex, int endIndex, Modifier modifier) : Token(sourceText, startIndex, endIndex)
 {
    public Modifier Modifier { get; } = modifier;
 }
diff --git a/src/compiler/Nub.Lang/Frontend/Lexing/SymbolToken.cs b/src/compiler/Nub.Lang/Frontend/Lexing/SymbolToken.cs
index 13223c4..dc063f9 100644
--- a/src/compiler/Nub.Lang/Frontend/Lexing/SymbolToken.cs
+++ b/src/compiler/Nub.Lang/Frontend/Lexing/SymbolToken.cs
@@ -2,7 +2,7 @@ namespace Nub.Lang.Frontend.Lexing;
-public class SymbolToken(SourceFile sourceFile, int startIndex, int endIndex, Symbol symbol) : Token(sourceFile, startIndex, endIndex)
+public class SymbolToken(SourceText sourceText, int startIndex, int endIndex, Symbol symbol) : Token(sourceText, startIndex, endIndex)
 {
    public Symbol Symbol { get; } = symbol;
 }
@@ -10,6 +10,7 @@ public class SymbolToken(SourceFile sourceFile, int startIndex, int endIndex, Sy
 public enum Symbol
 {
    Import,
+    Module,
    Func,
    Return,
    If,
@@ -42,5 +43,5 @@ public enum Symbol
    New,
    Struct,
    Caret,
-    Ampersand
+    Ampersand,
 }
\ No newline at end of file
diff --git a/src/compiler/Nub.Lang/Frontend/Lexing/Token.cs b/src/compiler/Nub.Lang/Frontend/Lexing/Token.cs
index 1bd7b94..1a46ae8 100644
--- a/src/compiler/Nub.Lang/Frontend/Lexing/Token.cs
+++ b/src/compiler/Nub.Lang/Frontend/Lexing/Token.cs
@@ -2,9 +2,9 @@ namespace Nub.Lang.Frontend.Lexing;
-public abstract class Token(SourceFile sourceFile, int startIndex, int endIndex)
+public abstract class Token(SourceText sourceText, int startIndex, int endIndex)
 {
-    public SourceFile SourceFile { get; } = sourceFile;
+    public SourceText SourceText { get; } = sourceText;
    public int StartIndex { get; } = startIndex;
    public int EndIndex { get; } = endIndex;
 }
\ No newline at end of file
diff --git a/src/compiler/Nub.Lang/Frontend/Parsing/ModuleNode.cs b/src/compiler/Nub.Lang/Frontend/Parsing/ModuleNode.cs
deleted file mode 100644
index fd09023..0000000
--- a/src/compiler/Nub.Lang/Frontend/Parsing/ModuleNode.cs
+++ /dev/null
@@ -1,10 +0,0 @@
-using Nub.Lang.Frontend.Lexing;
-
-namespace Nub.Lang.Frontend.Parsing;
-
-public class ModuleNode(IReadOnlyList<Token> tokens, string path, List<string> imports, List<DefinitionNode> definitions) : Node(tokens)
-{
-    public string Path { get; } = path;
-    public List<string> Imports { get; } = imports;
-    public List<DefinitionNode> Definitions { get; } = definitions;
-}
\ No newline at end of file
diff --git a/src/compiler/Nub.Lang/Frontend/Parsing/Parser.cs b/src/compiler/Nub.Lang/Frontend/Parsing/Parser.cs
index 2bc9076..f198e35 100644
--- a/src/compiler/Nub.Lang/Frontend/Parsing/Parser.cs
+++ b/src/compiler/Nub.Lang/Frontend/Parsing/Parser.cs
@@ -6,41 +6,44 @@ namespace Nub.Lang.Frontend.Parsing;
 public class Parser
 {
+    private List<Diagnostic> _diagnostics = [];
    private List<Token> _tokens = [];
    private int _index;
-    private List<Diagnostic> _diagnostics = [];
-    public DiagnosticsResult<ModuleNode> ParseModule(List<Token> tokens, string rootFilePath)
+    public DiagnosticsResult<SourceFile?> ParseModule(List<Token> tokens)
    {
-        _index = 0;
-        _tokens = tokens;
        _diagnostics = [];
+        _tokens = tokens;
+        _index = 0;
-        List<DefinitionNode> definitions = [];
-        List<string> imports = [];
-
-        while (Peek().HasValue)
+        try
        {
-            try
+            List<string> imports = [];
+            while (TryExpectSymbol(Symbol.Import))
            {
-                if (TryExpectSymbol(Symbol.Import))
-                {
-                    var name = ExpectIdentifier();
-                    imports.Add(name.Value);
-                }
-                else
-                {
-                    definitions.Add(ParseDefinition());
-                }
+                var name = ExpectIdentifier();
+                imports.Add(name.Value);
            }
-            catch (ParseException ex)
+
+            ExpectSymbol(Symbol.Module);
+            var module = ExpectIdentifier().Value;
+
+            List<DefinitionNode> definitions = [];
+
+            while (Peek().HasValue)
            {
-                _diagnostics.Add(ex.Diagnostic);
-                RecoverToNextDefinition();
+                definitions.Add(ParseDefinition());
            }
+
+            return new DiagnosticsResult<SourceFile?>(_diagnostics, new SourceFile(module, imports, definitions));
+        }
+        catch (ParseException ex)
+        {
+            _diagnostics.Add(ex.Diagnostic);
+            RecoverToNextDefinition();
        }
-        return new DiagnosticsResult<ModuleNode>(_diagnostics, new ModuleNode(GetTokensForNode(0), rootFilePath, imports, definitions));
+        return new DiagnosticsResult<SourceFile?>(_diagnostics, null);
    }
    private DefinitionNode ParseDefinition()
@@ -567,7 +570,7 @@ public class Parser
        throw new ParseException(Diagnostic
            .Error("Unexpected end of file while parsing type")
.WithHelp("Expected a type name") - .At(_tokens.Last().SourceFile, SourceLocationCalculator.GetSpan(_tokens.Last())) + .At(_tokens.Last().SourceText, SourceLocationCalculator.GetSpan(_tokens.Last())) .Build()); } @@ -585,7 +588,7 @@ public class Parser throw new ParseException(Diagnostic .Error("Unexpected end of file") .WithHelp("Expected more tokens to complete the syntax") - .At(_tokens.Last().SourceFile, SourceLocationCalculator.GetSpan(_tokens.Last())) + .At(_tokens.Last().SourceText, SourceLocationCalculator.GetSpan(_tokens.Last())) .Build()); } diff --git a/src/compiler/Nub.Lang/Frontend/Parsing/SourceFile.cs b/src/compiler/Nub.Lang/Frontend/Parsing/SourceFile.cs new file mode 100644 index 0000000..0f9642a --- /dev/null +++ b/src/compiler/Nub.Lang/Frontend/Parsing/SourceFile.cs @@ -0,0 +1,8 @@ +namespace Nub.Lang.Frontend.Parsing; + +public class SourceFile(string module, List imports, List definitions) +{ + public string Module { get; } = module; + public List Imports { get; } = imports; + public List Definitions { get; } = definitions; +} \ No newline at end of file diff --git a/src/compiler/Nub.Lang/Frontend/Typing/TypeChecker.cs b/src/compiler/Nub.Lang/Frontend/Typing/TypeChecker.cs index f2cc226..f06a776 100644 --- a/src/compiler/Nub.Lang/Frontend/Typing/TypeChecker.cs +++ b/src/compiler/Nub.Lang/Frontend/Typing/TypeChecker.cs @@ -5,20 +5,20 @@ namespace Nub.Lang.Frontend.Typing; public class TypeChecker { - private readonly Dictionary _variables = new(); - private readonly List _definitions; - private readonly List _diagnostics = []; + private Dictionary _variables = new(); + private List _sourceFiles = []; + private List _diagnostics = []; private NubType? _currentFunctionReturnType; private bool _hasReturnStatement; + private List _definitions = []; - public TypeChecker(List definitions) + public DiagnosticsResult> TypeCheck(List sourceFiles) { - _definitions = definitions; - } - - public DiagnosticsResult TypeCheck() - { - _diagnostics.Clear(); + _variables = new Dictionary(); + _diagnostics = []; + _definitions = sourceFiles.SelectMany(x => x.Definitions).ToList(); + _currentFunctionReturnType = null; + _hasReturnStatement = false; foreach (var structDef in _definitions.OfType()) { @@ -30,7 +30,7 @@ public class TypeChecker TypeCheckFuncDef(funcDef); } - return new DiagnosticsResult(_diagnostics.ToList()); + return new DiagnosticsResult>(_diagnostics, _definitions); } private void TypeCheckStructDef(StructDefinitionNode structDef) diff --git a/src/compiler/Nub.Lang/Program.cs b/src/compiler/Nub.Lang/Program.cs index af76aa9..e81dc79 100644 --- a/src/compiler/Nub.Lang/Program.cs +++ b/src/compiler/Nub.Lang/Program.cs @@ -1,6 +1,4 @@ -using System.Diagnostics.CodeAnalysis; -using Nub.Lang.Backend; -using Nub.Lang.Frontend; +using Nub.Lang.Backend; using Nub.Lang.Frontend.Diagnostics; using Nub.Lang.Frontend.Lexing; using Nub.Lang.Frontend.Parsing; @@ -10,94 +8,57 @@ namespace Nub.Lang; internal static class Program { - private static readonly Lexer Lexer = new(); - private static readonly Parser Parser = new(); - public static int Main(string[] args) { if (args.Length != 2) { - Console.WriteLine("Usage: nub "); - Console.WriteLine("Example: nub src out.asm"); + Console.Error.WriteLine("Usage: nub "); + Console.Error.WriteLine("Example: nub src"); return 1; } - var input = Path.GetFullPath(args[0]); - var output = Path.GetFullPath(args[1]); + var srcDir = Path.GetFullPath(args[0]); - if (!Directory.Exists(input)) + if (!Directory.Exists(srcDir)) { - Console.WriteLine($"Error: 
Input directory '{input}' does not exist."); + Console.Error.WriteLine($"Error: Input directory '{srcDir}' does not exist."); return 1; } - var outputDir = Path.GetDirectoryName(output); - if (outputDir == null || !Directory.Exists(outputDir)) - { - Console.WriteLine($"Error: Output directory '{outputDir}' does not exist."); - return 1; - } - - if (string.IsNullOrWhiteSpace(Path.GetFileName(output))) - { - Console.WriteLine("Error: Output path must specify a file, not a directory."); - return 1; - } - - if (TryRunFrontend(input, out var definitions)) - { - return 1; - } - - var generator = new Generator(definitions); - var result = generator.Generate(); - - File.WriteAllText(output, result); - return 0; + return Compile(srcDir); } - private static bool TryRunFrontend(string rootFilePath, out List definitions) + private static int Compile(string srcDir) { - List modules = []; - var error = TryRunFrontend(rootFilePath, modules); - definitions = modules.SelectMany(f => f.Definitions).ToList(); - - var typeChecker = new TypeChecker(definitions); - var typeCheckResult = typeChecker.TypeCheck(); - typeCheckResult.PrintAllDiagnostics(); - error = error || typeCheckResult.HasErrors; - return error; - } + var lexer = new Lexer(); + var parser = new Parser(); + var typeChecker = new TypeChecker(); - private static bool TryRunFrontend(string rootFilePath, List modules) - { - var error = false; - var filePaths = Directory.EnumerateFiles(rootFilePath, "*.nub", SearchOption.TopDirectoryOnly); - - List tokens = []; - foreach (var filePath in filePaths) + List files = []; + foreach (var file in Directory.EnumerateFiles(srcDir, "*.nub", SearchOption.AllDirectories)) { - var src = File.ReadAllText(filePath); - tokens.AddRange(Lexer.Lex(src, new SourceFile(filePath, src))); - } + var content = File.ReadAllText(file); - var parseResult = Parser.ParseModule(tokens, rootFilePath); - parseResult.PrintAllDiagnostics(); + var tokenizeResult = lexer.Tokenize(new SourceText(file, content)); + tokenizeResult.PrintAllDiagnostics(); - modules.Add(parseResult.Value); + var parseResult = parser.ParseModule(tokenizeResult.Value); + parseResult.PrintAllDiagnostics(); - foreach (var import in parseResult.Value.Imports) - { - var importPath = Path.GetFullPath(import, parseResult.Value.Path); - if (modules.All(m => m.Path != importPath)) + if (parseResult.Value != null) { - if (!TryRunFrontend(importPath, modules)) - { - error = true; - } + files.Add(parseResult.Value); } } - return error; + var typeCheckResult = typeChecker.TypeCheck(files); + typeCheckResult.PrintAllDiagnostics(); + + var generator = new Generator(typeCheckResult.Value); + var result = generator.Generate(); + + Console.Out.Write(result); + + return 0; } } \ No newline at end of file