commit cfd7a6ebef (parent bc040c3fef)
Author: nub31
Date:   2025-05-26 19:16:56 +02:00

18 changed files with 124 additions and 156 deletions

View File

@@ -7,7 +7,7 @@ dotnet publish -c Release src/compiler/Nub.Lang
 clear
-nub example out/out.qbe
+nub example out/out.qbe > out/out.qbe
 nasm -g -felf64 src/runtime/runtime.asm -o out/runtime.o

View File

@@ -1,3 +1,5 @@
+module c_interop
+
 extern func printf(fmt: string, ...args: any)
 extern func getchar(): i32
 extern func puts(str: string)

View File

@@ -1,12 +1,17 @@
import c import c_interop
module main
/// # Documentation /// # Documentation
/// ## Documentation subtitle /// ## Documentation subtitle
global func main(args: []string) { global func main(args: []string) {
i = 0 i = 0
printf("%d\n", args.count) printf("%d\n", args.count)
while i < args.count { while i < args.count {
printf("%s\n", args[i]) printf("%s\n", args[i])
i = i + 1 i = i + 1
} }
} }

View File

@@ -17,7 +17,7 @@ public class Diagnostic
 {
     private readonly DiagnosticSeverity _severity;
     private readonly string _message;
-    private SourceFile? _sourceFile;
+    private SourceText? _sourceFile;
     private SourceSpan? _span;
     private string? _help;
@@ -29,7 +29,7 @@ public class Diagnostic
     public DiagnosticBuilder At(Token token)
     {
-        _sourceFile = token.SourceFile;
+        _sourceFile = token.SourceText;
         _span = SourceLocationCalculator.GetSpan(token);
         return this;
     }
@@ -41,14 +41,14 @@ public class Diagnostic
             throw new ArgumentException("Node has no tokens", nameof(node));
         }

-        _sourceFile = node.Tokens[0].SourceFile;
+        _sourceFile = node.Tokens[0].SourceText;
         _span = SourceLocationCalculator.GetSpan(node);
         return this;
     }

-    public DiagnosticBuilder At(SourceFile sourceFile, SourceSpan span)
+    public DiagnosticBuilder At(SourceText sourceText, SourceSpan span)
     {
-        _sourceFile = sourceFile;
+        _sourceFile = sourceText;
         _span = span;
         return this;
     }
@@ -68,11 +68,11 @@ public class Diagnostic
     public DiagnosticSeverity Severity { get; }
     public string Message { get; }
-    public SourceFile? SourceFile { get; }
+    public SourceText? SourceFile { get; }
     public SourceSpan? Span { get; }
     public string? Help { get; }

-    private Diagnostic(DiagnosticSeverity severity, string message, SourceFile? sourceFile, SourceSpan? span, string? help)
+    private Diagnostic(DiagnosticSeverity severity, string message, SourceText? sourceFile, SourceSpan? span, string? help)
     {
         Severity = severity;
         Message = message;
@@ -123,9 +123,9 @@ public class Diagnostic
         };
     }

-    private static void AppendSourceContext(StringBuilder sb, SourceFile sourceFile, SourceSpan span)
+    private static void AppendSourceContext(StringBuilder sb, SourceText sourceText, SourceSpan span)
     {
-        var lines = sourceFile.Content.Split('\n');
+        var lines = sourceText.Content.Split('\n');
         var startLine = span.Start.Line;
         var endLine = span.End.Line;
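The SourceFile-to-SourceText rename leaves the builder chain itself untouched. As a reviewer aid, a minimal usage sketch assembled from the calls visible in this commit (Error, WithHelp, At, Build, Format), assuming a Token named token is in scope:

    var diagnostic = Diagnostic
        .Error("Unexpected token")            // severity and message
        .WithHelp("Expected an identifier")   // optional help text
        .At(token)                            // records token.SourceText and its span
        .Build();
    Console.Error.WriteLine(diagnostic.Format());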

View File

@@ -1,10 +1,9 @@
-using System.Diagnostics.CodeAnalysis;
 using Nub.Lang.Frontend.Lexing;
 using Nub.Lang.Frontend.Parsing;

 namespace Nub.Lang.Frontend.Diagnostics;

-public readonly struct SourceFile(string path, string content)
+public readonly struct SourceText(string path, string content)
 {
     public string Path { get; } = path;
     public string Content { get; } = content;
@@ -80,8 +79,8 @@ public static class SourceLocationCalculator
     public static SourceSpan GetSpan(Token token)
     {
-        var start = IndexToLocation(token.SourceFile.Content, token.StartIndex);
-        var end = IndexToLocation(token.SourceFile.Content, token.EndIndex);
+        var start = IndexToLocation(token.SourceText.Content, token.StartIndex);
+        var end = IndexToLocation(token.SourceText.Content, token.EndIndex);
         return new SourceSpan(start, end);
     }
@@ -95,8 +94,8 @@ public static class SourceLocationCalculator
     var firstToken = node.Tokens[0];
     var lastToken = node.Tokens[^1];

-    var start = IndexToLocation(firstToken.SourceFile.Content, firstToken.StartIndex);
-    var end = IndexToLocation(lastToken.SourceFile.Content, lastToken.EndIndex);
+    var start = IndexToLocation(firstToken.SourceText.Content, firstToken.StartIndex);
+    var end = IndexToLocation(lastToken.SourceText.Content, lastToken.EndIndex);
     return new SourceSpan(start, end);
 }
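IndexToLocation is called on both sides of these hunks but its body is not part of the commit. A hypothetical sketch, assuming a SourceLocation(line, column) constructor, consistent with the span.Start.Line accesses elsewhere in the diff:

    // Hypothetical sketch; the real IndexToLocation is not shown in this commit.
    private static SourceLocation IndexToLocation(string content, int index)
    {
        var line = 1;
        var column = 1;
        for (var i = 0; i < index && i < content.Length; i++)
        {
            if (content[i] == '\n')
            {
                line++;      // newline starts a new line
                column = 1;  // and resets the column
            }
            else
            {
                column++;
            }
        }
        return new SourceLocation(line, column);
    }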

View File

@@ -10,7 +10,7 @@ public class DiagnosticsResult(List<Diagnostic> diagnostics)
     {
         foreach (var diagnostic in diagnostics)
         {
-            Console.WriteLine(diagnostic.Format());
+            Console.Error.WriteLine(diagnostic.Format());
         }
     }
 }
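Diagnostics now print to stderr, keeping stdout free for the generated code that Program.cs writes there (see below). The generic DiagnosticsResult<T> this commit leans on is not itself in the diff; a hypothetical shape inferred from its call sites (a constructor taking diagnostics plus a value, Value, PrintAllDiagnostics, HasErrors):

    // Hypothetical sketch inferred from call sites; not the actual source.
    public class DiagnosticsResult<T>(List<Diagnostic> diagnostics, T value)
    {
        public T Value { get; } = value;

        public bool HasErrors => diagnostics.Any(d => d.Severity == DiagnosticSeverity.Error);

        public void PrintAllDiagnostics()
        {
            foreach (var diagnostic in diagnostics)
            {
                Console.Error.WriteLine(diagnostic.Format());
            }
        }
    }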

View File

@@ -2,7 +2,7 @@ using Nub.Lang.Frontend.Diagnostics;

 namespace Nub.Lang.Frontend.Lexing;

-public class DocumentationToken(SourceFile sourceFile, int startIndex, int endIndex, string documentation) : Token(sourceFile, startIndex, endIndex)
+public class DocumentationToken(SourceText sourceText, int startIndex, int endIndex, string documentation) : Token(sourceText, startIndex, endIndex)
 {
     public string Documentation { get; } = documentation;
 }

View File

@@ -2,7 +2,7 @@

 namespace Nub.Lang.Frontend.Lexing;

-public class IdentifierToken(SourceFile sourceFile, int startIndex, int endIndex, string value) : Token(sourceFile, startIndex, endIndex)
+public class IdentifierToken(SourceText sourceText, int startIndex, int endIndex, string value) : Token(sourceText, startIndex, endIndex)
 {
     public string Value { get; } = value;
 }

View File

@@ -8,6 +8,7 @@ public class Lexer
 {
        ["func"] = Symbol.Func,
        ["import"] = Symbol.Import,
+       ["module"] = Symbol.Module,
        ["if"] = Symbol.If,
        ["else"] = Symbol.Else,
        ["while"] = Symbol.While,
@@ -56,14 +57,12 @@ public class Lexer
        ['&'] = Symbol.Ampersand,
    };

-   private string _src = null!;
-   private SourceFile _sourceFile;
+   private SourceText _sourceText;
    private int _index;

-   public List<Token> Lex(string src, SourceFile sourceFile)
+   public DiagnosticsResult<List<Token>> Tokenize(SourceText sourceText)
    {
-       _src = src;
-       _sourceFile = sourceFile;
+       _sourceText = sourceText;
        _index = 0;

        List<Token> tokens = [];
@@ -72,7 +71,7 @@ public class Lexer
            tokens.Add(token);
        }

-       return tokens;
+       return new DiagnosticsResult<List<Token>>([], tokens);
    }

    private void ConsumeWhitespace()
@@ -123,7 +122,7 @@ public class Lexer
        if (documentation != null)
        {
-           return new DocumentationToken(_sourceFile, startIndex, _index, documentation);
+           return new DocumentationToken(_sourceText, startIndex, _index, documentation);
        }

        ConsumeWhitespace();
@@ -146,20 +145,20 @@ public class Lexer
            if (Keywords.TryGetValue(buffer, out var keywordSymbol))
            {
-               return new SymbolToken(_sourceFile, startIndex, _index, keywordSymbol);
+               return new SymbolToken(_sourceText, startIndex, _index, keywordSymbol);
            }

            if (Modifiers.TryGetValue(buffer, out var modifer))
            {
-               return new ModifierToken(_sourceFile, startIndex, _index, modifer);
+               return new ModifierToken(_sourceText, startIndex, _index, modifer);
            }

            if (buffer is "true" or "false")
            {
-               return new LiteralToken(_sourceFile, startIndex, _index, NubPrimitiveType.Bool, buffer);
+               return new LiteralToken(_sourceText, startIndex, _index, NubPrimitiveType.Bool, buffer);
            }

-           return new IdentifierToken(_sourceFile, startIndex, _index, buffer);
+           return new IdentifierToken(_sourceText, startIndex, _index, buffer);
        }

        if (char.IsDigit(current))
@@ -197,7 +196,7 @@ public class Lexer
            }
        }

-       return new LiteralToken(_sourceFile, startIndex, _index, isFloat ? NubPrimitiveType.F64 : NubPrimitiveType.I64, buffer);
+       return new LiteralToken(_sourceText, startIndex, _index, isFloat ? NubPrimitiveType.F64 : NubPrimitiveType.I64, buffer);
    }

    // TODO: Revisit this
@@ -217,7 +216,7 @@ public class Lexer
                Next();
            }

-           return new SymbolToken(_sourceFile, startIndex, _index, chain.Value);
+           return new SymbolToken(_sourceText, startIndex, _index, chain.Value);
            }
        }
    }
@@ -225,7 +224,7 @@ public class Lexer
        if (Chars.TryGetValue(current, out var charSymbol))
        {
            Next();
-           return new SymbolToken(_sourceFile, startIndex, _index, charSymbol);
+           return new SymbolToken(_sourceText, startIndex, _index, charSymbol);
        }

        if (current == '"')
@@ -250,7 +249,7 @@ public class Lexer
                Next();
            }

-           return new LiteralToken(_sourceFile, startIndex, _index, NubPrimitiveType.String, buffer);
+           return new LiteralToken(_sourceText, startIndex, _index, NubPrimitiveType.String, buffer);
        }

        throw new Exception($"Unknown character {current}");
@@ -258,9 +257,9 @@ public class Lexer
    private Optional<char> Peek(int offset = 0)
    {
-       if (_index + offset < _src.Length)
+       if (_index + offset < _sourceText.Content.Length)
        {
-           return _src[_index + offset];
+           return _sourceText.Content[_index + offset];
        }

        return Optional<char>.Empty();
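Lex(string, SourceFile) becomes Tokenize(SourceText), returning a DiagnosticsResult even though the lexer emits no diagnostics yet (note the empty list above). Usage, mirroring how Program.cs wires it up below; the file name is illustrative:

    var lexer = new Lexer();
    var content = File.ReadAllText("main.nub");   // illustrative path
    var tokenizeResult = lexer.Tokenize(new SourceText("main.nub", content));
    tokenizeResult.PrintAllDiagnostics();         // diagnostics go to stderr
    List<Token> tokens = tokenizeResult.Value;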

View File

@@ -2,7 +2,7 @@

 namespace Nub.Lang.Frontend.Lexing;

-public class LiteralToken(SourceFile sourceFile, int startIndex, int endIndex, NubType type, string value) : Token(sourceFile, startIndex, endIndex)
+public class LiteralToken(SourceText sourceText, int startIndex, int endIndex, NubType type, string value) : Token(sourceText, startIndex, endIndex)
 {
     public NubType Type { get; } = type;
     public string Value { get; } = value;

View File

@@ -2,7 +2,7 @@ using Nub.Lang.Frontend.Diagnostics;

 namespace Nub.Lang.Frontend.Lexing;

-public class ModifierToken(SourceFile sourceFile, int startIndex, int endIndex, Modifier modifier) : Token(sourceFile, startIndex, endIndex)
+public class ModifierToken(SourceText sourceText, int startIndex, int endIndex, Modifier modifier) : Token(sourceText, startIndex, endIndex)
 {
     public Modifier Modifier { get; } = modifier;
 }

View File

@@ -2,7 +2,7 @@

 namespace Nub.Lang.Frontend.Lexing;

-public class SymbolToken(SourceFile sourceFile, int startIndex, int endIndex, Symbol symbol) : Token(sourceFile, startIndex, endIndex)
+public class SymbolToken(SourceText sourceText, int startIndex, int endIndex, Symbol symbol) : Token(sourceText, startIndex, endIndex)
 {
     public Symbol Symbol { get; } = symbol;
 }
@@ -10,6 +10,7 @@ public enum Symbol
 {
     Import,
+    Module,
     Func,
     Return,
     If,
@@ -42,5 +43,5 @@ public enum Symbol
     New,
     Struct,
     Caret,
-    Ampersand
+    Ampersand,
 }

View File

@@ -2,9 +2,9 @@

 namespace Nub.Lang.Frontend.Lexing;

-public abstract class Token(SourceFile sourceFile, int startIndex, int endIndex)
+public abstract class Token(SourceText sourceText, int startIndex, int endIndex)
 {
-    public SourceFile SourceFile { get; } = sourceFile;
+    public SourceText SourceText { get; } = sourceText;
     public int StartIndex { get; } = startIndex;
     public int EndIndex { get; } = endIndex;
 }
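Every token now exposes SourceText instead of SourceFile. An illustrative consumer (not part of this commit) that pattern-matches over the token subclasses touched above:

    // Illustrative only; not part of this commit.
    static string Describe(Token token) => token switch
    {
        IdentifierToken id   => $"identifier '{id.Value}'",
        LiteralToken lit     => $"{lit.Type} literal '{lit.Value}'",
        SymbolToken sym      => $"symbol {sym.Symbol}",
        ModifierToken mod    => $"modifier {mod.Modifier}",
        DocumentationToken d => $"doc comment: {d.Documentation}",
        _                    => "unknown token",
    };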

View File

@@ -1,10 +0,0 @@
-using Nub.Lang.Frontend.Lexing;
-
-namespace Nub.Lang.Frontend.Parsing;
-
-public class ModuleNode(IReadOnlyList<Token> tokens, string path, List<string> imports, List<DefinitionNode> definitions) : Node(tokens)
-{
-    public string Path { get; } = path;
-    public List<string> Imports { get; } = imports;
-    public List<DefinitionNode> Definitions { get; } = definitions;
-}

View File

@@ -6,41 +6,44 @@ namespace Nub.Lang.Frontend.Parsing;
 public class Parser
 {
-    private List<Token> _tokens = [];
-    private int _index;
-    private List<Diagnostic> _diagnostics = [];
+    private List<Diagnostic> _diagnostics = [];
+    private List<Token> _tokens = [];
+    private int _index;

-    public DiagnosticsResult<ModuleNode> ParseModule(List<Token> tokens, string rootFilePath)
+    public DiagnosticsResult<SourceFile?> ParseModule(List<Token> tokens)
     {
-        _index = 0;
-        _tokens = tokens;
-        _diagnostics = [];
+        _diagnostics = [];
+        _tokens = tokens;
+        _index = 0;

-        List<DefinitionNode> definitions = [];
-        List<string> imports = [];
-
-        while (Peek().HasValue)
-        {
-            try
-            {
-                if (TryExpectSymbol(Symbol.Import))
-                {
-                    var name = ExpectIdentifier();
-                    imports.Add(name.Value);
-                }
-                else
-                {
-                    definitions.Add(ParseDefinition());
-                }
-            }
-            catch (ParseException ex)
-            {
-                _diagnostics.Add(ex.Diagnostic);
-                RecoverToNextDefinition();
-            }
-        }
-
-        return new DiagnosticsResult<ModuleNode>(_diagnostics, new ModuleNode(GetTokensForNode(0), rootFilePath, imports, definitions));
+        try
+        {
+            List<string> imports = [];
+            while (TryExpectSymbol(Symbol.Import))
+            {
+                var name = ExpectIdentifier();
+                imports.Add(name.Value);
+            }
+
+            ExpectSymbol(Symbol.Module);
+            var module = ExpectIdentifier().Value;
+
+            List<DefinitionNode> definitions = [];
+            while (Peek().HasValue)
+            {
+                definitions.Add(ParseDefinition());
+            }
+
+            return new DiagnosticsResult<SourceFile?>(_diagnostics, new SourceFile(module, imports, definitions));
+        }
+        catch (ParseException ex)
+        {
+            _diagnostics.Add(ex.Diagnostic);
+            RecoverToNextDefinition();
+        }
+
+        return new DiagnosticsResult<SourceFile?>(_diagnostics, null);
     }

     private DefinitionNode ParseDefinition()
@@ -567,7 +570,7 @@ public class Parser
             throw new ParseException(Diagnostic
                 .Error("Unexpected end of file while parsing type")
                 .WithHelp("Expected a type name")
-                .At(_tokens.Last().SourceFile, SourceLocationCalculator.GetSpan(_tokens.Last()))
+                .At(_tokens.Last().SourceText, SourceLocationCalculator.GetSpan(_tokens.Last()))
                 .Build());
         }
@@ -585,7 +588,7 @@ public class Parser
             throw new ParseException(Diagnostic
                 .Error("Unexpected end of file")
                 .WithHelp("Expected more tokens to complete the syntax")
-                .At(_tokens.Last().SourceFile, SourceLocationCalculator.GetSpan(_tokens.Last()))
+                .At(_tokens.Last().SourceText, SourceLocationCalculator.GetSpan(_tokens.Last()))
                 .Build());
         }
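ParseModule drops the root-path parameter, enforces the new import-then-module header order, and yields a nullable SourceFile, so a module that fails to parse contributes nothing downstream. A usage sketch matching the new signature:

    var parser = new Parser();
    DiagnosticsResult<SourceFile?> parseResult = parser.ParseModule(tokens);
    parseResult.PrintAllDiagnostics();
    if (parseResult.Value is { } sourceFile)   // null when parsing failed outright
    {
        Console.Error.WriteLine(
            $"module {sourceFile.Module}: {sourceFile.Imports.Count} imports, {sourceFile.Definitions.Count} definitions");
    }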

View File

@@ -0,0 +1,8 @@
+namespace Nub.Lang.Frontend.Parsing;
+
+public class SourceFile(string module, List<string> imports, List<DefinitionNode> definitions)
+{
+    public string Module { get; } = module;
+    public List<string> Imports { get; } = imports;
+    public List<DefinitionNode> Definitions { get; } = definitions;
+}

View File

@@ -5,20 +5,20 @@ namespace Nub.Lang.Frontend.Typing;
 public class TypeChecker
 {
-    private readonly Dictionary<string, NubType> _variables = new();
-    private readonly List<DefinitionNode> _definitions;
-    private readonly List<Diagnostic> _diagnostics = [];
+    private Dictionary<string, NubType> _variables = new();
+    private List<SourceFile> _sourceFiles = [];
+    private List<Diagnostic> _diagnostics = [];
     private NubType? _currentFunctionReturnType;
     private bool _hasReturnStatement;
+    private List<DefinitionNode> _definitions = [];

-    public TypeChecker(List<DefinitionNode> definitions)
+    public DiagnosticsResult<List<DefinitionNode>> TypeCheck(List<SourceFile> sourceFiles)
     {
-        _definitions = definitions;
-    }
-
-    public DiagnosticsResult TypeCheck()
-    {
-        _diagnostics.Clear();
+        _variables = new Dictionary<string, NubType>();
+        _diagnostics = [];
+        _definitions = sourceFiles.SelectMany(x => x.Definitions).ToList();
+        _currentFunctionReturnType = null;
+        _hasReturnStatement = false;

         foreach (var structDef in _definitions.OfType<StructDefinitionNode>())
         {
@@ -30,7 +30,7 @@ public class TypeChecker
             TypeCheckFuncDef(funcDef);
         }

-        return new DiagnosticsResult(_diagnostics.ToList());
+        return new DiagnosticsResult<List<DefinitionNode>>(_diagnostics, _definitions);
     }

     private void TypeCheckStructDef(StructDefinitionNode structDef)
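The constructor dependency is gone: TypeCheck now receives the parsed files, resets its own state, flattens their definitions, and hands them back for code generation. Usage, as wired up in Program.cs below:

    var typeChecker = new TypeChecker();
    var typeCheckResult = typeChecker.TypeCheck(files);   // files: List<SourceFile>
    typeCheckResult.PrintAllDiagnostics();
    List<DefinitionNode> definitions = typeCheckResult.Value;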

View File

@@ -1,6 +1,4 @@
-using System.Diagnostics.CodeAnalysis;
 using Nub.Lang.Backend;
-using Nub.Lang.Frontend;
 using Nub.Lang.Frontend.Diagnostics;
 using Nub.Lang.Frontend.Lexing;
 using Nub.Lang.Frontend.Parsing;
@@ -10,94 +8,57 @@ namespace Nub.Lang;
 internal static class Program
 {
-    private static readonly Lexer Lexer = new();
-    private static readonly Parser Parser = new();
-
     public static int Main(string[] args)
     {
         if (args.Length != 2)
         {
-            Console.WriteLine("Usage: nub <input-dir> <output-file>");
-            Console.WriteLine("Example: nub src out.asm");
+            Console.Error.WriteLine("Usage: nub <input-dir>");
+            Console.Error.WriteLine("Example: nub src");
             return 1;
         }

-        var input = Path.GetFullPath(args[0]);
-        var output = Path.GetFullPath(args[1]);
+        var srcDir = Path.GetFullPath(args[0]);

-        if (!Directory.Exists(input))
+        if (!Directory.Exists(srcDir))
         {
-            Console.WriteLine($"Error: Input directory '{input}' does not exist.");
+            Console.Error.WriteLine($"Error: Input directory '{srcDir}' does not exist.");
             return 1;
         }

-        var outputDir = Path.GetDirectoryName(output);
-        if (outputDir == null || !Directory.Exists(outputDir))
-        {
-            Console.WriteLine($"Error: Output directory '{outputDir}' does not exist.");
-            return 1;
-        }
-
-        if (string.IsNullOrWhiteSpace(Path.GetFileName(output)))
-        {
-            Console.WriteLine("Error: Output path must specify a file, not a directory.");
-            return 1;
-        }
-
-        if (TryRunFrontend(input, out var definitions))
-        {
-            return 1;
-        }
-
-        var generator = new Generator(definitions);
-        var result = generator.Generate();
-        File.WriteAllText(output, result);
-
-        return 0;
+        return Compile(srcDir);
     }

-    private static bool TryRunFrontend(string rootFilePath, out List<DefinitionNode> definitions)
+    private static int Compile(string srcDir)
     {
-        List<ModuleNode> modules = [];
-        var error = TryRunFrontend(rootFilePath, modules);
-        definitions = modules.SelectMany(f => f.Definitions).ToList();
-        var typeChecker = new TypeChecker(definitions);
-        var typeCheckResult = typeChecker.TypeCheck();
-        typeCheckResult.PrintAllDiagnostics();
-        error = error || typeCheckResult.HasErrors;
-        return error;
-    }
-
-    private static bool TryRunFrontend(string rootFilePath, List<ModuleNode> modules)
-    {
-        var error = false;
-        var filePaths = Directory.EnumerateFiles(rootFilePath, "*.nub", SearchOption.TopDirectoryOnly);
-        List<Token> tokens = [];
-        foreach (var filePath in filePaths)
-        {
-            var src = File.ReadAllText(filePath);
-            tokens.AddRange(Lexer.Lex(src, new SourceFile(filePath, src)));
-        }
-
-        var parseResult = Parser.ParseModule(tokens, rootFilePath);
-        parseResult.PrintAllDiagnostics();
-        modules.Add(parseResult.Value);
-
-        foreach (var import in parseResult.Value.Imports)
-        {
-            var importPath = Path.GetFullPath(import, parseResult.Value.Path);
-            if (modules.All(m => m.Path != importPath))
-            {
-                if (!TryRunFrontend(importPath, modules))
-                {
-                    error = true;
-                }
-            }
-        }
-
-        return error;
+        var lexer = new Lexer();
+        var parser = new Parser();
+        var typeChecker = new TypeChecker();
+
+        List<SourceFile> files = [];
+        foreach (var file in Directory.EnumerateFiles(srcDir, "*.nub", SearchOption.AllDirectories))
+        {
+            var content = File.ReadAllText(file);
+
+            var tokenizeResult = lexer.Tokenize(new SourceText(file, content));
+            tokenizeResult.PrintAllDiagnostics();
+
+            var parseResult = parser.ParseModule(tokenizeResult.Value);
+            parseResult.PrintAllDiagnostics();
+
+            if (parseResult.Value != null)
+            {
+                files.Add(parseResult.Value);
+            }
+        }
+
+        var typeCheckResult = typeChecker.TypeCheck(files);
+        typeCheckResult.PrintAllDiagnostics();
+
+        var generator = new Generator(typeCheckResult.Value);
+        var result = generator.Generate();
+        Console.Out.Write(result);
+
+        return 0;
     }
 }