Author: nub31
Date: 2025-05-31 23:48:29 +02:00
Parent: bba7906221
Commit: 65fd11ca6b
14 changed files with 211 additions and 162 deletions

View File

@@ -9,7 +9,7 @@ struct Human {
 export func main(args: []^string) {
     let i: i64
-    c::printf("%d\n", args.count)
+    c:printf("%d\n", args.count)
     while i < args.count {
         c::printf("%s\n", args[i])

View File

@@ -1,4 +1,5 @@
+using Nub.Lang;
 using Nub.Lang.Backend;
 using Nub.Lang.Frontend.Diagnostics;
 using Nub.Lang.Frontend.Lexing;
 using Nub.Lang.Frontend.Parsing;
@@ -29,7 +30,7 @@ foreach (var file in Directory.EnumerateFiles(srcDir, "*.nub", SearchOption.AllD
 {
     var content = File.ReadAllText(file);
-    var tokenizeResult = lexer.Tokenize(new SourceText(file, content));
+    var tokenizeResult = lexer.Tokenize(new SourceText(content));
     tokenizeResult.PrintAllDiagnostics();
     error = error || tokenizeResult.HasErrors;

View File

@@ -13,7 +13,7 @@ public static class ConsoleColors
     public const string BrightWhite = "\e[97m";
     public const string Gray = "\e[90m";

-    public static bool IsColorSupported()
+    private static bool IsColorSupported()
     {
         var term = Environment.GetEnvironmentVariable("TERM");
         var colorTerm = Environment.GetEnvironmentVariable("COLORTERM");

View File

@@ -4,22 +4,14 @@ using Nub.Lang.Frontend.Parsing;
 namespace Nub.Lang.Frontend.Diagnostics;

-public enum DiagnosticSeverity
-{
-    Info,
-    Warning,
-    Error
-}
-
 public class Diagnostic
 {
     public class DiagnosticBuilder
     {
         private readonly DiagnosticSeverity _severity;
         private readonly string _message;
-        private SourceText? _sourceFile;
-        private SourceSpan? _span;
         private string? _help;
+        private SourceSpan? _sourceSpan;

         public DiagnosticBuilder(DiagnosticSeverity severity, string message)
         {
@@ -29,27 +21,19 @@ public class Diagnostic
         public DiagnosticBuilder At(Token token)
         {
-            _sourceFile = token.SourceText;
-            _span = SourceLocationCalculator.GetSpan(token);
+            _sourceSpan = token.Span;
             return this;
         }

         public DiagnosticBuilder At(Node node)
         {
-            if (!node.Tokens.Any())
-            {
-                throw new ArgumentException("Node has no tokens", nameof(node));
-            }
-            _sourceFile = node.Tokens[0].SourceText;
-            _span = SourceLocationCalculator.GetSpan(node);
+            _sourceSpan = SourceSpan.Merge(node.Tokens.Select(t => t.Span));
             return this;
         }

-        public DiagnosticBuilder At(SourceText sourceText, SourceSpan span)
+        public DiagnosticBuilder At(SourceSpan span)
         {
-            _sourceFile = sourceText;
-            _span = span;
+            _sourceSpan = span;
             return this;
         }
@@ -59,7 +43,7 @@ public class Diagnostic
             return this;
         }

-        public Diagnostic Build() => new(_severity, _message, _sourceFile, _span, _help);
+        public Diagnostic Build() => new(_severity, _message, _sourceSpan, _help);
     }

     public static DiagnosticBuilder Error(string message) => new(DiagnosticSeverity.Error, message);
@@ -68,15 +52,13 @@ public class Diagnostic
     public DiagnosticSeverity Severity { get; }
     public string Message { get; }
-    public SourceText? SourceFile { get; }
     public SourceSpan? Span { get; }
     public string? Help { get; }

-    private Diagnostic(DiagnosticSeverity severity, string message, SourceText? sourceFile, SourceSpan? span, string? help)
+    private Diagnostic(DiagnosticSeverity severity, string message, SourceSpan? span, string? help)
     {
         Severity = severity;
         Message = message;
-        SourceFile = sourceFile;
         Span = span;
         Help = help;
     }
@@ -88,18 +70,18 @@ public class Diagnostic
         var severityText = GetSeverityText(Severity);
         sb.Append(severityText);

-        if (SourceFile.HasValue)
+        if (Span.HasValue)
         {
-            var locationText = $" at {SourceFile.Value.Path}:{Span}";
-            sb.Append(ConsoleColors.Colorize(locationText, ConsoleColors.Gray));
+            // var locationText = $" at {Span.Value.Path}:{Span}";
+            // sb.Append(ConsoleColors.Colorize(locationText, ConsoleColors.Gray));
         }

         sb.Append(": ");
         sb.AppendLine(ConsoleColors.Colorize(Message, ConsoleColors.BrightWhite));

-        if (SourceFile.HasValue && Span.HasValue)
+        if (Span.HasValue)
         {
-            AppendSourceContext(sb, SourceFile.Value, Span.Value);
+            AppendSourceContext(sb, Span.Value);
         }

         if (!string.IsNullOrEmpty(Help))
@@ -123,9 +105,9 @@ public class Diagnostic
         };
     }

-    private static void AppendSourceContext(StringBuilder sb, SourceText sourceText, SourceSpan span)
+    private static void AppendSourceContext(StringBuilder sb, SourceSpan span)
     {
-        var lines = sourceText.Content.Split('\n');
+        var lines = span.Content.Text.Split('\n');
         var startLine = span.Start.Line;
         var endLine = span.End.Line;
@@ -143,7 +125,8 @@ public class Diagnostic
         for (var lineNum = contextStart; lineNum < startLine; lineNum++)
         {
-            AppendContextLine(sb, lineNum, lines[lineNum - 1], lineNumWidth);
+            var line = lines[lineNum - 1];
+            AppendContextLine(sb, lineNum, line, lineNumWidth);
         }

         for (var lineNum = startLine; lineNum <= endLine; lineNum++)
@@ -156,7 +139,8 @@ public class Diagnostic
         var contextEnd = Math.Min(lines.Length, endLine + contextLines);
         for (var lineNum = endLine + 1; lineNum <= contextEnd; lineNum++)
         {
-            AppendContextLine(sb, lineNum, lines[lineNum - 1], lineNumWidth);
+            var line = lines[lineNum - 1];
+            AppendContextLine(sb, lineNum, line, lineNumWidth);
         }
     }
@@ -202,4 +186,11 @@ public class Diagnostic
         return new string(indicator, line.Length);
     }
+}
+
+public enum DiagnosticSeverity
+{
+    Info,
+    Warning,
+    Error
 }
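
As a reading aid (not part of the commit): a minimal sketch of how the reworked builder is driven, using only the members visible in this diff (Error, WithHelp, At, Build). The Token value and the surrounding parser plumbing are assumed.

// Sketch only; assumes a lexer Token that now carries its own Span.
Diagnostic ReportUnexpectedToken(Token token)
{
    return Diagnostic
        .Error("Unexpected token")
        .WithHelp("Expected an expression here")  // optional help text
        .At(token)                                // records token.Span directly, no SourceLocationCalculator
        .Build();                                 // result carries Severity, Message, Span and Help
}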

View File

@@ -1,102 +0,0 @@
-using Nub.Lang.Frontend.Lexing;
-using Nub.Lang.Frontend.Parsing;
-
-namespace Nub.Lang.Frontend.Diagnostics;
-
-public readonly struct SourceText(string path, string content)
-{
-    public string Path { get; } = path;
-    public string Content { get; } = content;
-}
-
-public readonly struct SourceLocation(int line, int column)
-{
-    public int Line { get; } = line;
-    public int Column { get; } = column;
-}
-
-public readonly struct SourceSpan(SourceLocation start, SourceLocation end)
-{
-    public SourceLocation Start { get; } = start;
-    public SourceLocation End { get; } = end;
-
-    public override string ToString()
-    {
-        if (Start.Line == End.Line)
-        {
-            if (Start.Column == End.Column)
-            {
-                return $"{Start.Line}:{Start.Column}";
-            }
-            return $"{Start.Line}:{Start.Column}-{End.Column}";
-        }
-        return $"{Start.Line}:{Start.Column}-{End.Line}:{End.Column}";
-    }
-}
-
-public static class SourceLocationCalculator
-{
-    private static int[] GetLineStarts(string content)
-    {
-        var lineStarts = new List<int> { 0 };
-        for (var i = 0; i < content.Length; i++)
-        {
-            if (content[i] == '\n')
-            {
-                lineStarts.Add(i + 1);
-            }
-        }
-        return lineStarts.ToArray();
-    }
-
-    private static SourceLocation IndexToLocation(string content, int index)
-    {
-        if (index < 0 || index > content.Length)
-        {
-            throw new ArgumentOutOfRangeException(nameof(index), $"Index {index} is out of range for content of length {content.Length}");
-        }
-        var lineStarts = GetLineStarts(content);
-        var line = Array.BinarySearch(lineStarts, index);
-        if (line < 0)
-        {
-            line = ~line - 1;
-        }
-        if (line < lineStarts.Length - 1 && lineStarts[line + 1] == index && index < content.Length && content[index] == '\n')
-        {
-            line++;
-        }
-        var column = index - lineStarts[line] + 1;
-        return new SourceLocation(line + 1, column);
-    }
-
-    public static SourceSpan GetSpan(Token token)
-    {
-        var start = IndexToLocation(token.SourceText.Content, token.StartIndex);
-        var end = IndexToLocation(token.SourceText.Content, token.EndIndex);
-        return new SourceSpan(start, end);
-    }
-
-    public static SourceSpan GetSpan(Node node)
-    {
-        if (!node.Tokens.Any())
-        {
-            throw new ArgumentException("Node has no tokens", nameof(node));
-        }
-        var firstToken = node.Tokens[0];
-        var lastToken = node.Tokens[^1];
-        var start = IndexToLocation(firstToken.SourceText.Content, firstToken.StartIndex);
-        var end = IndexToLocation(lastToken.SourceText.Content, lastToken.EndIndex);
-        return new SourceSpan(start, end);
-    }
-}

View File

@@ -2,7 +2,7 @@ using Nub.Lang.Frontend.Diagnostics;
 namespace Nub.Lang.Frontend.Lexing;

-public class DocumentationToken(SourceText sourceText, int startIndex, int endIndex, string documentation) : Token(sourceText, startIndex, endIndex)
+public class DocumentationToken(SourceSpan span, string documentation) : Token(span)
 {
     public string Documentation { get; } = documentation;
 }

View File

@@ -2,7 +2,7 @@
 namespace Nub.Lang.Frontend.Lexing;

-public class IdentifierToken(SourceText sourceText, int startIndex, int endIndex, string value) : Token(sourceText, startIndex, endIndex)
+public class IdentifierToken(SourceSpan span, string value) : Token(span)
 {
     public string Value { get; } = value;
 }

View File

@@ -109,7 +109,7 @@ public class Lexer
                 Next();
             }
             Next();
-            return new DocumentationToken(_sourceText, startIndex, _index, buffer);
+            return new DocumentationToken(CreateSpan(startIndex), buffer);
         }

         while (Peek().TryGetValue(out var character) && character != '\n')
@@ -132,20 +132,20 @@ public class Lexer
             if (Keywords.TryGetValue(buffer, out var keywordSymbol))
             {
-                return new SymbolToken(_sourceText, startIndex, _index, keywordSymbol);
+                return new SymbolToken(CreateSpan(startIndex), keywordSymbol);
             }

             if (Modifiers.TryGetValue(buffer, out var modifer))
             {
-                return new ModifierToken(_sourceText, startIndex, _index, modifer);
+                return new ModifierToken(CreateSpan(startIndex), modifer);
             }

             if (buffer is "true" or "false")
             {
-                return new LiteralToken(_sourceText, startIndex, _index, NubPrimitiveType.Bool, buffer);
+                return new LiteralToken(CreateSpan(startIndex), NubPrimitiveType.Bool, buffer);
             }

-            return new IdentifierToken(_sourceText, startIndex, _index, buffer);
+            return new IdentifierToken(CreateSpan(startIndex), buffer);
         }

         if (char.IsDigit(current))
@@ -183,7 +183,7 @@ public class Lexer
                 }
             }
-            return new LiteralToken(_sourceText, startIndex, _index, isFloat ? NubPrimitiveType.F64 : NubPrimitiveType.I64, buffer);
+            return new LiteralToken(CreateSpan(startIndex), isFloat ? NubPrimitiveType.F64 : NubPrimitiveType.I64, buffer);
         }

         // TODO: Revisit this
@@ -203,7 +203,7 @@ public class Lexer
                     Next();
                 }
-                return new SymbolToken(_sourceText, startIndex, _index, chain.Value);
+                return new SymbolToken(CreateSpan(startIndex), chain.Value);
             }
         }
     }
@@ -211,7 +211,7 @@ public class Lexer
         if (Chars.TryGetValue(current, out var charSymbol))
         {
             Next();
-            return new SymbolToken(_sourceText, startIndex, _index, charSymbol);
+            return new SymbolToken(CreateSpan(startIndex), charSymbol);
         }

         if (current == '"')
@@ -236,17 +236,42 @@ public class Lexer
                 Next();
             }
-            return new LiteralToken(_sourceText, startIndex, _index, NubPrimitiveType.String, buffer);
+            return new LiteralToken(CreateSpan(startIndex), NubPrimitiveType.String, buffer);
         }

         throw new Exception($"Unknown character {current}");
     }

+    private SourceLocation CreateLocation(int index)
+    {
+        var line = 1;
+        var column = 0;
+        for (var i = 0; i < index; i++)
+        {
+            if (_sourceText.Text[i] == '\n')
+            {
+                column = 1;
+                line += 1;
+            }
+            else
+            {
+                column += 1;
+            }
+        }
+        return new SourceLocation(line, column);
+    }
+
+    private SourceSpan CreateSpan(int startIndex)
+    {
+        return new SourceSpan(_sourceText, CreateLocation(startIndex), CreateLocation(_index));
+    }
+
     private Optional<char> Peek(int offset = 0)
     {
-        if (_index + offset < _sourceText.Content.Length)
+        if (_index + offset < _sourceText.Text.Length)
         {
-            return _sourceText.Content[_index + offset];
+            return _sourceText.Text[_index + offset];
         }
         return Optional<char>.Empty();
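
As an illustration (not part of the commit), the index-to-location scan added above can be restated as a standalone helper; it mirrors Lexer.CreateLocation: lines are 1-based, and the column resets to 1 at each newline and then increments once per character scanned.

// Standalone restatement of the CreateLocation scan, for illustration only.
static (int Line, int Column) IndexToLineColumn(string text, int index)
{
    var line = 1;
    var column = 0;
    for (var i = 0; i < index; i++)
    {
        if (text[i] == '\n')
        {
            column = 1;
            line += 1;
        }
        else
        {
            column += 1;
        }
    }
    return (line, column);
}

// Example: IndexToLineColumn("let x\nlet y", 6) returns (2, 1),
// the location of the 'l' that starts the second line.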

View File

@@ -3,7 +3,7 @@ using Nub.Lang.Frontend.Typing;
 namespace Nub.Lang.Frontend.Lexing;

-public class LiteralToken(SourceText sourceText, int startIndex, int endIndex, NubType type, string value) : Token(sourceText, startIndex, endIndex)
+public class LiteralToken(SourceSpan span, NubType type, string value) : Token(span)
 {
     public NubType Type { get; } = type;
     public string Value { get; } = value;

View File

@@ -2,7 +2,7 @@ using Nub.Lang.Frontend.Diagnostics;
 namespace Nub.Lang.Frontend.Lexing;

-public class ModifierToken(SourceText sourceText, int startIndex, int endIndex, Modifier modifier) : Token(sourceText, startIndex, endIndex)
+public class ModifierToken(SourceSpan span, Modifier modifier) : Token(span)
 {
     public Modifier Modifier { get; } = modifier;
 }

View File

@@ -2,7 +2,7 @@
 namespace Nub.Lang.Frontend.Lexing;

-public class SymbolToken(SourceText sourceText, int startIndex, int endIndex, Symbol symbol) : Token(sourceText, startIndex, endIndex)
+public class SymbolToken(SourceSpan span, Symbol symbol) : Token(span)
 {
     public Symbol Symbol { get; } = symbol;
 }

View File

@@ -2,9 +2,7 @@
 namespace Nub.Lang.Frontend.Lexing;

-public abstract class Token(SourceText sourceText, int startIndex, int endIndex)
+public abstract class Token(SourceSpan span)
 {
-    public SourceText SourceText { get; } = sourceText;
-    public int StartIndex { get; } = startIndex;
-    public int EndIndex { get; } = endIndex;
+    public SourceSpan Span { get; } = span;
 }

View File

@@ -672,7 +672,7 @@ public class Parser
             throw new ParseException(Diagnostic
                 .Error("Unexpected end of file while parsing type")
                 .WithHelp("Expected a type name")
-                .At(_tokens.Last().SourceText, SourceLocationCalculator.GetSpan(_tokens.Last()))
+                .At(_tokens.Last())
                 .Build());
         }
@@ -690,7 +690,7 @@ public class Parser
             throw new ParseException(Diagnostic
                 .Error("Unexpected end of file")
                 .WithHelp("Expected more tokens to complete the syntax")
-                .At(_tokens.Last().SourceText, SourceLocationCalculator.GetSpan(_tokens.Last()))
+                .At(_tokens.Last())
                 .Build());
         }

src/lang/Nub.Lang/Source.cs (new file, 136 lines added)
View File

@@ -0,0 +1,136 @@
+using System.Diagnostics.CodeAnalysis;
+
+namespace Nub.Lang;
+
+public readonly struct SourceSpan(SourceText content, SourceLocation start, SourceLocation end) : IEquatable<SourceSpan>
+{
+    public SourceText Content { get; } = content;
+    public SourceLocation Start { get; } = start;
+    public SourceLocation End { get; } = end;
+
+    /// <summary>
+    /// Merges one or more <see cref="SourceSpan"/> from a single file to a single <see cref="SourceSpan"/>
+    /// </summary>
+    /// <param name="spanEnumerable">The spans to merged</param>
+    /// <returns>The merged <see cref="SourceSpan"/></returns>
+    public static SourceSpan Merge(IEnumerable<SourceSpan> spanEnumerable)
+    {
+        var spans = spanEnumerable.ToArray();
+        if (spans.Length == 0)
+        {
+            throw new ArgumentException("Cannot merge empty spans", nameof(spanEnumerable));
+        }
+        var files = spans.Select(s => s.Content).Distinct().ToArray();
+        if (files.Length > 1)
+        {
+            throw new ArgumentException("Cannot merge spans from multiple files", nameof(spanEnumerable));
+        }
+        var first = spans.OrderBy(s => s.Start.Line).ThenBy(s => s.Start.Column).First().Start;
+        var last = spans.OrderByDescending(s => s.End.Line).ThenByDescending(s => s.End.Column).Last().End;
+        return new SourceSpan(files[0], first, last);
+    }
+
+    public override string ToString()
+    {
+        if (Start.Line == End.Line)
+        {
+            if (Start.Column == End.Column)
+            {
+                return $"{Start.Line}:{Start.Column}";
+            }
+            return $"{Start.Line}:{Start.Column}-{End.Column}";
+        }
+        return $"{Start.Line}:{Start.Column}-{End.Line}:{End.Column}";
+    }
+
+    public override bool Equals([NotNullWhen(true)] object? obj)
+    {
+        return obj is SourceSpan other && Equals(other);
+    }
+
+    public bool Equals(SourceSpan other)
+    {
+        return Content.Equals(other.Content) && Start.Equals(other.Start) && End.Equals(other.End);
+    }
+
+    public override int GetHashCode()
+    {
+        return HashCode.Combine(Content, Start, End);
+    }
+
+    public static bool operator ==(SourceSpan left, SourceSpan right)
+    {
+        return left.Equals(right);
+    }
+
+    public static bool operator !=(SourceSpan left, SourceSpan right)
+    {
+        return !(left == right);
+    }
+}
+
+public readonly struct SourceText(string text) : IEquatable<SourceText>
+{
+    public string Text { get; } = text;
+
+    public bool Equals(SourceText other)
+    {
+        return Text == other.Text;
+    }
+
+    public override bool Equals([NotNullWhen(true)] object? obj)
+    {
+        return obj is SourceText other && Equals(other);
+    }
+
+    public override int GetHashCode()
+    {
+        return Text.GetHashCode();
+    }
+
+    public static bool operator ==(SourceText left, SourceText right)
+    {
+        return left.Equals(right);
+    }
+
+    public static bool operator !=(SourceText left, SourceText right)
+    {
+        return !(left == right);
+    }
+}
+
+public readonly struct SourceLocation(int line, int column) : IEquatable<SourceLocation>
+{
+    public int Line { get; } = line;
+    public int Column { get; } = column;
+
+    public override bool Equals([NotNullWhen(true)] object? obj)
+    {
+        return obj is SourceLocation other && Equals(other);
+    }
+
+    public bool Equals(SourceLocation other)
+    {
+        return Line == other.Line && Column == other.Column;
+    }
+
+    public override int GetHashCode()
+    {
+        return HashCode.Combine(Line, Column);
+    }
+
+    public static bool operator ==(SourceLocation left, SourceLocation right)
+    {
+        return left.Equals(right);
+    }
+
+    public static bool operator !=(SourceLocation left, SourceLocation right)
+    {
+        return !(left == right);
+    }
+}
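
A short usage sketch (illustrative only, not part of the commit) of the span types defined above; this is the operation DiagnosticBuilder.At(Node) relies on when it merges a node's token spans.

// Two spans over the same SourceText can be merged into one span.
var source = new SourceText("let x = 1\nlet y = x + 2\n");
var a = new SourceSpan(source, new SourceLocation(1, 1), new SourceLocation(1, 9));
var b = new SourceSpan(source, new SourceLocation(2, 1), new SourceLocation(2, 13));

// Merge keeps the shared SourceText and picks the earliest Start across the inputs
// (see the ordering logic in Merge above for how the End is selected).
var merged = SourceSpan.Merge(new[] { a, b });

// An empty sequence, or spans over different SourceText values, throws ArgumentException:
// SourceSpan.Merge(Array.Empty<SourceSpan>());
// SourceSpan.Merge(new[] { a, new SourceSpan(new SourceText("other"), b.Start, b.End) });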