Improve messages, implement* field access

Compiler messages are now printed in color together with their source location and the offending source excerpt, and field access support has been added to the type checker. However, field access nodes that contain other field access nodes (nested field access) are not yet supported.
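
For illustration, the kind of program the new check handles, mirroring the testmin.cat file added at the bottom of this commit:

	struct A {
		a b: i32,
	}

	let inst := .A{ 1 2 }
	inst.a as u32

A single-level access such as inst.a is resolved to the field's declared type; an access whose operand is itself a field access (a hypothetical inst.a.b, say) is rejected by the new identifier-only checks for now.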

Signed-off-by: Slendi <slendi@socopon.com>
Slendi 2024-04-08 11:42:39 +03:00
parent 0f5db0972c
commit a966ff45a3
11 changed files with 362 additions and 139 deletions

View File

@@ -49,14 +49,14 @@ NodeKind :: enum {
 Node :: struct {
 	kind: NodeKind,
-	range: TextRange,
+	range: SourceLocation,
 	children: [dynamic]^Node,
 	value: TokenValue,
 	value_token_kind: TokenKind,
 	return_type: ^Type,
 }
-node_create_value :: proc(kind: NodeKind, range: TextRange, value: TokenValue) -> (ret: ^Node) {
+node_create_value :: proc(kind: NodeKind, range: SourceLocation, value: TokenValue) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = kind,
@@ -66,7 +66,7 @@ node_create_value :: proc(kind: NodeKind, range: TextRange, value: TokenValue) -
 	return
 }
-node_create_pointer :: proc(range: TextRange, value: ^Node) -> (ret: ^Node) {
+node_create_pointer :: proc(range: SourceLocation, value: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .Pointer,
@@ -76,7 +76,7 @@ node_create_pointer :: proc(range: TextRange, value: ^Node) -> (ret: ^Node) {
 	return
 }
-node_create_array :: proc(range: TextRange, size: u64, value: ^Node) -> (ret: ^Node) {
+node_create_array :: proc(range: SourceLocation, size: u64, value: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .Array,
@@ -87,7 +87,7 @@ node_create_array :: proc(range: TextRange, size: u64, value: ^Node) -> (ret: ^N
 	return
 }
-node_create_block :: proc(range: TextRange, children: [dynamic]^Node) -> (ret: ^Node) {
+node_create_block :: proc(range: SourceLocation, children: [dynamic]^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .Block,
@@ -97,7 +97,7 @@ node_create_block :: proc(range: TextRange, children: [dynamic]^Node) -> (ret: ^
 	return
 }
-node_create_binary :: proc(kind: TokenKind, range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
+node_create_binary :: proc(kind: TokenKind, range: SourceLocation, left: ^Node, right: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .BinaryExpression,
@@ -108,7 +108,7 @@ node_create_binary :: proc(kind: TokenKind, range: TextRange, left: ^Node, right
 	return
 }
-node_create_unary :: proc(kind: TokenKind, range: TextRange, operand: ^Node) -> (ret: ^Node) {
+node_create_unary :: proc(kind: TokenKind, range: SourceLocation, operand: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .UnaryExpression,
@@ -119,7 +119,7 @@ node_create_unary :: proc(kind: TokenKind, range: TextRange, operand: ^Node) ->
 	return
 }
-node_create_field_access :: proc(range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
+node_create_field_access :: proc(range: SourceLocation, left: ^Node, right: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .FieldAccess,
@@ -129,7 +129,7 @@ node_create_field_access :: proc(range: TextRange, left: ^Node, right: ^Node) ->
 	return
 }
-node_create_index_access :: proc(range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
+node_create_index_access :: proc(range: SourceLocation, left: ^Node, right: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .IndexAccess,
@@ -139,7 +139,7 @@ node_create_index_access :: proc(range: TextRange, left: ^Node, right: ^Node) ->
 	return
 }
-node_create_function_call :: proc(range: TextRange, name: ^Node, args: [dynamic]^Node) -> (ret: ^Node) {
+node_create_function_call :: proc(range: SourceLocation, name: ^Node, args: [dynamic]^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .FunctionCall,
@@ -153,7 +153,7 @@ node_create_function_call :: proc(range: TextRange, name: ^Node, args: [dynamic]
 }
 node_create_extern_function :: proc(
-	range: TextRange,
+	range: SourceLocation,
 	name: [dynamic]u8,
 	return_type: ^Node,
 	args: [dynamic]^Node,
@@ -174,7 +174,7 @@ node_create_extern_function :: proc(
 }
 node_create_function :: proc(
-	range: TextRange,
+	range: SourceLocation,
 	name: [dynamic]u8,
 	return_type, body: ^Node,
 	args: [dynamic]^Node,
@@ -264,7 +264,7 @@ parse_use_path2 :: proc(path: [dynamic]u8) -> (ret: [dynamic]u8) {
 	return
 }
-node_create_use :: proc(range: TextRange, path, alias: [dynamic]u8) -> (ret: ^Node) {
+node_create_use :: proc(range: SourceLocation, path, alias: [dynamic]u8) -> (ret: ^Node) {
 	path_ := path
 	// Check if the path ends with ".cat", if not, append it.
 	path_ = parse_use_path(path)
@@ -288,7 +288,7 @@ node_create_use :: proc(range: TextRange, path, alias: [dynamic]u8) -> (ret: ^No
 	return
 }
-node_create_if :: proc(range: TextRange, condition, then, else_: ^Node) -> (ret: ^Node) {
+node_create_if :: proc(range: SourceLocation, condition, then, else_: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .If,
@@ -305,7 +305,7 @@ node_create_if :: proc(range: TextRange, condition, then, else_: ^Node) -> (ret:
 	return
 }
-node_create_for :: proc(range: TextRange, init, condition, step, body: ^Node) -> (ret: ^Node) {
+node_create_for :: proc(range: SourceLocation, init, condition, step, body: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .For,
@@ -315,7 +315,7 @@ node_create_for :: proc(range: TextRange, init, condition, step, body: ^Node) ->
 	return
 }
-node_create_variable :: proc(range: TextRange, name, type_, value: ^Node, is_const: bool) -> (ret: ^Node) {
+node_create_variable :: proc(range: SourceLocation, name, type_, value: ^Node, is_const: bool) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .VariableDeclaration,
@@ -326,7 +326,7 @@ node_create_variable :: proc(range: TextRange, name, type_, value: ^Node, is_con
 	return
 }
-node_create_cast :: proc(range: TextRange, value, type_: ^Node) -> (ret: ^Node) {
+node_create_cast :: proc(range: SourceLocation, value, type_: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .Cast,
@@ -336,7 +336,7 @@ node_create_cast :: proc(range: TextRange, value, type_: ^Node) -> (ret: ^Node)
 	return
 }
-node_create_bitwise_cast :: proc(range: TextRange, value, type_: ^Node) -> (ret: ^Node) {
+node_create_bitwise_cast :: proc(range: SourceLocation, value, type_: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .BitwiseCast,
@@ -346,7 +346,7 @@ node_create_bitwise_cast :: proc(range: TextRange, value, type_: ^Node) -> (ret:
 	return
 }
-node_create_ret :: proc(range: TextRange, value: ^Node) -> (ret: ^Node) {
+node_create_ret :: proc(range: SourceLocation, value: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .Ret,
@@ -356,7 +356,13 @@ node_create_ret :: proc(range: TextRange, value: ^Node) -> (ret: ^Node) {
 	return
 }
-node_create_struct_initializer :: proc(range: TextRange, name: [dynamic]u8, fields: [dynamic]^Node) -> (ret: ^Node) {
+node_create_struct_initializer :: proc(
+	range: SourceLocation,
+	name: [dynamic]u8,
+	fields: [dynamic]^Node,
+) -> (
+	ret: ^Node,
+) {
 	ret = new(Node)
 	if ret == nil {
 		panic("Failed to allocate node")
@@ -374,7 +380,7 @@ node_create_struct_initializer :: proc(range: TextRange, name: [dynamic]u8, fiel
 }
 node_create_struct_enum_or_union :: proc(
-	range: TextRange,
+	range: SourceLocation,
 	kind: NodeKind,
 	name: [dynamic]u8,
 	fields: [dynamic]^Node,

View File

@@ -1,24 +1,93 @@
 package main
+import "core:fmt"
 MessageLevel :: enum {
+	FIXME,
 	Warning,
 	Error,
 	Fatal,
 }
-// FIXME: Add file
 Message :: struct {
 	level: MessageLevel,
 	message: string,
-	range: TextRange,
+	source_location: SourceLocation,
 }
-g_message_list : [dynamic]Message
+g_message_list: [dynamic]Message
-message_create :: proc(level: MessageLevel, message: string, range: TextRange) -> Message {
-	return Message {
-		level = level,
-		message = message,
-		range = range,
-	}
-}
+message_create :: proc(level: MessageLevel, message: string, source_location: SourceLocation) -> Message {
+	return Message{level = level, message = message, source_location = source_location}
+}
+source_print :: proc(data: ^[]u8, range: TextRange) {
+	pos := TextPosition{1, 1}
+	should_print := false
+	additional_spaces := 0
+	for ch in data {
+		should_print := pos.line >= range.start.line && pos.line <= range.end.line
+		if should_print {
+			if pos.column == 1 {
+				fmt.printf("\x1B[90m % 4d | \x1B[0m", pos.line)
+			}
+			if range.start.line == pos.line && range.start.column - 1 == pos.column {
+				fmt.printf("\x1B[95m")
+			}
+			if ch == '\t' {
+				additional_spaces += 1
+				fmt.printf(" ")
+			} else {
+				fmt.printf("%c", ch)
+			}
+			if pos == range.end {
+				fmt.printf("\x1B[0m")
+			}
+		}
+		pos.column += 1
+		if ch == '\n' {
+			pos.column = 1
+			pos.line += 1
+		}
+	}
+	fmt.printf(" ")
+	for _ in 0 ..< range.start.column + u64(additional_spaces) {
+		fmt.printf(" ")
+	}
+	fmt.printf("\x1B[95m")
+	for _ in 0 ..= range.end.column - range.start.column {
+		fmt.printf("^")
+	}
+	fmt.println(" Here \x1B[0m")
+}
+message_print :: proc(message: ^Message, source_code: ^[]u8) {
+	fmt.printf("\x1b[1m")
+	if message.level == .FIXME {
+		fmt.printf("\x1B[94mFIXME\x1B[0m \x1b[37mat ")
+	} else if message.level == .Warning {
+		fmt.printf("\x1B[93mWarning\x1B[0m \x1b[37mat ")
+	} else if message.level == .Error {
+		fmt.printf("\x1B[91mError\x1B[0m \x1b[37mat ")
+	} else if message.level == .Fatal {
+		fmt.printf("\x1B[91;1mError\x1B[0m \x1b[37mat ")
+	}
+	fmt.printf(
+		"\x1B[32m%s\x1b[90m:\x1B[36m{}:{}\x1B[0m->\x1B[36m{}:{}\x1B[0m: ",
+		message.source_location.file,
+		message.source_location.range.start.line,
+		message.source_location.range.start.column,
+		message.source_location.range.end.line,
+		message.source_location.range.end.column,
+	)
+	fmt.println(message.message)
+	source_print(source_code, message.source_location.range)
+}
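
As a rough usage sketch (not part of this commit: the location values, the "testmin.cat" string, and the source_bytes buffer are made up for illustration), the new API is driven the same way the compiler driver does it:

	loc := SourceLocation{TextRange{TextPosition{1, 1}, TextPosition{1, 4}}, "testmin.cat"}
	append(&g_message_list, message_create(.Warning, "example diagnostic", loc))
	for &msg in g_message_list {
		message_print(&msg, &source_bytes) // source_bytes: []u8 holding the file contents
	}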

View File

@@ -5,6 +5,7 @@ import "core:fmt"
 import "core:math"
 Lexer :: struct {
+	file_name: string,
 	data: ^[dynamic]u8,
 	read_position: u64,
 	position: TextPosition,
@@ -13,9 +14,10 @@ Lexer :: struct {
 	should_return_semicolon: bool,
 }
-lexer_create :: proc(data: ^[dynamic]u8) -> ^Lexer {
+lexer_create :: proc(data: ^[dynamic]u8, file_name: string) -> ^Lexer {
 	lexer := new(Lexer)
 	lexer^ = {
+		file_name = file_name,
 		data = data,
 		read_position = 0,
 		position = TextPosition{line = 1, column = 1},
@@ -100,15 +102,15 @@ lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) {
 	if lexer.should_return_semicolon {
 		lexer.should_return_semicolon = false
-		return token_create(.Semicolon, TextRange{start = lexer.position, end = lexer.position})
+		return token_create(
+			.Semicolon,
+			SourceLocation{TextRange{start = lexer.position, end = lexer.position}, lexer.file_name},
+		)
 	}
 	defer lexer.last_token_kind = ret.kind
-	crange := TextRange {
-		start = lexer.position,
-		end = lexer.position,
-	}
+	crange := SourceLocation{TextRange{start = lexer.position, end = lexer.position}, lexer.file_name}
 	ret = token_create(.Invalid, crange)
 	should_advance := true
@@ -118,18 +120,18 @@ lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) {
 		ret = token_create(.Add, crange)
 		if lexer.next == '+' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.Increment, crange)
 		}
 	case '-':
 		ret = token_create(.Subtract, crange)
 		if lexer.next == '-' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.Decrement, crange)
 		} else if lexer.next == '>' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.Arrow, crange)
 		}
 	case '*':
@@ -144,36 +146,36 @@ lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) {
 		ret = token_create(.Assign, crange)
 		if lexer.next == '=' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.Equals, crange)
 		}
 	case '!':
 		ret = token_create(.Not, crange)
 		if lexer.next == '=' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.NotEquals, crange)
 		}
 	case '<':
 		ret = token_create(.LessThan, crange)
 		if lexer.next == '=' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.LessThanOrEqual, crange)
 		} else if lexer.next == '<' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.BitwiseLeftShift, crange)
 		}
 	case '>':
 		ret = token_create(.GreaterThan, crange)
 		if lexer.next == '=' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.GreaterThanOrEqual, crange)
 		} else if lexer.next == '>' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.BitwiseRightShift, crange)
 		}
 	case '&':
@@ -236,10 +238,7 @@ lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) {
 @(private = "file")
 lexer_read_string :: proc(lexer: ^Lexer, kind: TokenKind, outer: u8) -> Token {
-	crange := TextRange {
-		start = lexer.position,
-		end = lexer.position,
-	}
+	crange := SourceLocation{TextRange{start = lexer.position, end = lexer.position}, lexer.file_name}
 	lexer_advance(lexer)
@@ -268,7 +267,7 @@ lexer_read_string :: proc(lexer: ^Lexer, kind: TokenKind, outer: u8) -> Token {
 				message_create(
 					.Warning,
 					fmt.aprintf("Invalid string/character escape: %c at %s", lexer.char, "TODO LOCATION"),
-					range,
+					SourceLocation{range, lexer.file_name},
 				),
 			)
 		}
@@ -279,21 +278,19 @@ lexer_read_string :: proc(lexer: ^Lexer, kind: TokenKind, outer: u8) -> Token {
 		lexer_advance(lexer)
 	}
-	crange.end = lexer.position
+	crange.range.end = lexer.position
 	return token_create_u8(kind, str, crange)
 }
 @(private = "file")
 lexer_read_identifier :: proc(lexer: ^Lexer) -> Token {
-	crange := TextRange {
-		start = lexer.position,
-	}
+	crange := SourceLocation{TextRange{start = lexer.position}, lexer.file_name}
 	str: [dynamic]u8
 	for libc.isalnum(i32(lexer.char)) != 0 || lexer.char == '_' {
 		append(&str, lexer.char)
-		crange.end = lexer.position
+		crange.range.end = lexer.position
 		lexer_advance(lexer)
 	}
@@ -421,7 +418,11 @@ lexer_read_number :: proc(lexer: ^Lexer) -> Token {
 		} else if read_mode == .Hex {
 			append(
 				&g_message_list,
-				message_create(.Error, "Hexadecimal floating point numbers are not supported yet", crange),
+				message_create(
+					.Error,
+					"Hexadecimal floating point numbers are not supported yet",
+					SourceLocation{crange, lexer.file_name},
+				),
 			)
 			lowered := libc.tolower(i32(lexer.char))
 			for libc.isxdigit(lowered) != 0 && lexer.char > 0 {
@@ -437,7 +438,11 @@ lexer_read_number :: proc(lexer: ^Lexer) -> Token {
 		} else if read_mode == .Binary {
 			append(
 				&g_message_list,
-				message_create(.Error, "Binary floating point numbers are not supported yet", crange),
+				message_create(
+					.Error,
+					"Binary floating point numbers are not supported yet",
+					SourceLocation{crange, lexer.file_name},
+				),
 			)
 			for lexer.char == '0' || lexer.char == '1' {
 				fractional_part = (fractional_part << 1) + u64(lexer.char) - '0'
@@ -456,8 +461,8 @@ lexer_read_number :: proc(lexer: ^Lexer) -> Token {
 		floating: f64 = 0
 		floating = f64(fractional_part) / math.pow_f64(10, f64(count)) + f64(whole_part)
-		return token_create_f64(.Float, floating, crange)
+		return token_create_f64(.Float, floating, SourceLocation{crange, lexer.file_name})
 	}
-	return token_create_u64(.Integer, whole_part, crange)
+	return token_create_u64(.Integer, whole_part, SourceLocation{crange, lexer.file_name})
 }

View File

@@ -5,13 +5,15 @@ import "core:os"
 main :: proc() {
 	handle: os.Handle
+	file_name := "<stdin>"
 	if len(os.args) >= 2 {
 		errno: os.Errno
 		handle, errno = os.open(os.args[1])
 		if errno != 0 {
 			fmt.printf("Error opening file\n", errno)
 			return
 		}
+		file_name = os.args[1]
 	} else {
 		handle = os.stdin
 	}
@@ -23,37 +25,51 @@ main :: proc() {
 		return
 	}
-	u8_arr : [dynamic]u8
+	u8_arr: [dynamic]u8
 	for ch in data {
 		append(&u8_arr, u8(ch))
 	}
-	lexer := lexer_create(&u8_arr)
+	lexer := lexer_create(&u8_arr, file_name)
 	parser := parser_create(lexer)
 	ast := parser_parse(&parser)
 	if len(g_message_list) > 0 {
-		for msg in g_message_list {
-			fmt.printf("%s\n", msg)
+		contains_errors := false
+		for &msg in g_message_list {
+			message_print(&msg, &data)
+			if msg.level == .Error || msg.level == .Fatal {
+				contains_errors = true
+			}
+			//fmt.printf("%s\n", msg)
+		}
+		if contains_errors {
+			return
 		}
-		return
 	}
 	fmt.println("After parse:")
-	node_print(ast)
+	//node_print(ast)
 	clear(&g_message_list)
 	type_check(ast, nil)
 	fmt.println("After type check:")
-	node_print(ast)
+	//node_print(ast)
 	if len(g_message_list) > 0 {
-		for msg in g_message_list {
-			fmt.printf("%s\n", msg)
+		contains_errors := false
+		for &msg in g_message_list {
+			message_print(&msg, &data)
+			if msg.level == .Error || msg.level == .Fatal {
+				contains_errors = true
+			}
+			//fmt.printf("%s\n", msg)
+		}
+		if contains_errors {
+			return
 		}
-		return
 	}
-	node_print(ast)
-	name : string
+	//node_print(ast)
+	name: string
 	if handle == os.stdin {
 		name = "stdin"
 	} else {

View File

@@ -112,7 +112,7 @@ parser_parse_statement :: proc(parser: ^Parser) -> (ret: ^Node) {
 		expect(parser, .Semicolon)
 	}
 	if ret != nil {
-		ret.range.start = range_beg.start
+		ret.range.range.start = range_beg.range.start
 	}
 	return
 }
@@ -395,7 +395,7 @@ parser_parse_binary_expression :: proc(
 			parser.can_be_function = false
 			rhs := next(parser)
 			lhs = node_create_binary(kind, lhs.range, lhs, rhs)
-			lhs^.range.end = rhs.range.end
+			lhs^.range.range.end = rhs.range.range.end
 			parser.can_be_function = prev_can_be_function
 		}
 		i += 1
@@ -481,23 +481,23 @@ parser_parse_prefix_2 :: proc(parser: ^Parser) -> ^Node {
 	range := parser.tok.range
 	if accept(parser, .Not) {
 		rhs := parser_parse_suffix(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		return node_create_unary(.Not, range, rhs)
 	} else if accept(parser, .BitwiseNot) {
 		rhs := parser_parse_suffix(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		return node_create_unary(.BitwiseNot, range, rhs)
 	} else if accept(parser, .Increment) {
 		rhs := parser_parse_suffix(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		return node_create_unary(.Increment, range, rhs)
 	} else if accept(parser, .Decrement) {
 		rhs := parser_parse_suffix(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		return node_create_unary(.Decrement, range, rhs)
 	} else if accept(parser, .BitwiseXOR) {
 		rhs := parser_parse_suffix(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		return node_create_unary(.BitwiseXOR, range, rhs)
 	}
 	return parser_parse_suffix(parser)
@@ -508,10 +508,10 @@ parser_parse_suffix :: proc(parser: ^Parser) -> ^Node {
 	range := parser.tok.range
 	lhs := parser_parse_prefix(parser)
 	range_op := parser.tok.range
-	range.end = range_op.end
+	range.range.end = range_op.range.end
 	if accept(parser, .OpenBracket) {
 		rhs := parser_parse_expression(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		expect(parser, .CloseBracket)
 		return node_create_index_access(range, lhs, rhs)
 	} else if accept(parser, .Increment) {
@@ -520,11 +520,11 @@ parser_parse_suffix :: proc(parser: ^Parser) -> ^Node {
 		return node_create_unary(.Decrement, range, lhs)
 	} else if accept(parser, .As) {
 		type := parser_parse_type(parser)
-		range.end = type.range.end
+		range.range.end = type.range.range.end
 		return node_create_cast(range, lhs, type)
 	} else if accept(parser, .BitwiseAs) {
 		type := parser_parse_type(parser)
-		range.end = type.range.end
+		range.range.end = type.range.range.end
 		return node_create_bitwise_cast(range, lhs, type)
 	}
 	return lhs
@@ -574,7 +574,7 @@ parser_parse_factor :: proc(parser: ^Parser) -> (ret: ^Node) {
 		expect(parser, .OpenBrace)
 		args := parser_parse_expression_list(parser)
-		range.end = parser.tok.range.end
+		range.range.end = parser.tok.range.range.end
 		expect(parser, .CloseBrace)
 		ret = node_create_struct_initializer(range, name, args)
 	} else if parser.tok.kind == .Integer {
@@ -595,7 +595,11 @@ parser_parse_factor :: proc(parser: ^Parser) -> (ret: ^Node) {
 		prev := parser.can_be_function
 		parser.can_be_function = false
 		if accept(parser, .Dot) {
-			ret = node_create_field_access({ret.range.start, parser.tok.range.start}, ret, parser_parse_factor(parser))
+			ret = node_create_field_access(
+				SourceLocation{{ret.range.range.start, parser.tok.range.range.start}, ret.range.file},
+				ret,
+				parser_parse_factor(parser),
+			)
 		}
 		parser.can_be_function = prev
 		if parser.can_be_function &&

View File

@@ -88,13 +88,18 @@ TokenKind :: enum {
 }
 TextPosition :: struct {
 	line: u64,
 	column: u64,
 }
 TextRange :: struct {
 	start: TextPosition,
 	end: TextPosition,
+}
+SourceLocation :: struct {
+	range: TextRange,
+	file: string,
 }
 TokenValue :: union {
@@ -106,42 +111,23 @@ TokenValue :: union {
 }
 Token :: struct {
 	kind: TokenKind,
 	value: TokenValue,
-	range: TextRange,
+	range: SourceLocation,
 }
-token_create :: proc(kind: TokenKind, range: TextRange) -> Token {
-	return {
-		kind = kind,
-		value = nil,
-		range = range,
-	};
+token_create :: proc(kind: TokenKind, range: SourceLocation) -> Token {
+	return {kind = kind, value = nil, range = range}
 }
-token_create_u8 :: proc(kind: TokenKind, text: [dynamic]u8, range: TextRange) -> Token {
-	return {
-		kind = kind,
-		value = text,
-		range = range,
-	};
+token_create_u8 :: proc(kind: TokenKind, text: [dynamic]u8, range: SourceLocation) -> Token {
+	return {kind = kind, value = text, range = range}
 }
-token_create_u64 :: proc(kind: TokenKind, value: u64, range: TextRange) -> Token {
-	return {
-		kind = kind,
-		value = value,
-		range = range,
-	};
+token_create_u64 :: proc(kind: TokenKind, value: u64, range: SourceLocation) -> Token {
+	return {kind = kind, value = value, range = range}
 }
-token_create_f64 :: proc(kind: TokenKind, value: f64, range: TextRange) -> Token {
-	return {
-		kind = kind,
-		value = value,
-		range = range,
-	};
+token_create_f64 :: proc(kind: TokenKind, value: f64, range: SourceLocation) -> Token {
	return {kind = kind, value = value, range = range}
 }

View File

@@ -1,5 +1,7 @@
 package main
+import "core:fmt"
 TypeKind :: enum {
 	Integer,
 	Float,
@@ -22,6 +24,25 @@ Type :: struct {
 	struct_type: ^StructType,
 }
+type_to_string :: proc(type: ^Type) -> string {
+	if type.kind == .Integer {
+		if type.is_signed {
+			return fmt.aprintf("i{}", type.bit_size)
+		} else {
+			return fmt.aprintf("u{}", type.bit_size)
+		}
+	} else if type.kind == .Float {
+		return fmt.aprintf("f{}", type.bit_size)
+	} else if type.kind == .Pointer {
+		return fmt.aprintf("^{}", type_to_string(type.pointer_to))
+	} else if type.kind == .Array {
+		return fmt.aprintf("[{}]{}", type.array_size, type_to_string(type.array_of))
+	} else if type.kind == .Struct {
+		return fmt.aprintf("Struct`%s`", type.struct_type.name)
+	}
+	return "???"
+}
 FunctionType :: struct {
 	name: [dynamic]u8,
 	return_type: ^Type,
@@ -31,6 +52,11 @@ FunctionType :: struct {
 compare_types :: proc(a: ^Type, b: ^Type) -> (ret: bool, cast_required: bool) {
 	cast_required = false
+	if (a == nil && b != nil) || (a != nil && b == nil) {
+		ret = false
+		return
+	}
 	if (a.kind == .Integer || a.kind == .Float) && (a.bit_size > b.bit_size) {
 		ret = true
 		cast_required = true

View File

@@ -232,20 +232,93 @@ type_check :: proc(ast: ^Node, parent_ast: ^Node) {
 			type_check(child, ast)
 		}
 		scope_leave()
-	case .FunctionCall:
-		if ast.children[0].kind == .FieldAccess {
-			type_check(ast.children[0], ast)
-			if ast.children[0].return_type == nil {
-				append(
-					&g_message_list,
-					message_create(.Error, fmt.aprintf("Field access return type is nil"), ast.children[0].range),
-				)
-				break
-			}
-			lhs := ast.children[0].children[0]
-			rhs := ast.children[0].children[1]
+	case .FieldAccess:
+		lhs := ast.children[0]
+		rhs := ast.children[1]
+		// FIXME: Add support for nesting
+		if lhs.kind != .Identifier {
+			append(
+				&g_message_list,
+				message_create(.Error, fmt.aprintf("Field access lhs is not an identifier"), lhs.range),
+			)
+			break
+		}
+		if rhs.kind != .Identifier {
+			append(
+				&g_message_list,
+				message_create(.Error, fmt.aprintf("Field access rhs is not an identifier"), rhs.range),
+			)
+			break
+		}
+		struct_var := scope_variable_lookup(lhs.value.([dynamic]u8))
+		if struct_var == nil {
+			append(
+				&g_message_list,
+				message_create(
+					.Error,
+					fmt.aprintf("Cannot find struct of name: `%s`", lhs.value.([dynamic]u8)),
+					rhs.range,
+				),
+			)
+			break
+		}
+		struct_ := scope_struct_lookup(struct_var.struct_type.name)
+		if struct_ == nil {
+			append(
+				&g_message_list,
+				message_create(
+					.Error,
+					fmt.aprintf("Cannot find struct of type name: `%s`", lhs.value.([dynamic]u8)),
+					rhs.range,
+				),
+			)
+			break
+		}
+		found_field := false
+		for &field in struct_.fields {
+			if compare_dyn_arrs(&field.name, &rhs.value.([dynamic]u8)) {
+				ast.return_type = field.type
+				found_field = true
+				break
+			}
+		}
+		if !found_field {
+			append(
+				&g_message_list,
+				message_create(
+					.Error,
+					fmt.aprintf("Cannot find field of name: `%s`", rhs.value.([dynamic]u8)),
+					rhs.range,
+				),
+			)
+			break
+		}
+	case .FunctionCall:
+		if ast.children[0].kind == .FieldAccess {
+			// FIXME: This is some temporary shitfuckery, check if a function is part
+			// of a struct or namespace first, then do this shit
+			type_check(ast.children[0], ast)
+			child := ast.children[0]^
+			free(ast.children[0])
+			clear(&ast.children)
+			ast^ = child
+			return
+			//if ast.children[0].return_type == nil {
+			//	append(
+			//		&g_message_list,
+			//		message_create(.Error, fmt.aprintf("Field access return type is nil"), ast.children[0].range),
+			//	)
+			//	break
+			//}
+			//lhs := ast.children[0].children[0]
+			//rhs := ast.children[0].children[1]
 		}
 		type := scope_variable_lookup(ast.children[0].value.([dynamic]u8))
@@ -336,7 +409,7 @@ type_check :: proc(ast: ^Node, parent_ast: ^Node) {
 				&g_message_list,
 				message_create(
 					.Error,
-					fmt.aprintf("Type mismatch: {} and {}", ast.children[0].return_type, ast.children[1].return_type),
+					fmt.aprintf(
+						"Type mismatch: {} and {}",
+						type_to_string(ast.children[0].return_type),
+						type_to_string(ast.children[1].return_type),
+					),
 					ast.range,
 				),
 			)
@@ -365,7 +442,6 @@ type_check :: proc(ast: ^Node, parent_ast: ^Node) {
 			ast.value_token_kind == .LessThanOrEqual {
 			ast.return_type = type_create_integer(1, true)
 		}
-		// FIXME: Verify that the operation is possible
 	case .UnaryExpression:
 		// FIXME: Verify that the operation is possible
@@ -390,7 +466,11 @@ type_check :: proc(ast: ^Node, parent_ast: ^Node) {
 				&g_message_list,
 				message_create(
 					.Error,
-					fmt.aprintf("Type mismatch: {} and {}", function_return_type, ast.children[0].return_type),
+					fmt.aprintf(
+						"Type mismatch: {} and {}",
+						type_to_string(function_return_type),
+						type_to_string(ast.children[0].return_type),
+					),
 					ast.range,
 				),
 			)
@@ -399,19 +479,27 @@ type_check :: proc(ast: ^Node, parent_ast: ^Node) {
 	case .Cast:
 		type_check(ast.children[0], ast)
 		type_to := ast_to_type(ast.children[1])
-		append(
-			&g_message_list,
-			message_create(
-				.Warning,
-				fmt.aprintf("Cast to type not checked: {}", ast.children[1].value.([dynamic]u8)),
-				ast.children[1].range,
-			),
-		)
-		// FIXME: Check if compatible
+		if ast.children[0].return_type.kind == .Struct || type_to.kind == .Struct {
+			append(&g_message_list, message_create(.Error, "Cannot cast to/from Struct type.", ast.range))
+		} else {
+			// FIXME: Check if compatible
+			append(
+				&g_message_list,
+				message_create(
+					.FIXME,
+					fmt.aprintf("Cast to type not checked: %s.", ast.children[1].value.([dynamic]u8)),
+					ast.range,
+				),
+			)
+		}
 		ast.return_type = type_to
 	case .BitwiseCast:
 		type_check(ast.children[0], ast)
 		// FIXME: Check if they are both the same bit size
+		append(
+			&g_message_list,
+			message_create(.FIXME, fmt.aprintf("BitwiseCast bit size check not implemented."), ast.range),
+		)
 		ast.return_type = ast_to_type(ast.children[1])
 	case .VariableDeclaration:
 		if ast.children[2] != nil {

View File

@@ -31,3 +31,18 @@ get_character_sum_of_dyn_arr :: proc(a: ^[dynamic]u8) -> int {
 	}
 	return sum
 }
+digit_count := proc(a: u64) -> (r: u64) {
+	a_ := a
+	if a == 0 {
+		return 1
+	}
+	r = 0
+	for a_ != 0 {
+		a_ /= 10
+		r += 1
+	}
+	return
+}

View File

@@ -59,7 +59,7 @@ fn WindowShouldClose i32
 1 << 3 | 2
 fn ColorToRaylib(c: Color) u32 {
-	ret c.a as u32 << 24 | c.b as u32 << 16 | c.g as u32 << 8 | c.r
+	ret c.a as u32 << 24 as u32 | c.b as u32 << 16 as u32 | c.g as u32 << 8 as u32 | c.r
 }
 fn ClearBackgroundWrap(c: Color) {

testmin.cat (new file)
View File

@@ -0,0 +1,8 @@
+struct A {
+	a b: i32,
+}
+let inst := .A{ 1 2 }
+inst.a as u32