diff --git a/src/ast.odin b/src/ast.odin
index 4604658..d9944bd 100644
--- a/src/ast.odin
+++ b/src/ast.odin
@@ -49,14 +49,14 @@ NodeKind :: enum {
 
 Node :: struct {
 	kind:             NodeKind,
-	range:            TextRange,
+	range:            SourceLocation,
 	children:         [dynamic]^Node,
 	value:            TokenValue,
 	value_token_kind: TokenKind,
 	return_type:      ^Type,
 }
 
-node_create_value :: proc(kind: NodeKind, range: TextRange, value: TokenValue) -> (ret: ^Node) {
+node_create_value :: proc(kind: NodeKind, range: SourceLocation, value: TokenValue) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = kind,
@@ -66,7 +66,7 @@ node_create_value :: proc(kind: NodeKind, range: TextRange, value: TokenValue) -
 	return
 }
 
-node_create_pointer :: proc(range: TextRange, value: ^Node) -> (ret: ^Node) {
+node_create_pointer :: proc(range: SourceLocation, value: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .Pointer,
@@ -76,7 +76,7 @@ node_create_pointer :: proc(range: TextRange, value: ^Node) -> (ret: ^Node) {
 	return
 }
 
-node_create_array :: proc(range: TextRange, size: u64, value: ^Node) -> (ret: ^Node) {
+node_create_array :: proc(range: SourceLocation, size: u64, value: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .Array,
@@ -87,7 +87,7 @@ node_create_array :: proc(range: TextRange, size: u64, value: ^Node) -> (ret: ^N
 	return
 }
 
-node_create_block :: proc(range: TextRange, children: [dynamic]^Node) -> (ret: ^Node) {
+node_create_block :: proc(range: SourceLocation, children: [dynamic]^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .Block,
@@ -97,7 +97,7 @@ node_create_block :: proc(range: TextRange, children: [dynamic]^Node) -> (ret: ^
 	return
 }
 
-node_create_binary :: proc(kind: TokenKind, range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
+node_create_binary :: proc(kind: TokenKind, range: SourceLocation, left: ^Node, right: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .BinaryExpression,
@@ -108,7 +108,7 @@ node_create_binary :: proc(kind: TokenKind, range: TextRange, left: ^Node, right
 	return
 }
 
-node_create_unary :: proc(kind: TokenKind, range: TextRange, operand: ^Node) -> (ret: ^Node) {
+node_create_unary :: proc(kind: TokenKind, range: SourceLocation, operand: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .UnaryExpression,
@@ -119,7 +119,7 @@ node_create_unary :: proc(kind: TokenKind, range: TextRange, operand: ^Node) ->
 	return
 }
 
-node_create_field_access :: proc(range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
+node_create_field_access :: proc(range: SourceLocation, left: ^Node, right: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .FieldAccess,
@@ -129,7 +129,7 @@ node_create_field_access :: proc(range: TextRange, left: ^Node, right: ^Node) ->
 	return
 }
 
-node_create_index_access :: proc(range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
+node_create_index_access :: proc(range: SourceLocation, left: ^Node, right: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .IndexAccess,
@@ -139,7 +139,7 @@ node_create_index_access :: proc(range: TextRange, left: ^Node, right: ^Node) ->
 	return
 }
 
-node_create_function_call :: proc(range: TextRange, name: ^Node, args: [dynamic]^Node) -> (ret: ^Node) {
+node_create_function_call :: proc(range: SourceLocation, name: ^Node, args: [dynamic]^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .FunctionCall,
@@ -153,7 +153,7 @@ node_create_function_call :: proc(range: TextRange, name: ^Node, args: [dynamic]
 }
 
 node_create_extern_function :: proc(
-	range: TextRange,
+	range: SourceLocation,
 	name: [dynamic]u8,
 	return_type: ^Node,
 	args: [dynamic]^Node,
@@ -174,7 +174,7 @@ node_create_extern_function :: proc(
 }
 
 node_create_function :: proc(
-	range: TextRange,
+	range: SourceLocation,
 	name: [dynamic]u8,
 	return_type, body: ^Node,
 	args: [dynamic]^Node,
@@ -264,7 +264,7 @@ parse_use_path2 :: proc(path: [dynamic]u8) -> (ret: [dynamic]u8) {
 	return
 }
 
-node_create_use :: proc(range: TextRange, path, alias: [dynamic]u8) -> (ret: ^Node) {
+node_create_use :: proc(range: SourceLocation, path, alias: [dynamic]u8) -> (ret: ^Node) {
 	path_ := path
 	// Check if the path ends with ".cat", if not, append it.
 	path_ = parse_use_path(path)
@@ -288,7 +288,7 @@ node_create_use :: proc(range: TextRange, path, alias: [dynamic]u8) -> (ret: ^No
 	return
 }
 
-node_create_if :: proc(range: TextRange, condition, then, else_: ^Node) -> (ret: ^Node) {
+node_create_if :: proc(range: SourceLocation, condition, then, else_: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .If,
@@ -305,7 +305,7 @@ node_create_if :: proc(range: TextRange, condition, then, else_: ^Node) -> (ret:
 	return
 }
 
-node_create_for :: proc(range: TextRange, init, condition, step, body: ^Node) -> (ret: ^Node) {
+node_create_for :: proc(range: SourceLocation, init, condition, step, body: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .For,
@@ -315,7 +315,7 @@ node_create_for :: proc(range: TextRange, init, condition, step, body: ^Node) ->
 	return
 }
 
-node_create_variable :: proc(range: TextRange, name, type_, value: ^Node, is_const: bool) -> (ret: ^Node) {
+node_create_variable :: proc(range: SourceLocation, name, type_, value: ^Node, is_const: bool) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .VariableDeclaration,
@@ -326,7 +326,7 @@ node_create_variable :: proc(range: TextRange, name, type_, value: ^Node, is_con
 	return
 }
 
-node_create_cast :: proc(range: TextRange, value, type_: ^Node) -> (ret: ^Node) {
+node_create_cast :: proc(range: SourceLocation, value, type_: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .Cast,
@@ -336,7 +336,7 @@ node_create_cast :: proc(range: TextRange, value, type_: ^Node) -> (ret: ^Node)
 	return
 }
 
-node_create_bitwise_cast :: proc(range: TextRange, value, type_: ^Node) -> (ret: ^Node) {
+node_create_bitwise_cast :: proc(range: SourceLocation, value, type_: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .BitwiseCast,
@@ -346,7 +346,7 @@ node_create_bitwise_cast :: proc(range: TextRange, value, type_: ^Node) -> (ret:
 	return
 }
 
-node_create_ret :: proc(range: TextRange, value: ^Node) -> (ret: ^Node) {
+node_create_ret :: proc(range: SourceLocation, value: ^Node) -> (ret: ^Node) {
 	ret = new(Node)
 	ret^ = {
 		kind = .Ret,
@@ -356,7 +356,13 @@ node_create_ret :: proc(range: TextRange, value: ^Node) -> (ret: ^Node) {
 	return
 }
 
-node_create_struct_initializer :: proc(range: TextRange, name: [dynamic]u8, fields: [dynamic]^Node) -> (ret: ^Node) {
+node_create_struct_initializer :: proc(
+	range: SourceLocation,
+	name: [dynamic]u8,
+	fields: [dynamic]^Node,
+) -> (
+	ret: ^Node,
+) {
 	ret = new(Node)
 	if ret == nil {
 		panic("Failed to allocate node")
@@ -374,7 +380,7 @@ node_create_struct_initializer :: proc(range: TextRange, name: [dynamic]u8, fiel
 }
 
 node_create_struct_enum_or_union :: proc(
-	range: TextRange,
+	range: SourceLocation,
 	kind: NodeKind,
 	name: [dynamic]u8,
 	fields: [dynamic]^Node,
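Note on the ast.odin hunks above: they are mechanical, every node_create_* constructor now takes a SourceLocation instead of a bare TextRange. A small wrapper like the one below could cut down the churn at call sites; it is hypothetical, not part of this change, and assumes the TextRange and SourceLocation definitions introduced in tokens.odin:

    // Hypothetical helper: bundle a TextRange with the file it came from.
    location_from_range :: proc(range: TextRange, file: string) -> SourceLocation {
        return SourceLocation{range = range, file = file}
    }

    // Usage sketch: node := node_create_ret(location_from_range(tok_range, lexer.file_name), value)
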
diff --git a/src/error.odin b/src/error.odin
index 5356471..a2dfec7 100644
--- a/src/error.odin
+++ b/src/error.odin
@@ -1,24 +1,93 @@
 package main
 
+import "core:fmt"
+
 MessageLevel :: enum {
+	FIXME,
 	Warning,
 	Error,
 	Fatal,
 }
 
+// FIXME: Add file
 Message :: struct {
-	level:   MessageLevel,
-	message: string,
-	range:   TextRange,
+	level:           MessageLevel,
+	message:         string,
+	source_location: SourceLocation,
 }
 
-g_message_list : [dynamic]Message
+g_message_list: [dynamic]Message
 
-message_create :: proc(level: MessageLevel, message: string, range: TextRange) -> Message {
-	return Message {
-		level = level,
-		message = message,
-		range = range,
+message_create :: proc(level: MessageLevel, message: string, source_location: SourceLocation) -> Message {
+	return Message{level = level, message = message, source_location = source_location}
+}
+
+source_print :: proc(data: ^[]u8, range: TextRange) {
+	pos := TextPosition{1, 1}
+	should_print := false
+
+	additional_spaces := 0
+	for ch in data {
+		should_print = pos.line >= range.start.line && pos.line <= range.end.line
+		if should_print {
+			if pos.column == 1 {
+				fmt.printf("\x1B[90m % 4d | \x1B[0m", pos.line)
+			}
+			if range.start.line == pos.line && range.start.column - 1 == pos.column {
+				fmt.printf("\x1B[95m")
+			}
+			if ch == '\t' {
+				additional_spaces += 1
+				fmt.printf("  ")
+			} else {
+				fmt.printf("%c", ch)
+			}
+			if pos == range.end {
+				fmt.printf("\x1B[0m")
+			}
+		}
+
+		pos.column += 1
+		if ch == '\n' {
+			pos.column = 1
+			pos.line += 1
+		}
+	}
+
+	fmt.printf("        ")
+	for _ in 0 ..< range.start.column + u64(additional_spaces) {
+		fmt.printf(" ")
+	}
+
+	fmt.printf("\x1B[95m")
+	for _ in 0 ..= range.end.column - range.start.column {
+		fmt.printf("^")
+	}
+	fmt.println(" Here \x1B[0m")
 }
 
+message_print :: proc(message: ^Message, source_code: ^[]u8) {
+	fmt.printf("\x1b[1m")
+	if message.level == .FIXME {
+		fmt.printf("\x1B[94mFIXME\x1B[0m \x1b[37mat ")
+	} else if message.level == .Warning {
+		fmt.printf("\x1B[93mWarning\x1B[0m \x1b[37mat ")
+	} else if message.level == .Error {
+		fmt.printf("\x1B[91mError\x1B[0m \x1b[37mat ")
+	} else if message.level == .Fatal {
+		fmt.printf("\x1B[91;1mError\x1B[0m \x1b[37mat ")
+	}
+
+	fmt.printf(
+		"\x1B[32m%s\x1b[90m:\x1B[36m{}:{}\x1B[0m->\x1B[36m{}:{}\x1B[0m: ",
+		message.source_location.file,
+		message.source_location.range.start.line,
+		message.source_location.range.start.column,
+		message.source_location.range.end.line,
+		message.source_location.range.end.column,
+	)
+
+	fmt.println(message.message)
+
+	source_print(source_code, message.source_location.range)
+}
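The diagnostics pipeline in error.odin works in two steps: callers append a Message carrying a SourceLocation, and message_print later renders the coloured header plus the highlighted source excerpt. A minimal sketch of that flow (the names match the diff; the wiring itself is only illustrative):

    // Append a diagnostic, then flush the whole list against the source buffer.
    report_and_flush :: proc(loc: SourceLocation, source: ^[]u8) {
        append(&g_message_list, message_create(.Error, "example diagnostic", loc))
        for &msg in g_message_list {
            message_print(&msg, source)
        }
    }
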
diff --git a/src/lexer.odin b/src/lexer.odin
index d082b98..76d240d 100644
--- a/src/lexer.odin
+++ b/src/lexer.odin
@@ -5,6 +5,7 @@ import "core:fmt"
 import "core:math"
 
 Lexer :: struct {
+	file_name:               string,
 	data:                    ^[dynamic]u8,
 	read_position:           u64,
 	position:                TextPosition,
@@ -13,9 +14,10 @@ Lexer :: struct {
 	should_return_semicolon: bool,
 }
 
-lexer_create :: proc(data: ^[dynamic]u8) -> ^Lexer {
+lexer_create :: proc(data: ^[dynamic]u8, file_name: string) -> ^Lexer {
 	lexer := new(Lexer)
 	lexer^ = {
+		file_name = file_name,
 		data = data,
 		read_position = 0,
 		position = TextPosition{line = 1, column = 1},
@@ -100,15 +102,15 @@ lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) {
 
 	if lexer.should_return_semicolon {
 		lexer.should_return_semicolon = false
-		return token_create(.Semicolon, TextRange{start = lexer.position, end = lexer.position})
+		return token_create(
+			.Semicolon,
+			SourceLocation{TextRange{start = lexer.position, end = lexer.position}, lexer.file_name},
+		)
 	}
 
 	defer lexer.last_token_kind = ret.kind
 
-	crange := TextRange {
-		start = lexer.position,
-		end = lexer.position,
-	}
+	crange := SourceLocation{TextRange{start = lexer.position, end = lexer.position}, lexer.file_name}
 	ret = token_create(.Invalid, crange)
 
 	should_advance := true
@@ -118,18 +120,18 @@ lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) {
 		ret = token_create(.Add, crange)
 		if lexer.next == '+' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.Increment, crange)
 		}
 	case '-':
 		ret = token_create(.Subtract, crange)
 		if lexer.next == '-' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.Decrement, crange)
 		} else if lexer.next == '>' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.Arrow, crange)
 		}
 	case '*':
@@ -144,36 +146,36 @@ lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) {
 		ret = token_create(.Assign, crange)
 		if lexer.next == '=' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.Equals, crange)
 		}
 	case '!':
 		ret = token_create(.Not, crange)
 		if lexer.next == '=' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.NotEquals, crange)
 		}
 	case '<':
 		ret = token_create(.LessThan, crange)
 		if lexer.next == '=' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.LessThanOrEqual, crange)
 		} else if lexer.next == '<' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.BitwiseLeftShift, crange)
 		}
 	case '>':
 		ret = token_create(.GreaterThan, crange)
 		if lexer.next == '=' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.GreaterThanOrEqual, crange)
 		} else if lexer.next == '>' {
 			lexer_advance(lexer)
-			crange.end = lexer.position
+			crange.range.end = lexer.position
 			ret = token_create(.BitwiseRightShift, crange)
 		}
 	case '&':
@@ -236,10 +238,7 @@ lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) {
 
 @(private = "file")
 lexer_read_string :: proc(lexer: ^Lexer, kind: TokenKind, outer: u8) -> Token {
-	crange := TextRange {
-		start = lexer.position,
-		end = lexer.position,
-	}
+	crange := SourceLocation{TextRange{start = lexer.position, end = lexer.position}, lexer.file_name}
 
 	lexer_advance(lexer)
 
@@ -268,7 +267,7 @@ lexer_read_string :: proc(lexer: ^Lexer, kind: TokenKind, outer: u8) -> Token {
 				message_create(
 					.Warning,
 					fmt.aprintf("Invalid string/character escape: %c at %s", lexer.char, "TODO LOCATION"),
-					range,
+					SourceLocation{range, lexer.file_name},
 				),
 			)
 		}
@@ -279,21 +278,19 @@ lexer_read_string :: proc(lexer: ^Lexer, kind: TokenKind, outer: u8) -> Token {
 		lexer_advance(lexer)
 	}
 
-	crange.end = lexer.position
+	crange.range.end = lexer.position
 
 	return token_create_u8(kind, str, crange)
 }
 
 @(private = "file")
 lexer_read_identifier :: proc(lexer: ^Lexer) -> Token {
-	crange := TextRange {
-		start = lexer.position,
-	}
+	crange := SourceLocation{TextRange{start = lexer.position}, lexer.file_name}
 	str: [dynamic]u8
 
 	for libc.isalnum(i32(lexer.char)) != 0 || lexer.char == '_' {
 		append(&str, lexer.char)
-		crange.end = lexer.position
+		crange.range.end = lexer.position
 		lexer_advance(lexer)
 	}
 
@@ -421,7 +418,11 @@ lexer_read_number :: proc(lexer: ^Lexer) -> Token {
 		} else if read_mode == .Hex {
 			append(
 				&g_message_list,
-				message_create(.Error, "Hexadecimal floating point numbers are not supported yet", crange),
+				message_create(
+					.Error,
+					"Hexadecimal floating point numbers are not supported yet",
+					SourceLocation{crange, lexer.file_name},
+				),
 			)
 			lowered := libc.tolower(i32(lexer.char))
 			for libc.isxdigit(lowered) != 0 && lexer.char > 0 {
@@ -437,7 +438,11 @@ lexer_read_number :: proc(lexer: ^Lexer) -> Token {
 		} else if read_mode == .Binary {
 			append(
 				&g_message_list,
-				message_create(.Error, "Binary floating point numbers are not supported yet", crange),
+				message_create(
+					.Error,
+					"Binary floating point numbers are not supported yet",
+					SourceLocation{crange, lexer.file_name},
+				),
 			)
 			for lexer.char == '0' || lexer.char == '1' {
 				fractional_part = (fractional_part << 1) + u64(lexer.char) - '0'
@@ -456,8 +461,8 @@ lexer_read_number :: proc(lexer: ^Lexer) -> Token {
 		floating: f64 = 0
 		floating = f64(fractional_part) / math.pow_f64(10, f64(count)) + f64(whole_part)
 
-		return token_create_f64(.Float, floating, crange)
+		return token_create_f64(.Float, floating, SourceLocation{crange, lexer.file_name})
 	}
 
-	return token_create_u64(.Integer, whole_part, crange)
+	return token_create_u64(.Integer, whole_part, SourceLocation{crange, lexer.file_name})
 }
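With file_name stored on the Lexer, every token's location now records both the range and the originating file. An illustrative setup mirroring main.odin (the buffer contents and file name here are made up):

    lexer_location_example :: proc() {
        buf: [dynamic]u8
        for ch in "let x := 1" {
            append(&buf, u8(ch))
        }
        lex := lexer_create(&buf, "example.cat")
        tok := lexer_next(lex)
        // tok.range.file is "example.cat"; tok.range.range is the TextRange inside that file.
        _ = tok
    }
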
diff --git a/src/main.odin b/src/main.odin
index a43ff28..511f101 100644
--- a/src/main.odin
+++ b/src/main.odin
@@ -5,13 +5,15 @@ main :: proc() {
 	handle: os.Handle
+	file_name := ""
 	if len(os.args) >= 2 {
 		errno: os.Errno
 		handle, errno = os.open(os.args[1])
 		if errno != 0 {
 			fmt.printf("Error opening file\n", errno)
-			return 
+			return
 		}
+		file_name = os.args[1]
 	} else {
 		handle = os.stdin
 	}
@@ -23,37 +25,51 @@ main :: proc() {
 		return
 	}
 
-	u8_arr : [dynamic]u8
+	u8_arr: [dynamic]u8
 	for ch in data {
 		append(&u8_arr, u8(ch))
 	}
 
-	lexer := lexer_create(&u8_arr)
+	lexer := lexer_create(&u8_arr, file_name)
 	parser := parser_create(lexer)
 	ast := parser_parse(&parser)
 
 	if len(g_message_list) > 0 {
-		for msg in g_message_list {
-			fmt.printf("%s\n", msg)
+		contains_errors := false
+		for &msg in g_message_list {
+			message_print(&msg, &data)
+			if msg.level == .Error || msg.level == .Fatal {
+				contains_errors = true
+			}
+			//fmt.printf("%s\n", msg)
+		}
+		if contains_errors {
+			return
 		}
-		return
 	}
 
 	fmt.println("After parse:")
-	node_print(ast)
+	//node_print(ast)
 
 	clear(&g_message_list)
 	type_check(ast, nil)
 	fmt.println("After type check:")
-	node_print(ast)
+	//node_print(ast)
 
 	if len(g_message_list) > 0 {
-		for msg in g_message_list {
-			fmt.printf("%s\n", msg)
+		contains_errors := false
+		for &msg in g_message_list {
+			message_print(&msg, &data)
+			if msg.level == .Error || msg.level == .Fatal {
+				contains_errors = true
+			}
+			//fmt.printf("%s\n", msg)
+		}
+		if contains_errors {
+			return
 		}
-		return
 	}
 
-	node_print(ast)
+	//node_print(ast)
 
-	name : string
+	name: string
 	if handle == os.stdin {
 		name = "stdin"
 	} else {
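The print-and-bail loop in main now appears twice, once after parsing and once after type checking. A hypothetical helper, not part of this diff, that would keep that behaviour in one place:

    // Print every queued message and report whether any of them should stop compilation.
    drain_messages :: proc(source: ^[]u8) -> (contains_errors: bool) {
        for &msg in g_message_list {
            message_print(&msg, source)
            if msg.level == .Error || msg.level == .Fatal {
                contains_errors = true
            }
        }
        return
    }
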
diff --git a/src/parser.odin b/src/parser.odin
index d4e7c3d..48c7ce6 100644
--- a/src/parser.odin
+++ b/src/parser.odin
@@ -112,7 +112,7 @@ parser_parse_statement :: proc(parser: ^Parser) -> (ret: ^Node) {
 		expect(parser, .Semicolon)
 	}
 	if ret != nil {
-		ret.range.start = range_beg.start
+		ret.range.range.start = range_beg.range.start
 	}
 	return
 }
@@ -395,7 +395,7 @@ parser_parse_binary_expression :: proc(
 			parser.can_be_function = false
 			rhs := next(parser)
 			lhs = node_create_binary(kind, lhs.range, lhs, rhs)
-			lhs^.range.end = rhs.range.end
+			lhs^.range.range.end = rhs.range.range.end
 			parser.can_be_function = prev_can_be_function
 		}
 		i += 1
@@ -481,23 +481,23 @@ parser_parse_prefix_2 :: proc(parser: ^Parser) -> ^Node {
 	range := parser.tok.range
 	if accept(parser, .Not) {
 		rhs := parser_parse_suffix(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		return node_create_unary(.Not, range, rhs)
 	} else if accept(parser, .BitwiseNot) {
 		rhs := parser_parse_suffix(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		return node_create_unary(.BitwiseNot, range, rhs)
 	} else if accept(parser, .Increment) {
 		rhs := parser_parse_suffix(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		return node_create_unary(.Increment, range, rhs)
 	} else if accept(parser, .Decrement) {
 		rhs := parser_parse_suffix(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		return node_create_unary(.Decrement, range, rhs)
 	} else if accept(parser, .BitwiseXOR) {
 		rhs := parser_parse_suffix(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		return node_create_unary(.BitwiseXOR, range, rhs)
 	}
 	return parser_parse_suffix(parser)
@@ -508,10 +508,10 @@ parser_parse_suffix :: proc(parser: ^Parser) -> ^Node {
 	range := parser.tok.range
 	lhs := parser_parse_prefix(parser)
 	range_op := parser.tok.range
-	range.end = range_op.end
+	range.range.end = range_op.range.end
 	if accept(parser, .OpenBracket) {
 		rhs := parser_parse_expression(parser)
-		range.end = rhs.range.end
+		range.range.end = rhs.range.range.end
 		expect(parser, .CloseBracket)
 		return node_create_index_access(range, lhs, rhs)
 	} else if accept(parser, .Increment) {
@@ -520,11 +520,11 @@ parser_parse_suffix :: proc(parser: ^Parser) -> ^Node {
 		return node_create_unary(.Decrement, range, lhs)
 	} else if accept(parser, .As) {
 		type := parser_parse_type(parser)
-		range.end = type.range.end
+		range.range.end = type.range.range.end
 		return node_create_cast(range, lhs, type)
 	} else if accept(parser, .BitwiseAs) {
 		type := parser_parse_type(parser)
-		range.end = type.range.end
+		range.range.end = type.range.range.end
 		return node_create_bitwise_cast(range, lhs, type)
 	}
 	return lhs
@@ -574,7 +574,7 @@ parser_parse_factor :: proc(parser: ^Parser) -> (ret: ^Node) {
 		expect(parser, .OpenBrace)
 		args := parser_parse_expression_list(parser)
 
-		range.end = parser.tok.range.end
+		range.range.end = parser.tok.range.range.end
 		expect(parser, .CloseBrace)
 		ret = node_create_struct_initializer(range, name, args)
 	} else if parser.tok.kind == .Integer {
@@ -595,7 +595,11 @@ parser_parse_factor :: proc(parser: ^Parser) -> (ret: ^Node) {
 	prev := parser.can_be_function
 	parser.can_be_function = false
 	if accept(parser, .Dot) {
-		ret = node_create_field_access({ret.range.start, parser.tok.range.start}, ret, parser_parse_factor(parser))
+		ret = node_create_field_access(
+			SourceLocation{{ret.range.range.start, parser.tok.range.range.start}, ret.range.file},
+			ret,
+			parser_parse_factor(parser),
+		)
 	}
 	parser.can_be_function = prev
 	if parser.can_be_function &&
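Most of the parser hunks just chase the extra level of nesting (range.range.end instead of range.end). A hypothetical helper that would hide that nesting at the call sites shown above:

    // Return a copy of the location, extended to a new end position.
    location_extend :: proc(loc: SourceLocation, end: TextPosition) -> SourceLocation {
        out := loc
        out.range.end = end
        return out
    }

    // e.g. lhs.range = location_extend(lhs.range, rhs.range.range.end)
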
diff --git a/src/tokens.odin b/src/tokens.odin
index 6cdcee3..abda680 100644
--- a/src/tokens.odin
+++ b/src/tokens.odin
@@ -88,13 +88,18 @@ TokenKind :: enum {
 }
 
 TextPosition :: struct {
-	line: u64,
+	line:   u64,
 	column: u64,
 }
 
 TextRange :: struct {
 	start: TextPosition,
-	end: TextPosition,
+	end:   TextPosition,
 }
+
+SourceLocation :: struct {
+	range: TextRange,
+	file:  string,
+}
 
 TokenValue :: union {
@@ -106,42 +111,23 @@
 }
 
 Token :: struct {
-	kind: TokenKind,
+	kind:  TokenKind,
 	value: TokenValue,
-	range: TextRange,
+	range: SourceLocation,
 }
 
-token_create :: proc(kind: TokenKind, range: TextRange) -> Token {
-	return {
-		kind = kind,
-		value = nil,
-		range = range,
-	};
+token_create :: proc(kind: TokenKind, range: SourceLocation) -> Token {
+	return {kind = kind, value = nil, range = range}
 }
 
-token_create_u8 :: proc(kind: TokenKind, text: [dynamic]u8, range: TextRange) -> Token {
-	return {
-		kind = kind,
-		value = text,
-		range = range,
-	};
+token_create_u8 :: proc(kind: TokenKind, text: [dynamic]u8, range: SourceLocation) -> Token {
+	return {kind = kind, value = text, range = range}
 }
 
-token_create_u64 :: proc(kind: TokenKind, value: u64, range: TextRange) -> Token {
-	return {
-		kind = kind,
-		value = value,
-		range = range,
-	};
+token_create_u64 :: proc(kind: TokenKind, value: u64, range: SourceLocation) -> Token {
+	return {kind = kind, value = value, range = range}
 }
 
-token_create_f64 :: proc(kind: TokenKind, value: f64, range: TextRange) -> Token {
-	return {
-		kind = kind,
-		value = value,
-		range = range,
-	};
+token_create_f64 :: proc(kind: TokenKind, value: f64, range: SourceLocation) -> Token {
+	return {kind = kind, value = value, range = range}
 }
-
-
-
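SourceLocation is deliberately a thin wrapper: the TextRange keeps its meaning and the file tag rides along. One operation the parser needs repeatedly is merging two token locations into a single span; a sketch, assuming both locations come from the same file:

    // Span from the start of `a` to the end of `b`; the file is taken from `a`.
    location_merge :: proc(a, b: SourceLocation) -> SourceLocation {
        return SourceLocation{range = TextRange{start = a.range.start, end = b.range.end}, file = a.file}
    }
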
diff --git a/src/type.odin b/src/type.odin
index 6405023..bac4256 100644
--- a/src/type.odin
+++ b/src/type.odin
@@ -1,5 +1,7 @@
 package main
 
+import "core:fmt"
+
 TypeKind :: enum {
 	Integer,
 	Float,
@@ -22,6 +24,25 @@ Type :: struct {
 	struct_type: ^StructType,
 }
 
+type_to_string :: proc(type: ^Type) -> string {
+	if type.kind == .Integer {
+		if type.is_signed {
+			return fmt.aprintf("i{}", type.bit_size)
+		} else {
+			return fmt.aprintf("u{}", type.bit_size)
+		}
+	} else if type.kind == .Float {
+		return fmt.aprintf("f{}", type.bit_size)
+	} else if type.kind == .Pointer {
+		return fmt.aprintf("^{}", type_to_string(type.pointer_to))
+	} else if type.kind == .Array {
+		return fmt.aprintf("[{}]{}", type.array_size, type_to_string(type.array_of))
+	} else if type.kind == .Struct {
+		return fmt.aprintf("Struct`%s`", type.struct_type.name)
+	}
+	return "???"
+}
+
 FunctionType :: struct {
 	name:        [dynamic]u8,
 	return_type: ^Type,
@@ -31,6 +52,11 @@ FunctionType :: struct {
 compare_types :: proc(a: ^Type, b: ^Type) -> (ret: bool, cast_required: bool) {
 	cast_required = false
 
+	if (a == nil && b != nil) || (a != nil && b == nil) {
+		ret = false
+		return
+	}
+
 	if (a.kind == .Integer || a.kind == .Float) && (a.bit_size > b.bit_size) {
 		ret = true
 		cast_required = true
diff --git a/src/type_checker.odin b/src/type_checker.odin
index 97a0644..cd9d187 100644
--- a/src/type_checker.odin
+++ b/src/type_checker.odin
@@ -232,20 +232,93 @@ type_check :: proc(ast: ^Node, parent_ast: ^Node) {
 			type_check(child, ast)
 		}
 		scope_leave()
-	case .FunctionCall:
-		if ast.children[0].kind == .FieldAccess {
-			type_check(ast.children[0], ast)
-			if ast.children[0].return_type == nil {
-				append(
-					&g_message_list,
-					message_create(.Error, fmt.aprintf("Field access return type is nil"), ast.children[0].range),
-				)
+	case .FieldAccess:
+		lhs := ast.children[0]
+		rhs := ast.children[1]
+		// FIXME: Add support for nesting
+		if lhs.kind != .Identifier {
+			append(
+				&g_message_list,
+				message_create(.Error, fmt.aprintf("Field access lhs is not an identifier"), lhs.range),
+			)
+			break
+		}
+		if rhs.kind != .Identifier {
+			append(
+				&g_message_list,
+				message_create(.Error, fmt.aprintf("Field access rhs is not an identifier"), rhs.range),
+			)
+			break
+		}
+
+		struct_var := scope_variable_lookup(lhs.value.([dynamic]u8))
+		if struct_var == nil {
+			append(
+				&g_message_list,
+				message_create(
+					.Error,
+					fmt.aprintf("Cannot find struct of name: `%s`", lhs.value.([dynamic]u8)),
+					rhs.range,
+				),
+			)
+			break
+		}
+
+		struct_ := scope_struct_lookup(struct_var.struct_type.name)
+		if struct_ == nil {
+			append(
+				&g_message_list,
+				message_create(
+					.Error,
+					fmt.aprintf("Cannot find struct of type name: `%s`", lhs.value.([dynamic]u8)),
+					rhs.range,
+				),
+			)
+			break
+		}
+
+		found_field := false
+		for &field in struct_.fields {
+			if compare_dyn_arrs(&field.name, &rhs.value.([dynamic]u8)) {
+				ast.return_type = field.type
+				found_field = true
 				break
 			}
 		}
 
-		lhs := ast.children[0].children[0]
-		rhs := ast.children[0].children[1]
+		if !found_field {
+			append(
+				&g_message_list,
+				message_create(
+					.Error,
+					fmt.aprintf("Cannot find field of name: `%s`", rhs.value.([dynamic]u8)),
+					rhs.range,
+				),
+			)
+			break
+		}
+	case .FunctionCall:
+		if ast.children[0].kind == .FieldAccess {
+			// FIXME: This is some temporary shitfuckery, check if a function is part
+			// of a struct or namespace first, then do this shit
+			type_check(ast.children[0], ast)
+			child := ast.children[0]^
+			free(ast.children[0])
+			clear(&ast.children)
+			ast^ = child
+			return
+
+			//if ast.children[0].return_type == nil {
+			//	append(
+			//		&g_message_list,
+			//		message_create(.Error, fmt.aprintf("Field access return type is nil"), ast.children[0].range),
+			//	)
+			//	break
+			//}
+
+			//lhs := ast.children[0].children[0]
+			//rhs := ast.children[0].children[1]
 		}
 
 		type := scope_variable_lookup(ast.children[0].value.([dynamic]u8))
@@ -336,7 +409,11 @@ type_check :: proc(ast: ^Node, parent_ast: ^Node) {
 				&g_message_list,
 				message_create(
 					.Error,
-					fmt.aprintf("Type mismatch: {} and {}", ast.children[0].return_type, ast.children[1].return_type),
+					fmt.aprintf(
+						"Type mismatch: {} and {}",
+						type_to_string(ast.children[0].return_type),
+						type_to_string(ast.children[1].return_type),
+					),
 					ast.range,
 				),
 			)
@@ -365,7 +442,6 @@ type_check :: proc(ast: ^Node, parent_ast: ^Node) {
 			ast.value_token_kind == .LessThanOrEqual {
 			ast.return_type = type_create_integer(1, true)
 		}
-	// FIXME: Verify that the operation is possible
 	case .UnaryExpression:
 		// FIXME: Verify that the operation is possible
@@ -390,7 +466,11 @@ type_check :: proc(ast: ^Node, parent_ast: ^Node) {
 				&g_message_list,
 				message_create(
 					.Error,
-					fmt.aprintf("Type mismatch: {} and {}", function_return_type, ast.children[0].return_type),
+					fmt.aprintf(
+						"Type mismatch: {} and {}",
+						type_to_string(function_return_type),
+						type_to_string(ast.children[0].return_type),
+					),
 					ast.range,
 				),
 			)
@@ -399,19 +479,27 @@ type_check :: proc(ast: ^Node, parent_ast: ^Node) {
 	case .Cast:
 		type_check(ast.children[0], ast)
 		type_to := ast_to_type(ast.children[1])
-		append(
-			&g_message_list,
-			message_create(
-				.Warning,
-				fmt.aprintf("Cast to type not checked: {}", ast.children[1].value.([dynamic]u8)),
-				ast.children[1].range,
-			),
-		)
-		// FIXME: Check if compatible
+		if ast.children[0].return_type.kind == .Struct || type_to.kind == .Struct {
+			append(&g_message_list, message_create(.Error, "Cannot cast to/from Struct type.", ast.range))
+		} else {
+			// FIXME: Check if compatible
+			append(
+				&g_message_list,
+				message_create(
+					.FIXME,
+					fmt.aprintf("Cast to type not checked: %s.", ast.children[1].value.([dynamic]u8)),
+					ast.range,
+				),
+			)
+		}
 		ast.return_type = type_to
 	case .BitwiseCast:
 		type_check(ast.children[0], ast)
 		// FIXME: Check if they are both the same bit size
+		append(
+			&g_message_list,
+			message_create(.FIXME, fmt.aprintf("BitwiseCast bit size check not implemented."), ast.range),
+		)
 		ast.return_type = ast_to_type(ast.children[1])
 	case .VariableDeclaration:
 		if ast.children[2] != nil {
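type_to_string exists mainly so the type checker can format both sides of a mismatch instead of dumping raw struct values. A usage sketch matching the diagnostics above (it assumes import "core:fmt" and accepts that the aprintf-allocated strings are leaked here, as elsewhere in the checker):

    report_type_mismatch :: proc(a, b: ^Type, loc: SourceLocation) {
        append(
            &g_message_list,
            message_create(.Error, fmt.aprintf("Type mismatch: {} and {}", type_to_string(a), type_to_string(b)), loc),
        )
    }
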
diff --git a/src/util.odin b/src/util.odin
index 154b550..c6fbd1b 100644
--- a/src/util.odin
+++ b/src/util.odin
@@ -31,3 +31,18 @@ get_character_sum_of_dyn_arr :: proc(a: ^[dynamic]u8) -> int {
 	}
 	return sum
 }
+
+digit_count :: proc(a: u64) -> (r: u64) {
+	a_ := a
+	if a == 0 {
+		return 1
+	}
+
+	r = 0
+	for a_ != 0 {
+		a_ /= 10
+		r += 1
+	}
+
+	return
+}
diff --git a/test_type_checker.cat b/test_type_checker.cat
index 7bb9f6e..dc83afe 100644
--- a/test_type_checker.cat
+++ b/test_type_checker.cat
@@ -59,7 +59,7 @@ fn WindowShouldClose i32
 	1 << 3 | 2
 
 fn ColorToRaylib(c: Color) u32 {
-	ret c.a as u32 << 24 | c.b as u32 << 16 | c.g as u32 << 8 | c.r
+	ret c.a as u32 << 24 as u32 | c.b as u32 << 16 as u32 | c.g as u32 << 8 as u32 | c.r
}
 
 fn ClearBackgroundWrap(c: Color) {
diff --git a/testmin.cat b/testmin.cat
new file mode 100644
index 0000000..51413fa
--- /dev/null
+++ b/testmin.cat
@@ -0,0 +1,8 @@
+struct A {
+	a b: i32,
+}
+
+let inst := .A{ 1 2 }
+
+inst.a as u32
+
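digit_count is not referenced anywhere else in this diff; its contract is simply the number of base-10 digits, with 0 counted as one digit (presumably for sizing the line-number gutter in source_print, though that wiring is not part of this change):

    digit_count_check :: proc() {
        assert(digit_count(0) == 1)
        assert(digit_count(9) == 1)
        assert(digit_count(1234) == 4)
    }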