commit 4534c39a4be4576b06eb245c9c12e251ff199db4
Author: Slendi
Date: Thu Feb 8 01:04:11 2024 +0200

    Initial commit

// ==== file: .gitingore (sic -- the committed filename is a typo of
// ".gitignore", so git will NOT actually ignore these build artifacts;
// note speedcat.exe is committed below despite being listed here) ====
speedcat
speedcat.exe

// ==== file: ast.odin ====
package main

import "core:fmt"

// Discriminates what an AST Node represents.
NodeKind :: enum {
	// Literals / terminals
	Integer,
	Float,
	Character,
	String,
	Identifier,

	// Expressions / compounds
	Block,
	BinaryExpression,
	UnaryExpression,
	FieldAccess,
	IndexAccess,
	FunctionCall,

	// Declarations
	Function,
	Struct,
	Enum,
	Union,
}

// One AST node.
// `children` layout depends on `kind`: { left, right } for binary ops,
// field/index access; { operand } for unary ops; { callee, args... } for
// FunctionCall. `value` holds the literal/identifier payload copied from
// the token; `value_token_kind` records the operator token for
// Unary/BinaryExpression nodes.
Node :: struct {
	kind:             NodeKind,
	range:            TextRange,
	children:         [dynamic]^Node,
	value:            TokenValue,
	value_token_kind: TokenKind,
}

// Leaf node carrying a token value (literal or identifier).
node_create_value :: proc(kind: NodeKind, range: TextRange, value: TokenValue) -> (ret: ^Node) {
	ret = new(Node)
	ret^ = {
		kind = kind,
		range = range,
		value = value,
	}
	return
}

// Block node owning a list of statements.
node_create_block :: proc(range: TextRange, children: [dynamic]^Node) -> (ret: ^Node) {
	ret = new(Node)
	ret^ = {
		kind = .Block,
		range = range,
		children = children,
	}
	return
}

// Binary expression: children are { left, right }, operator in value_token_kind.
node_create_binary :: proc(kind: TokenKind, range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
	ret = new(Node)
	ret^ = {
		kind = .BinaryExpression,
		range = range,
		children = { left, right },
		value_token_kind = kind,
	}
	return
}

// Unary expression: single child, operator in value_token_kind.
node_create_unary :: proc(kind: TokenKind, range: TextRange, operand: ^Node) -> (ret: ^Node) {
	ret = new(Node)
	ret^ = {
		kind = .UnaryExpression,
		range = range,
		children = { operand },
		value_token_kind = kind,
	}
	return
}

// Field access `left.right`: children are { left, right }.
node_create_field_access :: proc(range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
	ret = new(Node)
	ret^ = {
		kind = .FieldAccess,
		range = range,
		children = { left, right },
	}
	return
}

// Index access `left[right]`: children are { left, right }.
node_create_index_access :: proc(range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
	ret = new(Node)
	ret^ = {
		kind = .IndexAccess,
		range = range,
		children = { left, right },
	}
	return
}

// Function call: children are { callee, arg0, arg1, ... }.
node_create_function_call :: proc(range: TextRange, name: ^Node, args: [dynamic]^Node) -> (ret: ^Node) {
	ret = new(Node)
	ret^ = {
		kind = .FunctionCall,
		range = range,
		children = { name },
	}
	for arg in args {
		append(&ret.children, arg)
	}
	return
}

// NOTE(review): the dump this file was recovered from is truncated here --
// the body of node_print, the Message/MessageLevel declarations, the
// g_message_list global and the message_create signature were lost
// ("for i in 0.. Message {" is all that survives of the junction between
// ast.odin and the diagnostics code). Everything from here down to the end
// of message_create is a best-effort reconstruction inferred from call
// sites in lexer.odin / parser.odin / main.odin -- TODO: confirm against
// the original repository.

// Debug-prints `node` and its subtree, indented two spaces per level.
node_print :: proc(node: ^Node, indent := 0) {
	for i in 0..<indent {
		fmt.printf("  ")
	}
	if node == nil {
		fmt.println("<nil>")
		return
	}
	fmt.printf("%v %v\n", node.kind, node.value)
	for child in node.children {
		node_print(child, indent + 1)
	}
}

// Diagnostic severity (reconstructed; .Warning and .Error are the only
// levels used by the surviving lexer/parser code).
MessageLevel :: enum {
	Warning,
	Error,
}

// One diagnostic produced during lexing/parsing (reconstructed from the
// surviving tail of message_create).
Message :: struct {
	level:   MessageLevel,
	message: string,
	range:   TextRange,
}

// Global diagnostic sink appended to by the lexer and parser, drained by
// main (reconstructed -- main.odin iterates `g_message_list`).
g_message_list: [dynamic]Message

// Builds a Message value. The body matches the surviving tail of the
// original verbatim; only the signature is reconstructed.
message_create :: proc(level: MessageLevel, message: string, range: TextRange) -> Message {
	return Message {
		level = level,
		message = message,
		range = range,
	}
}

// ==== file: examples/calc.cat ====
use "std"

let run = pub fn {
	for {
		std.printf "first number: "
		let a = std.read_i32
		std.printf "second number: "
		let b = std.read_i32
		std.printf "enter operation [+-/*]: "
		let op = std.read_char
		let res: i32
		switch op {
		case '+'
			res = a + b
			break
		case '-'
			res = a - b
			break
		case '/'
			res = a / b
			break
		case '*'
			res = a * b
			break
		case
			std.println "Invalid operation"
		}

		std.printf "Final answer: %d\n" res
	}
}

// ==== file: examples/raylib.cat ====
use rl "raylib.h"

rl.InitWindow 640 480 "Main window"
defer rl.CloseWindow
rl.SetTargetFPS 60

for !rl.WindowShouldClose {
	rl.BeginDrawing
	rl.ClearBackground .RAYWHITE
	rl.DrawText "Hello world!" 50 50 40 .RED
	rl.EndDrawing
}

// ==== file: examples/sample.cat ====
use "std"
use "io"
use "str"

use "./calc"

io.read_all "epic_file" -> str.sort_lines -> io.write_all "output"

std.println "Counting to 10:"
for i in 0..=10 {
	std.printf "%d" i
	if i == 10 {
		std.println "!"
	} else {
		std.printf "... 
" + } +} + +calc.run + diff --git a/lexer.odin b/lexer.odin new file mode 100644 index 0000000..33658a8 --- /dev/null +++ b/lexer.odin @@ -0,0 +1,361 @@ +package main + +import "core:fmt" +import "core:c/libc" +import "core:math" + +Lexer :: struct { + data: ^[dynamic]u8, + read_position: u64, + position: TextPosition, + + char, next: u8, + last_token_kind: TokenKind, + should_return_semicolon: bool, +} + +lexer_create :: proc(data: ^[dynamic]u8) -> ^Lexer { + lexer := new(Lexer) + lexer^ = { + data = data, + read_position = 0, + position = TextPosition { + line = 1, + column = 1, + }, + } + lexer_advance(lexer) + lexer_advance(lexer) + return lexer +} + +@(private = "file") +lexer_advance :: proc(lexer: ^Lexer) { + lexer.char = lexer.next + if lexer.read_position < u64(len(lexer.data)) { + lexer.next = lexer.data[lexer.read_position] + } else { + lexer.next = 0 + } + lexer.read_position += 1 + if lexer.char == '\r' { + lexer_advance(lexer) + } + if lexer.char == '\n' { + lexer.position.line += 1 + lexer.position.column = 1 + } else { + lexer.position.column += 1 + } +} + +@(private = "file") +lexer_should_not_emit_semicolon :: proc(lexer: ^Lexer) -> bool { + return lexer.last_token_kind == .CloseBrace || + lexer.last_token_kind == .Semicolon || + lexer.last_token_kind == .EOF || + lexer.last_token_kind == .Invalid || + lexer.last_token_kind == .OpenParen || + lexer.last_token_kind == .OpenBrace || + lexer.last_token_kind == .OpenBracket || + lexer.last_token_kind == .Add || + lexer.last_token_kind == .Subtract || + lexer.last_token_kind == .Multiply || + lexer.last_token_kind == .Divide || + lexer.last_token_kind == .Modulo || + lexer.last_token_kind == .Exponent || + lexer.last_token_kind == .Assign || + lexer.last_token_kind == .Not || + lexer.last_token_kind == .BitwiseAnd || + lexer.last_token_kind == .BitwiseOr || + lexer.last_token_kind == .BitwiseXOR || + lexer.last_token_kind == .BitwiseNot || + lexer.last_token_kind == .LessThan || + lexer.last_token_kind 
== .GreaterThan || + lexer.last_token_kind == .BitwiseLeftShift || + lexer.last_token_kind == .BitwiseRightShift +} + +@(private = "file") +lexer_skip_whitespace :: proc(lexer: ^Lexer) { + // FIXME: Do the funny golang thing where newlines are semicolons based on some rules + for lexer.char == ' ' || lexer.char == '\t' || lexer.char == '\r' || lexer.char == '\n' { + if lexer.char == '\n' { + if !lexer_should_not_emit_semicolon(lexer) { + lexer.should_return_semicolon = true + lexer_advance(lexer) + return + } + } + lexer_advance(lexer) + } +} + +lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) { + lexer_skip_whitespace(lexer) + if lexer.should_return_semicolon { + lexer.should_return_semicolon = false + return token_create(.Semicolon, TextRange { start = lexer.position, end = lexer.position }) + } + + defer lexer.last_token_kind = ret.kind + + crange := TextRange { + start = lexer.position, + end = lexer.position, + } + + ret = token_create(.Invalid, crange) + should_advance := true + + switch lexer.char { + case '+': + ret = token_create(.Add, crange) + if lexer.next == '+' { + lexer_advance(lexer) + crange.end = lexer.position + ret = token_create(.Increment, crange) + } + case '-': + ret = token_create(.Subtract, crange) + if lexer.next == '-' { + lexer_advance(lexer) + crange.end = lexer.position + ret = token_create(.Decrement, crange) + } else if lexer.next == '>' { + lexer_advance(lexer) + crange.end = lexer.position + ret = token_create(.Arrow, crange) + } + case '*': ret = token_create(.Multiply, crange) + case '/': ret = token_create(.Divide, crange) + case '%': ret = token_create(.Modulo, crange) + case '`': ret = token_create(.Exponent, crange) + case '=': ret = token_create(.Assign, crange) + case '!': + ret = token_create(.Not, crange) + if lexer.next == '=' { + lexer_advance(lexer) + crange.end = lexer.position + ret = token_create(.NotEquals, crange) + } + case '<': + ret = token_create(.LessThan, crange) + if lexer.next == '=' { + 
lexer_advance(lexer) + crange.end = lexer.position + ret = token_create(.LessThanOrEqual, crange) + } else if lexer.next == '<' { + lexer_advance(lexer) + crange.end = lexer.position + ret = token_create(.BitwiseLeftShift, crange) + } + case '>': + ret = token_create(.GreaterThan, crange) + if lexer.next == '=' { + lexer_advance(lexer) + crange.end = lexer.position + ret = token_create(.GreaterThanOrEqual, crange) + } else if lexer.next == '>' { + lexer_advance(lexer) + crange.end = lexer.position + ret = token_create(.BitwiseRightShift, crange) + } + case '&': ret = token_create(.BitwiseAnd, crange) + case '|': ret = token_create(.BitwiseOr, crange) + case '^': ret = token_create(.BitwiseXOR, crange) + case '~': ret = token_create(.BitwiseNot, crange) + case '(': ret = token_create(.OpenParen, crange) + case ')': ret = token_create(.CloseParen, crange) + case '[': ret = token_create(.OpenBracket, crange) + case ']': ret = token_create(.CloseBracket, crange) + case '{': ret = token_create(.OpenBrace, crange) + case '}': ret = token_create(.CloseBrace, crange) + + case '?': ret = token_create(.Question, crange) + case ':': ret = token_create(.Colon, crange) + case '.': ret = token_create(.Dot, crange) + case ';': ret = token_create(.Semicolon, crange) + + case '"': ret = lexer_read_string(lexer, .String, '\"') + case '\'': ret = lexer_read_string(lexer, .Character, '\'') + case 'a'..='z': fallthrough + case 'A'..='Z': fallthrough + case '_': + ret = lexer_read_identifier(lexer) + should_advance = false + case '0'..='9': + ret = lexer_read_number(lexer) + should_advance = false + case 0: + ret = token_create(.EOF, crange) + should_advance = false + } + + if should_advance { + lexer_advance(lexer) + } + + return +} + +@(private = "file") +lexer_read_string :: proc(lexer: ^Lexer, kind: TokenKind, outer: u8) -> Token { + crange := TextRange { + start = lexer.position, + end = lexer.position, + } + + lexer_advance(lexer) + + str : [dynamic]u8 + for lexer.char != outer { 
+ if lexer.char == '\\' { + range := TextRange { start = lexer.position } + lexer_advance(lexer) + switch lexer.char { + case 'n': append(&str, '\n'); break + case 't': append(&str, '\t'); break + case 'b': append(&str, '\b'); break + case 'r': append(&str, '\r'); break + case '\\': append(&str, '\\'); break + case: + range.end = lexer.position + append(&g_message_list, + message_create(.Warning, fmt.aprintf("Invalid string/character escape: %c at %s", lexer.char, "TODO LOCATION"), range), + ) + } + lexer_advance(lexer) + continue + } + append(&str, lexer.char) + + lexer_advance(lexer) + } + crange.end = lexer.position + + return token_create_u8(kind, str, crange) +} + +@(private = "file") +lexer_read_identifier :: proc(lexer: ^Lexer) -> Token { + crange := TextRange { start = lexer.position } + + str : [dynamic]u8 + for libc.isalnum(i32(lexer.char)) != 0 || lexer.char == '_' { + append(&str, lexer.char) + crange.end = lexer.position + lexer_advance(lexer) + } + + if compare_dyn_arr_string(&str, "fn") { return token_create(.Function, crange) } + else if compare_dyn_arr_string(&str, "struct") { return token_create(.Struct, crange) } + else if compare_dyn_arr_string(&str, "enum") { return token_create(.Enum, crange) } + else if compare_dyn_arr_string(&str, "union") { return token_create(.Union, crange) } + else if compare_dyn_arr_string(&str, "type") { return token_create(.Type, crange) } + else if compare_dyn_arr_string(&str, "use") { return token_create(.Use, crange) } + else if compare_dyn_arr_string(&str, "pub") { return token_create(.Pub, crange) } + else if compare_dyn_arr_string(&str, "let") { return token_create(.Let, crange) } + else if compare_dyn_arr_string(&str, "mut") { return token_create(.Mut, crange) } + else if compare_dyn_arr_string(&str, "as") { return token_create(.As, crange) } + else if compare_dyn_arr_string(&str, "in") { return token_create(.In, crange) } + else if compare_dyn_arr_string(&str, "else") { return token_create(.Else, crange) } + 
else if compare_dyn_arr_string(&str, "elif") { return token_create(.Elif, crange) } + else if compare_dyn_arr_string(&str, "for") { return token_create(.For, crange) } + else if compare_dyn_arr_string(&str, "break") { return token_create(.Break, crange) } + else if compare_dyn_arr_string(&str, "continue") { return token_create(.Continue, crange) } + else if compare_dyn_arr_string(&str, "switch") { return token_create(.Switch, crange) } + else if compare_dyn_arr_string(&str, "case") { return token_create(.Case, crange) } + else if compare_dyn_arr_string(&str, "ret") { return token_create(.Ret, crange) } + else if compare_dyn_arr_string(&str, "static") { return token_create(.Static, crange) } + else if compare_dyn_arr_string(&str, "defer") { return token_create(.Defer, crange) } + else if compare_dyn_arr_string(&str, "and") { return token_create(.And, crange) } + else if compare_dyn_arr_string(&str, "or") { return token_create(.Or, crange) } + + return token_create_u8(.Identifier, str, crange) +} + +@(private = "file") +lexer_read_number :: proc(lexer: ^Lexer) -> Token { + crange := TextRange { + start = lexer.position, + end = lexer.position, + } + + // FIXME: Implement binary + ReadMode :: enum { + Normal, + Hex, + } + read_mode := ReadMode.Normal + + if lexer.char == '0' && lexer.next == 'x' { + read_mode = .Hex + lexer_advance(lexer) + crange.end = lexer.position + lexer_advance(lexer) + } + + whole_part : u64 = 0 + if read_mode == .Normal { + for libc.isdigit(i32(lexer.char)) != 0 && lexer.char > 0 { + whole_part = whole_part * 10 + u64(lexer.char) - '0' + crange.end = lexer.position + lexer_advance(lexer) + } + } else if read_mode == .Hex { + lowered := libc.tolower(i32(lexer.char)) + for libc.isxdigit(lowered) != 0 && lexer.char > 0 && lexer.char != '.' 
{ + digit := lowered - '0' + if libc.isdigit(lowered) == 0 { + digit = lowered - 'a' + 10 + } + whole_part = (whole_part << 4) | u64(digit) + crange.end = lexer.position + lexer_advance(lexer) + } + whole_part = whole_part >> 4 + } + + if lexer.char == '.' { + lexer_advance(lexer) + + // FIXME: Move this to another procedure because this is repeating lmfao + fractional_part : u64 = 0 + if read_mode == .Normal { + for libc.isdigit(i32(lexer.char)) != 0 && lexer.char > 0 { + fractional_part = fractional_part * 10 + u64(lexer.char) - '0' + crange.end = lexer.position + lexer_advance(lexer) + } + } else if read_mode == .Hex { + append(&g_message_list, message_create(.Error, "Hexadecimal floating point numbers are not supported yet", crange)) + lowered := libc.tolower(i32(lexer.char)) + for libc.isxdigit(lowered) != 0 && lexer.char > 0 { + digit := lowered - '0' + if libc.isdigit(lowered) == 0 { + digit = lowered - 'a' + 10 + } + fractional_part = fractional_part * 16 + u64(digit) + crange.end = lexer.position + lexer_advance(lexer) + } + fractional_part = fractional_part / 16 + } + + fractional_part_clone := fractional_part + count := 0 + for fractional_part_clone != 0 { + fractional_part_clone = fractional_part_clone / 10 + count = count + 1 + } + + floating : f64 = 0 + floating = f64(fractional_part) / math.pow_f64(10, f64(count)) + f64(whole_part) + + return token_create_f64(.Float, floating, crange) + } + + return token_create_u64(.Integer, whole_part, crange) +} + diff --git a/main.odin b/main.odin new file mode 100644 index 0000000..b26e2a5 --- /dev/null +++ b/main.odin @@ -0,0 +1,43 @@ +package main + +import "core:fmt" +import "core:os" + +main :: proc() { + handle: os.Handle + if len(os.args) >= 2 { + errno: os.Errno + handle, errno = os.open(os.args[1]) + if errno != 0 { + fmt.printf("Error opening file\n", errno) + return + } + } else { + handle = os.stdin + } + defer os.close(handle) + + data, err := os.read_entire_file_from_handle(handle) + if !err { + 
fmt.printf("Error reading file\n", err) + return + } + + u8_arr : [dynamic]u8 + for ch in data { + append(&u8_arr, u8(ch)) + } + + lexer := lexer_create(&u8_arr) + parser := parser_create(lexer) + + ast := parser_parse(&parser) + if len(g_message_list) > 0 { + for msg in g_message_list { + fmt.printf("%s\n", msg) + } + return + } + node_print(ast) +} + diff --git a/parser.odin b/parser.odin new file mode 100644 index 0000000..b3309ec --- /dev/null +++ b/parser.odin @@ -0,0 +1,231 @@ +package main + +import "core:fmt" + +Parser :: struct { + lexer: ^Lexer, + + tok, next: Token, + can_be_function: bool, +} + +parser_create :: proc(lexer: ^Lexer) -> (ret: Parser) { + ret = { + lexer = lexer, + can_be_function = true, + } + parser_next(&ret) + parser_next(&ret) + return +} + +@(private = "file") +parser_next :: proc(parser: ^Parser) { + parser.tok = parser.next + parser.next = lexer_next(parser.lexer) +} + +@(private = "file") +accept :: proc(parser: ^Parser, tok: TokenKind) -> bool { + if parser.tok.kind == tok { + parser_next(parser) + return true + } + return false +} + +@(private = "file") +expect :: proc(parser: ^Parser, tok: TokenKind) { + if !accept(parser, tok) { + append(&g_message_list, message_create(.Error, fmt.aprintf("Expected {}, got {} at {}", tok, parser.tok.kind, "TODO"), parser.tok.range)) + } +} + +parser_parse :: proc(parser: ^Parser) -> (ret: ^Node) { + ret = parser_parse_block(parser, .EOF) + return +} + +@(private = "file") +parser_parse_block :: proc(parser: ^Parser, end: TokenKind) -> (ret: ^Node) { + range := parser.tok.range + statements : [dynamic]^Node + for parser.tok.kind != end && parser.tok.kind != .EOF { + append(&statements, parser_parse_statement(parser)) + } + expect(parser, end) + return node_create_block(range, statements) +} + +@(private = "file") +parser_parse_statement :: proc(parser: ^Parser) -> ^Node { + ret := parser_parse_expression(parser) + expect(parser, .Semicolon) + return ret +} + +@(private = "file") 
parser_parse_expression :: proc(parser: ^Parser) -> ^Node {
	// Expression entry point: lowest-precedence level is the arrow pipe.
	return parser_parse_arrow(parser)
}

// `a -> f x` pipes `a` in as f's first argument, i.e. function_call(f, {a, x}).
@(private = "file")
parser_parse_arrow :: proc(parser: ^Parser) -> ^Node {
	lhs := parser_parse_assignment(parser)
	for accept(parser, .Arrow) {
		rhs := parser_parse_assignment(parser)
		if rhs.kind != .FunctionCall && rhs.kind != .Identifier && rhs.kind != .FieldAccess && rhs.kind != .IndexAccess {
			append(&g_message_list, message_create(.Error, fmt.aprintf("Expected function call, got {} at {}", rhs.kind, "TODO"), rhs.range))
			return lhs
		}
		if rhs.kind != .FunctionCall {
			// Wrap a bare callee into a zero-argument call.
			rhs = node_create_function_call(rhs.range, rhs, nil)
		}
		// children[0] is the callee, so the piped value becomes argument 0.
		inject_at(&rhs.children, 1, lhs)
		lhs = rhs
	}
	return lhs
}

// Generic left-associative binary-operator level: `next` parses the tighter
// level, `kinds` are the operators handled here.
// BUG FIX: the original iterated `kinds` only once, so a lower-listed
// operator reappearing after a higher-listed one aborted the level early
// (e.g. "a + b - c + d" left "+ d" unconsumed, producing a spurious
// "Expected Semicolon" error). Keep rescanning until no operator matches.
@(private = "file")
parser_parse_binary_expression :: proc(parser: ^Parser, kinds: []TokenKind, next: proc(parser: ^Parser) -> ^Node) -> ^Node {
	lhs := next(parser)
	scan: for {
		for kind in kinds {
			if accept(parser, kind) {
				rhs := next(parser)
				lhs = node_create_binary(kind, lhs.range, lhs, rhs)
				lhs^.range.end = rhs.range.end
				continue scan
			}
		}
		break
	}
	return lhs
}

@(private = "file")
parser_parse_assignment :: proc(parser: ^Parser) -> ^Node {
	return parser_parse_binary_expression(parser, {.Assign}, parser_parse_addition)
}

@(private = "file")
parser_parse_addition :: proc(parser: ^Parser) -> ^Node {
	return parser_parse_binary_expression(parser, {.Add, .Subtract}, parser_parse_multiplication)
}

@(private = "file")
parser_parse_multiplication :: proc(parser: ^Parser) -> ^Node {
	return parser_parse_binary_expression(parser, {.Multiply, .Divide, .Modulo}, parser_parse_exponent)
}

@(private = "file")
parser_parse_exponent :: proc(parser: ^Parser) -> ^Node {
	return parser_parse_binary_expression(parser, {.Exponent}, parser_parse_prefix_2)
}

// FIXME(review): this level is orphaned -- parser_parse_exponent chains
// directly to parser_parse_prefix_2, so bitwise operators can never appear
// in an expression. Left unwired here to preserve existing behavior;
// deciding its precedence slot needs a language-level decision.
@(private = "file")
parser_parse_bitwise :: proc(parser: ^Parser) -> ^Node {
	return parser_parse_binary_expression(parser, {.BitwiseAnd, .BitwiseOr, .BitwiseXOR, .BitwiseLeftShift, .BitwiseRightShift}, parser_parse_prefix_2)
}

// Prefix operators that bind looser than +/- sign prefixes:
// ! ~ ++ -- and ^ (the latter presumably a pointer/deref prefix -- TODO confirm).
@(private = "file")
parser_parse_prefix_2 :: proc(parser: ^Parser) -> ^Node {
	range := parser.tok.range
	if accept(parser, .Not) {
		rhs := parser_parse_suffix(parser)
		range.end = rhs.range.end
		return node_create_unary(.Not, range, rhs)
	} else if accept(parser, .BitwiseNot) {
		rhs := parser_parse_suffix(parser)
		range.end = rhs.range.end
		return node_create_unary(.BitwiseNot, range, rhs)
	} else if accept(parser, .Increment) {
		rhs := parser_parse_suffix(parser)
		range.end = rhs.range.end
		return node_create_unary(.Increment, range, rhs)
	} else if accept(parser, .Decrement) {
		rhs := parser_parse_suffix(parser)
		range.end = rhs.range.end
		return node_create_unary(.Decrement, range, rhs)
	} else if accept(parser, .BitwiseXOR) {
		rhs := parser_parse_suffix(parser)
		range.end = rhs.range.end
		return node_create_unary(.BitwiseXOR, range, rhs)
	}
	return parser_parse_suffix(parser)
}

// Suffix forms: indexing `a[i]` and postfix ++/--.
@(private = "file")
parser_parse_suffix :: proc(parser: ^Parser) -> ^Node {
	range := parser.tok.range
	lhs := parser_parse_prefix(parser)
	range_op := parser.tok.range
	range.end = range_op.end
	if accept(parser, .OpenBracket) {
		rhs := parser_parse_expression(parser)
		range.end = rhs.range.end
		expect(parser, .CloseBracket)
		return node_create_index_access(range, lhs, rhs)
	} else if accept(parser, .Increment) {
		return node_create_unary(.Increment, range, lhs)
	} else if accept(parser, .Decrement) {
		return node_create_unary(.Decrement, range, lhs)
	}
	return lhs
}

// Sign prefixes: unary + and - (right-recursive, so "--x" nests).
@(private = "file")
parser_parse_prefix :: proc(parser: ^Parser) -> ^Node {
	range := parser.tok.range
	if accept(parser, .Add) {
		return node_create_unary(.Add, range, parser_parse_prefix(parser))
	} else if accept(parser, .Subtract) {
		return node_create_unary(.Subtract, range, parser_parse_prefix(parser))
	}
	return parser_parse_factor(parser)
}

// Atoms: literals, identifiers (with field access and paren-less call
// argument lists), and parenthesized expressions.
@(private = "file")
parser_parse_factor :: proc(parser: ^Parser) -> (ret: ^Node) {
	ret = nil
	if parser.tok.kind == .Integer {
		ret = node_create_value(.Integer, parser.tok.range, parser.tok.value)
		parser_next(parser)
	} else if parser.tok.kind == .Float {
		ret = node_create_value(.Float, parser.tok.range, parser.tok.value)
		parser_next(parser)
	} else if parser.tok.kind == .Character {
		ret = node_create_value(.Character, parser.tok.range, parser.tok.value)
		parser_next(parser)
	} else if parser.tok.kind == .String {
		ret = node_create_value(.String, parser.tok.range, parser.tok.value)
		parser_next(parser)
	} else if parser.tok.kind == .Identifier {
		ret = node_create_value(.Identifier, parser.tok.range, parser.tok.value)
		parser_next(parser)
		// Field access binds tighter than call-argument collection, and the
		// right-hand side of '.' must not itself start a call.
		prev := parser.can_be_function
		parser.can_be_function = false
		if accept(parser, .Dot) {
			ret = node_create_field_access({ ret.range.start, parser.tok.range.start }, ret, parser_parse_factor(parser))
		}
		parser.can_be_function = prev
		// Paren-less call: `f a b` -- collect arguments until a token that
		// cannot continue an argument list.
		if parser.can_be_function && parser.tok.kind != .CloseParen && parser.tok.kind != .Semicolon && parser.tok.kind != .Arrow && parser.tok.kind != .EOF {
			prev := parser.can_be_function
			parser.can_be_function = false
			args : [dynamic]^Node
			for parser.tok.kind != .CloseParen && parser.tok.kind != .Semicolon && parser.tok.kind != .Arrow && parser.tok.kind != .EOF {
				append(&args, parser_parse_expression(parser))
			}
			ret = node_create_function_call(ret.range, ret, args)
			parser.can_be_function = prev
		}
	} else if accept(parser, .OpenParen) {
		// Parentheses re-enable call syntax inside them.
		prev := parser.can_be_function
		parser.can_be_function = true
		ret = parser_parse_expression(parser)
		parser.can_be_function = prev
		expect(parser, .CloseParen)
	} else {
		append(&g_message_list, message_create(.Error, fmt.aprintf("Unexpected factor token {} at {}", parser.tok.kind, "TODO"), parser.tok.range))
	}
	return
}

// ==== speedcat.exe (committed build artifact; binary diff, not reproduced) ====
diff --git a/speedcat.exe b/speedcat.exe
new file mode 100644
index 0000000..6d09145
Binary files /dev/null
and b/speedcat.exe differ

// ==== file: test.cat ====
fmt.printf "%d + %d = %d File length: %d" a b a + b (io.file_size "file.txt")
fmt.println "Hello world!"

// ==== file: tokens.odin ====
package main

// Every token kind the lexer can produce.
TokenKind :: enum {
	Invalid,
	EOF,

	// Literals
	Identifier,
	Integer,
	Float,
	String,
	Character,

	// Keywords
	Function,
	Struct,
	Enum,
	Union,
	Type,
	Use,
	Pub,
	Let,
	Mut,
	As,
	In,
	If,
	Else,
	Elif,
	For,
	Break,
	Continue,
	Switch,
	Case,
	Ret,
	Static,
	Defer,

	// Logical Operators Keywords
	And,
	Or,

	// Operators
	Add,
	Subtract,
	Multiply,
	Divide,
	Modulo,
	Exponent,
	Assign,
	Increment,
	Decrement,

	// Logical Operators
	Equals,
	NotEquals,
	LessThan,
	GreaterThan,
	LessThanOrEqual,
	GreaterThanOrEqual,
	Not,

	// Bitwise Operators
	BitwiseAnd,
	BitwiseOr,
	BitwiseXOR,
	BitwiseNot,
	BitwiseLeftShift,
	BitwiseRightShift,

	// Delimiters
	OpenParen,
	CloseParen,
	OpenBrace,
	CloseBrace,
	OpenBracket,
	CloseBracket,

	// Punctuation
	Question, // For Zig-like error handling
	Colon,
	Arrow,
	Dot,

	// Other
	Semicolon,
}

// 1-based line/column position in the source text.
TextPosition :: struct {
	line:   u64,
	column: u64,
}

// Inclusive start/end span in the source text.
TextRange :: struct {
	start: TextPosition,
	end:   TextPosition,
}

// Payload carried by a token: integer, float, byte string, or nothing.
TokenValue :: union {
	u64,
	f64,
	[dynamic]u8,
	^u8,
}

// One lexed token: its kind, optional payload, and source span.
Token :: struct {
	kind:  TokenKind,
	value: TokenValue,
	range: TextRange,
}

// Token without a payload (operators, keywords, delimiters).
token_create :: proc(kind: TokenKind, range: TextRange) -> Token {
	return Token { kind = kind, value = nil, range = range }
}

// Token carrying a byte string (identifiers, string/char literals).
token_create_u8 :: proc(kind: TokenKind, text: [dynamic]u8, range: TextRange) -> Token {
	return Token { kind = kind, value = text, range = range }
}

// Token carrying an unsigned integer literal.
token_create_u64 :: proc(kind: TokenKind, value: u64, range: TextRange) -> Token {
	return Token { kind = kind, value = value, range = range }
}

// Token carrying a floating-point literal.
token_create_f64 :: proc(kind: TokenKind, value: f64, range: TextRange) -> Token {
	return Token { kind = kind, value = value, range = range }
}

// ==== file: util.odin ====
package main

// Reports whether the dynamic byte array `a` holds exactly the bytes of
// the string `b`.
compare_dyn_arr_string :: proc(a: ^[dynamic]u8, b: string) -> bool {
	if len(a) != len(b) do return false
	for i in 0..<len(b) {
		if a[i] != b[i] do return false
	}
	return true
}