Add parser for structs, unions and functions

Slendi 2024-02-09 14:34:02 +02:00
parent d5990adfee
commit 540e94037e
5 changed files with 466 additions and 142 deletions

ast.odin

@@ -10,22 +10,25 @@ NodeKind :: enum {
 	String,
 	Identifier,
 	//
 	Block,
 	BinaryExpression,
 	UnaryExpression,
 	FieldAccess,
 	IndexAccess,
 	FunctionCall,
+	VariableDeclaration,
 	//
 	Function,
 	Struct,
 	Enum,
 	Union,
 	Use,
 	//
 	If,
-	VariableDeclaration,
+	For,
 }
 
 Node :: struct {
@@ -56,12 +59,19 @@ node_create_block :: proc(range: TextRange, children: [dynamic]^Node) -> (ret: ^Node) {
 	return
 }
 
-node_create_binary :: proc(kind: TokenKind, range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
+node_create_binary :: proc(
+	kind: TokenKind,
+	range: TextRange,
+	left: ^Node,
+	right: ^Node,
+) -> (
+	ret: ^Node,
+) {
 	ret = new(Node)
 	ret^ = {
 		kind = .BinaryExpression,
 		range = range,
-		children = { left, right },
+		children = {left, right},
 		value_token_kind = kind,
 	}
 	return
@@ -72,7 +82,7 @@ node_create_unary :: proc(kind: TokenKind, range: TextRange, operand: ^Node) -> (ret: ^Node) {
 	ret^ = {
 		kind = .UnaryExpression,
 		range = range,
-		children = { operand },
+		children = {operand},
 		value_token_kind = kind,
 	}
 	return
@@ -83,7 +93,7 @@ node_create_field_access :: proc(range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
 	ret^ = {
 		kind = .FieldAccess,
 		range = range,
-		children = { left, right },
+		children = {left, right},
 	}
 	return
 }
@@ -93,17 +103,44 @@ node_create_index_access :: proc(range: TextRange, left: ^Node, right: ^Node) -> (ret: ^Node) {
 	ret^ = {
 		kind = .IndexAccess,
 		range = range,
-		children = { left, right },
+		children = {left, right},
 	}
 	return
 }
 
-node_create_function_call :: proc(range: TextRange, name: ^Node, args: [dynamic]^Node) -> (ret: ^Node) {
+node_create_function_call :: proc(
+	range: TextRange,
+	name: ^Node,
+	args: [dynamic]^Node,
+) -> (
+	ret: ^Node,
+) {
 	ret = new(Node)
 	ret^ = {
 		kind = .FunctionCall,
 		range = range,
-		children = { name },
+		children = {name},
+	}
+	for arg in args {
+		append(&ret.children, arg)
+	}
+	return
+}
+
+node_create_function :: proc(
+	range: TextRange,
+	name: [dynamic]u8,
+	return_type, body: ^Node,
+	args: [dynamic]^Node,
+) -> (
+	ret: ^Node,
+) {
+	ret = new(Node)
+	ret^ = {
+		kind = .Function,
+		range = range,
+		children = {return_type, body},
+		value = name,
 	}
 	for arg in args {
 		append(&ret.children, arg)
@@ -112,7 +149,7 @@ node_create_function_call :: proc(range: TextRange, name: ^Node, args: [dynamic]^Node) -> (ret: ^Node) {
 }
 
 node_print :: proc(node: ^Node, indent := 0) {
-	for i in 0..<indent {
+	for i in 0 ..< indent {
 		fmt.printf(" ")
 	}
 	if node == nil {
@@ -124,7 +161,7 @@ node_print :: proc(node: ^Node, indent := 0) {
 	data, ok := node.value.([dynamic]u8)
 	if ok {
 		fmt.printf("\"")
-		for i in 0..<len(data) {
+		for i in 0 ..< len(data) {
 			fmt.printf("%c", data[i])
 		}
 		fmt.printf("\" ")
@@ -145,7 +182,11 @@ node_create_use :: proc(range: TextRange, path, alias: [dynamic]u8) -> (ret: ^Node) {
 	path_ := path
 	// Check if the path ends with ".cat", if not, append it.
 	len_path := len(path_)
-	if len(path_) < 4 || path_[len_path - 4] != '.' || path_[len_path - 3] != 'c' || path_[len_path - 2] != 'a' || path_[len_path - 1] != 't' {
+	if len(path_) < 4 ||
+	   path_[len_path - 4] != '.' ||
+	   path_[len_path - 3] != 'c' ||
+	   path_[len_path - 2] != 'a' ||
+	   path_[len_path - 1] != 't' {
 		append(&path_, '.')
 		append(&path_, 'c')
 		append(&path_, 'a')
@@ -162,7 +203,7 @@ node_create_use :: proc(range: TextRange, path, alias: [dynamic]u8) -> (ret: ^Node) {
 	} else {
 		// Get the filename, and trucate the extension, then replace any special characters with _
 		new_alias := [dynamic]u8{}
-		for i in 0..<len(path) {
+		for i in 0 ..< len(path) {
 			if path[i] == '.' {
 				break
 			}
@@ -189,7 +230,7 @@ node_create_if :: proc(range: TextRange, condition, then, else_: ^Node) -> (ret: ^Node) {
 	ret^ = {
 		kind = .If,
 		range = range,
-		children = { condition, then },
+		children = {condition, then},
 	}
 	if else_ != nil {
 		append(&ret.children, else_)
@@ -197,14 +238,49 @@ node_create_if :: proc(range: TextRange, condition, then, else_: ^Node) -> (ret: ^Node) {
 	return
 }
 
+node_create_for :: proc(range: TextRange, init, condition, step, body: ^Node) -> (ret: ^Node) {
+	ret = new(Node)
+	ret^ = {
+		kind = .For,
+		range = range,
+		children = {init, condition, step, body},
+	}
+	return
+}
+
-node_create_variable :: proc(range: TextRange, name, type_, value: ^Node, is_const: bool) -> (ret: ^Node) {
+node_create_variable :: proc(
+	range: TextRange,
+	name, type_, value: ^Node,
+	is_const: bool,
+) -> (
+	ret: ^Node,
+) {
 	ret = new(Node)
 	ret^ = {
 		kind = .VariableDeclaration,
 		range = range,
-		children = { name, type_, value },
+		children = {name, type_, value},
 		value = is_const,
 	}
 	return
 }
+
+node_create_struct_enum_or_union :: proc(
+	range: TextRange,
+	kind: NodeKind,
+	name: [dynamic]u8,
+	fields: [dynamic]^Node,
+) -> (
+	ret: ^Node,
+) {
+	ret = new(Node)
+	ret^ = {
+		kind = kind,
+		range = range,
+		value = name,
+	}
+	for field in fields {
+		append(&ret.children, field)
+	}
+	return
+}
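
For orientation, a minimal sketch (not part of the commit) of how the constructors above compose. The zeroed TextRange and the name "main" are placeholder values:

	// Builds the tree the parser would produce for an empty `fn main {}`:
	// a Function node whose children are the (nil) return type and an empty Block.
	example_function_node :: proc() -> ^Node {
		r := TextRange{} // placeholder range; real ranges come from the lexer
		name: [dynamic]u8
		append(&name, 'm')
		append(&name, 'a')
		append(&name, 'i')
		append(&name, 'n')
		children: [dynamic]^Node // no statements: empty body block
		body := node_create_block(r, children)
		params: [dynamic]^Node // no parameters
		return node_create_function(r, name, nil, body, params)
	}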

lexer.odin

@@ -8,7 +8,6 @@ Lexer :: struct {
 	data: ^[dynamic]u8,
 	read_position: u64,
 	position: TextPosition,
 	char, next: u8,
 	last_token_kind: TokenKind,
 	should_return_semicolon: bool,
@@ -19,10 +18,7 @@ lexer_create :: proc(data: ^[dynamic]u8) -> ^Lexer {
 	lexer^ = {
 		data = data,
 		read_position = 0,
-		position = TextPosition {
-			line = 1,
-			column = 1,
-		},
+		position = TextPosition{line = 1, column = 1},
 	}
 	lexer_advance(lexer)
 	lexer_advance(lexer)
@@ -51,13 +47,17 @@ lexer_advance :: proc(lexer: ^Lexer) {
 @(private = "file")
 lexer_should_not_emit_semicolon :: proc(lexer: ^Lexer) -> bool {
-	return lexer.last_token_kind == .CloseBrace ||
+	return(
+		lexer.last_token_kind == .CloseBrace ||
 		lexer.last_token_kind == .Semicolon ||
 		lexer.last_token_kind == .EOF ||
 		lexer.last_token_kind == .Invalid ||
 		lexer.last_token_kind == .OpenParen ||
 		lexer.last_token_kind == .OpenBrace ||
 		lexer.last_token_kind == .OpenBracket ||
+		lexer.last_token_kind == .CloseParen ||
+		lexer.last_token_kind == .CloseBrace ||
+		lexer.last_token_kind == .CloseBracket ||
 		lexer.last_token_kind == .Add ||
 		lexer.last_token_kind == .Subtract ||
 		lexer.last_token_kind == .Multiply ||
@@ -73,7 +73,9 @@ lexer_should_not_emit_semicolon :: proc(lexer: ^Lexer) -> bool {
 		lexer.last_token_kind == .LessThan ||
 		lexer.last_token_kind == .GreaterThan ||
 		lexer.last_token_kind == .BitwiseLeftShift ||
-		lexer.last_token_kind == .BitwiseRightShift
+		lexer.last_token_kind == .BitwiseRightShift ||
+		lexer.last_token_kind == .Comma \
+	)
 }
 
 @(private = "file")
@@ -103,7 +105,7 @@ lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) {
 	if lexer.should_return_semicolon {
 		lexer.should_return_semicolon = false
-		return token_create(.Semicolon, TextRange { start = lexer.position, end = lexer.position })
+		return token_create(.Semicolon, TextRange{start = lexer.position, end = lexer.position})
 	}
 
 	defer lexer.last_token_kind = ret.kind
@@ -135,10 +137,14 @@ lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) {
 			crange.end = lexer.position
 			ret = token_create(.Arrow, crange)
 		}
-	case '*': ret = token_create(.Multiply, crange)
-	case '/': ret = token_create(.Divide, crange)
-	case '%': ret = token_create(.Modulo, crange)
-	case '`': ret = token_create(.Exponent, crange)
+	case '*':
+		ret = token_create(.Multiply, crange)
+	case '/':
+		ret = token_create(.Divide, crange)
+	case '%':
+		ret = token_create(.Modulo, crange)
+	case '`':
+		ret = token_create(.Exponent, crange)
 	case '=':
 		ret = token_create(.Assign, crange)
 		if lexer.next == '=' {
@@ -175,31 +181,50 @@ lexer_next :: proc(lexer: ^Lexer) -> (ret: Token) {
 			crange.end = lexer.position
 			ret = token_create(.BitwiseRightShift, crange)
 		}
-	case '&': ret = token_create(.BitwiseAnd, crange)
-	case '|': ret = token_create(.BitwiseOr, crange)
-	case '^': ret = token_create(.BitwiseXOR, crange)
-	case '~': ret = token_create(.BitwiseNot, crange)
-	case '(': ret = token_create(.OpenParen, crange)
-	case ')': ret = token_create(.CloseParen, crange)
-	case '[': ret = token_create(.OpenBracket, crange)
-	case ']': ret = token_create(.CloseBracket, crange)
-	case '{': ret = token_create(.OpenBrace, crange)
-	case '}': ret = token_create(.CloseBrace, crange)
-	case '?': ret = token_create(.Question, crange)
-	case ':': ret = token_create(.Colon, crange)
-	case '.': ret = token_create(.Dot, crange)
-	case ',': ret = token_create(.Comma, crange)
-	case ';': ret = token_create(.Semicolon, crange)
-	case '"': ret = lexer_read_string(lexer, .String, '\"')
-	case '\'': ret = lexer_read_string(lexer, .Character, '\'')
-	case 'a'..='z': fallthrough
-	case 'A'..='Z': fallthrough
+	case '&':
+		ret = token_create(.BitwiseAnd, crange)
+	case '|':
+		ret = token_create(.BitwiseOr, crange)
+	case '^':
+		ret = token_create(.BitwiseXOR, crange)
+	case '~':
+		ret = token_create(.BitwiseNot, crange)
+	case '(':
+		ret = token_create(.OpenParen, crange)
+	case ')':
+		ret = token_create(.CloseParen, crange)
+	case '[':
+		ret = token_create(.OpenBracket, crange)
+	case ']':
+		ret = token_create(.CloseBracket, crange)
+	case '{':
+		ret = token_create(.OpenBrace, crange)
+	case '}':
+		ret = token_create(.CloseBrace, crange)
+	case '?':
+		ret = token_create(.Question, crange)
+	case ':':
+		ret = token_create(.Colon, crange)
+	case '.':
+		ret = token_create(.Dot, crange)
+	case ',':
+		ret = token_create(.Comma, crange)
+	case ';':
+		ret = token_create(.Semicolon, crange)
+	case '"':
+		ret = lexer_read_string(lexer, .String, '\"')
+	case '\'':
+		ret = lexer_read_string(lexer, .Character, '\'')
+	case 'a' ..= 'z':
+		fallthrough
+	case 'A' ..= 'Z':
+		fallthrough
 	case '_':
 		ret = lexer_read_identifier(lexer)
 		should_advance = false
-	case '0'..='9':
+	case '0' ..= '9':
 		ret = lexer_read_number(lexer)
 		should_advance = false
 	case 0:
@@ -223,21 +248,37 @@ lexer_read_string :: proc(lexer: ^Lexer, kind: TokenKind, outer: u8) -> Token {
 	lexer_advance(lexer)
-	str : [dynamic]u8
+	str: [dynamic]u8
 	for lexer.char != outer {
 		if lexer.char == '\\' {
-			range := TextRange { start = lexer.position }
+			range := TextRange {
+				start = lexer.position,
+			}
 			lexer_advance(lexer)
 			switch lexer.char {
-			case 'n': append(&str, '\n'); break
-			case 't': append(&str, '\t'); break
-			case 'b': append(&str, '\b'); break
-			case 'r': append(&str, '\r'); break
-			case '\\': append(&str, '\\'); break
+			case 'n':
+				append(&str, '\n');break
+			case 't':
+				append(&str, '\t');break
+			case 'b':
+				append(&str, '\b');break
+			case 'r':
+				append(&str, '\r');break
+			case '\\':
+				append(&str, '\\');break
 			case:
 				range.end = lexer.position
-				append(&g_message_list,
-					message_create(.Warning, fmt.aprintf("Invalid string/character escape: %c at %s", lexer.char, "TODO LOCATION"), range),
+				append(
+					&g_message_list,
+					message_create(
+						.Warning,
+						fmt.aprintf(
+							"Invalid string/character escape: %c at %s",
+							lexer.char,
+							"TODO LOCATION",
+						),
+						range,
+					),
 				)
 			}
 			lexer_advance(lexer)
@@ -254,40 +295,21 @@ lexer_read_string :: proc(lexer: ^Lexer, kind: TokenKind, outer: u8) -> Token {
 @(private = "file")
 lexer_read_identifier :: proc(lexer: ^Lexer) -> Token {
-	crange := TextRange { start = lexer.position }
-	str : [dynamic]u8
+	crange := TextRange {
+		start = lexer.position,
+	}
+	str: [dynamic]u8
 	for libc.isalnum(i32(lexer.char)) != 0 || lexer.char == '_' {
 		append(&str, lexer.char)
 		crange.end = lexer.position
 		lexer_advance(lexer)
 	}
 
-	if compare_dyn_arr_string(&str, "fn") { return token_create(.Function, crange) }
-	else if compare_dyn_arr_string(&str, "struct") { return token_create(.Struct, crange) }
-	else if compare_dyn_arr_string(&str, "enum") { return token_create(.Enum, crange) }
-	else if compare_dyn_arr_string(&str, "union") { return token_create(.Union, crange) }
-	else if compare_dyn_arr_string(&str, "type") { return token_create(.Type, crange) }
-	else if compare_dyn_arr_string(&str, "use") { return token_create(.Use, crange) }
-	else if compare_dyn_arr_string(&str, "pub") { return token_create(.Pub, crange) }
-	else if compare_dyn_arr_string(&str, "let") { return token_create(.Let, crange) }
-	else if compare_dyn_arr_string(&str, "mut") { return token_create(.Mut, crange) }
-	else if compare_dyn_arr_string(&str, "as") { return token_create(.As, crange) }
-	else if compare_dyn_arr_string(&str, "in") { return token_create(.In, crange) }
-	else if compare_dyn_arr_string(&str, "if") { return token_create(.If, crange) }
-	else if compare_dyn_arr_string(&str, "else") { return token_create(.Else, crange) }
-	else if compare_dyn_arr_string(&str, "elif") { return token_create(.Elif, crange) }
-	else if compare_dyn_arr_string(&str, "for") { return token_create(.For, crange) }
-	else if compare_dyn_arr_string(&str, "break") { return token_create(.Break, crange) }
-	else if compare_dyn_arr_string(&str, "continue") { return token_create(.Continue, crange) }
-	else if compare_dyn_arr_string(&str, "switch") { return token_create(.Switch, crange) }
-	else if compare_dyn_arr_string(&str, "case") { return token_create(.Case, crange) }
-	else if compare_dyn_arr_string(&str, "ret") { return token_create(.Ret, crange) }
-	else if compare_dyn_arr_string(&str, "static") { return token_create(.Static, crange) }
-	else if compare_dyn_arr_string(&str, "defer") { return token_create(.Defer, crange) }
-	else if compare_dyn_arr_string(&str, "let") { return token_create(.Let, crange) }
-	else if compare_dyn_arr_string(&str, "and") { return token_create(.And, crange) }
-	else if compare_dyn_arr_string(&str, "or") { return token_create(.Or, crange) }
+	if compare_dyn_arr_string(
+		&str,
+		"fn",
+	) {return token_create(.Function, crange)} else if compare_dyn_arr_string(&str, "struct") {return token_create(.Struct, crange)} else if compare_dyn_arr_string(&str, "enum") {return token_create(.Enum, crange)} else if compare_dyn_arr_string(&str, "union") {return token_create(.Union, crange)} else if compare_dyn_arr_string(&str, "type") {return token_create(.Type, crange)} else if compare_dyn_arr_string(&str, "use") {return token_create(.Use, crange)} else if compare_dyn_arr_string(&str, "pub") {return token_create(.Pub, crange)} else if compare_dyn_arr_string(&str, "let") {return token_create(.Let, crange)} else if compare_dyn_arr_string(&str, "mut") {return token_create(.Mut, crange)} else if compare_dyn_arr_string(&str, "as") {return token_create(.As, crange)} else if compare_dyn_arr_string(&str, "in") {return token_create(.In, crange)} else if compare_dyn_arr_string(&str, "if") {return token_create(.If, crange)} else if compare_dyn_arr_string(&str, "else") {return token_create(.Else, crange)} else if compare_dyn_arr_string(&str, "elif") {return token_create(.Elif, crange)} else if compare_dyn_arr_string(&str, "for") {return token_create(.For, crange)} else if compare_dyn_arr_string(&str, "break") {return token_create(.Break, crange)} else if compare_dyn_arr_string(&str, "continue") {return token_create(.Continue, crange)} else if compare_dyn_arr_string(&str, "switch") {return token_create(.Switch, crange)} else if compare_dyn_arr_string(&str, "case") {return token_create(.Case, crange)} else if compare_dyn_arr_string(&str, "ret") {return token_create(.Ret, crange)} else if compare_dyn_arr_string(&str, "static") {return token_create(.Static, crange)} else if compare_dyn_arr_string(&str, "defer") {return token_create(.Defer, crange)} else if compare_dyn_arr_string(&str, "let") {return token_create(.Let, crange)} else if compare_dyn_arr_string(&str, "and") {return token_create(.And, crange)} else if compare_dyn_arr_string(&str, "or") {return token_create(.Or, crange)}
 
 	return token_create_u8(.Identifier, str, crange)
 }
@@ -313,7 +335,7 @@ lexer_read_number :: proc(lexer: ^Lexer) -> Token {
 		lexer_advance(lexer)
 	}
 
-	whole_part : u64 = 0
+	whole_part: u64 = 0
 	if read_mode == .Normal {
 		for libc.isdigit(i32(lexer.char)) != 0 && lexer.char > 0 {
 			whole_part = whole_part * 10 + u64(lexer.char) - '0'
@@ -338,7 +360,7 @@ lexer_read_number :: proc(lexer: ^Lexer) -> Token {
 		lexer_advance(lexer)
 
 		// FIXME: Move this to another procedure because this is repeating lmfao
-		fractional_part : u64 = 0
+		fractional_part: u64 = 0
 		if read_mode == .Normal {
 			for libc.isdigit(i32(lexer.char)) != 0 && lexer.char > 0 {
 				fractional_part = fractional_part * 10 + u64(lexer.char) - '0'
@@ -346,7 +368,14 @@ lexer_read_number :: proc(lexer: ^Lexer) -> Token {
 			lexer_advance(lexer)
 		}
 	} else if read_mode == .Hex {
-		append(&g_message_list, message_create(.Error, "Hexadecimal floating point numbers are not supported yet", crange))
+		append(
+			&g_message_list,
+			message_create(
+				.Error,
+				"Hexadecimal floating point numbers are not supported yet",
+				crange,
+			),
+		)
 		lowered := libc.tolower(i32(lexer.char))
 		for libc.isxdigit(lowered) != 0 && lexer.char > 0 {
 			digit := lowered - '0'
@@ -367,7 +396,7 @@ lexer_read_number :: proc(lexer: ^Lexer) -> Token {
 			count = count + 1
 		}
 
-		floating : f64 = 0
+		floating: f64 = 0
 		floating = f64(fractional_part) / math.pow_f64(10, f64(count)) + f64(whole_part)
 		return token_create_f64(.Float, floating, crange)
@@ -375,4 +404,3 @@ lexer_read_number :: proc(lexer: ^Lexer) -> Token {
 	return token_create_u64(.Integer, whole_part, crange)
 }

ols.json (new file)

@@ -0,0 +1,11 @@
+{
+	"collections": {
+		"name": "main",
+		"path": ".",
+	},
+	"enable_document_symbols": true,
+	"enable_semantic_tokens": true,
+	"enable_snippets": true,
+	"enable_references": true,
+}

parser.odin

@@ -4,7 +4,6 @@ import "core:fmt"
 Parser :: struct {
 	lexer: ^Lexer,
 	tok, next: Token,
 	can_be_function: bool,
 }
@@ -37,7 +36,14 @@ accept :: proc(parser: ^Parser, tok: TokenKind) -> bool {
 @(private = "file")
 expect :: proc(parser: ^Parser, tok: TokenKind) -> bool {
 	if !accept(parser, tok) {
-		append(&g_message_list, message_create(.Error, fmt.aprintf("Expected {}, got {} at {}", tok, parser.tok.kind, "TODO"), parser.tok.range))
+		append(
+			&g_message_list,
+			message_create(
+				.Error,
+				fmt.aprintf("Expected {}, got {} at {}", tok, parser.tok.kind, "TODO"),
+				parser.tok.range,
+			),
+		)
 		return false
 	}
 	return true
@@ -51,14 +57,16 @@ parser_parse :: proc(parser: ^Parser) -> (ret: ^Node) {
 @(private = "file")
 parser_parse_block :: proc(parser: ^Parser, end: TokenKind) -> (ret: ^Node) {
 	range := parser.tok.range
-	statements : [dynamic]^Node
+	statements: [dynamic]^Node
 	for parser.tok.kind != end && parser.tok.kind != .EOF {
 		if accept(parser, .Let) {
 			ret := parser_parse_definitions(parser)
 			expect(parser, .Semicolon)
 			for stmt in ret {
+				if stmt != nil {
 					append(&statements, stmt)
+				}
 			}
 		} else {
 			stmt := parser_parse_statement(parser)
 			if stmt != nil {
@@ -78,16 +86,133 @@ parser_parse_statement :: proc(parser: ^Parser) -> (ret: ^Node) {
 	} else if parser.tok.kind == .If {
 		expect(parser, .If)
 		ret = parser_parse_if_statement(parser)
-		ret.range.start = range_beg.start
 	} else if parser.tok.kind == .Use {
 		ret = parser_parse_use_statement(parser)
+		expect(parser, .Semicolon)
+	} else if parser.tok.kind == .OpenBrace {
+		ret = parser_parse_block(parser, .CloseBrace)
+	} else if parser.tok.kind == .For {
+		ret = parser_parse_for_statement(parser)
+	} else if parser.tok.kind == .Function {
+		ret = parser_parse_function_definition(parser)
+	} else if parser.tok.kind == .Struct {
+		ret = parser_parse_struct_definition(parser)
+		fmt.printf("{} {}\n", parser.tok, parser.next)
+	} else if parser.tok.kind == .Enum {
+		ret = parser_parse_enum_definition(parser)
+	} else if parser.tok.kind == .Union {
+		ret = parser_parse_union_definition(parser)
 	} else {
 		ret = parser_parse_expression(parser)
 		expect(parser, .Semicolon)
 	}
+
+	if ret != nil {
+		ret.range.start = range_beg.start
+	}
 	return
 }
+
+@(private = "file")
+parser_parse_struct_definition :: proc(parser: ^Parser) -> ^Node {
+	range := parser.tok.range
+	expect(parser, .Struct)
+
+	name: [dynamic]u8
+	if parser.tok.kind == .Identifier {
+		name = parser.tok.value.([dynamic]u8)
+		parser_next(parser)
+	} else {
+		expect(parser, .Identifier)
+	}
+
+	expect(parser, .OpenBrace)
+	fields := parser_parse_definitions(parser, .CloseBrace)
+	expect(parser, .CloseBrace)
+
+	return node_create_struct_enum_or_union(range, .Struct, name, fields)
+}
+
+@(private = "file")
+parser_parse_enum_definition :: proc(parser: ^Parser) -> ^Node {
+	range := parser.tok.range
+	expect(parser, .Enum)
+	panic("TODO, enum not implemented yet")
+}
+
+@(private = "file")
+parser_parse_union_definition :: proc(parser: ^Parser) -> ^Node {
+	range := parser.tok.range
+	expect(parser, .Union)
+
+	name: [dynamic]u8
+	if parser.tok.kind == .Identifier {
+		name = parser.tok.value.([dynamic]u8)
+		parser_next(parser)
+	} else {
+		expect(parser, .Identifier)
+	}
+
+	expect(parser, .OpenBrace)
+	fields := parser_parse_definitions(parser, .CloseBrace)
+	expect(parser, .CloseBrace)
+
+	return node_create_struct_enum_or_union(range, .Union, name, fields)
+}
+
+@(private = "file")
+parser_parse_function_definition :: proc(parser: ^Parser) -> ^Node {
+	expect(parser, .Function)
+
+	name: [dynamic]u8
+	if parser.tok.kind == .Identifier {
+		name = parser.tok.value.([dynamic]u8)
+		parser_next(parser)
+	} else {
+		expect(parser, .Identifier)
+	}
+
+	params: [dynamic]^Node
+	if accept(parser, .OpenParen) {
+		params = parser_parse_definitions(parser, .CloseParen)
+		expect(parser, .CloseParen)
+	} else {
+		params = {}
+	}
+
+	type: ^Node = nil
+	if parser.tok.kind != .OpenBrace {
+		type = parser_parse_type(parser)
+	}
+
+	expect(parser, .OpenBrace)
+	body := parser_parse_block(parser, .CloseBrace)
+
+	return node_create_function(parser.tok.range, name, type, body, params)
+}
+
+@(private = "file")
+parser_parse_for_statement :: proc(parser: ^Parser) -> ^Node {
+	range := parser.tok.range
+	expect(parser, .For)
+
+	if accept(parser, .OpenBrace) {
+		body := parser_parse_block(parser, .CloseBrace)
+		return node_create_for(range, nil, nil, nil, body)
+	}
+
+	if parser.tok.kind == .Let {
+		panic("TODO, let in for not implemented yet")
+	}
+
+	init := parser_parse_expression(parser)
+	if accept(parser, .OpenBrace) {
+		body := parser_parse_block(parser, .CloseBrace)
+		return node_create_for(range, nil, init, nil, body)
+	}
+	expect(parser, .Semicolon)
+
+	condition: ^Node = nil
+	if parser.tok.kind != .Semicolon {
+		condition = parser_parse_expression(parser)
+	}
+	expect(parser, .Semicolon)
+
+	if accept(parser, .OpenBrace) {
+		body := parser_parse_block(parser, .CloseBrace)
+		return node_create_for(range, init, condition, nil, body)
+	}
+
+	after := parser_parse_expression(parser)
+	expect(parser, .OpenBrace)
+	body := parser_parse_block(parser, .CloseBrace)
+	return node_create_for(range, init, condition, after, body)
+}
 
 @(private = "file")
 parser_parse_type :: proc(parser: ^Parser) -> (ret: ^Node) {
 	// FIXME: Add more types
@@ -96,10 +221,14 @@ parser_parse_type :: proc(parser: ^Parser) -> (ret: ^Node) {
 		ret = node_create_value(.Identifier, range, parser.tok.value.([dynamic]u8))
 		parser_next(parser)
 	} else {
-		append(&g_message_list, message_create(
-			.Error,
-			fmt.aprintf("Expected type, got {} at {}", parser.tok.kind, "TODO"),
-			parser.tok.range))
+		append(
+			&g_message_list,
+			message_create(
+				.Error,
+				fmt.aprintf("Expected type, got {} at {}", parser.tok.kind, "TODO"),
+				parser.tok.range,
+			),
+		)
 		ret = nil
 	}
 	return
@@ -108,8 +237,8 @@ parser_parse_type :: proc(parser: ^Parser) -> (ret: ^Node) {
 @(private = "file")
 parser_parse_definitions :: proc(parser: ^Parser, end := TokenKind.Semicolon) -> [dynamic]^Node {
 	range := parser.tok.range
-	vars : [dynamic]^Node
-	type : ^Node = nil
+	vars: [dynamic]^Node
+	type: ^Node = nil
 	are_constants := false
 	uninitialized := false
 	for parser.tok.kind != end && parser.tok.kind != .EOF {
@@ -137,8 +266,8 @@ parser_parse_definitions :: proc(parser: ^Parser, end := TokenKind.Semicolon) -> [dynamic]^Node {
 			uninitialized = true
 		}
 
-		for i in 0..<len(names) {
-			value : ^Node = nil
+		for i in 0 ..< len(names) {
+			value: ^Node = nil
 			if uninitialized == false {
 				value = parser_parse_expression(parser)
 			}
@@ -161,7 +290,7 @@ parser_parse_definitions :: proc(parser: ^Parser, end := TokenKind.Semicolon) -> [dynamic]^Node {
 parser_parse_use_statement :: proc(parser: ^Parser) -> ^Node {
 	range := parser.tok.range
 	expect(parser, .Use)
-	alias : [dynamic]u8
+	alias: [dynamic]u8
 	if parser.tok.kind == .Identifier {
 		alias = parser.tok.value.([dynamic]u8)
 		parser_next(parser)
@@ -197,7 +326,11 @@ parser_parse_expression :: proc(parser: ^Parser) -> ^Node {
 }
 
 @(private = "file")
-parser_parse_binary_expression :: proc(parser: ^Parser, kinds: []TokenKind, next: proc(parser: ^Parser) -> ^Node) -> ^Node {
+parser_parse_binary_expression :: proc(
+	parser: ^Parser,
+	kinds: []TokenKind,
+	next: proc(parser: ^Parser) -> ^Node,
+) -> ^Node {
 	lhs := next(parser)
 	for kind in kinds {
 		for accept(parser, kind) {
@@ -224,10 +357,14 @@ parser_parse_arrow :: proc(parser: ^Parser) -> ^Node {
 		   rhs.kind != .Identifier &&
 		   rhs.kind != .FieldAccess &&
 		   rhs.kind != .IndexAccess {
-			append(&g_message_list, message_create(
-				.Error,
-				fmt.aprintf("Expected function call, got {} at {}", rhs.kind, "TODO"),
-				rhs.range))
+			append(
+				&g_message_list,
+				message_create(
+					.Error,
+					fmt.aprintf("Expected function call, got {} at {}", rhs.kind, "TODO"),
+					rhs.range,
+				),
+			)
 			return lhs
 		}
 		if rhs.kind != .FunctionCall {
@@ -246,7 +383,11 @@ parser_parse_equality :: proc(parser: ^Parser) -> ^Node {
 @(private = "file")
 parser_parse_comparison :: proc(parser: ^Parser) -> ^Node {
-	return parser_parse_binary_expression(parser, {.LessThan, .LessThanOrEqual, .GreaterThan, .GreaterThanOrEqual}, parser_parse_addition)
+	return parser_parse_binary_expression(
+		parser,
+		{.LessThan, .LessThanOrEqual, .GreaterThan, .GreaterThanOrEqual},
+		parser_parse_addition,
+	)
 }
@(private = "file") @(private = "file")
@ -256,7 +397,11 @@ parser_parse_addition :: proc(parser: ^Parser) -> ^Node {
@(private = "file") @(private = "file")
parser_parse_multiplication :: proc(parser: ^Parser) -> ^Node { parser_parse_multiplication :: proc(parser: ^Parser) -> ^Node {
return parser_parse_binary_expression(parser, {.Multiply, .Divide, .Modulo}, parser_parse_exponent) return parser_parse_binary_expression(
parser,
{.Multiply, .Divide, .Modulo},
parser_parse_exponent,
)
} }
@(private = "file") @(private = "file")
@ -266,7 +411,11 @@ parser_parse_exponent :: proc(parser: ^Parser) -> ^Node {
@(private = "file") @(private = "file")
parser_parse_bitwise :: proc(parser: ^Parser) -> ^Node { parser_parse_bitwise :: proc(parser: ^Parser) -> ^Node {
return parser_parse_binary_expression(parser, {.BitwiseAnd, .BitwiseOr, .BitwiseXOR, .BitwiseLeftShift, .BitwiseRightShift}, parser_parse_prefix_2) return parser_parse_binary_expression(
parser,
{.BitwiseAnd, .BitwiseOr, .BitwiseXOR, .BitwiseLeftShift, .BitwiseRightShift},
parser_parse_prefix_2,
)
} }
@(private = "file") @(private = "file")
@@ -360,14 +509,26 @@ parser_parse_factor :: proc(parser: ^Parser) -> (ret: ^Node) {
 		prev := parser.can_be_function
 		parser.can_be_function = false
 		if accept(parser, .Dot) {
-			ret = node_create_field_access({ ret.range.start, parser.tok.range.start }, ret, parser_parse_factor(parser))
+			ret = node_create_field_access(
+				{ret.range.start, parser.tok.range.start},
+				ret,
+				parser_parse_factor(parser),
+			)
 		}
 		parser.can_be_function = prev
-		if parser.can_be_function && parser.tok.kind != .CloseParen && parser.tok.kind != .Semicolon && parser.tok.kind != .Arrow && parser.tok.kind != .EOF {
+		if parser.can_be_function &&
+		   parser.tok.kind != .CloseParen &&
+		   parser.tok.kind != .Semicolon &&
+		   parser.tok.kind != .Arrow &&
+		   parser.tok.kind != .EOF {
 			prev := parser.can_be_function
 			parser.can_be_function = false
-			args : [dynamic]^Node
-			for parser.tok.kind != .CloseParen && parser.tok.kind != .Semicolon && parser.tok.kind != .Arrow && parser.tok.kind != .EOF && parser_is_factor_token_or_prefix(parser.tok.kind) {
+			args: [dynamic]^Node
+			for parser.tok.kind != .CloseParen &&
+			    parser.tok.kind != .Semicolon &&
+			    parser.tok.kind != .Arrow &&
+			    parser.tok.kind != .EOF &&
+			    parser_is_factor_token_or_prefix(parser.tok.kind) {
 				append(&args, parser_parse_expression(parser))
 			}
 			ret = node_create_function_call(ret.range, ret, args)
@@ -380,8 +541,14 @@ parser_parse_factor :: proc(parser: ^Parser) -> (ret: ^Node) {
 		parser.can_be_function = prev
 		expect(parser, .CloseParen)
 	} else {
-		append(&g_message_list, message_create(.Error, fmt.aprintf("Unexpected factor token {} at {}", parser.tok.kind, "TODO"), parser.tok.range))
+		append(
+			&g_message_list,
+			message_create(
+				.Error,
+				fmt.aprintf("Unexpected factor token {} at {}", parser.tok.kind, "TODO"),
+				parser.tok.range,
+			),
+		)
 	}
 	return
 }
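
Read against the branches of parser_parse_for_statement above, the parser accepts four loop-header shapes. A sketch in the language being parsed (the identifiers and the `a = a - 1` assignment are assumptions, not taken from the commit):

	for { }                            \ no header: loop forever
	for a > 0 { }                      \ condition only
	for a = 10; a > 0; { }             \ init and condition; note the second `;` before the body
	for a = 10; a > 0; a = a - 1 { }   \ init, condition and step

A `let` declaration in the header still panics with "TODO, let in for not implemented yet", which is why the let-based loops in the test file below stay commented out.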

(test source file)

@@ -4,7 +4,7 @@ use "directory/library'with'special'chars"
 
 \ This is a comment, it should be ignored by the compiler
-fmt.printf "%d + %d = %d File length: %d" a b a + b (io.file_size "file.txt")
+fmt.printf "%i + %i = %i File length: %i\n" a b a + b (io.file_size "file.txt")
 fmt.println "Hello world!"
 
 let a := 123 \ This is another comment, that should be ignored by the compiler
@@ -19,3 +19,45 @@ if a == 1 {
 } else {
 	aaaaaaa
 }
+
+for {
+	\ Infinite loop
+}
+
+for a > 0 {
+	\ Countdown loop
+	fmt.printf "%i\n" a--
+}
+
+\for let i : i32 = 0; i < 20; i++ {
+\	\ Loop that goes up to 19
+\}
+\
+\for let i : i32 in 0..<20 {
+\	\ Shorthand for above
+\}
+
+fn name {}
+fn name returntype {}
+fn name () {}
+fn name(param1 param2 param3: i32, param4: u32) u32 { }
+
+struct StructName {
+	field1 field2 field3: i32,
+	field4: u32,
+}
+
+union MyUnion {
+	data: StructName,
+	some_other_data: EnumName,
+}
+
+\enum EnumName {
+\	Value1,
+\	Value2,
+\	Value3,
+\	Value4,
+\}
+
+\EnumName.Value1