Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for arrays to compiler #73

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions examples/array.lol
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
/* Demonstrate array syntax */
module io = import("stdio.h");

function main() -> i32 {
    /* An array literal uses square brackets; the declared type names the
       element type inside `Array[...]`. */
    let array: Array[i32] = [0, 1, 2, 3];
    /* Elements are read back with zero-based subscripts, `array[i]`. */
    io::printf("Array: [%d, %d, %d, %d]\n",
        array[0], array[1], array[2], array[3]);
    return 0;
}
78 changes: 58 additions & 20 deletions src/compiler/parser/lol_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,15 @@ def get_name_as_str(self):
return self.name.name


@frozen_dataclass
class LolParserItemAccess(LolParserGeneric):
    """AST node for a subscript / item access, e.g. `x[0]` or `x[a, b]`."""

    # Identifier of the object being indexed (the `x` in `x[0]`).
    name: LolParserIdentifier
    # The comma-separated expressions between the square brackets.
    arguments: List[LolParserExpression]

    def get_name_as_str(self) -> str:
        """Return the indexed identifier's name as a plain string."""
        return self.name.name


@frozen_dataclass
class LolParserVariableDefinition(LolParserGeneric):
name: LolParserIdentifier
Expand Down Expand Up @@ -251,35 +260,66 @@ def parse_parenthetic_expression(
return ret

@staticmethod
def parse_comma_separated_expressions(
    stream: TokenStream, start_position: int, end_token_type: LolTokenType
) -> List[LolParserExpression]:
    """Parse zero or more comma-separated value expressions up to and
    including the terminator token of type ``end_token_type`` (RPAREN for
    call arguments, RSQB for lists and item accesses).

    The opening delimiter must already have been consumed by the caller.

    Raises:
        ValueError: after printing a ``LolError`` diagnostic, when an
            expression is followed by neither a COMMA nor the terminator.
    """
    args: List[LolParserExpression] = []
    token = stream.get_token()
    # Check if empty set of arguments
    if token.is_type(end_token_type):
        eat_token(stream, end_token_type)
        return args
    # At this point, we have at least one argument (or error)
    while True:
        args.append(Parser.parse_value_expression(stream))
        token = stream.get_token()
        if token.is_type(end_token_type):
            eat_token(stream, end_token_type)
            break
        elif token.is_type(LolTokenType.COMMA):
            eat_token(stream, LolTokenType.COMMA)
            continue
        else:
            error_msg = f"Expected COMMA or {end_token_type.name}, got {token.token_type.name}"
            # Anchor the diagnostic at the offending token, not at the start
            # of the whole argument list, so the reported span lands where
            # the problem actually is.
            LolError.print_error(
                stream.path, get_start(token), get_end(token), error_msg
            )
            raise ValueError(error_msg)
    return args

@staticmethod
def parse_func_call_args(
    stream: TokenStream, func_identifier: LolParserIdentifier
) -> LolParserFunctionCall:
    """Parse the `(arg, ...)` following *func_identifier* into a call node."""
    begin = func_identifier.start_position
    eat_token(stream, LolTokenType.LPAREN)
    call_args = Parser.parse_comma_separated_expressions(
        stream, begin, LolTokenType.RPAREN
    )
    # The most recently consumed token (offset -1) is the closing RPAREN.
    finish = get_end(stream.get_token(offset=-1))
    return LolParserFunctionCall(begin, finish, func_identifier, call_args)

@staticmethod
def parse_item_access_args(
    stream: TokenStream, func_identifier: LolParserIdentifier
) -> LolParserItemAccess:
    """Parse the `[arg, ...]` following *func_identifier* into an
    item-access node, e.g. `array[0]`.

    Fix: the return annotation previously said ``LolParserFunctionCall``
    even though this function constructs and returns ``LolParserItemAccess``.
    """
    start_pos = func_identifier.start_position
    eat_token(stream, LolTokenType.LSQB)
    args = Parser.parse_comma_separated_expressions(
        stream, start_pos, LolTokenType.RSQB
    )
    # Token at offset -1 is the RSQB just consumed by the helper above.
    end_pos = get_end(stream.get_token(offset=-1))
    return LolParserItemAccess(start_pos, end_pos, func_identifier, args)

@staticmethod
def parse_list(stream: TokenStream) -> list[LolParserExpression]:
    """Parse a list literal `[expr, ...]`, returning its element expressions."""
    begin = get_start(stream.get_token())
    eat_token(stream, LolTokenType.LSQB)
    # The helper consumes everything up to and including the closing RSQB.
    return Parser.parse_comma_separated_expressions(
        stream, begin, LolTokenType.RSQB
    )

@staticmethod
def parse_identifier_with_namespace_separator(
stream: TokenStream, identifier_leaf: LolParserIdentifier
Expand Down Expand Up @@ -336,7 +376,7 @@ def parse_leading_identifier(
if token.is_type(LolTokenType.LPAREN):
return Parser.parse_func_call_args(stream, identifier_leaf)
elif token.is_type(LolTokenType.LSQB):
raise ValueError("accesses not supported yet... i.e. `x[100]`")
return Parser.parse_item_access_args(stream, identifier_leaf)
else:
return LolParserIdentifier(
identifier_leaf.start_position,
Expand Down Expand Up @@ -368,6 +408,8 @@ def parse_primary(stream: TokenStream) -> LolParserExpression:
return Parser.parse_literal(stream)
elif token.is_type(LolTokenType.LPAREN):
return Parser.parse_parenthetic_expression(stream)
elif token.is_type(LolTokenType.LSQB):
return Parser.parse_list(stream)
else:
error_msg = f"unrecognized primary {token}"
LolError.print_error(
Expand Down Expand Up @@ -474,11 +516,7 @@ def parse_expression(stream: TokenStream) -> LolParserExpression:

@staticmethod
def parse_type_expression(stream: TokenStream) -> LolParserTypeExpression:
    """Parse a type expression.

    Type expressions now share the general expression grammar (previously
    only a single identifier token was accepted), so a parameterized type
    such as `Array[i32]` parses as an item access on the identifier.
    """
    return Parser.parse_expression(stream)

@staticmethod
def parse_value_expression(stream: TokenStream) -> LolParserValueExpression:
Expand Down