Skip to content

Commit

Permalink
Change main file location
Browse files Browse the repository at this point in the history
  • Loading branch information
thedavidchu committed Jul 13, 2024
1 parent f966ab7 commit e192917
Show file tree
Hide file tree
Showing 6 changed files with 58 additions and 11 deletions.
3 changes: 1 addition & 2 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,9 @@ jobs:
- name: Compile
run: |
# For some reason, if we change to the compiler directory, Python complains.
export PYTHONPATH="${PYTHONPATH}:/home/runner/work/dolang/dolang/src/"
for x in fibonacci helloworld math_ops nested_if sum_three
do
python src/compiler/lol.py -i examples/$x.lol -o results
python src/main.py -i examples/$x.lol -o results
gcc results/$x-*.c
./a.out
done
5 changes: 5 additions & 0 deletions examples/invalid/bad_token.lol
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
/* Bad token: `1 ++ 2` uses the `++` operator, which the lexer tokenizes
   as PLUS_PLUS but does not implement — presumably this fixture exists so
   the test suite can confirm the lexer reports an "unimplemented token"
   error instead of crashing (TODO: confirm against the lexer tests). */
function main() -> i32 {
let a: i32 = 1 ++ 2;
return 0;
}
15 changes: 10 additions & 5 deletions src/compiler/lexer/lol_lexer.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,13 +206,11 @@ def _is_punctuation_implemented(token_type: LolTokenType) -> bool:
and len(token_type.value) >= 2
and token_type.value[1]
in {
LolTokenType.NOT_YET_IMPLEMENTED,
LolTokenType.WONT_BE_IMPLEMENTED,
LolTokenType.NOT_YET_IMPLEMENTED.value,
LolTokenType.WONT_BE_IMPLEMENTED.value,
}
):
raise NotImplementedError(
f"token_type {token_type.n} not implemented"
)
return False
return True

@staticmethod
Expand Down Expand Up @@ -246,6 +244,13 @@ def lex_punctuation(stream: CharacterStream):
)

if not Lexer._is_punctuation_implemented(token_type):
err = LolError(
stream.get_text(),
start_pos,
start_pos + len(lexeme),
"unimplemented token",
)
print(err)
raise NotImplementedError

return LolToken(
Expand Down
44 changes: 41 additions & 3 deletions src/compiler/lexer/lol_lexer_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,44 @@ class LolTokenType(Enum):
NOT = auto()


# UNIMPLEMENTED_TOKEN_TYPES: set[LolTokenType] = {
# # Unimplemented in tokenizer
# EXCLAMATION, # !
# AT, # @
# PERCENT, # %
# CIRCUMFLEX, # ^
# AMPERSAND, # &
# QUESTION, # ?
# VBAR, # |
# # Doubled characters
# RSHIFT, # >>
# LSHIFT, # <<
# GREATER_EQUAL, # >=
# LESSER_EQUAL, # <=
# EQUAL_EQUAL, # ==
# NOT_EQUAL, # !=
# # Unimplemented in tokenizer (no plan to implement these yet)
# STAR_STAR, # **
# PLUS_PLUS, # ++
# MINUS_MINUS, # --
# SLASH_SLASH, # //
# # COLON_EQUAL = auto() # :=
# # STAR_EQUAL = WONT_BE_IMPLEMENTED # *=
# # PLUS_EQUAL = WONT_BE_IMPLEMENTED # +=
# # MINUS_EQUAL = WONT_BE_IMPLEMENTED # -=
# # SLASH_EQUAL = WONT_BE_IMPLEMENTED # /=
# # RSHIFT_EQUAL = WONT_BE_IMPLEMENTED # >>=
# # LSHIFT_EQUAL = WONT_BE_IMPLEMENTED # <<=
# # PERCENT_EQUAL = WONT_BE_IMPLEMENTED # %=
# # CIRCUMFLEX_EQUAL = WONT_BE_IMPLEMENTED # ^=
# # AMPERSAND_EQUAL = WONT_BE_IMPLEMENTED # &=
# # QUESTION_EQUAL = WONT_BE_IMPLEMENTED # ?=
# # VBAR_EQUAL = WONT_BE_IMPLEMENTED # |=
# # AT_EQUAL = WONT_BE_IMPLEMENTED # @=
# # BSLASH = auto(), WONT_BE_IMPLEMENTED # \
# }


SYMBOL_CONTROL: Dict[Optional[str], Union[Dict, LolTokenType]] = {
"(": {None: LolTokenType.LPAREN},
")": {None: LolTokenType.RPAREN},
Expand All @@ -109,7 +147,7 @@ class LolTokenType(Enum):
".": {None: LolTokenType.DOT},
";": {None: LolTokenType.SEMICOLON},
"?": {None: LolTokenType.QUESTION},
"|": {None: LolTokenType.QUESTION},
"|": {None: LolTokenType.VBAR},
"&": {None: LolTokenType.AMPERSAND},
"^": {None: LolTokenType.CIRCUMFLEX},
"@": {None: LolTokenType.AT},
Expand Down Expand Up @@ -155,7 +193,7 @@ class LolTokenType(Enum):
}


class Token:
class LolToken:
def __init__(
self,
lexeme: str,
Expand Down Expand Up @@ -185,7 +223,7 @@ def get_token_type_as_str(self):
def __repr__(self):
"""Pretty print the token. This is NOT for serialization, because the
token type should be an integer id so that it's easier to parse."""
return f"Token(lexeme={repr(self.lexeme)}, token_type={self.get_token_type_as_str()}, start_idx={self.start_position}, full_text?={isinstance(self.full_text, str)})"
return f"LolToken(lexeme={repr(self.lexeme)}, token_type={self.get_token_type_as_str()}, start_idx={self.start_position}, full_text?={isinstance(self.full_text, str)})"

def get_line_and_column_numbers(self) -> Optional[Tuple[int, int]]:
if self.start_position is None or self.full_text is None:
Expand Down
File renamed without changes.
2 changes: 1 addition & 1 deletion test/compiler/test_lolc.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from common import add_compiler_to_sys_path

add_compiler_to_sys_path()
from compiler.main import LolModule
from main import LolModule


def lol_compile(input_file: str, output_dir: str = "results"):
Expand Down

0 comments on commit e192917

Please sign in to comment.