 # IMPORTS #
 ###########

-import ast
 from typing import (
     Any,
     List,
@@ -71,9 +70,7 @@ def __repr__(self) -> str:
         :return: String representation of the token.
         """

-        return (
-            "{" + self.type + ":'" + self.value + "'}"
-        )  # TODO (ElBe): Remove manual string formatting
+        return f"{self.type}:{self.value!r}"


 class LexerError(BaseException):
@@ -111,7 +108,12 @@ class Lexer:
     def __init__(self, text: str):
         self.text = text
         self.separators = [" ", "\t", "\n"]
-        self.double_marks = {"==": "EQUAL", "++": "COUNT_UP", "--": "COUNT_DOWN"}
+        self.double_marks = {
+            "==": "EQUAL",
+            "===": "TYPE_EQUAL",
+            "++": "COUNT_UP",
+            "--": "COUNT_DOWN",
+        }
         self.marks = {
             ";": "END_CMD",
             "=": "SET",
@@ -131,7 +133,7 @@ def __init__(self, text: str):
131133 "*" : "MULTIPLY" ,
132134 "/" : "DIVIDE" ,
133135 "%" : "MODULO" ,
134- "//." : "CHILD" , # Duplicate, needs escaping
136+ " ." . replace ( " " , "" ) : "CHILD" , # Duplicate, needs escaping
135137 "," : "SEPERATOR" ,
136138 }
137139 self .keywords = {
@@ -161,7 +163,6 @@ def __init__(self, text: str):
161163 "list" ,
162164 "str" ,
163165 "string" ,
164-
165166 "None" ,
166167 "Null" ,
167168 ]
@@ -214,13 +215,6 @@ def gettoken(string: str, line: int, column: int) -> LexerToken | None:
             else:
                 raise LexerError("Unrecognized Pattern: '" + string + "'", line, column)

-        def replace_none(ar):  # What's that?
-            n = []
-            for el in ar:
-                if el is not None:
-                    n.append(el)
-            return n
-
         line = 1
         comment = 0
         column = 1
@@ -276,13 +270,13 @@ def replace_none(ar): # What's that?
                 buffer += self.text[index]

             index += 1
-        self.tokens = replace_none(self.tokens)
-        return self.tokens
+
+        return [str(token) for token in self.tokens if token is not None]


 if __name__ == "__main__":
     with open("../test.ilang") as test:
         data = test.read()

     lexer = Lexer(data)
-    print(lexer.lex())
+    print("\n".join(lexer.lex()))
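
For reference, a minimal usage sketch of the reworked lex() after this change. The module name, the input snippet, and the example token shown are illustrative assumptions, not taken from the repository:

    # Hypothetical usage; assumes the Lexer above is importable from a module named "lexer".
    from lexer import Lexer

    source = "x == 1;"            # made-up source snippet
    tokens = Lexer(source).lex()  # now a list of strings, with None entries filtered out

    # Each entry is rendered via LexerToken.__repr__, e.g. a "==" mark comes out as "EQUAL:'=='".
    print("\n".join(tokens))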