This repository was archived by the owner on Apr 25, 2023. It is now read-only.

Commit e8a982d

Resolved #5 for the (hopefully) final time by doing something overcomplicated that I could have done differently
1 parent 1f17245 commit e8a982d

1 file changed: +24, -21 lines


Main/lexer.py

Lines changed: 24 additions & 21 deletions
@@ -313,6 +313,7 @@ def lex(self, text: Optional[str] = None) -> Optional[List[LexerToken]]:
         line = 1
         comment = False
         multiline_comment = False
+        append_newline = False
         helper = 0
         column = 1
         in_string = False
@@ -322,20 +323,16 @@ def lex(self, text: Optional[str] = None) -> Optional[List[LexerToken]]:
         try:
             for index, character in enumerate(self.text):
                 helper -= 1
+
                 self.tokens = [token for token in self.tokens if token is not None]
 
                 if character == "\n":
+                    append_newline = True
+
                     line += 1
                     column = 1
                     comment = False
 
-                    try:
-                        if not str(self.tokens[-1].type) == "SEMICOLON" or str(self.tokens[-1].type).endswith("CLOSE"):
-                            print("Error: Line was not closed.")
-                            sys.exit(5)
-                    except IndexError:
-                        pass
-
                 else:
                     column += 1
 
@@ -358,7 +355,9 @@ def lex(self, text: Optional[str] = None) -> Optional[List[LexerToken]]:
                 if character in ['"', "'"]:
                     in_string = not in_string
                     if not in_string:
-                        self.tokens.append(LexerToken("STRING", buffer))
+                        self.tokens.append(
+                            LexerToken("STRING", buffer.getvalue())
+                        )
 
                         buffer.close()
                         buffer = io.StringIO()
@@ -367,9 +366,7 @@ def lex(self, text: Optional[str] = None) -> Optional[List[LexerToken]]:
                     buffer.write(character)
 
                 elif self.text[index] in SEPARATORS:
-                    self.tokens.append(
-                        gettoken(buffer.getvalue(), line, column)
-                    )
+                    self.tokens.append(gettoken(buffer.getvalue(), line, column))
 
                     buffer.close()
                     buffer = io.StringIO()
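
The STRING change above is a bug fix as much as a reformat: the old code appended the io.StringIO object itself to the token, and that buffer is closed on the very next line, so the token would be left holding a closed, empty buffer. getvalue() copies the text out while the buffer is still open. A quick illustration of the underlying StringIO behavior:

    import io

    buffer = io.StringIO()
    buffer.write("hello")

    token_value = buffer.getvalue()  # copy the text out while the buffer is open
    buffer.close()

    print(token_value)  # hello
    # Calling buffer.getvalue() after close() raises
    # ValueError: I/O operation on closed file.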
@@ -404,24 +401,27 @@ def lex(self, text: Optional[str] = None) -> Optional[List[LexerToken]]:
 
                 else:
                     buffer.write(character)
+                if append_newline:
+                    append_newline = False
+                    self.tokens.append(LexerToken("NEWLINE", "\n"))
 
         except IndexError:
             pass
         finally:
             buffer.close()
 
         self.tokens = [token for token in self.tokens if token is not None]
+        modified_tokens = {}
+
         for index, token in enumerate(self.tokens):
             try:
+                if token.type == "NEWLINE" and index == 0:
+                    self.tokens.pop(index)
+                    index -= 1
                 if token.type == "INT" and self.tokens[index - 1].type == "DOT":
-                    self.tokens.insert(
-                        index + 1,
-                        LexerToken(
-                            "FLOAT",
-                            str(self.tokens[index - 2].value)
-                            + "."
-                            + str(self.tokens[index].value),
-                        ),
+                    modified_tokens[index - 2] = LexerToken(
+                        "FLOAT",
+                        f"{self.tokens[index-2].value}.{self.tokens[index].value}",
                     )
 
                     self.tokens.pop(index)
@@ -430,6 +430,9 @@ def lex(self, text: Optional[str] = None) -> Optional[List[LexerToken]]:
             except IndexError:
                 pass
 
+        for index, token in modified_tokens.items():
+            self.tokens.insert(index, token)
+
         return self.tokens
 
 
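The modified_tokens dict is presumably the "overcomplicated" part the commit message refers to: instead of inserting the merged FLOAT token while iterating, which shifts every later index under the running loop, replacements are recorded by position during the scan and spliced in afterwards. A minimal standalone sketch of that two-pass pattern, using a namedtuple as a stand-in for LexerToken (an assumption; the real class is defined elsewhere in Main/lexer.py):

    from collections import namedtuple

    # Simplified stand-in for the lexer's token class (assumption).
    LexerToken = namedtuple("LexerToken", ["type", "value"])

    def coalesce_floats(tokens):
        """Merge INT DOT INT runs into one FLOAT token: record the
        replacements while scanning, splice them in afterwards so
        indices never shift mid-iteration."""
        modified = {}     # position of the first INT -> replacement token
        consumed = set()  # indices swallowed by a merge
        for index, token in enumerate(tokens):
            if (index >= 2
                    and token.type == "INT"
                    and tokens[index - 1].type == "DOT"
                    and tokens[index - 2].type == "INT"):
                modified[index - 2] = LexerToken(
                    "FLOAT", f"{tokens[index - 2].value}.{token.value}"
                )
                consumed.update({index - 2, index - 1, index})
        result = []
        for index, token in enumerate(tokens):
            if index in modified:
                result.append(modified[index])
            if index not in consumed:
                result.append(token)
        return result

    tokens = [LexerToken("INT", "12"), LexerToken("DOT", "."), LexerToken("INT", "34")]
    print(coalesce_floats(tokens))  # [LexerToken(type='FLOAT', value='12.34')]
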
@@ -491,8 +494,8 @@ def lex(self, text: Optional[str] = None) -> Optional[List[LexerToken]]:
         data = file.read()
     except (IndexError, FileNotFoundError):
         data = """
-int i = 1234;
-float f = 12.34;
+int i = 1234
+float f = 12.34
 """
 
 lexer = Lexer(data)
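
With the SEMICOLON check gone, the sample program no longer needs trailing semicolons, and line ends become NEWLINE tokens instead. The append_newline flag defers the token: it is set as soon as '\n' is seen, but the token is only appended at the bottom of the loop body, after the character has been processed, so the NEWLINE lands behind whatever token the newline terminated. A toy sketch of that deferral, assuming a whitespace-split scanner in place of the real gettoken/SEPARATORS machinery:

    def lex_lines(text):
        """Toy scanner showing the deferred-NEWLINE idea: set a flag on
        '\n', finish handling the character, then emit the NEWLINE token
        at the bottom of the loop body."""
        tokens, buffer, append_newline = [], "", False
        for character in text:
            if character == "\n":
                append_newline = True
            if character in (" ", "\n"):  # toy separators (assumption)
                if buffer:
                    tokens.append(("WORD", buffer))
                    buffer = ""
            else:
                buffer += character
            if append_newline:  # emitted after the flush above
                append_newline = False
                tokens.append(("NEWLINE", "\n"))
        if buffer:
            tokens.append(("WORD", buffer))
        return tokens

    print(lex_lines("int i = 1234\nfloat f = 12.34\n"))
    # [('WORD', 'int'), ('WORD', 'i'), ('WORD', '='), ('WORD', '1234'),
    #  ('NEWLINE', '\n'), ('WORD', 'float'), ('WORD', 'f'), ('WORD', '='),
    #  ('WORD', '12.34'), ('NEWLINE', '\n')]

The cleanup pass in the earlier hunk then drops a NEWLINE at index 0, so input that starts with a blank line does not begin with a stray token.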

Comments (0)