Skip to content
This repository was archived by the owner on Apr 25, 2023. It is now read-only.

Commit 9ac20da

Browse files
committed
Fixed pylint issue
1 parent f3bb8bb commit 9ac20da

File tree

1 file changed

+17
-27
lines changed

1 file changed

+17
-27
lines changed

ilanguage/Main/lexer.py

Lines changed: 17 additions & 27 deletions
Original file line number · Diff line number · Diff line change
@@ -217,7 +217,7 @@ def validate_integer(string: str) -> bool:
217217
return True
218218

219219

220-
def gettoken(
    string: str, line: int, column: int
) -> Optional[LexerToken]:
    """Return a token classifying the specified string.

    Args:
        string: Raw lexeme text to classify.
        line: Line number of the lexeme, used for error reporting.
        column: Column number of the lexeme, used for error reporting.

    Returns:
        A ``LexerToken`` for *string*, or ``None`` when the string is
        empty or does not match any recognized pattern.
    """
    # Guard clauses in priority order: keywords shadow every other class.
    # Membership on the dict directly -- no need to build a list first.
    if string in KEYWORDS:
        return LexerToken(KEYWORDS[string], string)

    # Leading underscore (with at least one more character) marks a
    # builtin constant.
    if len(string) > 1 and string[0] == "_":
        return LexerToken("BUILTIN_CONST", string)

    if string in ("true", "false"):
        return LexerToken("BOOL", string)

    if string in BASE_TYPES:
        return LexerToken("BASETYPE", string)

    # Empty string: nothing to tokenize.
    if not string:
        return None

    if validate_integer(string):
        return LexerToken("INT", string)

    # Non-empty (guaranteed by the guard above) and not starting with a
    # digit: treat as an identifier.
    if string[0] not in DIGITS_AS_STRINGS:
        return LexerToken("NAME", string)

    # NOTE(review): LexerError is constructed but never raised -- presumably
    # its constructor reports/exits on its own; confirm in its definition.
    LexerError(f"Unrecognized Pattern: {string!r}", line, column)
    return None
270260

271261

272262
##############
273263
# MAIN LEXER #
274264
##############
275265

276266

277-
def lex( # pylint: disable=R0912, R0915, R1260
267+
def lex( # pylint: disable=R0912, R0915
278268
text: Optional[str] = None,
279269
) -> Optional[List[LexerToken]]:
280270
"""Lexes the specified string.

0 commit comments

Comments
 (0)