This repository has no description.

Make the parser drive the lexer

Authored by neuroevolutus and committed by Max Bernstein (commit c4f975be, parent 00845e95).

+6 -5
scrapscript.py
@@ -18,7 +18,7 @@
 from dataclasses import dataclass
 from enum import auto
 from types import ModuleType
-from typing import Any, Callable, Dict, Iterator, Mapping, Optional, Set, Tuple, Union
+from typing import Any, Callable, Dict, Generator, Iterator, Mapping, Optional, Set, Tuple, Union

 readline: Optional[ModuleType]
 try:
@@ -218,6 +218,10 @@

         return result

+    def read_tokens(self) -> Generator[Token, None, None]:
+        while (token := self.read_token()) and not isinstance(token, EOF):
+            yield token
+
     def read_token(self) -> Token:
         # Consume all whitespace
         while self.has_input():
@@ -407,10 +411,7 @@

 def tokenize(x: str) -> Peekable:
     lexer = Lexer(x)
-    tokens = []
-    while (token := lexer.read_token()) and not isinstance(token, EOF):
-        tokens.append(token)
-    return Peekable(iter(tokens))
+    return Peekable(lexer.read_tokens())


 @dataclass(frozen=True)