@@ -28,7 +28,6 @@
 from codecs import lookup, BOM_UTF8
 import collections
 from io import TextIOWrapper
-from itertools import chain
 import itertools as _itertools
 import re
 import sys
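
With the bare "from itertools import chain" gone, every call site below is rewritten to go through the module-level "_itertools" alias instead. A minimal sketch of the pattern (demo values are made up; the alias name is the one the module already uses):

import itertools as _itertools

# An underscore-prefixed module alias keeps itertools' names out of the
# importing module's public namespace while leaving all of them reachable.
merged = list(_itertools.chain([1], [2, 3]))
print(merged)  # [1, 2, 3]
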
@@ -278,7 +277,7 @@ def compat(self, token, iterable):
         startline = token[0] in (NEWLINE, NL)
         prevstring = False

-        for tok in chain([token], iterable):
+        for tok in _itertools.chain([token], iterable):
             toknum, tokval = tok[:2]
             if toknum == ENCODING:
                 self.encoding = tokval
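
compat() is handed the first token separately from the iterator that produces the rest, so chaining them back together restores a single uniform stream. A self-contained sketch of that re-prepend idiom (the helper name and sample data are hypothetical):

import itertools as _itertools

def peek_then_restore(iterable):
    it = iter(iterable)
    first = next(it)                  # consume one item to inspect it
    return first, _itertools.chain([first], it)  # then splice it back on

first, stream = peek_then_restore("abc")
print(first, list(stream))  # a ['a', 'b', 'c']
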
@@ -475,13 +474,10 @@ def tokenize(readline):
     The first token sequence will always be an ENCODING token
     which tells you which encoding was used to decode the bytes stream.
     """
-    # This import is here to avoid problems when the itertools module is not
-    # built yet and tokenize is imported.
-    from itertools import chain, repeat
     encoding, consumed = detect_encoding(readline)
-    rl_gen = iter(readline, b"")
-    empty = repeat(b"")
-    return _tokenize(chain(consumed, rl_gen, empty).__next__, encoding)
+    empty = _itertools.repeat(b"")
+    rl_gen = _itertools.chain(consumed, iter(readline, b""), empty)
+    return _tokenize(rl_gen.__next__, encoding)


 def _tokenize(readline, encoding):
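
The rewritten tokenize() names its two building blocks instead of nesting them in one expression: iter(readline, b"") keeps calling readline until it returns the sentinel b"" at EOF, and repeat(b"") then pads the stream with empty lines forever, so __next__ never raises StopIteration. A runnable sketch (the BytesIO input and the pre-consumed line stand in for what detect_encoding() really returns):

import io
import itertools as _itertools

consumed = [b"# a line already read while detecting the encoding\n"]
readline = io.BytesIO(b"x = 1\n").readline

empty = _itertools.repeat(b"")
rl_gen = _itertools.chain(consumed, iter(readline, b""), empty)

print(rl_gen.__next__())  # b'# a line already read while detecting the encoding\n'
print(rl_gen.__next__())  # b'x = 1\n'
print(rl_gen.__next__())  # b'', and b'' again on every later call
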
@@ -496,7 +492,7 @@ def _tokenize(readline, encoding):
             # BOM will already have been stripped.
             encoding = "utf-8"
         yield TokenInfo(ENCODING, encoding, (0, 0), (0, 0), '')
-    while True:             # loop over lines in stream
+    while True:                                # loop over lines in stream
         try:
             line = readline()
         except StopIteration:
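
The loop in context accepts both EOF conventions for a readline callable: returning b"" or raising StopIteration. That defensive pattern, isolated into a runnable sketch (the helper name is hypothetical):

import io

def drain_lines(readline):
    lines = []
    while True:  # loop over lines in stream
        try:
            line = readline()
        except StopIteration:
            line = b''
        if not line:  # b'' marks end of input
            break
        lines.append(line)
    return lines

print(drain_lines(io.BytesIO(b"a\nb\n").readline))  # [b'a\n', b'b\n']
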
@@ -581,7 +577,7 @@ def _tokenize(readline, encoding):
                         continue
                     token, initial = line[start:end], line[start]

-                    if (initial in numchars or      # ordinary number
+                    if (initial in numchars or                 # ordinary number
                         (initial == '.' and token != '.' and token != '...')):
                         yield TokenInfo(NUMBER, token, spos, epos, line)
                     elif initial in '\r\n':
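
The condition above starts a NUMBER at a leading "." only when the token is neither the lone dot nor the ellipsis, both of which stay operators. A quick check against the module's observable behaviour:

import io
import tokenize

for src in (b".5\n", b".\n", b"...\n"):
    tok = list(tokenize.tokenize(io.BytesIO(src).readline))[1]
    print(src, tokenize.tok_name[tok.type], tok.string)
# b'.5\n'  NUMBER .5
# b'.\n'   OP .
# b'...\n' OP ...
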
@@ -667,7 +663,8 @@ def main():

     # Helper error handling routines
     def perror(message):
-        print(message, file=sys.stderr)
+        sys.stderr.write(message)
+        sys.stderr.write('\n')

     def error(message, filename=None, location=None):
         if location:
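
For perror()'s purposes the two spellings behave the same: print(message, file=sys.stderr) emits the message followed by a newline, which the pair of explicit write calls now does by hand. A minimal side-by-side (demo only):

import sys

def perror_old(message):
    print(message, file=sys.stderr)

def perror_new(message):
    sys.stderr.write(message)
    sys.stderr.write('\n')

perror_old("demo: old spelling")  # both write the text plus '\n' to stderr
perror_new("demo: new spelling")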
0 commit comments