@@ -69,13 +69,12 @@
         self.lines = text.count(os.linesep)

         source = io.BytesIO(text.encode("utf-8"))
         try:
             gen = tokenize.tokenize(source.readline)
-            for toktype, toktext, start, end, line in gen:
+            for toktype, toktext, start, _end, line in gen:
                 (srow, scol) = start
-                (erow, ecol) = end
                 if toktype in [token.NEWLINE, tokenize.NL]:
                     self.__addToken(toktype, os.linesep, srow, scol, line)
                 elif toktype in [token.INDENT, token.DEDENT]:
                     self.__addToken(toktype, "", srow, scol, line)
                 elif toktype == token.NAME and keyword.iskeyword(toktext):
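The change renames the unused end-position tuple to _end and drops the dead (erow, ecol) unpacking, following the usual convention of underscore-prefixing a loop variable that is deliberately ignored. For context, here is a minimal standalone sketch of the same standard-library tokenize loop; the source text and the print formatting are illustrative only, and __addToken from the class above is replaced by a plain print:

    import io
    import keyword
    import token
    import tokenize

    # Illustrative input; any Python source encoded as bytes works.
    source = io.BytesIO(b"if True:\n    x = 1\n")

    for tok in tokenize.tokenize(source.readline):
        # Each tok is a TokenInfo: (type, string, start, end, line).
        # As in the diff above, only the start position is used.
        srow, scol = tok.start
        is_kw = tok.type == token.NAME and keyword.iskeyword(tok.string)
        print(f"{srow}:{scol} {token.tok_name[tok.type]:<10} "
              f"{tok.string!r}{'  (keyword)' if is_kw else ''}")

Running this prints one line per token, starting with the ENCODING token, and flags NAME tokens that are keywords, which is exactly the case the final elif branch in the diff handles.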