    def generate_tokens(self, text):
        """A stand-in for `tokenize.generate_tokens`."""
        if text != self.last_text:
            self.last_text = text
            readline = iter(text.splitlines(True)).__next__
            try:
                self.last_tokens = list(tokenize.generate_tokens(readline))
            except:
                self.last_text = None
                raise
        return self.last_tokens


# Create our generate_tokens cache as a callable replacement function.
generate_tokens = CachedTokenizer().generate_tokens
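
# A usage sketch, assuming the CachedTokenizer definitions above (and the
# file's `import tokenize`); the sample source strings are hypothetical.
# Tokenizing the same text twice is served from the one-element cache as
# the identical list object; a new text invalidates the cache. Note the
# try/except: if tokenizing raises, `last_text` is reset so a failed parse
# is never treated as a cache hit on the next call.
if __name__ == "__main__":
    src = "x = 1\nprint(x)\n"
    first = generate_tokens(src)
    second = generate_tokens(src)
    assert first is second                          # cache hit: same list object
    assert generate_tokens("y = 2\n") is not first  # cache miss: re-tokenized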