ThirdParty/Jasy/jasy/js/parse/Parser.py

changeset 5843:76eee727ccd9
parent 3145:a9de05d4a22f
comparing 5842:c3f41b959a65 with 5843:76eee727ccd9 ("-"/"+" mark removed/added lines, [...] marks context elided by the comparison view)
#
# Jasy - Web Tooling Framework
# Copyright 2010-2012 Zynga Inc.
+# Copyright 2013-2014 Sebastian Werner
#

#
# License: MPL 1.1/GPL 2.0/LGPL 2.1
# Authors:
# - Brendan Eich <brendan@mozilla.org> (Original JavaScript) (2004-2010)
# - Sebastian Werner <info@sebastian-werner.net> (Python Port) (2010-2012)
#

from __future__ import unicode_literals

[...]
__all__ = [ "parse", "parseExpression" ]

def parseExpression(source, fileId=None, line=1, builder=None):
    if builder == None:
        builder = jasy.js.parse.VanillaBuilder.VanillaBuilder()

    # Convert source into expression statement to be friendly to the Tokenizer
    if not source.endswith(";"):
        source = source + ";"

    tokenizer = jasy.js.tokenize.Tokenizer.Tokenizer(source, fileId, line)
    staticContext = StaticContext(False, builder)

    return Expression(tokenizer, staticContext)
+


def parse(source, fileId=None, line=1, builder=None):
    if builder == None:
        builder = jasy.js.parse.VanillaBuilder.VanillaBuilder()

    tokenizer = jasy.js.tokenize.Tokenizer.Tokenizer(source, fileId, line)
    staticContext = StaticContext(False, builder)
    node = Script(tokenizer, staticContext)

    # store fileId on top-level node
    node.fileId = tokenizer.fileId

    # Add missing comments, e.g. for an empty file with only a comment.
    # If there is something not attached to an inner node it is attached to
    # the top-level node, which is not correct, but might be better than
    # just ignoring the comment after all.
    if len(node) > 0:
        builder.COMMENTS_add(node[-1], None, tokenizer.getComments())
    else:
        builder.COMMENTS_add(node, None, tokenizer.getComments())

    if not tokenizer.done():
        raise SyntaxError("Unexpected end of file", tokenizer)

    return node


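# A minimal usage sketch (illustrative, not part of the file; assumes the
# jasy package is importable, the file name is made up):
#
#   import jasy.js.parse.Parser as Parser
#
#   tree = Parser.parse("var answer = 42;", fileId="demo.js")
#   print(tree.type)    # "script"
#
#   expr = Parser.parseExpression("6 * 7")
#   print(expr.type)    # the operator's node type, e.g. "mul"
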
class SyntaxError(Exception):
    def __init__(self, message, tokenizer):
-        Exception.__init__(self, "Syntax error: %s\n%s:%s" % (
-            message, tokenizer.fileId, tokenizer.line))
+        Exception.__init__(self, "Syntax error: %s\n%s:%s" % (message, tokenizer.fileId, tokenizer.line))


# Used as a status container during tree-building for every function body and the global body
class StaticContext(object):
    # inFunction is used to check if a return statement appears in a valid context.
    def __init__(self, inFunction, builder):
-        # Whether this is inside a function, mostly True, only for top-level scope
-        # it's False
+        # Whether this is inside a function, mostly True, only for top-level scope it's False
        self.inFunction = inFunction

        self.hasEmptyReturn = False
        self.hasReturnWithValue = False
        self.isGenerator = False
        self.blockId = 0
        self.builder = builder
        self.statementStack = []

        # Sets to store variable uses
        # self.functions = set()
        # self.variables = set()

        # Status
        # self.needsHoisting = False
        self.bracketLevel = 0
        self.curlyLevel = 0
        self.parenLevel = 0
        self.hookLevel = 0

        # Configure strict ecmascript 3 mode
        self.ecma3OnlyMode = False

        # Status flag during parsing
        self.inForLoopInit = False


def Script(tokenizer, staticContext):
    """Parses the toplevel and function bodies."""
    node = Statements(tokenizer, staticContext)

    # change type from "block" to "script" for script root
    node.type = "script"

    # copy over data from compiler context
    # node.functions = staticContext.functions
    # node.variables = staticContext.variables

    return node


def nest(tokenizer, staticContext, node, func, end=None):
    """Statement stack and nested statement handler."""
    staticContext.statementStack.append(node)
    node = func(tokenizer, staticContext)
    staticContext.statementStack.pop()
    end and tokenizer.mustMatch(end)

    return node


def Statements(tokenizer, staticContext):
    """Parses a list of Statements."""
    [...]


def Block(tokenizer, staticContext):
    tokenizer.mustMatch("left_curly")
    node = Statements(tokenizer, staticContext)
    tokenizer.mustMatch("right_curly")

    return node


def Statement(tokenizer, staticContext):
    """Parses a Statement."""
    [...]
    tokenType = tokenizer.get(True)
    builder = staticContext.builder

    # Cases for statements ending in a right curly return early, avoiding the
    # common semicolon insertion magic after this switch.

    if tokenType == "function":
-        # "declared_form" extends functions of staticContext,
-        # "statement_form" doesn't.
+        # "declared_form" extends functions of staticContext, "statement_form" doesn't.
        if len(staticContext.statementStack) > 1:
            kind = "statement_form"
        else:
            kind = "declared_form"

        return FunctionDefinition(tokenizer, staticContext, True, kind)


    elif tokenType == "left_curly":
        node = Statements(tokenizer, staticContext)
        tokenizer.mustMatch("right_curly")

        return node


    elif tokenType == "if":
        node = builder.IF_build(tokenizer)
        builder.IF_setCondition(node, ParenExpression(tokenizer, staticContext))
        staticContext.statementStack.append(node)
        builder.IF_setThenPart(node, Statement(tokenizer, staticContext))
        [...]
            builder.COMMENTS_add(elsePart, node, comments)
            builder.IF_setElsePart(node, elsePart)

        staticContext.statementStack.pop()
        builder.IF_finish(node)

        return node


    elif tokenType == "switch":
        # This allows CASEs after a "default", which is in the standard.
        node = builder.SWITCH_build(tokenizer)
        builder.SWITCH_setDiscriminant(node, ParenExpression(tokenizer, staticContext))
        staticContext.statementStack.append(node)

        tokenizer.mustMatch("left_curly")
        tokenType = tokenizer.get()

        while tokenType != "right_curly":
            if tokenType == "default":
                if node.defaultIndex >= 0:
                    raise SyntaxError("More than one switch default", tokenizer)

                childNode = builder.DEFAULT_build(tokenizer)
                builder.SWITCH_setDefaultIndex(node, len(node)-1)
                tokenizer.mustMatch("colon")
                builder.DEFAULT_initializeStatements(childNode, tokenizer)

                while True:
                    tokenType = tokenizer.peek(True)
                    if tokenType == "case" or tokenType == "default" or tokenType == "right_curly":
                        break
                    builder.DEFAULT_addStatement(childNode, Statement(tokenizer, staticContext))

                builder.DEFAULT_finish(childNode)

            elif tokenType == "case":
                childNode = builder.CASE_build(tokenizer)
                builder.CASE_setLabel(childNode, Expression(tokenizer, staticContext))
                [...]
                while True:
                    tokenType = tokenizer.peek(True)
                    if tokenType == "case" or tokenType == "default" or tokenType == "right_curly":
                        break
                    builder.CASE_addStatement(childNode, Statement(tokenizer, staticContext))

                builder.CASE_finish(childNode)

            else:
                raise SyntaxError("Invalid switch case", tokenizer)

            [...]

        staticContext.statementStack.pop()
        builder.SWITCH_finish(node)

        return node


    elif tokenType == "for":
        node = builder.FOR_build(tokenizer)
        forBlock = None

        if tokenizer.match("identifier") and tokenizer.token.value == "each":
            builder.FOR_rebuildForEach(node)

        tokenizer.mustMatch("left_paren")
        tokenType = tokenizer.peek()
        childNode = None

        if tokenType != "semicolon":
            staticContext.inForLoopInit = True

            if tokenType == "var" or tokenType == "const":
                tokenizer.get()
                childNode = Variables(tokenizer, staticContext)

            elif tokenType == "let":
                tokenizer.get()

                if tokenizer.peek() == "left_paren":
                    childNode = LetBlock(tokenizer, staticContext, False)

                else:
                    # Let in for head, we need to add an implicit block
                    # around the rest of the for.
                    forBlock = builder.BLOCK_build(tokenizer, staticContext.blockId)
                    staticContext.blockId += 1
                    staticContext.statementStack.append(forBlock)
                    childNode = Variables(tokenizer, staticContext, forBlock)

            else:
                childNode = Expression(tokenizer, staticContext)

            staticContext.inForLoopInit = False

        if childNode and tokenizer.match("in"):
            builder.FOR_rebuildForIn(node)
            builder.FOR_setObject(node, Expression(tokenizer, staticContext), forBlock)

            if childNode.type == "var" or childNode.type == "let":
                if len(childNode) != 1:
                    raise SyntaxError("Invalid for..in left-hand side", tokenizer)

                builder.FOR_setIterator(node, childNode, forBlock)

            else:
                builder.FOR_setIterator(node, childNode, forBlock)

        else:
            builder.FOR_setSetup(node, childNode)
            tokenizer.mustMatch("semicolon")

            if node.isEach:
                raise SyntaxError("Invalid for each..in loop", tokenizer)

            if tokenizer.peek() == "semicolon":
                builder.FOR_setCondition(node, None)
            else:
                builder.FOR_setCondition(node, Expression(tokenizer, staticContext))

            tokenizer.mustMatch("semicolon")

            if tokenizer.peek() == "right_paren":
                builder.FOR_setUpdate(node, None)
            else:
                builder.FOR_setUpdate(node, Expression(tokenizer, staticContext))

        tokenizer.mustMatch("right_paren")
        builder.FOR_setBody(node, nest(tokenizer, staticContext, node, Statement))

        if forBlock:
            builder.BLOCK_finish(forBlock)
            staticContext.statementStack.pop()

        builder.FOR_finish(node)
        return node


    elif tokenType == "while":
        node = builder.WHILE_build(tokenizer)

        builder.WHILE_setCondition(node, ParenExpression(tokenizer, staticContext))
        builder.WHILE_setBody(node, nest(tokenizer, staticContext, node, Statement))
        builder.WHILE_finish(node)

        return node


    elif tokenType == "do":
        node = builder.DO_build(tokenizer)

        builder.DO_setBody(node, nest(tokenizer, staticContext, node, Statement, "while"))
        builder.DO_setCondition(node, ParenExpression(tokenizer, staticContext))
        builder.DO_finish(node)

        if not staticContext.ecma3OnlyMode:
            # <script language="JavaScript"> (without version hints) may need
            # automatic semicolon insertion without a newline after do-while.
            # See http://bugzilla.mozilla.org/show_bug.cgi?id=238945.
            tokenizer.match("semicolon")
            return node

        # NO RETURN


    elif tokenType == "break" or tokenType == "continue":
        if tokenType == "break":
            node = builder.BREAK_build(tokenizer)
        else:
            node = builder.CONTINUE_build(tokenizer)

        if tokenizer.peekOnSameLine() == "identifier":
            tokenizer.get()

            if tokenType == "break":
                builder.BREAK_setLabel(node, tokenizer.token.value)
            else:
                builder.CONTINUE_setLabel(node, tokenizer.token.value)

        [...]
                if i < 0:
                    raise SyntaxError("Label not found", tokenizer)
                if getattr(statementStack[i], "label", None) == label:
                    break

            #
            # Both break and continue to label need to be handled specially
            # within a labeled loop, so that they target that loop. If not in
            # a loop, then break targets its labeled statement. Labels can be
            # nested so we skip all labels immediately enclosing the nearest
            # non-label statement.
            #
            while i < len(statementStack) - 1 and statementStack[i+1].type == "label":
                i += 1

            if i < len(statementStack) - 1 and getattr(statementStack[i+1], "isLoop", False):
                i += 1
            elif tokenType == "continue":
                raise SyntaxError("Invalid continue", tokenizer)

        else:
            while True:
                i -= 1
                if i < 0:
                    if tokenType == "break":
                        raise SyntaxError("Invalid break", tokenizer)
                    else:
                        raise SyntaxError("Invalid continue", tokenizer)

                if getattr(statementStack[i], "isLoop", False) or (tokenType == "break" and statementStack[i].type == "switch"):
                    break

        if tokenType == "break":
            builder.BREAK_finish(node)
        else:
            builder.CONTINUE_finish(node)

        # NO RETURN


    elif tokenType == "try":
        node = builder.TRY_build(tokenizer)
        builder.TRY_setTryBlock(node, Block(tokenizer, staticContext))

        while tokenizer.match("catch"):
            childNode = builder.CATCH_build(tokenizer)
            tokenizer.mustMatch("left_paren")
            nextTokenType = tokenizer.get()

            if nextTokenType == "left_bracket" or nextTokenType == "left_curly":
                # Destructured catch identifiers.
                tokenizer.unget()
                exception = DestructuringExpression(tokenizer, staticContext, True)

            elif nextTokenType == "identifier":
                exception = builder.CATCH_wrapException(tokenizer)

            else:
                raise SyntaxError("Missing identifier in catch", tokenizer)

            builder.CATCH_setException(childNode, exception)

            if tokenizer.match("if"):
                if staticContext.ecma3OnlyMode:
                    raise SyntaxError("Illegal catch guard", tokenizer)

                if node.getChildrenLength() > 0 and not node.getUnrelatedChildren()[0].guard:
                    raise SyntaxError("Guarded catch after unguarded", tokenizer)

                builder.CATCH_setGuard(childNode, Expression(tokenizer, staticContext))

            else:
                builder.CATCH_setGuard(childNode, None)

            tokenizer.mustMatch("right_paren")

            builder.CATCH_setBlock(childNode, Block(tokenizer, staticContext))
            builder.CATCH_finish(childNode)

            builder.TRY_addCatch(node, childNode)

        builder.TRY_finishCatches(node)

        if tokenizer.match("finally"):
            builder.TRY_setFinallyBlock(node, Block(tokenizer, staticContext))

        if node.getChildrenLength() == 0 and not hasattr(node, "finallyBlock"):
            raise SyntaxError("Invalid try statement", tokenizer)

        builder.TRY_finish(node)
        return node


    elif tokenType == "catch" or tokenType == "finally":
-        raise SyntaxError(tokenizer.tokens[tokenType] + " without preceding try", tokenizer)
+        raise SyntaxError(tokens[tokenType] + " without preceding try", tokenizer)


    elif tokenType == "throw":
        node = builder.THROW_build(tokenizer)

        builder.THROW_setException(node, Expression(tokenizer, staticContext))
        builder.THROW_finish(node)

        # NO RETURN


    elif tokenType == "return":
        node = returnOrYield(tokenizer, staticContext)

        # NO RETURN


    elif tokenType == "with":
        node = builder.WITH_build(tokenizer)
        [...]
        return node


    elif tokenType == "var" or tokenType == "const":
        node = Variables(tokenizer, staticContext)

        # NO RETURN


    elif tokenType == "let":
        if tokenizer.peek() == "left_paren":
            node = LetBlock(tokenizer, staticContext, True)
        else:
            node = Variables(tokenizer, staticContext)

        # NO RETURN


    elif tokenType == "debugger":
        node = builder.DEBUGGER_build(tokenizer)

        # NO RETURN


    elif tokenType == "newline" or tokenType == "semicolon":
        node = builder.SEMICOLON_build(tokenizer)

        builder.SEMICOLON_setExpression(node, None)
        builder.SEMICOLON_finish(tokenizer)

        return node


    else:
        if tokenType == "identifier":
            [...]

            # Labeled statement.
            if tokenType == "colon":
                label = tokenizer.token.value
                statementStack = staticContext.statementStack

                i = len(statementStack)-1
                while i >= 0:
                    if getattr(statementStack[i], "label", None) == label:
                        raise SyntaxError("Duplicate label", tokenizer)

                    i -= 1

                tokenizer.get()
                node = builder.LABEL_build(tokenizer)

                builder.LABEL_setLabel(node, label)
                builder.LABEL_setStatement(node, nest(tokenizer, staticContext, node, Statement))
                builder.LABEL_finish(node)

                return node

        # Expression statement.
        # We unget the current token to parse the expression as a whole.
        node = builder.SEMICOLON_build(tokenizer)
        tokenizer.unget()
        builder.SEMICOLON_setExpression(node, Expression(tokenizer, staticContext))
        node.end = node.expression.end
        builder.SEMICOLON_finish(node)

        # NO RETURN


    MagicalSemicolon(tokenizer)
    return node


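# Illustration (not from the original file) of the two function forms
# distinguished in the "function" branch above, for JavaScript input:
#
#   function top() {}                  // statement stack depth 1: "declared_form"
#   if (x) { function inner() {} }     // nested statement: "statement_form"
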
def MagicalSemicolon(tokenizer):
    if tokenizer.line == tokenizer.token.line:
        tokenType = tokenizer.peekOnSameLine()

        if tokenType != "end" and tokenType != "newline" and tokenType != "semicolon" and tokenType != "right_curly":
            raise SyntaxError("Missing ; before statement", tokenizer)

    tokenizer.match("semicolon")


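# Illustration (not from the original file) of the semicolon insertion
# implemented above: a statement may end at a line break,
#
#   a = b
#   return a        // ";" inserted after "a = b"
#
# but "a = b return a" on a single line raises "Missing ; before statement".
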
def returnOrYield(tokenizer, staticContext):
    builder = staticContext.builder
    tokenType = tokenizer.token.type

    if tokenType == "return":
        if not staticContext.inFunction:
            raise SyntaxError("Return not in function", tokenizer)

        node = builder.RETURN_build(tokenizer)

    else:
        if not staticContext.inFunction:
            raise SyntaxError("Yield not in function", tokenizer)

        staticContext.isGenerator = True
        node = builder.YIELD_build(tokenizer)

    nextTokenType = tokenizer.peek(True)
    if nextTokenType != "end" and nextTokenType != "newline" and nextTokenType != "semicolon" and nextTokenType != "right_curly" and (tokenType != "yield" or (nextTokenType != tokenType and nextTokenType != "right_bracket" and nextTokenType != "right_paren" and nextTokenType != "colon" and nextTokenType != "comma")):
        if tokenType == "return":
            builder.RETURN_setValue(node, Expression(tokenizer, staticContext))
            staticContext.hasReturnWithValue = True
        else:
            builder.YIELD_setValue(node, AssignExpression(tokenizer, staticContext))

    elif tokenType == "return":
        staticContext.hasEmptyReturn = True

    # Disallow return v; in generator.
    if staticContext.hasReturnWithValue and staticContext.isGenerator:
        [...]


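# Illustration (not from the original file; the exact error raised is in the
# elided lines above): one function body may not both yield and return a
# value, e.g.
#
#   function g() { yield 1; return 2; }    // rejected while parsing
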
def FunctionDefinition(tokenizer, staticContext, requireName, functionForm):
    builder = staticContext.builder
    functionNode = builder.FUNCTION_build(tokenizer)

    if tokenizer.match("identifier"):
        builder.FUNCTION_setName(functionNode, tokenizer.token.value)
    elif requireName:
        raise SyntaxError("Missing def identifier", tokenizer)

    tokenizer.mustMatch("left_paren")

    if not tokenizer.match("right_paren"):
        builder.FUNCTION_initParams(functionNode, tokenizer)
        prevParamNode = None
        while True:
            tokenType = tokenizer.get()
            if tokenType == "left_bracket" or tokenType == "left_curly":
                # Destructured formal parameters.
                tokenizer.unget()
                paramNode = DestructuringExpression(tokenizer, staticContext)

            elif tokenType == "identifier":
                paramNode = builder.FUNCTION_wrapParam(tokenizer)

            else:
                raise SyntaxError("Missing formal parameter", tokenizer)

            builder.FUNCTION_addParam(functionNode, tokenizer, paramNode)
            builder.COMMENTS_add(paramNode, prevParamNode, tokenizer.getComments())

            if not tokenizer.match("comma"):
                break

            prevParamNode = paramNode

        tokenizer.mustMatch("right_paren")

    # Do we have an expression closure or a normal body?
    tokenType = tokenizer.get()
    if tokenType != "left_curly":
        builder.FUNCTION_setExpressionClosure(functionNode, True)
        tokenizer.unget()

    childContext = StaticContext(True, builder)
-    tokenizer.save()

    if staticContext.inFunction:
        # Inner functions don't reset block numbering, only functions at
        # the top level of the program do.
        childContext.blockId = staticContext.blockId

    if tokenType != "left_curly":
        builder.FUNCTION_setBody(functionNode, AssignExpression(tokenizer, staticContext))
        if staticContext.isGenerator:
            raise SyntaxError("Generator returns a value", tokenizer)

    else:
        builder.FUNCTION_hoistVars(childContext.blockId)
        builder.FUNCTION_setBody(functionNode, Script(tokenizer, childContext))

-    #
-    # Hoisting makes parse-time binding analysis tricky. A taxonomy of hoists:
-    #
-    # 1. vars hoist to the top of their function:
-    #
-    #     var x = 'global';
-    #     function f() {
-    #         x = 'f';
-    #         if (false)
-    #             var x;
-    #     }
-    #     f();
-    #     print(x); // "global"
-    #
-    # 2. lets hoist to the top of their block:
-    #
-    #     function f() { // id: 0
-    #         var x = 'f';
-    #         {
-    #             {
-    #                 print(x); // "undefined"
-    #             }
-    #             let x;
-    #         }
-    #     }
-    #     f();
-    #
-    # 3. inner functions at function top-level hoist to the beginning
-    #    of the function.
-    #
-    # If the builder used is doing parse-time analyses, hoisting may
-    # invalidate earlier conclusions it makes about variable scope.
-    #
-    # The builder can opt to set the needsHoisting flag in a
-    # CompilerContext (in the case of var and function hoisting) or in a
-    # node of type BLOCK (in the case of let hoisting). This signals for
-    # the parser to reparse sections of code.
-    #
-    # To avoid exponential blowup, if a function at the program top-level
-    # has any hoists in its child blocks or inner functions, we reparse
-    # the entire toplevel function. Each toplevel function is parsed at
-    # most twice.
-    #
-    # The list of declarations can be tied to block ids to aid talking
-    # about declarations of blocks that have not yet been fully parsed.
-    #
-    # Blocks are already uniquely numbered; see the comment in
-    # Statements.
-    #
-
-    #
-    # wpbasti:
-    # Don't have the feeling that I need this functionality because the
-    # tree is often modified before the variables and names inside are
-    # of any interest. So better doing this in a post-scan.
-    #
-
-    #
-    # if childContext.needsHoisting:
-    #     # Order is important here! Builders expect functions to come after variables!
-    #     builder.setHoists(functionNode.body.id, childContext.variables.concat(childContext.functions))
-    #
-    #     if staticContext.inFunction:
-    #         # If an inner function needs hoisting, we need to propagate
-    #         # this flag up to the parent function.
-    #         staticContext.needsHoisting = True
-    #
-    #     else:
-    #         # Only re-parse functions at the top level of the program.
-    #         childContext = StaticContext(True, builder)
-    #         tokenizer.rewind(rp)
-    #
-    #         # Set a flag in case the builder wants to have different behavior
-    #         # on the second pass.
-    #         builder.secondPass = True
-    #         builder.FUNCTION_hoistVars(functionNode.body.id, True)
-    #         builder.FUNCTION_setBody(functionNode, Script(tokenizer, childContext))
-    #         builder.secondPass = False

    if tokenType == "left_curly":
        tokenizer.mustMatch("right_curly")

    functionNode.end = tokenizer.token.end
    functionNode.functionForm = functionForm

    builder.COMMENTS_add(functionNode.body, functionNode.body, tokenizer.getComments())
    builder.FUNCTION_finish(functionNode, staticContext)

    return functionNode


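# Illustration (not from the original file) of the two body forms handled
# above, for JavaScript 1.8 input:
#
#   function square(x) x * x               // expression closure: no "{",
#                                          // body parsed as AssignExpression
#   function square(x) { return x * x; }   // normal body parsed via Script()
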
def Variables(tokenizer, staticContext, letBlock=None):
    """Parses a comma-separated list of var declarations (and maybe initializations)."""

    builder = staticContext.builder
    if tokenizer.token.type == "var":
        build = builder.VAR_build
        addDecl = builder.VAR_addDecl
        finish = builder.VAR_finish
        childContext = staticContext

    elif tokenizer.token.type == "const":
        build = builder.CONST_build
        addDecl = builder.CONST_addDecl
        finish = builder.CONST_finish
        childContext = staticContext

    elif tokenizer.token.type == "let" or tokenizer.token.type == "left_paren":
        build = builder.LET_build
        addDecl = builder.LET_addDecl
        finish = builder.LET_finish

        if not letBlock:
            statementStack = staticContext.statementStack
            i = len(statementStack) - 1

            # a BLOCK *must* be found.
            while statementStack[i].type != "block":
                i -= 1

            # Lets at the function toplevel are just vars, at least in SpiderMonkey.
            [...]
                finish = builder.VAR_finish
                childContext = staticContext

            else:
                childContext = statementStack[i]

        else:
            childContext = letBlock

    node = build(tokenizer)

    while True:
        tokenType = tokenizer.get()

        # Done in Python port!
        # FIXME Should have a special DECLARATION node instead of overloading
        # IDENTIFIER to mean both identifier declarations and destructured
        # declarations.
        childNode = builder.DECL_build(tokenizer)

        if tokenType == "left_bracket" or tokenType == "left_curly":
            # Pass in childContext if we need to add each pattern matched into
            # its variables, else pass in staticContext.
            # Need to unget to parse the full destructured expression.
            tokenizer.unget()
            builder.DECL_setNames(childNode, DestructuringExpression(tokenizer, staticContext, True, childContext))

            if staticContext.inForLoopInit and tokenizer.peek() == "in":
                addDecl(node, childNode, childContext)
                if tokenizer.match("comma"):
                    continue
                else:
                    break

            tokenizer.mustMatch("assign")
            if tokenizer.token.assignOp:
                raise SyntaxError("Invalid variable initialization", tokenizer)

            # Parse the init as a normal assignment.
            builder.DECL_setInitializer(childNode, AssignExpression(tokenizer, staticContext))
            builder.DECL_finish(childNode)
            addDecl(node, childNode, childContext)

            # Copy over names for variable list
            # for nameNode in childNode.names:
            #     childContext.variables.add(nameNode.value)

            if tokenizer.match("comma"):
                continue
            else:
                break

        if tokenType != "identifier":
            raise SyntaxError("Missing variable name", tokenizer)

        builder.DECL_setName(childNode, tokenizer.token.value)
        [...]

            initializerNode = AssignExpression(tokenizer, staticContext)
            builder.DECL_setInitializer(childNode, initializerNode)

        builder.DECL_finish(childNode)

        # If we directly use the node in "let" constructs
        # if not hasattr(childContext, "variables"):
        #     childContext.variables = set()

        # childContext.variables.add(childNode.name)

        if not tokenizer.match("comma"):
            break

    finish(node)
    return node

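# Illustration (not from the original file): "var a = 1, b;" yields one
# "var" node with two DECL children, only the first carrying an initializer;
# a pattern such as "var [a, b] = pair;" is routed through
# DestructuringExpression() instead.
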

[...]
        isStatement = False

    if isStatement:
        childNode = Block(tokenizer, staticContext)
        builder.LETBLOCK_setBlock(node, childNode)

    else:
        childNode = AssignExpression(tokenizer, staticContext)
        builder.LETBLOCK_setExpression(node, childNode)

    builder.LETBLOCK_finish(node)
    return node


def checkDestructuring(tokenizer, staticContext, node, simpleNamesOnly=None, data=None):
    if node.type == "array_comp":
        raise SyntaxError("Invalid array comprehension left-hand side", tokenizer)

    if node.type != "array_init" and node.type != "object_init":
        return

    builder = staticContext.builder

    for child in node:
        if child == None:
            continue

        if child.type == "property_init":
            lhs = child[0]
            rhs = child[1]
        else:
            lhs = None
            rhs = None


        if rhs and (rhs.type == "array_init" or rhs.type == "object_init"):
            checkDestructuring(tokenizer, staticContext, rhs, simpleNamesOnly, data)

        if lhs and simpleNamesOnly:
            # In declarations, lhs must be simple names
            if lhs.type != "identifier":
                raise SyntaxError("Missing name in pattern", tokenizer)

            elif data:
                childNode = builder.DECL_build(tokenizer)
                builder.DECL_setName(childNode, lhs.value)

                # Don't need to set initializer because it's just for
                # hoisting anyways.
                builder.DECL_finish(childNode)

                # Each pattern needs to be added to variables.
                # data.variables.add(childNode.name)


# JavaScript 1.7
def DestructuringExpression(tokenizer, staticContext, simpleNamesOnly=None, data=None):
    node = PrimaryExpression(tokenizer, staticContext)
    checkDestructuring(tokenizer, staticContext, node, simpleNamesOnly, data)
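
# Illustration (not from the original file) of JavaScript 1.7 patterns
# accepted here:
#
#   var [a, b] = point;            // array pattern
#   var {x: px, y: py} = point;    // object pattern
#
# In declarations (simpleNamesOnly) each bound name must be a plain
# identifier, otherwise "Missing name in pattern" is raised.
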
[...]
    node = builder.GENERATOR_build(tokenizer)

    builder.GENERATOR_setExpression(node, expression)
    builder.GENERATOR_setTail(node, comprehensionTail(tokenizer, staticContext))
    builder.GENERATOR_finish(node)

    return node


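# Illustration (not from the original file): a JavaScript 1.7 generator
# expression such as
#
#   (x * x for (x in numbers))
#
# pairs the leading expression with the comprehension tail parsed below.
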
# JavaScript 1.7 Comprehensions Tails (Generators / Arrays)
def comprehensionTail(tokenizer, staticContext):
    builder = staticContext.builder

    # tokenizer.token.type must be "for"
    body = builder.COMPTAIL_build(tokenizer)

    while True:
        node = builder.FOR_build(tokenizer)

        # Comprehension tails are always for..in loops.
        builder.FOR_rebuildForIn(node)
        if tokenizer.match("identifier"):
            # But sometimes they're for each..in.
            if tokenizer.token.value == "each":
                builder.FOR_rebuildForEach(node)
            else:
                tokenizer.unget()

        tokenizer.mustMatch("left_paren")

        tokenType = tokenizer.get()
        if tokenType == "left_bracket" or tokenType == "left_curly":
            tokenizer.unget()
            # Destructured left side of for in comprehension tails.
            builder.FOR_setIterator(node, DestructuringExpression(tokenizer, staticContext))

        elif tokenType == "identifier":
            # Removed variable/declaration substructure in the Python port.
            # Variable declarations are not allowed here, so why process them in such a way?

            # declaration = builder.DECL_build(tokenizer)
            # builder.DECL_setName(declaration, tokenizer.token.value)
            # builder.DECL_finish(declaration)
            # childNode = builder.VAR_build(tokenizer)
            # builder.VAR_addDecl(childNode, declaration)
            # builder.VAR_finish(childNode)
            # builder.FOR_setIterator(node, declaration)

            # Don't add to variables since the semantics of comprehensions is
            # such that the variables are in their own function when desugared.

            identifier = builder.PRIMARY_build(tokenizer, "identifier")
            builder.FOR_setIterator(node, identifier)

        else:
            raise SyntaxError("Missing identifier", tokenizer)

        tokenizer.mustMatch("in")
        builder.FOR_setObject(node, Expression(tokenizer, staticContext))
        tokenizer.mustMatch("right_paren")
        builder.COMPTAIL_addFor(body, node)

        if not tokenizer.match("for"):
            break

    # Optional guard.
    if tokenizer.match("if"):
        [...]

1070 err = "expression must be parenthesized" 989 err = "expression must be parenthesized"
1071 if tokenizer.match("for"): 990 if tokenizer.match("for"):
1072 if node.type == "yield" and not node.parenthesized: 991 if node.type == "yield" and not node.parenthesized:
1073 raise SyntaxError("Yield " + err, tokenizer) 992 raise SyntaxError("Yield " + err, tokenizer)
1074 993
1075 if node.type == "comma" and not node.parenthesized: 994 if node.type == "comma" and not node.parenthesized:
1076 raise SyntaxError("Generator " + err, tokenizer) 995 raise SyntaxError("Generator " + err, tokenizer)
1077 996
1078 node = GeneratorExpression(tokenizer, staticContext, node) 997 node = GeneratorExpression(tokenizer, staticContext, node)
1079 998
1080 tokenizer.mustMatch("right_paren") 999 tokenizer.mustMatch("right_paren")
1081 1000
1082 return node 1001 return node
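
# Illustrative sketch: ParenExpression wraps a parenthesized expression and, on
# a trailing "for", upgrades it to a generator expression; an unparenthesized
# yield or comma expression in that position triggers the errors above.
#
#   >>> Parser.parseExpression("(v * v for (v in values))")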


def Expression(tokenizer, staticContext):
    builder = staticContext.builder
    node = AssignExpression(tokenizer, staticContext)

    if tokenizer.match("comma"):
        childNode = builder.COMMA_build(tokenizer)
        builder.COMMA_addOperand(childNode, node)
        node = childNode

        while True:
            childNode = node[-1]
            if childNode.type == "yield" and not childNode.parenthesized:
                raise SyntaxError("Yield expression must be parenthesized", tokenizer)
            builder.COMMA_addOperand(node, AssignExpression(tokenizer, staticContext))

            if not tokenizer.match("comma"):
                break

        builder.COMMA_finish(node)

    return node
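
# Illustrative sketch: Expression folds comma-separated assignment expressions
# into a single "comma" node, left to right.
#
#   >>> node = Parser.parseExpression("a = 1, b = 2")
#   >>> node.type    # expected: "comma", with one operand per assignment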


def AssignExpression(tokenizer, staticContext):
    builder = staticContext.builder
    node = builder.ASSIGN_build(tokenizer)
    lhs = ConditionalExpression(tokenizer, staticContext)

    if not tokenizer.match("assign"):
        builder.ASSIGN_finish(node)
        return lhs

    if lhs.type == "object_init" or lhs.type == "array_init":
        checkDestructuring(tokenizer, staticContext, lhs)
    elif lhs.type == "identifier" or lhs.type == "dot" or lhs.type == "index" or lhs.type == "call":
        pass
    else:
        raise SyntaxError("Bad left-hand side of assignment", tokenizer)

    builder.ASSIGN_setAssignOp(node, tokenizer.token.assignOp)
    builder.ASSIGN_addOperand(node, lhs)
    builder.ASSIGN_addOperand(node, AssignExpression(tokenizer, staticContext))
    builder.ASSIGN_finish(node)

    return node
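
# Illustrative sketch: only references (identifier/dot/index/call) and
# destructuring patterns are accepted on the left of an assignment.
#
#   >>> Parser.parseExpression("obj.prop = value")    # "dot" left-hand side
#   >>> Parser.parseExpression("[a, b] = pair")       # destructuring, checked above
#   >>> Parser.parseExpression("a + b = c")           # raises SyntaxError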


def ConditionalExpression(tokenizer, staticContext):
    builder = staticContext.builder
    node = OrExpression(tokenizer, staticContext)

    if tokenizer.match("hook"):
        childNode = node
        node = builder.HOOK_build(tokenizer)
        builder.HOOK_setCondition(node, childNode)

        # Always accept the "in" operator in the middle clause of a ternary,
        # where it is unambiguous, even if we might be parsing the init of a
        # for statement.
        oldLoopInit = staticContext.inForLoopInit
        staticContext.inForLoopInit = False
        builder.HOOK_setThenPart(node, AssignExpression(tokenizer, staticContext))
        staticContext.inForLoopInit = oldLoopInit

        if not tokenizer.match("colon"):
            raise SyntaxError("Missing : after ?", tokenizer)

        builder.HOOK_setElsePart(node, AssignExpression(tokenizer, staticContext))
        builder.HOOK_finish(node)

    return node
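
# Illustrative sketch: the "hook" node carries condition, then-part and
# else-part as operands; inForLoopInit is cleared around the then-part so
# "in" stays legal there even inside a for-loop head.
#
#   >>> Parser.parseExpression("ok ? 'key' in map : false")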
def OrExpression(tokenizer, staticContext):
    builder = staticContext.builder
    node = AndExpression(tokenizer, staticContext)

    while tokenizer.match("or"):
        childNode = builder.OR_build(tokenizer)
        builder.OR_addOperand(childNode, node)
        builder.OR_addOperand(childNode, AndExpression(tokenizer, staticContext))
        builder.OR_finish(childNode)
        node = childNode

    return node


def AndExpression(tokenizer, staticContext):
    builder = staticContext.builder
    node = BitwiseOrExpression(tokenizer, staticContext)

    while tokenizer.match("and"):
        childNode = builder.AND_build(tokenizer)
        builder.AND_addOperand(childNode, node)
        builder.AND_addOperand(childNode, BitwiseOrExpression(tokenizer, staticContext))
        builder.AND_finish(childNode)
        node = childNode

    return node
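
# The functions from OrExpression down to MultiplyExpression share one
# precedence-climbing shape: parse the operand one level tighter, then fold
# operators of the current level left-associatively. Illustrative sketch:
#
#   >>> node = Parser.parseExpression("a || b && c")
#   >>> node.type    # expected: "or", whose second operand is the "and" node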


def BitwiseOrExpression(tokenizer, staticContext):
    builder = staticContext.builder
    node = BitwiseXorExpression(tokenizer, staticContext)

    while tokenizer.match("bitwise_or"):
        childNode = builder.BITWISEOR_build(tokenizer)
        builder.BITWISEOR_addOperand(childNode, node)
        builder.BITWISEOR_addOperand(childNode, BitwiseXorExpression(tokenizer, staticContext))
        builder.BITWISEOR_finish(childNode)
        node = childNode

    return node


def BitwiseXorExpression(tokenizer, staticContext):
    builder = staticContext.builder
    node = BitwiseAndExpression(tokenizer, staticContext)

    while tokenizer.match("bitwise_xor"):
        childNode = builder.BITWISEXOR_build(tokenizer)
        builder.BITWISEXOR_addOperand(childNode, node)
        builder.BITWISEXOR_addOperand(childNode, BitwiseAndExpression(tokenizer, staticContext))
        builder.BITWISEXOR_finish(childNode)
        node = childNode

    return node


def BitwiseAndExpression(tokenizer, staticContext):
    builder = staticContext.builder
    node = EqualityExpression(tokenizer, staticContext)

    while tokenizer.match("bitwise_and"):
        childNode = builder.BITWISEAND_build(tokenizer)
        builder.BITWISEAND_addOperand(childNode, node)
        builder.BITWISEAND_addOperand(childNode, EqualityExpression(tokenizer, staticContext))
        builder.BITWISEAND_finish(childNode)
        node = childNode

    return node


def EqualityExpression(tokenizer, staticContext):
    builder = staticContext.builder
    node = RelationalExpression(tokenizer, staticContext)

    while tokenizer.match("eq") or tokenizer.match("ne") or tokenizer.match("strict_eq") or tokenizer.match("strict_ne"):
        childNode = builder.EQUALITY_build(tokenizer)
        builder.EQUALITY_addOperand(childNode, node)
        builder.EQUALITY_addOperand(childNode, RelationalExpression(tokenizer, staticContext))
        builder.EQUALITY_finish(childNode)
        node = childNode

    return node


def RelationalExpression(tokenizer, staticContext):
    builder = staticContext.builder
    oldLoopInit = staticContext.inForLoopInit

    # Uses of the in operator in shiftExprs are always unambiguous,
    # so unset the flag that prohibits recognizing it.
    staticContext.inForLoopInit = False
    node = ShiftExpression(tokenizer, staticContext)

    while tokenizer.match("lt") or tokenizer.match("le") or tokenizer.match("ge") or tokenizer.match("gt") or (oldLoopInit == False and tokenizer.match("in")) or tokenizer.match("instanceof"):
        childNode = builder.RELATIONAL_build(tokenizer)
        builder.RELATIONAL_addOperand(childNode, node)
        builder.RELATIONAL_addOperand(childNode, ShiftExpression(tokenizer, staticContext))
        builder.RELATIONAL_finish(childNode)
        node = childNode

    staticContext.inForLoopInit = oldLoopInit

    return node
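
# Illustrative sketch: RelationalExpression is the only level that consults
# inForLoopInit, because a bare "in" inside a for-loop head would be ambiguous
# with the for..in form; as a plain expression it parses normally.
#
#   >>> Parser.parseExpression("key in map")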


def ShiftExpression(tokenizer, staticContext):
    builder = staticContext.builder
    node = AddExpression(tokenizer, staticContext)

    while tokenizer.match("lsh") or tokenizer.match("rsh") or tokenizer.match("ursh"):
        childNode = builder.SHIFT_build(tokenizer)
        builder.SHIFT_addOperand(childNode, node)
        builder.SHIFT_addOperand(childNode, AddExpression(tokenizer, staticContext))
        builder.SHIFT_finish(childNode)
        node = childNode

    return node


def AddExpression(tokenizer, staticContext):
    builder = staticContext.builder
    node = MultiplyExpression(tokenizer, staticContext)

    while tokenizer.match("plus") or tokenizer.match("minus"):
        childNode = builder.ADD_build(tokenizer)
        builder.ADD_addOperand(childNode, node)
        builder.ADD_addOperand(childNode, MultiplyExpression(tokenizer, staticContext))
        builder.ADD_finish(childNode)
        node = childNode

    return node


def MultiplyExpression(tokenizer, staticContext):
    builder = staticContext.builder
    node = UnaryExpression(tokenizer, staticContext)

    while tokenizer.match("mul") or tokenizer.match("div") or tokenizer.match("mod"):
        childNode = builder.MULTIPLY_build(tokenizer)
        builder.MULTIPLY_addOperand(childNode, node)
        builder.MULTIPLY_addOperand(childNode, UnaryExpression(tokenizer, staticContext))
        builder.MULTIPLY_finish(childNode)
        node = childNode

    return node


def UnaryExpression(tokenizer, staticContext):
    builder = staticContext.builder
    tokenType = tokenizer.get(True)

    if tokenType in ["delete", "void", "typeof", "not", "bitwise_not", "plus", "minus"]:
        node = builder.UNARY_build(tokenizer)
        builder.UNARY_addOperand(node, UnaryExpression(tokenizer, staticContext))

    elif tokenType == "increment" or tokenType == "decrement":
        # Prefix increment/decrement.
        node = builder.UNARY_build(tokenizer)
        builder.UNARY_addOperand(node, MemberExpression(tokenizer, staticContext, True))

    else:
        tokenizer.unget()
        node = MemberExpression(tokenizer, staticContext, True)

        # Don't look across a newline boundary for a postfix {in,de}crement.
        if tokenizer.tokens[(tokenizer.tokenIndex + tokenizer.lookahead - 1) & 3].line == tokenizer.line:
            if tokenizer.match("increment") or tokenizer.match("decrement"):
                childNode = builder.UNARY_build(tokenizer)
                builder.UNARY_setPostfix(childNode)
                builder.UNARY_finish(node)
                builder.UNARY_addOperand(childNode, node)
                node = childNode

    builder.UNARY_finish(node)

    return node
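
# Illustrative sketch: prefix operators recurse through UnaryExpression, while
# postfix increment/decrement binds to a member expression and is flagged via
# UNARY_setPostfix.
#
#   >>> Parser.parseExpression("typeof !done")    # nested prefix operators
#   >>> Parser.parseExpression("counter++")       # postfix form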

def MemberExpression(tokenizer, staticContext, allowCallSyntax):
    builder = staticContext.builder

    if tokenizer.match("new"):
        node = builder.MEMBER_build(tokenizer)
        builder.MEMBER_addOperand(node, MemberExpression(tokenizer, staticContext, False))

        if tokenizer.match("left_paren"):
            builder.MEMBER_rebuildNewWithArgs(node)
            builder.MEMBER_addOperand(node, ArgumentList(tokenizer, staticContext))

        builder.MEMBER_finish(node)

    else:
        node = PrimaryExpression(tokenizer, staticContext)

    while True:
        tokenType = tokenizer.get()
        if tokenType == "end":
            break

        if tokenType == "dot":
            childNode = builder.MEMBER_build(tokenizer)
            builder.MEMBER_addOperand(childNode, node)
            tokenizer.mustMatch("identifier")
            builder.MEMBER_addOperand(childNode, builder.MEMBER_build(tokenizer))
            builder.MEMBER_finish(childNode)

        elif tokenType == "left_bracket":
            childNode = builder.INDEX_build(tokenizer)
            builder.INDEX_addOperand(childNode, node)
            builder.INDEX_addOperand(childNode, Expression(tokenizer, staticContext))
            tokenizer.mustMatch("right_bracket")
            builder.INDEX_finish(childNode)

        elif allowCallSyntax and tokenType == "left_paren":
            childNode = builder.CALL_build(tokenizer)
            builder.CALL_addOperand(childNode, node)
            builder.CALL_addOperand(childNode, ArgumentList(tokenizer, staticContext))
            builder.CALL_finish(childNode)

        else:
            tokenizer.unget()
            return node

        node = childNode
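
# Illustrative sketch: MemberExpression loops, folding ".name", "[expr]" and
# "(args)" accesses onto the current node, so mixed chains parse in one pass.
#
#   >>> Parser.parseExpression("new Widget(config).items[0].render()")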


def ArgumentList(tokenizer, staticContext):
    builder = staticContext.builder
    node = builder.LIST_build(tokenizer)

    if tokenizer.match("right_paren", True):
        return node

    while True:
        childNode = AssignExpression(tokenizer, staticContext)
        if childNode.type == "yield" and not childNode.parenthesized and tokenizer.peek() == "comma":
            raise SyntaxError("Yield expression must be parenthesized", tokenizer)

        if tokenizer.match("for"):
            childNode = GeneratorExpression(tokenizer, staticContext, childNode)
            if len(node) > 1 or tokenizer.peek(True) == "comma":
                raise SyntaxError("Generator expression must be parenthesized", tokenizer)

        builder.LIST_addOperand(node, childNode)
        if not tokenizer.match("comma"):
            break

    tokenizer.mustMatch("right_paren")
    builder.LIST_finish(node)

    return node
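
# Illustrative sketch: a generator expression may appear as the sole argument
# of a call; combined with any further argument it must be parenthesized,
# matching the checks above.
#
#   >>> Parser.parseExpression("fn(x * x for (x in xs))")       # accepted
#   >>> Parser.parseExpression("fn(x * x for (x in xs), 1)")    # raises SyntaxError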


def PrimaryExpression(tokenizer, staticContext):
    builder = staticContext.builder
    tokenType = tokenizer.get(True)

    if tokenType == "function":
        node = FunctionDefinition(tokenizer, staticContext, False, "expressed_form")

    elif tokenType == "left_bracket":
        node = builder.ARRAYINIT_build(tokenizer)
        while True:
            tokenType = tokenizer.peek(True)
            if tokenType == "right_bracket":
                break

            if tokenType == "comma":
                tokenizer.get()
                builder.ARRAYINIT_addElement(node, None)
                continue

            builder.ARRAYINIT_addElement(node, AssignExpression(tokenizer, staticContext))

            if tokenType != "comma" and not tokenizer.match("comma"):
                break

        # If we matched exactly one element and got a "for", we have an
        # array comprehension.
        if len(node) == 1 and tokenizer.match("for"):
            childNode = builder.ARRAYCOMP_build(tokenizer)
            builder.ARRAYCOMP_setExpression(childNode, node[0])
            builder.ARRAYCOMP_setTail(childNode, comprehensionTail(tokenizer, staticContext))
            node = childNode

        builder.COMMENTS_add(node, node, tokenizer.getComments())
        tokenizer.mustMatch("right_bracket")
        builder.PRIMARY_finish(node)

    elif tokenType == "left_curly":
        node = builder.OBJECTINIT_build(tokenizer)

        if not tokenizer.match("right_curly"):
            while True:
                tokenType = tokenizer.get()
                tokenValue = getattr(tokenizer.token, "value", None)
                comments = tokenizer.getComments()

                if tokenValue in ("get", "set") and tokenizer.peek() == "identifier":
                    if staticContext.ecma3OnlyMode:
                        raise SyntaxError("Illegal property accessor", tokenizer)

                    fd = FunctionDefinition(tokenizer, staticContext, True, "expressed_form")
                    builder.OBJECTINIT_addProperty(node, fd)

                else:
                    if tokenType == "identifier" or tokenType == "number" or tokenType == "string":
                        id = builder.PRIMARY_build(tokenizer, "identifier")
                        builder.PRIMARY_finish(id)

                    elif tokenType == "right_curly":
                        if staticContext.ecma3OnlyMode:
                            raise SyntaxError("Illegal trailing ,", tokenizer)

                        tokenizer.unget()
                        break

                    else:
                        if tokenValue in jasy.js.tokenize.Lang.keywords:
                            id = builder.PRIMARY_build(tokenizer, "identifier")
                            builder.PRIMARY_finish(id)
                        else:
                            raise SyntaxError("Invalid property name '%s'" % tokenValue, tokenizer)

                    if tokenizer.match("colon"):
                        childNode = builder.PROPERTYINIT_build(tokenizer)
                        builder.COMMENTS_add(childNode, node, comments)
                        builder.PROPERTYINIT_addOperand(childNode, id)
                        builder.PROPERTYINIT_addOperand(childNode, AssignExpression(tokenizer, staticContext))
                        builder.PROPERTYINIT_finish(childNode)
                        builder.OBJECTINIT_addProperty(node, childNode)

                    else:
                        # Support, e.g., |var {x, y} = o| as destructuring shorthand
                        # for |var {x: x, y: y} = o|, per proposed JS2/ES4 for JS1.8.
                        if tokenizer.peek() != "comma" and tokenizer.peek() != "right_curly":
                            raise SyntaxError("Missing : after property", tokenizer)

                        builder.OBJECTINIT_addProperty(node, id)

                if not tokenizer.match("comma"):
                    break

            builder.COMMENTS_add(node, node, tokenizer.getComments())
            tokenizer.mustMatch("right_curly")
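
# Illustrative sketch: the object initializer branch accepts plain
# "name: value" pairs, get/set accessors (outside ecma3OnlyMode), keywords as
# property names, and the shorthand destructuring form noted above.
#
#   >>> Parser.parseExpression("({ get size() { return len; }, key: value })")
#   >>> Parser.parseExpression("({ a, b } = o)")    # shorthand, no colon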
