ThirdParty/Jasy/jasy/js/parse/Parser.py

branch
maintenance
changeset 6693
3629d88ae235
parent 6647
2a11e1b2dcbe
parent 6692
c104c120e043
child 6694
1cccd74fd355
equal deleted inserted replaced
6647:2a11e1b2dcbe 6693:3629d88ae235
1 #
2 # Jasy - Web Tooling Framework
3 # Copyright 2010-2012 Zynga Inc.
4 # Copyright 2013-2014 Sebastian Werner
5 #
6
7 #
8 # License: MPL 1.1/GPL 2.0/LGPL 2.1
9 # Authors:
10 # - Brendan Eich <brendan@mozilla.org> (Original JavaScript) (2004-2010)
11 # - Sebastian Werner <info@sebastian-werner.net> (Python Port) (2010-2012)
12 #
13
14 from __future__ import unicode_literals
15
16 import jasy.js.tokenize.Tokenizer
17 import jasy.js.parse.VanillaBuilder
18 import jasy.js.tokenize.Lang
19
20 __all__ = [ "parse", "parseExpression" ]
21
def parseExpression(source, fileId=None, line=1, builder=None):
    """Parses *source* as a single expression and returns its node.

    :param source: JavaScript expression text
    :param fileId: optional identifier of the originating file, used in error messages
    :param line: line number offset passed to the tokenizer (defaults to 1)
    :param builder: node builder; a fresh VanillaBuilder is created when omitted
    :returns: the expression node produced by :func:`Expression`
    """
    # PEP 8: compare to None with "is", not "==" (was: builder == None)
    if builder is None:
        builder = jasy.js.parse.VanillaBuilder.VanillaBuilder()

    # Convert source into expression statement to be friendly to the Tokenizer
    if not source.endswith(";"):
        source = source + ";"

    tokenizer = jasy.js.tokenize.Tokenizer.Tokenizer(source, fileId, line)
    staticContext = StaticContext(False, builder)

    return Expression(tokenizer, staticContext)
34
35
36
def parse(source, fileId=None, line=1, builder=None):
    """Parses *source* into a complete syntax tree rooted at a "script" node.

    :param source: JavaScript source text
    :param fileId: optional identifier of the originating file; stored on the
        returned root node and used in error messages
    :param line: line number offset passed to the tokenizer (defaults to 1)
    :param builder: node builder; a fresh VanillaBuilder is created when omitted
    :returns: the root "script" node
    :raises SyntaxError: when unparsed input remains after the script body
    """
    # PEP 8: compare to None with "is", not "==" (was: builder == None)
    if builder is None:
        builder = jasy.js.parse.VanillaBuilder.VanillaBuilder()

    tokenizer = jasy.js.tokenize.Tokenizer.Tokenizer(source, fileId, line)
    staticContext = StaticContext(False, builder)
    node = Script(tokenizer, staticContext)

    # store fileId on top-level node
    node.fileId = tokenizer.fileId

    # add missing comments e.g. empty file with only a comment etc.
    # if there is something non-attached by an inner node it is attached to
    # the top level node, which is not correct, but might be better than
    # just ignoring the comment after all.
    if len(node) > 0:
        builder.COMMENTS_add(node[-1], None, tokenizer.getComments())
    else:
        builder.COMMENTS_add(node, None, tokenizer.getComments())

    if not tokenizer.done():
        # Fix: this branch fires when *unconsumed* input remains, so the old
        # message "Unexpected end of file" was misleading.
        raise SyntaxError("Unexpected input after end of program", tokenizer)

    return node
61
62
63
class SyntaxError(Exception):
    """Parser error that carries file and line context from the tokenizer."""

    def __init__(self, message, tokenizer):
        # Format the message once, then delegate to the base Exception.
        details = "Syntax error: %s\n%s:%s" % (message, tokenizer.fileId, tokenizer.line)
        Exception.__init__(self, details)
67
68
# Used as a status container during tree-building for every def body and the global body
class StaticContext(object):
    """Shared bookkeeping for the recursive-descent parser.

    One instance exists per function body being parsed plus one for the
    top-level program; the parse functions read and mutate its flags while
    walking the token stream.
    """

    # inFunction is used to check if a return stm appears in a valid context.
    def __init__(self, inFunction, builder):
        # Whether this is inside a function, mostly True, only for top-level scope it's False
        self.inFunction = inFunction

        # Set by returnOrYield(); together they detect a generator that
        # also returns a value (which is rejected).
        self.hasEmptyReturn = False
        self.hasReturnWithValue = False
        self.isGenerator = False
        # Monotonically increasing id handed to each new block node.
        self.blockId = 0
        self.builder = builder
        # Stack of enclosing statement nodes; used for label/break/continue
        # resolution and let-scoping.
        self.statementStack = []

        # Sets to store variable uses
        # self.functions = set()
        # self.variables = set()

        # Status
        # self.needsHoisting = False
        # NOTE(review): these four level counters are initialized here but not
        # updated anywhere in this file — presumably maintained elsewhere or vestigial.
        self.bracketLevel = 0
        self.curlyLevel = 0
        self.parenLevel = 0
        self.hookLevel = 0

        # Configure strict ecmascript 3 mode
        self.ecma3OnlyMode = False

        # Status flag during parsing
        self.inForLoopInit = False
99
100
def Script(tokenizer, staticContext):
    """Parses the toplevel and def bodies."""
    rootNode = Statements(tokenizer, staticContext)

    # Statements() returns a "block" typed node; the script root gets its
    # own type so later passes can tell it apart.
    rootNode.type = "script"

    return rootNode
113
114
def nest(tokenizer, staticContext, node, func, end=None):
    """Statement stack and nested statement handler.

    Pushes *node* onto the statement stack, parses the nested construct via
    *func*, pops the stack again and, when *end* is given, requires that
    closing token to follow.
    """
    staticContext.statementStack.append(node)
    parsed = func(tokenizer, staticContext)
    staticContext.statementStack.pop()

    if end:
        tokenizer.mustMatch(end)

    return parsed
123
124
def Statements(tokenizer, staticContext):
    """Parses a list of Statements into a single block node."""

    builder = staticContext.builder

    # Open a fresh block node and hand it the next free block id.
    blockNode = builder.BLOCK_build(tokenizer, staticContext.blockId)
    staticContext.blockId += 1

    builder.BLOCK_hoistLets(blockNode)
    staticContext.statementStack.append(blockNode)

    previous = None
    while not (tokenizer.done() or tokenizer.peek(True) == "right_curly"):
        pendingComments = tokenizer.getComments()
        current = Statement(tokenizer, staticContext)
        builder.COMMENTS_add(current, previous, pendingComments)
        builder.BLOCK_addStatement(blockNode, current)
        previous = current

    staticContext.statementStack.pop()
    builder.BLOCK_finish(blockNode)

    return blockNode
154
155
def Block(tokenizer, staticContext):
    """Parses a curly-delimited statement list and returns its block node."""
    tokenizer.mustMatch("left_curly")
    blockNode = Statements(tokenizer, staticContext)
    tokenizer.mustMatch("right_curly")

    return blockNode
162
163
def Statement(tokenizer, staticContext):
    """Parses a single Statement and returns its node.

    Reads the next significant token and dispatches to the matching
    statement form. Cases for statements ending in a right curly return
    early, avoiding the common semicolon insertion magic after this switch.

    :raises SyntaxError: on any malformed statement
    """

    tokenType = tokenizer.get(True)
    builder = staticContext.builder

    if tokenType == "function":
        # "declared_form" extends functions of staticContext, "statement_form" doesn't.
        if len(staticContext.statementStack) > 1:
            kind = "statement_form"
        else:
            kind = "declared_form"

        return FunctionDefinition(tokenizer, staticContext, True, kind)


    elif tokenType == "left_curly":
        node = Statements(tokenizer, staticContext)
        tokenizer.mustMatch("right_curly")

        return node


    elif tokenType == "if":
        node = builder.IF_build(tokenizer)
        builder.IF_setCondition(node, ParenExpression(tokenizer, staticContext))
        staticContext.statementStack.append(node)
        builder.IF_setThenPart(node, Statement(tokenizer, staticContext))

        if tokenizer.match("else"):
            comments = tokenizer.getComments()
            elsePart = Statement(tokenizer, staticContext)
            builder.COMMENTS_add(elsePart, node, comments)
            builder.IF_setElsePart(node, elsePart)

        staticContext.statementStack.pop()
        builder.IF_finish(node)

        return node


    elif tokenType == "switch":
        # This allows CASEs after a "default", which is in the standard.
        node = builder.SWITCH_build(tokenizer)
        builder.SWITCH_setDiscriminant(node, ParenExpression(tokenizer, staticContext))
        staticContext.statementStack.append(node)

        tokenizer.mustMatch("left_curly")
        tokenType = tokenizer.get()

        while tokenType != "right_curly":
            if tokenType == "default":
                if node.defaultIndex >= 0:
                    raise SyntaxError("More than one switch default", tokenizer)

                childNode = builder.DEFAULT_build(tokenizer)
                builder.SWITCH_setDefaultIndex(node, len(node) - 1)
                tokenizer.mustMatch("colon")
                builder.DEFAULT_initializeStatements(childNode, tokenizer)

                # Collect statements until the next case/default or the
                # closing curly of the switch.
                while True:
                    tokenType = tokenizer.peek(True)
                    if tokenType == "case" or tokenType == "default" or tokenType == "right_curly":
                        break
                    builder.DEFAULT_addStatement(childNode, Statement(tokenizer, staticContext))

                builder.DEFAULT_finish(childNode)

            elif tokenType == "case":
                childNode = builder.CASE_build(tokenizer)
                builder.CASE_setLabel(childNode, Expression(tokenizer, staticContext))
                tokenizer.mustMatch("colon")
                builder.CASE_initializeStatements(childNode, tokenizer)

                while True:
                    tokenType = tokenizer.peek(True)
                    if tokenType == "case" or tokenType == "default" or tokenType == "right_curly":
                        break
                    builder.CASE_addStatement(childNode, Statement(tokenizer, staticContext))

                builder.CASE_finish(childNode)

            else:
                raise SyntaxError("Invalid switch case", tokenizer)

            builder.SWITCH_addCase(node, childNode)
            tokenType = tokenizer.get()

        staticContext.statementStack.pop()
        builder.SWITCH_finish(node)

        return node


    elif tokenType == "for":
        node = builder.FOR_build(tokenizer)
        forBlock = None

        if tokenizer.match("identifier") and tokenizer.token.value == "each":
            builder.FOR_rebuildForEach(node)

        tokenizer.mustMatch("left_paren")
        tokenType = tokenizer.peek()
        childNode = None

        if tokenType != "semicolon":
            staticContext.inForLoopInit = True

            if tokenType == "var" or tokenType == "const":
                tokenizer.get()
                childNode = Variables(tokenizer, staticContext)

            elif tokenType == "let":
                tokenizer.get()

                if tokenizer.peek() == "left_paren":
                    childNode = LetBlock(tokenizer, staticContext, False)

                else:
                    # Let in for head, we need to add an implicit block
                    # around the rest of the for.
                    forBlock = builder.BLOCK_build(tokenizer, staticContext.blockId)
                    staticContext.blockId += 1
                    staticContext.statementStack.append(forBlock)
                    childNode = Variables(tokenizer, staticContext, forBlock)

            else:
                childNode = Expression(tokenizer, staticContext)

            staticContext.inForLoopInit = False

        if childNode and tokenizer.match("in"):
            # for..in loop
            builder.FOR_rebuildForIn(node)
            builder.FOR_setObject(node, Expression(tokenizer, staticContext), forBlock)

            if childNode.type == "var" or childNode.type == "let":
                if len(childNode) != 1:
                    raise SyntaxError("Invalid for..in left-hand side", tokenizer)

                builder.FOR_setIterator(node, childNode, forBlock)

            else:
                builder.FOR_setIterator(node, childNode, forBlock)

        else:
            # Classic three-part for(;;) loop
            builder.FOR_setSetup(node, childNode)
            tokenizer.mustMatch("semicolon")

            if node.isEach:
                raise SyntaxError("Invalid for each..in loop", tokenizer)

            if tokenizer.peek() == "semicolon":
                builder.FOR_setCondition(node, None)
            else:
                builder.FOR_setCondition(node, Expression(tokenizer, staticContext))

            tokenizer.mustMatch("semicolon")

            if tokenizer.peek() == "right_paren":
                builder.FOR_setUpdate(node, None)
            else:
                builder.FOR_setUpdate(node, Expression(tokenizer, staticContext))

        tokenizer.mustMatch("right_paren")
        builder.FOR_setBody(node, nest(tokenizer, staticContext, node, Statement))

        if forBlock:
            builder.BLOCK_finish(forBlock)
            staticContext.statementStack.pop()

        builder.FOR_finish(node)
        return node


    elif tokenType == "while":
        node = builder.WHILE_build(tokenizer)

        builder.WHILE_setCondition(node, ParenExpression(tokenizer, staticContext))
        builder.WHILE_setBody(node, nest(tokenizer, staticContext, node, Statement))
        builder.WHILE_finish(node)

        return node


    elif tokenType == "do":
        node = builder.DO_build(tokenizer)

        builder.DO_setBody(node, nest(tokenizer, staticContext, node, Statement, "while"))
        builder.DO_setCondition(node, ParenExpression(tokenizer, staticContext))
        builder.DO_finish(node)

        if not staticContext.ecma3OnlyMode:
            # <script language="JavaScript"> (without version hints) may need
            # automatic semicolon insertion without a newline after do-while.
            # See http://bugzilla.mozilla.org/show_bug.cgi?id=238945.
            tokenizer.match("semicolon")
            return node

        # NO RETURN


    elif tokenType == "break" or tokenType == "continue":
        if tokenType == "break":
            node = builder.BREAK_build(tokenizer)
        else:
            node = builder.CONTINUE_build(tokenizer)

        # Optional label on the same line: "break foo;" / "continue foo;"
        if tokenizer.peekOnSameLine() == "identifier":
            tokenizer.get()

            if tokenType == "break":
                builder.BREAK_setLabel(node, tokenizer.token.value)
            else:
                builder.CONTINUE_setLabel(node, tokenizer.token.value)

        statementStack = staticContext.statementStack
        i = len(statementStack)
        label = node.label if hasattr(node, "label") else None

        if label:
            # Find the labeled statement on the stack.
            while True:
                i -= 1
                if i < 0:
                    raise SyntaxError("Label not found", tokenizer)
                if getattr(statementStack[i], "label", None) == label:
                    break

            #
            # Both break and continue to label need to be handled specially
            # within a labeled loop, so that they target that loop. If not in
            # a loop, then break targets its labeled statement. Labels can be
            # nested so we skip all labels immediately enclosing the nearest
            # non-label statement.
            #
            while i < len(statementStack) - 1 and statementStack[i + 1].type == "label":
                i += 1

            if i < len(statementStack) - 1 and getattr(statementStack[i + 1], "isLoop", False):
                i += 1
            elif tokenType == "continue":
                raise SyntaxError("Invalid continue", tokenizer)

        else:
            # Unlabeled: find the nearest enclosing loop (or switch for break).
            while True:
                i -= 1
                if i < 0:
                    if tokenType == "break":
                        raise SyntaxError("Invalid break", tokenizer)
                    else:
                        raise SyntaxError("Invalid continue", tokenizer)

                if getattr(statementStack[i], "isLoop", False) or (tokenType == "break" and statementStack[i].type == "switch"):
                    break

        if tokenType == "break":
            builder.BREAK_finish(node)
        else:
            builder.CONTINUE_finish(node)

        # NO RETURN


    elif tokenType == "try":
        node = builder.TRY_build(tokenizer)
        builder.TRY_setTryBlock(node, Block(tokenizer, staticContext))

        while tokenizer.match("catch"):
            childNode = builder.CATCH_build(tokenizer)
            tokenizer.mustMatch("left_paren")
            nextTokenType = tokenizer.get()

            if nextTokenType == "left_bracket" or nextTokenType == "left_curly":
                # Destructured catch identifiers.
                tokenizer.unget()
                exception = DestructuringExpression(tokenizer, staticContext, True)

            elif nextTokenType == "identifier":
                exception = builder.CATCH_wrapException(tokenizer)

            else:
                raise SyntaxError("Missing identifier in catch", tokenizer)

            builder.CATCH_setException(childNode, exception)

            if tokenizer.match("if"):
                if staticContext.ecma3OnlyMode:
                    raise SyntaxError("Illegal catch guard", tokenizer)

                if node.getChildrenLength() > 0 and not node.getUnrelatedChildren()[0].guard:
                    raise SyntaxError("Guarded catch after unguarded", tokenizer)

                builder.CATCH_setGuard(childNode, Expression(tokenizer, staticContext))

            else:
                builder.CATCH_setGuard(childNode, None)

            tokenizer.mustMatch("right_paren")

            builder.CATCH_setBlock(childNode, Block(tokenizer, staticContext))
            builder.CATCH_finish(childNode)

            builder.TRY_addCatch(node, childNode)

        builder.TRY_finishCatches(node)

        if tokenizer.match("finally"):
            builder.TRY_setFinallyBlock(node, Block(tokenizer, staticContext))

        if node.getChildrenLength() == 0 and not hasattr(node, "finallyBlock"):
            raise SyntaxError("Invalid try statement", tokenizer)

        builder.TRY_finish(node)
        return node


    elif tokenType == "catch" or tokenType == "finally":
        # Fix: the original referenced an undefined name "tokens" here
        # (left over from the JavaScript original's token-name table),
        # which raised a NameError instead of the intended SyntaxError.
        raise SyntaxError(tokenType + " without preceding try", tokenizer)


    elif tokenType == "throw":
        node = builder.THROW_build(tokenizer)

        builder.THROW_setException(node, Expression(tokenizer, staticContext))
        builder.THROW_finish(node)

        # NO RETURN


    elif tokenType == "return":
        node = returnOrYield(tokenizer, staticContext)

        # NO RETURN


    elif tokenType == "with":
        node = builder.WITH_build(tokenizer)

        builder.WITH_setObject(node, ParenExpression(tokenizer, staticContext))
        builder.WITH_setBody(node, nest(tokenizer, staticContext, node, Statement))
        builder.WITH_finish(node)

        return node


    elif tokenType == "var" or tokenType == "const":
        node = Variables(tokenizer, staticContext)

        # NO RETURN


    elif tokenType == "let":
        if tokenizer.peek() == "left_paren":
            node = LetBlock(tokenizer, staticContext, True)
        else:
            node = Variables(tokenizer, staticContext)

        # NO RETURN


    elif tokenType == "debugger":
        node = builder.DEBUGGER_build(tokenizer)

        # NO RETURN


    elif tokenType == "newline" or tokenType == "semicolon":
        # Empty statement.
        node = builder.SEMICOLON_build(tokenizer)

        builder.SEMICOLON_setExpression(node, None)
        # NOTE(review): passes the tokenizer where every other call site
        # passes the node — harmless if SEMICOLON_finish ignores its
        # argument, but verify against VanillaBuilder.
        builder.SEMICOLON_finish(tokenizer)

        return node


    else:
        if tokenType == "identifier":
            tokenType = tokenizer.peek()

            # Labeled statement.
            if tokenType == "colon":
                label = tokenizer.token.value
                statementStack = staticContext.statementStack

                # Reject a label that is already active on the stack.
                i = len(statementStack) - 1
                while i >= 0:
                    if getattr(statementStack[i], "label", None) == label:
                        raise SyntaxError("Duplicate label", tokenizer)

                    i -= 1

                tokenizer.get()
                node = builder.LABEL_build(tokenizer)

                builder.LABEL_setLabel(node, label)
                builder.LABEL_setStatement(node, nest(tokenizer, staticContext, node, Statement))
                builder.LABEL_finish(node)

                return node

        # Expression statement.
        # We unget the current token to parse the expression as a whole.
        node = builder.SEMICOLON_build(tokenizer)
        tokenizer.unget()
        builder.SEMICOLON_setExpression(node, Expression(tokenizer, staticContext))
        node.end = node.expression.end
        builder.SEMICOLON_finish(node)

        # NO RETURN


    MagicalSemicolon(tokenizer)
    return node
579
580
581
def MagicalSemicolon(tokenizer):
    """Applies automatic semicolon insertion after a statement.

    When the statement ended on the current line, the next token must be a
    legal statement terminator; otherwise a syntax error is raised. A real
    semicolon, if present, is consumed either way.
    """
    if tokenizer.line == tokenizer.token.line:
        following = tokenizer.peekOnSameLine()

        if following not in ("end", "newline", "semicolon", "right_curly"):
            raise SyntaxError("Missing ; before statement", tokenizer)

    tokenizer.match("semicolon")
590
591
592
def returnOrYield(tokenizer, staticContext):
    """Parses the tail of a ``return`` or ``yield`` whose keyword was just consumed.

    Both forms are only legal inside a function body; yield additionally
    marks the enclosing function as a generator.
    """
    builder = staticContext.builder
    tokenType = tokenizer.token.type

    if tokenType == "return":
        if not staticContext.inFunction:
            raise SyntaxError("Return not in function", tokenizer)

        node = builder.RETURN_build(tokenizer)

    else:
        if not staticContext.inFunction:
            raise SyntaxError("Yield not in function", tokenizer)

        staticContext.isGenerator = True
        node = builder.YIELD_build(tokenizer)

    nextTokenType = tokenizer.peek(True)

    # A value follows unless the statement ends right here; yield is
    # additionally value-less before tokens that close its expression.
    statementEnders = ("end", "newline", "semicolon", "right_curly")
    yieldStoppers = ("yield", "right_bracket", "right_paren", "colon", "comma")
    hasValue = nextTokenType not in statementEnders and (
        tokenType != "yield" or nextTokenType not in yieldStoppers)

    if hasValue:
        if tokenType == "return":
            builder.RETURN_setValue(node, Expression(tokenizer, staticContext))
            staticContext.hasReturnWithValue = True
        else:
            builder.YIELD_setValue(node, AssignExpression(tokenizer, staticContext))

    elif tokenType == "return":
        staticContext.hasEmptyReturn = True

    # Disallow return v; in generator.
    if staticContext.hasReturnWithValue and staticContext.isGenerator:
        raise SyntaxError("Generator returns a value", tokenizer)

    if tokenType == "return":
        builder.RETURN_finish(node)
    else:
        builder.YIELD_finish(node)

    return node
631
632
633
def FunctionDefinition(tokenizer, staticContext, requireName, functionForm):
    """Parses a function definition (declaration, statement or expression form).

    :param requireName: when True, a missing function name is a syntax error
    :param functionForm: "declared_form", "statement_form" or "expressed_form";
        stored on the resulting node as ``functionForm``
    :returns: the finished function node
    :raises SyntaxError: on malformed parameter lists or missing names
    """
    builder = staticContext.builder
    functionNode = builder.FUNCTION_build(tokenizer)

    # Optional (or required) function name.
    if tokenizer.match("identifier"):
        builder.FUNCTION_setName(functionNode, tokenizer.token.value)
    elif requireName:
        raise SyntaxError("Missing def identifier", tokenizer)

    tokenizer.mustMatch("left_paren")

    # Parameter list; absent entirely for "function f()".
    if not tokenizer.match("right_paren"):
        builder.FUNCTION_initParams(functionNode, tokenizer)
        prevParamNode = None
        while True:
            tokenType = tokenizer.get()
            if tokenType == "left_bracket" or tokenType == "left_curly":
                # Destructured formal parameters.
                tokenizer.unget()
                paramNode = DestructuringExpression(tokenizer, staticContext)

            elif tokenType == "identifier":
                paramNode = builder.FUNCTION_wrapParam(tokenizer)

            else:
                raise SyntaxError("Missing formal parameter", tokenizer)

            builder.FUNCTION_addParam(functionNode, tokenizer, paramNode)
            builder.COMMENTS_add(paramNode, prevParamNode, tokenizer.getComments())

            if not tokenizer.match("comma"):
                break

            prevParamNode = paramNode

        tokenizer.mustMatch("right_paren")

    # Do we have an expression closure or a normal body?
    tokenType = tokenizer.get()
    if tokenType != "left_curly":
        builder.FUNCTION_setExpressionClosure(functionNode, True)
        tokenizer.unget()

    # Fresh context for the body: inFunction=True.
    childContext = StaticContext(True, builder)

    if staticContext.inFunction:
        # Inner functions don't reset block numbering, only functions at
        # the top level of the program do.
        childContext.blockId = staticContext.blockId

    if tokenType != "left_curly":
        # Expression closure: body is a single assignment expression.
        # NOTE(review): parsed with staticContext (not childContext) — and
        # the generator check reads staticContext.isGenerator; verify this
        # is intentional.
        builder.FUNCTION_setBody(functionNode, AssignExpression(tokenizer, staticContext))
        if staticContext.isGenerator:
            raise SyntaxError("Generator returns a value", tokenizer)

    else:
        builder.FUNCTION_hoistVars(childContext.blockId)
        builder.FUNCTION_setBody(functionNode, Script(tokenizer, childContext))

    if tokenType == "left_curly":
        tokenizer.mustMatch("right_curly")

    functionNode.end = tokenizer.token.end
    functionNode.functionForm = functionForm

    # Attach any comments trailing the body to the body node itself.
    builder.COMMENTS_add(functionNode.body, functionNode.body, tokenizer.getComments())
    builder.FUNCTION_finish(functionNode, staticContext)

    return functionNode
703
704
705
def Variables(tokenizer, staticContext, letBlock=None):
    """Parses a comma-separated list of var declarations (and maybe initializations).

    The current token decides the declaration kind (var/const/let). For let,
    declarations are attached to the nearest enclosing block on the statement
    stack — or to *letBlock* when the caller supplies one (for-head / let-block
    parsing). Returns the finished declaration-list node.
    """

    builder = staticContext.builder
    # Select the builder entry points matching the declaration kind.
    if tokenizer.token.type == "var":
        build = builder.VAR_build
        addDecl = builder.VAR_addDecl
        finish = builder.VAR_finish
        childContext = staticContext

    elif tokenizer.token.type == "const":
        build = builder.CONST_build
        addDecl = builder.CONST_addDecl
        finish = builder.CONST_finish
        childContext = staticContext

    elif tokenizer.token.type == "let" or tokenizer.token.type == "left_paren":
        build = builder.LET_build
        addDecl = builder.LET_addDecl
        finish = builder.LET_finish

        if not letBlock:
            statementStack = staticContext.statementStack
            i = len(statementStack) - 1

            # a BLOCK *must* be found.
            while statementStack[i].type != "block":
                i -= 1

            # Lets at the def toplevel are just vars, at least in SpiderMonkey.
            if i == 0:
                build = builder.VAR_build
                addDecl = builder.VAR_addDecl
                finish = builder.VAR_finish
                childContext = staticContext

            else:
                childContext = statementStack[i]

        else:
            childContext = letBlock

    node = build(tokenizer)

    # One iteration per declarator in the comma list.
    while True:
        tokenType = tokenizer.get()

        # Done in Python port!
        # FIXME Should have a special DECLARATION node instead of overloading
        # IDENTIFIER to mean both identifier declarations and destructured
        # declarations.
        childNode = builder.DECL_build(tokenizer)

        if tokenType == "left_bracket" or tokenType == "left_curly":
            # Destructuring declarator, e.g. var [a, b] = ...
            # Pass in childContext if we need to add each pattern matched into
            # its variables, else pass in staticContext.
            # Need to unget to parse the full destructured expression.
            tokenizer.unget()
            builder.DECL_setNames(childNode, DestructuringExpression(tokenizer, staticContext, True, childContext))

            # In a for..in head the pattern may appear without an initializer.
            if staticContext.inForLoopInit and tokenizer.peek() == "in":
                addDecl(node, childNode, childContext)
                if tokenizer.match("comma"):
                    continue
                else:
                    break

            # Otherwise a destructuring declarator requires "= expr".
            tokenizer.mustMatch("assign")
            if tokenizer.token.assignOp:
                raise SyntaxError("Invalid variable initialization", tokenizer)

            # Parse the init as a normal assignment.
            builder.DECL_setInitializer(childNode, AssignExpression(tokenizer, staticContext))
            builder.DECL_finish(childNode)
            addDecl(node, childNode, childContext)

            # Copy over names for variable list
            # for nameNode in childNode.names:
            #     childContext.variables.add(nameNode.value)

            if tokenizer.match("comma"):
                continue
            else:
                break

        # Simple declarator: must be a plain identifier.
        if tokenType != "identifier":
            raise SyntaxError("Missing variable name", tokenizer)

        builder.DECL_setName(childNode, tokenizer.token.value)
        builder.DECL_setReadOnly(childNode, node.type == "const")
        addDecl(node, childNode, childContext)

        # Optional initializer; compound assignment ops (+=, ...) are illegal here.
        if tokenizer.match("assign"):
            if tokenizer.token.assignOp:
                raise SyntaxError("Invalid variable initialization", tokenizer)

            initializerNode = AssignExpression(tokenizer, staticContext)
            builder.DECL_setInitializer(childNode, initializerNode)

        builder.DECL_finish(childNode)

        # If we directly use the node in "let" constructs
        # if not hasattr(childContext, "variables"):
        #     childContext.variables = set()

        # childContext.variables.add(childNode.name)

        if not tokenizer.match("comma"):
            break

    finish(node)
    return node
818
819
820
def LetBlock(tokenizer, staticContext, isStatement):
    """Does not handle let inside of for loop init."""
    builder = staticContext.builder

    # tokenizer.token.type must be "let"
    node = builder.LETBLOCK_build(tokenizer)
    tokenizer.mustMatch("left_paren")
    builder.LETBLOCK_setVariables(node, Variables(tokenizer, staticContext, node))
    tokenizer.mustMatch("right_paren")

    # A statement-position let not followed by "{" is really an expression
    # in let statement guise.
    expressionGuise = isStatement and tokenizer.peek() != "left_curly"
    if expressionGuise:
        # Wrap the "let_block" node in a "semicolon" node so that we pop
        # the return value of the expression.
        wrapper = builder.SEMICOLON_build(tokenizer)
        builder.SEMICOLON_setExpression(wrapper, node)
        builder.SEMICOLON_finish(wrapper)

    if isStatement and not expressionGuise:
        builder.LETBLOCK_setBlock(node, Block(tokenizer, staticContext))
    else:
        builder.LETBLOCK_setExpression(node, AssignExpression(tokenizer, staticContext))

    builder.LETBLOCK_finish(node)
    return node
850
851
def checkDestructuring(tokenizer, staticContext, node, simpleNamesOnly=None, data=None):
    """Validates a destructuring pattern and optionally registers declarations.

    Recurses into nested array/object initializers. When *simpleNamesOnly*
    is set (declaration position), every left-hand side must be a plain
    identifier; with *data* also set, a DECL node is built for each name.
    Nodes that are not array/object initializers are accepted as-is.

    :raises SyntaxError: for array comprehensions or non-identifier names
        in declaration position
    """
    if node.type == "array_comp":
        raise SyntaxError("Invalid array comprehension left-hand side", tokenizer)

    # Only initializer literals need element-wise validation.
    if node.type != "array_init" and node.type != "object_init":
        return

    builder = staticContext.builder

    for child in node:
        # Elisions (holes) in array patterns show up as None children.
        # PEP 8: identity comparison with None (was: child == None).
        if child is None:
            continue

        if child.type == "property_init":
            lhs = child[0]
            rhs = child[1]
        else:
            lhs = None
            rhs = None

        # Recurse into nested patterns on the right-hand side.
        if rhs and (rhs.type == "array_init" or rhs.type == "object_init"):
            checkDestructuring(tokenizer, staticContext, rhs, simpleNamesOnly, data)

        if lhs and simpleNamesOnly:
            # In declarations, lhs must be simple names
            if lhs.type != "identifier":
                raise SyntaxError("Missing name in pattern", tokenizer)

            elif data:
                childNode = builder.DECL_build(tokenizer)
                builder.DECL_setName(childNode, lhs.value)

                # Don't need to set initializer because it's just for
                # hoisting anyways.
                builder.DECL_finish(childNode)

                # Each pattern needs to be added to variables.
                # data.variables.add(childNode.name)
891
892
893 # JavaScript 1.7
def DestructuringExpression(tokenizer, staticContext, simpleNamesOnly=None, data=None):
    """Parses and validates a JavaScript 1.7 destructuring pattern."""
    patternNode = PrimaryExpression(tokenizer, staticContext)
    checkDestructuring(tokenizer, staticContext, patternNode, simpleNamesOnly, data)

    return patternNode
899
900
901 # JavsScript 1.7
def GeneratorExpression(tokenizer, staticContext, expression):
    """Wraps *expression* and its comprehension tail in a generator node (JS 1.7)."""
    builder = staticContext.builder

    generatorNode = builder.GENERATOR_build(tokenizer)
    builder.GENERATOR_setExpression(generatorNode, expression)
    builder.GENERATOR_setTail(generatorNode, comprehensionTail(tokenizer, staticContext))
    builder.GENERATOR_finish(generatorNode)

    return generatorNode
911
912
913 # JavaScript 1.7 Comprehensions Tails (Generators / Arrays)
def comprehensionTail(tokenizer, staticContext):
    """Parses a JavaScript 1.7 comprehension tail (generators / arrays).

    A tail is one or more ``for [each] (x in obj)`` clauses followed by an
    optional ``if (guard)``. The current token must already be "for".
    Returns the comprehension-tail node.
    """
    builder = staticContext.builder

    # tokenizer.token.type must be "for"
    body = builder.COMPTAIL_build(tokenizer)

    while True:
        node = builder.FOR_build(tokenizer)

        # Comprehension tails are always for..in loops.
        builder.FOR_rebuildForIn(node)
        if tokenizer.match("identifier"):
            # But sometimes they're for each..in.
            if tokenizer.token.value == "each":
                builder.FOR_rebuildForEach(node)
            else:
                tokenizer.unget()

        tokenizer.mustMatch("left_paren")

        tokenType = tokenizer.get()
        if tokenType == "left_bracket" or tokenType == "left_curly":
            tokenizer.unget()
            # Destructured left side of for in comprehension tails.
            builder.FOR_setIterator(node, DestructuringExpression(tokenizer, staticContext))

        elif tokenType == "identifier":
            # Removed variable/declaration substructure in Python port.
            # Variable declarations are not allowed here. So why process them in such a way?

            # declaration = builder.DECL_build(tokenizer)
            # builder.DECL_setName(declaration, tokenizer.token.value)
            # builder.DECL_finish(declaration)
            # childNode = builder.VAR_build(tokenizer)
            # builder.VAR_addDecl(childNode, declaration)
            # builder.VAR_finish(childNode)
            # builder.FOR_setIterator(node, declaration)

            # Don't add to variables since the semantics of comprehensions is
            # such that the variables are in their own def when desugared.

            identifier = builder.PRIMARY_build(tokenizer, "identifier")
            builder.FOR_setIterator(node, identifier)

        else:
            raise SyntaxError("Missing identifier", tokenizer)

        tokenizer.mustMatch("in")
        builder.FOR_setObject(node, Expression(tokenizer, staticContext))
        tokenizer.mustMatch("right_paren")
        builder.COMPTAIL_addFor(body, node)

        if not tokenizer.match("for"):
            break

    # Optional guard.
    if tokenizer.match("if"):
        builder.COMPTAIL_setGuard(body, ParenExpression(tokenizer, staticContext))

    builder.COMPTAIL_finish(body)

    return body
976
977
def ParenExpression(tokenizer, staticContext):
    """Parses a parenthesized expression, possibly a generator expression."""
    tokenizer.mustMatch("left_paren")

    # Always accept the 'in' operator in a parenthesized expression,
    # where it's unambiguous, even if we might be parsing the init of a
    # for statement.
    savedLoopInit = staticContext.inForLoopInit
    staticContext.inForLoopInit = False
    node = Expression(tokenizer, staticContext)
    staticContext.inForLoopInit = savedLoopInit

    if tokenizer.match("for"):
        suffix = "expression must be parenthesized"
        if node.type == "yield" and not node.parenthesized:
            raise SyntaxError("Yield " + suffix, tokenizer)

        if node.type == "comma" and not node.parenthesized:
            raise SyntaxError("Generator " + suffix, tokenizer)

        node = GeneratorExpression(tokenizer, staticContext, node)

    tokenizer.mustMatch("right_paren")

    return node
1002
1003
def Expression(tokenizer, staticContext):
    """Top-down expression parser matched against SpiderMonkey."""
    builder = staticContext.builder
    node = AssignExpression(tokenizer, staticContext)

    # A trailing comma turns this into a comma-sequence expression.
    if tokenizer.match("comma"):
        sequence = builder.COMMA_build(tokenizer)
        builder.COMMA_addOperand(sequence, node)
        node = sequence

        while True:
            lastOperand = node[-1]
            if lastOperand.type == "yield" and not lastOperand.parenthesized:
                raise SyntaxError("Yield expression must be parenthesized", tokenizer)

            builder.COMMA_addOperand(node, AssignExpression(tokenizer, staticContext))

            if not tokenizer.match("comma"):
                break

        builder.COMMA_finish(node)

    return node
1025
1026
def AssignExpression(tokenizer, staticContext):
    """Parses an assignment expression (right-associative via recursion).

    Returns the plain conditional expression when no assignment operator
    follows; otherwise validates the left-hand side and builds an assign node.

    :raises SyntaxError: for an invalid assignment target
    """
    builder = staticContext.builder

    # Have to treat yield like an operand because it could be the leftmost
    # operand of the expression.
    if tokenizer.match("yield", True):
        return returnOrYield(tokenizer, staticContext)

    comments = tokenizer.getComments()
    node = builder.ASSIGN_build(tokenizer)
    lhs = ConditionalExpression(tokenizer, staticContext)
    builder.COMMENTS_add(lhs, None, comments)

    # Not an assignment after all — discard the assign node and return the lhs.
    if not tokenizer.match("assign"):
        builder.ASSIGN_finish(node)
        return lhs

    # Validate the assignment target.
    if lhs.type == "object_init" or lhs.type == "array_init":
        checkDestructuring(tokenizer, staticContext, lhs)
    elif lhs.type == "identifier" or lhs.type == "dot" or lhs.type == "index" or lhs.type == "call":
        pass
    else:
        raise SyntaxError("Bad left-hand side of assignment", tokenizer)

    # Compound operator (e.g. "+=") if any; recursion makes "=" right-associative.
    builder.ASSIGN_setAssignOp(node, tokenizer.token.assignOp)
    builder.ASSIGN_addOperand(node, lhs)
    builder.ASSIGN_addOperand(node, AssignExpression(tokenizer, staticContext))
    builder.ASSIGN_finish(node)

    return node
1057
1058
def ConditionalExpression(tokenizer, staticContext):
    """Parses a ternary conditional (?:) expression."""
    builder = staticContext.builder
    condition = OrExpression(tokenizer, staticContext)

    if not tokenizer.match("hook"):
        return condition

    hookNode = builder.HOOK_build(tokenizer)
    builder.HOOK_setCondition(hookNode, condition)

    # Always accept the 'in' operator in the middle clause of a ternary,
    # where it's unambiguous, even if we might be parsing the init of a
    # for statement.
    savedLoopInit = staticContext.inForLoopInit
    staticContext.inForLoopInit = False
    builder.HOOK_setThenPart(hookNode, AssignExpression(tokenizer, staticContext))
    staticContext.inForLoopInit = savedLoopInit

    if not tokenizer.match("colon"):
        raise SyntaxError("Missing : after ?", tokenizer)

    builder.HOOK_setElsePart(hookNode, AssignExpression(tokenizer, staticContext))
    builder.HOOK_finish(hookNode)

    return hookNode
1083
1084
def OrExpression(tokenizer, staticContext):
    """Parses a chain of logical OR (||) operators, left-associative."""
    builder = staticContext.builder
    result = AndExpression(tokenizer, staticContext)

    # Each further "or" wraps what has been parsed so far as the left operand.
    while tokenizer.match("or"):
        opNode = builder.OR_build(tokenizer)
        builder.OR_addOperand(opNode, result)
        builder.OR_addOperand(opNode, AndExpression(tokenizer, staticContext))
        builder.OR_finish(opNode)
        result = opNode

    return result
1097
1098
def AndExpression(tokenizer, staticContext):
    """Parses a chain of logical AND (&&) operators, left-associative."""
    builder = staticContext.builder
    result = BitwiseOrExpression(tokenizer, staticContext)

    while tokenizer.match("and"):
        opNode = builder.AND_build(tokenizer)
        builder.AND_addOperand(opNode, result)
        builder.AND_addOperand(opNode, BitwiseOrExpression(tokenizer, staticContext))
        builder.AND_finish(opNode)
        result = opNode

    return result
1111
1112
def BitwiseOrExpression(tokenizer, staticContext):
    """Parses a chain of bitwise OR (|) operators, left-associative."""
    builder = staticContext.builder
    result = BitwiseXorExpression(tokenizer, staticContext)

    while tokenizer.match("bitwise_or"):
        opNode = builder.BITWISEOR_build(tokenizer)
        builder.BITWISEOR_addOperand(opNode, result)
        builder.BITWISEOR_addOperand(opNode, BitwiseXorExpression(tokenizer, staticContext))
        builder.BITWISEOR_finish(opNode)
        result = opNode

    return result
1125
1126
def BitwiseXorExpression(tokenizer, staticContext):
    """Parses a chain of bitwise XOR (^) operators, left-associative."""
    builder = staticContext.builder
    result = BitwiseAndExpression(tokenizer, staticContext)

    while tokenizer.match("bitwise_xor"):
        opNode = builder.BITWISEXOR_build(tokenizer)
        builder.BITWISEXOR_addOperand(opNode, result)
        builder.BITWISEXOR_addOperand(opNode, BitwiseAndExpression(tokenizer, staticContext))
        builder.BITWISEXOR_finish(opNode)
        result = opNode

    return result
1139
1140
def BitwiseAndExpression(tokenizer, staticContext):
    """Parses a chain of bitwise AND (&) operators, left-associative."""
    builder = staticContext.builder
    result = EqualityExpression(tokenizer, staticContext)

    while tokenizer.match("bitwise_and"):
        opNode = builder.BITWISEAND_build(tokenizer)
        builder.BITWISEAND_addOperand(opNode, result)
        builder.BITWISEAND_addOperand(opNode, EqualityExpression(tokenizer, staticContext))
        builder.BITWISEAND_finish(opNode)
        result = opNode

    return result
1153
1154
def EqualityExpression(tokenizer, staticContext):
    """Parses equality operators (==, !=, ===, !==), left-associative."""
    builder = staticContext.builder
    result = RelationalExpression(tokenizer, staticContext)

    while tokenizer.match("eq") or tokenizer.match("ne") or tokenizer.match("strict_eq") or tokenizer.match("strict_ne"):
        opNode = builder.EQUALITY_build(tokenizer)
        builder.EQUALITY_addOperand(opNode, result)
        builder.EQUALITY_addOperand(opNode, RelationalExpression(tokenizer, staticContext))
        builder.EQUALITY_finish(opNode)
        result = opNode

    return result
1167
1168
def RelationalExpression(tokenizer, staticContext):
    """Parses relational operators (&lt;, &lt;=, &gt;=, &gt;, in, instanceof), left-associative."""
    builder = staticContext.builder
    savedLoopInit = staticContext.inForLoopInit

    # Uses of the in operator in shiftExprs are always unambiguous,
    # so unset the flag that prohibits recognizing it.
    staticContext.inForLoopInit = False
    result = ShiftExpression(tokenizer, staticContext)

    # "in" is only consumed when it was already allowed on entry,
    # i.e. we are not inside a for-loop initializer.
    while tokenizer.match("lt") or tokenizer.match("le") or tokenizer.match("ge") or tokenizer.match("gt") or (savedLoopInit == False and tokenizer.match("in")) or tokenizer.match("instanceof"):
        opNode = builder.RELATIONAL_build(tokenizer)
        builder.RELATIONAL_addOperand(opNode, result)
        builder.RELATIONAL_addOperand(opNode, ShiftExpression(tokenizer, staticContext))
        builder.RELATIONAL_finish(opNode)
        result = opNode

    staticContext.inForLoopInit = savedLoopInit

    return result
1188
1189
def ShiftExpression(tokenizer, staticContext):
    """Parses shift operators (&lt;&lt;, &gt;&gt;, &gt;&gt;&gt;), left-associative."""
    builder = staticContext.builder
    result = AddExpression(tokenizer, staticContext)

    while tokenizer.match("lsh") or tokenizer.match("rsh") or tokenizer.match("ursh"):
        opNode = builder.SHIFT_build(tokenizer)
        builder.SHIFT_addOperand(opNode, result)
        builder.SHIFT_addOperand(opNode, AddExpression(tokenizer, staticContext))
        builder.SHIFT_finish(opNode)
        result = opNode

    return result
1202
1203
def AddExpression(tokenizer, staticContext):
    """Parses additive operators (+, -), left-associative."""
    builder = staticContext.builder
    result = MultiplyExpression(tokenizer, staticContext)

    while tokenizer.match("plus") or tokenizer.match("minus"):
        opNode = builder.ADD_build(tokenizer)
        builder.ADD_addOperand(opNode, result)
        builder.ADD_addOperand(opNode, MultiplyExpression(tokenizer, staticContext))
        builder.ADD_finish(opNode)
        result = opNode

    return result
1216
1217
def MultiplyExpression(tokenizer, staticContext):
    """Parses multiplicative operators (*, /, %), left-associative."""
    builder = staticContext.builder
    result = UnaryExpression(tokenizer, staticContext)

    while tokenizer.match("mul") or tokenizer.match("div") or tokenizer.match("mod"):
        opNode = builder.MULTIPLY_build(tokenizer)
        builder.MULTIPLY_addOperand(opNode, result)
        builder.MULTIPLY_addOperand(opNode, UnaryExpression(tokenizer, staticContext))
        builder.MULTIPLY_finish(opNode)
        result = opNode

    return result
1230
1231
def UnaryExpression(tokenizer, staticContext):
    """
    Parses prefix unary operators (delete, void, typeof, !, ~, +, -),
    prefix ++/--, and postfix ++/-- on a member expression.
    """
    builder = staticContext.builder
    tokenType = tokenizer.get(True)

    if tokenType in ["delete", "void", "typeof", "not", "bitwise_not", "plus", "minus"]:
        # Prefix operator: operand may itself be another unary expression.
        node = builder.UNARY_build(tokenizer)
        builder.UNARY_addOperand(node, UnaryExpression(tokenizer, staticContext))

    elif tokenType == "increment" or tokenType == "decrement":
        # Prefix increment/decrement.
        node = builder.UNARY_build(tokenizer)
        builder.UNARY_addOperand(node, MemberExpression(tokenizer, staticContext, True))

    else:
        # No prefix operator: push the token back and parse the operand.
        tokenizer.unget()
        node = MemberExpression(tokenizer, staticContext, True)

        # Don't look across a newline boundary for a postfix {in,de}crement.
        # The "& 3" indexes into tokenizer.tokens — presumably a four-slot
        # lookahead ring buffer; confirm against the Tokenizer implementation.
        if tokenizer.tokens[(tokenizer.tokenIndex + tokenizer.lookahead - 1) & 3].line == tokenizer.line:
            if tokenizer.match("increment") or tokenizer.match("decrement"):
                childNode = builder.UNARY_build(tokenizer)
                builder.UNARY_setPostfix(childNode)
                # Finish the operand before wrapping it in the postfix node.
                builder.UNARY_finish(node)
                builder.UNARY_addOperand(childNode, node)
                node = childNode

    builder.UNARY_finish(node)
    return node
1260
1261
def MemberExpression(tokenizer, staticContext, allowCallSyntax):
    """
    Parses member-access chains: an optional `new` expression followed by
    any number of .property, [index] and — when allowCallSyntax is true —
    (call) suffixes.
    """
    builder = staticContext.builder

    if tokenizer.match("new"):
        # `new` binds to a callee parsed with allowCallSyntax=False, so a
        # following "(" supplies constructor arguments rather than a call.
        node = builder.MEMBER_build(tokenizer)
        builder.MEMBER_addOperand(node, MemberExpression(tokenizer, staticContext, False))

        if tokenizer.match("left_paren"):
            builder.MEMBER_rebuildNewWithArgs(node)
            builder.MEMBER_addOperand(node, ArgumentList(tokenizer, staticContext))

        builder.MEMBER_finish(node)

    else:
        node = PrimaryExpression(tokenizer, staticContext)

    # Wrap the node parsed so far in one suffix node per iteration.
    while True:
        tokenType = tokenizer.get()
        if tokenType == "end":
            break

        if tokenType == "dot":
            childNode = builder.MEMBER_build(tokenizer)
            builder.MEMBER_addOperand(childNode, node)
            tokenizer.mustMatch("identifier")
            builder.MEMBER_addOperand(childNode, builder.MEMBER_build(tokenizer))

        elif tokenType == "left_bracket":
            childNode = builder.MEMBER_build(tokenizer, "index")
            builder.MEMBER_addOperand(childNode, node)
            builder.MEMBER_addOperand(childNode, Expression(tokenizer, staticContext))
            tokenizer.mustMatch("right_bracket")

        elif tokenType == "left_paren" and allowCallSyntax:
            childNode = builder.MEMBER_build(tokenizer, "call")
            builder.MEMBER_addOperand(childNode, node)
            builder.MEMBER_addOperand(childNode, ArgumentList(tokenizer, staticContext))

        else:
            # Token is not part of the member chain: push it back and stop.
            tokenizer.unget()
            return node

        builder.MEMBER_finish(childNode)
        node = childNode

    return node
1308
1309
def ArgumentList(tokenizer, staticContext):
    """Parses a call argument list up to and including the closing parenthesis."""
    builder = staticContext.builder
    listNode = builder.LIST_build(tokenizer)

    # Empty argument list: "()".
    if tokenizer.match("right_paren", True):
        return listNode

    while True:
        argNode = AssignExpression(tokenizer, staticContext)
        if argNode.type == "yield" and not argNode.parenthesized and tokenizer.peek() == "comma":
            raise SyntaxError("Yield expression must be parenthesized", tokenizer)

        if tokenizer.match("for"):
            # A generator expression must be the one and only argument.
            argNode = GeneratorExpression(tokenizer, staticContext, argNode)
            if len(listNode) > 1 or tokenizer.peek(True) == "comma":
                raise SyntaxError("Generator expression must be parenthesized", tokenizer)

        builder.LIST_addOperand(listNode, argNode)
        if not tokenizer.match("comma"):
            break

    tokenizer.mustMatch("right_paren")
    builder.LIST_finish(listNode)

    return listNode
1335
1336
def PrimaryExpression(tokenizer, staticContext):
    """
    Parses a primary expression: function expressions, array initialisers
    (including array comprehensions), object initialisers (including
    getters/setters and shorthand destructuring properties), parenthesized
    expressions, let blocks, and simple literals/identifiers.

    Raises SyntaxError on malformed input.
    """
    builder = staticContext.builder
    tokenType = tokenizer.get(True)

    if tokenType == "function":
        node = FunctionDefinition(tokenizer, staticContext, False, "expressed_form")

    elif tokenType == "left_bracket":
        # Array initialiser, e.g. [1, , 2]; a bare comma is an elision
        # and contributes a None element.
        node = builder.ARRAYINIT_build(tokenizer)
        while True:
            tokenType = tokenizer.peek(True)
            if tokenType == "right_bracket":
                break

            if tokenType == "comma":
                tokenizer.get()
                builder.ARRAYINIT_addElement(node, None)
                continue

            builder.ARRAYINIT_addElement(node, AssignExpression(tokenizer, staticContext))

            if tokenType != "comma" and not tokenizer.match("comma"):
                break

        # If we matched exactly one element and got a "for", we have an
        # array comprehension.
        if len(node) == 1 and tokenizer.match("for"):
            childNode = builder.ARRAYCOMP_build(tokenizer)
            builder.ARRAYCOMP_setExpression(childNode, node[0])
            builder.ARRAYCOMP_setTail(childNode, comprehensionTail(tokenizer, staticContext))
            node = childNode

        builder.COMMENTS_add(node, node, tokenizer.getComments())
        tokenizer.mustMatch("right_bracket")
        builder.PRIMARY_finish(node)

    elif tokenType == "left_curly":
        # Object initialiser.
        node = builder.OBJECTINIT_build(tokenizer)

        if not tokenizer.match("right_curly"):
            while True:
                tokenType = tokenizer.get()
                tokenValue = getattr(tokenizer.token, "value", None)
                comments = tokenizer.getComments()

                if tokenValue in ("get", "set") and tokenizer.peek() == "identifier":
                    # Getter/setter property (not allowed in ECMA3-only mode).
                    if staticContext.ecma3OnlyMode:
                        raise SyntaxError("Illegal property accessor", tokenizer)

                    fd = FunctionDefinition(tokenizer, staticContext, True, "expressed_form")
                    builder.OBJECTINIT_addProperty(node, fd)

                else:
                    if tokenType == "identifier" or tokenType == "number" or tokenType == "string":
                        propertyId = builder.PRIMARY_build(tokenizer, "identifier")
                        builder.PRIMARY_finish(propertyId)

                    elif tokenType == "right_curly":
                        # Trailing comma (not allowed in ECMA3-only mode).
                        if staticContext.ecma3OnlyMode:
                            raise SyntaxError("Illegal trailing ,", tokenizer)

                        tokenizer.unget()
                        break

                    else:
                        # Reserved words are tolerated as property names;
                        # anything else is an error. (Removed a leftover
                        # debug print() that wrote to stdout here.)
                        if tokenValue in jasy.js.tokenize.Lang.keywords:
                            propertyId = builder.PRIMARY_build(tokenizer, "identifier")
                            builder.PRIMARY_finish(propertyId)
                        else:
                            raise SyntaxError("Invalid property name", tokenizer)

                    if tokenizer.match("colon"):
                        childNode = builder.PROPERTYINIT_build(tokenizer)
                        builder.COMMENTS_add(childNode, node, comments)
                        builder.PROPERTYINIT_addOperand(childNode, propertyId)
                        builder.PROPERTYINIT_addOperand(childNode, AssignExpression(tokenizer, staticContext))
                        builder.PROPERTYINIT_finish(childNode)
                        builder.OBJECTINIT_addProperty(node, childNode)

                    else:
                        # Support, e.g., |var {staticContext, y} = o| as destructuring shorthand
                        # for |var {staticContext: staticContext, y: y} = o|, per proposed JS2/ES4 for JS1.8.
                        if tokenizer.peek() != "comma" and tokenizer.peek() != "right_curly":
                            raise SyntaxError("Missing : after property", tokenizer)
                        builder.OBJECTINIT_addProperty(node, propertyId)

                if not tokenizer.match("comma"):
                    break

            builder.COMMENTS_add(node, node, tokenizer.getComments())
            tokenizer.mustMatch("right_curly")

        builder.OBJECTINIT_finish(node)

    elif tokenType == "left_paren":
        # ParenExpression does its own matching on parentheses, so we need to unget.
        tokenizer.unget()
        node = ParenExpression(tokenizer, staticContext)
        node.parenthesized = True

    elif tokenType == "let":
        node = LetBlock(tokenizer, staticContext, False)

    elif tokenType in ["null", "this", "true", "false", "identifier", "number", "string", "regexp"]:
        node = builder.PRIMARY_build(tokenizer, tokenType)
        builder.PRIMARY_finish(node)

    else:
        raise SyntaxError("Missing operand. Found type: %s" % tokenType, tokenizer)

    return node

# eric ide

# mercurial