# -*- coding: utf-8 -*- # Copyright JS Foundation and other contributors, https://js.foundation/ # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, unicode_literals
from .objects import Object from .compat import basestring, unicode from .utils import format from .error_handler import ErrorHandler from .messages import Messages from .scanner import RawToken, Scanner, SourceLocation, Position, RegExp from .token import Token, TokenName from .syntax import Syntax from . import nodes as Node
class Value(object):
    """Trivial box holding a single ``value`` attribute."""

    def __init__(self, value):
        self.value = value
class Marker(object):
    """Records a source position: absolute character ``index`` plus
    ``line``/``column`` (all default to None when unknown)."""

    def __init__(self, index=None, line=None, column=None):
        self.index = index
        self.line = line
        self.column = column
class TokenEntry(Object):
    """Token record exposed to API consumers: token type and value, plus
    optional regex info, character range and source location.

    NOTE: parameter names ``type`` and ``range`` shadow builtins, but they
    are part of the public keyword-argument interface and are kept.
    """

    def __init__(self, type=None, value=None, regex=None, range=None, loc=None):
        self.type = type
        self.value = value
        self.regex = regex
        self.range = range
        self.loc = loc
# NOTE(review): extraction artifact — ``class Parser`` and ``def __init__`` were
# collapsed onto one line, and the rest of __init__'s body appears truncated
# (only the ``self.config`` assignment survives). ``Config`` is presumably
# defined elsewhere in this file — TODO confirm against the original source.
# Also note the mutable default ``options={}`` — worth fixing once the full
# body is recovered.
class Parser(object): def __init__(self, code, options={}, delegate=None):
self.config = Config(**options)
def unexpectedTokenError(self, token=None, message=None):
    """Build and return an error object for an unexpected token.

    When no explicit ``message`` is given, one is chosen from the token
    type; '%0' in the message template is replaced (once) by the token
    value. Position information comes from the token itself when it
    carries an integer line number, otherwise from ``self.lastMarker``.
    """
    msg = message or Messages.UnexpectedToken
    if token:
        if not message:
            typ = token.type
            if typ is Token.EOF:
                msg = Messages.UnexpectedEOS
            elif typ is Token.Identifier:
                msg = Messages.UnexpectedIdentifier
            elif typ is Token.NumericLiteral:
                msg = Messages.UnexpectedNumber
            elif typ is Token.StringLiteral:
                msg = Messages.UnexpectedString
            elif typ is Token.Template:
                msg = Messages.UnexpectedTemplate
            elif typ is Token.Keyword:
                if self.scanner.isFutureReservedWord(token.value):
                    msg = Messages.UnexpectedReserved
                elif self.context.strict and self.scanner.isStrictModeReservedWord(token.value):
                    msg = Messages.StrictReservedWord
            else:
                msg = Messages.UnexpectedToken
        value = token.value
    else:
        value = 'ILLEGAL'

    msg = msg.replace('%0', unicode(value), 1)

    if token and isinstance(token.lineNumber, int):
        index = token.start
        line = token.lineNumber
        # Column is measured from the start of the last marker's line.
        lastMarkerLineStart = self.lastMarker.index - self.lastMarker.column
        column = token.start - lastMarkerLineStart + 1
        return self.errorHandler.createError(index, line, column, msg)
    else:
        index = self.lastMarker.index
        line = self.lastMarker.line
        column = self.lastMarker.column + 1
        return self.errorHandler.createError(index, line, column, msg)
# NOTE(review): headless fragment — the ``def`` line and the start of the body
# (where ``token`` is bound, presumably from the current lookahead) were lost
# in extraction; ``token`` below is otherwise undefined. Looks like the tail
# of a ``nextToken``-style method — TODO confirm against the original source.
next = self.scanner.lex()
self.hasLineTerminator = token.lineNumber != next.lineNumber
if next and self.context.strict and next.type is Token.Identifier: if self.scanner.isStrictModeReservedWord(next.value):
next.type = Token.Keyword
self.lookahead = next
if self.config.tokens and next.type isnot Token.EOF:
self.tokens.append(self.convertToken(next))
return token
def nextRegexToken(self):
    """Re-scan the pending '/' or '/=' as a regular expression literal,
    prime the lookahead past it, and return the regex token."""
    self.collectComments()

    token = self.scanner.scanRegExp()
    if self.config.tokens:
        # Pop the previous token, '/' or '/=':
        # it is replaced by the regex token built from the lookahead.
        self.tokens.pop()
        self.tokens.append(self.convertToken(token))

    # Prime the next lookahead.
    self.lookahead = token
    self.nextToken()

    # BUG FIX: the mangled source dropped this return; the regex branch of
    # the primary-expression parser assigns ``token = self.nextRegexToken()``,
    # so the scanned token must be returned.
    return token
def expect(self, value):
    """Consume the next token; it must be the punctuator ``value``,
    otherwise an exception is thrown via throwUnexpectedToken()."""
    token = self.nextToken()
    if token.type is not Token.Punctuator or token.value != value:
        self.throwUnexpectedToken(token)
def expectCommaSeparator(self):
    """Quietly expect a comma when in tolerant mode; otherwise delegate
    to expect(',').

    In tolerant mode a ';' is consumed and tolerated in place of ',',
    and any other token is tolerated without being consumed.
    """
    if self.config.tolerant:
        token = self.lookahead
        if token.type is Token.Punctuator and token.value == ',':
            self.nextToken()
        elif token.type is Token.Punctuator and token.value == ';':
            # Common typo: treat ';' as ',' but record the problem.
            self.nextToken()
            self.tolerateUnexpectedToken(token)
        else:
            self.tolerateUnexpectedToken(token, Messages.UnexpectedToken)
    else:
        self.expect(',')
def expectKeyword(self, keyword):
    """Consume the next token; it must be the keyword ``keyword``,
    otherwise an exception is thrown via throwUnexpectedToken()."""
    token = self.nextToken()
    if token.type is not Token.Keyword or token.value != keyword:
        self.throwUnexpectedToken(token)
def match(self, *value):
    """Return True if the lookahead token is a punctuator whose value is
    one of ``value`` (without consuming it)."""
    return self.lookahead.type is Token.Punctuator and self.lookahead.value in value
def matchKeyword(self, *keyword):
    """Return True if the lookahead token is one of the given keywords
    (without consuming it)."""
    return self.lookahead.type is Token.Keyword and self.lookahead.value in keyword
def matchContextualKeyword(self, *keyword):
    """Return True if the lookahead token matches one of the given
    contextual keywords (identifiers that act as keywords only in
    certain contexts), without consuming it."""
    return self.lookahead.type is Token.Identifier and self.lookahead.value in keyword
def matchAssign(self):
    """Return True if the lookahead token is an assignment operator."""
    if self.lookahead.type is not Token.Punctuator:
        return False
    op = self.lookahead.value
    return op in ('=', '*=', '**=', '/=', '%=', '+=', '-=', '<<=', '>>=', '>>>=', '&=', '^=', '|=')
# Cover grammar support. # # When an assignment expression position starts with an left parenthesis, the determination of the type # of the syntax is to be deferred arbitrarily long until the end of the parentheses pair (plus a lookahead) # or the first comma. This situation also defers the determination of all the expressions nested in the pair. # # There are three productions that can be parsed in a parentheses pair that needs to be determined # after the outermost pair is closed. They are: # # 1. AssignmentExpression # 2. BindingElements # 3. AssignmentTargets # # In order to avoid exponential backtracking, we use two flags to denote if the production can be # binding element or assignment target. # # The three productions have the relationship: # # BindingElements ⊆ AssignmentTargets ⊆ AssignmentExpression # # with a single exception that CoverInitializedName when used directly in an Expression, generates # an early error. Therefore, we need the third state, firstCoverInitializedNameError, to track the # first usage of CoverInitializedName and report it when we reached the end of the parentheses pair. # # isolateCoverGrammar function runs the given parser function with a new cover grammar context, and it does not # effect the current flags. This means the production the parser parses is only used as an expression. Therefore # the CoverInitializedName check is conducted. # # inheritCoverGrammar function runs the given parse function with a new cover grammar context, and it propagates # the flags outside of the parser. This means the production the parser parses is used as a part of a potential # pattern. The CoverInitializedName check is deferred.
# NOTE(review): headless fragment — per the cover-grammar comment above, this
# is the tail of the isolateCoverGrammar/inheritCoverGrammar helpers, but the
# ``def`` line(s) binding ``parseFunction`` and the ``previous*`` locals were
# lost in extraction. TODO recover from the original source.
result = parseFunction() if self.context.firstCoverInitializedNameError isnotNone:
self.throwUnexpectedToken(self.context.firstCoverInitializedNameError)
self.context.isBindingElement = self.context.isBindingElement and previousIsBindingElement
self.context.isAssignmentTarget = self.context.isAssignmentTarget and previousIsAssignmentTarget
self.context.firstCoverInitializedNameError = previousFirstCoverInitializedNameError or self.context.firstCoverInitializedNameError
# NOTE(review): headless fragment — this is a dispatch on the lookahead token
# type building ``expr`` (identifiers, literals, templates, punctuators
# including regex and grouping, keywords). The ``def`` line and the binding of
# ``node`` (used by self.finalize below) were lost in extraction, as was the
# final ``return expr``. Looks like a primary-expression parser — TODO recover
# the full method from the original source.
typ = self.lookahead.type if typ is Token.Identifier: if (self.context.isModule or self.context.allowAwait) and self.lookahead.value == 'await':
self.tolerateUnexpectedToken(self.lookahead)
expr = self.parseFunctionExpression() if self.matchAsyncFunction() else self.finalize(node, Node.Identifier(self.nextToken().value))
elif typ in (
Token.NumericLiteral,
Token.StringLiteral,
): if self.context.strict and self.lookahead.octal:
self.tolerateUnexpectedToken(self.lookahead, Messages.StrictOctalLiteral)
self.context.isAssignmentTarget = False
self.context.isBindingElement = False
token = self.nextToken()
raw = self.getTokenRaw(token)
expr = self.finalize(node, Node.Literal(token.value, raw))
elif typ is Token.BooleanLiteral:
self.context.isAssignmentTarget = False
self.context.isBindingElement = False
token = self.nextToken()
raw = self.getTokenRaw(token)
expr = self.finalize(node, Node.Literal(token.value == 'true', raw))
elif typ is Token.NullLiteral:
self.context.isAssignmentTarget = False
self.context.isBindingElement = False
token = self.nextToken()
raw = self.getTokenRaw(token)
expr = self.finalize(node, Node.Literal(None, raw))
elif typ is Token.Template:
expr = self.parseTemplateLiteral()
elif typ is Token.Punctuator:
value = self.lookahead.value if value == '(':
self.context.isBindingElement = False
expr = self.inheritCoverGrammar(self.parseGroupExpression) elif value == '[':
expr = self.inheritCoverGrammar(self.parseArrayInitializer) elif value == '{':
expr = self.inheritCoverGrammar(self.parseObjectInitializer) elif value in ('/', '/='):
self.context.isAssignmentTarget = False
self.context.isBindingElement = False
self.scanner.index = self.startMarker.index
token = self.nextRegexToken()
raw = self.getTokenRaw(token)
expr = self.finalize(node, Node.RegexLiteral(token.regex, raw, token.pattern, token.flags)) else:
expr = self.throwUnexpectedToken(self.nextToken())
elif typ is Token.Keyword: ifnot self.context.strict and self.context.allowYield and self.matchKeyword('yield'):
expr = self.parseIdentifierName() elifnot self.context.strict and self.matchKeyword('let'):
expr = self.finalize(node, Node.Identifier(self.nextToken().value)) else:
self.context.isAssignmentTarget = False
self.context.isBindingElement = False if self.matchKeyword('function'):
expr = self.parseFunctionExpression() elif self.matchKeyword('this'):
self.nextToken()
expr = self.finalize(node, Node.ThisExpression()) elif self.matchKeyword('class'):
expr = self.parseClassExpression() elif self.matchImportCall():
expr = self.parseImportCall() else:
expr = self.throwUnexpectedToken(self.nextToken())
# NOTE(review): headless fragment — dispatch on a property-key token building
# ``key``; the ``def`` line and the bindings of ``token`` and ``node`` were
# lost in extraction. TODO recover the full method from the original source.
typ = token.type if typ in (
Token.StringLiteral,
Token.NumericLiteral,
): if self.context.strict and token.octal:
self.tolerateUnexpectedToken(token, Messages.StrictOctalLiteral)
raw = self.getTokenRaw(token)
key = self.finalize(node, Node.Literal(token.value, raw))
elif typ in (
Token.Identifier,
Token.BooleanLiteral,
Token.NullLiteral,
Token.Keyword,
):
key = self.finalize(node, Node.Identifier(token.value))
elif typ is Token.Punctuator: if token.value == '[':
key = self.isolateCoverGrammar(self.parseAssignmentExpression)
self.expect(']') else:
key = self.throwUnexpectedToken(token)
else:
key = self.throwUnexpectedToken(token)
return key
def isPropertyKey(self, key, value):
    """Return True if AST node ``key`` denotes the property name ``value``:
    either an Identifier with that name or a Literal with that value."""
    return (
        (key.type is Syntax.Identifier and key.name == value) or
        (key.type is Syntax.Literal and key.value == value)
    )
def reinterpretExpressionAsPattern(self, expr):
    """Rewrite an expression AST node in place into the corresponding
    binding/assignment pattern node, recursing into its children.

    Identifier / MemberExpression / RestElement / AssignmentPattern are
    already valid targets and are left untouched.
    """
    typ = expr.type
    if typ in (
        Syntax.Identifier,
        Syntax.MemberExpression,
        Syntax.RestElement,
        Syntax.AssignmentPattern,
    ):
        pass
    elif typ is Syntax.SpreadElement:
        expr.type = Syntax.RestElement
        self.reinterpretExpressionAsPattern(expr.argument)
    elif typ is Syntax.ArrayExpression:
        expr.type = Syntax.ArrayPattern
        for elem in expr.elements:
            if elem is not None:  # elisions stay as holes
                self.reinterpretExpressionAsPattern(elem)
    elif typ is Syntax.ObjectExpression:
        expr.type = Syntax.ObjectPattern
        for prop in expr.properties:
            self.reinterpretExpressionAsPattern(prop if prop.type is Syntax.SpreadElement else prop.value)
    elif typ is Syntax.AssignmentExpression:
        expr.type = Syntax.AssignmentPattern
        del expr.operator  # patterns carry no operator
        self.reinterpretExpressionAsPattern(expr.left)
    else:
        # Allow other node type for tolerant parsing.
        pass
# NOTE(review): headless fragment — appears to be the sequence-/arrow-parameter
# handling of a group-expression parser, but the ``def`` line and the bindings
# of ``expressions``, ``params``, ``startToken``, ``arrow`` and the initial
# ``expr`` were lost in extraction. TODO recover from the original source.
self.context.isAssignmentTarget = False
expressions.append(expr) while self.lookahead.type isnot Token.EOF: ifnot self.match(','): break
self.nextToken() if self.match(')'):
self.nextToken() for expression in expressions:
self.reinterpretExpressionAsPattern(expression)
arrow = True
expr = Node.ArrowParameterPlaceHolder(expressions) elif self.match('...'): ifnot self.context.isBindingElement:
self.throwUnexpectedToken(self.lookahead)
expressions.append(self.parseRestElement(params))
self.expect(')') ifnot self.match('=>'):
self.expect('=>')
self.context.isBindingElement = False for expression in expressions:
self.reinterpretExpressionAsPattern(expression)
arrow = True
expr = Node.ArrowParameterPlaceHolder(expressions) else:
expressions.append(self.inheritCoverGrammar(self.parseAssignmentExpression)) if arrow: break ifnot arrow:
expr = self.finalize(self.startNode(startToken), Node.SequenceExpression(expressions))
ifnot arrow:
self.expect(')') if self.match('=>'): if expr.type is Syntax.Identifier and expr.name == 'yield':
arrow = True
expr = Node.ArrowParameterPlaceHolder([expr]) ifnot arrow: ifnot self.context.isBindingElement:
self.throwUnexpectedToken(self.lookahead)
if expr.type is Syntax.SequenceExpression: for expression in expr.expressions:
self.reinterpretExpressionAsPattern(expression) else:
self.reinterpretExpressionAsPattern(expr)
if expr.type is Syntax.SequenceExpression:
parameters = expr.expressions else:
parameters = [expr]
expr = Node.ArrowParameterPlaceHolder(parameters)
self.context.isBindingElement = False
def parseArguments(self):
    """Parse a parenthesized argument list and return the list of
    argument expression nodes ('...' spreads included)."""
    self.expect('(')
    args = []
    if not self.match(')'):
        while True:
            if self.match('...'):
                expr = self.parseSpreadElement()
            else:
                expr = self.isolateCoverGrammar(self.parseAssignmentExpression)
            args.append(expr)
            if self.match(')'):
                break
            self.expectCommaSeparator()
            # A trailing comma before ')' is allowed.
            if self.match(')'):
                break
    self.expect(')')
    return args
def isIdentifierName(self, token):
    """Return True if ``token`` can serve as an IdentifierName
    (identifier, keyword, or boolean/null literal)."""
    return (
        token.type is Token.Identifier or
        token.type is Token.Keyword or
        token.type is Token.BooleanLiteral or
        token.type is Token.NullLiteral
    )
def parseAsyncArguments(self):
    """Parse a parenthesized argument list in async-arrow position and
    return the list of argument nodes (mirrors parseArguments, but each
    non-spread argument goes through parseAsyncArgument)."""
    self.expect('(')
    args = []
    if not self.match(')'):
        while True:
            if self.match('...'):
                expr = self.parseSpreadElement()
            else:
                expr = self.isolateCoverGrammar(self.parseAsyncArgument)
            args.append(expr)
            if self.match(')'):
                break
            self.expectCommaSeparator()
            # A trailing comma before ')' is allowed.
            if self.match(')'):
                break
    self.expect(')')
    return args
def matchImportCall(self):
    """Return True if the lookahead is ``import`` followed by '(' —
    i.e. a dynamic import() call rather than an import declaration.

    The scanner state is saved and restored around the one-token peek.
    """
    match = self.matchKeyword('import')
    if match:
        state = self.scanner.saveState()
        self.scanner.scanComments()
        next_token = self.scanner.lex()
        self.scanner.restoreState(state)
        match = (next_token.type is Token.Punctuator) and (next_token.value == '(')
    # BUG FIX: the mangled source dropped this return; callers use the
    # result as a condition (e.g. ``elif self.matchImportCall():``).
    return match
# NOTE(review): headless fragment — branches of a call/member-expression loop
# (call arguments, computed member access, tagged templates). The enclosing
# ``def``/loop, and the bindings of ``expr``, ``maybeAsync``, ``startToken``
# and ``node``, were lost in extraction. ``Syntax.Importand`` below is itself
# garbled (presumably ``Syntax.Import and``), and the tagged-template branch
# appears duplicated. TODO recover from the original source.
elif self.match('('):
asyncArrow = maybeAsync and (startToken.lineNumber == self.lookahead.lineNumber)
self.context.isBindingElement = False
self.context.isAssignmentTarget = False if asyncArrow:
args = self.parseAsyncArguments() else:
args = self.parseArguments() if expr.type is Syntax.Importand len(args) != 1:
self.tolerateError(Messages.BadImportCallArity)
expr = self.finalize(self.startNode(startToken), Node.CallExpression(expr, args)) if asyncArrow and self.match('=>'): for arg in args:
self.reinterpretExpressionAsPattern(arg)
expr = Node.AsyncArrowParameterPlaceHolder(args) elif self.match('['):
self.context.isBindingElement = False
self.context.isAssignmentTarget = True
self.expect('[')
property = self.isolateCoverGrammar(self.parseExpression)
self.expect(']')
expr = self.finalize(self.startNode(startToken), Node.ComputedMemberExpression(expr, property))
elif self.lookahead.type is Token.Template and self.lookahead.head:
quasi = self.parseTemplateLiteral()
expr = self.finalize(self.startNode(startToken), Node.TaggedTemplateExpression(expr, quasi))
elif self.lookahead.type is Token.Template and self.lookahead.head:
quasi = self.parseTemplateLiteral()
expr = self.finalize(node, Node.TaggedTemplateExpression(expr, quasi))
# NOTE(review): headless fragment — the reduce step of an operator-precedence
# binary-expression parser; the enclosing ``def`` and the bindings of
# ``stack``, ``precedences``, ``markers`` and ``prec`` were lost in
# extraction. TODO recover from the original source.
# Reduce: make a binary expression from the three topmost entries. while len(stack) > 2 and prec <= precedences[-1]:
right = stack.pop()
operator = stack.pop()
precedences.pop()
left = stack.pop()
markers.pop()
node = self.startNode(markers[-1])
stack.append(self.finalize(node, Node.BinaryExpression(operator, left, right)))
def checkPatternParam(self, options, param):
    """Recursively validate a formal-parameter pattern, recording
    duplicate/restricted names into ``options`` via validateParam, and
    downgrading ``options.simple`` for anything but a plain identifier."""
    typ = param.type
    if typ is Syntax.Identifier:
        self.validateParam(options, param, param.name)
    elif typ is Syntax.RestElement:
        self.checkPatternParam(options, param.argument)
    elif typ is Syntax.AssignmentPattern:
        self.checkPatternParam(options, param.left)
    elif typ is Syntax.ArrayPattern:
        for element in param.elements:
            if element is not None:  # skip elisions
                self.checkPatternParam(options, element)
    elif typ is Syntax.ObjectPattern:
        for prop in param.properties:
            self.checkPatternParam(options, prop if prop.type is Syntax.RestElement else prop.value)
    options.simple = options.simple and isinstance(param, Node.Identifier)
# NOTE(review): headless fragment — appears to reinterpret an expression (or
# arrow-parameter placeholder) as a formal-parameter list; the ``def`` line,
# the binding of ``expr``/``params`` and the tail that uses ``options`` were
# lost in extraction. ``Params`` is presumably defined elsewhere in this
# file — TODO confirm.
asyncArrow = False
typ = expr.type if typ is Syntax.Identifier: pass elif typ is Syntax.ArrowParameterPlaceHolder:
params = expr.params
asyncArrow = expr.isAsync else: returnNone
options = Params(
simple=True,
paramSet={},
)
for param in params: if param.type is Syntax.AssignmentPattern: if param.right.type is Syntax.YieldExpression: if param.right.argument:
self.throwUnexpectedToken(self.lookahead)
param.right.type = Syntax.Identifier
param.right.name = 'yield' del param.right.argument del param.right.delegate elif asyncArrow and param.type is Syntax.Identifier and param.name == 'await':
self.throwUnexpectedToken(self.lookahead)
self.checkPatternParam(options, param)
if self.context.strict ornot self.context.allowYield: for param in params: if param.type is Syntax.YieldExpression:
self.throwUnexpectedToken(self.lookahead)
if options.message is Messages.StrictParamDupe:
token = options.stricted if self.context.strict else options.firstRestricted
self.throwUnexpectedToken(token, options.message)
# NOTE(review): two unrelated headless fragments fused by extraction:
# (1) async-arrow / strict-mode LHS checks from an assignment-expression
#     parser (``token``, ``expr`` bound in a missing header), and
# (2) the tail of a binding-list loop returning ``lst`` (``lst``, ``kind``,
#     ``options`` bound in a missing header).
# TODO recover both from the original source.
if token.type is Token.Identifier and (token.lineNumber == self.lookahead.lineNumber) and token.value == 'async': if self.lookahead.type is Token.Identifier or self.matchKeyword('yield'):
arg = self.parsePrimaryExpression()
self.reinterpretExpressionAsPattern(arg)
expr = Node.AsyncArrowParameterPlaceHolder([arg])
if expr.type is Syntax.ArrowParameterPlaceHolder or self.match('=>'):
if self.context.strict and expr.type is Syntax.Identifier:
id = expr if self.scanner.isRestrictedWord(id.name):
self.tolerateUnexpectedToken(token, Messages.StrictLHSAssignment) if self.scanner.isStrictModeReservedWord(id.name):
self.tolerateUnexpectedToken(token, Messages.StrictReservedWord)
while self.match(','):
self.nextToken()
lst.append(self.parseLexicalBinding(kind, options))
return lst
def isLexicalDeclaration(self):
    """Peek one token ahead (saving and restoring the scanner state) and
    report whether the upcoming token can begin a lexical binding:
    an identifier, '[', '{', or the keywords 'let'/'yield'."""
    state = self.scanner.saveState()
    self.scanner.scanComments()
    # Renamed from ``next`` to avoid shadowing the builtin.
    next_token = self.scanner.lex()
    self.scanner.restoreState(state)
    return (
        (next_token.type is Token.Identifier) or
        (next_token.type is Token.Punctuator and next_token.value == '[') or
        (next_token.type is Token.Punctuator and next_token.value == '{') or
        (next_token.type is Token.Keyword and next_token.value == 'let') or
        (next_token.type is Token.Keyword and next_token.value == 'yield')
    )
# NOTE(review): truncated — only the first statements of
# parseLexicalDeclaration survive; the assert message below is itself garbled
# (reads "either or const", presumably "either let or const" before
# extraction — left untouched here since it is runtime text). L324 onward
# (expect('['), elements loop) appears to belong to a different,
# array-pattern-parsing function whose ``def`` line was lost. Also note the
# use of ``assert`` for input validation (stripped under -O) — revisit once
# the full bodies are recovered.
def parseLexicalDeclaration(self, options):
node = self.createNode()
kind = self.nextToken().value assert kind == 'let'or kind == 'const', 'Lexical declaration must be either or const'
self.expect('[')
elements = [] whilenot self.match(']'): if self.match(','):
self.nextToken()
elements.append(None) else: if self.match('...'):
--> --------------------
--> maximum size reached
--> --------------------
Messung V0.5
¤ Dauer der Verarbeitung: 0.24 Sekunden
(vorverarbeitet)
¤
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.