# Source code for enaml.core.parser

#------------------------------------------------------------------------------
#  Copyright (c) 2011, Enthought, Inc.
#  All rights reserved.
#------------------------------------------------------------------------------
import ast
import os

import ply.yacc as yacc

from . import enaml_ast
from .lexer import syntax_error, EnamlLexer, ParsingError


#------------------------------------------------------------------------------
# Parser Setup
#------------------------------------------------------------------------------
# Lexer tokens which need to be exposed to the parser
tokens = EnamlLexer.tokens


#------------------------------------------------------------------------------
# Parsing Helpers
#------------------------------------------------------------------------------
# Ast Context Singletons. A single shared instance of each context node
# is safe since the context objects themselves carry no per-node state.
Store = ast.Store()
Load = ast.Load()
Del = ast.Del()


# The translation table for expression operators. Each character of a
# symbolic operator maps to a name fragment; the fragments are joined
# to build an '__operator_<name>__' name (see translate_operator).
operator_table = {
    '=': 'Equal',
    '<': 'Less',
    '>': 'Greater',
    ':': 'Colon',
}


# The allowed ast node types for ast.Store contexts
# (i.e. expressions which may legally appear on the lhs of '=').
# NOTE: set([...]) rather than a set literal is deliberate — this file
# retains Python 2.6 compatibility (see the shims below).
context_allowed = set([
    ast.Attribute,
    ast.Subscript,
    ast.Name,
    ast.List,
    ast.Tuple,
])

# Python 2.6 compatibility. Transform set comprehension into set(generator)
try:
    SetComp = ast.SetComp
except AttributeError:
    def SetComp(elt, generators):
        """ Emulate ast.SetComp by wrapping a generator expression in a
        call to the 'set' builtin: set(<genexp>).

        """
        genexp = ast.GeneratorExp(elt=elt, generators=generators)
        node = ast.Call()
        node.func = ast.Name(id='set', ctx=Load)
        node.args = [genexp]
        node.keywords = []
        node.starargs = None
        node.kwargs = None
        return node

# Python 2.6 compatibility. Transform dict comprehension into dict(generator)
try:
    DictComp = ast.DictComp
except AttributeError:
    def DictComp(key, value, generators):
        """ Emulate ast.DictComp by feeding (key, value) tuples from a
        generator expression into the 'dict' builtin.

        """
        pair = ast.Tuple(elts=[key, value], ctx=Load)
        genexp = ast.GeneratorExp(elt=pair, generators=generators)
        node = ast.Call()
        node.func = ast.Name(id='dict', ctx=Load)
        node.args = [genexp]
        node.keywords = []
        node.starargs = None
        node.kwargs = None
        return node


# Python 2.6 compatibility. Transform set literal in set(list_literal)
try:
    Set = ast.Set
except AttributeError:
    def Set(elts):
        """ Emulate ast.Set by calling the 'set' builtin on an
        equivalent list literal.

        """
        literal = ast.List(elts=elts, ctx=Load)
        node = ast.Call()
        node.func = ast.Name(id='set', ctx=Load)
        node.args = [literal]
        node.keywords = []
        node.starargs = None
        node.kwargs = None
        return node


# The disallowed ast node types for ast.Store contexts and
# the associated message tag for error reporting. set_context uses the
# tag to build messages of the form "can't assign to <tag>".
context_disallowed = {
    ast.Lambda: 'lambda',
    ast.Call: 'function call',
    ast.BoolOp: 'operator',
    ast.BinOp: 'operator',
    ast.UnaryOp: 'operator',
    ast.GeneratorExp: 'generator expression',
    ast.Yield: 'yield expression',
    ast.ListComp: 'list comprehension',
    # SetComp/DictComp/Set may be the 2.6 shim functions defined above
    # rather than ast classes; they still work as unique dict keys.
    SetComp: 'set comprehension',
    DictComp: 'dict comprehension',
    ast.Dict: 'literal',
    Set: 'literal',
    ast.Num: 'literal',
    ast.Str: 'literal',
    ast.Ellipsis: 'Ellipsis',
    ast.Compare: 'comparison',
    ast.IfExp: 'conditional expression',
}


# The node types allowed in aug assignment (lhs of '+=' and friends).
aug_assign_allowed = set([
    ast.Name,
    ast.Attribute,
    ast.Subscript,
])


# The disallowed ast types on the rhs of a :: operator, with the
# message tag used when reporting the violation (see p_binding3).
notification_disallowed = {
    ast.FunctionDef: 'function definition',
    ast.ClassDef: 'class definition',
    ast.Yield: 'yield statement',
    ast.Return: 'return statement',
}


# A mapping of aug assignment operator tokens to ast operator nodes.
# The node instances are shared; they are stateless.
augassign_table = {
    '&=': ast.BitAnd(),
    '^=': ast.BitXor(),
    '//=': ast.FloorDiv(),
    '<<=': ast.LShift(),
    '-=': ast.Sub(),
    '%=': ast.Mod(),
    '+=': ast.Add(),
    '>>=': ast.RShift(),
    '/=': ast.Div(),
    '*=': ast.Mult(),
    '|=': ast.BitOr(),
    '**=': ast.Pow(),
}


class FakeToken(object):
    """ A stand-in token object.

    It carries just the lexer and line number attributes which the
    syntax error reporting functions expect a token to have.

    """
    def __init__(self, lexer, lineno):
        self.lexer, self.lineno = lexer, lineno
def translate_operator(op):
    """ Convert a symbolic operator into its method name.

    Each character of `op` is translated through the operator_table
    and the pieces are joined into '__operator_<name>__'.

    """
    fragments = [operator_table[char] for char in op]
    return '__operator_%s__' % ''.join(fragments)
def set_context(node, ctx, p):
    """ Recursively sets the context of the node to the given context
    which should be Store or Del. If the node is not one of the allowed
    types for the context, an error is raised with an appropriate
    message.

    Parameters
    ----------
    node : ast.AST
        The expression node whose context should be set.
    ctx : ast context singleton
        One of the module-level Store or Del singletons.
    p : Yacc Production
        The Ply object passed to the parser rule; used to build a
        FakeToken for syntax error reporting.

    """
    # XXX passing the yacc production object to raise the error
    # message is a bit flakey and gets things wrong occasionally
    # when there are blank lines around the error. We can do better.
    items = None
    err_msg = ''
    node_type = type(node)
    if node_type in context_allowed:
        node.ctx = ctx
        if ctx == Store:
            if node_type == ast.Tuple:
                # An empty tuple '()' is never a valid assignment target.
                if len(node.elts) == 0:
                    err_msg = '()'
                else:
                    items = node.elts
            elif node_type == ast.List:
                items = node.elts
    elif node_type in context_disallowed:
        err_msg = context_disallowed[node_type]
    else:
        # BUGFIX: the first format specifier was '%d' but is filled
        # with the type *name* (a string); that made this error path
        # raise a TypeError instead of the intended SystemError.
        msg = 'unexpected expression in assignment %s (line %d)'
        raise SystemError(msg % (node_type.__name__, node.lineno))
    if err_msg:
        m = 'assign to' if ctx == Store else 'delete'
        msg = "can't %s %s" % (m, err_msg)
        tok = FakeToken(p.lexer.lexer, p.lexer.lexer.lineno - 1)
        syntax_error(msg, tok)
    if items is not None:
        # Tuple/List targets recursively receive the same context.
        for item in items:
            set_context(item, ctx, p)
def ast_for_testlist(testlist):
    """ Normalize a testlist parse result into a single ast node.

    A list of nodes becomes an ast.Tuple with a Load context; any
    other value is returned unchanged.

    """
    if not isinstance(testlist, list):
        return testlist
    tup = ast.Tuple()
    tup.elts = testlist
    tup.ctx = Load
    return tup
def ast_for_dotted_name(dotted_name):
    """ Build the ast for a dotted name string.

    The leading part becomes an ast.Name and each subsequent part is
    wrapped in an ast.Attribute, all with Load contexts.

    """
    parts = dotted_name.split('.')
    result = ast.Name(id=parts[0], ctx=Load)
    for part in parts[1:]:
        wrapper = ast.Attribute()
        wrapper.value = result
        wrapper.attr = part
        wrapper.ctx = Load
        result = wrapper
    return result


# The nodes which can be inverted to form an assignable expression.
_INVERTABLE = (ast.Name, ast.Attribute, ast.Call, ast.Subscript)
def validate_invertable(node, lineno, p):
    """ Validate that the compiler can generate inversion code for the
    given ast node.

    An expression is invertable when it terminates in one of: Name,
    Attribute, Call, Subscript. Otherwise a syntax error is raised.

    Parameters
    ----------
    node : ast.AST
        The ast expression node to validate.
    lineno : int
        The line number of the declaration.
    p : Yacc Production
        The Ply object passed to the parser rule. This is used to
        extract the filename for syntax error reporting.

    """
    if isinstance(node, _INVERTABLE):
        return
    msg = "can't assign to expression of this form"
    syntax_error(msg, FakeToken(p.lexer.lexer, lineno))
def build_attr_declaration(kw, name, attr_type, default, lineno, p):
    """ Builds an ast node for an attr or event declaration.

    Parameters
    ----------
    kw : string
        The keyword used in the declaration. A syntax error is raised
        if this is not 'attr' or 'event'.

    name : string
        The name of the attribute or event being declared.

    attr_type : str or None
        The type being declared, or None if not using a type.

    default : AttributeBinding or None
        The default attribute binding, or None if not supplying the
        default.

    lineno : int
        The line number of the declaration.

    p : Yacc Production
        The Ply object passed to the parser rule. This is used to
        extract the filename for syntax error reporting.

    Returns
    -------
    result : AttributeDeclaration
        The Enaml AttributeDeclaration ast node.

    """
    if kw not in ('attr', 'event'):
        msg = "Expected keyword 'attr' or 'event', got '%s' instead." % kw
        syntax_error(msg, FakeToken(p.lexer.lexer, lineno))
    # The two previous branches differed only in the is_event flag,
    # so compute it directly instead of duplicating the constructor.
    return enaml_ast.AttributeDeclaration(
        name, attr_type, default, kw == 'event', lineno,
    )
class CommaSeparatedList(object):
    """ A parsing helper which wraps the items of a comma separated
    list.

    """
    def __init__(self, values=None):
        if not values:
            values = []
        self.values = values
class GeneratorInfo(object):
    """ A parsing helper which wraps the element and comprehension
    clauses of a generator body.

    """
    def __init__(self, elt=None, generators=None):
        if not generators:
            generators = []
        self.elt = elt
        self.generators = generators
class Arguments(object):
    """ A parsing helper which wraps the pieces of a call's argument
    list: positional args, keyword args, *args, and **kwargs.

    """
    def __init__(self, args=None, keywords=None, starargs=None, kwargs=None):
        if not args:
            args = []
        if not keywords:
            keywords = []
        self.args = args
        self.keywords = keywords
        self.starargs = starargs
        self.kwargs = kwargs


#==============================================================================
# Begin Parsing Rules
#==============================================================================

#------------------------------------------------------------------------------
# Enaml Module
#------------------------------------------------------------------------------
# These special rules to handle the variations of newline and endmarkers
# are because of the various lexer states that deal with python blocks
# and enaml code, as well as completely empty files.
def p_enaml1(p):
    ''' enaml : enaml_module NEWLINE ENDMARKER
              | enaml_module ENDMARKER '''
    p[0] = p[1]


def p_enaml2(p):
    ''' enaml : NEWLINE ENDMARKER
              | ENDMARKER '''
    # A completely empty file still yields a valid (empty) Module node.
    p[0] = enaml_ast.Module([], -1)


def p_enaml_module(p):
    ''' enaml_module : enaml_module_body '''
    # Separate the Python statements from the declarations and
    # collect them into their node. Consecutive raw Python statements
    # are batched into a single enaml_ast.Python node wrapping an
    # ast.Module; Declaration nodes are passed through unchanged.
    python_nodes = []
    body_nodes = []
    for item in p[1]:
        if isinstance(item, enaml_ast.Declaration):
            if python_nodes:
                mod = ast.Module(body=python_nodes)
                body_nodes.append(enaml_ast.Python(mod, -1))
                python_nodes = []
            body_nodes.append(item)
        else:
            python_nodes.append(item)
    # Flush any trailing run of Python statements.
    if python_nodes:
        mod = ast.Module(body=python_nodes)
        body_nodes.append(enaml_ast.Python(mod, -1))
    p[0] = enaml_ast.Module(body_nodes, -1)


def p_enaml_module_body1(p):
    ''' enaml_module_body : enaml_module_body enaml_module_item '''
    p[0] = p[1] + [p[2]]


def p_enaml_module_body2(p):
    ''' enaml_module_body : enaml_module_item '''
    p[0] = [p[1]]


def p_enaml_module_item2(p):
    ''' enaml_module_item : declaration '''
    p[0] = p[1]


def p_enaml_module_item1(p):
    ''' enaml_module_item : stmt '''
    p[0] = p[1]


#------------------------------------------------------------------------------
# Declaration
#------------------------------------------------------------------------------
def p_declaration1(p):
    ''' declaration : ENAMLDEF NAME LPAR NAME RPAR COLON declaration_body '''
    # declaration_body yields a (docstring, identifier, items) triple.
    doc, idn, items = p[7]
    p[0] = enaml_ast.Declaration(p[2], p[4], idn, doc, items, p.lineno(1))


def p_declaration2(p):
    ''' declaration : ENAMLDEF NAME LPAR NAME RPAR COLON PASS NEWLINE '''
    # Inline 'pass' body: no identifier, no docstring, no items.
    p[0] = enaml_ast.Declaration(p[2], p[4], None, '', [], p.lineno(1))


def p_declaration3(p):
    ''' declaration : ENAMLDEF NAME LPAR NAME RPAR COLON NAME COLON declaration_body '''
    lineno = p.lineno(1)
    doc, idn, items = p[9]
    # The inline NAME already provides the identifier, so one found in
    # the body is a duplicate.
    if idn is not None:
        msg = 'multiple identifiers declared'
        syntax_error(msg, FakeToken(p.lexer.lexer, lineno))
    p[0] = enaml_ast.Declaration(p[2], p[4], p[7], doc, items, lineno)


def p_declaration4(p):
    ''' declaration : ENAMLDEF NAME LPAR NAME RPAR COLON NAME COLON PASS NEWLINE '''
    p[0] = enaml_ast.Declaration(p[2], p[4], p[7], '', [], p.lineno(1))


def p_declaration_body1(p):
    ''' declaration_body : NEWLINE INDENT declaration_body_items DEDENT '''
    # Filter out any pass statements (py2 filter returns a list here).
    items = filter(None, p[3])
    p[0] = ('', None, items)


def p_declaration_body2(p):
    ''' declaration_body : NEWLINE INDENT identifier DEDENT '''
    p[0] = ('', p[3], [])


def p_declaration_body3(p):
    ''' declaration_body : NEWLINE INDENT identifier declaration_body_items DEDENT '''
    # Filter out any pass statements
    items = filter(None, p[4])
    p[0] = ('', p[3], items)


def p_declaration_body4(p):
    ''' declaration_body : NEWLINE INDENT STRING NEWLINE declaration_body_items DEDENT '''
    # Filter out any pass statements
    items = filter(None, p[5])
    p[0] = (p[3], None, items)


def p_declaration_body5(p):
    ''' declaration_body : NEWLINE INDENT STRING NEWLINE identifier DEDENT '''
    p[0] = (p[3], p[5], [])


def p_declaration_body6(p):
    ''' declaration_body : NEWLINE INDENT STRING NEWLINE identifier declaration_body_items DEDENT '''
    # Filter out any pass statements
    items = filter(None, p[6])
    p[0] = (p[3], p[5], items)


def p_declaration_body_items1(p):
    ''' declaration_body_items : declaration_body_item '''
    p[0] = [p[1]]


def p_declaration_body_items2(p):
    ''' declaration_body_items : declaration_body_items declaration_body_item '''
    p[0] = p[1] + [p[2]]


def p_declaration_body_item1(p):
    ''' declaration_body_item : attribute_declaration '''
    p[0] = p[1]


def p_declaration_body_item2(p):
    ''' declaration_body_item : attribute_binding '''
    p[0] = p[1]


def p_declaration_body_item3(p):
    ''' declaration_body_item : instantiation '''
    p[0] = p[1]


def p_declaration_body_item4(p):
    ''' declaration_body_item : PASS NEWLINE '''
    # 'pass' items become None and are filtered out by the body rules.
    p[0] = None


#------------------------------------------------------------------------------
# Attribute Declaration
#------------------------------------------------------------------------------
def p_attribute_declaration1(p):
    ''' attribute_declaration : NAME NAME NEWLINE '''
    # e.g. 'attr foo' — no type, no default binding.
    p[0] = build_attr_declaration(p[1], p[2], None, None, p.lineno(1), p)


def p_attribute_declaration2(p):
    ''' attribute_declaration : NAME NAME COLON NAME NEWLINE '''
    # e.g. 'attr foo: Bar' — typed, no default binding.
    p[0] = build_attr_declaration(p[1], p[2], p[4], None, p.lineno(1), p)


def p_attribute_declaration3(p):
    ''' attribute_declaration : NAME NAME binding '''
    # e.g. 'attr foo = expr' — untyped with a default binding.
    lineno = p.lineno(1)
    name = p[2]
    binding = enaml_ast.AttributeBinding(name, p[3], lineno)
    p[0] = build_attr_declaration(p[1], name, None, binding, lineno, p)


def p_attribute_declaration4(p):
    ''' attribute_declaration : NAME NAME COLON NAME binding '''
    # e.g. 'attr foo: Bar = expr' — typed with a default binding.
    lineno = p.lineno(1)
    name = p[2]
    binding = enaml_ast.AttributeBinding(name, p[5], lineno)
    p[0] = build_attr_declaration(p[1], name, p[4], binding, lineno, p)


#------------------------------------------------------------------------------
# Identifier
#------------------------------------------------------------------------------
def p_identifier(p):
    ''' identifier : NAME COLON NAME NEWLINE '''
    # Only the keyword 'id' may appear on the lhs of an identifier line.
    lhs = p[1]
    if lhs != 'id':
        msg = "'id' required. Got '%s' instead." % lhs
        syntax_error(msg, FakeToken(p.lexer.lexer, p.lineno(1)))
    p[0] = p[3]


#------------------------------------------------------------------------------
# Instantiation
#------------------------------------------------------------------------------
def p_instantiation1(p):
    ''' instantiation : NAME COLON instantiation_body '''
    # instantiation_body yields an (identifier, items) pair.
    identifier, items = p[3]
    p[0] = enaml_ast.Instantiation(p[1], identifier, items, p.lineno(1))


def p_instantiation2(p):
    ''' instantiation : NAME COLON attribute_binding '''
    p[0] = enaml_ast.Instantiation(p[1], None, [p[3]], p.lineno(1))


def p_instantiation3(p):
    ''' instantiation : NAME COLON PASS NEWLINE '''
    p[0] = enaml_ast.Instantiation(p[1], None, [], p.lineno(1))


def p_instantiation4(p):
    ''' instantiation : NAME COLON NAME COLON instantiation_body '''
    identifier, items = p[5]
    # The inline NAME already provides the identifier, so one found
    # in the body is a duplicate.
    if identifier is not None:
        msg = 'multiple identifiers declared'
        syntax_error(msg, FakeToken(p.lexer.lexer, p.lineno(1)))
    p[0] = enaml_ast.Instantiation(p[1], p[3], items, p.lineno(1))


def p_instantiation5(p):
    ''' instantiation : NAME COLON NAME COLON attribute_binding '''
    p[0] = enaml_ast.Instantiation(p[1], p[3], [p[5]], p.lineno(1))


def p_instantiation6(p):
    ''' instantiation : NAME COLON NAME COLON PASS NEWLINE '''
    p[0] = enaml_ast.Instantiation(p[1], p[3], [], p.lineno(1))


def p_instantiation_body1(p):
    ''' instantiation_body : NEWLINE INDENT instantiation_body_items DEDENT '''
    # Filter out any pass statements (py2 filter returns a list here).
    items = filter(None, p[3])
    p[0] = (None, items)


def p_instantiation_body2(p):
    ''' instantiation_body : NEWLINE INDENT identifier DEDENT '''
    p[0] = (p[3], [])


def p_instantiation_body3(p):
    ''' instantiation_body : NEWLINE INDENT identifier instantiation_body_items DEDENT '''
    # Filter out any pass statements
    items = filter(None, p[4])
    p[0] = (p[3], items)


def p_instantiation_body_items1(p):
    ''' instantiation_body_items : instantiation_body_item '''
    p[0] = [p[1]]


def p_instantiation_body_items2(p):
    ''' instantiation_body_items : instantiation_body_items instantiation_body_item '''
    p[0] = p[1] + [p[2]]


def p_instantiation_body_item1(p):
    ''' instantiation_body_item : instantiation '''
    p[0] = p[1]


def p_instantiation_body_item2(p):
    ''' instantiation_body_item : attribute_binding '''
    p[0] = p[1]


def p_instantiation_body_item3(p):
    ''' instantiation_body_item : PASS NEWLINE '''
    # 'pass' items become None and are filtered out by the body rules.
    p[0] = None


#------------------------------------------------------------------------------
# Attribute Binding
#------------------------------------------------------------------------------
def p_attribute_binding(p):
    ''' attribute_binding : NAME binding '''
    p[0] = enaml_ast.AttributeBinding(p[1], p[2], p.lineno(1))


def p_binding1(p):
    ''' binding : EQUAL test NEWLINE
                | LEFTSHIFT test NEWLINE '''
    # Simple ('=') and subscription ('<<') bindings: the rhs is wrapped
    # in an ast.Expression for later compilation.
    lineno = p.lineno(1)
    operator = translate_operator(p[1])
    expr = ast.Expression(body=p[2])
    expr.lineno = lineno
    ast.fix_missing_locations(expr)
    expr_node = enaml_ast.Python(expr, lineno)
    p[0] = enaml_ast.BoundExpression(operator, expr_node, lineno)


def p_binding2(p):
    ''' binding : COLONEQUAL test NEWLINE
                | RIGHTSHIFT test NEWLINE '''
    # Delegation (':=') and update ('>>') bindings require an
    # expression the compiler can invert into an assignment.
    lineno = p.lineno(1)
    validate_invertable(p[2], lineno, p)
    operator = translate_operator(p[1])
    expr = ast.Expression(body=p[2])
    expr.lineno = lineno
    ast.fix_missing_locations(expr)
    expr_node = enaml_ast.Python(expr, lineno)
    p[0] = enaml_ast.BoundExpression(operator, expr_node, lineno)


def p_binding3(p):
    ''' binding : DOUBLECOLON suite '''
    # Notification ('::') bindings take a full statement suite, but
    # certain statement types are rejected anywhere in the block.
    lineno = p.lineno(1)
    operator = translate_operator(p[1])
    mod = ast.Module()
    mod.body = p[2]
    for item in ast.walk(mod):
        if type(item) in notification_disallowed:
            msg = '%s not allowed in a notification block'
            msg = msg % notification_disallowed[type(item)]
            syntax_error(msg, FakeToken(p.lexer.lexer, item.lineno))
    expr_node = enaml_ast.Python(mod, lineno)
    p[0] = enaml_ast.BoundExpression(operator, expr_node, lineno)


#------------------------------------------------------------------------------
# Python Grammar
#------------------------------------------------------------------------------
def p_suite1(p):
    ''' suite : simple_stmt '''
    # stmt may be a list of simple_stmt due to this piece of grammar:
    # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
    stmt = p[1]
    if isinstance(stmt, list):
        res = stmt
    else:
        res = [stmt]
    p[0] = res


def p_suite2(p):
    ''' suite : NEWLINE INDENT stmt_list DEDENT '''
    p[0] = p[3]


def p_stmt_list1(p):
    ''' stmt_list : stmt stmt_list '''
    # stmt may be a list of simple_stmt due to this piece of grammar:
    # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
    stmt = p[1]
    if isinstance(stmt, list):
        res = stmt + p[2]
    else:
        res = [stmt] + p[2]
    p[0] = res


def p_stmt_list2(p):
    ''' stmt_list : stmt '''
    # stmt may be a list of simple_stmt due to this piece of grammar:
    # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
    stmt = p[1]
    if isinstance(stmt, list):
        res = stmt
    else:
        res = [stmt]
    p[0] = res


def p_stmt(p):
    ''' stmt : simple_stmt
             | compound_stmt '''
    p[0] = p[1]


def p_simple_stmt1(p):
    ''' simple_stmt : small_stmt NEWLINE '''
    # The trailing NEWLINE token carries the statement's line number.
    stmt = p[1]
    stmt.lineno = p.lineno(2)
    ast.fix_missing_locations(stmt)
    p[0] = stmt


def p_simple_stmt2(p):
    ''' simple_stmt : small_stmt_list NEWLINE '''
    # Semicolon-separated statements all share the line of the NEWLINE.
    lineno = p.lineno(2)
    stmts = p[1]
    for stmt in stmts:
        stmt.lineno = lineno
        ast.fix_missing_locations(stmt)
    p[0] = stmts
def p_small_stmt_list1(p):
    ''' small_stmt_list : small_stmt SEMI '''
    p[0] = [p[1]]


def p_small_stmt_list2(p):
    ''' small_stmt_list : small_stmt small_stmt_list_list '''
    p[0] = [p[1]] + p[2]


def p_small_stmt_list3(p):
    ''' small_stmt_list : small_stmt small_stmt_list_list SEMI '''
    # A trailing semicolon is accepted and discarded.
    p[0] = [p[1]] + p[2]


def p_small_stmt_list_list1(p):
    ''' small_stmt_list_list : SEMI small_stmt '''
    p[0] = [p[2]]


def p_small_stmt_list_list2(p):
    ''' small_stmt_list_list : small_stmt_list_list SEMI small_stmt '''
    p[0] = p[1] + [p[3]]


def p_small_stmt1(p):
    ''' small_stmt : expr_stmt
                   | print_stmt
                   | del_stmt
                   | pass_stmt
                   | flow_stmt
                   | import_stmt
                   | global_stmt
                   | exec_stmt
                   | assert_stmt '''
    p[0] = p[1]
def p_print_stmt1(p):
    ''' print_stmt : PRINT '''
    # Bare 'print' — no destination, no values, trailing newline.
    prnt = ast.Print()
    prnt.dest = None
    prnt.values = []
    prnt.nl = True
    p[0] = prnt


def p_print_stmt2(p):
    ''' print_stmt : PRINT test '''
    prnt = ast.Print()
    prnt.dest = None
    prnt.values = [p[2]]
    prnt.nl = True
    p[0] = prnt


def p_print_stmt3(p):
    ''' print_stmt : PRINT print_list '''
    # print_list uses a trailing None sentinel to mark a trailing
    # comma, which suppresses the newline.
    prnt = ast.Print()
    all_values = p[2]
    good_values = [item for item in all_values if item is not None]
    if all_values[-1] is None:
        nl = False
    else:
        nl = True
    prnt.dest = None
    prnt.values = good_values
    prnt.nl = nl
    p[0] = prnt


def p_print_stmt4(p):
    ''' print_stmt : PRINT RIGHTSHIFT test '''
    # 'print >> dest' form.
    prnt = ast.Print()
    prnt.dest = p[3]
    prnt.values = []
    prnt.nl = True
    p[0] = prnt


def p_print_stmt5(p):
    ''' print_stmt : PRINT RIGHTSHIFT test COMMA test '''
    prnt = ast.Print()
    prnt.dest = p[3]
    prnt.values = [p[5]]
    prnt.nl = True
    p[0] = prnt


def p_print_stmt6(p):
    ''' print_stmt : PRINT RIGHTSHIFT test COMMA print_list '''
    # As p_print_stmt3 but with an explicit destination.
    prnt = ast.Print()
    all_values = p[5]
    good_values = [item for item in all_values if item is not None]
    if all_values[-1] is None:
        nl = False
    else:
        nl = True
    prnt.dest = p[3]
    prnt.values = good_values
    prnt.nl = nl
    p[0] = prnt


def p_print_list1(p):
    ''' print_list : test COMMA '''
    # The trailing None marks a trailing comma (suppresses newline).
    p[0] = [p[1], None]


def p_print_list2(p):
    ''' print_list : test print_list_list '''
    p[0] = [p[1]] + p[2]


def p_print_list3(p):
    ''' print_list : test print_list_list COMMA '''
    p[0] = [p[1]] + p[2] + [None]


def p_print_list_list1(p):
    ''' print_list_list : COMMA test '''
    p[0] = [p[2]]


def p_print_list_list2(p):
    ''' print_list_list : print_list_list COMMA test '''
    p[0] = p[1] + [p[3]]
def p_del_stmt(p):
    ''' del_stmt : DEL exprlist '''
    # The targets must be validated/re-contexted for deletion.
    exprlist = p[2]
    set_context(exprlist, Del, p)
    del_stmt = ast.Delete()
    del_stmt.targets = [exprlist]
    p[0] = del_stmt


def p_pass_stmt(p):
    ''' pass_stmt : PASS '''
    pass_stmt = ast.Pass()
    pass_stmt.lineno = p.lineno(1)
    p[0] = pass_stmt


def p_flow_stmt(p):
    ''' flow_stmt : break_stmt
                  | continue_stmt
                  | return_stmt
                  | raise_stmt
                  | yield_stmt '''
    p[0] = p[1]


def p_break_stmt(p):
    ''' break_stmt : BREAK '''
    break_stmt = ast.Break()
    break_stmt.lineno = p.lineno(1)
    p[0] = break_stmt


def p_continue_stmt(p):
    ''' continue_stmt : CONTINUE '''
    continue_stmt = ast.Continue()
    continue_stmt.lineno = p.lineno(1)
    p[0] = continue_stmt
def p_return_stmt1(p):
    ''' return_stmt : RETURN '''
    ret = ast.Return()
    ret.value = None
    p[0] = ret


def p_return_stmt2(p):
    ''' return_stmt : RETURN testlist '''
    # A multi-item testlist becomes a Tuple via ast_for_testlist.
    value = ast_for_testlist(p[2])
    ret = ast.Return()
    ret.value = value
    p[0] = ret


def p_raise_stmt1(p):
    ''' raise_stmt : RAISE '''
    # Bare re-raise: py2 ast.Raise takes type/inst/tback.
    raise_stmt = ast.Raise()
    raise_stmt.type = None
    raise_stmt.inst = None
    raise_stmt.tback = None
    p[0] = raise_stmt


def p_raise_stmt2(p):
    ''' raise_stmt : RAISE test '''
    raise_stmt = ast.Raise()
    raise_stmt.type = p[2]
    raise_stmt.inst = None
    raise_stmt.tback = None
    p[0] = raise_stmt


def p_raise_stmt3(p):
    ''' raise_stmt : RAISE test COMMA test '''
    raise_stmt = ast.Raise()
    raise_stmt.type = p[2]
    raise_stmt.inst = p[4]
    raise_stmt.tback = None
    p[0] = raise_stmt


def p_raise_stmt4(p):
    ''' raise_stmt : RAISE test COMMA test COMMA test '''
    raise_stmt = ast.Raise()
    raise_stmt.type = p[2]
    raise_stmt.inst = p[4]
    raise_stmt.tback = p[6]
    p[0] = raise_stmt


def p_yield_stmt(p):
    ''' yield_stmt : yield_expr '''
    # A bare yield expression used as a statement.
    p[0] = ast.Expr(value=p[1])


def p_yield_expr1(p):
    ''' yield_expr : YIELD '''
    p[0] = ast.Yield(value=None, lineno=p.lineno(1))


def p_yield_expr2(p):
    ''' yield_expr : YIELD testlist '''
    value = ast_for_testlist(p[2])
    p[0] = ast.Yield(value=value, lineno=p.lineno(1))
def p_global_stmt1(p):
    ''' global_stmt : GLOBAL NAME '''
    global_stmt = ast.Global()
    global_stmt.names = [p[2]]
    global_stmt.lineno = p.lineno(1)
    p[0] = global_stmt


def p_global_stmt2(p):
    ''' global_stmt : GLOBAL NAME globals_list '''
    global_stmt = ast.Global()
    global_stmt.names = [p[2]] + p[3]
    global_stmt.lineno = p.lineno(1)
    p[0] = global_stmt


def p_globals_list1(p):
    ''' globals_list : COMMA NAME globals_list '''
    p[0] = [p[2]] + p[3]


def p_globals_list2(p):
    ''' globals_list : COMMA NAME '''
    p[0] = [p[2]]


def p_exec_stmt1(p):
    ''' exec_stmt : EXEC expr '''
    # py2 exec statement: body [in globals [, locals]].
    exec_stmt = ast.Exec()
    exec_stmt.body = p[2]
    exec_stmt.globals = None
    exec_stmt.locals = None
    p[0] = exec_stmt


def p_exec_stmt2(p):
    ''' exec_stmt : EXEC expr IN test '''
    exec_stmt = ast.Exec()
    exec_stmt.body = p[2]
    exec_stmt.globals= p[4]
    exec_stmt.locals = None
    p[0] = exec_stmt


def p_exec_stmt3(p):
    ''' exec_stmt : EXEC expr IN test COMMA test '''
    exec_stmt = ast.Exec()
    exec_stmt.body = p[2]
    exec_stmt.globals = p[4]
    exec_stmt.locals = p[6]
    p[0] = exec_stmt


def p_assert_stmt1(p):
    ''' assert_stmt : ASSERT test '''
    assert_stmt = ast.Assert()
    assert_stmt.test = p[2]
    assert_stmt.msg = None
    p[0] = assert_stmt


def p_assert_stmt2(p):
    ''' assert_stmt : ASSERT test COMMA test '''
    assert_stmt = ast.Assert()
    assert_stmt.test = p[2]
    assert_stmt.msg = p[4]
    p[0] = assert_stmt
def p_expr_stmt1(p):
    ''' expr_stmt : testlist '''
    # A bare expression statement.
    expr = ast.Expr()
    expr.value = ast_for_testlist(p[1])
    p[0] = expr


def p_expr_stmt2(p):
    ''' expr_stmt : testlist augassign testlist
                  | testlist augassign yield_expr '''
    # augassign yields an (op node, lineno) pair.
    op, lineno = p[2]
    lhs = ast_for_testlist(p[1])
    rhs = ast_for_testlist(p[3])
    set_context(lhs, Store, p)
    # Augmented assignment is further restricted beyond normal Store
    # targets: only Name/Attribute/Subscript are legal.
    if type(lhs) not in aug_assign_allowed:
        msg = 'illegal expression for augmented assignment'
        syntax_error(msg, FakeToken(p.lexer.lexer, lineno))
    aug = ast.AugAssign()
    aug.target = lhs
    aug.value = rhs
    aug.op = op
    p[0] = aug


def p_expr_stmt3(p):
    ''' expr_stmt : testlist equal_list '''
    # Chained assignment: all items but the last are targets.
    # (py2: map returns a list, so pop() works.)
    all_items = [p[1]] + p[2]
    targets = map(ast_for_testlist, all_items)
    value = targets.pop()
    for item in targets:
        if type(item) == ast.Yield:
            msg = "assignment to yield expression not possible"
            syntax_error(msg, FakeToken(p.lexer.lexer, item.lineno))
        set_context(item, Store, p)
    assg = ast.Assign()
    assg.targets = targets
    assg.value = value
    p[0] = assg


def p_augassign(p):
    ''' augassign : AMPEREQUAL
                  | CIRCUMFLEXEQUAL
                  | DOUBLESLASHEQUAL
                  | DOUBLESTAREQUAL
                  | LEFTSHIFTEQUAL
                  | MINUSEQUAL
                  | PERCENTEQUAL
                  | PLUSEQUAL
                  | RIGHTSHIFTEQUAL
                  | SLASHEQUAL
                  | STAREQUAL
                  | VBAREQUAL '''
    # Translate the token text into a shared ast operator node.
    lineno = p.lineno(1)
    op = augassign_table[p[1]]
    p[0] = (op, lineno)


def p_equal_list1(p):
    ''' equal_list : EQUAL testlist
                   | EQUAL yield_expr '''
    p[0] = [p[2]]


def p_equal_list2(p):
    ''' equal_list : EQUAL testlist equal_list
                   | EQUAL yield_expr equal_list '''
    p[0] = [p[2]] + p[3]
def p_testlist1(p):
    ''' testlist : test '''
    # A single test passes through unchanged (not wrapped in a list).
    p[0] = p[1]


def p_testlist2(p):
    ''' testlist : test COMMA '''
    # A trailing comma forces the single-item-tuple interpretation,
    # hence the list wrapper (see ast_for_testlist).
    p[0] = [p[1]]


def p_testlist3(p):
    ''' testlist : test testlist_list '''
    p[0] = [p[1]] + p[2]


def p_testlist4(p):
    ''' testlist : test testlist_list COMMA '''
    p[0] = [p[1]] + p[2]


def p_testlist_list1(p):
    ''' testlist_list : COMMA test '''
    p[0] = [p[2]]


def p_testlist_list2(p):
    ''' testlist_list : testlist_list COMMA test '''
    p[0] = p[1] + [p[3]]
def p_compound_stmt(p):
    ''' compound_stmt : if_stmt
                      | while_stmt
                      | for_stmt
                      | try_stmt
                      | with_stmt
                      | funcdef
                      | classdef
                      | decorated '''
    p[0] = p[1]


def p_if_stmt1(p):
    ''' if_stmt : IF test COLON suite '''
    if_stmt = ast.If()
    if_stmt.test = p[2]
    if_stmt.body = p[4]
    if_stmt.lineno = p.lineno(1)
    ast.fix_missing_locations(if_stmt)
    if_stmt.orelse = []
    p[0] = if_stmt


def p_if_stmt2(p):
    ''' if_stmt : IF test COLON suite elif_stmts '''
    # elif chains are represented as a nested If in orelse.
    if_stmt = ast.If()
    if_stmt.test = p[2]
    if_stmt.body = p[4]
    if_stmt.lineno = p.lineno(1)
    if_stmt.orelse = [p[5]]
    ast.fix_missing_locations(if_stmt)
    p[0] = if_stmt


def p_if_stmt3(p):
    ''' if_stmt : IF test COLON suite else_stmt '''
    if_stmt = ast.If()
    if_stmt.test = p[2]
    if_stmt.body = p[4]
    if_stmt.lineno = p.lineno(1)
    if_stmt.orelse = p[5]
    ast.fix_missing_locations(if_stmt)
    p[0] = if_stmt


def p_if_stmt4(p):
    ''' if_stmt : IF test COLON suite elif_stmts else_stmt '''
    if_stmt = ast.If()
    if_stmt.test = p[2]
    if_stmt.body = p[4]
    if_stmt.lineno = p.lineno(1)
    elif_stmt = p[5]
    if_stmt.orelse = [elif_stmt]
    else_stmt = p[6]
    # Walk to the deepest elif in the chain and attach the else
    # suite there.
    while elif_stmt.orelse:
        elif_stmt = elif_stmt.orelse[0]
    elif_stmt.orelse = else_stmt
    ast.fix_missing_locations(if_stmt)
    p[0] = if_stmt


def p_elif_stmts1(p):
    ''' elif_stmts : elif_stmts elif_stmt '''
    # Nest each successive elif inside the previous one's orelse.
    elif_stmt = p[1]
    elif_stmt.orelse = [p[2]]
    p[0] = elif_stmt


def p_elif_stmts2(p):
    ''' elif_stmts : elif_stmt '''
    p[0] = p[1]


def p_elif_stmt(p):
    ''' elif_stmt : ELIF test COLON suite '''
    if_stmt = ast.If()
    if_stmt.test = p[2]
    if_stmt.body = p[4]
    if_stmt.lineno = p.lineno(1)
    if_stmt.orelse = []
    ast.fix_missing_locations(if_stmt)
    p[0] = if_stmt


def p_else_stmt(p):
    ''' else_stmt : ELSE COLON suite '''
    p[0] = p[3]
def p_while_stmt1(p):
    ''' while_stmt : WHILE test COLON suite '''
    while_stmt = ast.While()
    while_stmt.test = p[2]
    while_stmt.body = p[4]
    while_stmt.orelse = []
    while_stmt.lineno = p.lineno(1)
    ast.fix_missing_locations(while_stmt)
    p[0] = while_stmt


def p_while_stmt2(p):
    ''' while_stmt : WHILE test COLON suite ELSE COLON suite '''
    while_stmt = ast.While()
    while_stmt.test = p[2]
    while_stmt.body = p[4]
    while_stmt.orelse = p[7]
    while_stmt.lineno = p.lineno(1)
    ast.fix_missing_locations(while_stmt)
    p[0] = while_stmt


def p_for_stmt1(p):
    ''' for_stmt : FOR exprlist IN testlist COLON suite '''
    for_stmt = ast.For()
    # The loop target must be converted to a Store context.
    target = p[2]
    set_context(target, Store, p)
    for_stmt.target = target
    for_stmt.iter = ast_for_testlist(p[4])
    for_stmt.body = p[6]
    for_stmt.orelse = []
    for_stmt.lineno = p.lineno(1)
    ast.fix_missing_locations(for_stmt)
    p[0] = for_stmt


def p_for_stmt2(p):
    ''' for_stmt : FOR exprlist IN testlist COLON suite ELSE COLON suite '''
    for_stmt = ast.For()
    target = p[2]
    set_context(target, Store, p)
    for_stmt.target = target
    for_stmt.iter = ast_for_testlist(p[4])
    for_stmt.body = p[6]
    for_stmt.orelse = p[9]
    for_stmt.lineno = p.lineno(1)
    ast.fix_missing_locations(for_stmt)
    p[0] = for_stmt
def p_try_stmt1(p):
    ''' try_stmt : TRY COLON suite FINALLY COLON suite '''
    # py2 ast: try/finally and try/except are separate node types.
    try_finally = ast.TryFinally()
    try_finally.body = p[3]
    try_finally.finalbody = p[6]
    try_finally.lineno = p.lineno(1)
    ast.fix_missing_locations(try_finally)
    p[0] = try_finally


def p_try_stmt2(p):
    ''' try_stmt : TRY COLON suite except_clauses '''
    try_stmt = ast.TryExcept()
    try_stmt.body = p[3]
    try_stmt.handlers = p[4]
    try_stmt.orelse = []
    try_stmt.lineno = p.lineno(1)
    ast.fix_missing_locations(try_stmt)
    p[0] = try_stmt


def p_try_stmt3(p):
    ''' try_stmt : TRY COLON suite except_clauses ELSE COLON suite '''
    try_stmt = ast.TryExcept()
    try_stmt.body = p[3]
    try_stmt.handlers = p[4]
    try_stmt.orelse = p[7]
    try_stmt.lineno = p.lineno(1)
    ast.fix_missing_locations(try_stmt)
    p[0] = try_stmt


def p_try_stmt4(p):
    ''' try_stmt : TRY COLON suite except_clauses FINALLY COLON suite '''
    # try/except/finally is represented as a TryExcept nested inside
    # a TryFinally, matching the py2 compiler's own encoding.
    lineno = p.lineno(1)
    try_finally = ast.TryFinally()
    try_stmt = ast.TryExcept()
    try_stmt.body = p[3]
    try_stmt.handlers = p[4]
    try_stmt.orelse = []
    try_stmt.lineno = lineno
    ast.fix_missing_locations(try_stmt)
    try_finally.body = [try_stmt]
    try_finally.finalbody = p[7]
    try_finally.lineno = lineno
    ast.fix_missing_locations(try_finally)
    p[0] = try_finally


def p_try_stmt5(p):
    ''' try_stmt : TRY COLON suite except_clauses ELSE COLON suite FINALLY COLON suite '''
    lineno = p.lineno(1)
    try_finally = ast.TryFinally()
    try_stmt = ast.TryExcept()
    try_stmt.body = p[3]
    try_stmt.handlers = p[4]
    try_stmt.orelse = p[7]
    try_stmt.lineno = lineno
    ast.fix_missing_locations(try_stmt)
    try_finally.body = [try_stmt]
    try_finally.finalbody = p[10]
    try_finally.lineno = lineno
    ast.fix_missing_locations(try_finally)
    p[0] = try_finally


def p_except_clauses1(p):
    ''' except_clauses : except_clause except_clauses '''
    p[0] = [p[1]] + p[2]


def p_except_clauses2(p):
    ''' except_clauses : except_clause '''
    p[0] = [p[1]]


def p_except_clause1(p):
    ''' except_clause : EXCEPT COLON suite '''
    # Bare 'except:' — catches everything.
    excpt = ast.ExceptHandler()
    excpt.type = None
    excpt.name = None
    excpt.body = p[3]
    excpt.lineno = p.lineno(1)
    ast.fix_missing_locations(excpt)
    p[0] = excpt


def p_except_clause2(p):
    ''' except_clause : EXCEPT test COLON suite '''
    excpt = ast.ExceptHandler()
    excpt.type = p[2]
    excpt.name = None
    excpt.body = p[4]
    excpt.lineno = p.lineno(1)
    ast.fix_missing_locations(excpt)
    p[0] = excpt


def p_except_clause3(p):
    ''' except_clause : EXCEPT test AS test COLON suite
                      | EXCEPT test COMMA test COLON suite '''
    # Both py2 spellings ('as name' and ', name') are accepted. The
    # bound name must be converted to a Store context.
    excpt = ast.ExceptHandler()
    excpt.type = p[2]
    name = p[4]
    set_context(name, Store, p)
    excpt.name = name
    excpt.body = p[6]
    excpt.lineno = p.lineno(1)
    ast.fix_missing_locations(excpt)
    p[0] = excpt
[docs]def p_with_stmt1(p): ''' with_stmt : WITH with_item COLON suite ''' with_stmt = ast.With() ctxt, opt_vars = p[2] with_stmt.context_expr = ctxt with_stmt.optional_vars = opt_vars with_stmt.body = p[4] with_stmt.lineno = p.lineno(1) ast.fix_missing_locations(with_stmt) p[0] = with_stmt
def p_with_stmt2(p):
    ''' with_stmt : WITH with_item with_item_list COLON suite '''
    # Multiple context managers become nested With nodes, matching the
    # Python 2 ast where each With holds a single context expression.
    items = [p[2]] + p[3]
    ctxt, opt_vars = items[0]
    root = ast.With()
    root.context_expr = ctxt
    root.optional_vars = opt_vars
    current = root
    for ctxt, opt_vars in items[1:]:
        inner = ast.With()
        inner.context_expr = ctxt
        inner.optional_vars = opt_vars
        current.body = [inner]
        current = inner
    # The innermost With receives the actual suite.
    current.body = p[5]
    root.lineno = p.lineno(1)
    ast.fix_missing_locations(root)
    p[0] = root
[docs]def p_with_item1(p): ''' with_item : test ''' p[0] = (p[1], None)
[docs]def p_with_item2(p): ''' with_item : test AS expr ''' expr = p[3] set_context(expr, Store, p) p[0] = (p[1], expr)
[docs]def p_with_item_list1(p): ''' with_item_list : COMMA with_item with_item_list ''' p[0] = [p[2]] + p[3]
[docs]def p_with_item_list2(p): ''' with_item_list : COMMA with_item ''' p[0] = [p[2]]
[docs]def p_funcdef(p): ''' funcdef : DEF NAME parameters COLON suite ''' funcdef = ast.FunctionDef() funcdef.name = p[2] funcdef.args = p[3] funcdef.body = p[5] funcdef.decorator_list = [] funcdef.lineno = p.lineno(1) ast.fix_missing_locations(funcdef) p[0] = funcdef
[docs]def p_parameters1(p): ''' parameters : LPAR RPAR ''' p[0] = ast.arguments(args=[], defaults=[], vararg=None, kwarg=None)
[docs]def p_parameters2(p): ''' parameters : LPAR varargslist RPAR ''' p[0] = p[2]
[docs]def p_classdef1(p): ''' classdef : CLASS NAME COLON suite ''' classdef = ast.ClassDef() classdef.name = p[2] classdef.bases = [] classdef.body = p[4] classdef.decorator_list = [] classdef.lineno = p.lineno(1) ast.fix_missing_locations(classdef) p[0] = classdef
[docs]def p_classdef2(p): ''' classdef : CLASS NAME LPAR RPAR COLON suite ''' classdef = ast.ClassDef() classdef.name = p[2] classdef.bases = [] classdef.body = p[6] classdef.decorator_list = [] classdef.lineno = p.lineno(1) ast.fix_missing_locations(classdef) p[0] = classdef
[docs]def p_classdef3(p): ''' classdef : CLASS NAME LPAR testlist RPAR COLON suite ''' classdef = ast.ClassDef() classdef.name = p[2] bases = p[4] if not isinstance(bases, list): bases = [bases] classdef.bases = bases classdef.body = p[7] classdef.decorator_list = [] classdef.lineno = p.lineno(1) ast.fix_missing_locations(classdef) p[0] = classdef
[docs]def p_decorated(p): ''' decorated : decorators funcdef | decorators classdef ''' decs = p[1] target = p[2] target.decorator_list = decs p[0] = target
[docs]def p_decorators1(p): ''' decorators : decorator decorators ''' p[0] = [p[1]] + p[2]
[docs]def p_decorators2(p): ''' decorators : decorator ''' p[0] = [p[1]]
[docs]def p_decorator1(p): ''' decorator : AT dotted_name NEWLINE ''' name = ast_for_dotted_name(p[2]) name.lineno = p.lineno(1) ast.fix_missing_locations(name) p[0] = name
def p_decorator2(p):
    ''' decorator : AT dotted_name LPAR RPAR NEWLINE '''
    # Decorator applied with an empty call, e.g. `@foo()`.
    call = ast.Call()
    call.func = ast_for_dotted_name(p[2])
    call.args = []
    call.keywords = []
    # BUGFIX: this was `call.stargs` (typo), which set a bogus attribute
    # and left the real `starargs` field of the Call node unset.
    call.starargs = None
    call.kwargs = None
    call.lineno = p.lineno(1)
    ast.fix_missing_locations(call)
    p[0] = call
[docs]def p_decorator3(p): ''' decorator : AT dotted_name LPAR arglist RPAR NEWLINE ''' args = p[4] call = ast.Call() call.func = ast_for_dotted_name(p[2]) call.args = args.args call.keywords = args.keywords call.starargs = args.starargs call.kwargs = args.kwargs call.lineno = p.lineno(1) ast.fix_missing_locations(call) p[0] = call
[docs]def p_import_stmt1(p): ''' import_stmt : import_name ''' p[0] = p[1]
[docs]def p_import_stmt2(p): ''' import_stmt : import_from ''' p[0] = p[1]
[docs]def p_import_name(p): ''' import_name : IMPORT dotted_as_names ''' imprt = ast.Import(names=p[2]) imprt.col_offset = 0 p[0] = imprt
[docs]def p_import_from1(p): ''' import_from : FROM dotted_name IMPORT STAR ''' alias = ast.alias(name=p[4], asname=None) imprt = ast.ImportFrom(module=p[2], names=[alias], level=0) imprt.col_offset = 0 p[0] = imprt
[docs]def p_import_from2(p): ''' import_from : FROM dotted_name IMPORT import_as_names ''' imprt = ast.ImportFrom(module=p[2], names=p[4], level=0) imprt.col_offset = 0 p[0] = imprt
[docs]def p_import_from3(p): ''' import_from : FROM dotted_name IMPORT LPAR import_as_names RPAR ''' imprt = ast.ImportFrom(module=p[2], names=p[5], level=0) imprt.col_offset = 0 p[0] = imprt
[docs]def p_import_from4(p): ''' import_from : FROM import_from_dots dotted_name IMPORT STAR ''' alias = ast.alias(name=p[5], asname=None) imprt = ast.ImportFrom(module=p[3], names=[alias], level=len(p[2])) imprt.col_offset = 0 p[0] = imprt
[docs]def p_import_from5(p): ''' import_from : FROM import_from_dots dotted_name IMPORT import_as_name ''' imprt = ast.ImportFrom(module=p[3], names=[p[5]], level=len(p[2])) imprt.col_offset = 0 p[0] = imprt
[docs]def p_import_from6(p): ''' import_from : FROM import_from_dots dotted_name IMPORT LPAR import_as_names RPAR ''' imprt = ast.ImportFrom(module=p[3], names=p[6], level=len(p[2])) imprt.col_offset = 0 p[0] = imprt
[docs]def p_import_from7(p): ''' import_from : FROM import_from_dots IMPORT STAR ''' alias = ast.alias(name=p[4], asname=None) imprt = ast.ImportFrom(module=None, names=[alias], level=len(p[2])) imprt.col_offset = 0 p[0] = imprt
[docs]def p_import_from8(p): ''' import_from : FROM import_from_dots IMPORT import_as_names ''' imprt = ast.ImportFrom(module=None, names=p[4], level=len(p[2])) imprt.col_offset = 0 p[0] = imprt
[docs]def p_import_from9(p): ''' import_from : FROM import_from_dots IMPORT LPAR import_as_names RPAR ''' imprt = ast.ImportFrom(module=None, names=p[5], level=len(p[2])) imprt.col_offset = 0 p[0] = imprt
[docs]def p_import_from_dots1(p): ''' import_from_dots : DOT ''' p[0] = [p[1]]
[docs]def p_import_from_dots2(p): ''' import_from_dots : import_from_dots DOT ''' p[0] = p[1] + [p[2]]
[docs]def p_import_as_name1(p): ''' import_as_name : NAME ''' p[0] = ast.alias(name=p[1], asname=None)
[docs]def p_import_as_name2(p): ''' import_as_name : NAME AS NAME ''' p[0] = ast.alias(name=p[1], asname=p[3])
[docs]def p_dotted_as_name1(p): ''' dotted_as_name : dotted_name ''' alias = ast.alias(name=p[1], asname=None) p[0] = alias
[docs]def p_dotted_as_name2(p): ''' dotted_as_name : dotted_name AS NAME ''' alias = ast.alias(name=p[1], asname=p[3]) p[0] = alias
[docs]def p_import_as_names1(p): ''' import_as_names : import_as_name ''' p[0] = [p[1]]
[docs]def p_import_as_names2(p): ''' import_as_names : import_as_name COMMA ''' p[0] = [p[1]]
[docs]def p_import_as_names3(p): ''' import_as_names : import_as_name import_as_names_list ''' p[0] = [p[1]] + p[2]
[docs]def p_import_as_names4(p): ''' import_as_names : import_as_name import_as_names_list COMMA ''' p[0] = [p[1]] + p[2]
[docs]def p_import_as_names_list1(p): ''' import_as_names_list : COMMA import_as_name ''' p[0] = [p[2]]
[docs]def p_import_as_names_list2(p): ''' import_as_names_list : import_as_names_list COMMA import_as_name ''' p[0] = p[1] + [p[3]]
[docs]def p_dotted_as_names1(p): ''' dotted_as_names : dotted_as_name ''' p[0] = [p[1]]
[docs]def p_dotted_as_names2(p): ''' dotted_as_names : dotted_as_name dotted_as_names_list ''' p[0] = [p[1]] + p[2]
[docs]def p_dotted_as_names_list1(p): ''' dotted_as_names_list : COMMA dotted_as_name ''' p[0] = [p[2]]
[docs]def p_dotted_as_names_star_list2(p): ''' dotted_as_names_list : dotted_as_names_list COMMA dotted_as_name ''' p[0] = p[1] + [p[3]]
[docs]def p_dotted_name1(p): ''' dotted_name : NAME ''' p[0] = p[1]
[docs]def p_dotted_name2(p): ''' dotted_name : NAME dotted_name_list ''' p[0] = p[1] + p[2]
[docs]def p_dotted_name_list1(p): ''' dotted_name_list : DOT NAME ''' p[0] = p[1] + p[2]
[docs]def p_dotted_name_list2(p): ''' dotted_name_list : dotted_name_list DOT NAME ''' p[0] = p[1] + p[2] + p[3]
[docs]def p_test1(p): ''' test : or_test ''' p[0] = p[1]
[docs]def p_test2(p): ''' test : or_test IF or_test ELSE test ''' ifexp = ast.IfExp(body=p[1], test=p[3], orelse=p[5]) p[0] = ifexp
[docs]def p_test3(p): ''' test : lambdef ''' p[0] = p[1]
[docs]def p_or_test1(p): ''' or_test : and_test ''' p[0] = p[1]
[docs]def p_or_test2(p): ''' or_test : and_test or_test_list ''' values = [p[1]] + p[2] or_node = ast.BoolOp(op=ast.Or(), values=values) p[0] = or_node
[docs]def p_or_test_list1(p): ''' or_test_list : OR and_test ''' p[0] = [p[2]]
[docs]def p_or_test_list2(p): ''' or_test_list : or_test_list OR and_test ''' p[0] = p[1] + [p[3]]
[docs]def p_and_test1(p): ''' and_test : not_test ''' p[0] = p[1]
[docs]def p_and_test2(p): ''' and_test : not_test and_test_list ''' values = [p[1]] + p[2] and_node = ast.BoolOp(op=ast.And(), values=values) p[0] = and_node
[docs]def p_and_test_list1(p): ''' and_test_list : AND not_test ''' p[0] = [p[2]]
[docs]def p_and_test_list2(p): ''' and_test_list : and_test_list AND not_test ''' p[0] = p[1] + [p[3]]
[docs]def p_not_test(p): ''' not_test : comparison ''' p[0] = p[1]
[docs]def p_not_test2(p): ''' not_test : NOT not_test ''' un_node = ast.UnaryOp(op=ast.Not(), operand=p[2]) p[0] = un_node
[docs]def p_comparison1(p): ''' comparison : expr ''' p[0] = p[1]
def p_comparison2(p):
    ''' comparison : expr comparison_list '''
    # A chained comparison such as `a < b <= c` becomes one Compare node
    # with a single `left` and parallel ops/comparators lists.
    op_nodes = []
    comparators = []
    for op, value in p[2]:
        op_nodes.append(op)
        comparators.append(value)
    p[0] = ast.Compare(left=p[1], ops=op_nodes, comparators=comparators)
[docs]def p_comparison_list1(p): ''' comparison_list : comp_op expr ''' p[0] = [[p[1], p[2]]]
[docs]def p_comparison_list2(p): ''' comparison_list : comparison_list comp_op expr ''' p[0] = p[1] + [[p[2], p[3]]]
[docs]def p_comp_op1(p): ''' comp_op : LESS ''' p[0] = ast.Lt()
[docs]def p_comp_op2(p): ''' comp_op : GREATER ''' p[0] = ast.Gt()
[docs]def p_comp_op3(p): ''' comp_op : EQEQUAL ''' p[0] = ast.Eq()
[docs]def p_comp_op4(p): ''' comp_op : GREATEREQUAL ''' p[0] = ast.GtE()
[docs]def p_comp_op5(p): ''' comp_op : LESSEQUAL ''' p[0] = ast.LtE()
[docs]def p_comp_op6(p): ''' comp_op : NOTEQUAL ''' p[0] = ast.NotEq()
[docs]def p_comp_op7(p): ''' comp_op : IN ''' p[0] = ast.In()
[docs]def p_comp_op8(p): ''' comp_op : NOT IN ''' p[0] = ast.NotIn()
[docs]def p_comp_op9(p): ''' comp_op : IS ''' p[0] = ast.Is()
[docs]def p_comp_op10(p): ''' comp_op : IS NOT ''' p[0] = ast.IsNot()
[docs]def p_expr1(p): ''' expr : xor_expr ''' p[0] = p[1]
def p_expr2(p):
    ''' expr : xor_expr expr_list '''
    # Left-fold the '|' operations: a | b | c -> BinOp(BinOp(a, b), c).
    result = p[1]
    for operator, operand in p[2]:
        result = ast.BinOp(left=result, op=operator, right=operand)
    p[0] = result
[docs]def p_expr_list1(p): ''' expr_list : VBAR xor_expr ''' p[0] = [[ast.BitOr(), p[2]]]
[docs]def p_expr_list2(p): ''' expr_list : expr_list VBAR xor_expr ''' p[0] = p[1] + [[ast.BitOr(), p[3]]]
[docs]def p_xor_expr1(p): ''' xor_expr : and_expr ''' p[0] = p[1]
[docs]def p_xor_expr2(p): ''' xor_expr : and_expr xor_expr_list ''' node = p[1] for op, right in p[2]: node = ast.BinOp(left=node, op=op, right=right) p[0] = node
def p_xor_expr_list1(p):
    ''' xor_expr_list : CIRCUMFLEX and_expr '''
    # BUGFIX: the operand is p[2] (the and_expr node); the original used
    # p[1], which is the '^' token string, producing a BinOp whose right
    # operand was a plain string instead of an ast node.
    p[0] = [[ast.BitXor(), p[2]]]
[docs]def p_xor_expr_list2(p): ''' xor_expr_list : xor_expr_list CIRCUMFLEX and_expr ''' p[0] = p[1] + [[ast.BitXor(), p[3]]]
[docs]def p_and_expr1(p): ''' and_expr : shift_expr ''' p[0] = p[1]
[docs]def p_and_expr2(p): ''' and_expr : shift_expr and_expr_list ''' node = p[1] for op, right in p[2]: node = ast.BinOp(left=node, op=op, right=right) p[0] = node
def p_and_expr_list1(p):
    ''' and_expr_list : AMPER shift_expr '''
    # BUGFIX: this rule has only two symbols, so the operand is p[2];
    # the original indexed p[3], which raised IndexError whenever a
    # single '&' expression was parsed.
    p[0] = [[ast.BitAnd(), p[2]]]
[docs]def p_and_expr_list2(p): ''' and_expr_list : and_expr_list AMPER shift_expr ''' p[0] = p[1] + [[ast.BitAnd(), p[3]]]
[docs]def p_shift_expr1(p): ''' shift_expr : arith_expr ''' p[0] = p[1]
[docs]def p_shift_expr2(p): ''' shift_expr : arith_expr shift_list ''' node = p[1] for op, right in p[2]: node = ast.BinOp(left=node, op=op, right=right) p[0] = node
[docs]def p_shift_list1(p): ''' shift_list : shift_op ''' p[0] = [p[1]]
[docs]def p_shift_list2(p): ''' shift_list : shift_list shift_op ''' p[0] = p[1] + [p[2]]
[docs]def p_shift_op1(p): ''' shift_op : LEFTSHIFT arith_expr ''' p[0] = [ast.LShift(), p[2]]
[docs]def p_shift_op2(p): ''' shift_op : RIGHTSHIFT arith_expr ''' p[0] = [ast.RShift(), p[2]]
[docs]def p_arith_expr1(p): ''' arith_expr : term ''' p[0] = p[1]
[docs]def p_arith_expr2(p): ''' arith_expr : term arith_expr_list ''' node = p[1] for op, right in p[2]: node = ast.BinOp(left=node, op=op, right=right) p[0] = node
[docs]def p_arith_expr_list1(p): ''' arith_expr_list : arith_op ''' p[0] = [p[1]]
[docs]def p_arith_expr_list2(p): ''' arith_expr_list : arith_expr_list arith_op ''' p[0] = p[1] + [p[2]]
[docs]def p_arith_op1(p): ''' arith_op : PLUS term ''' node = ast.Add() p[0] = [node, p[2]]
[docs]def p_arith_op2(p): ''' arith_op : MINUS term ''' p[0] = [ast.Sub(), p[2]]
[docs]def p_term1(p): ''' term : factor ''' p[0] = p[1]
[docs]def p_term2(p): ''' term : factor term_list ''' node = p[1] for op, right in p[2]: node = ast.BinOp(left=node, op=op, right=right) p[0] = node
[docs]def p_term_list1(p): ''' term_list : term_op ''' p[0] = [p[1]]
[docs]def p_term_list2(p): ''' term_list : term_list term_op ''' p[0] = p[1] + [p[2]]
[docs]def p_term_op1(p): ''' term_op : STAR factor ''' p[0] = [ast.Mult(), p[2]]
[docs]def p_term_op2(p): ''' term_op : SLASH factor ''' p[0] = [ast.Div(), p[2]]
[docs]def p_term_op3(p): ''' term_op : PERCENT factor ''' p[0] = [ast.Mod(), p[2]]
[docs]def p_term_op4(p): ''' term_op : DOUBLESLASH factor ''' p[0] = [ast.FloorDiv(), p[2]]
[docs]def p_factor1(p): ''' factor : power ''' p[0] = p[1]
[docs]def p_factor2(p): ''' factor : PLUS factor ''' op = ast.UAdd() operand = p[2] node = ast.UnaryOp(op=op, operand=operand) p[0] = node
[docs]def p_factor3(p): ''' factor : MINUS factor ''' op = ast.USub() operand = p[2] node = ast.UnaryOp(op=op, operand=operand) p[0] = node
[docs]def p_factor4(p): ''' factor : TILDE factor ''' op = ast.Invert() operand = p[2] node = ast.UnaryOp(op=op, operand=operand) p[0] = node
[docs]def p_power1(p): ''' power : atom ''' p[0] = p[1]
[docs]def p_power2(p): ''' power : atom DOUBLESTAR factor ''' node = ast.BinOp(left=p[1], op=ast.Pow(), right=p[3]) p[0] = node
def p_power3(p):
    ''' power : atom power_list '''
    # Fold a chain of trailers (calls, attribute access, subscripts)
    # onto the atom left-to-right: `a.b(c)[d]` nests outward.
    node = p[1]
    for trailer in p[2]:
        if isinstance(trailer, ast.Call):
            trailer.func = node
        elif isinstance(trailer, (ast.Attribute, ast.Subscript)):
            trailer.value = node
        else:
            raise TypeError('Unexpected trailer node: %s' % trailer)
        node = trailer
    p[0] = node
[docs]def p_power4(p): ''' power : atom power_list DOUBLESTAR factor ''' root = p[1] for node in p[2]: if isinstance(node, ast.Call): node.func = root elif isinstance(node, ast.Attribute): node.value = root elif isinstance(node, ast.Subscript): node.value = root else: raise TypeError('Unexpected trailer node: %s' % node) root = node power = ast.BinOp(left=root, op=ast.Pow(), right=p[4]) p[0] = power
[docs]def p_power_list1(p): ''' power_list : trailer ''' p[0] = [p[1]]
[docs]def p_power_list2(p): ''' power_list : power_list trailer ''' p[0] = p[1] + [p[2]]
[docs]def p_atom1(p): ''' atom : LPAR RPAR ''' p[0] = ast.Tuple(elts=[], ctx=Load)
[docs]def p_atom2(p): ''' atom : LPAR yield_expr RPAR ''' p[0] = p[2]
[docs]def p_atom3(p): ''' atom : LPAR testlist_comp RPAR ''' info = p[2] if isinstance(info, CommaSeparatedList): node = ast.Tuple(elts=info.values, ctx=Load) elif isinstance(info, GeneratorInfo): node = ast.GeneratorExp(elt=info.elt, generators=info.generators) else: # We have a test node by itself in parenthesis controlling # order of operations, so just return the node. node = info p[0] = node
[docs]def p_atom4(p): ''' atom : LSQB RSQB ''' p[0] = ast.List(elts=[], ctx=Load)
[docs]def p_atom5(p): ''' atom : LSQB listmaker RSQB ''' info = p[2] if isinstance(info, CommaSeparatedList): node = ast.List(elts=info.values, ctx=Load) elif isinstance(info, GeneratorInfo): node = ast.ListComp(elt=info.elt, generators=info.generators) else: raise TypeError('Unexpected node for listmaker: %s' % info) p[0] = node
[docs]def p_atom6(p): ''' atom : LBRACE RBRACE ''' p[0] = ast.Dict(keys=[], values=[])
[docs]def p_atom7(p): ''' atom : LBRACE dictorsetmaker RBRACE ''' info = p[2] if isinstance(info, GeneratorInfo): if isinstance(info.elt, tuple): key, value = info.elt generators = info.generators node = DictComp(key=key, value=value, generators=generators) else: node = SetComp(elt=info.elt, generators=info.generators) elif isinstance(info, CommaSeparatedList): if isinstance(info.values[0], tuple): keys, values = zip(*info.values) node = ast.Dict(keys=list(keys), values=list(values)) else: node = Set(elts=info.values) else: raise TypeError('Unexpected node for dictorsetmaker: %s' % info) p[0] = node
[docs]def p_atom8(p): ''' atom : NAME ''' p[0] = ast.Name(id=p[1], ctx=Load)
def p_atom9(p):
    ''' atom : NUMBER '''
    # Use literal_eval rather than eval: the NUMBER token should always
    # be a numeric literal, and literal_eval yields the same value while
    # being unable to execute arbitrary code.
    n = ast.Num(n=ast.literal_eval(p[1]))
    p[0] = n
[docs]def p_atom10(p): ''' atom : atom_string_list ''' s = ast.Str(s=p[1]) p[0] = s
[docs]def p_atom_string_list1(p): ''' atom_string_list : STRING ''' p[0] = p[1]
def p_atom_string_list2(p):
    ''' atom_string_list : atom_string_list STRING '''
    # Adjacent string literals concatenate, as in regular Python.
    p[0] = p[1] + p[2]


# We don't allow the backquote atom from standard Python. Just
# use repr(...). This simplifies the grammar since we don't have
# to define a testlist1.
[docs]def p_listmaker1(p): ''' listmaker : test list_for ''' p[0] = GeneratorInfo(elt=p[1], generators=p[2])
[docs]def p_listmaker2(p): ''' listmaker : test ''' p[0] = CommaSeparatedList(values=[p[1]])
[docs]def p_listmaker3(p): ''' listmaker : test COMMA ''' p[0] = CommaSeparatedList(values=[p[1]])
[docs]def p_listmaker4(p): ''' listmaker : test listmaker_list ''' values = [p[1]] + p[2] p[0] = CommaSeparatedList(values=values)
[docs]def p_listmaker5(p): ''' listmaker : test listmaker_list COMMA ''' values = [p[1]] + p[2] p[0] = CommaSeparatedList(values=values)
[docs]def p_listmaker_list1(p): ''' listmaker_list : COMMA test ''' p[0] = [p[2]]
[docs]def p_listmaker_list2(p): ''' listmaker_list : listmaker_list COMMA test ''' p[0] = p[1] + [p[3]]
[docs]def p_testlist_comp1(p): ''' testlist_comp : test comp_for ''' p[0] = GeneratorInfo(elt=p[1], generators=p[2])
[docs]def p_testlist_comp2(p): ''' testlist_comp : test ''' p[0] = p[1]
[docs]def p_testlist_comp3(p): ''' testlist_comp : test COMMA ''' p[0] = CommaSeparatedList(values=[p[1]])
[docs]def p_testlist_comp4(p): ''' testlist_comp : test testlist_comp_list ''' values = [p[1]] + p[2] p[0] = CommaSeparatedList(values=values)
[docs]def p_testlist_comp5(p): ''' testlist_comp : test testlist_comp_list COMMA ''' values = [p[1]] + p[2] p[0] = CommaSeparatedList(values=values)
[docs]def p_testlist_comp_list1(p): ''' testlist_comp_list : COMMA test ''' p[0] = [p[2]]
[docs]def p_testlist_comp_list2(p): ''' testlist_comp_list : testlist_comp_list COMMA test ''' p[0] = p[1] + [p[3]]
[docs]def p_trailer1(p): ''' trailer : LPAR RPAR ''' p[0] = ast.Call(args=[], keywords=[], starargs=None, kwargs=None)
[docs]def p_trailer2(p): ''' trailer : LPAR arglist RPAR ''' args = p[2] p[0] = ast.Call(args=args.args, keywords=args.keywords, starargs=args.starargs, kwargs=args.kwargs)
[docs]def p_trailer3(p): ''' trailer : LSQB subscriptlist RSQB ''' p[0] = ast.Subscript(slice=p[2], ctx=Load)
[docs]def p_trailer4(p): ''' trailer : DOT NAME ''' p[0] = ast.Attribute(attr=p[2], ctx=Load)
[docs]def p_subscriptlist1(p): ''' subscriptlist : subscript ''' p[0] = p[1]
[docs]def p_subscriptlist2(p): ''' subscriptlist : subscript COMMA ''' dims = [p[1]] p[0] = ast.ExtSlice(dims=dims)
[docs]def p_subscriptlist3(p): ''' subscriptlist : subscript subscriptlist_list ''' dims = [p[1]] + p[2] p[0] = ast.ExtSlice(dims=dims)
[docs]def p_subscriptlist4(p): ''' subscriptlist : subscript subscriptlist_list COMMA ''' dims = [p[1]] + p[2] p[0] = ast.ExtSlice(dims=dims)
[docs]def p_subscriptlist_list1(p): ''' subscriptlist_list : COMMA subscript ''' p[0] = [p[2]]
[docs]def p_subscript_list2(p): ''' subscriptlist_list : subscriptlist_list COMMA subscript ''' p[0] = p[1] + [p[3]]
[docs]def p_subscript1(p): ''' subscript : ELLIPSIS ''' p[0] = ast.Ellipsis()
[docs]def p_subcript2(p): ''' subscript : test ''' p[0] = ast.Index(value=p[1])
[docs]def p_subscript3(p): ''' subscript : COLON ''' p[0] = ast.Slice(lower=None, upper=None, step=None)
[docs]def p_subscript4(p): ''' subscript : DOUBLECOLON ''' name = ast.Name(id='None', ctx=Load) p[0] = ast.Slice(lower=None, upper=None, step=name)
def p_subscript5(p):
    ''' subscript : test COLON '''
    # BUGFIX: `upper` was misspelled `uppper`, which set a bogus
    # attribute and left the Slice node's real `upper` field unset
    # (an AttributeError when the ast is later compiled).
    p[0] = ast.Slice(lower=p[1], upper=None, step=None)
[docs]def p_subscrip6(p): ''' subscript : test DOUBLECOLON ''' name = ast.Name(id='None', ctx=Load) p[0] = ast.Slice(lower=p[1], upper=None, step=name)
[docs]def p_subscript7(p): ''' subscript : COLON test ''' p[0] = ast.Slice(lower=None, upper=p[2], step=None)
[docs]def p_subscript8(p): ''' subscript : COLON test COLON ''' name = ast.Name(id='None', ctx=Load) p[0] = ast.Slice(lower=None, upper=p[2], step=name)
[docs]def p_subscript9(p): ''' subscript : DOUBLECOLON test ''' p[0] = ast.Slice(lower=None, upper=None, step=p[2])
[docs]def p_subscript10(p): ''' subscript : test COLON test ''' p[0] = ast.Slice(lower=p[1], upper=p[3], step=None)
[docs]def p_subscript11(p): ''' subscript : test COLON test COLON ''' name = ast.Name(id='None', ctx=Load) p[0] = ast.Slice(lower=p[1], upper=p[3], step=name)
[docs]def p_subscript12(p): ''' subscript : COLON test COLON test ''' p[0] = ast.Slice(lower=None, upper=p[2], step=p[4])
[docs]def p_subscript13(p): ''' subscript : test COLON test COLON test ''' p[0] = ast.Slice(lower=p[1], upper=p[3], step=p[5])
[docs]def p_subscript14(p): ''' subscript : test DOUBLECOLON test ''' p[0] = ast.Slice(lower=p[1], upper=None, step=p[3])
[docs]def p_exprlist1(p): ''' exprlist : expr ''' p[0] = p[1]
[docs]def p_exprlist2(p): ''' exprlist : expr COMMA ''' tup = ast.Tuple() tup.elts = [p[1]] p[0] = tup
def p_exprlist3(p):
    ''' exprlist : expr exprlist_list '''
    # Several expressions form a tuple target/value; context is set by
    # the caller via set_context when needed.
    elements = [p[1]]
    elements.extend(p[2])
    tup = ast.Tuple()
    tup.elts = elements
    p[0] = tup
[docs]def p_exprlist4(p): ''' exprlist : expr exprlist_list COMMA ''' tup = ast.Tuple() tup.elts = [p[1]] + p[2] p[0] = tup
[docs]def p_exprlist_list1(p): ''' exprlist_list : COMMA expr ''' p[0] = [p[2]]
[docs]def p_exprlist_list2(p): ''' exprlist_list : exprlist_list COMMA expr ''' p[0] = p[1] + [p[3]]
[docs]def p_dictorsetmaker1(p): ''' dictorsetmaker : test COLON test comp_for ''' p[0] = GeneratorInfo(elt=(p[1], p[3]), generators=p[4])
[docs]def p_dictorsetmaker2(p): ''' dictorsetmaker : test COLON test ''' values = [(p[1], p[3])] p[0] = CommaSeparatedList(values=values)
[docs]def p_dictorsetmaker3(p): ''' dictorsetmaker : test COLON test COMMA ''' values = [(p[1], p[3])] p[0] = CommaSeparatedList(values=values)
[docs]def p_dictorsetmaker4(p): ''' dictorsetmaker : test COLON test dosm_colon_list ''' values = [(p[1], p[3])] + p[4] p[0] = CommaSeparatedList(values=values)
[docs]def p_dictorsetmaker5(p): ''' dictorsetmaker : test COLON test dosm_colon_list COMMA ''' values = [(p[1], p[3])] + p[4] p[0] = CommaSeparatedList(values=values)
[docs]def p_dictorsetmaker6(p): ''' dictorsetmaker : test comp_for ''' p[0] = GeneratorInfo(elt=p[1], generators=p[2])
[docs]def p_dictorsetmaker7(p): ''' dictorsetmaker : test COMMA ''' values = [p[1]] p[0] = CommaSeparatedList(values=values)
[docs]def p_dictorsetmaker8(p): ''' dictorsetmaker : test dosm_comma_list ''' values = [p[1]] + p[2] p[0] = CommaSeparatedList(values=values)
[docs]def p_dictorsetmaker9(p): ''' dictorsetmaker : test dosm_comma_list COMMA ''' values = [p[1]] + p[2] p[0] = CommaSeparatedList(values=values)
[docs]def p_dosm_colon_list1(p): ''' dosm_colon_list : COMMA test COLON test ''' p[0] = [(p[2], p[4])]
[docs]def p_dosm_colon_list2(p): ''' dosm_colon_list : dosm_colon_list COMMA test COLON test ''' p[0] = p[1] + [(p[3], p[5])]
[docs]def p_dosm_comma_list1(p): ''' dosm_comma_list : COMMA test ''' p[0] = [p[2]]
[docs]def p_dosm_comma_list2(p): ''' dosm_comma_list : dosm_comma_list COMMA test ''' p[0] = p[1] + [p[3]]
[docs]def p_arglist1(p): ''' arglist : argument ''' if isinstance(p[1], ast.keyword): p[0] = Arguments(keywords=[p[1]]) else: p[0] = Arguments(args=[p[1]])
[docs]def p_arglist2(p): ''' arglist : argument COMMA ''' if isinstance(p[1], ast.keyword): p[0] = Arguments(keywords=[p[1]]) else: p[0] = Arguments(args=[p[1]])
[docs]def p_arglist3(p): ''' arglist : STAR test ''' p[0] = Arguments(starargs=p[2])
[docs]def p_arglist4(p): ''' arglist : STAR test COMMA DOUBLESTAR test ''' p[0] = Arguments(starargs=p[2], kwargs=p[5])
[docs]def p_arglist5(p): ''' arglist : DOUBLESTAR test ''' p[0] = Arguments(kwargs=p[2])
[docs]def p_arglist6(p): ''' arglist : arglist_list argument ''' args = [] kws = [] for arg in (p[1] + [p[2]]): if isinstance(arg, ast.keyword): kws.append(arg) else: args.append(arg) p[0] = Arguments(args=args, keywords=kws)
[docs]def p_arglist7(p): ''' arglist : arglist_list argument COMMA ''' args = [] kws = [] for arg in (p[1] + [p[2]]): if isinstance(arg, ast.keyword): kws.append(arg) else: args.append(arg) p[0] = Arguments(args=args, keywords=kws)
[docs]def p_arglist8(p): ''' arglist : arglist_list STAR test ''' args = [] kws = [] for arg in p[1]: if isinstance(arg, ast.keyword): kws.append(arg) else: args.append(arg) p[0] = Arguments(args=args, keywords=kws, starargs=p[3])
[docs]def p_arglist9(p): ''' arglist : arglist_list STAR test COMMA DOUBLESTAR test ''' args = [] kws = [] for arg in p[1]: if isinstance(arg, ast.keyword): kws.append(arg) else: args.append(arg) p[0] = Arguments(args=args, keywords=kws, starargs=p[3], kwargs=p[6])
[docs]def p_arglist10(p): ''' arglist : arglist_list DOUBLESTAR test ''' args = [] kws = [] for arg in p[1]: if isinstance(arg, ast.keyword): kws.append(arg) else: args.append(arg) p[0] = Arguments(args=args, keywords=kws, kwargs=p[3])
[docs]def p_arglist11(p): ''' arglist : STAR test COMMA argument ''' keyword = p[4] if isinstance(keyword, ast.keyword): p[0] = Arguments(keywords=[keyword], starargs=p[2]) else: msg = 'only named arguments may follow *expression' tok = FakeToken(p.lexer.lexer, p.lineno(1)) syntax_error(msg, tok)
[docs]def p_arglist12(p): ''' arglist : STAR test COMMA argument COMMA DOUBLESTAR test ''' keyword = p[4] if isinstance(keyword, ast.keyword): p[0] = Arguments(keywords=[keyword], starargs=p[2], kwargs=p[7]) else: msg = 'only named arguments may follow *expression' tok = FakeToken(p.lexer.lexer, p.lineno(1)) syntax_error(msg, tok)
[docs]def p_arglist13(p): ''' arglist : STAR test COMMA arglist_list argument ''' keywords = p[4] + [p[5]] for kw in keywords: if not isinstance(kw, ast.keyword): msg = 'only named arguments may follow *expression' tok = FakeToken(p.lexer.lexer, p.lineno(1)) syntax_error(msg, tok) p[0] = Arguments(keywords=keywords, starargs=p[2])
[docs]def p_arglist14(p): ''' arglist : STAR test COMMA arglist_list argument COMMA DOUBLESTAR test ''' keywords = p[4] + [p[5]] for kw in keywords: if not isinstance(kw, ast.keyword): msg = 'only named arguments may follow *expression' tok = FakeToken(p.lexer.lexer, p.lineno(1)) syntax_error(msg, tok) p[0] = Arguments(keywords=keywords, starargs=p[2], kwargs=p[8])
[docs]def p_arglist_list1(p): ''' arglist_list : argument COMMA ''' p[0] = [p[1]]
[docs]def p_arglist_list2(p): ''' arglist_list : arglist_list argument COMMA ''' p[0] = p[1] + [p[2]]
[docs]def p_argument1(p): ''' argument : test ''' p[0] = p[1]
[docs]def p_argument2(p): ''' argument : test comp_for ''' p[0] = ast.GeneratorExp(elt=p[1], generators=p[2]) # This keyword argument needs to be asserted as a NAME, but using NAME # here causes ambiguity in the parse tables.
[docs]def p_argument3(p): ''' argument : test EQUAL test ''' arg = p[1] assert isinstance(arg, ast.Name), 'Keyword arg must be a Name.' value = p[3] p[0] = ast.keyword(arg=arg.id, value=value)
[docs]def p_list_for1(p): ''' list_for : FOR exprlist IN testlist_safe ''' target = p[2] set_context(target, Store, p) p[0] = [ast.comprehension(target=target, iter=p[4], ifs=[])]
[docs]def p_list_for2(p): ''' list_for : FOR exprlist IN testlist_safe list_iter ''' target = p[2] set_context(target, Store, p) gens = [] gens.append(ast.comprehension(target=target, iter=p[4], ifs=[])) for item in p[5]: if isinstance(item, ast.comprehension): gens.append(item) else: gens[-1].ifs.append(item) p[0] = gens
[docs]def p_list_iter1(p): ''' list_iter : list_for ''' p[0] = p[1]
[docs]def p_list_iter2(p): ''' list_iter : list_if ''' p[0] = p[1]
[docs]def p_list_if1(p): ''' list_if : IF old_test ''' p[0] = [p[2]]
[docs]def p_list_if2(p): ''' list_if : IF old_test list_iter ''' p[0] = [p[2]] + p[3]
[docs]def p_comp_for1(p): ''' comp_for : FOR exprlist IN or_test ''' target = p[2] set_context(target, Store, p) p[0] = [ast.comprehension(target=target, iter=p[4], ifs=[])]
[docs]def p_comp_for2(p): ''' comp_for : FOR exprlist IN or_test comp_iter ''' target = p[2] set_context(target, Store, p) gens = [] gens.append(ast.comprehension(target=target, iter=p[4], ifs=[])) for item in p[5]: if isinstance(item, ast.comprehension): gens.append(item) else: gens[-1].ifs.append(item) p[0] = gens
[docs]def p_comp_iter1(p): ''' comp_iter : comp_for ''' p[0] = p[1]
[docs]def p_comp_iter2(p): ''' comp_iter : comp_if ''' p[0] = p[1]
[docs]def p_comp_if1(p): ''' comp_if : IF old_test ''' p[0] = [p[2]]
[docs]def p_comp_if2(p): ''' comp_if : IF old_test comp_iter ''' p[0] = [p[2]] + p[3]
[docs]def p_testlist_safe1(p): ''' testlist_safe : old_test ''' p[0] = p[1]
[docs]def p_testlist_safe2(p): ''' testlist_safe : old_test testlist_safe_list ''' values = [p[1]] + p[2] p[0] = ast.Tuple(elts=values, ctx=Load)
[docs]def p_testlist_safe3(p): ''' testlist_safe : old_test testlist_safe_list COMMA ''' values = [p[1]] + p[2] p[0] = ast.Tuple(elts=values, ctx=Load)
[docs]def p_testlist_safe_list1(p): ''' testlist_safe_list : COMMA old_test ''' p[0] = [p[2]]
[docs]def p_testlist_safe_list2(p): ''' testlist_safe_list : testlist_safe_list COMMA old_test ''' p[0] = p[1] + [p[3]]
[docs]def p_old_test1(p): ''' old_test : or_test ''' p[0] = p[1]
[docs]def p_old_test2(p): ''' old_test : old_lambdef ''' p[0] = p[1]
[docs]def p_old_lambdef1(p): ''' old_lambdef : LAMBDA COLON old_test ''' args = ast.arguments(args=[], defaults=[], kwarg=None, vararg=None) body = p[3] p[0] = ast.Lambda(args=args, body=body)
[docs]def p_old_lambdef2(p): ''' old_lambdef : LAMBDA varargslist COLON old_test ''' args = p[2] body = p[4] p[0] = ast.Lambda(args=args, body=body)
[docs]def p_lambdef1(p): ''' lambdef : LAMBDA COLON test ''' args = ast.arguments(args=[], defaults=[], kwarg=None, vararg=None) body = p[3] p[0] = ast.Lambda(args=args, body=body)
[docs]def p_lambdef2(p): ''' lambdef : LAMBDA varargslist COLON test ''' args = p[2] body = p[4] p[0] = ast.Lambda(args=args, body=body)
def p_varargslist1(p):
    ''' varargslist : fpdef COMMA STAR NAME '''
    # Covers: def f(a, *args) and def f((a, b), *args)
    p[0] = ast.arguments(
        args=[p[1]], defaults=[], vararg=p[4], kwarg=None,
    )


def p_varargslist2(p):
    ''' varargslist : fpdef COMMA STAR NAME COMMA DOUBLESTAR NAME '''
    # Covers: def f(a, *args, **kwargs) and def f((a, b), *args, **kwargs)
    p[0] = ast.arguments(
        args=[p[1]], defaults=[], vararg=p[4], kwarg=p[7],
    )


def p_varargslist3(p):
    ''' varargslist : fpdef COMMA DOUBLESTAR NAME '''
    # Covers: def f(a, **kwargs) and def f((a, b), **kwargs)
    p[0] = ast.arguments(
        args=[p[1]], defaults=[], vararg=None, kwarg=p[4],
    )


def p_varargslist4(p):
    ''' varargslist : fpdef '''
    # Covers: def f(a) and def f((a, b))
    p[0] = ast.arguments(
        args=[p[1]], defaults=[], vararg=None, kwarg=None,
    )


def p_varargslist5(p):
    ''' varargslist : fpdef COMMA '''
    # Covers: def f(a,) and def f((a, b),)
    p[0] = ast.arguments(
        args=[p[1]], defaults=[], vararg=None, kwarg=None,
    )
def p_varargslist6(p):
    ''' varargslist : fpdef varargslist_list COMMA STAR NAME '''
    # Covers: def f((a, b), c, *args) and def f((a, b), c, d=4, *args)
    extra, defaults = p[2]
    p[0] = ast.arguments(args=[p[1]] + extra, defaults=defaults,
                         vararg=p[5], kwarg=None)


def p_varargslist7(p):
    ''' varargslist : fpdef varargslist_list COMMA STAR NAME COMMA DOUBLESTAR NAME '''
    # Covers: def f((a, b), c, *args, **kwargs)
    #         def f((a, b), c, d=4, *args, **kwargs)
    extra, defaults = p[2]
    p[0] = ast.arguments(args=[p[1]] + extra, defaults=defaults,
                         vararg=p[5], kwarg=p[8])


def p_varargslist8(p):
    ''' varargslist : fpdef varargslist_list COMMA DOUBLESTAR NAME '''
    # Covers: def f((a, b), c, **kwargs) and def f((a, b), c, d=4, **kwargs)
    extra, defaults = p[2]
    p[0] = ast.arguments(args=[p[1]] + extra, defaults=defaults,
                         vararg=None, kwarg=p[5])


def p_varargslist9(p):
    ''' varargslist : fpdef varargslist_list '''
    # Covers: def f((a, b), c) and def f((a, b), c, d=4)
    extra, defaults = p[2]
    p[0] = ast.arguments(args=[p[1]] + extra, defaults=defaults,
                         vararg=None, kwarg=None)


def p_varargslist10(p):
    ''' varargslist : fpdef varargslist_list COMMA '''
    # Covers: def f((a, b), c,) and def f((a, b), c, d=4,)
    extra, defaults = p[2]
    p[0] = ast.arguments(args=[p[1]] + extra, defaults=defaults,
                         vararg=None, kwarg=None)
def p_varargslist11(p):
    ''' varargslist : fpdef EQUAL test COMMA STAR NAME '''
    # Covers: def f(a=1, *args) and def f((a,b)=(1,2), *args)
    p[0] = ast.arguments(
        args=[p[1]], defaults=[p[3]], vararg=p[6], kwarg=None,
    )


def p_varargslist12(p):
    ''' varargslist : fpdef EQUAL test COMMA STAR NAME COMMA DOUBLESTAR NAME '''
    # Covers: def f(a=1, *args, **kwargs) and def f((a,b)=(1,2), *args, **kwargs)
    p[0] = ast.arguments(
        args=[p[1]], defaults=[p[3]], vararg=p[6], kwarg=p[9],
    )


def p_varargslist13(p):
    ''' varargslist : fpdef EQUAL test COMMA DOUBLESTAR NAME '''
    # Covers: def f(a=1, **kwargs) and def f((a,b)=(1,2), **kwargs)
    p[0] = ast.arguments(
        args=[p[1]], defaults=[p[3]], vararg=None, kwarg=p[6],
    )


def p_varargslist14(p):
    ''' varargslist : fpdef EQUAL test '''
    # Covers: def f(a=1) and def f((a,b)=(1,2))
    p[0] = ast.arguments(
        args=[p[1]], defaults=[p[3]], vararg=None, kwarg=None,
    )


def p_varargslist15(p):
    ''' varargslist : fpdef EQUAL test COMMA '''
    # Covers: def f(a=1,) and def f((a,b)=(1,2),)
    p[0] = ast.arguments(
        args=[p[1]], defaults=[p[3]], vararg=None, kwarg=None,
    )
def _merge_defaulted(p, head_arg, head_default, tail):
    # Combine a leading defaulted parameter with the (args, defaults)
    # pair produced by varargslist_list. Once one parameter has a
    # default, every following parameter must also have one, so a
    # length mismatch is reported as a syntax error at the EQUAL token.
    tail_args, tail_defaults = tail
    if len(tail_args) != len(tail_defaults):
        msg = 'non-default argument follows default argument.'
        tok = FakeToken(p.lexer.lexer, p.lineno(2))
        syntax_error(msg, tok)
    return [head_arg] + tail_args, [head_default] + tail_defaults


def p_varargslist16(p):
    ''' varargslist : fpdef EQUAL test varargslist_list COMMA STAR NAME '''
    # Covers: def f(a=1, b=2, *args)
    args, defaults = _merge_defaulted(p, p[1], p[3], p[4])
    p[0] = ast.arguments(args=args, defaults=defaults, vararg=p[7],
                         kwarg=None)


def p_varargslist17(p):
    ''' varargslist : fpdef EQUAL test varargslist_list COMMA STAR NAME COMMA DOUBLESTAR NAME '''
    # Covers: def f(a=1, b=2, *args, **kwargs)
    args, defaults = _merge_defaulted(p, p[1], p[3], p[4])
    p[0] = ast.arguments(args=args, defaults=defaults, vararg=p[7],
                         kwarg=p[10])


def p_varargslist18(p):
    ''' varargslist : fpdef EQUAL test varargslist_list COMMA DOUBLESTAR NAME '''
    # Covers: def f(a=1, b=2, **kwargs)
    args, defaults = _merge_defaulted(p, p[1], p[3], p[4])
    p[0] = ast.arguments(args=args, defaults=defaults, vararg=None,
                         kwarg=p[7])


def p_varargslist19(p):
    ''' varargslist : fpdef EQUAL test varargslist_list '''
    # Covers: def f(a=1, b=2)
    args, defaults = _merge_defaulted(p, p[1], p[3], p[4])
    p[0] = ast.arguments(args=args, defaults=defaults, vararg=None,
                         kwarg=None)


def p_varargslist20(p):
    ''' varargslist : fpdef EQUAL test varargslist_list COMMA '''
    # Covers: def f(a=1, b=2,)
    args, defaults = _merge_defaulted(p, p[1], p[3], p[4])
    p[0] = ast.arguments(args=args, defaults=defaults, vararg=None,
                         kwarg=None)
def p_varargslist21(p):
    ''' varargslist : STAR NAME '''
    # Covers: def f(*args)
    p[0] = ast.arguments(args=[], defaults=[], vararg=p[2], kwarg=None)


def p_varargslist22(p):
    ''' varargslist : STAR NAME COMMA DOUBLESTAR NAME '''
    # Covers: def f(*args, **kwargs)
    p[0] = ast.arguments(args=[], defaults=[], vararg=p[2], kwarg=p[5])


def p_varargslist23(p):
    ''' varargslist : DOUBLESTAR NAME '''
    # Covers: def f(**kwargs)
    p[0] = ast.arguments(args=[], defaults=[], vararg=None, kwarg=p[2])


# The varargslist_list handlers return a 2-tuple of (args, defaults) lists
def p_varargslist_list1(p):
    ''' varargslist_list : COMMA fpdef '''
    # A parameter with no default: start with an empty defaults list.
    p[0] = ([p[2]], [])


def p_varargslist_list2(p):
    ''' varargslist_list : COMMA fpdef EQUAL test '''
    # A defaulted parameter: record the default alongside it.
    p[0] = ([p[2]], [p[4]])


def p_varargslist_list3(p):
    ''' varargslist_list : varargslist_list COMMA fpdef '''
    # A plain parameter may not follow one that had a default.
    prev_args, prev_defaults = p[1]
    if prev_defaults:
        msg = 'non-default argument follows default argument.'
        tok = FakeToken(p.lexer.lexer, p.lineno(2))
        syntax_error(msg, tok)
    p[0] = (prev_args + [p[3]], prev_defaults)


def p_varargslist_list4(p):
    ''' varargslist_list : varargslist_list COMMA fpdef EQUAL test '''
    # Append both the parameter and its default to the accumulators.
    prev_args, prev_defaults = p[1]
    p[0] = (prev_args + [p[3]], prev_defaults + [p[5]])
def p_fpdef1(p):
    ''' fpdef : NAME '''
    # A plain parameter name is wrapped in a Name node with Param
    # context (the Python 2 AST context for function parameters).
    p[0] = ast.Name(id=p[1], ctx=ast.Param())


def p_fpdef2(p):
    ''' fpdef : LPAR fplist RPAR '''
    # fplist already produces a Name or a Tuple node, so the
    # parenthesized form is a simple passthrough.
    p[0] = p[2]
def p_fplist1(p):
    ''' fplist : fpdef '''
    # A single parameter is not a tuple; pass it through.
    p[0] = p[1]


def p_fplist2(p):
    ''' fplist : fpdef COMMA '''
    # One parameter plus a trailing comma still unpacks as a tuple.
    node = ast.Tuple()
    node.elts = [p[1]]
    set_context(node, Store, p)
    p[0] = node


def p_fplist3(p):
    ''' fplist : fpdef fplist_list '''
    # Several parameters unpack as a storing tuple.
    node = ast.Tuple()
    node.elts = [p[1]] + p[2]
    set_context(node, Store, p)
    p[0] = node


def p_fplist4(p):
    ''' fplist : fpdef fplist_list COMMA '''
    # Same as above; the trailing comma changes nothing.
    node = ast.Tuple()
    node.elts = [p[1]] + p[2]
    set_context(node, Store, p)
    p[0] = node
def p_fplist_list1(p):
    ''' fplist_list : COMMA fpdef '''
    # Start the accumulator with the first comma-separated parameter.
    p[0] = [p[2]]


def p_fplist_list2(p):
    ''' fplist_list : fplist_list COMMA fpdef '''
    # Extend the accumulator with the next parameter.
    p[0] = p[1] + [p[3]]
def p_error(t):
    # Ply is inconsistent about which lexer object it attaches to the
    # error token: sometimes it is our EnamlLexer wrapper, sometimes
    # the underlying Ply lexer. Normalize to the raw Ply lexer before
    # reporting the error.
    lexer = t.lexer
    if isinstance(lexer, EnamlLexer):
        lexer = lexer.lexer
    syntax_error('invalid syntax', FakeToken(lexer, t.lineno))


#==============================================================================
# End Parsing Rules
#==============================================================================

# Get a save directory for the lex and parse tables
_parse_dir = os.path.join(os.path.dirname(__file__), 'parse_tab')
_parse_module = 'enaml.core.parse_tab.parsetab'

# Build the parser once at import time, writing its tables into the
# package-local parse_tab directory. Ply's own error log is silenced;
# errors are surfaced through our syntax_error machinery instead.
_parser = yacc.yacc(
    debug=0,
    outputdir=_parse_dir,
    tabmodule=_parse_module,
    optimize=1,
    errorlog=yacc.NullLogger(),
)


def parse(enaml_source, filename='Enaml'):
    """Parse a string of enaml source into an enaml AST.

    Errors raised inside lexing/parsing rules are wrapped in a custom
    ParsingError. The wrapper is callable and returns the exception
    instance that should actually be raised. This indirection exists
    because raising SyntaxError from within a rule puts Ply into error
    recovery mode; we would rather abort immediately, so the real error
    is re-raised here, outside of Ply's control.
    """
    try:
        lexer = EnamlLexer(filename)
        return _parser.parse(enaml_source, debug=0, lexer=lexer)
    except ParsingError as parse_error:
        raise parse_error()