bison-patches
[Top][All Lists]
Advanced

[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]

Patches to support python: 3/5: New files to support python


From: Dennis Heimbigner
Subject: Patches to support python: 3/5: New files to support python
Date: Tue, 03 Sep 2013 16:19:24 -0600
User-agent: Thunderbird 2.0.0.24 (Windows/20100228)

>From 38cf992d3a00a83ce102bb065520281a659d2088 Mon Sep 17 00:00:00 2001
From: dmh <address@hidden>
Date: Tue, 3 Sep 2013 15:43:31 -0600
Subject: [PATCH 2/2] Add support for parsers using the python language. Part2:
 new files

* data/lalr1.py
* data/python.m4
* data/python-skel.m4
* tests/python.at
---
 data/lalr1.py       | 1062 +++++++++++++++++++++++++++++++++
 data/python-skel.m4 |   26 +
 data/python.m4      |  336 +++++++++++
 tests/python.at     | 1620 +++++++++++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 3044 insertions(+)
 create mode 100644 data/lalr1.py
 create mode 100644 data/python-skel.m4
 create mode 100644 data/python.m4
 create mode 100644 tests/python.at

diff --git a/data/lalr1.py b/data/lalr1.py
new file mode 100644
index 0000000..0aca5f3
--- /dev/null
+++ b/data/lalr1.py
@@ -0,0 +1,1062 @@
+# Python skeleton for Bison -*- autoconf -*-
+# Derived from existing Java skeleton
+#
+# Copyright (C) 2007-2013 Free Software Foundation, Inc.
+# 
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+# 
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+# 
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# WARNING: In Python, line breaks
+# and indentation have syntactic significance
+# so be very careful to verify that any
+# changes are correct under any set of options.
+# An important rule is to never use tabs in this code.
+# Also note that because the python comment character (#)
+# is the same as the m4 comment character, comments
+# only intended to appear in the skeleton leak through
+# to the final parse code.
+# Fixing this is tricky: using m4_changecom still leaves
+# comments inside quotes. So currently, the comment leaks
+# are left in place.
+
+m4_include(b4_pkgdatadir/[python.m4])
+
+# Adding text location lines to Python must be suppressed because it
+# causes python compilation errors
+
+b4_defines_if([b4_fatal([%s: %%defines does not make sense in Python],
+              [b4_skeleton])])
+
+# Define test macros for lex-param and parse-param
+m4_define([b4_lex_param_if],[m4_ifset([b4_lex_param],[$1],[$2])])
+
+m4_define([b4_parse_param_if],[m4_ifset([b4_parse_param],[$1],[$2])])
+
+# We do not depend on %debug in Python, but pacify warnings about
+# non-used flags.
+b4_parse_trace_if([0], [0])
+
+# Destructors are not supported
+m4_define([b4_symbol_no_destructor_assert],
+
+[b4_symbol_if([$1], [has_destructor],
+              [b4_fatal([%s: %s: %%destructor not supported in Python],
+                      [b4_skeleton],
+                      [b4_symbol_action_location([$1], [destructor])])])])
+b4_symbol_foreach([b4_symbol_no_destructor_assert])
+
+# Define a macro to encapsulate the parse state variables.
+# This allows them to be defined either locally in parse () when doing
+# pull parsing, or as class instance variables when doing push parsing.
+
+m4_define([b4_define_state],[[
+    # Lookahead and lookahead in internal form.
+    ]b4_self[yychar = yyempty_
+    ]b4_self[yytoken = 0
+
+    # State.
+    ]b4_self[yyn = 0
+    ]b4_self[yylen = 0
+    ]b4_self[yystate = 0
+    ]b4_self[yystack = YYStack ()
+    ]b4_self[label = YYNEWSTATE
+
+    # Error handling.
+    ]b4_self[yynerrs_ = 0
+    ]b4_locations_if([[# The location where the error started.
+    # Location.
+    ]b4_self[yylloc = ]b4_location_type[ (
+                                          ]b4_position_type[ (0,0),
+                                          ]b4_position_type[ (0,0))]])[
+    # Semantic value of the lookahead.
+    ]b4_self[yylval = None
+]])
+
+b4_output_begin([b4_parser_file_name])
+b4_copyright([Skeleton implementation for Bison LALR(1) parsers in Python],
+             [2013])
+
+[# A Bison parser, automatically generated from
+#  <tt>]m4_bpatsubst(b4_file_name, [^"\(.*\)"$], [\1])[</tt>.
+#
+# @@author LALR (1) parser skeleton written by Dennis Heimbigner,
+#          derived from the Java parser skeleton.
+
+# Parser skeleton starts here
+
+# First part of user declarations.
+]b4_user_pre_prologue
+b4_user_post_prologue[
+
+# Import modules required for parser operation
+import sys
+import traceback
+# Additional, user specified, imports
+]b4_percent_code_get([[imports]])[
+
+# %code top
+]b4_percent_code_get([[top]])[
+
+##################################################
+# Module Level Declarations
+##################################################
+
+# Returned by a Bison action in order to stop the parsing process and
+# return success (<tt>true</tt>).
+YYACCEPT = 0
+
+# Returned by a Bison action in order to stop the parsing process and
+# return failure (<tt>false</tt>).
+YYABORT = 1
+# Returned by a Bison action in order to start error recovery without
+# printing an error message.
+YYERROR = 2
+
+]b4_push_if([[
+# Returned by a Bison action in order to request a new token.
+YYPUSH_MORE = 4]])[
+
+# Internal return codes that are not supported for user semantic
+# actions.
+
+YYERRLAB = 3
+YYNEWSTATE = 4
+YYDEFAULT = 5
+YYREDUCE = 6
+YYERRLAB1 = 7
+YYRETURN = 8
+
+]b4_push_if([[YYGETTOKEN = 9 # Signify that a new token
+               # is expected when doing push-parsing.
+]])[
+
+# Define a symbol for use with our fake switch statement in yyaction ()
+YYACTION = 10
+
+# Map internal labels to strings for those that occur in the "switch".
+LABELNAMES = (
+"YYACCEPT",
+"YYABORT",
+"YYERROR",
+"YYERRLAB",
+"YYNEWSTATE",
+"YYDEFAULT",
+"YYREDUCE",
+"YYERRLAB1",
+"YYRETURN",
+"YYGETTOKEN",
+"YYACTION"
+)
+
+# For Python, the Token identifiers are define
+# at the module level rather than inside
+# the Lexer class. Given python's name scoping,
+# this simplifies things.
+
+# Token returned by the scanner to signal the end of its input.
+EOF = 0
+
+
+]b4_token_defines[
+
+# %code requires
+]b4_percent_code_get([[requires]])[
+
+##################################################
+# Table data and methods
+##################################################
+
+# Whether the given <code>yypact_</code> value indicates a defaulted state.
+# @@param yyvalue   the value to check
+
+def yy_pact_value_is_default_ (yyvalue) :
+  return yyvalue == yypact_ninf_
+
+# Whether the given <code>yytable_</code>
+# value indicates a syntax error.
+# @@param yyvalue the value to check
+
+def yy_table_value_is_error_ (yyvalue) :
+  return yyvalue == yytable_ninf_
+
+##################################################
+# Define the parsing tables
+##################################################
+
+]b4_python_parser_tables_define[
+
+]b4_integral_python_parser_table_define([token_number], [b4_toknum],
+[[YYTOKEN_NUMBER[YYLEX-NUM] -- Internal symbol number corresponding
+ to YYLEX-NUM.]])[
+
+# YYTNAME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM.
+#   First, the terminals, then, starting at yyntokens_, nonterminals.
+]b4_integral_python_parser_table_define([tname], [b4_tname])[
+
+]b4_integral_python_parser_table_define([rline], [b4_rline],
+[[YYRLINE[YYN] -- Source line where rule number YYN was defined.]])[
+
+# YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX.
+]b4_integral_python_parser_table_define([translate_table], [b4_translate])[
+
+def yytranslate_ (t) :
+  if (t >= 0 and t <= yyuser_token_number_max_) :
+    return yytranslate_table_[t]
+  else :
+    return yyundef_token_
+
+  # Return whether error recovery is being done.  In this state, the parser
+  # reads token until it reaches a known state, and then restarts normal
+  # operation.
+  def recovering (self) :
+    return self.yyerrstatus_ == 0
+
+# Table variable related constants
+yylast_ = ]b4_last[
+yynnts_ = ]b4_nterms_number[
+yyempty_ = -2
+yyfinal_ = ]b4_final_state_number[
+yyterror_ = 1
+yyerrcode_ = 256
+yyntokens_ = ]b4_tokens_number[
+
+yyuser_token_number_max_ = ]b4_user_token_number_max[
+yyundef_token_ = ]b4_undef_token_number[
+
+yypact_ninf_ = ]b4_pact_ninf[
+yytable_ninf_ = ]b4_table_ninf[
+
+##################################################
+# Auxiliary Classes
+##################################################
+
+]b4_locations_if([[
+#
+# A class defining a pair of positions.  Positions, defined by the
+# <code>]b4_position_type[</code> class, denote a point in the input.
+# Locations represent a part of the input through the beginning
+# and ending positions.
+
+class ]b4_location_type[ :
+
+  # User variadic init for what in Java would use multiple constructors
+  def __init__ (self, *argv) :
+    if (len(argv) == 0) :
+      self.begin = None
+      self.end = self.begin
+    elif (len(argv) == 1) :
+      self.begin = argv[0]
+      self.end = self.begin
+    else :
+      self.begin = argv[0]
+      self.end = argv[1]
+  # end __init__
+
+  # Print a representation of the location.
+
+  def __str__ (self) :
+    if (self.begin == self.end) :
+      return str(self.begin)
+    else :
+      return str(self.begin) + '-' + str(self.end)
+# end class ]b4_location_type[
+]])[
+
+class YYStack :
+
+  def __init__ (self) :
+    self.size = 16
+    self.height = -1
+    self.stateStack=[]
+    self.valueStack=[]
+    ]b4_locations_if([self.locStack=[[]]])[
+
+  def push (self, state, value, ]b4_locations_if([loc])[) :
+    self.height += 1
+    self.stateStack.append(state)
+    self.valueStack.append(value)
+    ]b4_locations_if([self.locStack.append(loc)])[
+
+  def pop (self, num) :
+    if (num > 0) :
+      for i in range(num) :
+        self.valueStack.pop()
+        self.stateStack.pop()
+        ]b4_locations_if([[self.locStack.pop()]])[
+    self.height -= num
+
+  def stateAt (self, i) :
+    return self.stateStack[self.height - i]
+
+  ]b4_locations_if([[def locationAt (self, i) :
+    return self.locStack[self.height - i]
+  ]])[
+  def valueAt (self, i) :
+    return self.valueStack[self.height - i]
+
+  # Print the state stack on the debug stream.
+  # Note: needs to be renamed for Python
+  def yyprint (self, out) :
+    out.write ("Stack now")
+    for x in self.stateStack[:] :
+        out.write (' ')
+        out.write (str(x))
+    out.write ('\n')
+# end class YYStack
+
+##################################################
+# Class Lexer
+##################################################
+
+# This class defines the communication interface between the
+# scanner and the Bison-generated parser <tt>]b4_parser_class_name[</tt>.
+#
+# For Python there are some things to note.
+# 1. Lexer is defined as a class because Java-like interfaces
+#    are not supported.
+# 2. The lexer class is defined at module scope.
+# 3. Python allows for the return of multiple values, so
+#    yylex can return the token and lval all at one time.
+#    Location information is still obtained by calling
+#    Lexer methods.
+# 4. The lexer also supports the user's yyerror method,
+#    but see the note at the push_parse method.
+
+class Lexer :
+
+  def __init__ (self]b4_locations_if([, loc])[) :
+    pass
+
+]b4_locations_if([[
+  # Method to retrieve the beginning position of the last scanned token.
+  # @@return the position at which the last scanned token starts.
+
+  def getStartPos (self) :
+    return None
+
+  # Method to retrieve the ending position of the last scanned token.
+  # @@return the first position beyond the last scanned token.
+
+  def getEndPos (self) :
+    return None
+]])[
+  # Entry point for the scanner.
+  # Returns two values: (1) the token identifier corresponding
+  # to the next token, and 2) the semantic value associated with the token.
+  # ]b4_locations_if([Additionally it records the
+  #  beginning/ending positions of the token.])[
+  # @@return the token identifier corresponding to the next token
+  #          and the semantic value.
+
+  def yylex (self) :
+    return (0,None)
+
+  # Entry point for error reporting.  Emits an error
+  # ]b4_locations_if([referring to the given location ])[
+  # in a user-defined way.
+  #
+  # @@param msg The string for the error message.
+  # ]b4_locations_if([[@@param location
+  #        The location of the element to which the
+  #        error message is related]])[
+
+  def yyerror (self, ]b4_locations_if([location, ])[msg) :
+    s = msg
+    ]b4_locations_if([[s = str(location) + ": " + s]])[
+    sys.stderr.write(s+'\n')
+
+# If the user specifies %code lexer ...
+# Then insert it here
+]b4_lex_param_if([[class YYLexer (Lexer) :
+]b4_percent_code_get([[lexer]])[
+]])[
+
+
+##################################################
+# Primary Parser Class
+##################################################
+
+class ]b4_parser_class_name[]b4_percent_define_get3([extends], [(], [)])[ :
+]b4_identification[
+
+
+  ##################################################
+  # Class ]b4_parser_class_name[ API
+  ##################################################
+
+  # Instance Variables
+
+  # Return whether verbose error messages are enabled.
+  def getErrorVerbose(self) :
+    return self.yyErrorVerbose
+
+  # Set the verbosity of error messages.
+  # @@param verbose True to request verbose error messages.
+  def setErrorVerbose(self, verbose) :
+    self.yyErrorVerbose = verbose
+
+  # Return the <tt>PrintStream</tt> on which the debugging output is
+  # printed.
+
+  def getDebugStream (self) :
+    return self.yyDebugStream
+
+  # Set the <tt>PrintStream</tt> on which the debug output is printed.
+  # @@param s The stream that is used for debugging output.
+  def setDebugStream(self, s) :
+    self.yyDebugStream = s
+
+  # Answer the verbosity of the debugging output; 0 means that all kinds of
+  # output from the parser are suppressed.
+  def getDebugLevel (self) :
+    return self.yydebug
+
+  # Set the verbosity of the debugging output 0 means that all kinds of
+  # output from the parser are suppressed.
+  # @@param level The verbosity level for debugging output.
+  def setDebugLevel (self, level) :
+    self.yydebug = level
+
+  ##################################################
+  # Class ]b4_parser_class_name[ Constructor
+  ##################################################
+
+  # Instantiates the Bison-generated parser.
+  # ]b4_lex_param_if([],[[@@param yylexer The scanner class
+  # instance that will supply tokens to the parser.]])[
+
+  def __init__  (self]b4_lex_param_if([],
+                 [, yylexer])[]b4_lex_param_if(
+                 [, b4_lex_param_decl])[]b4_parse_param_if(
+                 [, b4_parse_param_decl])[):
+]b4_percent_code_get([[init]])[
+]b4_lex_param_if([],[    self.yylexer = yylexer])[
+]b4_lex_param_if([[    self.yylexer = YYLexer(]b4_lex_param_call[)]])[
+]b4_parse_param_if([[]b4_parse_param_cons[]])[
+    self.yyDebugStream = sys.stderr
+    self.yydebug = 0
+    self.yyerrstatus_ = 0
+]b4_push_if([    self.push_parse_initialized = False])[
+]b4_error_verbose_if([    # True if verbose error messages are enabled.
+    self.yyErrorVerbose = True])[
+  # end __init__
+
+]b4_push_if([b4_define_state])[
+
+  ##################################################
+  # User defined action invocation.
+  ##################################################
+
+  # For python, pass in the yyerror function
+  # to simplify access so that the caller does not need to prefix it.
+  def yyaction (self, yyn, yystack, yylen, yyerror) :
+    yylval = None
+    ]b4_locations_if([[yyloc = self.yylocation (yystack, yylen)]])[
+
+    # If YYLEN is nonzero, implement the default value of the action:
+    #   '$$ = $1'.  Otherwise, use the top of the stack.
+    #    Otherwise, the following line sets YYVAL to garbage.
+    #    This behavior is undocumented and Bison
+    #    users should not rely upon it.
+    if (yylen > 0) :
+      yyval = yystack.valueAt (yylen - 1)
+    else :
+      yyval = yystack.valueAt (0)
+
+    self.yy_reduce_print (yyn, yystack)
+
+    # Simulate a switch in python using if ... elif ... else .
+    # This is inefficient, but until python adds a true switch
+    # and in light of the fact that one cannot do assignments
+    # in lambda expression, this seems to be the best solution available.
+    # The performance cost is potentially horrendous.
+    # Advice on an alternative that allows for assignment would be welcome.
+    # Note that the user_actions gives no clue about if the case call
+    # is the first or not, so we need to fake an initial false call.
+
+    # Note that the action body is indentation sensitive
+
+    if False :
+      pass
+
+]b4_user_actions[
+
+    else: pass
+
+    self.yy_symbol_print ("-> $$ =",
+                          yyr1_[yyn], yyval]b4_locations_if([, yyloc])[)
+
+    yystack.pop (yylen)
+    yylen = 0
+
+    # Shift the result of the reduction.
+    yyn = yyr1_[yyn]
+    tmp = yyntokens_ # quote problem
+    yystate = yypgoto_[yyn - tmp] + yystack.stateAt (0)
+    if (0 <= yystate
+        and yystate <= yylast_
+        and yycheck_[yystate] == yystack.stateAt (0)) :
+      yystate = yytable_[yystate]
+    else :
+      yystate = yydefgoto_[yyn - tmp]
+
+    yystack.push (yystate, yyval]b4_locations_if([, yyloc])[)
+    return YYNEWSTATE
+  # end yyaction
+
+  ##################################################
+  # Debug output for rule reduction
+  # Report on the debug stream that the rule yyrule is going to be reduced.
+  ##################################################
+
+  def yy_reduce_print (self, yyrule, yystack) :
+    if (self.yydebug == 0) :
+      return
+
+    yylno = yyrline_[yyrule]
+    yynrhs = yyr2_[yyrule]
+    # Print the symbols being reduced, and their result.
+    self.yycdebug ("Reducing stack by rule " + str(yyrule - 1)
+               + " (line " + str(yylno) + "), ")
+
+    # The symbols being reduced.
+    for yyi in range(yynrhs) :
+      self.yy_symbol_print ("   $" + str(yyi + 1) + " =",
+                       yystos_[yystack.stateAt(yynrhs - (yyi + 1))],
+                       ]b4_rhs_value(yynrhs, yyi + 1)b4_locations_if([,
+                       b4_rhs_location(yynrhs, yyi + 1)])[)
+  # end yy_reduce_print
+
+
+]b4_push_if([],[[
+
+  # Primary push parser API method.
+  # Parse input from the scanner that was specified at object construction
+  # time.  Return whether the end of the input was reached successfully.
+  #
+  # @@return <tt>True</tt> if the parsing succeeds.
+  # Note that this does not imply that there were no syntax errors.
+
+  def  parse (self) :
+    ]b4_locations_if([    # @@$.
+])
+[]b4_define_state[
+]])[
+]b4_push_if([[
+
+  # Primary push parser API method
+  # Push parse given input from an external lexer.
+  # Position provided rather than Location.
+  #
+  # @@param yylextoken current token
+  # @@param yylexval current lval
+  # @@param (Optional) location=None current position;
+  #ignored if location tracking is disabled.
+  #
+  # @@return <tt>YYACCEPT, YYABORT, YYPUSH_MORE</tt>
+
+  def push_parse (self, yylextoken, yylexval]b4_locations_if([
+                  , yylexloc])[) :
+]b4_locations_if([    # @@$.
+])[
+]])[
+]b4_push_if([[
+    if (not self.push_parse_initialized) :
+      self.push_parse_initialize ()
+      self.yycdebug ("Starting parse\n")
+      self.yyerrstatus_ = 0
+      # Initialize the stack.
+      ]b4_self[yystack.push (]b4_self[yystate,
+                             ]b4_self[yylval ]b4_locations_if(
+                             [, ]b4_self[yylloc])[)
+]m4_ifdef([b4_initial_action], [
+b4_dollar_pushdef([b4_self[]yylval], [], [b4_self[]yylloc])dnl
+      # User initialization code.
+      b4_debrace([b4_user_initial_action])
+b4_dollar_popdef[]dnl
+])[
+    else :
+      ]b4_self[label = YYGETTOKEN
+    push_token_consumed = True
+]])[
+]b4_push_if([],[[
+    self.yycdebug ("Starting parse\n")
+    self.yyerrstatus_ = 0
+    # Initialize the stack.
+    ]b4_self[yystack.push (]b4_self[yystate,
+                           ]b4_self[yylval ]b4_locations_if(
+                           [[,]b4_self[yylloc]])[)
+
+]m4_ifdef([b4_initial_action], [
+b4_dollar_pushdef([b4_self[]yylval], [], [b4_self[]yylloc])dnl
+    # User initialization code.
+    b4_debrace([b4_user_initial_action])
+b4_dollar_popdef[]dnl
+])[
+]])[
+
+    ##################################################
+    # Begin code common to push and pull parsing
+    ##################################################
+
+    while True :
+      #sys.stderr.write("label=("+str(self.label)+")="
+      #+LABELNAMES[self.label]+'\n')
+
+      # For python we need to simulate switch using if statements
+      # Because we have the enclosing while loop, we can exit the switch
+      # using continue instead of break (assumes that there is no code
+      # following the switch).
+      # switch label :
+      # New state.  Unlike in the C/C++ skeletons, the state is already
+      # pushed when we come here.
+      if ]b4_self[label == YYNEWSTATE : # case YYNEWSTATE
+        self.yycdebug ("Entering state " + str(]b4_self[yystate) + '\n')
+        if (self.yydebug > 0) :
+          ]b4_self[yystack.yyprint (self.yyDebugStream)
+
+        # Accept?
+        if (]b4_self[yystate == yyfinal_) :
+          ]b4_push_if([]b4_self[label = YYACCEPT
+          continue],
+          [return True])[
+
+        # Take a decision.  First try without lookahead.
+        # Quote problem
+        tmp = ]b4_self[yystate
+        ]b4_self[yyn = yypact_[tmp]
+        if (yy_pact_value_is_default_ (]b4_self[yyn)) :
+            ]b4_self[label = YYDEFAULT
+            continue; # break switch
+]b4_push_if([[
+        ]b4_self[label = YYGETTOKEN # Cheat to simulate fall thru
+      elif ]b4_self[label == YYGETTOKEN : # case YYGETTOKEN
+        # Read a lookahead token.
+        if (]b4_self[yychar == yyempty_) :
+          if ( not push_token_consumed) :
+            return YYPUSH_MORE
+          self.yycdebug ("Reading a token: ")
+          ]b4_self[yychar = yylextoken
+          ]b4_self[yylval = yylexval]b4_locations_if([
+          ]b4_self[yylloc = yylexloc])[
+          push_token_consumed = False]])[
+]b4_push_if([],[[
+        # Read a lookahead token.
+        if (]b4_self[yychar == yyempty_) :
+          self.yycdebug ("Reading a token: ")
+          yychar, yylval = self.yylexer.yylex()
+          ]b4_locations_if([
+          yylloc = ]b4_location_type[ (self.yylexer.getStartPos (),
+                                       self.yylexer.getEndPos ())])[
+]])[
+        # Convert token to internal form.
+        if (]b4_self[yychar <= EOF) :
+          ]b4_self[yychar = EOF
+          ]b4_self[yytoken = EOF
+          self.yycdebug ("Now at end of input.\n")
+        else :
+          ]b4_self[yytoken = yytranslate_ (]b4_self[yychar)
+          self.yy_symbol_print ("Next token is",
+                                ]b4_self[yytoken,
+                                ]b4_self[yylval
+                                ]b4_locations_if([, ]b4_self[yylloc])[
+                                )
+
+        # If the proper action on seeing token YYTOKEN is to reduce or to
+        # detect an error, then take that action.
+        ]b4_self[yyn += ]b4_self[yytoken
+        tmp = ]b4_self[yyn # Quote problem
+        if (]b4_self[yyn < 0 
+            or yylast_ < ]b4_self[yyn
+            or yycheck_[tmp] != ]b4_self[yytoken) :
+          ]b4_self[label = YYDEFAULT
+
+        # <= 0 means reduce or error.
+        elif (yytable_[tmp] <= 0) :
+          ]b4_self[yyn = yytable_[tmp]
+          if (yy_table_value_is_error_ (]b4_self[yyn)) :
+            ]b4_self[label = YYERRLAB
+          else :
+            ]b4_self[yyn = -]b4_self[yyn
+            ]b4_self[label = YYREDUCE
+        else :
+          tmp = ]b4_self[yyn # Quote problem
+          ]b4_self[yyn = yytable_[tmp]
+          # Shift the lookahead token.
+          self.yy_symbol_print ("Shifting",
+                                ]b4_self[yytoken,
+                                ]b4_self[yylval]b4_locations_if(
+                                [, ]b4_self[yylloc])[)
+
+          # Discard the token being shifted.
+          ]b4_self[yychar = yyempty_
+
+          # Count tokens shifted since error after three, turn off error
+          # status.
+          if (self.yyerrstatus_ > 0) :
+              self.yyerrstatus_ -= 1
+
+          ]b4_self[yystate = ]b4_self[yyn
+          ]b4_self[yystack.push (]b4_self[yystate, ]b4_self[]dnl
+            [yylval]b4_locations_if([, ]b4_self[yylloc])[)
+          ]b4_self[label = YYNEWSTATE
+      # end case YYNEWSTATE
+
+      #-----------------------------------------------------------.
+      #| yydefault -- do the default action for the current state.  |
+      #-----------------------------------------------------------
+      elif ]b4_self[label == YYDEFAULT : #case YYDEFAULT
+        tmp = ]b4_self[yystate # Quote problem
+        ]b4_self[yyn = yydefact_[tmp]
+        if (]b4_self[yyn == 0) :
+          ]b4_self[label = YYERRLAB
+        else :
+          ]b4_self[label = YYREDUCE
+      # end case YYDEFAULT
+
+      #-----------------------------.
+      #| yyreduce -- Do a reduction.  |
+      #-----------------------------
+      elif ]b4_self[label == YYREDUCE : #case YYREDUCE
+        tmp = ]b4_self[yyn # Quote problem
+        ]b4_self[yylen = yyr2_[tmp]
+        ]b4_self[label = self.yyaction (]b4_self[yyn, ]dnl
+        b4_self[yystack, ]b4_self[yylen, self.yylexer.yyerror)
+        ]b4_self[yystate = ]b4_self[yystack.stateAt (0)
+      # end case YYREDUCE
+
+      #------------------------------------.
+      #| yyerrlab -- here on detecting error |
+      #------------------------------------
+      elif ]b4_self[label == YYERRLAB: #case YYERRLAB
+        # If not already recovering from an error, report this error.
+        if (self.yyerrstatus_ == 0) :
+          ]b4_self[yynerrs_ += 1
+          if (]b4_self[yychar == yyempty_) :
+            ]b4_self[yytoken = yyempty_
+          tmp = self.yysyntax_error (]b4_self[yystate, ]b4_self[yytoken)
+          self.yyerror (]b4_locations_if([[]b4_self[yylloc, ]])[tmp)
+
+        ]b4_locations_if([]b4_self[yyerrloc = ]b4_self[yylloc])[
+        if (self.yyerrstatus_ == 3) :
+          # If just tried and failed to reuse lookahead token after an
+          # error, discard it.
+
+          if (]b4_self[yychar <= EOF) :
+            # Return failure if at end of input.
+            if (]b4_self[yychar == EOF) :
+              ]b4_push_if([]b4_self[label = YYABORT
+              continue],
+              [return False])[
+          else :
+            ]b4_self[yychar = yyempty_
+
+        # Else will try to reuse lookahead token after
+        # shifting the error token.
+        ]b4_self[label = YYERRLAB1
+      # end case YYERRLAB
+
+      #-------------------------------------------------.
+      #| errorlab -- error raised explicitly by YYERROR.  |
+      #-------------------------------------------------
+      elif ]b4_self[label == YYERROR : #case YYERROR
+        ]b4_locations_if([]b4_self[yyerrloc = ]b4_self[]dnl
+          [yystack.locationAt (]b4_self[yylen - 1)])[
+        # Do not reclaim the symbols of the rule which action triggered
+        # this YYERROR.
+        ]b4_self[yystack.pop (]b4_self[yylen)
+        ]b4_self[yylen = 0
+        ]b4_self[yystate = ]b4_self[yystack.stateAt (0)
+        ]b4_self[label = YYERRLAB1
+      # end case YYERROR
+
+      #-------------------------------------------------------------.
+      #| yyerrlab1 -- common code for both syntax error and YYERROR.  |
+      #-------------------------------------------------------------
+      elif ]b4_self[label == YYERRLAB1 : #case YYERRLAB1
+        self.yyerrstatus_ = 3 # Each real token shifted decrements this.
+        while True :
+          tmp = ]b4_self[yystate # Quote problem
+          ]b4_self[yyn = yypact_[tmp]
+          if ( not yy_pact_value_is_default_ (]b4_self[yyn)) :
+            ]b4_self[yyn += yyterror_
+            tmp = ]b4_self[yyn # Quote problem
+            if (0 <= ]b4_self[yyn and ]b4_self[yyn <= yylast_ \
+                and yycheck_[tmp] == yyterror_) :
+              ]b4_self[yyn = yytable_[tmp]
+              if (0 < ]b4_self[yyn) :
+                break # leave while loop
+
+            # Pop the current state because it cannot handle the
+            # error token.
+            if (]b4_self[yystack.height == 0) :
+              ]b4_push_if([]b4_self[label = YYABORT
+              continue # Leave the switch],
+              [return False])[
+
+            ]b4_locations_if(
+            []b4_self[yyerrloc = ]b4_self[yystack.locationAt (0)])[
+            ]b4_self[yystack.pop (1)
+            ]b4_self[yystate = ]b4_self[yystack.stateAt (0)
+            if (self.yydebug > 0) :
+              ]b4_self[yystack.yyprint (self.yyDebugStream)
+
+        if (]b4_self[label == YYABORT) :
+          continue # Leave the switch.
+]b4_locations_if([
+        # Muck with the stack to setup for ]b4_self[yylloc.
+        ]b4_self[yystack.push (0, None, ]b4_self[yylloc)
+        ]b4_self[yystack.push (0, None, ]b4_self[yyerrloc)
+        yyloc = self.yylocation (]b4_self[yystack, 2)
+        ]b4_self[yystack.pop (2)])[
+
+        # Shift the error token.
+        tmp = ]b4_self[yyn
+        self.yy_symbol_print ("Shifting", yystos_[tmp],
+                         ]b4_self[yylval]b4_locations_if([, yyloc])[)
+
+        ]b4_self[yystate = ]b4_self[yyn
+        ]b4_self[yystack.push (]b4_self[yyn, ]b4_self[yylval]dnl
+               b4_locations_if([, yyloc])[)
+        ]b4_self[label = YYNEWSTATE
+        continue # leave the switch
+      # end case YYERRLAB1
+
+      # Accept.
+      elif ]b4_self[label == YYACCEPT : # case YYACCEPT
+        ]b4_push_if([self.push_parse_initialized = False
+        return YYACCEPT],
+        [return True])[
+      # end case YYACCEPT
+
+      # Abort.
+      elif ]b4_self[label == YYABORT: # case YYABORT
+        ]b4_push_if([self.push_parse_initialized = False
+        return YYABORT],
+        [return False])[
+      # end case YYABORT
+
+      else :
+        assert False, "Unknown State:" + str(]b4_self[label)
+
+  # end ]b4_push_if([push_parse],[parse])[
+
+
+]b4_push_if([[
+
+  # (Re-)Initialize the state of the push parser.
+
+  def push_parse_initialize(self) :
+]b4_define_state[
+
+    self.push_parse_initialized = True
+
+  # end push_parse_initialize
+]])[
+]b4_both_if([[
+  # Parse input from the scanner that was specified at object construction
+  # time.  Return whether the end of the input was reached successfully.
+  # This version of parse () is defined only when api.push-pull=both.
+  #
+  # @@return <tt>true</tt> if the parsing succeeds.
+  #          Note that this does not imply that there were no syntax errors.
+
+  def parse (self) :
+    if (self.yylexer is None) :
+      raise Exception("Null Lexer")
+    while True :
+      yytoken, yylval = self.yylexer.yylex()
+      ]b4_locations_if(
+      [yyloc = ]b4_location_type[ (self.yylexer.getStartPos (),
+                        self.yylexer.getEndPos ())])[
+      status = self.push_parse(yytoken,yylval]b4_locations_if([[,yyloc]])[)
+      if (status  != YYPUSH_MORE) :
+        break
+    return (status == YYACCEPT)
+
+  # end parse
+]])[
+
+  ##################################################
+  # Class ]b4_parser_class_name[ Internal Methods
+  ##################################################
+
+]b4_locations_if([[
+  # Construct a position for a given rhs token
+  # This must be renamed in python to avoid
+  # conflict with the yylloc variable.
+  # Rename from yylloc to yylocation
+  def yylocation (self, rhs, n) :
+    if (n > 0) :
+      return ]b4_location_type[(rhs.locationAt (n-1).begin,
+              rhs.locationAt (0).end)
+    else :
+      return ]b4_location_type[ (rhs.locationAt (0).end)
+]])[
+
+  # Print an error message via the lexer.
+  # @@param msg The error message.
+  # @@param location (Optional) The location or position
+  #                associated with the message.
+
+  def yyerror (self, ]b4_locations_if([loc_or_pos, ])[msg) :
+    ]b4_locations_if([[if (loc_or_pos is not None
+         and issubclass(loc_or_pos.__class__, ]b4_position_type[)) :
+      self.yylexer.yyerror (]b4_location_type[(loc_or_pos), msg)
+    else :
+      self.yylexer.yyerror (loc_or_pos, msg)
+  ]],[self.yylexer.yyerror (msg)])[
+  # end yyerror
+
+  def yycdebug (self, s) :
+    if (self.yydebug > 0) :
+      self.yyDebugStream.write (s+'\n')
+  # end self.yycdebug
+
+]b4_error_verbose_if([[
+  # Return YYSTR after stripping away unnecessary quotes and
+  # backslashes, so that it's suitable for yyerror.  The heuristic is
+  # that double-quoting is unnecessary unless the string contains an
+  # apostrophe, a comma, or backslash (other than backslash-backslash).
+  # YYSTR is taken from yytname.
+  def yytnamerr_ (self, yystr) :
+    yyr = ""
+    if (yystr[0] == '"')  :
+      l = len(yystr)
+      i = 1
+      while (True) :
+        if (i >= l) : break
+        c = yystr[i]
+        if(c == "'" or c == ',') :
+          break
+        if( c == '"'):
+          return yyr
+        if(c == '\\') :
+          i += 1
+          c = yystr[i]
+          if(c != '\\') :
+            break
+        yyr = yyr + c
+        i += 1
+      # end while
+    elif (yystr ==  "$end") :
+      return "end of input"
+    return yystr;
+  # end yytnamerr
+]])[
+
+  #--------------------------------.
+  #| Print this symbol on YYOUTPUT.  |
+  #--------------------------------
+
+  def yy_symbol_print (self, s, yytype, yyvaluep ]b4_locations_if(
+                                                 [, yylocationp])[) :
+    if (self.yydebug > 0) :
+      tag = " nterm "
+      if (yytype < yyntokens_) :
+        tag = " token "
+      if (yyvaluep is None) :
+        vps = "None"
+      else :
+        vps = str(yyvaluep)
+      tname = yytname_[yytype]
+      line = s + tag + tname
+      line += " ("
+      ]b4_locations_if([line += str(yylocationp)
+      line += ": "])[
+      line += vps
+      line += ')'
+      self.yycdebug (line)
+  # end yy_symbol_print
+
+  # Generate an error message.
+  def yysyntax_error (self, yystate, tok) :
+  ]b4_error_verbose_if([[
+    if (self.yyErrorVerbose) :
+      # There are many possibilities here to consider:
+      # - If this state is a consistent state with a default action,
+      #   then the only way this function was invoked is if the
+      #   default action is an error action.  In that case, don't
+      #   check for expected tokens because there are none.
+      # - The only way there can be no lookahead present (in tok) is
+      #   if this state is a consistent state with a default action.
+      #   Thus, detecting the absence of a lookahead is sufficient to
+      #   determine that there is no unexpected or expected token to
+      #   report.  In that case, just report a simple 'syntax error'.
+      # - Don't assume there isn't a lookahead just because this
+      #   state is a consistent state with a default action.  There
+      #   might have been a previous inconsistent state, consistent
+      #   state with a non-default action, or user semantic action
+      #   that manipulated ]b4_self[yychar.  (However, ]b4_self[yychar
+      #   is currently out of scope during semantic actions.)
+      # - Of course, the expected token list depends on states to
+      #   have correct lookahead information, and it depends on the
+      #   parser not to perform extra reductions after fetching a
+      #   lookahead from the scanner and before detecting a syntax
+      #   error.  Thus, state merging (from LALR or IELR) and default
+      #   reductions corrupt the expected token list.  However, the
+      #   list is correct for canonical LR with one exception: it
+      #   will still contain any token that will not be accepted due
+      #   to an error action in a later state.
+
+      if (tok  != yyempty_) :
+        # FIXME: This method of building the message is not compatible
+        # with internationalization.
+        res = "syntax error, unexpected "
+        res += (self.yytnamerr_ (yytname_[tok]))
+        tmp = ]b4_self[yystate
+        ]b4_self[yyn = yypact_[tmp]
+        if ( not yy_pact_value_is_default_ (]b4_self[yyn)) :
+          # Start YYX at -YYN if negative to avoid negative
+          # indexes in YYCHECK.  In other words, skip the first
+          # -YYN actions for this state because they are default actions.
+          yyxbegin = 0
+          if (]b4_self[yyn < 0) :
+            yyxbegin =  - ]b4_self[yyn
+          # Stay within bounds of both yycheck and yytname.
+          yychecklim = yylast_ - ]b4_self[yyn + 1
+          yyxend = yychecklim
+          if (yychecklim >= yyntokens_) :
+            yyxend = yyntokens_
+          count = 0
+          for x in range(yyxbegin,yyxend) :
+            tmp = ]b4_self[yyn
+            if (yycheck_[x + tmp] == x and x != yyterror_
+                and  not yy_table_value_is_error_ (yytable_[x + tmp])) :
+              count += 1
+          if (count < 5) :
+            count = 0
+            for x in range(yyxbegin,yyxend) :
+              tmp = ]b4_self[yyn
+              if (yycheck_[x + tmp] == x and x != yyterror_
+                  and  not yy_table_value_is_error_ (yytable_[x + tmp])) :
+                if (count == 0) :
+                  res += ", expecting "
+                else :
+                  res += " or "
+                count += 1
+                res += (self.yytnamerr_ (yytname_[x]))
+        return str(res)
+]])[
+    return "syntax error"
+  # end yysyntax_error
+
+
+]# %code provides.
+b4_percent_code_get([[provides]])[
+]# User implementation code.
+b4_percent_code_get[]dnl
+
+b4_epilogue[]dnl
+b4_output_end()
+
diff --git a/data/python-skel.m4 b/data/python-skel.m4
new file mode 100644
index 0000000..c5d2dcb
--- /dev/null
+++ b/data/python-skel.m4
@@ -0,0 +1,26 @@
+                                                            -*- Autoconf -*-
+
+# Python skeleton dispatching for Bison.
+
+# Copyright (C) 2007, 2009-2013 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+b4_glr_if(             [b4_complain([%%glr-parser not supported for Python])])
+b4_nondeterministic_if([b4_complain([%%nondeterministic-parser not supported 
for Python])])
+
+m4_define_default([b4_used_skeleton], [b4_pkgdatadir/[lalr1.py]])
+m4_define_default([b4_skeleton], ["b4_basename(b4_used_skeleton)"])
+
+m4_include(b4_used_skeleton)
diff --git a/data/python.m4 b/data/python.m4
new file mode 100644
index 0000000..09ac52d
--- /dev/null
+++ b/data/python.m4
@@ -0,0 +1,336 @@
+# Python language support for Bison
+
+# Copyright (C) 2007-2013 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+# Pretend python is c-like, but override
+# macros as needed
+m4_include(b4_pkgdatadir/[c-like.m4])
+
+##################################################
+# WARNING:
+# In Python, line breaks
+# and indentation have syntactic significance
+# so be very careful to verify that any
+# changes are correct under any set of options.
+# Also, it is a very bad idea to use tabs in any
+# python code that you write.
+##################################################
+
+m4_define([b4_remove_comma],
+  [m4_ifval(m4_quote($1), [$1, ], [])m4_shift2($@)])
+
+# b4_comment_(TEXT, OPEN, CONTINUE, END)
+# --------------------------------------
+# Put TEXT in comment.  Avoid trailing spaces: don't indent empty lines.
+# Avoid adding indentation to the first line, as the indentation comes
+# from OPEN.  That's why we don't patsubst([$1], [^\(.\)], [   \1]).
+#
+# Lines after the first are prefixed with CONTINUE; END closes the comment.
+m4_define([b4_comment_],
+[$2[]m4_bpatsubst(m4_expand([[$1]]), [
+\(.\)], [
+$3\1])$4])
+
+# b4_comment(TEXT, [PREFIX])
+# --------------------------
+# Put TEXT in comment. Prefix all the output lines with PREFIX.
+m4_define([b4_comment],
+[b4_comment_([$1], [$2# ], [$2# ])])
+
+# %name-prefix
+m4_define_default([b4_prefix], [[YY]])
+
+b4_percent_define_default([[parser_class_name]], [b4_prefix[]Parser])
+m4_define([b4_parser_class_name],
+  [b4_percent_define_get([[parser_class_name]])])
+
+b4_percent_define_default([[api.location.type]], [Location])
+m4_define([b4_location_type],
+  [b4_percent_define_get([[api.location.type]])])
+
+b4_percent_define_default([[api.position.type]], [Position])
+m4_define([b4_position_type],
+  [b4_percent_define_get([[api.position.type]])])
+
+# b4_percent_define_get3(DEF, PRE, POST, NOT)
+# -------------------------------------------
+# Expand to the value of DEF surrounded by PRE and POST if it's %define'ed,
+# otherwise NOT.
+m4_define([b4_percent_define_get3],
+          [m4_ifval(m4_quote(b4_percent_define_get([$1])),
+                [$2[]b4_percent_define_get([$1])[]$3], [$4])])
+
+# Override table declaring macros because of indentation issues
+
+# b4_integral_python_parser_table_define(TABLE-NAME, CONTENT, COMMENT)
+# -------------------------------------------------------------
+# Define "yy<TABLE-NAME>_" whose contents is CONTENT.
+m4_define([b4_integral_python_parser_table_define],
+[yy$1_ = (
+  $2
+  )dnl
+])
+
+
+m4_define([b4_python_parser_tables_define],
+[b4_integral_parser_tables_map([b4_integral_python_parser_table_define])])
+
+
+# -----------------
+# Utility macros
+
+m4_define([b4_param_decls],
+          [m4_map([b4_param_decl], address@hidden)])
+m4_define([b4_param_decl], [, $1])
+
+m4_define([b4_param_calls],
+          [m4_map([b4_param_call], address@hidden)])
+m4_define([b4_param_call], [, $1])
+
+m4_define([b4_constructor_calls],
+          [m4_map([b4_constructor_call], address@hidden)])
+m4_define([b4_constructor_call],
+          [    self.$1 = $1
+])
+
+#-------------------------
+
+# b4_lex_param
+# b4_parse_param
+# --------------
+# If defined, b4_lex_param arrives double quoted, but below we prefer
+# it to be single quoted.  Same for b4_parse_param.
+
+# TODO: should be in bison.m4
+m4_define_default([b4_lex_param], [[]])
+m4_define([b4_lex_param], b4_lex_param)
+m4_define([b4_parse_param], b4_parse_param)
+
+########################
+# Since python is untyped, %lex-param and %parse-param
+# deliver the following format to the skeleton.
+# So it is necessary to untangle this (Ugh!).
+# [[lex_param1]], [[lex_param1]],
+# [[lex_param2]], [[lex_param2]]
+# and
+# |[[parse_param1]], [[parse_param1]],
+# [[parse_param2]], [[parse_param2]]
+# to
+# [lex_param1, lex_param2]
+# and
+# [parse_param1, parse_param2]
+#
+
+m4_define([b4_param_fix],
+  [m4_bpatsubst(m4_normalize(m4_foreach([x],[$1],[ x])),[[ ]],[, ])])
+
+########################
+
+
+# b4_lex_param_decl
+# -----------------
+# Extra formal arguments of the Lexer constructor.
+m4_define([b4_lex_param_decl],
+[m4_ifset([b4_lex_param],
+          [b4_remove_comma([$1],
+                           b4_param_decls(b4_lex_param))],
+          [$1])])
+
+# b4_parse_param_decl
+# -------------------
+# Extra formal arguments of the constructor.
+m4_define([b4_parse_param_decl],
+[m4_ifset([b4_parse_param],
+          [b4_remove_comma([$1],
+                           b4_param_decls(b4_parse_param))],
+          [$1])])
+
+# b4_lex_param_call
+# -----------------
+# Delegating the lexer parameters to the lexer constructor.
+m4_define([b4_lex_param_call],
+          [m4_ifset([b4_lex_param],
+                    [b4_remove_comma([$1],
+                                     b4_param_calls(b4_lex_param))],
+                    [$1])])
+
+
+# b4_parse_param_cons
+# -------------------
+# Extra initialisations of the constructor.
+m4_define([b4_parse_param_cons],
+          [m4_ifset([b4_parse_param],
+                    [b4_constructor_calls(b4_parse_param)])])
+
+# b4_token_define(TOKEN-NUM)
+# --------------------------
+# Output the definition of this token as #define.
+m4_define([b4_token_define],
+[b4_token_format([%s = %s], [$1])])
+
+# b4_token_defines
+# ----------------
+# Output the definition of the tokens.
+m4_define([b4_token_defines],
+[b4_any_token_visible_if([# Tokens.
+m4_join([
+], b4_symbol_map([b4_token_define]))
+])])
+
+# b4_flag_value(BOOLEAN-FLAG)
+# ---------------------------
+m4_define([b4_flag_value], [b4_flag_if([$1], [True], [False])])
+
+# b4_identification
+# -----------------
+m4_define([b4_identification],
+[  # Version number for the Bison executable that generated this parser.
+  bisonVersion = "b4_version"
+
+  # Name of the skeleton that generated this parser.
+  bisonSkeleton = b4_skeleton
+])
+
+# b4_null
+# -------
+m4_define([b4_null], [None])
+
+# b4_lexer_if(TRUE, FALSE)
+# ------------------------
+# True if parser contains %code lexer {...} else false
+m4_define([b4_lexer_if],
+[b4_percent_code_ifdef([[lexer]], [$1], [$2])])
+
+# b4_typed_parser_table_define(TYPE, NAME, DATA, COMMENT)
+# -------------------------------------------------------
+m4_define([b4_typed_parser_table_define],
+[m4_ifval([$4], [  b4_comment([$4])
+])dnl
+  [yy$2_ = (]$3[)]
+])
+
+
+# b4_debrace([CODE])
+#-----------------------------------------------------
+# Sometimes, the code has the surrounding {...}
+# left in. For python, remove these.
+m4_define([b4_debrace],[m4_translit($1,[{}])])
+
+
+# b4_integral_parser_table_define(NAME, DATA, COMMENT)
+#-----------------------------------------------------
+m4_define([b4_integral_parser_table_define],
+[b4_typed_parser_table_define(None, [$1], [$2], [$3])])
+
+# b4-case(ID, CODE)
+# -----------------
+m4_define([b4_case], [    elif yyn == [$1] :
+m4_bpatsubst(m4_bpatsubst(m4_bpatsubst($2,
+                                        [^\([ \\t\\n]*\){],[\1]),
+                           [}[ \\t\\n]*$]),
+              [^],[      ])
+])
+
+m4_define([b4_casex], [    elif yyn == [$1] :
+      m4_bpatsubst(
+        m4_bpatsubst(
+          [}[ \\t\\n]*$]),
+      [\\n],[\\n          ])
+])
+
+## ------------------------- ##
+## Assigning token numbers.  ##
+## ------------------------- ##
+
+# b4_token_enum(TOKEN-NUM)
+# ------------------------
+# Output the definition of this token as an enum.
+m4_define([b4_token_enum],
+[b4_token_format([    [#]Token number, to be returned by the scanner
+    %s = %s
+], [$1])])
+
+# b4_token_enums
+# --------------
+# Output the definition of the tokens (if there are) as enums.
+m4_define([b4_token_enums],
+[b4_any_token_visible_if([[#] Tokens.
+b4_symbol_foreach([b4_token_enum])])])
+
+# b4_lhs_value([TYPE])
+# --------------------
+# Expansion of $<TYPE>$.
+m4_define([b4_lhs_value], [yyval])
+
+
+# b4_rhs_value(RULE-LENGTH, NUM, [TYPE])
+# --------------------------------------
+# Expansion of $<TYPE>NUM, where the current rule has RULE-LENGTH
+# symbols on RHS.
+#
+# In this simple implementation, %token and %type have class names
+# between the angle brackets. For python, the type is ignored
+m4_define([b4_rhs_value],[(yystack.valueAt ($1-($2)))])
+
+# b4_lhs_location()
+# -----------------
+# Expansion of @$.
+m4_define([b4_lhs_location],
+[(yyloc)])
+
+
+# b4_rhs_location(RULE-LENGTH, NUM)
+# ---------------------------------
+# Expansion of @NUM, where the current rule has RULE-LENGTH symbols
+# on RHS.
+m4_define([b4_rhs_location],
+[yystack.locationAt ($1-($2))])
+
+m4_define([b4_yystype], [b4_percent_define_get([[api.value.type]])])
+b4_percent_define_default([[api.value.type]], [[object]])
+
+## -------------------------- ##
+## Push/Pull/Both interfaces. ##
+## -------------------------- ##
+
+# Setup some macros for api.push-pull.
+b4_percent_define_default([[api.push-pull]], [[pull]])
+b4_percent_define_check_values([[[[api.push-pull]],
+                                 [[pull]], [[push]], [[both]]]])
+
+# Define m4 conditional macros that encode the value
+# of the api.push-pull flag.
+b4_define_flag_if([pull]) m4_define([b4_pull_flag], [[1]])
+b4_define_flag_if([push]) m4_define([b4_push_flag], [[1]])
+m4_case(b4_percent_define_get([[api.push-pull]]),
+        [pull], [m4_define([b4_push_flag], [[0]])],
+        [push], [m4_define([b4_pull_flag], [[0]])])
+
+# Define a macro to be true when api.push-pull has the value 'both'.
+m4_define([b4_both_if],[b4_push_if([b4_pull_if([$1],[$2])],[$2])])
+
+# Handle BISON_USE_PUSH_FOR_PULL for the test suite.  So that push parsing
+# tests function as written, do not let BISON_USE_PUSH_FOR_PULL modify the
+# behavior of Bison at all when push parsing is already requested.
+b4_define_flag_if([use_push_for_pull])
+b4_use_push_for_pull_if([
+  b4_push_if([m4_define([b4_use_push_for_pull_flag], [[0]])],
+             [m4_define([b4_push_flag], [[1]])])])
+
+# For python, when doing push parsing, we need to use self.
+b4_push_if([m4_define([b4_self],[[self.]])],[m4_define([b4_self],[])])
+
+
diff --git a/tests/python.at b/tests/python.at
new file mode 100644
index 0000000..0949aaf
--- /dev/null
+++ b/tests/python.at
@@ -0,0 +1,1620 @@
+# Checking Python Parsing.                            -*- Autotest -*-
+## Copyright (C) 2013 Free Software Foundation, Inc.
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+##################################################
+# The Python parser tests are intended to
+# duplicate those in python.at with a couple of
+# additions to test basic operation and test
+# python specific features.
+##################################################
+
+##################################################
+# General Support Utilities
+##################################################
+
+# AT_NORMALIZE([STRING])
+# Clean up string
+# -------------------------------------
+
+m4_define([AT_NORMALIZE],[m4_normalize([$1])])
+
+# AT_DECOMMIFY([STRING])
+# Remove all commas from string
+# -------------------------------------
+m4_define([AT_DECOMMIFY],[m4_bpatsubst([$1],[,][ ])])
+
+# AT_COMMIFY([STRING])
+# insert ', ' in place of whitespace.
+# Note, if no leading whitespace, then no leading comma
+# -------------------------------------
+m4_define([AT_COMMIFY],[m4_bpatsubst([$1],[[ ][ ]*],[, ])])
+
+# AT_UNCOMMIFY1([STRING])
+# Remove leading comma from string; assumes input
+# came from commify
+# -------------------------------------
+m4_define([AT_UNCOMMIFY1],[m4_bpatsubst([$1],[^,[ ]*])])
+
+# AT_COMMIFY_EXCEPT([STRING])
+# Prepend comma to each element in string except first
+# -------------------------------------
+m4_define([AT_COMMIFY_EXCEPT],[AT_UNCOMMIFY1(AT_COMMIFY([$1]))])
+
+# AT_NONE_LIST([PARAMS]
+#------------------------------------------
+# Given a comma separated list of length n,
+# generate a comma separated list of instances
+# of 'None' of length n. Purpose is to provide
+# synthetic arguments for calling functions
+# that require lex or parse params
+m4_define([AT_NONE_LIST],[m4_map_sep([_AT_NONE],[, ],[$1])])
+m4_define([_AT_NONE],[None])
+
+# AT_MAIN_PARAMS([LEX-PARAMS],[PARSE-PARAMS])
+#------------------------------------------
+# Generate a call list for the main() function
+# without a leading comma.
+
+# Aux macros
+m4_define([_AT_CLEAN],[AT_COMMIFY(AT_NORMALIZE([$1]))])
+m4_define([_AT_ADDPARAMS],[m4_if([$1],[],[],[_AT_CLEAN([$1])])])
+m4_define([_AT_ADDCOMMA],[m4_if([$1],[],[],[m4_if([$2],[],[],[, ])])])
+
+m4_define([AT_MAIN_PARAMS],
+[_AT_ADDPARAMS([$1])[]_AT_ADDCOMMA([$1],[$2])[]_AT_ADDPARAMS([$2])])
+
+
+# AT_CHECK_PYTHON_GREP([FILE], [LINE], [COUNT=1])
+# -------------------------------------
+# Check that specified file contains exactly COUNT lines matching ^LINE$
+# with grep.
+m4_define([AT_CHECK_PYTHON_GREP],
+[AT_CHECK_UNQUOTED([grep -c '$2' $1], [ignore], [m4_default([$3], [1])
+])])
+
+# AT_BOTHPARAM_IF([TRUE],[FALSE])
+# -------------------------------------
+# Execute the true arm if both AT_LEXPARAM_IF and AT_PARAM_IF are true.
+m4_define([AT_BOTHPARAM_IF],
+          [AT_LEXPARAM_IF([AT_PARAM_IF([$1],[$2])],[$2])])
+
+# AT_CHECK_PYTHON_GREP_FILE([FILE], [PATTERN], [COUNT=1])
+# -------------------------------------
+# Check, with grep, that specified file contains exactly
+# COUNT lines matching the contents of file PATTERN; SRC is
+# assumed to contain only a single, newline terminated line
+# containing a line to match. Fgrep is used instead of grep
+# because the PATTERN may contain arbitrary characters
+m4_define([AT_CHECK_PYTHON_GREP_FILE],
+[AT_CHECK_UNQUOTED([fgrep -c -f $2 $1], [ignore], [m4_default([$3], [1])
+])])
+
+# AT_FIX_EXPOUT([VERBOSE-AND-LOCATED-ERROR-MESSAGE],
+#               [KEEPLOCATION],[KEEPVERBOSE], [NOEOL])
+# --------------------------------------------------------------
+# Normalize the observed and expected error messages, depending upon the
+# options. KEEPLOCATION is 0 (or null) if any location info in the
+# error message should be removed. Similarly, KEEPVERBOSE is 0
+# if any verbose output info in the error message should be removed.
+# If NOEOL is non null, then do not add eol to the error msg.
+m4_define([AT_FIX_EXPOUT],[[
+# 1. Create the reference error message.
+]m4_if([$4],[],
+AT_DATA([[expout]],
+[[$1]
+]),AT_DATA([[expout]],
+[[$1]]))[
+# 2. If KEEPLOCATION != 1, then remove location info.
+]m4_if([$2],[1],[],
+[[sed 's/^[-0-9.]*:[ ]*//' expout >at-expout
+mv at-expout expout]])[
+# 3. If KEEPVERBOSE != 1, then remove verbose info.
+]m4_if([$3],[1],[],
+[[sed 's/syntax error, .*$/syntax error/' expout >at-expout
+mv at-expout expout]])[
+]])
+
+#AT_MODULE([MODULENAME])
+#-------------------------
+# Define the module name or default to YYParser.
+
+m4_define([AT_MODULE],[m4_if([$1],[],[YYParser],[$1])])
+
+##################################################
+# Test Macros
+##################################################
+
+# Define a single copy of the Calculator grammar.
+m4_define([AT_PYTHON_CALC_BODY],[
+
+/* Bison Declarations */
+%token <Integer> NUM "number"
+%type  <Integer> exp
+
+%nonassoc '=' /* comparison            */
+%left '-' '+'
+%left '*' '/'
+%left NEG     /* negation--unary minus */
+%right '^'    /* exponentiation        */
+
+/* Grammar follows */
+%%
+input:
+  line
+| input line
+;
+
+line:
+  '\n'
+| exp '\n'
+        {sys.stdout.write("total = " + str(int([$]1)) +'\n')}
+| error '\n'
+;
+
+exp:
+  NUM                { [$]$ = [$]1}
+| exp '=' exp
+  {
+    if ([$]1 != [$]3) :
+      yyerror (AT_LOCATION_IF(address@hidden, ]])
+               "calc: error: " + str([$]1) + " != " + str([$]3));
+  }
+| exp '+' exp
+    { [$]$ = [$]1 + [$]3 }
+| exp '-' exp
+    { [$]$ = [$]1 - [$]3 }
+| exp '*' exp
+    { [$]$ = [$]1 * [$]3 }
+| exp '/' exp
+    { [$]$ = [$]1 / [$]3 }
+| '-' exp  %prec NEG
+    { [$]$ = -[$]2 }
+| exp '^' exp
+    { [$]$ = math.pow([$]1,[$]3) }
+| '(' exp ')'        { [$]$ = [$]2}
+| '(' error ')'      { [$]$ = (1111)}
+| '!'
+       {
+       [$]$ = (0)
+       return YYERROR
+       }
+| '-' error
+      {
+      [$]$ = (0)
+      return YYERROR
+      }
+;
+])
+
+# Define the calculator input.
+# Warning: if you change the input file
+# then the locations test file position numbers
+# may be incorrect and you will have
+# to modify that file as well.
+
+m4_define([AT_CALC_DATA],
+[[1 + 2 * 3 = 7
+1 + 2 * -3 = -5
+
+-1^2 = -1
+(-1)^2 = 1
+
+---1 = -1
+
+1 - 2 - 3 = -4
+1 - (2 - 3) = 2
+
+2^2^3 = 256
+(2^2)^3 = 64
+]])
+
+##################################################
+# Create a tokenizer that has the following properties:
+# 1. recognizes the tokens in the Calculator grammar
+# 2. produces line and row numbers
+# One could consider using the tokenize module,
+# but it suppresses empty lines and that changes
+# the parser debug output. Requires token and
+# string modules.
+
+m4_define([AT_TOKENIZER],[[
+def tokenizer(text) :
+  # Append a zero character to signal EOF
+  text += '\0'
+  tokens = []
+  line = 1
+  row = 0
+  index = 0
+  while True :
+    c = text[index]
+    if c == '\0' : break
+    elif c == ' ' :
+      row += 1
+      index += 1
+      continue
+    elif c == '\n' :
+      line += 1
+      row = 0
+      tokens.append((token.NEWLINE, c, (line, row), (line,row+1)))
+    elif string.find("-+=*^()!",c) >= 0 :
+      tokens.append((token.OP, c, (line, row), (line,row+1)))
+    elif string.find("0123456789",c) >= 0 :
+      saveline = line
+      saverow = row
+      number = c
+      while True :
+        index += 1
+        row += 1      
+        c = text[index]
+        if string.find("0123456789",c) < 0 : break;
+        number += c
+      # end while
+      index -= 1 # backup
+      row -= 1   
+      tokens.append((token.NUMBER,number,(saveline,saverow),(line,row)))
+    else :
+      tokens.append((token.NAME,c,(line,row),(line,row+1)))
+    index += 1
+    row += 1
+  # end while
+  assert c == '\0'
+  tokens.append((token.ENDMARKER,'\0',(line,row),(line,row)))
+  return tokens
+]])
+
+# WARNING: watch the indentation
+m4_define([AT_LEXER_BODY],[[
+  def __init__ (self) :
+]AT_LOCATION_IF([[
+    self.yypos = Position (1, 0)
+]])[
+    self.tokens = tokenizer(sys.stdin.read())
+    self.ntokens = len(self.tokens)
+    self.index = 0
+
+  def yyerror (self, ]AT_LOCATION_IF([[location, ]])[msg) :
+    s = msg
+    ]AT_LOCATION_IF([[
+    if location is not None :
+      s = str(location) + ": " + s]])[
+    sys.stderr.write(s+'\n')
+
+  def yylex (self) :
+    while (True) :
+      if (self.index >= self.ntokens) :
+        return (EOF, None)
+      type, text, start, end = self.tokens[self.index]
+      self.index += 1
+      ]AT_LOCATION_IF([[self.yypos = Position (start[0], start[1]+1)]])[
+      if type == token.NEWLINE :
+        return (ord('\n'), None)
+      elif type == token.NUMBER :
+        return (NUM, int(text))
+      elif type == token.OP :
+        return (ord(text[0]), None)
+      elif type == token.NAME : # Return the first character
+        return (ord(text[0]), None)          
+      elif type == token.ENDMARKER : # EOF
+        return (EOF,None)
+      else :
+       pass        
+  # end yylex
+]AT_LOCATION_IF([[
+  def getStartPos(self) :
+    return self.yypos
+
+  def getEndPos(self) :
+     return self.yypos
+]])[
+]])
+
+# AT_POSITION_CLASS([CLASSNAME])
+#-------------------------
+# Default Position Class
+m4_define([AT_POSITION_CLASS],
+[[
+class ]m4_if([$1],[],[Position],[$1])[ :
+
+  def __init__ (self, l, t) :
+    self.line = l
+    self.token = t
+
+  def __str__ (self) :
+    return str(self.line) + '.' + str(self.token)
+
+  def lineno (self) :
+    return self.line
+
+  def token (self) :
+    return self.token
+
+  def __eq__(self, other):
+    if isinstance(other,]m4_if([$1],[],[Position],[$1])[):
+      return self.line == other.line and self.token == other.token
+    return NotImplemented
+
+  def __ne__(self, other):
+    result = (self == other)
+    if result is NotImplemented:
+      return result
+    return not result
+
+]])
+
+m4_define([AT_IMPORTS],
+[%code imports {
+import os
+import token
+import math
+import re
+import string
+}])
+
+# AT_MIN_LEXER_BODY([LEXPARAMS])
+# --------------------------------------------------------------
+# Run minimal parser with specified directives and lex params
+
+m4_define([AT_MIN_LEXER_BODY],[[
+  def __init__(self]AT_LEXPARAM_IF([, AT_COMMIFY(AT_NORMALIZE($1))])[):
+    self.count=0
+  def getStartPos(self) : return Position(0,0)
+  def getEndPos(self) : return Position(0,0)
+  def yyerror (self, ]AT_LOCATION_IF([[loc ,]])[ s) :
+    sys.stderr.write (]AT_LOCATION_IF([str(loc) + ": " +])[ s + '\n')
+  def yylex (self) :
+    if self.count == 0 :
+      token = (]AT_TOKEN_PREFIX[END, None)
+    else :
+      token = (EOF, None)
+    self.count += 1
+    return token
+]])
+
+##################################################
+# Tests
+##################################################
+
+##################################################
+
+AT_BANNER([[Python pull parsing Tests]])
+
+AT_SETUP([[Python Calculator: Error detection]])
+
+AT_BISON_OPTION_PUSHDEFS([%locations %error-verbose])
+
+
+AT_DATA([Calc.y],[[
+%language "Python"
+%locations
+%error-verbose
+%name-prefix "Calc"
+%define parser_class_name {Calc}
+]AT_IMPORTS[
+%code {
+]AT_POSITION_CLASS[
+class CalcLexer (Lexer) :
+]AT_LEXER_BODY[
+}
+]AT_PYTHON_CALC_BODY[
+%%
+]AT_TOKENIZER[
+
+def main() :
+  yylexer = CalcLexer()
+  calc = Calc(yylexer=yylexer)
+  #calc.setDebugLevel(1)
+  calc.parse ()
+  sys.exit(0)
+
+if __name__ == '__main__' :
+  main()
+]])
+
+
+m4_define([AT_SYNTAX_TEST],[
+rm -f input
+AT_DATA([input],m4_if([$1],[],[],[$1
+]))
+AT_PYTHON_PARSER_CHECK([Calc.py <input],0,[stdout-nolog],[stderr-nolog])
+AT_FIX_EXPOUT([$2],[$3],[$4],[$5])
+AT_CHECK_PYTHON_GREP_FILE([stderr], [expout])
+])
+
+AT_BISON_CHECK([-o Calc.py Calc.y])
+
+# Test for some syntax errors.
+AT_SYNTAX_TEST([0 0],[1.3: syntax error, unexpected number],[1],[1])
+AT_SYNTAX_TEST([1 / / 2],
+[1.5: syntax error, unexpected '/', expecting number or '-' or '(' or '!'],
+[1],[1])
+AT_SYNTAX_TEST([error],[1.1: syntax error, unexpected $undefined],[1],[1])
+AT_SYNTAX_TEST([1 = 2 = 3],[1.7: syntax error, unexpected '='],[1],[1])
+AT_SYNTAX_TEST([
++1],[2.2: syntax error, unexpected '+'],[1],[1])
+AT_SYNTAX_TEST([],[1.1: syntax error, unexpected end of input],[1],[1])
+
+# Exercise the error token: without it, we die at the first error,
+# hence be sure to
+#
+# - have several errors which exercise different shift/discardings
+#   - (): nothing to pop, nothing to discard
+#   - (1 + 1 + 1 +): a lot to pop, nothing to discard
+#   - (* * *): nothing to pop, a lot to discard
+#   - (1 + 2 * *): some to pop and discard
+#
+# - test the action associated to 'error'
+#
+# - check the lookahead that triggers an error is not discarded
+#   when we enter error recovery.  Below, the lookahead causing the
+#   first error is ")", which is needed to recover from the error and
+#   produce the "0" that triggers the "0 != 1" error.
+#
+m4_pushdef([TMP],[() + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1])
+AT_SYNTAX_TEST(TMP,
+[1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!']
+,[1],[1],[1])
+AT_SYNTAX_TEST(TMP,
+[1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!']
+,[1],[1],[1])
+AT_SYNTAX_TEST(TMP,
+[1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!']
+,[1],[1],[1])
+AT_SYNTAX_TEST(TMP,
+[1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!']
+,[1],[1],[1])
+AT_SYNTAX_TEST(TMP,
+[1.1-1.46: calc: error: 4444 != 1]
+,[1],[1],[1])
+m4_popdef([TMP])
+
+
+# The same, but this time exercising explicitly triggered syntax errors.
+# POSIX says the lookahead causing the error should not be discarded.
+
+m4_pushdef([TMP],[(!) + (0 0) = 1])
+AT_SYNTAX_TEST(TMP,
+[1.10: syntax error, unexpected number]
+,[1],[1],[1])
+AT_SYNTAX_TEST(TMP,
+[1.1-1.15: calc: error: 2222 != 1]
+,[1],[1],[1])
+m4_popdef([TMP])
+
+m4_pushdef([TMP],[(- *) + (0 0) = 1])
+AT_SYNTAX_TEST(TMP,
+[1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!']
+,[1],[1],[1])
+AT_SYNTAX_TEST(TMP,
+[1.12: syntax error, unexpected number]
+,[1],[1],[1])
+AT_SYNTAX_TEST(TMP,
+[1.1-1.17: calc: error: 2222 != 1]
+,[1],[1],[1])
+m4_popdef([TMP])
+
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+##################################################
+# Compile calculator grammar with various combinations
+# of %locations and %error-verbose.
+##################################################
+
+AT_SETUP([Calculator %locations and/or %error-verbose])
+
+AT_DATA([input],[AT_CALC_DATA])
+
+AT_DATA([expout],[total = 7
+total = -5
+total = -1
+total = 1
+total = -1
+total = -4
+total = 2
+total = 256
+total = 64
+])
+
+# AT_CALC_TEST([DIRECTIVES])
+# --------------------------------------------------------------
+# Run the calculator with specified directives
+m4_define([AT_CALC_TEST],[[
+]AT_DATA([Calc.y],[[
+%language "Python"
+%name-prefix "Calc"
+%define parser_class_name {Calc}
+]$1[
+]AT_IMPORTS[
+%code {
+]AT_POSITION_CLASS[
+class CalcLexer (Lexer) :
+]AT_LEXER_BODY[
+}
+]AT_PYTHON_CALC_BODY[
+%%
+]AT_TOKENIZER[
+
+def main() :
+  yylexer = CalcLexer()
+  calc = Calc(yylexer=yylexer)
+  #calc.setDebugLevel(1)
+  calc.parse ()
+  sys.exit(0)
+
+if __name__ == '__main__' :
+  main()
+]])[
+]AT_BISON_CHECK([-o Calc.py Calc.y])[
+]AT_PYTHON_PARSER_CHECK([Calc.py <input],0,[stdout-nolog],[stderr-nolog])[
+]AT_CHECK([cat stdout], [ignore], [expout], [ignore-nolog])[
+]])
+
+AT_BISON_OPTION_PUSHDEFS
+
+AT_CALC_TEST([%error-verbose
+])
+AT_BISON_OPTION_PUSHDEFS([%locations])
+AT_CALC_TEST([%locations
+])
+AT_CALC_TEST([%locations
+%error-verbose
+])
+
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+##################################################
+# Compile a minimal parser to test various
+# directives.
+##################################################
+
+AT_SETUP([Minimal Parser])
+
+AT_DATA([input],[
+AT_CALC_DATA
+])
+
+# AT_DIRECTIVES_TEST1([DIRECTIVES],[LEXPARAMS],[PARSEPARAMS],[MODULE])
+# --------------------------------------------------------------
+# Generate, compile and run a minimal parser built with the extra
+# DIRECTIVES.  LEXPARAMS and PARSEPARAMS are the %lex-param and
+# %parse-param names in use; MODULE names the generated parser
+# module (AT_MODULE supplies the default when it is empty).
+
+m4_define([AT_DIRECTIVES_TEST1],[
+AT_BISON_OPTION_PUSHDEFS([$1])
+AT_DATA([]AT_MODULE([$4])[.y],[[
+%language "Python"
+%debug
+%token-table
+%token END "end"
+$1
+]AT_LEXPARAM_IF([[
+%code lexer {
+]AT_MIN_LEXER_BODY([$2])[
+}
+]])[
+%%
+start: END {pass};
+%%
+]AT_POSITION_CLASS([Position])[
+]AT_LEXPARAM_IF([],[[
+class YYLexer :
+]AT_MIN_LEXER_BODY([$2])[
+]])[
+
+def main() :
+  p = ]AT_MODULE([$4])[(]AT_LEXPARAM_IF(
+[[]AT_NONE_LIST([[]_AT_CLEAN([$2])[]])[]])[]AT_LEXPARAM_IF(
+[],[[YYLexer()]])[)
+  p.parse()
+]])
+AT_BISON_CHECK([-o ]AT_MODULE([$4])[.py ]AT_MODULE([$4])[.y])
+AT_PYTHON_PARSER_CHECK([]AT_MODULE([$4])[.py],0,[stdout],[stderr])
+])
+
+# Test that using %locations & %error-verbose compiles
+AT_DIRECTIVES_TEST1([%debug
+%locations
+%error-verbose
+])
+
+AT_DIRECTIVES_TEST1([%debug
+%locations
+%error-verbose
+%lex-param {lexparam}
+])
+
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+# ------------------------------------- #
+# Python parser class and package names.  #
+# ------------------------------------- #
+
+AT_SETUP([Python parser class and package names])
+
+AT_DIRECTIVES_TEST1([
+%name-prefix "Prefix"
+],[],[],[PrefixParser])
+AT_CHECK_PYTHON_GREP([PrefixParser.py],[^class PrefixParser])
+
+AT_DIRECTIVES_TEST1([
+%define parser_class_name {ParserClassName}
+],[],[],[ParserClassName])
+AT_CHECK_PYTHON_GREP([ParserClassName.py],[^class ParserClassName])
+
+AT_DIRECTIVES_TEST1([
+%define api.token.prefix {TOK_}
+])
+AT_CHECK_PYTHON_GREP([YYParser.py],[^TOK_END *[=] *[[0-9][0-9]*] *$])
+
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+# ---------------------------------------- #
+# Python parser class inheritance          #
+# ---------------------------------------- #
+
+AT_SETUP([Python parser class inheritance])
+
+AT_DIRECTIVES_TEST1([
+%define extends {object}
+])
+AT_CHECK_PYTHON_GREP([YYParser.py],[^class YYParser[ ]*(object)])
+
+AT_DIRECTIVES_TEST1([
+])
+AT_CHECK_PYTHON_GREP([YYParser.py],[^  def __init__[ ]*(self,[ ]*yylexer)])
+AT_DIRECTIVES_TEST1([
+%parse-param {parseparam1}
+],
+[],
+[parseparam1]
+)
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^  def __init__[ ]*(self,[ ]*yylexer,[ ]*parseparam1[ ]*)])
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^    self.parseparam1 = parseparam1[ ]*$])
+
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+# -------------------------------- #
+# Python %parse-param and %lex-param #
+# -------------------------------- #
+
+AT_SETUP([Python %parse-param and %lex-param])
+
+AT_DIRECTIVES_TEST1([
+%parse-param {parseparam1}
+],
+[],
+[parseparam1]
+)
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^  def __init__ *(self, *yylexer, *parseparam1 *)])
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^    self.parseparam1 = parseparam1[ ]*$])
+
+AT_DIRECTIVES_TEST1([
+%parse-param {parseparam1}
+%parse-param {parseparam2}
+],
+[],
+[parseparam1 parseparam2]
+)
+AT_CHECK_PYTHON_GREP([YYParser.py],
+[^  def __init__ *(self, *yylexer, *parseparam1, *parseparam2 *)])
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^    self.parseparam1 = parseparam1[ ]*$])
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^    self.parseparam2 = parseparam2[ ]*$])
+
+AT_DIRECTIVES_TEST1([
+%lex-param {lexparam1}
+],
+[lexparam1]
+)
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^  def __init__[[ ]]*(self,[[ ]]*lexparam1)],[2])
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^[[ ]]*self.yylexer[[ ]]*=[[ ]]*YYLexer(lexparam1)])
+
+AT_DIRECTIVES_TEST1([
+%lex-param {lexparam1}
+%lex-param {lexparam2}
+],
+[lexparam1 lexparam2]
+)
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^  def __init__[[ ]]*(self,[[ ]]*lexparam1,[[ ]*]lexparam2)],[2])
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^[[ ]]*self.yylexer[[ ]]*=[[ ]]*YYLexer(lexparam1,[[ ]*]lexparam2)])
+
+
+AT_DIRECTIVES_TEST1([
+%lex-param {lexparam1}
+%lex-param {lexparam2}
+%parse-param {parseparam1}
+%parse-param {parseparam2}
+],
+[lexparam1 lexparam2],
+[parseparam1 parseparam2]
+)
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^  def __init__[[ ]]*(self,[[  ]]*lexparam1,[[ ]]*lexparam2[[ ]]*)])
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^    self.yylexer[[ ]]*=[[ ]]*YYLexer(lexparam1,[[ ]]*lexparam2)])
+AT_CHECK_PYTHON_GREP([YYParser.py],
+  [^  p[[ ]]*=[[ ]]*YYParser(lexparam1,[[ ]]*\\
+lexparam2,[[ ]]*parseparam1,[[ ]]*parseparam2)])
+
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+
+# -------------------------------- #
+# Python constructor init          #
+# -------------------------------- #
+
+AT_SETUP([Python constructor init])
+
+# AT_DIRECTIVES_TEST2([DIRECTIVES],[ACTION],[RUN],[MODULE])
+# --------------------------------------------------------------
+# Generate, compile and run a minimal parser built with the extra
+# DIRECTIVES.  ACTION replaces the default {pass} action of the
+# start rule; a non-empty RUN makes the generated module invoke
+# main(); MODULE names the generated parser module.
+
+m4_define([AT_DIRECTIVES_TEST2],[
+AT_BISON_OPTION_PUSHDEFS([$1])
+AT_DATA([]AT_MODULE([$4])[.y],[[
+%language "Python"
+%debug
+%token-table
+%token END "end"
+$1
+%%
+start: END
+]m4_if([$2],[],[{pass}],[$2])[
+;
+%%
+class YYLexer :
+]AT_MIN_LEXER_BODY([$2])[
+
+def main() :
+  p = ]AT_MODULE([$4])[(YYLexer())
+  p.parse()
+]m4_if([$3],[],[],[if __name__ == "__main__" : main()])[
+]])
+AT_BISON_CHECK([-o ]AT_MODULE([$4])[.py ]AT_MODULE([$4])[.y])
+AT_PYTHON_PARSER_CHECK([]AT_MODULE([$4])[.py],0,[stdout],[stderr])
+])
+
+AT_DIRECTIVES_TEST2([
+%initial-action {    # Watch the indent
+    sys.stdout.write("Initial action invoked\n")
+}],
+[],
+[1]
+)
+AT_CHECK_PYTHON_GREP([YYParser.py],
+    [^    sys.stdout.write("Initial action invoked\\n")])
+AT_CHECK_PYTHON_GREP([stdout],[^Initial action invoked$])
+
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+# ------------------------------------------ #
+# Python position, and location types.  #
+# ------------------------------------------ #
+
+AT_SETUP([Python position, and location types])
+
+AT_DIRECTIVES_TEST2([
+%define api.location.type {MyLoc}
+%define api.position.type {MyPos}
+%locations
+%code {
+]AT_POSITION_CLASS([MyPos])[
+}
+]
+)
+AT_CHECK_PYTHON_GREP([YYParser.py],[^class[[ ]*]MyLoc[[ ]*]:])
+AT_CHECK_PYTHON_GREP([YYParser.py],[^class[[ ]*]MyPos[[ ]*]:])
+AT_CHECK_PYTHON_GREP([YYParser.py],
+    [^[[ ]*]yylloc[[ ]*]=[[ ]*]MyLoc[[ ]*][(]],[2])
+AT_CHECK_PYTHON_GREP([YYParser.py],[^[[ ]*]MyPos (0,0)],[2])
+
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+
+# ------------------------------------------ #
+# Python value type enforcement              #
+# ------------------------------------------ #
+
+AT_SETUP([Python value type enforcement])
+
+AT_BISON_OPTION_PUSHDEFS
+
+# Test that value typing is checked by bison,
+# even though Python is dynamically typed.
+
+AT_DATA([YYParser.y],[[
+%language "Python"
+
+%token <object>END
+%token <File> FILE
+%type <File> file1 file2
+%type <object> start
+
+%%
+start: file1 ;
+file1: file2 END {$$=$2}
+file2: FILE {$$=$1}
+%%
+]])
+# This bison call should succeed (exit 0) but emit a type-clash warning
+AT_BISON_CHECK([-o YYParser.py YYParser.y],[0],[stdout],[stderr])
+# This test must immediately follow the AT_BISON_CHECK test 
+AT_CHECK_PYTHON_GREP([stderr],
+    [warning: type clash on default action: <object> != <File>])
+
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+# --------------------------------------------------#
+# Python syntax error handling without error token. #
+# --------------------------------------------------#
+
+AT_SETUP([Python syntax error handling without error token])
+
+AT_DATA([[YYParser.y]], [[
+%language "Python"
+%lex-param { s }
+%code lexer {
+  def __init__(self, s) :
+    self.Input = s
+    self.Position = 0
+  def yyerror(self,s):
+    sys.stderr.write (s+'\n')
+  def yylex(self) :
+    if (self.Position >= len(self.Input)) :
+      result = (EOF,None)
+    else :
+      # Remember: we must return an integer, not a character
+      # (= string of length 1)
+      result = (ord(self.Input[self.Position]),None)
+      self.Position += 1
+    return result
+}
+%%
+input:
+  'a' 'a'
+;
+%%
+def main ():
+  p = YYParser (sys.argv[1])
+  p.parse ();
+if __name__ == "__main__" :
+  main()
+]])
+
+AT_BISON_OPTION_PUSHDEFS
+
+AT_BISON_CHECK([[YYParser.y]])
+AT_DATA([experr],[syntax error
+])
+
+AT_PYTHON_PARSER_CHECK([[YYParser.py aa]], [[0]], [[]], [[]])
+AT_PYTHON_PARSER_CHECK([[YYParser.py ab]], [[0]], [[]], [[experr]])
+AT_PYTHON_PARSER_CHECK([[YYParser.py ba]], [[0]], [[]], [[experr]])
+
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+# --------------------------------------------------#
+# Python %code ordering test
+# --------------------------------------------------#
+
+# Check that the %code imports/top/requires/provides/etc
+# occur at the right places
+
+AT_SETUP([Python %code ordering test])
+
+AT_DATA([YYParser.y],[[
+%language "Python"
+%code imports {
+pass # %code imports
+}
+%code top {
+pass # %code top
+}
+%code requires{
+pass # %code requires
+}
+%code provides {
+pass # %code provides
+}
+%code {
+pass # %code 1
+}
+%code {
+pass # %code 2
+}
+%%
+start: 'a' 'b' 'c' {pass};
+%%
+pass # .y file tail
+]])
+
+AT_DATA([expout],[[pass # %code imports
+pass # %code top
+pass # %code requires
+pass # %code provides
+pass # %code 1
+pass # %code 2
+pass # .y file tail
+]])
+
+AT_BISON_CHECK([[YYParser.y]])
+
+# Verify by:
+# 1. visually examine the output
+# 2. use fgrep to get the lines in order.
+#    and verify that it matches expout
+
+AT_PYTHON_PARSER_CHECK([[YYParser.py]], [[0]], [[]], [[]])
+AT_CHECK([grep '^pass' YYParser.py],[ignore],[expout],[ignore-nolog])
+
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+##################################################
+# Push Parsing tests
+##################################################
+
+# The Python push parser tests are intended primarily
+# to verify that the sequence of states that the parser
+# traverses is the same as a pull parser would traverse.
+
+##################################################
+# Provide a way to generate data with and without push parsing
+# so it is possible to capture the output for comparison
+# (except the "trivial" tests).
+# Use "both" rather than "push" so we can also set it to "pull" to
+# get the "experr" data.
+
+m4_ifdef([PUSHPULLFLAG],[],
+  [m4_define([PUSHPULLFLAG],[-Dapi.push-pull=both])])
+
+##################################################
+
+AT_BANNER([[Python Push Parsing Tests]])
+
+# Define the calculator input.
+# Warning: if you change the input file
+# then the locations test file position numbers
+# may be incorrect and you will have
+# to modify that file as well.
+
+m4_define([AT_STD_INPUT],[[1 + 2 * 3 = 7
+1 + 2 * -3 = -5
+
+-1^2 = -1
+(-1)^2 = 1
+
+---1 = -1
+
+1 - 2 - 3 = -4
+1 - (2 - 3) = 2
+
+2^2^3 = 256
+(2^2)^3 = 64
+]])
+
+# Define a single copy of the trivial parser grammar.
+# This is missing main(), so two versions
+# are instantiated with different main() procedures.
+m4_define([AT_PYTHON_TRIVIAL_GRAMMAR],[
+%define parser_class_name {YYParser}
+%error-verbose
+%%
+
+start: 'a' 'b' 'c' ;
+
+%%
+])
+
+# Define common code to be included in
+# module for the trivial parser tests.
+m4_define([AT_PYTHON_TRIVIAL_COMMON],[
+class YYerror (Lexer) :
+  def __init__(self) : pass
+  def yylex (self) : return EOF
+  def yyerror (self, ]AT_LOCATION_IF([[location, ]])[msg) :
+    s = msg
+    ]AT_LOCATION_IF([[
+    if location is not None :
+      s = str(location) + ": " + s]])[
+    sys.stderr.write(s+'\n')
+
+def setup() :
+  global parser, teststate
+  yyerror = YYerror()
+  parser = YYParser(yyerror)
+  parser.setDebugLevel(1)
+  teststate = -1
+
+teststatename = ("YYACCEPT","YYABORT","YYERROR","UNKNOWN","YYPUSH_MORE")
+
+def check(teststate, expected, msg) :
+  sys.stderr.write("teststate="+teststatename[[teststate]]
+                       +" expected="+teststatename[[expected]] + '\n')
+  if (teststate == expected) :
+    return
+  sys.stderr.write("unexpected state: "+msg+'\n')
+  sys.exit(1)
+])
+
+m4_define([AT_PYTHON_TRIVIAL_PARSER],[
+
+AT_PYTHON_TRIVIAL_GRAMMAR
+
+AT_PYTHON_TRIVIAL_COMMON
+
+def main() :
+  setup()
+  teststate = parser.push_parse(ord('a'), None)
+  check(teststate,YYPUSH_MORE,"push_parse('a', None)")
+
+  setup()
+  teststate = parser.push_parse(ord('a'), None)
+  check(teststate,YYPUSH_MORE,"push_parse('a', None)")
+  teststate = parser.push_parse(ord('b'), None)
+  check(teststate,YYPUSH_MORE,"push_parse('b', None)")
+  teststate = parser.push_parse(ord('c'), None)
+  check(teststate,YYPUSH_MORE,"push_parse('c', None)")
+  teststate = parser.push_parse(ord('\0'), None)
+  check(teststate,YYACCEPT,"push_parse('\\0', None)")
+
+  # Reuse the parser instance and cause a failure
+  teststate = parser.push_parse(ord('b'), None)
+  check(teststate,YYABORT,"push_parse('b', None)")
+
+  sys.exit(0)
+])
+
+m4_define([AT_PYTHON_TRIVIAL_PARSER_INITIAL_ACTION],[
+AT_PYTHON_TRIVIAL_GRAMMAR
+
+AT_PYTHON_TRIVIAL_COMMON
+
+def main() :
+  setup()
+  teststate = parser.push_parse(ord('a'), None)
+  check(teststate,YYPUSH_MORE,"push_parse('a', None)")
+  teststate = parser.push_parse(ord('b'), None)
+  check(teststate,YYPUSH_MORE,"push_parse('b', None)")
+  teststate = parser.push_parse(ord('c'), None)
+  check(teststate,YYPUSH_MORE,"push_parse('c', None)")
+  teststate = parser.push_parse(ord('\0'), None)
+  check(teststate,YYACCEPT,"push_parse('\\0', None)")
+
+  sys.exit(0)
+])
+
+## ----------------------------------------------------- ##
+## Trivial Push Parser with api.push-pull verification.  ##
+## ----------------------------------------------------- ##
+
+AT_SETUP([Trivial Push Parser with api.push-pull verification])
+AT_BISON_OPTION_PUSHDEFS
+
+AT_DATA([[input.y]],
+[[%language "Python"
+]AT_PYTHON_TRIVIAL_PARSER[
+]])
+
+AT_DATA([input],[AT_STD_INPUT])
+
+# Verify that the proper procedure(s) are generated for each case.
+AT_BISON_CHECK([[-Dapi.push-pull=pull -o Main.py input.y]])
+AT_CHECK_PYTHON_GREP([[Main.py]],
+                   [[^  def  *parse *(self).*$]],
+                   [1])
+# If BISON_USE_PUSH_FOR_PULL is set, then we have one occurrence of
+# this function, otherwise it should not be there.
+AT_CHECK_PYTHON_GREP([[Main.py]],
+        [[^  def  *push_parse *(self, yylextoken, yylexval).*$]],
+        [${BISON_USE_PUSH_FOR_PULL-0}])
+
+AT_BISON_CHECK([[-Dapi.push-pull=both -o Main.py input.y]])
+AT_CHECK_PYTHON_GREP([[Main.py]],
+                   [[^  def  *parse  *(self).*$]],
+                   [1])
+AT_CHECK_PYTHON_GREP([[Main.py]],
+        [[^  def  *push_parse  *(self, yylextoken, yylexval).*$]],
+        [1])
+
+AT_BISON_CHECK([[-Dapi.push-pull=push -o Main.py input.y]])
+AT_CHECK_PYTHON_GREP([[Main.py]],
+                   [[^  parse (self).*$]],
+                   [0])
+AT_CHECK_PYTHON_GREP([[Main.py]],
+        [[^  def  *push_parse *(self, yylextoken, yylexval).*$]],
+        [1])
+
+AT_PYTHON_PARSER_CHECK([Main.py <input],0,[],[stderr-nolog])
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+
+## ------------------------------------------ ##
+## Trivial Push Parser with %initial-action.  ##
+## ------------------------------------------ ##
+
+AT_SETUP([Trivial Push Parser with %initial-action])
+AT_BISON_OPTION_PUSHDEFS
+AT_DATA([[input.y]],[[
+%language "Python"
+%initial-action {      # Watch the indent
+      sys.stdout.write("Initial action invoked\n")
+}
+]AT_PYTHON_TRIVIAL_PARSER_INITIAL_ACTION[
+if __name__ == "__main__" :
+  main()
+]])
+AT_BISON_OPTION_POPDEFS
+AT_BISON_CHECK([[-Dapi.push-pull=push -o Main.py input.y]])
+AT_CHECK_PYTHON_GREP([[Main.py]],dnl Watch the indent in the following expression
+[[^      sys.stdout.write("Initial action invoked\\n")]])
+AT_PYTHON_PARSER_CHECK([Main.py], 0, [stdout-nolog], [stderr-nolog])
+# Verify that initial action is called exactly once.
+AT_CHECK_PYTHON_GREP(
+        [[stdout]],
+        [[^Initial action invoked$]],
+        [1])
+AT_CLEANUP
+
+# Test that the states transitioned by the push parser are the
+# same as for the pull parser.  This test is assumed to work
+# if it produces the same partial trace of stack states as is
+# produced when using pull parsing.  The output is verbose,
+# but seems essential for verifying push parsing.
+
+AT_SETUP([Calc parser with api.push-pull both])
+AT_BISON_OPTION_PUSHDEFS
+
+# Compose pieces to build the actual .y file.
+AT_DATA([Calc.y],[[/* Infix notation calculator--calc */
+%language "Python"
+%name-prefix "Calc"
+%define parser_class_name {Calc}
+
+]AT_IMPORTS[
+
+]AT_PYTHON_CALC_BODY[
+
+%%
+
+]AT_TOKENIZER[  
+
+class UserLexer(Lexer) :
+]AT_LEXER_BODY[
+
+def main() :
+    lexer = UserLexer()
+    calc = Calc(lexer)
+    calc.setDebugLevel(1)
+    calc.parse()
+
+if __name__ == "__main__" :
+  main()
+
+]])
+
+# This data was captured from running a pull parser.
+AT_DATA([[expout]],[[Stack now 0
+Stack now 0 2
+Stack now 0 9
+Stack now 0 9 19
+Stack now 0 9 19 2
+Stack now 0 9 19 28
+Stack now 0 9 19 28 20
+Stack now 0 9 19 28 20 2
+Stack now 0 9 19 28 20 29
+Stack now 0 9 19 28
+Stack now 0 9
+Stack now 0 9 17
+Stack now 0 9 17 2
+Stack now 0 9 17 26
+Stack now 0 9
+Stack now 0 9 23
+Stack now 0 8
+Stack now 0 7
+Stack now 0 7 2
+Stack now 0 7 9
+Stack now 0 7 9 19
+Stack now 0 7 9 19 2
+Stack now 0 7 9 19 28
+Stack now 0 7 9 19 28 20
+Stack now 0 7 9 19 28 20 3
+Stack now 0 7 9 19 28 20 3 2
+Stack now 0 7 9 19 28 20 3 12
+Stack now 0 7 9 19 28 20 29
+Stack now 0 7 9 19 28
+Stack now 0 7 9
+Stack now 0 7 9 17
+Stack now 0 7 9 17 3
+Stack now 0 7 9 17 3 2
+Stack now 0 7 9 17 3 12
+Stack now 0 7 9 17 26
+Stack now 0 7 9
+Stack now 0 7 9 23
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 4
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 3
+Stack now 0 7 3 2
+Stack now 0 7 3 12
+Stack now 0 7 3 12 22
+Stack now 0 7 3 12 22 2
+Stack now 0 7 3 12 22 31
+Stack now 0 7 3 12
+Stack now 0 7 9
+Stack now 0 7 9 17
+Stack now 0 7 9 17 3
+Stack now 0 7 9 17 3 2
+Stack now 0 7 9 17 3 12
+Stack now 0 7 9 17 26
+Stack now 0 7 9
+Stack now 0 7 9 23
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 5
+Stack now 0 7 5 3
+Stack now 0 7 5 3 2
+Stack now 0 7 5 3 12
+Stack now 0 7 5 14
+Stack now 0 7 5 14 25
+Stack now 0 7 9
+Stack now 0 7 9 22
+Stack now 0 7 9 22 2
+Stack now 0 7 9 22 31
+Stack now 0 7 9
+Stack now 0 7 9 17
+Stack now 0 7 9 17 2
+Stack now 0 7 9 17 26
+Stack now 0 7 9
+Stack now 0 7 9 23
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 4
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 3
+Stack now 0 7 3 3
+Stack now 0 7 3 3 3
+Stack now 0 7 3 3 3 2
+Stack now 0 7 3 3 3 12
+Stack now 0 7 3 3 12
+Stack now 0 7 3 12
+Stack now 0 7 9
+Stack now 0 7 9 17
+Stack now 0 7 9 17 3
+Stack now 0 7 9 17 3 2
+Stack now 0 7 9 17 3 12
+Stack now 0 7 9 17 26
+Stack now 0 7 9
+Stack now 0 7 9 23
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 4
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 2
+Stack now 0 7 9
+Stack now 0 7 9 18
+Stack now 0 7 9 18 2
+Stack now 0 7 9 18 27
+Stack now 0 7 9
+Stack now 0 7 9 18
+Stack now 0 7 9 18 2
+Stack now 0 7 9 18 27
+Stack now 0 7 9
+Stack now 0 7 9 17
+Stack now 0 7 9 17 3
+Stack now 0 7 9 17 3 2
+Stack now 0 7 9 17 3 12
+Stack now 0 7 9 17 26
+Stack now 0 7 9
+Stack now 0 7 9 23
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 2
+Stack now 0 7 9
+Stack now 0 7 9 18
+Stack now 0 7 9 18 5
+Stack now 0 7 9 18 5 2
+Stack now 0 7 9 18 5 14
+Stack now 0 7 9 18 5 14 18
+Stack now 0 7 9 18 5 14 18 2
+Stack now 0 7 9 18 5 14 18 27
+Stack now 0 7 9 18 5 14
+Stack now 0 7 9 18 5 14 25
+Stack now 0 7 9 18 27
+Stack now 0 7 9
+Stack now 0 7 9 17
+Stack now 0 7 9 17 2
+Stack now 0 7 9 17 26
+Stack now 0 7 9
+Stack now 0 7 9 23
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 4
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 2
+Stack now 0 7 9
+Stack now 0 7 9 22
+Stack now 0 7 9 22 2
+Stack now 0 7 9 22 31
+Stack now 0 7 9 22 31 22
+Stack now 0 7 9 22 31 22 2
+Stack now 0 7 9 22 31 22 31
+Stack now 0 7 9 22 31
+Stack now 0 7 9
+Stack now 0 7 9 17
+Stack now 0 7 9 17 2
+Stack now 0 7 9 17 26
+Stack now 0 7 9
+Stack now 0 7 9 23
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 5
+Stack now 0 7 5 2
+Stack now 0 7 5 14
+Stack now 0 7 5 14 22
+Stack now 0 7 5 14 22 2
+Stack now 0 7 5 14 22 31
+Stack now 0 7 5 14
+Stack now 0 7 5 14 25
+Stack now 0 7 9
+Stack now 0 7 9 22
+Stack now 0 7 9 22 2
+Stack now 0 7 9 22 31
+Stack now 0 7 9
+Stack now 0 7 9 17
+Stack now 0 7 9 17 2
+Stack now 0 7 9 17 26
+Stack now 0 7 9
+Stack now 0 7 9 23
+Stack now 0 7 16
+Stack now 0 7
+Stack now 0 7 15
+]])
+
+AT_DATA([input],[AT_STD_INPUT])
+
+AT_BISON_CHECK([PUSHPULLFLAG [-o Calc.py Calc.y]])
+#Verify that this is a push parser.
+AT_CHECK_PYTHON_GREP([[Calc.py]],
+                   [[^  def  *push_parse_initialize *(self).*$]])
+# Capture stderr output for comparison purposes.
+AT_PYTHON_PARSER_CHECK([Calc.py <input], 0, [ignore-nolog], [stderr-nolog])
+# Extract the "Stack Now" lines from the error output,
+# send them to stdout (via the sed command) and compare to expout.
+# NOTE: because the target is "expout", this macro automatically
+# compares the output of the sed command with the contents of
+# the file "expout" (defined above).
+AT_CHECK([[sed -e '/^Stack now.*$/p' -e d ./stderr]],
+    [ignore],[stdout-nolog],[ignore-nolog])
+AT_CHECK([[cat stdout]], [], [expout], [ignore-nolog])
+AT_BISON_OPTION_POPDEFS
+AT_CLEANUP
+
+# This test looks for location reporting by looking
+# at the lexer output with locations enabled.
+# It defines a lexer that reports location info.
+AT_SETUP([Calc parser with %locations %code lexer and api.push-pull both])
+AT_BISON_OPTION_PUSHDEFS
+
+AT_BISON_OPTION_PUSHDEFS([%locations])
+
+AT_DATA([Calc.y],[[/* Infix notation calculator--calc.  */
+%language "Python"
+%name-prefix "Calc"
+%define parser_class_name {Calc}
+%locations
+
+%code imports {
+import os
+import token
+import re
+import math
+import string
+}
+
+]AT_PYTHON_CALC_BODY[
+
+%%
+]AT_TOKENIZER[  
+
+class YYLexer(Lexer) :
+]AT_LEXER_BODY[
+
+]AT_POSITION_CLASS[
+
+def main () :
+  calc = Calc(YYLexer())
+  calc.setDebugLevel(1)
+  calc.parse()
+
+if __name__ == "__main__" :
+  main()
+
+]])
+
+# Define the expected calculator output.
+# This should match the output from a pull parser.
+AT_DATA([output],[[total = 7
+total = -5
+total = -1
+total = 1
+total = -1
+total = -4
+total = 2
+total = 256
+total = 64
+]])
+
+AT_DATA([locations],[[Next token is token "number" (1.1: 1)
+Next token is token '+' (1.3: None)
+Next token is token "number" (1.5: 2)
+Next token is token '*' (1.7: None)
+Next token is token "number" (1.9: 3)
+Next token is token '=' (1.11: None)
+Next token is token '=' (1.11: None)
+Next token is token '=' (1.11: None)
+Next token is token "number" (1.13: 7)
+Next token is token '\n' (2.1: None)
+Next token is token '\n' (2.1: None)
+Next token is token "number" (2.2: 1)
+Next token is token '+' (2.4: None)
+Next token is token "number" (2.6: 2)
+Next token is token '*' (2.8: None)
+Next token is token '-' (2.10: None)
+Next token is token "number" (2.11: 3)
+Next token is token '=' (2.13: None)
+Next token is token '=' (2.13: None)
+Next token is token '=' (2.13: None)
+Next token is token '=' (2.13: None)
+Next token is token '-' (2.15: None)
+Next token is token "number" (2.16: 5)
+Next token is token '\n' (3.1: None)
+Next token is token '\n' (3.1: None)
+Next token is token '\n' (3.1: None)
+Next token is token '\n' (4.1: None)
+Next token is token '-' (4.2: None)
+Next token is token "number" (4.3: 1)
+Next token is token '^' (4.4: None)
+Next token is token "number" (4.5: 2)
+Next token is token '=' (4.7: None)
+Next token is token '=' (4.7: None)
+Next token is token '=' (4.7: None)
+Next token is token '-' (4.9: None)
+Next token is token "number" (4.10: 1)
+Next token is token '\n' (5.1: None)
+Next token is token '\n' (5.1: None)
+Next token is token '\n' (5.1: None)
+Next token is token '(' (5.2: None)
+Next token is token '-' (5.3: None)
+Next token is token "number" (5.4: 1)
+Next token is token ')' (5.5: None)
+Next token is token ')' (5.5: None)
+Next token is token '^' (5.6: None)
+Next token is token "number" (5.7: 2)
+Next token is token '=' (5.9: None)
+Next token is token '=' (5.9: None)
+Next token is token "number" (5.11: 1)
+Next token is token '\n' (6.1: None)
+Next token is token '\n' (6.1: None)
+Next token is token '\n' (7.1: None)
+Next token is token '-' (7.2: None)
+Next token is token '-' (7.3: None)
+Next token is token '-' (7.4: None)
+Next token is token "number" (7.5: 1)
+Next token is token '=' (7.7: None)
+Next token is token '=' (7.7: None)
+Next token is token '=' (7.7: None)
+Next token is token '=' (7.7: None)
+Next token is token '-' (7.9: None)
+Next token is token "number" (7.10: 1)
+Next token is token '\n' (8.1: None)
+Next token is token '\n' (8.1: None)
+Next token is token '\n' (8.1: None)
+Next token is token '\n' (9.1: None)
+Next token is token "number" (9.2: 1)
+Next token is token '-' (9.4: None)
+Next token is token "number" (9.6: 2)
+Next token is token '-' (9.8: None)
+Next token is token '-' (9.8: None)
+Next token is token "number" (9.10: 3)
+Next token is token '=' (9.12: None)
+Next token is token '=' (9.12: None)
+Next token is token '-' (9.14: None)
+Next token is token "number" (9.15: 4)
+Next token is token '\n' (10.1: None)
+Next token is token '\n' (10.1: None)
+Next token is token '\n' (10.1: None)
+Next token is token "number" (10.2: 1)
+Next token is token '-' (10.4: None)
+Next token is token '(' (10.6: None)
+Next token is token "number" (10.7: 2)
+Next token is token '-' (10.9: None)
+Next token is token "number" (10.11: 3)
+Next token is token ')' (10.12: None)
+Next token is token ')' (10.12: None)
+Next token is token '=' (10.14: None)
+Next token is token '=' (10.14: None)
+Next token is token "number" (10.16: 2)
+Next token is token '\n' (11.1: None)
+Next token is token '\n' (11.1: None)
+Next token is token '\n' (12.1: None)
+Next token is token "number" (12.2: 2)
+Next token is token '^' (12.3: None)
+Next token is token "number" (12.4: 2)
+Next token is token '^' (12.5: None)
+Next token is token "number" (12.6: 3)
+Next token is token '=' (12.8: None)
+Next token is token '=' (12.8: None)
+Next token is token '=' (12.8: None)
+Next token is token "number" (12.10: 256)
+Next token is token '\n' (13.1: None)
+Next token is token '\n' (13.1: None)
+Next token is token '(' (13.2: None)
+Next token is token "number" (13.3: 2)
+Next token is token '^' (13.4: None)
+Next token is token "number" (13.5: 2)
+Next token is token ')' (13.6: None)
+Next token is token ')' (13.6: None)
+Next token is token '^' (13.7: None)
+Next token is token "number" (13.8: 3)
+Next token is token '=' (13.10: None)
+Next token is token '=' (13.10: None)
+Next token is token "number" (13.12: 64)
+Next token is token '\n' (14.1: None)
+Next token is token '\n' (14.1: None)
+]])
+
+AT_DATA([input],[AT_STD_INPUT])
+
+AT_BISON_CHECK([PUSHPULLFLAG [-o Calc.py Calc.y]])
+AT_PYTHON_PARSER_CHECK([Calc.py <input],0,[stdout-nolog],[stderr-nolog])
+# Verify that this is a push parser
+AT_CHECK_PYTHON_GREP([[Calc.py]],
+                   [[^  def  *push_parse_initialize *(self).*$]])
+# Capture the  stdout and stderr output for comparison purposes.
+AT_PYTHON_PARSER_CHECK([Calc.py <input], 0, [stdout-nolog], [stderr-nolog])
+# 1. Check that the token locations are correct
+AT_CHECK([[cp -f ./locations ./expout]],
+    [ignore],[ignore-nolog],[ignore-nolog])
+AT_CHECK([[sed -e '/^Next token.*$/p' -e d ./stderr]],
+    [ignore],[expout],[ignore-nolog])
+# 2. Check that the calculator output matches that of a pull parser
+AT_CHECK([[rm -f ./expout; cp -f ./output ./expout]],
+    [ignore],[ignore-nolog],[ignore-nolog])
+AT_CHECK([[cat ./stdout]],[ignore],[expout],[ignore-nolog])
+AT_CLEANUP
-- 
1.8.4.rc0.1.g8f6a3e5



reply via email to

[Prev in Thread] Current Thread [Next in Thread]